diff --git a/shell/agents/AIShell.Ollama.Agent/AIShell.Ollama.Agent.csproj b/shell/agents/AIShell.Ollama.Agent/AIShell.Ollama.Agent.csproj
index 56068bee..d03afbaa 100644
--- a/shell/agents/AIShell.Ollama.Agent/AIShell.Ollama.Agent.csproj
+++ b/shell/agents/AIShell.Ollama.Agent/AIShell.Ollama.Agent.csproj
@@ -4,6 +4,7 @@
<TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<SuppressNETCoreSdkPreviewMessage>true</SuppressNETCoreSdkPreviewMessage>
+ <CopyLocalLockFileAssemblies>true</CopyLocalLockFileAssemblies>
<GenerateDependencyFile>false</GenerateDependencyFile>
@@ -15,6 +16,10 @@
<DebugType>None</DebugType>
+
+ <ItemGroup>
+ <PackageReference Include="OllamaSharp" />
+ </ItemGroup>
diff --git a/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs b/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs
index 36aa6140..480cf383 100644
--- a/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs
+++ b/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs
@@ -1,10 +1,33 @@
using System.Diagnostics;
+using System.Text;
+using System.Text.Json;
+using System.Text.RegularExpressions;
using AIShell.Abstraction;
+using OllamaSharp;
+using OllamaSharp.Models;
namespace AIShell.Ollama.Agent;
-public sealed class OllamaAgent : ILLMAgent
+public sealed partial class OllamaAgent : ILLMAgent
{
+ private bool _reloadSettings;
+ private bool _isDisposed;
+ private string _configRoot;
+ private Settings _settings;
+ private OllamaApiClient _client;
+ private GenerateRequest _request;
+ private FileSystemWatcher _watcher;
+
+ /// <summary>
+ /// The name of the setting file.
+ /// </summary>
+ private const string SettingFileName = "ollama.config.json";
+
+ /// <summary>
+ /// Gets the settings.
+ /// </summary>
+ internal Settings Settings => _settings;
+
/// <summary>
/// The name of the agent
/// </summary>
@@ -13,7 +36,7 @@ public sealed class OllamaAgent : ILLMAgent
/// <summary>
/// The description of the agent to be shown at start up
/// </summary>
- public string Description => "This is an AI assistant to interact with a language model running locally by utilizing the Ollama CLI tool. Be sure to follow all prerequisites in aka.ms/aish/ollama";
+ public string Description => "This is an AI assistant to interact with a language model running locally or remotely by utilizing the Ollama API. Be sure to follow all prerequisites in https://github.com/PowerShell/AIShell/tree/main/shell/agents/AIShell.Ollama.Agent";
/// <summary>
/// This is the company added to /like and /dislike verbiage for who the telemetry helps.
/// </summary>
@@ -30,19 +53,25 @@ public sealed class OllamaAgent : ILLMAgent
/// <summary>
/// These are any optional legal/additional information links you want to provide at start up
/// </summary>
- public Dictionary<string, string> LegalLinks { private set; get; }
-
- /// <summary>
- /// This is the chat service to call the API from
- /// </summary>
- private OllamaChatService _chatService;
+ public Dictionary<string, string> LegalLinks { private set; get; } = new(StringComparer.OrdinalIgnoreCase)
+ {
+ ["Ollama Docs"] = "https://github.com/ollama/ollama",
+ ["Prerequisites"] = "https://github.com/PowerShell/AIShell/tree/main/shell/agents/AIShell.Ollama.Agent"
+ };
/// <summary>
/// Dispose method to clean up the unmanaged resource of the chatService
/// </summary>
public void Dispose()
{
- _chatService?.Dispose();
+ if (_isDisposed)
+ {
+ return;
+ }
+
+ GC.SuppressFinalize(this);
+ _watcher?.Dispose();
+ _isDisposed = true;
}
///
@@ -51,13 +80,31 @@ public void Dispose()
/// <param name="config">Agent configuration for any configuration file and other settings</param>
public void Initialize(AgentConfig config)
{
- _chatService = new OllamaChatService();
+ _configRoot = config.ConfigurationRoot;
+
+ SettingFile = Path.Combine(_configRoot, SettingFileName);
+ _settings = ReadSettings();
+
+ if (_settings is null)
+ {
+ // Create the setting file with examples to serve as a template for user to update.
+ NewExampleSettingFile();
+ _settings = ReadSettings();
+ }
+
+ // Create Ollama request
+ _request = new GenerateRequest();
+
+ // Create Ollama client
+ _client = new OllamaApiClient(_settings.Endpoint);
- LegalLinks = new(StringComparer.OrdinalIgnoreCase)
+ // Watch for changes to the settings file
+ _watcher = new FileSystemWatcher(_configRoot, SettingFileName)
{
- ["Ollama Docs"] = "https://github.com/ollama/ollama",
- ["Prerequisites"] = "https://aka.ms/ollama/readme"
+ NotifyFilter = NotifyFilters.LastWrite,
+ EnableRaisingEvents = true,
};
+ _watcher.Changed += OnSettingFileChange;
}
///
@@ -68,7 +115,7 @@ public void Initialize(AgentConfig config)
/// <summary>
/// Gets the path to the setting file of the agent.
/// </summary>
- public string SettingFile { private set; get; } = null;
+ public string SettingFile { private set; get; }
/// <summary>
/// Gets a value indicating whether the agent accepts a specific user action feedback.
/// </summary>
@@ -87,7 +134,19 @@ public void OnUserAction(UserActionPayload actionPayload) {}
/// <summary>
/// Refresh the current chat by starting a new chat session.
/// This method allows an agent to reset chat states, interact with user for authentication, print welcome message, and more.
/// </summary>
- public Task RefreshChatAsync(IShell shell, bool force) => Task.CompletedTask;
+ public Task RefreshChatAsync(IShell shell, bool force)
+ {
+ if (force)
+ {
+ // Reload the setting file if needed.
+ ReloadSettings();
+
+ // Reset context
+ _request.Context = null;
+ }
+
+ return Task.CompletedTask;
+ }
/// <summary>
/// Main chat function that takes the user's input and passes it to the LLM and renders it.
/// </summary>
@@ -100,26 +159,171 @@ public async Task<bool> ChatAsync(string input, IShell shell)
// Get the shell host
IHost host = shell.Host;
- // get the cancellation token
+ // Get the cancellation token
CancellationToken token = shell.CancellationToken;
- if (Process.GetProcessesByName("ollama").Length is 0)
+ // Reload the setting file if needed.
+ ReloadSettings();
+
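+ // Probe for a local "ollama" process only when the configured endpoint targets this machine; a remote endpoint cannot be checked this way.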
+ if (IsLocalHost().IsMatch(_client.Uri.Host) && Process.GetProcessesByName("ollama").Length is 0)
{
- host.RenderFullResponse("Please be sure the Ollama is installed and server is running. Check all the prerequisites in the README of this agent are met.");
+ host.WriteErrorLine("Please be sure Ollama is installed and the server is running. Check that all the prerequisites in the README of this agent are met.");
return false;
}
- ResponseData ollamaResponse = await host.RunWithSpinnerAsync(
- status: "Thinking ...",
- func: async context => await _chatService.GetChatResponseAsync(context, input, token)
- ).ConfigureAwait(false);
+ // Prepare request
+ _request.Prompt = input;
+ _request.Model = _settings.Model;
+ _request.Stream = _settings.Stream;
- if (ollamaResponse is not null)
+ try
{
- // render the content
- host.RenderFullResponse(ollamaResponse.response);
+ if (_request.Stream)
+ {
+ // Wait for the stream with the spinner running
+ var ollamaStreamEnumerator = await host.RunWithSpinnerAsync(
+ status: "Thinking ...",
+ func: async () =>
+ {
+ // Start generating the stream asynchronously and return an enumerator
+ var enumerator = _client.GenerateAsync(_request, token).GetAsyncEnumerator(token);
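+ // Advance to the first response here so the spinner keeps running until the model starts replying.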
+ if (await enumerator.MoveNextAsync().ConfigureAwait(false))
+ {
+ return enumerator;
+ }
+ return null;
+ }
+ ).ConfigureAwait(false);
+
+ if (ollamaStreamEnumerator is not null)
+ {
+ using IStreamRender streamingRender = host.NewStreamRender(token);
+
+ do
+ {
+ var currentStream = ollamaStreamEnumerator.Current;
+
+ // Update the render with stream response
+ streamingRender.Refresh(currentStream.Response);
+
+ if (currentStream.Done)
+ {
+ // If the stream is complete, update the request context with the last stream context
+ var ollamaLastStream = (GenerateDoneResponseStream)currentStream;
+ _request.Context = ollamaLastStream.Context;
+ }
+ }
+ while (await ollamaStreamEnumerator.MoveNextAsync().ConfigureAwait(false));
+ }
+ }
+ else
+ {
+ // Build single response with spinner
+ var ollamaResponse = await host.RunWithSpinnerAsync(
+ status: "Thinking ...",
+ func: async () => { return await _client.GenerateAsync(_request, token).StreamToEndAsync(); }
+ ).ConfigureAwait(false);
+
+ // Update request context
+ _request.Context = ollamaResponse.Context;
+
+ // Render the full response
+ host.RenderFullResponse(ollamaResponse.Response);
+ }
}
-
+ catch (OperationCanceledException)
+ {
+ // Ignore the cancellation exception.
+ }
+ catch (HttpRequestException e)
+ {
+ host.WriteErrorLine(e.Message);
+ host.WriteErrorLine($"Ollama model: \"{_settings.Model}\"");
+ host.WriteErrorLine($"Ollama endpoint: \"{_settings.Endpoint}\"");
+ host.WriteErrorLine($"Ollama settings: \"{SettingFile}\"");
+ }
+
return true;
}
+
+ private void ReloadSettings()
+ {
+ if (_reloadSettings)
+ {
+ _reloadSettings = false;
+ var settings = ReadSettings();
+ if (settings is null)
+ {
+ return;
+ }
+
+ _settings = settings;
+
+ // Check if the endpoint has changed
+ bool isEndpointChanged = !string.Equals(_settings.Endpoint, _client.Uri.OriginalString, StringComparison.OrdinalIgnoreCase);
+
+ if (isEndpointChanged)
+ {
+ // Create a new client with updated endpoint
+ _client = new OllamaApiClient(_settings.Endpoint);
+ }
+ }
+ }
+
+ private Settings ReadSettings()
+ {
+ Settings settings = null;
+ FileInfo file = new(SettingFile);
+
+ if (file.Exists)
+ {
+ try
+ {
+ using var stream = file.OpenRead();
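+ // The serializer context (see Settings.cs) skips comments and allows trailing commas, so the commented example file parses as-is.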
+ var data = JsonSerializer.Deserialize(stream, SourceGenerationContext.Default.ConfigData);
+ settings = new Settings(data);
+ }
+ catch (Exception e)
+ {
+ throw new InvalidDataException($"Parsing settings from '{SettingFile}' failed with the following error: {e.Message}", e);
+ }
+ }
+
+ return settings;
+ }
+
+ private void OnSettingFileChange(object sender, FileSystemEventArgs e)
+ {
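+ // Only mark the settings as stale here; they are re-read on the next chat or forced refresh.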
+ if (e.ChangeType is WatcherChangeTypes.Changed)
+ {
+ _reloadSettings = true;
+ }
+ }
+
+ private void NewExampleSettingFile()
+ {
+ string SampleContent = """
+ {
+ // To use Ollama API service:
+ // 1. Install Ollama: `winget install Ollama.Ollama`
+ // 2. Start Ollama API server: `ollama serve`
+ // 3. Install Ollama model: `ollama pull phi3`
+
+ // Declare Ollama model
+ "Model": "phi3",
+ // Declare Ollama endpoint
+ "Endpoint": "http://localhost:11434",
+ // Enable Ollama streaming
+ "Stream": false
+ }
+ """;
+ File.WriteAllText(SettingFile, SampleContent, Encoding.UTF8);
+ }
+
+ /// <summary>
+ /// Defines a generated regular expression to match localhost addresses
+ /// "localhost", "127.0.0.1" and "[::1]" with case-insensitivity.
+ /// </summary>
+ [GeneratedRegex("^(localhost|127\\.0\\.0\\.1|\\[::1\\])$", RegexOptions.IgnoreCase)]
+ internal partial Regex IsLocalHost();
}
diff --git a/shell/agents/AIShell.Ollama.Agent/OllamaChatService.cs b/shell/agents/AIShell.Ollama.Agent/OllamaChatService.cs
deleted file mode 100644
index 8809eff3..00000000
--- a/shell/agents/AIShell.Ollama.Agent/OllamaChatService.cs
+++ /dev/null
@@ -1,85 +0,0 @@
-using System.Text;
-using System.Text.Json;
-
-using AIShell.Abstraction;
-
-namespace AIShell.Ollama.Agent;
-
-internal class OllamaChatService : IDisposable
-{
- /// <summary>
- /// Ollama endpoint to call to generate a response
- /// </summary>
- internal const string Endpoint = "http://localhost:11434/api/generate";
-
- /// <summary>
- /// Http client
- /// </summary>
- private readonly HttpClient _client;
-
- /// <summary>
- /// Initialization method to initialize the http client
- /// </summary>
- internal OllamaChatService()
- {
- _client = new HttpClient();
- }
-
- /// <summary>
- /// Dispose of the http client
- /// </summary>
- public void Dispose()
- {
- _client.Dispose();
- }
-
- /// <summary>
- /// Preparing chat with data to be sent
- /// </summary>
- /// <param name="input">The user input from the chat experience</param>
- /// <returns>The HTTP request message</returns>
- private HttpRequestMessage PrepareForChat(string input)
- {
- // Main data to send to the endpoint
- var requestData = new Query
- {
- model = "phi3",
- prompt = input,
- stream = false
- };
-
- var json = JsonSerializer.Serialize(requestData);
-
- var data = new StringContent(json, Encoding.UTF8, "application/json");
- var request = new HttpRequestMessage(HttpMethod.Post, Endpoint) { Content = data };
-
- return request;
- }
-
- /// <summary>
- /// Getting the chat response async
- /// </summary>
- /// <param name="context">Interface for the status context used when displaying a spinner.</param>
- /// <param name="input">The user input from the chat experience</param>
- /// <param name="cancellationToken">The cancellation token to exit out of request</param>
- /// <returns>Response data from the API call</returns>
- internal async Task<ResponseData> GetChatResponseAsync(IStatusContext context, string input, CancellationToken cancellationToken)
- {
- try
- {
- HttpRequestMessage request = PrepareForChat(input);
- HttpResponseMessage response = await _client.SendAsync(request, cancellationToken);
- response.EnsureSuccessStatusCode();
-
- context?.Status("Receiving Payload ...");
- var content = await response.Content.ReadAsStreamAsync(cancellationToken);
- return JsonSerializer.Deserialize<ResponseData>(content);
- }
- catch (OperationCanceledException)
- {
- // Operation was cancelled by user.
- }
-
- return null;
- }
-}
diff --git a/shell/agents/AIShell.Ollama.Agent/OllamaSchema.cs b/shell/agents/AIShell.Ollama.Agent/OllamaSchema.cs
deleted file mode 100644
index 7dcd42c7..00000000
--- a/shell/agents/AIShell.Ollama.Agent/OllamaSchema.cs
+++ /dev/null
@@ -1,34 +0,0 @@
-namespace AIShell.Ollama.Agent;
-
-// Query class for the data to send to the endpoint
-internal class Query
-{
- public string prompt { get; set; }
- public string model { get; set; }
- public bool stream { get; set; }
-}
-
-// Response data schema
-internal class ResponseData
-{
- public string model { get; set; }
- public string created_at { get; set; }
- public string response { get; set; }
- public bool done { get; set; }
- public string done_reason { get; set; }
- public int[] context { get; set; }
- public double total_duration { get; set; }
- public long load_duration { get; set; }
- public int prompt_eval_count { get; set; }
- public int prompt_eval_duration { get; set; }
- public int eval_count { get; set; }
- public long eval_duration { get; set; }
-}
-
-internal class OllamaResponse
-{
- public int Status { get; set; }
- public string Error { get; set; }
- public string Api_version { get; set; }
- public ResponseData Data { get; set; }
-}
diff --git a/shell/agents/AIShell.Ollama.Agent/README.md b/shell/agents/AIShell.Ollama.Agent/README.md
index 559d504a..bd8e91e1 100644
--- a/shell/agents/AIShell.Ollama.Agent/README.md
+++ b/shell/agents/AIShell.Ollama.Agent/README.md
@@ -1,28 +1,30 @@
# Ollama Plugin
-This agent is used to interact with a language model running locally by utilizing the Ollama API. Before using
-this agent you need to have Ollama installed and running.
+This agent is used to interact with a language model running locally or remotely by utilizing the Ollama API.
+Before using this agent locally, you need to have Ollama installed and running.
-## Pre-requisites to using the agent
+## Pre-requisites to using the agent locally
-- Install [Ollama](https://github.com/ollama/ollama)
-- Install a [Ollama model](https://github.com/ollama/ollama?tab=readme-ov-file#model-library), we
- suggest using the `phi3` model as it is set as the default model in the code
+- Install [Ollama](https://github.com/ollama/ollama)
+- Install an [Ollama model](https://github.com/ollama/ollama?tab=readme-ov-file#model-library); we suggest the `phi3` model, as it is the default in the example settings file
- [Start the Ollama API server](https://github.com/ollama/ollama?tab=readme-ov-file#start-ollama)
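+
+To use the agent with a remote Ollama instance, make sure the server is reachable from your machine (for example, by binding it to a non-loopback address via the `OLLAMA_HOST` environment variable, as described in the Ollama docs) and point the `Endpoint` setting at it.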
## Configuration
-Currently to change the model you will need to modify the query in the code in the
-`OllamaChatService` class. The default model is `phi3`.
-
-The default endpoint is `http://localhost:11434/api/generate` with `11434` being the default port. This can be changed in the code
-and eventually will be added to a configuration file.
-
-## Known Limitations
-
-- There is no history shared across queries so the model will not be able to remember previous
- queries
-- Streaming is currently not supported if you change the stream value to `true` in the data to send
- to the API it will not work
-- Configuration is currently hard coded in the code and will be moved to a configuration file in the
- future
\ No newline at end of file
+To configure the agent, run `/agent config ollama` to open the setting file in your default editor, and then update the file based on the following example.
+
+```jsonc
+{
+ // To use Ollama API service:
+ // 1. Install Ollama: `winget install Ollama.Ollama`
+ // 2. Start Ollama API server: `ollama serve`
+ // 3. Install Ollama model: `ollama pull phi3`
+
+ // Declare Ollama model
+ "Model": "phi3",
+ // Declare Ollama endpoint
+ "Endpoint": "http://localhost:11434",
+ // Enable Ollama streaming
+ "Stream": false
+}
+```
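+
+Set `"Stream"` to `true` to render the response incrementally as it is generated, or leave it `false` to render the complete response at once. Note that the agent only checks for a running local `ollama` process when `"Endpoint"` points at a localhost address; remote endpoints are used as-is.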
diff --git a/shell/agents/AIShell.Ollama.Agent/Settings.cs b/shell/agents/AIShell.Ollama.Agent/Settings.cs
new file mode 100644
index 00000000..11ebd8de
--- /dev/null
+++ b/shell/agents/AIShell.Ollama.Agent/Settings.cs
@@ -0,0 +1,49 @@
+using System.Text.Json;
+using System.Text.Json.Serialization;
+
+namespace AIShell.Ollama.Agent;
+
+internal class Settings
+{
+ public string Model { get; }
+ public string Endpoint { get; }
+ public bool Stream { get; }
+
+ public Settings(ConfigData configData)
+ {
+ // Validate Model and Endpoint for null or empty values
+ if (string.IsNullOrWhiteSpace(configData.Model))
+ {
+ throw new ArgumentException("\"Model\" key is missing.");
+ }
+
+ if (string.IsNullOrWhiteSpace(configData.Endpoint))
+ {
+ throw new ArgumentException("\"Endpoint\" key is missing.");
+ }
+
+ Model = configData.Model;
+ Endpoint = configData.Endpoint;
+ Stream = configData.Stream;
+ }
+}
+
+internal class ConfigData
+{
+ public string Model { get; set; }
+ public string Endpoint { get; set; }
+ public bool Stream { get; set; }
+}
+
+/// <summary>
+/// Use source generation to serialize and deserialize the setting file.
+/// Both metadata-based and serialization-optimization modes are used to gain the best performance.
+/// </summary>
+[JsonSourceGenerationOptions(
+ WriteIndented = true,
+ AllowTrailingCommas = true,
+ PropertyNameCaseInsensitive = true,
+ ReadCommentHandling = JsonCommentHandling.Skip,
+ UseStringEnumConverter = true)]
+[JsonSerializable(typeof(ConfigData))]
+internal partial class SourceGenerationContext : JsonSerializerContext { }
diff --git a/shell/nuget.config b/shell/nuget.config
index a10ce9b3..f78f7db6 100644
--- a/shell/nuget.config
+++ b/shell/nuget.config
@@ -1,4 +1,4 @@
-
+