Skip to content

Commit

Permalink
Drop callback support in favor of IAsyncEnumerable<>
Browse files Browse the repository at this point in the history
Fixes #63
+semver: breaking
  • Loading branch information
awaescher committed Aug 26, 2024
1 parent 7d26390 commit d8d5b0f
Show file tree
Hide file tree
Showing 32 changed files with 641 additions and 1,017 deletions.
19 changes: 6 additions & 13 deletions OllamaApiConsole/Demos/ChatConsole.cs
Original file line number Diff line number Diff line change
@@ -1,22 +1,17 @@
using OllamaSharp;
using OllamaSharp.Models.Chat;
using Spectre.Console;

public class ChatConsole : OllamaConsole
{
public ChatConsole(IOllamaApiClient ollama)
: base(ollama)
{
}
namespace OllamaApiConsole.Demos;

public class ChatConsole(IOllamaApiClient ollama) : OllamaConsole(ollama)
{
public override async Task Run()
{
AnsiConsole.Write(new Rule("Chat demo").LeftJustified());
AnsiConsole.WriteLine();

Ollama.SelectedModel = await SelectModel("Select a model you want to chat with:");


if (!string.IsNullOrEmpty(Ollama.SelectedModel))
{
var keepChatting = true;
Expand All @@ -30,10 +25,7 @@ public override async Task Run()
AnsiConsole.MarkupLine("[gray]Type \"[red]/new[/]\" to start over.[/]");
AnsiConsole.MarkupLine("[gray]Type \"[red]/exit[/]\" to leave the chat.[/]");

var chat = Ollama.Chat(stream => AnsiConsole.MarkupInterpolated($"[cyan]{stream?.Message.Content ?? ""}[/]"));

if (!string.IsNullOrEmpty(systemPrompt))
chat.SetMessages([new Message { Role = ChatRole.System, Content = systemPrompt }]);
var chat = new Chat(Ollama, systemPrompt);

string message;

Expand All @@ -54,7 +46,8 @@ public override async Task Run()
break;
}

await chat.Send(message);
await foreach (var answerToken in chat.Send(message))
AnsiConsole.MarkupInterpolated($"[cyan]{answerToken}[/]");

AnsiConsole.WriteLine();
} while (!string.IsNullOrEmpty(message));
Expand Down
39 changes: 18 additions & 21 deletions OllamaApiConsole/Demos/ImageChatConsole.cs
Original file line number Diff line number Diff line change
@@ -1,23 +1,18 @@
using System.Text.RegularExpressions;
using OllamaSharp;
using OllamaSharp.Models.Chat;
using Spectre.Console;

public class ImageChatConsole : OllamaConsole
{
public ImageChatConsole(IOllamaApiClient ollama)
: base(ollama)
{
}
namespace OllamaApiConsole.Demos;

public partial class ImageChatConsole(IOllamaApiClient ollama) : OllamaConsole(ollama)
{
public override async Task Run()
{
AnsiConsole.Write(new Rule("Image chat demo").LeftJustified());
AnsiConsole.WriteLine();

Ollama.SelectedModel = await SelectModel("Select a model you want to chat with:");


if (!string.IsNullOrEmpty(Ollama.SelectedModel))
{
var keepChatting = true;
Expand All @@ -33,10 +28,7 @@ public override async Task Run()
AnsiConsole.MarkupLine("[gray]Type \"[red]/new[/]\" to start over.[/]");
AnsiConsole.MarkupLine("[gray]Type \"[red]/exit[/]\" to leave the chat.[/]");

var chat = Ollama.Chat(stream => AnsiConsole.MarkupInterpolated($"[cyan]{stream?.Message.Content ?? ""}[/]"));

if (!string.IsNullOrEmpty(systemPrompt))
chat.SetMessages([new Message { Role = ChatRole.System, Content = systemPrompt }]);
var chat = new Chat(Ollama, systemPrompt);

string message;

Expand All @@ -57,15 +49,15 @@ public override async Task Run()
break;
}

var imageMatches = Regex.Matches(message, "{([^}]*)}").Select(m => m.Value);
var imageMatches = ImagePathRegex().Matches(message).Where(m => !string.IsNullOrEmpty(m.Value));
var imageCount = imageMatches.Count();
var hasImages = imageCount > 0;

if (hasImages)
{
byte[][] imageBytes;
var imagePathsWithCurlyBraces = Regex.Matches(message, "{([^}]*)}").Select(m => m.Value);
var imagePaths = Regex.Matches(message, "{([^}]*)}").Select(m => m.Groups[1].Value);
var imagePathsWithCurlyBraces = imageMatches.Select(m => m.Value);
var imagePaths = imageMatches.Select(m => m.Groups[1].Value);

try
{
Expand All @@ -90,9 +82,9 @@ public override async Task Run()
AnsiConsole.MarkupLineInterpolated($"[silver]{Markup.Escape(message)}[/]");
AnsiConsole.WriteLine();
if (imageCount == 1)
AnsiConsole.MarkupLineInterpolated($"[gray]{"Here is the image, that is sent to the chat model in addition to your message."}[/]");
AnsiConsole.MarkupLine("[gray]Here is the image, that is sent to the chat model in addition to your message.[/]");
else
AnsiConsole.MarkupLineInterpolated($"[gray]{"Here are the images, that are sent to the chat model in addition to your message."}[/]");
AnsiConsole.MarkupLine("[gray]Here are the images, that are sent to the chat model in addition to your message.[/]");
AnsiConsole.WriteLine();

foreach (var consoleImage in imageBytes.Select(bytes => new CanvasImage(bytes)))
Expand All @@ -103,21 +95,26 @@ public override async Task Run()

AnsiConsole.WriteLine();
if (imageCount == 1)
AnsiConsole.MarkupLineInterpolated($"[gray]{"The image was scaled down for the console only, the model gets the full version."}[/]");
AnsiConsole.MarkupLine("[gray]The image was scaled down for the console only, the model gets the full version.[/]");
else
AnsiConsole.MarkupLineInterpolated($"[gray]{"The images were scaled down for the console only, the model gets full versions."}[/]");
AnsiConsole.MarkupLine("[gray]The images were scaled down for the console only, the model gets full versions.[/]");
AnsiConsole.WriteLine();

await chat.Send(message, [], imagesBase64);
await foreach (var answerToken in chat.Send(message, [], imagesBase64))
AnsiConsole.MarkupInterpolated($"[cyan]{answerToken}[/]");
}
else
{
await chat.Send(message);
await foreach (var answerToken in chat.Send(message))
AnsiConsole.MarkupInterpolated($"[cyan]{answerToken}[/]");
}

AnsiConsole.WriteLine();
} while (!string.IsNullOrEmpty(message));
} while (keepChatting);
}
}

[GeneratedRegex("{([^}]*)}")]
private static partial Regex ImagePathRegex();
}
33 changes: 18 additions & 15 deletions OllamaApiConsole/Demos/ModelManagerConsole.cs
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,10 @@
using OllamaSharp.Models;
using Spectre.Console;

public class ModelManagerConsole : OllamaConsole
{
public ModelManagerConsole(IOllamaApiClient ollama)
: base(ollama)
{
}
namespace OllamaApiConsole.Demos;

public class ModelManagerConsole(IOllamaApiClient ollama) : OllamaConsole(ollama)
{
public override async Task Run()
{
AnsiConsole.Write(new Rule("Chat demo").LeftJustified());
Expand All @@ -23,7 +20,7 @@ public override async Task Run()
new SelectionPrompt<string>()
.PageSize(10)
.Title("What do you want to do?")
.AddChoices(["..", "Copy model", "Create model", "Delete model", "Generate embeddings", "Show model information", "List local models", "Pull model", "Push model"]));
.AddChoices("..", "Copy model", "Create model", "Delete model", "Generate embeddings", "Show model information", "List local models", "Pull model", "Push model"));

switch (command)
{
Expand Down Expand Up @@ -82,7 +79,8 @@ private async Task CreateModel()
{
var createName = ReadInput("Enter a name for your new model:");
var createModelFileContent = ReadMultilineInput("Enter the contents for the model file:", "[gray]See [/][blue][link]https://ollama.ai/library[/][/][gray] for available models[/]");
await Ollama.CreateModel(createName, createModelFileContent, status => AnsiConsole.MarkupLineInterpolated($"{status.Status}"));
await foreach (var status in Ollama.CreateModel(createName, createModelFileContent))
AnsiConsole.MarkupLineInterpolated($"{status?.Status ?? ""}");
}

private async Task DeleteModel()
Expand All @@ -97,10 +95,10 @@ private async Task GenerateEmbedding()
var embedModel = await SelectModel("Which model should be used to create embeddings?");
if (!string.IsNullOrEmpty(embedModel))
{
var embedContent = ReadInput("Enter a string to to embed:");
var embedContent = ReadMultilineInput("Enter a string to to embed:");
Ollama.SelectedModel = embedModel;
var embedResponse = await Ollama.GenerateEmbeddings(embedContent);
AnsiConsole.MarkupLineInterpolated($"[cyan]{string.Join(", ", embedResponse.Embeddings.First())}[/]");
var embedResponse = await Ollama.Embed(embedContent);
AnsiConsole.MarkupLineInterpolated($"[cyan]{string.Join(", ", embedResponse.Embeddings[0])}[/]");
}
}

Expand All @@ -109,7 +107,7 @@ private async Task ShowModelInformation()
var infoModel = await SelectModel("Which model do you want to retrieve information for?");
if (!string.IsNullOrEmpty(infoModel))
{
var infoResponse = await Ollama.ShowModelInformation(infoModel);
var infoResponse = await Ollama.ShowModel(infoModel);
PropertyConsoleRenderer.Render(infoResponse);
}
}
Expand All @@ -128,19 +126,24 @@ private async Task PullModel()
await AnsiConsole.Progress().StartAsync(async context =>
{
ProgressTask? task = null;
await Ollama.PullModel(pullModel, status => UpdateProgressTaskByStatus(context, ref task, status));
await foreach (var status in Ollama.PullModel(pullModel))
UpdateProgressTaskByStatus(context, ref task, status);
task?.StopTask();
});
}

private async Task PushModel()
{
var pushModel = ReadInput("Which model do you want to push?");
await Ollama.PushModel("mattw/pygmalion:latest", status => AnsiConsole.MarkupLineInterpolated($"{status.Status}"));
await foreach (var status in Ollama.PushModel(pushModel))
AnsiConsole.MarkupLineInterpolated($"{status?.Status ?? ""}");
}

private void UpdateProgressTaskByStatus(ProgressContext context, ref ProgressTask? task, PullModelResponse modelResponse)
private static void UpdateProgressTaskByStatus(ProgressContext context, ref ProgressTask? task, PullModelResponse? modelResponse)
{
if (modelResponse is null)
return;

if (modelResponse.Status != task?.Description)
{
task?.StopTask();
Expand Down
23 changes: 9 additions & 14 deletions OllamaApiConsole/Demos/ToolConsole.cs
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,10 @@
using OllamaSharp.Models.Exceptions;
using Spectre.Console;

public class ToolConsole : OllamaConsole
{
public ToolConsole(IOllamaApiClient ollama)
: base(ollama)
{
}
namespace OllamaApiConsole.Demos;

public class ToolConsole(IOllamaApiClient ollama) : OllamaConsole(ollama)
{
public override async Task Run()
{
AnsiConsole.Write(new Rule("Tool demo").LeftJustified());
Expand All @@ -32,10 +29,7 @@ public override async Task Run()
AnsiConsole.MarkupLine("[gray]Type \"[red]/new[/]\" to start over.[/]");
AnsiConsole.MarkupLine("[gray]Type \"[red]/exit[/]\" to leave the chat.[/]");

var chat = Ollama.Chat(stream => AnsiConsole.MarkupInterpolated($"[cyan]{stream?.Message.Content ?? ""}[/]"));

if (!string.IsNullOrEmpty(systemPrompt))
chat.SetMessages([new Message { Role = ChatRole.System, Content = systemPrompt }]);
var chat = new Chat(Ollama, systemPrompt);

string message;

Expand All @@ -58,7 +52,8 @@ public override async Task Run()

try
{
await chat.SendAs(ChatRole.User, message, GetTools());
await foreach (var answerToken in chat.Send(message, GetTools()))
AnsiConsole.MarkupInterpolated($"[cyan]{answerToken}[/]");
}
catch (OllamaException ex)
{
Expand All @@ -70,11 +65,11 @@ public override async Task Run()
{
AnsiConsole.MarkupLine("\n[purple]Tools used:[/]");

foreach (var tool in toolCalls.Where(t => t.Function != null))
foreach (var function in toolCalls.Where(t => t.Function != null).Select(t => t.Function))
{
AnsiConsole.MarkupLineInterpolated($" - [purple]{tool.Function!.Name}[/]");
AnsiConsole.MarkupLineInterpolated($" - [purple]{function!.Name}[/]");

foreach (var argument in tool.Function.Arguments ?? [])
foreach (var argument in function.Arguments ?? [])
AnsiConsole.MarkupLineInterpolated($" - [purple]{argument.Key}[/]: [purple]{argument.Value}[/]");
}
}
Expand Down
13 changes: 5 additions & 8 deletions OllamaApiConsole/OllamaConsole.cs
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,11 @@
using OllamaSharp;
using Spectre.Console;

public abstract class OllamaConsole
{
public OllamaConsole(IOllamaApiClient ollama)
{
Ollama = ollama ?? throw new ArgumentNullException(nameof(ollama));
}
namespace OllamaApiConsole;

public IOllamaApiClient Ollama { get; }
public abstract class OllamaConsole(IOllamaApiClient ollama)
{
public IOllamaApiClient Ollama { get; } = ollama ?? throw new ArgumentNullException(nameof(ollama));

public abstract Task Run();

Expand Down Expand Up @@ -51,7 +48,7 @@ protected async Task<string> SelectModel(string prompt, string additionalInforma
var modelsWithBackChoice = models.OrderBy(m => m.Name).Select(m => m.Name).ToList();
if (modelsWithBackChoice.Count == 1)
{
return modelsWithBackChoice.First();
return modelsWithBackChoice[0];
}
else
{
Expand Down
4 changes: 3 additions & 1 deletion OllamaApiConsole/Program.cs
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
using OllamaApiConsole;
using OllamaApiConsole.Demos;
using OllamaSharp;
using Spectre.Console;

Expand Down Expand Up @@ -80,7 +82,7 @@
}
catch (Exception ex)
{
AnsiConsole.MarkupLine($"An error occurred. Press [blue][Return][/] to start over.");
AnsiConsole.MarkupLine("An error occurred. Press [blue]Return[/] to start over.");
AnsiConsole.MarkupLineInterpolated($"[red]{Markup.Escape(ex.Message)}[/]");
Console.ReadLine();
}
Expand Down
8 changes: 8 additions & 0 deletions OllamaSharp.v3.ncrunchsolution
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
<SolutionConfiguration>
<Settings>
<AllowParallelTestExecution>True</AllowParallelTestExecution>
<EnableRDI>False</EnableRDI>
<RdiConfigured>True</RdiConfigured>
<SolutionConfigured>True</SolutionConfigured>
</Settings>
</SolutionConfiguration>
Loading

0 comments on commit d8d5b0f

Please sign in to comment.