From 13e76921a26b7e95cfd2455401da9fb05f3c3ffb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jochen=20Kirst=C3=A4tter?= <7329802+jochenkirstaetter@users.noreply.github.com> Date: Wed, 27 Mar 2024 11:57:05 +0400 Subject: [PATCH] add models of Gemini 1.5 and Gemini 1.0 Ultra --- src/Mscc.GenerativeAI.Google/CHANGELOG.md | 6 + src/Mscc.GenerativeAI.Web/CHANGELOG.md | 6 + src/Mscc.GenerativeAI/CHANGELOG.md | 10 +- src/Mscc.GenerativeAI/Constants/Model.cs | 4 +- .../Mscc.GenerativeAI/GenerativeAI_Should.cs | 2 - .../GoogleAi_Gemini15Pro_Should.cs | 238 ++++ .../GoogleAi_GeminiUltra_Should.cs | 1256 +++++++++++++++++ 7 files changed, 1518 insertions(+), 4 deletions(-) create mode 100644 tests/Mscc.GenerativeAI/GoogleAi_Gemini15Pro_Should.cs create mode 100644 tests/Mscc.GenerativeAI/GoogleAi_GeminiUltra_Should.cs diff --git a/src/Mscc.GenerativeAI.Google/CHANGELOG.md b/src/Mscc.GenerativeAI.Google/CHANGELOG.md index 8c7fd58..59ba5f7 100644 --- a/src/Mscc.GenerativeAI.Google/CHANGELOG.md +++ b/src/Mscc.GenerativeAI.Google/CHANGELOG.md @@ -11,6 +11,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed ### Fixed +## 0.9.2 + +### Changed + +- bump version + ## 0.9.1 ### Changed diff --git a/src/Mscc.GenerativeAI.Web/CHANGELOG.md b/src/Mscc.GenerativeAI.Web/CHANGELOG.md index 13c02fa..748cc39 100644 --- a/src/Mscc.GenerativeAI.Web/CHANGELOG.md +++ b/src/Mscc.GenerativeAI.Web/CHANGELOG.md @@ -11,6 +11,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed ### Fixed +## 0.9.2 + +### Changed + +- bump version + ## 0.9.1 ### Changed diff --git a/src/Mscc.GenerativeAI/CHANGELOG.md b/src/Mscc.GenerativeAI/CHANGELOG.md index 2e842e8..da116fe 100644 --- a/src/Mscc.GenerativeAI/CHANGELOG.md +++ b/src/Mscc.GenerativeAI/CHANGELOG.md @@ -10,11 +10,19 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added - Feature suggestion: Retry mechanism ([#2](https://github.com/mscraftsman/generative-ai/issues/2)) +- Feature suggestion: Add logs with LogLevel using the Standard logging in .NET ([#6](https://github.com/mscraftsman/generative-ai/issues/6)) - implement Automatic Function Call (AFC) ### Changed ### Fixed +## 0.9.2 + +### Added + +- models of Gemini 1.5 and Gemini 1.0 Ultra +- tests for Gemini 1.5 and Gemini 1.0 Ultra + ## 0.9.1 ### Added @@ -29,7 +37,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - improve creation of generative model in Google AI class - SafetySettings can be easier and less error-prone. 
([#8](https://github.com/mscraftsman/generative-ai/issues/8)) -- remove _useHeaderApiKey ([#10](https://github.com/mscraftsman/generative-ai/issues/10]) +- remove _useHeaderApiKey ([#10](https://github.com/mscraftsman/generative-ai/issues/10])) ## 0.9.0 diff --git a/src/Mscc.GenerativeAI/Constants/Model.cs b/src/Mscc.GenerativeAI/Constants/Model.cs index 1d5e3e3..e6302c5 100644 --- a/src/Mscc.GenerativeAI/Constants/Model.cs +++ b/src/Mscc.GenerativeAI/Constants/Model.cs @@ -19,7 +19,9 @@ public static class Model public const string Gemini10ProVision = "gemini-1.0-pro-vision"; // public const string Gemini10ProVision001 = "gemini-1.0-pro-vision-001"; public const string GeminiProVisionLatest = "gemini-1.0-pro-vision-latest"; - public const string Gemini15Pro = "gemini-1.5-pro"; + public const string GeminiUltra = GeminiUltraLatest; + public const string GeminiUltraLatest = "gemini-1.0-ultra-latest"; + public const string Gemini15Pro = Gemini15ProLatest; public const string Gemini15ProLatest = "gemini-1.5-pro-latest"; // public const string Gemini15ProVision = "gemini-1.5-pro-vision"; public const string BisonText001 = "text-bison-001"; diff --git a/tests/Mscc.GenerativeAI/GenerativeAI_Should.cs b/tests/Mscc.GenerativeAI/GenerativeAI_Should.cs index e6e7a8e..8f1b1dc 100644 --- a/tests/Mscc.GenerativeAI/GenerativeAI_Should.cs +++ b/tests/Mscc.GenerativeAI/GenerativeAI_Should.cs @@ -1,9 +1,7 @@ #if NET472_OR_GREATER || NETSTANDARD2_0 -using System.Collections.Generic; #endif using FluentAssertions; using Mscc.GenerativeAI; -using System.Text; using Xunit; using Xunit.Abstractions; diff --git a/tests/Mscc.GenerativeAI/GoogleAi_Gemini15Pro_Should.cs b/tests/Mscc.GenerativeAI/GoogleAi_Gemini15Pro_Should.cs new file mode 100644 index 0000000..605b2aa --- /dev/null +++ b/tests/Mscc.GenerativeAI/GoogleAi_Gemini15Pro_Should.cs @@ -0,0 +1,238 @@ +#if NET472_OR_GREATER || NETSTANDARD2_0 +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +#endif +using FluentAssertions; +using Mscc.GenerativeAI; +using Xunit; +using Xunit.Abstractions; + +namespace Test.Mscc.GenerativeAI +{ + [Collection(nameof(ConfigurationFixture))] + public class GoogleAi_Gemini15Pro_Should + { + private readonly ITestOutputHelper output; + private readonly ConfigurationFixture fixture; + private readonly string model = Model.Gemini15Pro; + + public GoogleAi_Gemini15Pro_Should(ITestOutputHelper output, ConfigurationFixture fixture) + { + this.output = output; + this.fixture = fixture; + } + + [Fact] + public void Initialize_Gemini15Pro() + { + // Arrange + + // Act + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + + // Assert + model.Should().NotBeNull(); + model.Name.Should().Be(Model.Gemini15Pro.SanitizeModelName()); + } + + [Fact] + public async void Generate_Text_From_Image() + { + // Arrange + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + var request = new GenerateContentRequest { Contents = new List() }; + var base64image = "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8z8BQDwAEhQGAhKmMIQAAAABJRU5ErkJggg=="; + var parts = new List + { + new TextData { Text = "What is this picture about?" 
}, + new InlineData { MimeType = "image/jpeg", Data = base64image } + }; + request.Contents.Add(new Content { Role = Role.User, Parts = parts }); + + // Act + var response = await model.GenerateContent(request); + + // Assert + response.Should().NotBeNull(); + response.Candidates.Should().NotBeNull().And.HaveCount(1); + response.Candidates.FirstOrDefault().Content.Should().NotBeNull(); + response.Candidates.FirstOrDefault().Content.Parts.Should().NotBeNull().And.HaveCountGreaterThanOrEqualTo(1); + response.Text.Should().Contain("red"); + output.WriteLine(response?.Text); + } + + [Fact] + public async void Describe_Image_From_InlineData() + { + // Arrange + var prompt = "Parse the time and city from the airport board shown in this image into a list, in Markdown"; + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + // Images + var board = await TestExtensions.ReadImageFileBase64Async("https://ai.google.dev/static/docs/images/timetable.png"); + var request = new GenerateContentRequest(prompt); + request.Contents[0].Parts.Add( + new InlineData { MimeType = "image/png", Data = board } + ); + + // Act + var response = await model.GenerateContent(request); + + // Assert + response.Should().NotBeNull(); + response.Candidates.Should().NotBeNull().And.HaveCount(1); + response.Candidates.FirstOrDefault().Content.Should().NotBeNull(); + response.Candidates.FirstOrDefault().Content.Parts.Should().NotBeNull().And.HaveCountGreaterThanOrEqualTo(1); + output.WriteLine(response?.Text); + } + + [Theory] + [InlineData("scones.jpg", "image/jpeg", "What is this picture?", "blueberries")] + [InlineData("cat.jpg", "image/jpeg", "Describe this image", "snow")] + [InlineData("cat.jpg", "image/jpeg", "Is it a cat?", "Yes")] + //[InlineData("animals.mp4", "video/mp4", "What's in the video?", "Zootopia")] + public async void Generate_Text_From_ImageFile(string filename, string mimetype, string prompt, string expected) + { + // Arrange + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + var base64image = Convert.ToBase64String(File.ReadAllBytes(Path.Combine(Environment.CurrentDirectory, "payload", filename))); + var parts = new List + { + new TextData { Text = prompt }, + new InlineData { MimeType = mimetype, Data = base64image } + }; + var generationConfig = new GenerationConfig() + { + Temperature = 0.4f, TopP = 1, TopK = 32, MaxOutputTokens = 1024 + }; + + // Act + var response = await model.GenerateContent(parts, generationConfig); + + // Assert + response.Should().NotBeNull(); + response.Candidates.Should().NotBeNull().And.HaveCount(1); + response.Candidates.FirstOrDefault().Content.Should().NotBeNull(); + response.Candidates.FirstOrDefault().Content.Parts.Should().NotBeNull().And.HaveCountGreaterThanOrEqualTo(1); + response.Text.Should().Contain(expected); + output.WriteLine(response?.Text); + } + + [Theory] + [InlineData("scones.jpg", "What is this picture?", "blueberries")] + [InlineData("cat.jpg", "Describe this image", "snow")] + [InlineData("cat.jpg", "Is it a feline?", "Yes")] + //[InlineData("animals.mp4", "video/mp4", "What's in the video?", "Zootopia")] + public async void Describe_AddMedia_From_ImageFile(string filename, string prompt, string expected) + { + // Arrange + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + var request = new GenerateContentRequest(prompt) + { + GenerationConfig = new GenerationConfig() + { + Temperature = 0.4f, TopP = 1, TopK = 32, MaxOutputTokens = 1024 + } + }; + 
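+ // Attach the local test image from the payload folder; AddMedia is assumed to read the file and add it to the request as an inline part.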
request.AddMedia(Path.Combine(Environment.CurrentDirectory, "payload", filename)); + + // Act + var response = await model.GenerateContent(request); + + // Assert + response.Should().NotBeNull(); + response.Candidates.Should().NotBeNull().And.HaveCount(1); + response.Candidates.FirstOrDefault().Content.Should().NotBeNull(); + response.Candidates.FirstOrDefault().Content.Parts.Should().NotBeNull().And.HaveCountGreaterThanOrEqualTo(1); + response.Text.Should().Contain(expected); + output.WriteLine(response?.Text); + } + + [Fact] + public async void Describe_AddMedia_From_Url() + { + // Arrange + var prompt = "Parse the time and city from the airport board shown in this image into a list, in Markdown"; + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + var request = new GenerateContentRequest(prompt); + await request.AddMedia("https://ai.google.dev/static/docs/images/timetable.png"); + + // Act + var response = await model.GenerateContent(request); + + // Assert + response.Should().NotBeNull(); + response.Candidates.Should().NotBeNull().And.HaveCount(1); + response.Candidates.FirstOrDefault().Content.Should().NotBeNull(); + response.Candidates.FirstOrDefault().Content.Parts.Should().NotBeNull().And.HaveCountGreaterThanOrEqualTo(1); + output.WriteLine(response?.Text); + } + + [Fact] + public async void Describe_AddMedia_From_UrlRemote() + { + // Arrange + var prompt = "Parse the time and city from the airport board shown in this image into a list, in Markdown"; + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + var request = new GenerateContentRequest(prompt); + await request.AddMedia("https://ai.google.dev/static/docs/images/timetable.png", true); + + // Act + var response = await model.GenerateContent(request); + + // Assert + response.Should().NotBeNull(); + response.Candidates.Should().NotBeNull().And.HaveCount(1); + response.Candidates.FirstOrDefault().Content.Should().NotBeNull(); + response.Candidates.FirstOrDefault().Content.Parts.Should().NotBeNull().And.HaveCountGreaterThanOrEqualTo(1); + output.WriteLine(response?.Text); + } + + [Fact(Skip = "Bad Request due to FileData part")] + public async void Describe_Image_From_FileData() + { + // Arrange + var prompt = "Parse the time and city from the airport board shown in this image into a list, in Markdown"; + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + var request = new GenerateContentRequest(prompt); + request.Contents[0].Parts.Add(new FileData + { + FileUri = "https://ai.google.dev/static/docs/images/timetable.png", + MimeType = "image/png" + }); + + // Act + var response = await model.GenerateContent(request); + + // Assert + response.Should().NotBeNull(); + response.Candidates.Should().NotBeNull().And.HaveCount(1); + response.Candidates.FirstOrDefault().Content.Should().NotBeNull(); + response.Candidates.FirstOrDefault().Content.Parts.Should().NotBeNull().And.HaveCountGreaterThanOrEqualTo(1); + output.WriteLine(response?.Text); + } + + [Fact(Skip = "URL scheme not supported")] + public async void Multimodal_Video_Input() + { + // Arrange + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + var video = await TestExtensions.ReadImageFileBase64Async("gs://cloud-samples-data/video/animals.mp4"); + var request = new GenerateContentRequest("What's in the video?"); + request.Contents[0].Role = Role.User; + request.Contents[0].Parts.Add(new InlineData { MimeType = "video/mp4", Data = video }); + + // Act + var response = 
await model.GenerateContent(request); + + // Assert + response.Should().NotBeNull(); + response.Candidates.Should().NotBeNull().And.HaveCount(1); + response.Candidates.FirstOrDefault().Content.Should().NotBeNull(); + response.Candidates.FirstOrDefault().Content.Parts.Should().NotBeNull().And.HaveCountGreaterThanOrEqualTo(1); + response.Text.Should().Contain("Zootopia"); + output.WriteLine(response?.Text); + } + } +} diff --git a/tests/Mscc.GenerativeAI/GoogleAi_GeminiUltra_Should.cs b/tests/Mscc.GenerativeAI/GoogleAi_GeminiUltra_Should.cs new file mode 100644 index 0000000..ce84c7d --- /dev/null +++ b/tests/Mscc.GenerativeAI/GoogleAi_GeminiUltra_Should.cs @@ -0,0 +1,1256 @@ +#if NET472_OR_GREATER || NETSTANDARD2_0 +using System; +using System.Collections.Generic; +using System.Net.Http; +#endif +using FluentAssertions; +using Mscc.GenerativeAI; +using Xunit; +using Xunit.Abstractions; + +namespace Test.Mscc.GenerativeAI +{ + [Collection(nameof(ConfigurationFixture))] + public class GoogleAi_GeminiUltra_Should + { + private readonly ITestOutputHelper output; + private readonly ConfigurationFixture fixture; + private readonly string model = Model.GeminiUltra; + + public GoogleAi_GeminiUltra_Should(ITestOutputHelper output, ConfigurationFixture fixture) + { + this.output = output; + this.fixture = fixture; + } + + [Fact] + public void Initialize_GoogleAI() + { + // Arrange + + // Act + var googleAI = new GoogleAI(apiKey: fixture.ApiKey); + + // Assert + googleAI.Should().NotBeNull(); + } + + [Fact] + public void Initialize_Using_GoogleAI() + { + // Arrange + var expected = Environment.GetEnvironmentVariable("GOOGLE_AI_MODEL") ?? Model.GeminiUltra; + var googleAI = new GoogleAI(apiKey: fixture.ApiKey); + + // Act + var model = googleAI.GenerativeModel(); + + // Assert + model.Should().NotBeNull(); + model.Name.Should().Be($"{expected.SanitizeModelName()}"); + } + + [Fact] + public void Initialize_EnvVars() + { + // Arrange + Environment.SetEnvironmentVariable("GOOGLE_API_KEY", fixture.ApiKey); + var expected = Environment.GetEnvironmentVariable("GOOGLE_AI_MODEL") ?? Model.GeminiUltra; + + // Act + var model = new GenerativeModel(); + + // Assert + model.Should().NotBeNull(); + model.Name.Should().Be($"{expected.SanitizeModelName()}"); + } + + [Fact] + public void Initialize_Default_Model() + { + // Arrange + var expected = Environment.GetEnvironmentVariable("GOOGLE_AI_MODEL") ?? 
Model.GeminiUltra; + + // Act + var model = new GenerativeModel(apiKey: fixture.ApiKey); + + // Assert + model.Should().NotBeNull(); + model.Name.Should().Be($"{expected.SanitizeModelName()}"); + } + + [Fact] + public void Initialize_Model() + { + // Arrange + var expected = this.model; + + // Act + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + + // Assert + model.Should().NotBeNull(); + model.Name.Should().Be($"{expected.SanitizeModelName()}"); + } + + [Fact] + public async void List_Models() + { + // Arrange + var model = new GenerativeModel(apiKey: fixture.ApiKey); + + // Act + var sut = await model.ListModels(); + + // Assert + sut.Should().NotBeNull(); + sut.Should().NotBeNull().And.HaveCountGreaterThanOrEqualTo(1); + sut.ForEach(x => + { + output.WriteLine($"Model: {x.DisplayName} ({x.Name})"); + x.SupportedGenerationMethods.ForEach(m => output.WriteLine($" Method: {m}")); + }); + } + + [Fact] + public async void List_Models_Using_OAuth() + { + // Arrange + var model = new GenerativeModel { AccessToken = fixture.AccessToken }; + + // Act + var sut = await model.ListModels(); + + // Assert + sut.Should().NotBeNull(); + sut.Should().NotBeNull().And.HaveCountGreaterThanOrEqualTo(1); + sut.ForEach(x => + { + output.WriteLine($"Model: {x.DisplayName} ({x.Name})"); + x.SupportedGenerationMethods.ForEach(m => output.WriteLine($" Method: {m}")); + }); + } + + [Fact] + public async void List_Tuned_Models() + { + // Arrange + var model = new GenerativeModel { AccessToken = fixture.AccessToken }; + + // Act + var sut = await model.ListModels(true); + // var sut = await model.ListTunedModels(); + + // Assert + sut.Should().NotBeNull(); + sut.Should().NotBeNull().And.HaveCountGreaterThanOrEqualTo(1); + sut.ForEach(x => + { + output.WriteLine($"Model: {x.DisplayName} ({x.Name})"); + x.TuningTask.Snapshots.ForEach(m => output.WriteLine($" Snapshot: {m}")); + }); + } + + [Theory] + [InlineData(Model.GeminiUltra)] + [InlineData(Model.GeminiProVision)] + [InlineData(Model.BisonText)] + [InlineData(Model.BisonChat)] + [InlineData("tunedModels/number-generator-model-psx3d3gljyko")] + public async void Get_Model_Information(string modelName) + { + // Arrange + var model = new GenerativeModel(apiKey: fixture.ApiKey); + + // Act + var sut = await model.GetModel(model: modelName); + + // Assert + sut.Should().NotBeNull(); + // sut.Name.Should().Be($"{modelName.SanitizeModelName()}"); + output.WriteLine($"Model: {sut.DisplayName} ({sut.Name})"); + sut.SupportedGenerationMethods.ForEach(m => output.WriteLine($" Method: {m}")); + } + + [Theory] + [InlineData("tunedModels/number-generator-model-psx3d3gljyko")] + public async void Get_TunedModel_Information_Using_ApiKey(string modelName) + { + // Arrange + var model = new GenerativeModel(apiKey: fixture.ApiKey); + + + // Act & Assert + await Assert.ThrowsAsync(() => model.GetModel(model: modelName)); + } + + [Theory] + [InlineData(Model.GeminiUltra)] + [InlineData(Model.GeminiProVision)] + [InlineData(Model.BisonText)] + [InlineData(Model.BisonChat)] + [InlineData("tunedModels/number-generator-model-psx3d3gljyko")] + public async void Get_Model_Information_Using_OAuth(string modelName) + { + // Arrange + var model = new GenerativeModel { AccessToken = fixture.AccessToken }; + var expected = modelName; + if (!expected.Contains("/")) + expected = $"{expected.SanitizeModelName()}"; + + // Act + var sut = await model.GetModel(model: modelName); + + // Assert + sut.Should().NotBeNull(); + sut.Name.Should().Be(expected); + 
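+ // Base models list SupportedGenerationMethods; tuned models are expected to report a State instead, hence the branch below.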
output.WriteLine($"Model: {sut.DisplayName} ({sut.Name})"); + if (sut.State is null) + { + sut?.SupportedGenerationMethods?.ForEach(m => output.WriteLine($" Method: {m}")); + } + else + { + output.WriteLine($"State: {sut.State}"); + } + } + + [Fact] + public async void Generate_Content() + { + // Arrange + var prompt = "Write a story about a magic backpack."; + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + + // Act + var response = await model.GenerateContent(prompt); + + // Assert + response.Should().NotBeNull(); + response.Candidates.Should().NotBeNull().And.HaveCount(1); + response.Text.Should().NotBeEmpty(); + output.WriteLine(response?.Text); + } + + [Fact] + public async void GenerateContent_WithEmptyPrompt_ThrowsArgumentNullException() + { + // Arrange + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + string prompt = null; + + // Act & Assert + await Assert.ThrowsAsync(() => model.GenerateContent(prompt)); + } + + [Fact] + public async void Generate_Content_MultiplePrompt() + { + // Arrange + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + var parts = new List + { + new TextData { Text = "What is x multiplied by 2?" }, + new TextData { Text = "x = 42" } + }; + + // Act + var response = await model.GenerateContent(parts); + + // Assert + response.Should().NotBeNull(); + response.Candidates.Should().NotBeNull().And.HaveCount(1); + output.WriteLine(response?.Text); + response.Text.Should().Be("84"); + } + + [Fact] + public async void Generate_Content_Request() + { + // Arrange + var prompt = "Write a story about a magic backpack."; + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + var request = new GenerateContentRequest { Contents = new List() }; + request.Contents.Add(new Content + { + Role = Role.User, + Parts = new List { new TextData { Text = prompt } } + }); + + // Act + var response = await model.GenerateContent(request); + + // Assert + response.Should().NotBeNull(); + response.Candidates.Should().NotBeNull().And.HaveCount(1); + response.Text.Should().NotBeEmpty(); + output.WriteLine(response?.Text); + } + + [Fact] + public async void GenerateContent_WithRequest_MultipleCandidates_ThrowsHttpRequestException() + { + // Arrange + var prompt = "Write a short poem about koi fish."; + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + var request = new GenerateContentRequest + { + Contents = new List(), + GenerationConfig = new GenerationConfig() + { + CandidateCount = 3 + } + }; + request.Contents.Add(new Content + { + Role = Role.User, + Parts = new List { new TextData { Text = prompt } } + }); + + // Act & Assert + await Assert.ThrowsAsync(() => model.GenerateContent(request)); + } + + [Fact] + public async void GenerateContent_WithNullRequest_ThrowsArgumentNullException() + { + // Arrange + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + GenerateContentRequest request = null; + + // Act & Assert + await Assert.ThrowsAsync(() => model.GenerateContent(request)); + } + + [Fact] + public async void Generate_Content_RequestConstructor() + { + // Arrange + var prompt = "Write a story about a magic backpack."; + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + var request = new GenerateContentRequest(prompt); + request.Contents[0].Role = Role.User; + + // Act + var response = await model.GenerateContent(request); + + // Assert + response.Should().NotBeNull(); + 
response.Candidates.Should().NotBeNull().And.HaveCount(1); + response.Text.Should().NotBeEmpty(); + output.WriteLine(response?.Text); + } + + [Fact] + public async void Generate_Content_Stream() + { + // Arrange + var prompt = "How are you doing today?"; + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + + // Act + var responseStream = model.GenerateContentStream(prompt); + + // Assert + responseStream.Should().NotBeNull(); + await foreach (var response in responseStream) + { + response.Should().NotBeNull(); + response.Candidates.Should().NotBeNull().And.HaveCount(1); + response.Text.Should().NotBeEmpty(); + output.WriteLine(response?.Text); + // response.UsageMetadata.Should().NotBeNull(); + // output.WriteLine($"PromptTokenCount: {response?.UsageMetadata?.PromptTokenCount}"); + // output.WriteLine($"CandidatesTokenCount: {response?.UsageMetadata?.CandidatesTokenCount}"); + // output.WriteLine($"TotalTokenCount: {response?.UsageMetadata?.TotalTokenCount}"); + } + } + + [Fact] + public async void Generate_Content_Stream_Request() + { + // Arrange + var prompt = "How are you doing today?"; + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + var request = new GenerateContentRequest { Contents = new List() }; + request.Contents.Add(new Content + { + Role = Role.User, + Parts = new List { new TextData { Text = prompt } } + }); + + // Act + var responseStream = model.GenerateContentStream(request); + + // Assert + responseStream.Should().NotBeNull(); + await foreach (var response in responseStream) + { + response.Should().NotBeNull(); + response.Candidates.Should().NotBeNull().And.HaveCount(1); + response.Text.Should().NotBeEmpty(); + output.WriteLine(response?.Text); + // response.UsageMetadata.Should().NotBeNull(); + // output.WriteLine($"PromptTokenCount: {response?.UsageMetadata?.PromptTokenCount}"); + // output.WriteLine($"CandidatesTokenCount: {response?.UsageMetadata?.CandidatesTokenCount}"); + // output.WriteLine($"TotalTokenCount: {response?.UsageMetadata?.TotalTokenCount}"); + } + } + + [Fact] + public async void GenerateAnswer_WithValidRequest_ReturnsAnswerResponse() + { + // Arrange + var model = new GenerativeModel(apiKey: "YOUR_API_KEY", model: Model.AttributedQuestionAnswering); + var request = new GenerateAnswerRequest("What is the capital of France?", AnswerStyle.Abstractive); + + // Act + var response = await model.GenerateAnswer(request); + + // Assert + response.Should().NotBeNull(); + response.Answer.Should().NotBeNull(); + response.Text.Should().Be("Paris"); + } + + [Theory] + [InlineData("How are you doing today?", 6)] + [InlineData("What kind of fish is this?", 7)] + [InlineData("Write a story about a magic backpack.", 8)] + [InlineData("Write an extended story about a magic backpack.", 9)] + public async void Count_Tokens(string prompt, int expected) + { + // Arrange + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + + // Act + var response = await model.CountTokens(prompt); + + // Assert + response.Should().NotBeNull(); + response.TotalTokens.Should().BeGreaterThanOrEqualTo(expected); + output.WriteLine($"Tokens: {response?.TotalTokens}"); + } + + [Theory] + [InlineData("How are you doing today?", 7)] + [InlineData("What kind of fish is this?", 8)] + [InlineData("Write a story about a magic backpack.", 9)] + [InlineData("Write an extended story about a magic backpack.", 10)] + public async void Count_Tokens_Request(string prompt, int expected) + { + // Arrange + var model = new 
GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + var request = new GenerateContentRequest { Contents = new List() }; + request.Contents.Add(new Content + { + Role = Role.User, + Parts = new List { new TextData { Text = prompt } } + }); + + // Act + var response = await model.CountTokens(request); + + // Assert + response.Should().NotBeNull(); + response.TotalTokens.Should().BeGreaterOrEqualTo(expected); + output.WriteLine($"Tokens: {response?.TotalTokens}"); + } + + [Fact] + public async void Start_Chat() + { + // Arrange + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + var chat = model.StartChat(); + var prompt = "How can I learn more about C#?"; + + // Act + var response = await chat.SendMessage(prompt); + + // Assert + response.Should().NotBeNull(); + response.Candidates.Should().NotBeNull().And.HaveCount(1); + response.Text.Should().NotBeEmpty(); + output.WriteLine(response?.Text); + } + + [Fact] + public async void Start_Chat_With_History() + { + // Arrange + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + var history = new List + { + new ContentResponse { Role = Role.User, Parts = new List { new Part("Hello") } }, + new ContentResponse { Role = Role.Model, Parts = new List { new Part("Hello! How can I assist you today?") } } + }; + var chat = model.StartChat(history); + var prompt = "How does electricity work?"; + + // Act + var response = await chat.SendMessage(prompt); + + // Assert + response.Should().NotBeNull(); + response.Candidates.Should().NotBeNull().And.HaveCount(1); + response.Text.Should().NotBeEmpty(); + output.WriteLine(prompt); + output.WriteLine(response?.Text); + //output.WriteLine(response?.PromptFeedback); + } + + [Fact] + // Refs: + // https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/send-chat-prompts-gemini + public async void Start_Chat_Multiple_Prompts() + { + // Arrange + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + var chat = model.StartChat(); + + // Act + var prompt = "Hello, let's talk a bit about nature."; + var response = await chat.SendMessage(prompt); + output.WriteLine(prompt); + output.WriteLine(response?.Text); + prompt = "What are all the colors in a rainbow?"; + response = await chat.SendMessage(prompt); + output.WriteLine(prompt); + output.WriteLine(response?.Text); + prompt = "Why does it appear when it rains?"; + response = await chat.SendMessage(prompt); + output.WriteLine(prompt); + output.WriteLine(response?.Text); + + // Assert + response.Should().NotBeNull(); + response.Candidates.Should().NotBeNull().And.HaveCount(1); + response.Text.Should().NotBeEmpty(); + } + + [Fact] + // Refs: + // https://ai.google.dev/tutorials/python_quickstart#chat_conversations + public async void Start_Chat_Conversations() + { + // Arrange + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + var chat = model.StartChat(); + + // Act + _ = await chat.SendMessage("Hello, fancy brainstorming about IT?"); + _ = await chat.SendMessage("In one sentence, explain how a computer works to a young child."); + _ = await chat.SendMessage("Okay, how about a more detailed explanation to a high schooler?"); + _ = await chat.SendMessage("Lastly, give a thorough definition for a CS graduate."); + + // Assert + chat.History.ForEach(c => + { + output.WriteLine($"{new string('-', 20)}"); + output.WriteLine($"{c.Role}: {c.Text}"); + }); + } + + [Fact] + // Refs: + // https://ai.google.dev/tutorials/python_quickstart#chat_conversations + 
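+ // Rewind is expected to drop the most recent prompt/response pair, so four exchanges should leave six history entries.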
public async void Start_Chat_Rewind_Conversation() + { + // Arrange + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + var chat = model.StartChat(); + _ = await chat.SendMessage("Hello, fancy brainstorming about IT?"); + _ = await chat.SendMessage("In one sentence, explain how a computer works to a young child."); + _ = await chat.SendMessage("Okay, how about a more detailed explanation to a high school kid?"); + _ = await chat.SendMessage("Lastly, give a thorough definition for a CS graduate."); + + // Act + var entries = chat.Rewind(); + + // Assert + entries.Should().NotBeNull(); + entries.Sent.Should().NotBeNull(); + entries.Received.Should().NotBeNull(); + output.WriteLine("------ Rewind ------"); + output.WriteLine($"{entries.Sent.Role}: {entries.Sent.Text}"); + output.WriteLine($"{new string('-', 20)}"); + output.WriteLine($"{entries.Received.Role}: {entries.Received.Text}"); + output.WriteLine($"{new string('-', 20)}"); + + chat.History.Count.Should().Be(6); + output.WriteLine("------ History -----"); + chat.History.ForEach(c => + { + output.WriteLine($"{new string('-', 20)}"); + output.WriteLine($"{c.Role}: {c.Text}"); + }); + } + + [Fact] + // Refs: + // https://ai.google.dev/tutorials/python_quickstart#chat_conversations + public async void Start_Chat_Conversations_Get_Last() + { + // Arrange + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + var chat = model.StartChat(); + _ = await chat.SendMessage("Hello, fancy brainstorming about IT?"); + _ = await chat.SendMessage("In one sentence, explain how a computer works to a young child."); + _ = await chat.SendMessage("Okay, how about a more detailed explanation to a high school kid?"); + _ = await chat.SendMessage("Lastly, give a thorough definition for a CS graduate."); + + // Act + var sut = chat.Last; + + // Assert + sut.Should().NotBeNull(); + output.WriteLine($"{sut.Role}: {sut.Text}"); + } + + [Fact] + public async void Start_Chat_Streaming() + { + // Arrange + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + var chat = model.StartChat(); + var prompt = "How can I learn more about C#?"; + + // Act + var responseStream = chat.SendMessageStream(prompt); + + // Assert + responseStream.Should().NotBeNull(); + await foreach (var response in responseStream) + { + response.Should().NotBeNull(); + response.Candidates.Should().NotBeNull().And.HaveCount(1); + response.Text.Should().NotBeEmpty(); + output.WriteLine(response?.Text); + // response.UsageMetadata.Should().NotBeNull(); + // output.WriteLine($"PromptTokenCount: {response?.UsageMetadata?.PromptTokenCount}"); + // output.WriteLine($"CandidatesTokenCount: {response?.UsageMetadata?.CandidatesTokenCount}"); + // output.WriteLine($"TotalTokenCount: {response?.UsageMetadata?.TotalTokenCount}"); + } + chat.History.Count.Should().Be(2); + output.WriteLine($"{new string('-', 20)}"); + output.WriteLine("------ History -----"); + chat.History.ForEach(c => + { + output.WriteLine($"{new string('-', 20)}"); + output.WriteLine($"{c.Role}: {c.Text}"); + }); + } + + [Fact] + // Ref: https://ai.google.dev/docs/function_calling + public async void Function_Calling() + { + // Arrange + var prompt = "Which theaters in Mountain View show Barbie movie?"; + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + List tools = + [ + new Tool() + { + FunctionDeclarations = + [ + new() + { + Name = "find_movies", + Description = + "find movie titles currently playing in theaters based on any 
description, genre, title words, etc.", + Parameters = new() + { + Type = ParameterType.Object, + Properties = new + { + Location = new + { + Type = ParameterType.String, + Description = "The city and state, e.g. San Francisco, CA or a zip code e.g. 95616" + }, + Description = new + { + Type = ParameterType.String, + Description = "Any kind of description including category or genre, title words, attributes, etc." + } + }, + Required = ["description"] + } + }, + + + new() + { + Name = "find_theaters", + Description = + "find theaters based on location and optionally movie title which are is currently playing in theaters", + Parameters = new() { + Type = ParameterType.Object, + Properties = new + { + Location = new + { + Type = ParameterType.String, + Description = "The city and state, e.g. San Francisco, CA or a zip code e.g. 95616" + }, + Movie = new + { + Type = ParameterType.String, + Description = "Any movie title" + } + }, + Required = ["location"] + } + }, + + + new() + { + Name = "get_showtimes", + Description = "Find the start times for movies playing in a specific theater", + Parameters = new() + { + Type = ParameterType.Object, + Properties = new + { + Location = new + { + Type = ParameterType.String, + Description = "The city and state, e.g. San Francisco, CA or a zip code e.g. 95616" + }, + Movie = new + { + Type = ParameterType.String, + Description = "Any movie title" + }, + Theater = new + { + Type = ParameterType.String, + Description = "Name of the theater" + }, + Date = new + { + Type = ParameterType.String, + Description = "Date for requested showtime" + } + }, + Required = ["location", "movie", "theater", "date"] + } + } + ] + } + ]; + + // Act + var response = await model.GenerateContent(prompt, tools: tools); + + // Assert + response.Should().NotBeNull(); + response.Candidates.Should().NotBeNull().And.HaveCount(1); + response?.Candidates?[0]?.Content?.Parts[0]?.FunctionCall?.Should().NotBeNull(); + output.WriteLine(response?.Candidates?[0]?.Content?.Parts[0]?.FunctionCall?.Name); + output.WriteLine(response?.Candidates?[0]?.Content?.Parts[0]?.FunctionCall?.Args?.ToString()); + } + + [Fact] + // Ref: https://ai.google.dev/docs/function_calling#function-calling-one-and-a-half-turn-curl-sample + public async void Function_Calling_MultiTurn() + { + // Arrange + var prompt = "Which theaters in Mountain View show Barbie movie?"; + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + List tools = + [ + new Tool() + { + FunctionDeclarations = + [ + new() + { + Name = "find_movies", + Description = + "find movie titles currently playing in theaters based on any description, genre, title words, etc.", + Parameters = new() + { + Type = ParameterType.Object, + Properties = new + { + Location = new + { + Type = ParameterType.String, + Description = "The city and state, e.g. San Francisco, CA or a zip code e.g. 95616" + }, + Description = new + { + Type = ParameterType.String, + Description = "Any kind of description including category or genre, title words, attributes, etc." + } + }, + Required = ["description"] + } + }, + + + new() + { + Name = "find_theaters", + Description = + "find theaters based on location and optionally movie title which are is currently playing in theaters", + Parameters = new() { + Type = ParameterType.Object, + Properties = new + { + Location = new + { + Type = ParameterType.String, + Description = "The city and state, e.g. San Francisco, CA or a zip code e.g. 
95616" + }, + Movie = new + { + Type = ParameterType.String, + Description = "Any movie title" + } + }, + Required = ["location"] + } + }, + + + new() + { + Name = "get_showtimes", + Description = "Find the start times for movies playing in a specific theater", + Parameters = new() + { + Type = ParameterType.Object, + Properties = new + { + Location = new + { + Type = ParameterType.String, + Description = "The city and state, e.g. San Francisco, CA or a zip code e.g. 95616" + }, + Movie = new + { + Type = ParameterType.String, + Description = "Any movie title" + }, + Theater = new + { + Type = ParameterType.String, + Description = "Name of the theater" + }, + Date = new + { + Type = ParameterType.String, + Description = "Date for requested showtime" + } + }, + Required = ["location", "movie", "theater", "date"] + } + } + ] + } + ]; + var request = new GenerateContentRequest(prompt, tools: tools); + request.Contents[0].Role = Role.User; + request.Contents.Add(new Content() + { + Role = Role.Model, + Parts = new() + { + new FunctionCall() { Name = "find_theaters", Args = new { Location = "Mountain View, CA", Movie = "Barbie" } } + } + }); + request.Contents.Add(new Content() + { + Role = Role.Function, + Parts = new() + { + new FunctionResponse() { Name = "find_theaters", Response = new + { + Name = "find_theaters", Content = new + { + Movie = "Barbie", + Theaters = new dynamic[] { new + { + Name = "AMC Mountain View 16", + Address = "2000 W El Camino Real, Mountain View, CA 94040" + }, new + { + Name = "Regal Edwards 14", + Address = "245 Castro St, Mountain View, CA 94040" + } + } + } + }} + } + }); + + // Act + var response = await model.GenerateContent(request); + + // Assert + response.Should().NotBeNull(); + response.Candidates.Should().NotBeNull().And.HaveCount(1); + response.Text.Should().NotBeEmpty(); + output.WriteLine(response?.Text); + } + + [Fact] + // Ref: https://ai.google.dev/docs/function_calling#multi-turn-example-2 + public async void Function_Calling_MultiTurn_Multiple() + { + // Arrange + var prompt = "Which theaters in Mountain View show Barbie movie?"; + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + List tools = + [ + new Tool() + { + FunctionDeclarations = + [ + new() + { + Name = "find_movies", + Description = + "find movie titles currently playing in theaters based on any description, genre, title words, etc.", + Parameters = new() + { + Type = ParameterType.Object, + Properties = new + { + Location = new + { + Type = ParameterType.String, + Description = "The city and state, e.g. San Francisco, CA or a zip code e.g. 95616" + }, + Description = new + { + Type = ParameterType.String, + Description = "Any kind of description including category or genre, title words, attributes, etc." + } + }, + Required = ["description"] + } + }, + + + new() + { + Name = "find_theaters", + Description = + "find theaters based on location and optionally movie title which are is currently playing in theaters", + Parameters = new() { + Type = ParameterType.Object, + Properties = new + { + Location = new + { + Type = ParameterType.String, + Description = "The city and state, e.g. San Francisco, CA or a zip code e.g. 
95616" + }, + Movie = new + { + Type = ParameterType.String, + Description = "Any movie title" + } + }, + Required = ["location"] + } + }, + + + new() + { + Name = "get_showtimes", + Description = "Find the start times for movies playing in a specific theater", + Parameters = new() + { + Type = ParameterType.Object, + Properties = new + { + Location = new + { + Type = ParameterType.String, + Description = "The city and state, e.g. San Francisco, CA or a zip code e.g. 95616" + }, + Movie = new + { + Type = ParameterType.String, + Description = "Any movie title" + }, + Theater = new + { + Type = ParameterType.String, + Description = "Name of the theater" + }, + Date = new + { + Type = ParameterType.String, + Description = "Date for requested showtime" + } + }, + Required = ["location", "movie", "theater", "date"] + } + } + ] + } + ]; + var request = new GenerateContentRequest(prompt, tools: tools); + request.Contents[0].Role = Role.User; + request.Contents.Add(new Content() + { + Role = Role.Model, + Parts = new() + { + new FunctionCall() { Name = "find_theaters", Args = new { Location = "Mountain View, CA", Movie = "Barbie" } } + } + }); + request.Contents.Add(new Content() + { + Role = Role.Function, + Parts = new() + { + new FunctionResponse() { Name = "find_theaters", Response = new + { + Name = "find_theaters", Content = new + { + Movie = "Barbie", + Theaters = new dynamic[] { new + { + Name = "AMC Mountain View 16", + Address = "2000 W El Camino Real, Mountain View, CA 94040" + }, new + { + Name = "Regal Edwards 14", + Address = "245 Castro St, Mountain View, CA 94040" + } + } + } + }} + } + }); + request.Contents.Add(new Content() + { + Role = Role.Model, + Parts = new() + { + new TextData(){ Text = "OK. I found two theaters in Mountain View showing Barbie: AMC Mountain View 16 and Regal Edwards 14." } + } + }); + request.Contents.Add(new Content() + { + Role = Role.User, + Parts = new() + { + new TextData(){ Text = "Can we recommend some comedy movies on show in Mountain View?" 
} + } + }); + + // Act + var response = await model.GenerateContent(request); + + // Assert + response.Should().NotBeNull(); + response.Candidates.Should().NotBeNull().And.HaveCount(1); + response?.Candidates?[0]?.Content?.Parts[0]?.FunctionCall?.Should().NotBeNull(); + output.WriteLine(response?.Candidates?[0]?.Content?.Parts[0]?.FunctionCall?.Name); + output.WriteLine(response?.Candidates?[0]?.Content?.Parts[0]?.FunctionCall?.Args?.ToString()); + } + + [Fact(Skip = "Work in progress")] + public async void Function_Calling_Chat() + { + // Arrange + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + var chat = model.StartChat(tools: new List()); + var chatInput1 = "What is the weather in Boston?"; + + // Act + //var result1 = await chat.SendMessageStream(prompt); + //var response1 = await result1.Response; + //var result2 = await chat.SendMessageStream(new List { new FunctionResponse() }); + //var response2 = await result2.Response; + + //// Assert + //response1.Should().NotBeNull(); + //response.Candidates.Should().NotBeNull().And.HaveCount(1); + //response.Text.Should().NotBeEmpty(); + //output.WriteLine(response?.Text); + } + + [Fact(Skip = "Work in progress")] + public async void Function_Calling_ContentStream() + { + // Arrange + var model = new GenerativeModel(apiKey: fixture.ApiKey, model: this.model); + var request = new GenerateContentRequest + { + Contents = new List(), + Tools = new List { } + }; + request.Contents.Add(new Content + { + Role = Role.User, + Parts = new List { new TextData { Text = "What is the weather in Boston?" } } + }); + request.Contents.Add(new Content + { + Role = Role.Model, + Parts = new List { new FunctionCall { Name = "get_current_weather", Args = new { location = "Boston" } } } + }); + request.Contents.Add(new Content + { + Role = Role.Function, + Parts = new List { new FunctionResponse() } + }); + + // Act + var response = model.GenerateContentStream(request); + + // Assert + // response.Should().NotBeNull().And.HaveCountGreaterThanOrEqualTo(1); + // response.FirstOrDefault().Should().NotBeNull(); + // response.ForEach(x => output.WriteLine(x.Text)); + // response.LastOrDefault().UsageMetadata.Should().NotBeNull(); + // output.WriteLine($"PromptTokenCount: {response.LastOrDefault().UsageMetadata.PromptTokenCount}"); + // output.WriteLine($"CandidatesTokenCount: {response.LastOrDefault().UsageMetadata.CandidatesTokenCount}"); + // output.WriteLine($"TotalTokenCount: {response.LastOrDefault().UsageMetadata.TotalTokenCount}"); + } + + [Fact] + public async void Create_Tuned_Model() + { + // Arrange + var model = new GenerativeModel(apiKey: null, model: Model.GeminiUltra) + { + AccessToken = fixture.AccessToken, ProjectId = fixture.ProjectId + }; + var request = new CreateTunedModelRequest() + { + BaseModel = $"{Model.GeminiUltra.SanitizeModelName()}", + DisplayName = "Autogenerated Test model", + TuningTask = new() + { + Hyperparameters = new() { BatchSize = 2, LearningRate = 0.001f, EpochCount = 3 }, + TrainingData = new() + { + Examples = new() + { + Examples = new() + { + new TuningExample() { TextInput = "1", Output = "2" }, + new TuningExample() { TextInput = "3", Output = "4" }, + new TuningExample() { TextInput = "-3", Output = "-2" }, + new TuningExample() { TextInput = "twenty two", Output = "twenty three" }, + new TuningExample() { TextInput = "two hundred", Output = "two hundred one" }, + new TuningExample() { TextInput = "ninety nine", Output = "one hundred" }, + new TuningExample() { TextInput = "8", Output = "9" 
}, + new TuningExample() { TextInput = "-98", Output = "-97" }, + new TuningExample() { TextInput = "1,000", Output = "1,001" }, + new TuningExample() { TextInput = "thirteen", Output = "fourteen" }, + new TuningExample() { TextInput = "seven", Output = "eight" }, + } + } + } + } + }; + + // Act + var response = await model.CreateTunedModel(request); + + // Assert + response.Should().NotBeNull(); + response.Name.Should().NotBeNull(); + response.Metadata.Should().NotBeNull(); + output.WriteLine($"Name: {response.Name}"); + output.WriteLine($"Model: {response.Metadata.TunedModel} (Steps: {response.Metadata.TotalSteps})"); + } + + [Fact] + public async void Create_Tuned_Model_Simply() + { + // Arrange + var model = new GenerativeModel(apiKey: null, model: Model.GeminiUltra) + { + AccessToken = fixture.AccessToken, ProjectId = fixture.ProjectId + }; + var parameters = new HyperParameters() { BatchSize = 2, LearningRate = 0.001f, EpochCount = 3 }; + var dataset = new List + { + new() { TextInput = "1", Output = "2" }, + new() { TextInput = "3", Output = "4" }, + new() { TextInput = "-3", Output = "-2" }, + new() { TextInput = "twenty two", Output = "twenty three" }, + new() { TextInput = "two hundred", Output = "two hundred one" }, + new() { TextInput = "ninety nine", Output = "one hundred" }, + new() { TextInput = "8", Output = "9" }, + new() { TextInput = "-98", Output = "-97" }, + new() { TextInput = "1,000", Output = "1,001" }, + new() { TextInput = "thirteen", Output = "fourteen" }, + new() { TextInput = "seven", Output = "eight" }, + }; + var request = new CreateTunedModelRequest(Model.GeminiUltra, + "Simply autogenerated Test model", + dataset, + parameters); + + // Act + var response = await model.CreateTunedModel(request); + + // Assert + response.Should().NotBeNull(); + response.Name.Should().NotBeNull(); + response.Metadata.Should().NotBeNull(); + output.WriteLine($"Name: {response.Name}"); + output.WriteLine($"Model: {response.Metadata.TunedModel} (Steps: {response.Metadata.TotalSteps})"); + } + + [Fact] + public async void Delete_Tuned_Model() + { + // Arrange + var modelName = "tunedModels/number-generator-model-psx3d3gljyko"; // see List_Tuned_Models for available options. + var model = new GenerativeModel() + { + AccessToken = fixture.AccessToken, + ProjectId = fixture.ProjectId + }; + + // Act + var response = await model.DeleteTunedModel(modelName); + + // Assert + response.Should().NotBeNull(); + output.WriteLine(response); + } + + [Theory] + [InlineData("255", "256")] + [InlineData("41", "42")] + // [InlineData("five", "six")] + // [InlineData("Six hundred thirty nine", "Six hundred forty")] + public async void Generate_Content_TunedModel(string prompt, string expected) + { + // Arrange + var model = new GenerativeModel(apiKey: null, model: "tunedModels/autogenerated-test-model-48gob9c9v54p") + { + AccessToken = fixture.AccessToken, + ProjectId = fixture.ProjectId + }; + + // Act + var response = await model.GenerateContent(prompt); + + // Assert + response.Should().NotBeNull(); + response.Candidates.Should().NotBeNull().And.HaveCount(1); + response.Text.Should().NotBeEmpty(); + output.WriteLine(response?.Text); + response?.Text.Should().Be(expected); + } + } +} \ No newline at end of file