diff --git a/src/Cnblogs.DashScope.Sdk/BaiChuan/BaiChuan2Llm.cs b/src/Cnblogs.DashScope.Sdk/BaiChuan/BaiChuan2Llm.cs
deleted file mode 100644
index 8ee2af3..0000000
--- a/src/Cnblogs.DashScope.Sdk/BaiChuan/BaiChuan2Llm.cs
+++ /dev/null
@@ -1,17 +0,0 @@
-namespace Cnblogs.DashScope.Sdk.BaiChuan;
-
-/// <summary>
-/// BaiChuan2 model, supports prompt and message format.
-/// </summary>
-public enum BaiChuan2Llm
-{
-    /// <summary>
-    /// baichuan2-7b-chat-v1
-    /// </summary>
-    BaiChuan2_7BChatV1 = 1,
-
-    /// <summary>
-    /// baichuan2-13b-chat-v1
-    /// </summary>
-    BaiChuan2_13BChatV1 = 2
-}
diff --git a/src/Cnblogs.DashScope.Sdk/BaiChuan/BaiChuanLlm.cs b/src/Cnblogs.DashScope.Sdk/BaiChuan/BaiChuanLlm.cs
deleted file mode 100644
index 53c5d8a..0000000
--- a/src/Cnblogs.DashScope.Sdk/BaiChuan/BaiChuanLlm.cs
+++ /dev/null
@@ -1,12 +0,0 @@
-namespace Cnblogs.DashScope.Sdk.BaiChuan;
-
-/// <summary>
-/// Supported baichuan model: https://help.aliyun.com/zh/dashscope/developer-reference/api-details-2
-/// </summary>
-public enum BaiChuanLlm
-{
-    /// <summary>
-    /// baichuan-7b-v1
-    /// </summary>
-    BaiChuan7B = 1
-}
diff --git a/src/Cnblogs.DashScope.Sdk/BaiChuan/BaiChuanLlmName.cs b/src/Cnblogs.DashScope.Sdk/BaiChuan/BaiChuanLlmName.cs
deleted file mode 100644
index b5ffff1..0000000
--- a/src/Cnblogs.DashScope.Sdk/BaiChuan/BaiChuanLlmName.cs
+++ /dev/null
@@ -1,23 +0,0 @@
-namespace Cnblogs.DashScope.Sdk.BaiChuan;
-
-internal static class BaiChuanLlmName
-{
-    public static string GetModelName(this BaiChuanLlm llm)
-    {
-        return llm switch
-        {
-            BaiChuanLlm.BaiChuan7B => "baichuan-7b-v1",
-            _ => ThrowHelper.UnknownModelName(nameof(llm), llm)
-        };
-    }
-
-    public static string GetModelName(this BaiChuan2Llm llm)
-    {
-        return llm switch
-        {
-            BaiChuan2Llm.BaiChuan2_7BChatV1 => "baichuan2-7b-chat-v1",
-            BaiChuan2Llm.BaiChuan2_13BChatV1 => "baichuan2-13b-chat-v1",
-            _ => ThrowHelper.UnknownModelName(nameof(llm), llm)
-        };
-    }
-}
diff --git a/src/Cnblogs.DashScope.Sdk/BaiChuan/BaiChuanTextGenerationApi.cs b/src/Cnblogs.DashScope.Sdk/BaiChuan/BaiChuanTextGenerationApi.cs
deleted file mode 100644
index ef3ec60..0000000
--- a/src/Cnblogs.DashScope.Sdk/BaiChuan/BaiChuanTextGenerationApi.cs
+++ /dev/null
@@ -1,87 +0,0 @@
-using Cnblogs.DashScope.Core;
-
-namespace Cnblogs.DashScope.Sdk.BaiChuan;
-
-/// <summary>
-/// BaiChuan LLM generation apis, doc: https://help.aliyun.com/zh/dashscope/developer-reference/api-details-2
-/// </summary>
-public static class BaiChuanTextGenerationApi
-{
-    /// <summary>
-    /// Get text completion from baichuan model.
-    /// </summary>
-    /// <param name="client">The <see cref="IDashScopeClient"/>.</param>
-    /// <param name="llm">The llm to use.</param>
-    /// <param name="prompt">The prompt to generate completion from.</param>
-    /// <returns></returns>
-    public static Task<ModelResponse<TextGenerationOutput, TextGenerationTokenUsage>> GetBaiChuanTextCompletionAsync(
-        this IDashScopeClient client,
-        BaiChuanLlm llm,
-        string prompt)
-    {
-        return client.GetBaiChuanTextCompletionAsync(llm.GetModelName(), prompt);
-    }
-
-    /// <summary>
-    /// Get text completion from baichuan model.
-    /// </summary>
-    /// <param name="client">The <see cref="IDashScopeClient"/>.</param>
-    /// <param name="llm">The llm to use.</param>
-    /// <param name="prompt">The prompt to generate completion from.</param>
-    /// <returns></returns>
-    public static Task<ModelResponse<TextGenerationOutput, TextGenerationTokenUsage>> GetBaiChuanTextCompletionAsync(
-        this IDashScopeClient client,
-        string llm,
-        string prompt)
-    {
-        return client.GetTextCompletionAsync(
-            new ModelRequest<TextGenerationInput, ITextGenerationParameters>
-            {
-                Model = llm,
-                Input = new TextGenerationInput { Prompt = prompt },
-                Parameters = null
-            });
-    }
-
-    /// <summary>
-    /// Get text completion from baichuan model.
-    /// </summary>
-    /// <param name="client">The <see cref="IDashScopeClient"/>.</param>
-    /// <param name="llm">The model name.</param>
-    /// <param name="messages">The context messages.</param>
-    /// <param name="resultFormat">Can be 'text' or 'message', defaults to 'text'. Call <see cref="ResultFormats"/> to get available options.</param>
-    /// <returns></returns>
-    public static Task<ModelResponse<TextGenerationOutput, TextGenerationTokenUsage>> GetBaiChuanTextCompletionAsync(
-        this IDashScopeClient client,
-        BaiChuan2Llm llm,
-        IEnumerable<TextChatMessage> messages,
-        string? resultFormat = null)
-    {
-        return client.GetBaiChuanTextCompletionAsync(llm.GetModelName(), messages, resultFormat);
-    }
-
-    /// <summary>
-    /// Get text completion from baichuan model.
-    /// </summary>
-    /// <param name="client">The <see cref="IDashScopeClient"/>.</param>
-    /// <param name="llm">The model name.</param>
-    /// <param name="messages">The context messages.</param>
-    /// <param name="resultFormat">Can be 'text' or 'message', defaults to 'text'. Call <see cref="ResultFormats"/> to get available options.</param>
-    /// <returns></returns>
-    public static Task<ModelResponse<TextGenerationOutput, TextGenerationTokenUsage>> GetBaiChuanTextCompletionAsync(
-        this IDashScopeClient client,
-        string llm,
-        IEnumerable<TextChatMessage> messages,
-        string? resultFormat = null)
-    {
-        return client.GetTextCompletionAsync(
-            new ModelRequest<TextGenerationInput, ITextGenerationParameters>
-            {
-                Model = llm,
-                Input = new TextGenerationInput { Messages = messages },
-                Parameters = string.IsNullOrEmpty(resultFormat) == false
-                    ? new TextGenerationParameters { ResultFormat = resultFormat }
-                    : null
-            });
-    }
-}
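
For reference, callers of the removed GetBaiChuanTextCompletionAsync helpers can invoke the generic text-completion API directly. A minimal sketch, assuming the request and client types referenced in the deleted code above (IDashScopeClient, ModelRequest<TextGenerationInput, ITextGenerationParameters>, TextGenerationParameters, TextChatMessage) and the response type ModelResponse<TextGenerationOutput, TextGenerationTokenUsage>:

```csharp
using Cnblogs.DashScope.Core;

// Sketch only, not part of the diff: replacing the removed BaiChuan wrapper with a direct call.
public static class BaiChuanMigrationSketch
{
    public static Task<ModelResponse<TextGenerationOutput, TextGenerationTokenUsage>> CompleteAsync(
        IDashScopeClient client,
        IEnumerable<TextChatMessage> messages)
    {
        return client.GetTextCompletionAsync(
            new ModelRequest<TextGenerationInput, ITextGenerationParameters>
            {
                Model = "baichuan2-13b-chat-v1", // literal model name replaces the removed BaiChuan2Llm enum
                Input = new TextGenerationInput { Messages = messages },
                Parameters = new TextGenerationParameters { ResultFormat = "message" }
            });
    }
}
```
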
diff --git a/src/Cnblogs.DashScope.Sdk/Llama2/Llama2Model.cs b/src/Cnblogs.DashScope.Sdk/Llama2/Llama2Model.cs
deleted file mode 100644
index 827fd14..0000000
--- a/src/Cnblogs.DashScope.Sdk/Llama2/Llama2Model.cs
+++ /dev/null
@@ -1,17 +0,0 @@
-namespace Cnblogs.DashScope.Sdk.Llama2;
-
-/// <summary>
-/// Supported models for LLaMa2.
-/// </summary>
-public enum Llama2Model
-{
-    /// <summary>
-    /// llama2-7b-chat-v2
-    /// </summary>
-    Chat7Bv2 = 1,
-
-    /// <summary>
-    /// llama2-13b-chat-v2
-    /// </summary>
-    Chat13Bv2 = 2
-}
diff --git a/src/Cnblogs.DashScope.Sdk/Llama2/Llama2ModelNames.cs b/src/Cnblogs.DashScope.Sdk/Llama2/Llama2ModelNames.cs
deleted file mode 100644
index 44357d3..0000000
--- a/src/Cnblogs.DashScope.Sdk/Llama2/Llama2ModelNames.cs
+++ /dev/null
@@ -1,14 +0,0 @@
-namespace Cnblogs.DashScope.Sdk.Llama2;
-
-internal static class Llama2ModelNames
-{
-    public static string GetModelName(this Llama2Model model)
-    {
-        return model switch
-        {
-            Llama2Model.Chat7Bv2 => "llama2-7b-chat-v2",
-            Llama2Model.Chat13Bv2 => "llama2-13b-chat-v2",
-            _ => ThrowHelper.UnknownModelName(nameof(model), model)
-        };
-    }
-}
diff --git a/src/Cnblogs.DashScope.Sdk/Llama2/Llama2TextGenerationApi.cs b/src/Cnblogs.DashScope.Sdk/Llama2/Llama2TextGenerationApi.cs
deleted file mode 100644
index 5fa9b45..0000000
--- a/src/Cnblogs.DashScope.Sdk/Llama2/Llama2TextGenerationApi.cs
+++ /dev/null
@@ -1,53 +0,0 @@
-using Cnblogs.DashScope.Core;
-
-namespace Cnblogs.DashScope.Sdk.Llama2;
-
-/// <summary>
-/// Extensions for llama2 text generation, docs: https://help.aliyun.com/zh/dashscope/developer-reference/api-details-11
-/// </summary>
-public static class Llama2TextGenerationApi
-{
-    /// <summary>
-    /// Get text completion from llama2 model.
-    /// </summary>
-    /// <param name="client">The <see cref="IDashScopeClient"/>.</param>
-    /// <param name="model">The model name.</param>
-    /// <param name="messages">The context messages.</param>
-    /// <param name="resultFormat">Can be 'text' or 'message'. Call <see cref="ResultFormats"/> to get available options.</param>
-    /// <returns></returns>
-    public static async Task<ModelResponse<TextGenerationOutput, TextGenerationTokenUsage>>
-        GetLlama2TextCompletionAsync(
-            this IDashScopeClient client,
-            Llama2Model model,
-            IEnumerable<TextChatMessage> messages,
-            string? resultFormat = null)
-    {
-        return await client.GetLlama2TextCompletionAsync(model.GetModelName(), messages, resultFormat);
-    }
-
-    /// <summary>
-    /// Get text completion from llama2 model.
-    /// </summary>
-    /// <param name="client">The <see cref="IDashScopeClient"/>.</param>
-    /// <param name="model">The model name.</param>
-    /// <param name="messages">The context messages.</param>
-    /// <param name="resultFormat">Can be 'text' or 'message'. Call <see cref="ResultFormats"/> to get available options.</param>
-    /// <returns></returns>
-    public static async Task<ModelResponse<TextGenerationOutput, TextGenerationTokenUsage>>
-        GetLlama2TextCompletionAsync(
-            this IDashScopeClient client,
-            string model,
-            IEnumerable<TextChatMessage> messages,
-            string? resultFormat = null)
-    {
-        return await client.GetTextCompletionAsync(
-            new ModelRequest<TextGenerationInput, ITextGenerationParameters>
-            {
-                Model = model,
-                Input = new TextGenerationInput { Messages = messages },
-                Parameters = resultFormat != null
-                    ? new TextGenerationParameters { ResultFormat = resultFormat }
-                    : null
-            });
-    }
-}
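
The enum-to-string switches removed above (BaiChuanLlmName, Llama2ModelNames) are easy to keep on the caller side if typed model names are still wanted. A minimal sketch using only the literal model names from the deleted files:

```csharp
// Sketch only, not part of the diff: caller-side constants standing in for the
// removed BaiChuanLlm/BaiChuan2Llm/Llama2Model enums and their GetModelName switches.
public static class RemovedModelNames
{
    public const string BaiChuan7BV1 = "baichuan-7b-v1";
    public const string BaiChuan2_7BChatV1 = "baichuan2-7b-chat-v1";
    public const string BaiChuan2_13BChatV1 = "baichuan2-13b-chat-v1";
    public const string Llama2Chat7BV2 = "llama2-7b-chat-v2";
    public const string Llama2Chat13BV2 = "llama2-13b-chat-v2";
}
```
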
diff --git a/test/Cnblogs.DashScope.Sdk.UnitTests/BaiChuanApiTests.cs b/test/Cnblogs.DashScope.Sdk.UnitTests/BaiChuanApiTests.cs
deleted file mode 100644
index 579789e..0000000
--- a/test/Cnblogs.DashScope.Sdk.UnitTests/BaiChuanApiTests.cs
+++ /dev/null
@@ -1,110 +0,0 @@
-using Cnblogs.DashScope.Core;
-using Cnblogs.DashScope.Sdk.BaiChuan;
-using Cnblogs.DashScope.Tests.Shared.Utils;
-using NSubstitute;
-
-namespace Cnblogs.DashScope.Sdk.UnitTests;
-
-public class BaiChuanApiTests
-{
-    [Fact]
-    public async Task BaiChuanTextGeneration_UseEnum_SuccessAsync()
-    {
-        // Arrange
-        var client = Substitute.For<IDashScopeClient>();
-
-        // Act
-        _ = await client.GetBaiChuanTextCompletionAsync(BaiChuanLlm.BaiChuan7B, Cases.Prompt);
-
-        // Assert
-        _ = await client.Received().GetTextCompletionAsync(
-            Arg.Is<ModelRequest<TextGenerationInput, ITextGenerationParameters>>(
-                s => s.Model == "baichuan-7b-v1" && s.Input.Prompt == Cases.Prompt && s.Parameters == null));
-    }
-
-    [Fact]
-    public async Task BaiChuanTextGeneration_UseInvalidEnum_SuccessAsync()
-    {
-        // Arrange
-        var client = Substitute.For<IDashScopeClient>();
-
-        // Act
-        var act = async () => await client.GetBaiChuanTextCompletionAsync((BaiChuanLlm)(-1), Cases.Prompt);
-
-        // Assert
-        await Assert.ThrowsAsync<ArgumentOutOfRangeException>(act);
-    }
-
-    [Fact]
-    public async Task BaiChuanTextGeneration_CustomModel_SuccessAsync()
-    {
-        // Arrange
-        var client = Substitute.For<IDashScopeClient>();
-
-        // Act
-        _ = await client.GetBaiChuanTextCompletionAsync(Cases.CustomModelName, Cases.Prompt);
-
-        // Assert
-        _ = await client.Received().GetTextCompletionAsync(
-            Arg.Is<ModelRequest<TextGenerationInput, ITextGenerationParameters>>(
-                s => s.Model == Cases.CustomModelName && s.Input.Prompt == Cases.Prompt && s.Parameters == null));
-    }
-
-    [Fact]
-    public async Task BaiChuan2TextGeneration_UseInvalidEnum_SuccessAsync()
-    {
-        // Arrange
-        var client = Substitute.For<IDashScopeClient>();
-
-        // Act
-        var act = async () => await client.GetBaiChuanTextCompletionAsync(
-            (BaiChuan2Llm)(-1),
-            Cases.TextMessages,
-            ResultFormats.Message);
-
-        // Assert
-        await Assert.ThrowsAsync<ArgumentOutOfRangeException>(act);
-    }
-
-    [Fact]
-    public async Task BaiChuan2TextGeneration_UseEnum_SuccessAsync()
-    {
-        // Arrange
-        var client = Substitute.For<IDashScopeClient>();
-
-        // Act
-        _ = await client.GetBaiChuanTextCompletionAsync(
-            BaiChuan2Llm.BaiChuan2_13BChatV1,
-            Cases.TextMessages,
-            ResultFormats.Message);
-
-        // Assert
-        _ = await client.Received().GetTextCompletionAsync(
-            Arg.Is<ModelRequest<TextGenerationInput, ITextGenerationParameters>>(
-                s => s.Model == "baichuan2-13b-chat-v1"
-                     && s.Input.Messages == Cases.TextMessages
-                     && s.Parameters != null
-                     && s.Parameters.ResultFormat == ResultFormats.Message));
-    }
-
-    [Fact]
-    public async Task BaiChuan2TextGeneration_CustomModel_SuccessAsync()
-    {
-        // Arrange
-        var client = Substitute.For<IDashScopeClient>();
-
-        // Act
-        _ = await client.GetBaiChuanTextCompletionAsync(
-            Cases.CustomModelName,
-            Cases.TextMessages,
-            ResultFormats.Message);
-
-        // Assert
-        _ = await client.Received().GetTextCompletionAsync(
-            Arg.Is<ModelRequest<TextGenerationInput, ITextGenerationParameters>>(
-                s => s.Model == Cases.CustomModelName
-                     && s.Input.Messages == Cases.TextMessages
-                     && s.Parameters != null
-                     && s.Parameters.ResultFormat == ResultFormats.Message));
-    }
-}
diff --git a/test/Cnblogs.DashScope.Sdk.UnitTests/Llama2TextGenerationApiTests.cs b/test/Cnblogs.DashScope.Sdk.UnitTests/Llama2TextGenerationApiTests.cs
deleted file mode 100644
index 7daf36c..0000000
--- a/test/Cnblogs.DashScope.Sdk.UnitTests/Llama2TextGenerationApiTests.cs
+++ /dev/null
@@ -1,61 +0,0 @@
-using Cnblogs.DashScope.Core;
-using Cnblogs.DashScope.Sdk.Llama2;
-using Cnblogs.DashScope.Tests.Shared.Utils;
-using NSubstitute;
-
-namespace Cnblogs.DashScope.Sdk.UnitTests;
-
-public class Llama2TextGenerationApiTests
-{
-    [Fact]
-    public async Task Llama2_UseEnum_SuccessAsync()
-    {
-        // Arrange
-        var client = Substitute.For<IDashScopeClient>();
-
-        // Act
-        _ = await client.GetLlama2TextCompletionAsync(Llama2Model.Chat13Bv2, Cases.TextMessages, ResultFormats.Message);
-
-        // Assert
-        _ = await client.Received().GetTextCompletionAsync(
-            Arg.Is<ModelRequest<TextGenerationInput, ITextGenerationParameters>>(s
-                => s.Input.Messages == Cases.TextMessages
-                   && s.Model == "llama2-13b-chat-v2"
-                   && s.Parameters != null
-                   && s.Parameters.ResultFormat == ResultFormats.Message));
-    }
-
-    [Fact]
-    public async Task Llama2_UseInvalidEnum_SuccessAsync()
-    {
-        // Arrange
-        var client = Substitute.For<IDashScopeClient>();
-
-        // Act
-        var act = async () => await client.GetLlama2TextCompletionAsync(
-            (Llama2Model)(-1),
-            Cases.TextMessages,
-            ResultFormats.Message);
-
-        // Assert
-        await Assert.ThrowsAsync<ArgumentOutOfRangeException>(act);
-    }
-
-    [Fact]
-    public async Task Llama2_CustomModel_SuccessAsync()
-    {
-        // Arrange
-        var client = Substitute.For<IDashScopeClient>();
-
-        // Act
-        _ = await client.GetLlama2TextCompletionAsync(Cases.CustomModelName, Cases.TextMessages, ResultFormats.Message);
-
-        // Assert
-        _ = await client.Received().GetTextCompletionAsync(
-            Arg.Is<ModelRequest<TextGenerationInput, ITextGenerationParameters>>(s
-                => s.Input.Messages == Cases.TextMessages
-                   && s.Model == Cases.CustomModelName
-                   && s.Parameters != null
-                   && s.Parameters.ResultFormat == ResultFormats.Message));
-    }
-}
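
Tests that previously covered the removed wrappers can target the generic API in the same NSubstitute style. A minimal sketch mirroring the deleted tests, assuming the same shared utilities (Cases, ResultFormats) and the request type shown above; class and method names here are illustrative:

```csharp
using Cnblogs.DashScope.Core;
using Cnblogs.DashScope.Tests.Shared.Utils;
using NSubstitute;
using Xunit;

public class GenericTextGenerationApiTests
{
    [Fact]
    public async Task TextGeneration_CustomModel_SuccessAsync()
    {
        // Arrange
        var client = Substitute.For<IDashScopeClient>();
        var request = new ModelRequest<TextGenerationInput, ITextGenerationParameters>
        {
            Model = Cases.CustomModelName,
            Input = new TextGenerationInput { Messages = Cases.TextMessages },
            Parameters = new TextGenerationParameters { ResultFormat = ResultFormats.Message }
        };

        // Act
        _ = await client.GetTextCompletionAsync(request);

        // Assert
        _ = await client.Received().GetTextCompletionAsync(
            Arg.Is<ModelRequest<TextGenerationInput, ITextGenerationParameters>>(
                s => s.Model == Cases.CustomModelName && s.Parameters != null));
    }
}
```
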