diff --git a/Samples/All/Senparc.Weixin.Sample.CommonService/AI/MessageHandlers/ChatStore.cs b/Samples/All/Senparc.Weixin.Sample.CommonService/AI/MessageHandlers/ChatStore.cs
index 54a3d3c098..862957d002 100644
--- a/Samples/All/Senparc.Weixin.Sample.CommonService/AI/MessageHandlers/ChatStore.cs
+++ b/Samples/All/Senparc.Weixin.Sample.CommonService/AI/MessageHandlers/ChatStore.cs
@@ -20,7 +20,7 @@ public class ChatStore
{
public ChatStatus Status { get; set; }
- public IWantToRun IWantToRun { get; set; }
+ public string History { get; set; }
}
///
@@ -28,6 +28,10 @@ public class ChatStore
///
public enum ChatStatus
{
+ ///
+ /// 默认状态(可能是转换失败)
+ ///
+ None,
///
/// 聊天中
///
diff --git a/Samples/All/Senparc.Weixin.Sample.CommonService/AI/MessageHandlers/CustomMessageHandler_AI.cs b/Samples/All/Senparc.Weixin.Sample.CommonService/AI/MessageHandlers/CustomMessageHandler_AI.cs
index 6e6f154b6c..742a2050e5 100644
--- a/Samples/All/Senparc.Weixin.Sample.CommonService/AI/MessageHandlers/CustomMessageHandler_AI.cs
+++ b/Samples/All/Senparc.Weixin.Sample.CommonService/AI/MessageHandlers/CustomMessageHandler_AI.cs
@@ -13,6 +13,8 @@
using System.Threading.Tasks;
using Senparc.AI.Entities;
using Senparc.AI.Kernel;
+using Senparc.CO2NET.Extensions;
+using Senparc.CO2NET.Helpers;
using Senparc.CO2NET.Trace;
using Senparc.NeuChar.Entities;
using Senparc.Weixin.MP.Entities;
@@ -42,40 +44,15 @@ private async Task StartAIChatAsync()
{
var currentMessageContext = await base.GetCurrentMessageContext();
- /* 模型配置
- * 注意: 需要在 appsettings.json 中的 节点配置 AI 模型参数,否则无法使用 AI 能力
- */
- var setting = (SenparcAiSetting)Senparc.AI.Config.SenparcAiSetting;//也可以留空,将自动获取
-
- //模型请求参数
- var parameter = new PromptConfigParameter()
- {
- MaxTokens = 2000,
- Temperature = 0.7,
- TopP = 0.5,
- };
-
- //最大保存 AI 对话记录数
- var maxHistoryCount = 10;
-
- //默认 SystemMessage(可根据自己需要修改)
- var systemMessage = Senparc.AI.DefaultSetting.DEFAULT_SYSTEM_MESSAGE;
-
- var semanticAiHandler = new SemanticAiHandler(setting);
- var iWantToRun = semanticAiHandler.ChatConfig(parameter,
- userId: "Jeffrey",
- maxHistoryStore: maxHistoryCount,
- chatSystemMessage: systemMessage,
- senparcAiSetting: setting).iWantToRun;
//新建个人对话缓存(由于使用了 CurrentMessageContext,多用户之前完全隔离,对话不会串)
var storage = new ChatStore()
{
Status = ChatStatus.Chat,
- IWantToRun = iWantToRun
+ History = ""
};
- currentMessageContext.StorageData = storage;
+ currentMessageContext.StorageData = storage.ToJson();//为了提升兼容性,采用字符格式
await GlobalMessageContext.UpdateMessageContextAsync(currentMessageContext);//储存到缓存
@@ -94,7 +71,22 @@ private async Task AIChatAsync(RequestMessageBase requestM
{
var currentMessageContext = await base.GetCurrentMessageContext();
- if (!(currentMessageContext.StorageData is ChatStore chatStore))
+ if (!(currentMessageContext.StorageData is string chatJson))
+ {
+ return null;
+ }
+
+ ChatStore chatStore;
+
+ try
+ {
+        chatStore = chatJson.GetObject<ChatStore>();
+ if (chatStore == null || chatStore.Status == ChatStatus.None || chatStore.History == null)
+ {
+ return null;
+ }
+ }
+ catch
{
return null;
}
@@ -110,8 +102,7 @@ private async Task AIChatAsync(RequestMessageBase requestM
prompt = $"我即将结束对话,请发送一段文字和我告别,并提醒我:输入“AI”可以再次启动对话。";
//消除状态记录
- currentMessageContext.StorageData = null;
- await GlobalMessageContext.UpdateMessageContextAsync(currentMessageContext);//储存到缓存
+ await UpdateMessageContext(currentMessageContext, null);
}
else if (requestMessageText.Content.Equals("P", StringComparison.OrdinalIgnoreCase))
{
@@ -119,17 +110,17 @@ private async Task AIChatAsync(RequestMessageBase requestM
// 修改状态记录
chatStore.Status = ChatStatus.Paused;
- await GlobalMessageContext.UpdateMessageContextAsync(currentMessageContext);//储存到缓存
+ await UpdateMessageContext(currentMessageContext, chatStore);
}
else if (chatStore.Status == ChatStatus.Paused)
{
if (requestMessageText.Content.Equals("AI", StringComparison.OrdinalIgnoreCase))
{
- prompt = @"我将重新开始对话,请发送一段欢迎信息,并且在最后提示我(注意保留换行):"+ WELCOME_MESSAGE;
+ prompt = @"我将重新开始对话,请发送一段欢迎信息,并且在最后提示我(注意保留换行):" + WELCOME_MESSAGE;
// 修改状态记录
chatStore.Status = ChatStatus.Chat;
- await GlobalMessageContext.UpdateMessageContextAsync(currentMessageContext);//储存到缓存
+ await UpdateMessageContext(currentMessageContext, chatStore);
}
else
{
@@ -141,12 +132,48 @@ private async Task AIChatAsync(RequestMessageBase requestM
prompt = requestMessageText.Content;
}
- var aiHandler = chatStore.IWantToRun.SemanticAiHandler;
+ #region 请求 AI 模型进入 Chat 的经典模式
+
+ /* 模型配置
+ * 注意:需要在 appsettings.json 中的 节点配置 AI 模型参数,否则无法使用 AI 能力
+ */
+ var setting = (SenparcAiSetting)Senparc.AI.Config.SenparcAiSetting;//也可以留空,将自动获取
+
+ //模型请求参数
+ var parameter = new PromptConfigParameter()
+ {
+ MaxTokens = 2000,
+ Temperature = 0.7,
+ TopP = 0.5,
+ };
+
+ //最大保存 AI 对话记录数
+ var maxHistoryCount = 10;
+
+ //默认 SystemMessage(可根据自己需要修改)
+ var systemMessage = Senparc.AI.DefaultSetting.DEFAULT_SYSTEM_MESSAGE;
+
+ var aiHandler = new SemanticAiHandler(setting);
+ var iWantToRun = aiHandler.ChatConfig(parameter,
+ userId: "Jeffrey",
+ maxHistoryStore: maxHistoryCount,
+ chatSystemMessage: systemMessage,
+ senparcAiSetting: setting).iWantToRun;
+
+ //注入历史记录(也可以把 iWantToRun 对象缓存起来,其中会自动包含 history,不需要每次读取或者保存)
+ iWantToRun.StoredAiArguments.Context["history"] = chatStore.History;
//获取请求(注意:因为微信需要一次返回所有文本,所以此处不使用 AI 流行的 Stream(流式)输出
- var result = await aiHandler.ChatAsync(chatStore.IWantToRun, prompt);
+ var result = await aiHandler.ChatAsync(iWantToRun, prompt);
+
+ #endregion
+ //保存历史记录
+ chatStore.History = iWantToRun.StoredAiArguments.Context["history"]?.ToString();
+ await UpdateMessageContext(currentMessageContext, chatStore);
+
+ //组织返回消息
var responseMessage = base.CreateResponseMessage();
responseMessage.Content = result.OutputString;
return responseMessage;
@@ -168,5 +195,11 @@ private async Task AIChatAsync(RequestMessageBase requestM
}
}
+
+ private async Task UpdateMessageContext(CustomMessageContext currentMessageContext, ChatStore chatStore)
+ {
+            currentMessageContext.StorageData = chatStore == null ? null : chatStore.ToJson();
+ await GlobalMessageContext.UpdateMessageContextAsync(currentMessageContext);//储存到缓存
+ }
}
}
\ No newline at end of file
diff --git a/Samples/All/Senparc.Weixin.Sample.CommonService/MessageHandlers/CustomMessageHandler/Async/CustomMessageHandlerAsync.cs b/Samples/All/Senparc.Weixin.Sample.CommonService/MessageHandlers/CustomMessageHandler/Async/CustomMessageHandlerAsync.cs
index 7e1fa0a776..86b96a8275 100644
--- a/Samples/All/Senparc.Weixin.Sample.CommonService/MessageHandlers/CustomMessageHandler/Async/CustomMessageHandlerAsync.cs
+++ b/Samples/All/Senparc.Weixin.Sample.CommonService/MessageHandlers/CustomMessageHandler/Async/CustomMessageHandlerAsync.cs
@@ -28,7 +28,9 @@ public override async Task OnExecutingAsync(CancellationToken cancellationToken)
{
//演示:MessageContext.StorageData
- var currentMessageContext = await base.GetUnsafeMessageContext();//为了在分布式缓存下提高读写效率,使用此方法,如果需要获取实时数据,应该使用 base.GetCurrentMessageContext()
+ //var currentMessageContext = await base.GetUnsafeMessageContext();//为了在分布式缓存下提高读写效率,使用此方法,如果需要获取实时数据,应该使用 base.GetCurrentMessageContext()
+ var currentMessageContext = await base.GetCurrentMessageContext();
+
if (currentMessageContext.StorageData == null)
{
currentMessageContext.StorageData = 0;
@@ -41,7 +43,8 @@ public override async Task OnExecutedAsync(CancellationToken cancellationToken)
{
//演示:MessageContext.StorageData
- var currentMessageContext = await base.GetUnsafeMessageContext();//为了在分布式缓存下提高读写效率,使用此方法,如果需要获取实时数据,应该使用 base.GetCurrentMessageContext()
+ //var currentMessageContext = await base.GetUnsafeMessageContext();//为了在分布式缓存下提高读写效率,使用此方法,如果需要获取实时数据,应该使用 base.GetCurrentMessageContext()
+ var currentMessageContext = await base.GetCurrentMessageContext();
if (currentMessageContext.StorageData is int data)
{
diff --git a/Samples/All/Senparc.Weixin.Sample.CommonService/MessageHandlers/CustomMessageHandler/CustomMessageHandler.cs b/Samples/All/Senparc.Weixin.Sample.CommonService/MessageHandlers/CustomMessageHandler/CustomMessageHandler.cs
index 61dc148f6f..97cbc32ad2 100644
--- a/Samples/All/Senparc.Weixin.Sample.CommonService/MessageHandlers/CustomMessageHandler/CustomMessageHandler.cs
+++ b/Samples/All/Senparc.Weixin.Sample.CommonService/MessageHandlers/CustomMessageHandler/CustomMessageHandler.cs
@@ -467,8 +467,7 @@ public override async Task OnTextRequestAsync(RequestMessa
var currentMessageContext = await base.GetCurrentMessageContext();
if (currentMessageContext.RequestMessages.Count > 1)
{
- result.AppendFormat("您此前还发送了如下消息({0}/{1}):\r\n", currentMessageContext.RequestMessages.Count,
- currentMessageContext.StorageData);
+ result.AppendFormat("您此前还发送了如下消息({0}):\r\n", currentMessageContext.RequestMessages.Count);
for (int i = currentMessageContext.RequestMessages.Count - 2; i >= 0; i--)
{
var historyMessage = currentMessageContext.RequestMessages[i];
diff --git a/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/Senparc.Weixin.Sample.net8.csproj b/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/Senparc.Weixin.Sample.net8.csproj
index 3675f29540..2bf6729490 100644
--- a/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/Senparc.Weixin.Sample.net8.csproj
+++ b/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/Senparc.Weixin.Sample.net8.csproj
@@ -62,6 +62,23 @@
Always
+
+ Never
+
+
+ Never
+
+
+ Never
+
+
+ Never
+
+
+
+
+
+