1、安装NuGet包
dotnet add package Microsoft.Extensions.AI
dotnet add package Microsoft.Extensions.AI.OpenAI
2、使用Microsoft.Extensions.AI调用大模型
(1)非流式输出
using Microsoft.Extensions.AI;
using OpenAI;
// Build an IChatClient over an OpenAI-compatible cloud endpoint.
// NOTE(review): avoid hard-coding API keys in source; load from configuration or environment.
IChatClient chatclient=new OpenAIClient(
new System.ClientModel.ApiKeyCredential("ms-ee0ab5e3-d373-498b-923e-c727be632d50"),
new OpenAIClientOptions{
Endpoint = new Uri("https://api-inference.modelscope.cn/v1")
}).GetChatClient("deepseek-ai/DeepSeek-V3.2").AsIChatClient();
/*The above targets an OpenAI-compatible cloud model. For an OpenAI-compatible
local model, use:
IChatClient chatclient=new OpenAIClient(
new System.ClientModel.ApiKeyCredential("any non-empty string; local models such as ollama do not check the api key"),
new OpenAIClientOptions{
Endpoint = new Uri("http://127.0.0.1:11434/v1") //append the /v1 suffix to the default address to use the OpenAI-compatible API
}).GetChatClient("phi3").AsIChatClient();
A model served locally by Ollama can also be reached with:
using OllamaSharp; //dotnet add package OllamaSharp
IChatClient chatclient=new OllamaApiClient(new Uri("http://localhost:11434/"), "phi3");*/
while(true){
Console.Write("Q:"); //fixed: missing semicolon
string msg=Console.ReadLine()??"exit"; //fixed: Console.ReadLine() takes no arguments
if(msg=="exit") break;
Console.Write("AI:");
Console.WriteLine(await chatclient.GetResponseAsync(msg)); //print the model's whole answer at once
}
(2)带对话历史(上下文,短期记忆)的非流式输出
using Microsoft.Extensions.AI;
using OpenAI;
// Non-streaming chat that keeps the conversation history (short-term memory).
var chatclient=new OpenAIClient(
new System.ClientModel.ApiKeyCredential("ms-ee0ab5e3-d373-498b-923e-c727be632d50"),
new OpenAIClientOptions{
Endpoint = new Uri("https://api-inference.modelscope.cn/v1")
}).GetChatClient("deepseek-ai/DeepSeek-V3.2").AsIChatClient();
List<ChatMessage> history=[];
// System prompt: the string is user-facing behavior and is kept as-is.
history.Add(new ChatMessage(ChatRole.System,"你是一名python专家,回答python专业问题"));
while(true){
Console.Write("Q:"); //fixed: missing semicolon
string msg=Console.ReadLine()??"exit"; //fixed: Console.ReadLine() takes no arguments
if(msg=="exit") break;
history.Add(new ChatMessage(ChatRole.User,msg));
Console.Write("AI:");
ChatResponse response=await chatclient.GetResponseAsync(history); //fixed: missing semicolon
Console.WriteLine(response);
history.AddMessages(response); //append the assistant reply so the next turn has full context
}
(3)流式输出
using Microsoft.Extensions.AI;
using OpenAI;
// Streaming chat: tokens are printed as the model produces them.
var chatclient=new OpenAIClient(
new System.ClientModel.ApiKeyCredential("ms-ee0ab5e3-d373-498b-923e-c727be632d50"),
new OpenAIClientOptions{
Endpoint = new Uri("https://api-inference.modelscope.cn/v1")
}).GetChatClient("deepseek-ai/DeepSeek-V3.2").AsIChatClient();
while(true){
Console.Write("Q:"); //fixed: missing semicolon
string msg=Console.ReadLine()??"exit"; //fixed: Console.ReadLine() takes no arguments
if(msg=="exit") break;
Console.Write("AI:");
await foreach(ChatResponseUpdate update in chatclient.GetStreamingResponseAsync(msg)){
Console.Write(update); //print each incremental chunk immediately
}
Console.WriteLine();
}
(4)带对话历史(上下文,短期记忆)的流式输出
using Microsoft.Extensions.AI;
using OpenAI;
// Streaming chat with conversation history (short-term memory).
var chatclient=new OpenAIClient(
new System.ClientModel.ApiKeyCredential("ms-ee0ab5e3-d373-498b-923e-c727be632d50"),
new OpenAIClientOptions{
Endpoint = new Uri("https://api-inference.modelscope.cn/v1")
}).GetChatClient("deepseek-ai/DeepSeek-V3.2").AsIChatClient();
List<ChatMessage> history=[];
history.Add(new(ChatRole.System,"你是一名python专家,回答python专业问题"));
while(true){
Console.Write("Q:"); //fixed: missing semicolon
string msg=Console.ReadLine()??"exit"; //fixed: Console.ReadLine() takes no arguments
if(msg=="exit") break;
history.Add(new(ChatRole.User,msg));
List<ChatResponseUpdate> updates=[]; //collect the chunks so they can be folded back into history
Console.Write("AI:");
await foreach(ChatResponseUpdate update in chatclient.GetStreamingResponseAsync(history)){ //fixed: missing receiver "chatclient." and missing ")"
Console.Write(update);
updates.Add(update);
}
Console.WriteLine();
history.AddMessages(updates); //merge the streamed chunks into one assistant message in the history
}
3、在asp.net core中使用依赖注入(DI)
(1)非流式输出
using Microsoft.Extensions.AI;
using OpenAI;
var builder = WebApplication.CreateBuilder(args);
// Register an IChatClient in the DI container so endpoints/controllers can receive it by injection.
builder.Services.AddChatClient(
new OpenAIClient(
new System.ClientModel.ApiKeyCredential("ms-ee0ab5e3-d373-498b-923e-c727be632d20"),
new OpenAIClientOptions()
{
Endpoint = new Uri("https://api-inference.modelscope.cn/v1")
}).GetChatClient("deepseek-ai/DeepSeek-V3.2").AsIChatClient()
);
/*
To register an Ollama-hosted local model instead:
using OllamaSharp; //dotnet add package OllamaSharp
builder.Services.AddChatClient(
new OllamaApiClient("http://127.0.0.1:11434","phi3")
);*/
var app = builder.Build();
// Minimal API endpoint: IChatClient is resolved from DI; msg has a default value (C# 12 lambda default parameters).
app.MapGet("/", async (IChatClient chatclient,string msg="你是谁") =>{
ChatResponse response = await chatclient.GetResponseAsync(msg);
return response.Text;
});
//In a controller, an IChatClient instance can be obtained via constructor injection.
/*An IChatClient instance can also be resolved manually from the DI container:
IChatClient chatclient=app.Services.GetRequiredService<IChatClient>();*/
app.Run();
(2)流式输出
using Microsoft.Extensions.AI;
using OpenAI;
var builder = WebApplication.CreateBuilder(args);
// Register an IChatClient in the DI container.
builder.Services.AddChatClient(
new OpenAIClient(
new System.ClientModel.ApiKeyCredential("ms-ee0ab5e3-d373-498b-923e-c727be632d20"),
new OpenAIClientOptions()
{
Endpoint = new Uri("https://api-inference.modelscope.cn/v1")
}).GetChatClient("deepseek-ai/DeepSeek-V3.2").AsIChatClient()
);
var app = builder.Build();
// Streaming endpoint: writes each model chunk to the HTTP response as it arrives.
app.MapGet("/chat", async (IChatClient chatclient,HttpResponse response,string msg="你是谁") =>{
//Alternatively accept HttpContext context and use context.Response; in a controller use the Response property directly.
response.ContentType="text/event-stream";
response.Headers.CacheControl="no-cache"; //fixed: typo "CachControl"
response.Headers.Connection="keep-alive";
await foreach(var update in chatclient.GetStreamingResponseAsync(msg)){ //fixed: missing ")"
if( !string.IsNullOrEmpty(update.Text)){
await response.WriteAsync(update.Text);
await response.Body.FlushAsync();
//Key point: flush the buffer immediately so the client receives data in real time.
}
}
});
app.Run();
前端接收(javascript):
<div id="c"></div>
<button id="b">点击</button>
<script>
document.getElementById('b').addEventListener("click", async function () {
var response=await fetch("/chat");
var reader=response.body.getReader();
while(true){
var {done,value}=await reader.read();
if(done) break;
document.getElementById('c').textContent +=new TextDecoder().decode(value);
}
});
</script>
(3)使用SSE流式输出
方式一:
// SSE endpoint written by hand: each message must be framed as "data:...\n\n".
app.MapGet("/chat", async (IChatClient chatclient,HttpResponse response,string msg="你是谁") =>{
response.ContentType="text/event-stream";
response.Headers.CacheControl="no-cache"; //fixed: typo "CachControl"
response.Headers.Connection="keep-alive";
await foreach(var update in chatclient.GetStreamingResponseAsync(msg)){ //fixed: missing ")"
if( !string.IsNullOrEmpty(update.Text)){
await response.WriteAsync($"data:{update.Text}\n\n");
await response.Body.FlushAsync();
}
}
//fixed: the terminator must also be an SSE "data:" frame, otherwise the EventSource
//client never receives it as a message and cannot close the connection.
await response.WriteAsync("data:[DONE]\n\n");
await response.Body.FlushAsync();
});方式二:
using System.Net.ServerSentEvents;
// SSE endpoint via TypedResults.ServerSentEvents (.NET 9+): framing and headers are handled for you.
app.MapGet("/chat", async (IChatClient chatclient, string msg = "你是谁") =>{
// Local async iterator that adapts the model's streaming updates into SSE items.
async IAsyncEnumerable<SseItem<string>> GetResponse(){
await foreach (var update in chatclient.GetStreamingResponseAsync(msg)){
if (!string.IsNullOrEmpty(update.Text)){
yield return new SseItem<string>(update.Text);
//yield return new SseItem<string>(update.Text,"custom-event-name");
}
}
// Sentinel item so the client knows the stream is finished and can close.
yield return new SseItem<string>("[DONE]");
//yield return new SseItem<string>("[DONE]","custom-event-name");
}
return TypedResults.ServerSentEvents(GetResponse());
});
前端接收(javascript):
document.getElementById('b').addEventListener("click", async function () {
var eventsource = new EventSource("/chat")
eventsource.addEventListener("message", (e)=>{ //message为默认事件名,也可以使用服务器端的自定义事件名
if(e.data==='[DONE]'){
eventsource.close();
eventsource=null;
return
}
document.getElementById('c')[xss_clean] += e.data
});
});.net9前端接收:
using System.Net.ServerSentEvents;
using var client=new HttpClient();
// fixed: port 11434 is Ollama's default, but /chat is the ASP.NET Core endpoint above;
// point the client at the web app's URL (adjust host/port to your launch settings).
using var stream=await client.GetStreamAsync("http://localhost:5000/chat");
await foreach(var item in SseParser.Create(stream).EnumerateAsync()){
if(item.Data!="[DONE]"){ //skip the end-of-stream sentinel
Console.Write(item.Data);
}
}
Console.WriteLine();
SSE:服务器发送事件(Server-Sent Events)的特点:
①单向通信:数据只能从服务器流向客户端。
②文本数据流:传输格式主要为文本(如JSON),适合实时通知、日志流等场景。
③自动重连:连接中断时,浏览器会自动尝试重新建立连接。
④事件分发:支持发送自定义事件类型,便于客户端分类处理。
⑤EventSource只能以GET方式发起请求,无法自定义请求方法或携带请求体(如需POST可改用fetch读取流)。
下一篇:没有了