【C#】如何实现手写Ollama服务交互,实现本地模型对话。_c# ollama
-
前言
-
C#手写Ollama服务交互,实现本地模型对话
- 最近使用C#调用OllamaSharp库实现Ollama本地对话,然后思考着能否自己实现这个功能。
- 经过一番查找,并查看OllamaSharp源码后,发现确实可以。
- 其实就是开启Ollama服务后,发送HTTP请求,获取返回结果以及一些数据处理。
-
基本流程
- 1、启动Ollama服务进程。
- 2、创建HttpClient对象。
- 3、创建请求体(参数:模型名称、提示语、是否流式生成)。
- 4、将请求体序列化为JSON。
- 5、创建HTTP请求内容。
- 6、发送POST请求,并确保请求成功。
- 7、读取并返回响应内容,并解析响应字符串。
- 8、返回结果。
//创建请求体:模型名称、提示语、是否流式生成 var request = new RequestModel{Model = model,Prompt = prompt,Stream = false}; // 将请求体序列化为JSON var json = JsonSerializer.Serialize(request); // 创建HTTP请求内容 var content = new StringContent(json, Encoding.UTF8, \"application/json\"); // 发送POST请求 var response = await _httpClient.PostAsync(\"/api/generate\", content); // 确保请求成功 response.EnsureSuccessStatusCode(); // 读取并返回响应内容 string responseString = await response.Content.ReadAsStringAsync(); ///解析相应字符串 ResponseModel results = JsonSerializer.Deserialize<ResponseModel>(responseString); //返回结果 return results.Response;
-
-
项目结构
- OllamaClient:实现基本的对话请求、获取模型列表功能。
- Model :创建模型结果的一些参数。
- RequestModel:请求参数模型。
- ResponseModel:结果参数模型,用于解析返回的结果。
- MainWindow:用户界面。
- MainWindowViewModel:界面交互业务处理
-
案例
-
模型加载
-
发送聊天
-
代码
-
OllamaSharp
-
Ollama客户端 OllamaClient
/// <summary>
/// Minimal HTTP client for a local Ollama server: one-shot text generation,
/// streaming generation, and local model listing.
/// </summary>
public class OllamaClient
{
    /// <summary>Cached result of the last successful <see cref="ListLocalModelsAsync"/> call.</summary>
    public IEnumerable<Model> ModelList { get; set; }

    private readonly HttpClient _httpClient;

    /// <summary>
    /// Creates the client and runs "ollama list" through cmd.exe so the
    /// Ollama background service is started if it is not already running.
    /// </summary>
    /// <param name="baseAddress">Base URL of the Ollama HTTP API.</param>
    public OllamaClient(string baseAddress = "http://localhost:11434")
    {
        _httpClient = new HttpClient { BaseAddress = new Uri(baseAddress) };
        ExecuteCommand("ollama list"); // 启动Ollama服务
    }

    /// <summary>
    /// 异步生成文本 — requests a complete (non-streaming) answer in one call.
    /// </summary>
    /// <param name="model">Name of the local model to use.</param>
    /// <param name="prompt">User prompt.</param>
    /// <returns>The full generated response text.</returns>
    /// <exception cref="Exception">Wraps any <see cref="HttpRequestException"/> from the request.</exception>
    public async Task<string> GenerateTextAsync(string model, string prompt)
    {
        try
        {
            // 创建请求体:模型名称、提示语、是否流式生成
            var request = new RequestModel { Model = model, Prompt = prompt, Stream = false };
            // 将请求体序列化为JSON
            var json = JsonSerializer.Serialize(request);
            // 创建HTTP请求内容
            var content = new StringContent(json, Encoding.UTF8, "application/json");
            // 发送POST请求
            var response = await _httpClient.PostAsync("/api/generate", content);
            // 确保请求成功
            response.EnsureSuccessStatusCode();
            // 读取并返回响应内容
            string responseString = await response.Content.ReadAsStringAsync();
            // 解析响应字符串
            ResponseModel results = JsonSerializer.Deserialize<ResponseModel>(responseString);
            return results.Response;
        }
        catch (HttpRequestException e)
        {
            // FIX: keep the original exception as InnerException so the
            // stack trace and status information are not lost.
            throw new Exception($"Request failed: {e.Message}", e);
        }
    }

    /// <summary>
    /// 异步流式生成文本 — Ollama streams one JSON object per line; each line's
    /// "response" field is yielded as soon as it arrives.
    /// </summary>
    public async IAsyncEnumerable<string> StreamGenerateTextAsync(string model, string prompt)
    {
        // 创建请求体:模型名称、提示语、是否流式生成
        var request = new RequestModel { Model = model, Prompt = prompt, Stream = true };
        var json = JsonSerializer.Serialize(request);
        var content = new StringContent(json, Encoding.UTF8, "application/json");
        // 发送POST请求
        using var response = await _httpClient.PostAsync("/api/generate", content);
        response.EnsureSuccessStatusCode();
        // 读取流并逐行解析为ResponseModel
        using var stream = await response.Content.ReadAsStreamAsync();
        using var reader = new StreamReader(stream);
        while (!reader.EndOfStream)
        {
            var line = await reader.ReadLineAsync();
            // 如果行不为空,则解析为ResponseModel并返回
            if (!string.IsNullOrEmpty(line))
            {
                var partial = JsonSerializer.Deserialize<ResponseModel>(line);
                yield return partial.Response;
            }
        }
    }

    /// <summary>
    /// 异步获取本地模型列表 (GET /api/tags); also caches it in <see cref="ModelList"/>.
    /// </summary>
    public async Task<IEnumerable<Model>> ListLocalModelsAsync()
    {
        // FIX: removed stray ";;" and the artificial 3-second Task.Delay;
        // ConfigureAwait(false) is applied to both awaits consistently.
        HttpResponseMessage responseMessage = await _httpClient.GetAsync("/api/tags").ConfigureAwait(false);
        // 确保请求成功
        responseMessage.EnsureSuccessStatusCode();
        // 读取响应并解析为LocalModels
        string response = await responseMessage.Content.ReadAsStringAsync().ConfigureAwait(false);
        LocalModels localModel = JsonSerializer.Deserialize<LocalModels>(response);
        // 返回结果
        ModelList = localModel.Models;
        return localModel.Models;
    }

    /// <summary>
    /// 执行CMD指令:用于启动Ollama服务. Blocks until the command exits.
    /// </summary>
    /// <returns>true when the command exited with code 0.</returns>
    public static bool ExecuteCommand(string command)
    {
        // 创建一个新的进程启动信息
        ProcessStartInfo processStartInfo = new ProcessStartInfo
        {
            FileName = "cmd.exe",       // 设置要启动的程序为cmd.exe
            Arguments = $"/C {command}", // 设置要执行的命令
            UseShellExecute = true,      // 使用操作系统shell启动进程
            CreateNoWindow = false,      // 不创建窗体
        };
        try
        {
            using Process process = Process.Start(processStartInfo); // 启动进程
            process.WaitForExit(); // 等待进程退出
            // FIX: read ExitCode BEFORE releasing the process handle.
            // The original called process.Close() first, after which
            // accessing ExitCode throws InvalidOperationException.
            return process.ExitCode == 0;
        }
        catch (Exception ex)
        {
            Debug.WriteLine($"发生错误: {ex.Message}"); // 其他异常处理
            return false;
        }
    }
}
-
请求模型:RequestModel
/// <summary>
/// Request body for Ollama's /api/generate endpoint.
/// FIX: added [JsonPropertyName] attributes so the serialized keys are the
/// lowercase names the Ollama API documents ("model", "prompt", "stream"),
/// consistent with how ResponseModel maps its fields.
/// </summary>
public class RequestModel
{
    /// <summary>Name of the local model to run.</summary>
    [JsonPropertyName("model")]
    public string Model { get; set; }

    /// <summary>User prompt text.</summary>
    [JsonPropertyName("prompt")]
    public string Prompt { get; set; }

    /// <summary>true to stream the answer line-by-line; false for one complete reply.</summary>
    [JsonPropertyName("stream")]
    public bool Stream { get; set; }
}
响应模型:ResponseModel
/// <summary>
/// Response body returned by Ollama's /api/generate endpoint
/// (one such object per line when streaming).
/// </summary>
public class ResponseModel
{
    /// <summary>模型名称 — name of the model that produced the answer.</summary>
    [JsonPropertyName("model")]
    public string Model { get; set; }

    /// <summary>创建时间 — creation timestamp, kept as the raw string sent by the server.</summary>
    [JsonPropertyName("created_at")]
    public string CreatedTime { get; set; }

    /// <summary>响应 — the generated text (a partial chunk when streaming).</summary>
    [JsonPropertyName("response")]
    public string Response { get; set; }

    /// <summary>是否结束 — whether generation has finished.</summary>
    [JsonPropertyName("done")]
    public bool Done { get; set; }

    /// <summary>结束原因 — why generation stopped.</summary>
    [JsonPropertyName("done_reason")]
    public string Done_Reason { get; set; }

    /// <summary>上下文 — context tokens that can be sent back for a follow-up request.</summary>
    [JsonPropertyName("context")]
    public List<int> Context { get; set; }

    /// <summary>总耗时 — total duration (server-reported; presumably nanoseconds per the Ollama API — confirm).</summary>
    [JsonPropertyName("total_duration")]
    public long TotalDuration { get; set; }

    /// <summary>加载耗时 — model load duration.</summary>
    [JsonPropertyName("load_duration")]
    public long LoadDuration { get; set; }

    /// <summary>提示词评估次数 — number of prompt tokens evaluated.</summary>
    [JsonPropertyName("prompt_eval_count")]
    public long PromptEvalCount { get; set; }

    /// <summary>提示词评估耗时 — prompt evaluation duration.</summary>
    [JsonPropertyName("prompt_eval_duration")]
    public long PromptEvalDuration { get; set; }

    /// <summary>评估次数 — number of tokens generated.</summary>
    [JsonPropertyName("eval_count")]
    public long EvalCount { get; set; }

    /// <summary>评估耗时 — generation duration.</summary>
    [JsonPropertyName("eval_duration")]
    public long EvalDuration { get; set; }
}
结果模型:LocalModels | Model
/// <summary>
/// 本地模型 — response body of Ollama's /api/tags endpoint:
/// the list of locally installed models.
/// </summary>
public class LocalModels
{
    /// <summary>Installed models reported by the server.</summary>
    [JsonPropertyName("models")]
    public IEnumerable<Model> Models { get; set; }
}

/// <summary>
/// 模型 — one locally installed model entry from /api/tags.
/// </summary>
public class Model
{
    /// <summary>模型名称 — display name of the model.</summary>
    [JsonPropertyName("name")]
    public string Name { get; set; }

    /// <summary>模型名称 — model identifier (presumably the "name:tag" form used when pulling — verify against server output).</summary>
    [JsonPropertyName("model")]
    public string ModelName { get; set; }

    /// <summary>修改时间 — last modification time.</summary>
    [JsonPropertyName("modified_at")]
    public DateTime ModifiedAt { get; set; }

    /// <summary>大小 — size on disk in bytes (formatted for display by the view model).</summary>
    [JsonPropertyName("size")]
    public long Size { get; set; }

    /// <summary>Content digest of the model.</summary>
    [JsonPropertyName("digest")]
    public string Digest { get; set; }

    /// <summary>模型细节 — additional model metadata.</summary>
    [JsonPropertyName("details")]
    public ModelDetails Details { get; set; }
}

/// <summary>
/// 模型细节 — detailed model metadata.
/// </summary>
public class ModelDetails
{
    /// <summary>父模型 — parent model this one derives from.</summary>
    [JsonPropertyName("parent_model")]
    public string ParentModel { get; set; }

    /// <summary>格式 — model file format.</summary>
    [JsonPropertyName("format")]
    public string Format { get; set; }

    /// <summary>Model family.</summary>
    [JsonPropertyName("family")]
    public string Family { get; set; }

    /// <summary>All families the model belongs to.</summary>
    [JsonPropertyName("families")]
    public List<string> Families { get; set; }

    /// <summary>参数大小 — parameter-count label (a string such as "1.5B" — server-formatted).</summary>
    [JsonPropertyName("parameter_size")]
    public string ParameterSize { get; set; }

    /// <summary>质量等级 — quantization level.</summary>
    [JsonPropertyName("quantization_level")]
    public string QuantizationLevel { get; set; }
}
简单的界面
-
MainWindow
<Window.DataContext>
    <local:MainWindowViewModel x:Name="ViewModel"/>
</Window.DataContext>
<Grid>
    <Grid.RowDefinitions>
        <RowDefinition Height="50"/>
        <RowDefinition Height="*"/>
        <RowDefinition Height="300"/>
    </Grid.RowDefinitions>
    <Grid Grid.Row="0">
        <WrapPanel VerticalAlignment="Center" Margin="5">
            <Label Content="模型列表" Margin="5"/>
            <!-- FIX: attributes were run together (Name="..."ItemsSource="..."), which is invalid XAML -->
            <ComboBox Width="200" Margin="5" Name="ModelListBox"
                      ItemsSource="{Binding ModelCollection}"
                      SelectedItem="{Binding SelectedModel}"/>
        </WrapPanel>
    </Grid>
    <Grid Grid.Row="1">
        <TextBox x:Name="OutputBox" Text="{Binding OutputText}"
                 ScrollViewer.HorizontalScrollBarVisibility="Visible"
                 ScrollViewer.VerticalScrollBarVisibility="Visible"/>
    </Grid>
    <Grid Grid.Row="2">
        <Grid.RowDefinitions>
            <RowDefinition Height="*"/>
            <RowDefinition Height="50"/>
        </Grid.RowDefinitions>
        <TextBox Grid.Row="0" x:Name="InputBox" Background="#AAAAAA"
                 Text="{Binding InputText}" TextWrapping="WrapWithOverflow"
                 ScrollViewer.VerticalScrollBarVisibility="Auto"
                 ScrollViewer.HorizontalScrollBarVisibility="Auto"/>
        <WrapPanel Grid.Row="1" HorizontalAlignment="Right" VerticalAlignment="Center" Margin="5">
            <!-- FIX: attributes were run together (x:Name="..."Command="...") -->
            <Button Grid.Row="1" Width="100" Height="30" x:Name="Btn_Submit"
                    Command="{Binding SendQuestionCommand}">发送</Button>
        </WrapPanel>
    </Grid>
</Grid>
-
MainWindowViewModel
/// <summary>
/// View model for MainWindow: loads the local model list on startup and
/// streams chat answers from the Ollama client into the output box.
/// </summary>
public class MainWindowViewModel : PropertyChangedBase
{
    #region 字段、属性
    private string _inputText = "";                         // 输入文本
    private string _outputText = "";                        // 输出文本
    private OllamaClient _ollama;                           // Ollama客户端
    private string _selectedModel = "deepseek-r1:1.5b";     // 选择模型
    private ObservableCollection<string> _modelCollection;  // 模型列表

    #region 属性
    public ObservableCollection<string> ModelCollection
    {
        get => _modelCollection;
        set
        {
            if (_modelCollection != value)
            {
                _modelCollection = value;
                OnPropertyChanged();
            }
        }
    }

    public string SelectedModel
    {
        get => _selectedModel;
        set
        {
            if (_selectedModel != value)
            {
                _selectedModel = value;
                OnPropertyChanged();
            }
        }
    }

    private OllamaClient Ollama => _ollama;

    public string OutputText
    {
        get => _outputText;
        set
        {
            if (_outputText != value)
            {
                _outputText = value;
                OnPropertyChanged();
            }
        }
    }

    public string InputText
    {
        get => _inputText;
        set
        {
            if (_inputText != value)
            {
                _inputText = value;
                OnPropertyChanged();
            }
        }
    }

    public ICommand SendQuestionCommand { get; set; }
    #endregion
    #endregion

    public MainWindowViewModel()
    {
        Initialize();
    }

    /// <summary>
    /// 初始化 — wires the send command, then loads the model list.
    /// FIX: the original (misspelled "Initialze") blocked on models.Result
    /// inside the constructor, which can deadlock a UI thread; the load is
    /// now awaited asynchronously, and the command is registered before the
    /// load so the UI is usable immediately.
    /// </summary>
    private async void Initialize()
    {
        _ollama = new OllamaClient();
        _modelCollection = new ObservableCollection<string>();
        SelectedModel = "deepseek-r1:1.5b";
        SendQuestionCommand = new ParameterlessCommand(OnSendQuestion);

        try
        {
            var models = await Ollama.ListLocalModelsAsync();
            AppendLine($"模型列表;{Environment.NewLine}");
            foreach (var model in models)
            {
                ModelCollection.Add(model.ModelName);
                AppendLine($"{model.ModelName},{FormatFileSize(model.Size)}\r\n");
            }
        }
        catch (Exception ex)
        {
            // async void: an unhandled exception here would crash the app.
            AppendText($"Error: {ex.Message}");
        }
    }

    /// <summary>
    /// 格式化文件大小 — human-readable size string.
    /// FIX: divides as double so fractional sizes survive; the original used
    /// integer division, so "{bytes:0.##}" could never show decimals.
    /// </summary>
    private string FormatFileSize(long bytes)
    {
        string[] sizes = { "B", "KB", "MB", "GB", "TB" };
        double size = bytes;
        int order = 0;
        while (size >= 1024 && order < sizes.Length - 1)
        {
            order++;
            size /= 1024;
        }
        return $"{size:0.##} {sizes[order]}";
    }

    /// <summary>
    /// 发送文本 — streams the answer chunk-by-chunk into the output box.
    /// async void is acceptable here only because it is a top-level command handler.
    /// </summary>
    public async void OnSendQuestion()
    {
        try
        {
            AppendLine($"【用户】{InputText}\r\n\r\n");
            AppendLine($"【AI】\r\n\r\n");
            await foreach (var answerToken in Ollama.StreamGenerateTextAsync(SelectedModel, InputText))
            {
                AppendText(answerToken);
            }
            AppendLine($"\r\n");
        }
        catch (Exception ex)
        {
            AppendText($"Error: {ex.Message}");
        }
    }

    /// <summary>附加文本. FIX: was "async void" with no await — now plain void.</summary>
    private void AppendText(string text)
    {
        Debug.Print($"{text}");
        OutputText += text;
    }

    /// <summary>附加文本行. FIX: was "async void" with no await — now plain void.</summary>
    private void AppendLine(string text)
    {
        Debug.Print($"{text}");
        OutputText += $"{text}\r\n";
    }
}
/// <summary>
/// 属性变更 — base class that raises
/// <see cref="INotifyPropertyChanged.PropertyChanged"/> on behalf of
/// derived view models.
/// </summary>
public class PropertyChangedBase : INotifyPropertyChanged
{
    /// <inheritdoc/>
    public event PropertyChangedEventHandler PropertyChanged;

    /// <summary>
    /// Raises <see cref="PropertyChanged"/> for the given property;
    /// defaults to the calling member's name via [CallerMemberName].
    /// </summary>
    protected void OnPropertyChanged([CallerMemberName] string propertyName = null)
    {
        // Snapshot the delegate so a concurrent unsubscribe between the
        // null check and the invoke cannot cause a NullReferenceException.
        var handler = PropertyChanged;
        if (handler != null)
        {
            handler(this, new PropertyChangedEventArgs(propertyName));
        }
    }
}
总结
- 案例代码实现了与Ollama的HTTP交互,通过使用HttpClient、JSON序列化和错误处理,提供了一个简洁的异步文本生成接口。
- 适合直接调用本地Ollama服务的场景,更多功能,可以后续拓展。
-
结语
- 既是分享,也是备份。
-
最后
-
如果你觉得这篇文章对你有帮助,不妨点个赞支持一下!
-
如有疑问,欢迎评论区留言。
-
也可以关注微信公众号 [编程笔记in] ,共同学习交流!