diff --git a/api/proto/gen/ai/v1/ai.pb.go b/api/proto/gen/ai/v1/ai.pb.go new file mode 100644 index 0000000..2575e80 --- /dev/null +++ b/api/proto/gen/ai/v1/ai.pb.go @@ -0,0 +1,743 @@ +// Copyright 2021 ecodeclub +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.36.6 +// protoc (unknown) +// source: ai/v1/ai.proto + +package aiv1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" + unsafe "unsafe" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type Role int32 + +const ( + Role_UNKNOWN Role = 0 + Role_USER Role = 1 + Role_ASSISTANT Role = 2 + Role_SYSTEM Role = 3 + Role_TOOL Role = 4 +) + +// Enum value maps for Role. +var ( + Role_name = map[int32]string{ + 0: "UNKNOWN", + 1: "USER", + 2: "ASSISTANT", + 3: "SYSTEM", + 4: "TOOL", + } + Role_value = map[string]int32{ + "UNKNOWN": 0, + "USER": 1, + "ASSISTANT": 2, + "SYSTEM": 3, + "TOOL": 4, + } +) + +func (x Role) Enum() *Role { + p := new(Role) + *p = x + return p +} + +func (x Role) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Role) Descriptor() protoreflect.EnumDescriptor { + return file_ai_v1_ai_proto_enumTypes[0].Descriptor() +} + +func (Role) Type() protoreflect.EnumType { + return &file_ai_v1_ai_proto_enumTypes[0] +} + +func (x Role) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Role.Descriptor instead. 
+func (Role) EnumDescriptor() ([]byte, []int) { + return file_ai_v1_ai_proto_rawDescGZIP(), []int{0} +} + +type StreamEvent struct { + state protoimpl.MessageState `protogen:"open.v1"` + Final bool `protobuf:"varint,1,opt,name=final,proto3" json:"final,omitempty"` + ReasoningContent string `protobuf:"bytes,2,opt,name=reasoningContent,proto3" json:"reasoningContent,omitempty"` + Content string `protobuf:"bytes,3,opt,name=content,proto3" json:"content,omitempty"` + Err string `protobuf:"bytes,4,opt,name=err,proto3" json:"err,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *StreamEvent) Reset() { + *x = StreamEvent{} + mi := &file_ai_v1_ai_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *StreamEvent) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*StreamEvent) ProtoMessage() {} + +func (x *StreamEvent) ProtoReflect() protoreflect.Message { + mi := &file_ai_v1_ai_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use StreamEvent.ProtoReflect.Descriptor instead. +func (*StreamEvent) Descriptor() ([]byte, []int) { + return file_ai_v1_ai_proto_rawDescGZIP(), []int{0} +} + +func (x *StreamEvent) GetFinal() bool { + if x != nil { + return x.Final + } + return false +} + +func (x *StreamEvent) GetReasoningContent() string { + if x != nil { + return x.ReasoningContent + } + return "" +} + +func (x *StreamEvent) GetContent() string { + if x != nil { + return x.Content + } + return "" +} + +func (x *StreamEvent) GetErr() string { + if x != nil { + return x.Err + } + return "" +} + +type Conversation struct { + state protoimpl.MessageState `protogen:"open.v1"` + Sn string `protobuf:"bytes,1,opt,name=sn,proto3" json:"sn,omitempty"` + Uid string `protobuf:"bytes,2,opt,name=uid,proto3" json:"uid,omitempty"` + Title string `protobuf:"bytes,3,opt,name=title,proto3" json:"title,omitempty"` + Message []*Message `protobuf:"bytes,4,rep,name=message,proto3" json:"message,omitempty"` + Ctime string `protobuf:"bytes,5,opt,name=ctime,proto3" json:"ctime,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *Conversation) Reset() { + *x = Conversation{} + mi := &file_ai_v1_ai_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *Conversation) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Conversation) ProtoMessage() {} + +func (x *Conversation) ProtoReflect() protoreflect.Message { + mi := &file_ai_v1_ai_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Conversation.ProtoReflect.Descriptor instead. 
+func (*Conversation) Descriptor() ([]byte, []int) { + return file_ai_v1_ai_proto_rawDescGZIP(), []int{1} +} + +func (x *Conversation) GetSn() string { + if x != nil { + return x.Sn + } + return "" +} + +func (x *Conversation) GetUid() string { + if x != nil { + return x.Uid + } + return "" +} + +func (x *Conversation) GetTitle() string { + if x != nil { + return x.Title + } + return "" +} + +func (x *Conversation) GetMessage() []*Message { + if x != nil { + return x.Message + } + return nil +} + +func (x *Conversation) GetCtime() string { + if x != nil { + return x.Ctime + } + return "" +} + +type ListReq struct { + state protoimpl.MessageState `protogen:"open.v1"` + Uid string `protobuf:"bytes,1,opt,name=uid,proto3" json:"uid,omitempty"` + Offset int64 `protobuf:"varint,2,opt,name=offset,proto3" json:"offset,omitempty"` + Limit int64 `protobuf:"varint,3,opt,name=limit,proto3" json:"limit,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ListReq) Reset() { + *x = ListReq{} + mi := &file_ai_v1_ai_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ListReq) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListReq) ProtoMessage() {} + +func (x *ListReq) ProtoReflect() protoreflect.Message { + mi := &file_ai_v1_ai_proto_msgTypes[2] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListReq.ProtoReflect.Descriptor instead. +func (*ListReq) Descriptor() ([]byte, []int) { + return file_ai_v1_ai_proto_rawDescGZIP(), []int{2} +} + +func (x *ListReq) GetUid() string { + if x != nil { + return x.Uid + } + return "" +} + +func (x *ListReq) GetOffset() int64 { + if x != nil { + return x.Offset + } + return 0 +} + +func (x *ListReq) GetLimit() int64 { + if x != nil { + return x.Limit + } + return 0 +} + +type ListResp struct { + state protoimpl.MessageState `protogen:"open.v1"` + Conversations []*Conversation `protobuf:"bytes,1,rep,name=conversations,proto3" json:"conversations,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ListResp) Reset() { + *x = ListResp{} + mi := &file_ai_v1_ai_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ListResp) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListResp) ProtoMessage() {} + +func (x *ListResp) ProtoReflect() protoreflect.Message { + mi := &file_ai_v1_ai_proto_msgTypes[3] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListResp.ProtoReflect.Descriptor instead. 
+func (*ListResp) Descriptor() ([]byte, []int) { + return file_ai_v1_ai_proto_rawDescGZIP(), []int{3} +} + +func (x *ListResp) GetConversations() []*Conversation { + if x != nil { + return x.Conversations + } + return nil +} + +type LLMRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + Sn string `protobuf:"bytes,1,opt,name=sn,proto3" json:"sn,omitempty"` + Message []*Message `protobuf:"bytes,2,rep,name=message,proto3" json:"message,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *LLMRequest) Reset() { + *x = LLMRequest{} + mi := &file_ai_v1_ai_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *LLMRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*LLMRequest) ProtoMessage() {} + +func (x *LLMRequest) ProtoReflect() protoreflect.Message { + mi := &file_ai_v1_ai_proto_msgTypes[4] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use LLMRequest.ProtoReflect.Descriptor instead. +func (*LLMRequest) Descriptor() ([]byte, []int) { + return file_ai_v1_ai_proto_rawDescGZIP(), []int{4} +} + +func (x *LLMRequest) GetSn() string { + if x != nil { + return x.Sn + } + return "" +} + +func (x *LLMRequest) GetMessage() []*Message { + if x != nil { + return x.Message + } + return nil +} + +type DetailRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + Sn string `protobuf:"bytes,1,opt,name=sn,proto3" json:"sn,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *DetailRequest) Reset() { + *x = DetailRequest{} + mi := &file_ai_v1_ai_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *DetailRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DetailRequest) ProtoMessage() {} + +func (x *DetailRequest) ProtoReflect() protoreflect.Message { + mi := &file_ai_v1_ai_proto_msgTypes[5] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DetailRequest.ProtoReflect.Descriptor instead. +func (*DetailRequest) Descriptor() ([]byte, []int) { + return file_ai_v1_ai_proto_rawDescGZIP(), []int{5} +} + +func (x *DetailRequest) GetSn() string { + if x != nil { + return x.Sn + } + return "" +} + +type DetailResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + Message []*Message `protobuf:"bytes,2,rep,name=message,proto3" json:"message,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *DetailResponse) Reset() { + *x = DetailResponse{} + mi := &file_ai_v1_ai_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *DetailResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DetailResponse) ProtoMessage() {} + +func (x *DetailResponse) ProtoReflect() protoreflect.Message { + mi := &file_ai_v1_ai_proto_msgTypes[6] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DetailResponse.ProtoReflect.Descriptor instead. 
+func (*DetailResponse) Descriptor() ([]byte, []int) { + return file_ai_v1_ai_proto_rawDescGZIP(), []int{6} +} + +func (x *DetailResponse) GetMessage() []*Message { + if x != nil { + return x.Message + } + return nil +} + +type Message struct { + state protoimpl.MessageState `protogen:"open.v1"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Role Role `protobuf:"varint,2,opt,name=role,proto3,enum=ai.v1.Role" json:"role,omitempty"` + Content string `protobuf:"bytes,3,opt,name=content,proto3" json:"content,omitempty"` + ReasoningContent string `protobuf:"bytes,4,opt,name=reasoningContent,proto3" json:"reasoningContent,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *Message) Reset() { + *x = Message{} + mi := &file_ai_v1_ai_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *Message) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Message) ProtoMessage() {} + +func (x *Message) ProtoReflect() protoreflect.Message { + mi := &file_ai_v1_ai_proto_msgTypes[7] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Message.ProtoReflect.Descriptor instead. +func (*Message) Descriptor() ([]byte, []int) { + return file_ai_v1_ai_proto_rawDescGZIP(), []int{7} +} + +func (x *Message) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +func (x *Message) GetRole() Role { + if x != nil { + return x.Role + } + return Role_UNKNOWN +} + +func (x *Message) GetContent() string { + if x != nil { + return x.Content + } + return "" +} + +func (x *Message) GetReasoningContent() string { + if x != nil { + return x.ReasoningContent + } + return "" +} + +type ChatResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + Sn string `protobuf:"bytes,1,opt,name=sn,proto3" json:"sn,omitempty"` + Response *Message `protobuf:"bytes,2,opt,name=response,proto3" json:"response,omitempty"` + Metadata string `protobuf:"bytes,3,opt,name=metadata,proto3" json:"metadata,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ChatResponse) Reset() { + *x = ChatResponse{} + mi := &file_ai_v1_ai_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ChatResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ChatResponse) ProtoMessage() {} + +func (x *ChatResponse) ProtoReflect() protoreflect.Message { + mi := &file_ai_v1_ai_proto_msgTypes[8] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ChatResponse.ProtoReflect.Descriptor instead. 
+func (*ChatResponse) Descriptor() ([]byte, []int) { + return file_ai_v1_ai_proto_rawDescGZIP(), []int{8} +} + +func (x *ChatResponse) GetSn() string { + if x != nil { + return x.Sn + } + return "" +} + +func (x *ChatResponse) GetResponse() *Message { + if x != nil { + return x.Response + } + return nil +} + +func (x *ChatResponse) GetMetadata() string { + if x != nil { + return x.Metadata + } + return "" +} + +var File_ai_v1_ai_proto protoreflect.FileDescriptor + +const file_ai_v1_ai_proto_rawDesc = "" + + "\n" + + "\x0eai/v1/ai.proto\x12\x05ai.v1\"{\n" + + "\vStreamEvent\x12\x14\n" + + "\x05final\x18\x01 \x01(\bR\x05final\x12*\n" + + "\x10reasoningContent\x18\x02 \x01(\tR\x10reasoningContent\x12\x18\n" + + "\acontent\x18\x03 \x01(\tR\acontent\x12\x10\n" + + "\x03err\x18\x04 \x01(\tR\x03err\"\x86\x01\n" + + "\fConversation\x12\x0e\n" + + "\x02sn\x18\x01 \x01(\tR\x02sn\x12\x10\n" + + "\x03uid\x18\x02 \x01(\tR\x03uid\x12\x14\n" + + "\x05title\x18\x03 \x01(\tR\x05title\x12(\n" + + "\amessage\x18\x04 \x03(\v2\x0e.ai.v1.MessageR\amessage\x12\x14\n" + + "\x05ctime\x18\x05 \x01(\tR\x05ctime\"I\n" + + "\aListReq\x12\x10\n" + + "\x03uid\x18\x01 \x01(\tR\x03uid\x12\x16\n" + + "\x06offset\x18\x02 \x01(\x03R\x06offset\x12\x14\n" + + "\x05limit\x18\x03 \x01(\x03R\x05limit\"E\n" + + "\bListResp\x129\n" + + "\rconversations\x18\x01 \x03(\v2\x13.ai.v1.ConversationR\rconversations\"F\n" + + "\n" + + "LLMRequest\x12\x0e\n" + + "\x02sn\x18\x01 \x01(\tR\x02sn\x12(\n" + + "\amessage\x18\x02 \x03(\v2\x0e.ai.v1.MessageR\amessage\"\x1f\n" + + "\rDetailRequest\x12\x0e\n" + + "\x02sn\x18\x01 \x01(\tR\x02sn\":\n" + + "\x0eDetailResponse\x12(\n" + + "\amessage\x18\x02 \x03(\v2\x0e.ai.v1.MessageR\amessage\"\x80\x01\n" + + "\aMessage\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\x12\x1f\n" + + "\x04role\x18\x02 \x01(\x0e2\v.ai.v1.RoleR\x04role\x12\x18\n" + + "\acontent\x18\x03 \x01(\tR\acontent\x12*\n" + + "\x10reasoningContent\x18\x04 \x01(\tR\x10reasoningContent\"f\n" + + "\fChatResponse\x12\x0e\n" + + "\x02sn\x18\x01 \x01(\tR\x02sn\x12*\n" + + "\bresponse\x18\x02 \x01(\v2\x0e.ai.v1.MessageR\bresponse\x12\x1a\n" + + "\bmetadata\x18\x03 \x01(\tR\bmetadata*B\n" + + "\x04Role\x12\v\n" + + "\aUNKNOWN\x10\x00\x12\b\n" + + "\x04USER\x10\x01\x12\r\n" + + "\tASSISTANT\x10\x02\x12\n" + + "\n" + + "\x06SYSTEM\x10\x03\x12\b\n" + + "\x04TOOL\x10\x042h\n" + + "\tAIService\x12+\n" + + "\x04Chat\x12\x0e.ai.v1.Message\x1a\x13.ai.v1.ChatResponse\x12.\n" + + "\x06Stream\x12\x0e.ai.v1.Message\x1a\x12.ai.v1.StreamEvent0\x012\x8c\x02\n" + + "\x13ConversationService\x122\n" + + "\x06Create\x12\x13.ai.v1.Conversation\x1a\x13.ai.v1.Conversation\x12'\n" + + "\x04List\x12\x0e.ai.v1.ListReq\x1a\x0f.ai.v1.ListResp\x12.\n" + + "\x04Chat\x12\x11.ai.v1.LLMRequest\x1a\x13.ai.v1.ChatResponse\x125\n" + + "\x06Detail\x12\x14.ai.v1.DetailRequest\x1a\x15.ai.v1.DetailResponse\x121\n" + + "\x06Stream\x12\x11.ai.v1.LLMRequest\x1a\x12.ai.v1.StreamEvent0\x01B\x86\x01\n" + + "\tcom.ai.v1B\aAiProtoP\x01Z;github.com/ecodeclub/ai-gateway-go/api/proto/gen/ai/v1;aiv1\xa2\x02\x03AXX\xaa\x02\x05Ai.V1\xca\x02\x05Ai\\V1\xe2\x02\x11Ai\\V1\\GPBMetadata\xea\x02\x06Ai::V1b\x06proto3" + +var ( + file_ai_v1_ai_proto_rawDescOnce sync.Once + file_ai_v1_ai_proto_rawDescData []byte +) + +func file_ai_v1_ai_proto_rawDescGZIP() []byte { + file_ai_v1_ai_proto_rawDescOnce.Do(func() { + file_ai_v1_ai_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_ai_v1_ai_proto_rawDesc), len(file_ai_v1_ai_proto_rawDesc))) + }) + return 
file_ai_v1_ai_proto_rawDescData +} + +var file_ai_v1_ai_proto_enumTypes = make([]protoimpl.EnumInfo, 1) +var file_ai_v1_ai_proto_msgTypes = make([]protoimpl.MessageInfo, 9) +var file_ai_v1_ai_proto_goTypes = []any{ + (Role)(0), // 0: ai.v1.Role + (*StreamEvent)(nil), // 1: ai.v1.StreamEvent + (*Conversation)(nil), // 2: ai.v1.Conversation + (*ListReq)(nil), // 3: ai.v1.ListReq + (*ListResp)(nil), // 4: ai.v1.ListResp + (*LLMRequest)(nil), // 5: ai.v1.LLMRequest + (*DetailRequest)(nil), // 6: ai.v1.DetailRequest + (*DetailResponse)(nil), // 7: ai.v1.DetailResponse + (*Message)(nil), // 8: ai.v1.Message + (*ChatResponse)(nil), // 9: ai.v1.ChatResponse +} +var file_ai_v1_ai_proto_depIdxs = []int32{ + 8, // 0: ai.v1.Conversation.message:type_name -> ai.v1.Message + 2, // 1: ai.v1.ListResp.conversations:type_name -> ai.v1.Conversation + 8, // 2: ai.v1.LLMRequest.message:type_name -> ai.v1.Message + 8, // 3: ai.v1.DetailResponse.message:type_name -> ai.v1.Message + 0, // 4: ai.v1.Message.role:type_name -> ai.v1.Role + 8, // 5: ai.v1.ChatResponse.response:type_name -> ai.v1.Message + 8, // 6: ai.v1.AIService.Chat:input_type -> ai.v1.Message + 8, // 7: ai.v1.AIService.Stream:input_type -> ai.v1.Message + 2, // 8: ai.v1.ConversationService.Create:input_type -> ai.v1.Conversation + 3, // 9: ai.v1.ConversationService.List:input_type -> ai.v1.ListReq + 5, // 10: ai.v1.ConversationService.Chat:input_type -> ai.v1.LLMRequest + 6, // 11: ai.v1.ConversationService.Detail:input_type -> ai.v1.DetailRequest + 5, // 12: ai.v1.ConversationService.Stream:input_type -> ai.v1.LLMRequest + 9, // 13: ai.v1.AIService.Chat:output_type -> ai.v1.ChatResponse + 1, // 14: ai.v1.AIService.Stream:output_type -> ai.v1.StreamEvent + 2, // 15: ai.v1.ConversationService.Create:output_type -> ai.v1.Conversation + 4, // 16: ai.v1.ConversationService.List:output_type -> ai.v1.ListResp + 9, // 17: ai.v1.ConversationService.Chat:output_type -> ai.v1.ChatResponse + 7, // 18: ai.v1.ConversationService.Detail:output_type -> ai.v1.DetailResponse + 1, // 19: ai.v1.ConversationService.Stream:output_type -> ai.v1.StreamEvent + 13, // [13:20] is the sub-list for method output_type + 6, // [6:13] is the sub-list for method input_type + 6, // [6:6] is the sub-list for extension type_name + 6, // [6:6] is the sub-list for extension extendee + 0, // [0:6] is the sub-list for field type_name +} + +func init() { file_ai_v1_ai_proto_init() } +func file_ai_v1_ai_proto_init() { + if File_ai_v1_ai_proto != nil { + return + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: unsafe.Slice(unsafe.StringData(file_ai_v1_ai_proto_rawDesc), len(file_ai_v1_ai_proto_rawDesc)), + NumEnums: 1, + NumMessages: 9, + NumExtensions: 0, + NumServices: 2, + }, + GoTypes: file_ai_v1_ai_proto_goTypes, + DependencyIndexes: file_ai_v1_ai_proto_depIdxs, + EnumInfos: file_ai_v1_ai_proto_enumTypes, + MessageInfos: file_ai_v1_ai_proto_msgTypes, + }.Build() + File_ai_v1_ai_proto = out.File + file_ai_v1_ai_proto_goTypes = nil + file_ai_v1_ai_proto_depIdxs = nil +} diff --git a/api/proto/gen/ai/v1/ai_grpc.pb.go b/api/proto/gen/ai/v1/ai_grpc.pb.go new file mode 100644 index 0000000..e0845c7 --- /dev/null +++ b/api/proto/gen/ai/v1/ai_grpc.pb.go @@ -0,0 +1,435 @@ +// Copyright 2021 ecodeclub +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc (unknown) +// source: ai/v1/ai.proto + +package aiv1 + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + AIService_Chat_FullMethodName = "/ai.v1.AIService/Chat" + AIService_Stream_FullMethodName = "/ai.v1.AIService/Stream" +) + +// AIServiceClient is the client API for AIService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type AIServiceClient interface { + Chat(ctx context.Context, in *Message, opts ...grpc.CallOption) (*ChatResponse, error) + Stream(ctx context.Context, in *Message, opts ...grpc.CallOption) (grpc.ServerStreamingClient[StreamEvent], error) +} + +type aIServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewAIServiceClient(cc grpc.ClientConnInterface) AIServiceClient { + return &aIServiceClient{cc} +} + +func (c *aIServiceClient) Chat(ctx context.Context, in *Message, opts ...grpc.CallOption) (*ChatResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ChatResponse) + err := c.cc.Invoke(ctx, AIService_Chat_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *aIServiceClient) Stream(ctx context.Context, in *Message, opts ...grpc.CallOption) (grpc.ServerStreamingClient[StreamEvent], error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + stream, err := c.cc.NewStream(ctx, &AIService_ServiceDesc.Streams[0], AIService_Stream_FullMethodName, cOpts...) + if err != nil { + return nil, err + } + x := &grpc.GenericClientStream[Message, StreamEvent]{ClientStream: stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. +type AIService_StreamClient = grpc.ServerStreamingClient[StreamEvent] + +// AIServiceServer is the server API for AIService service. +// All implementations must embed UnimplementedAIServiceServer +// for forward compatibility. +type AIServiceServer interface { + Chat(context.Context, *Message) (*ChatResponse, error) + Stream(*Message, grpc.ServerStreamingServer[StreamEvent]) error + mustEmbedUnimplementedAIServiceServer() +} + +// UnimplementedAIServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. 
+type UnimplementedAIServiceServer struct{} + +func (UnimplementedAIServiceServer) Chat(context.Context, *Message) (*ChatResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Chat not implemented") +} +func (UnimplementedAIServiceServer) Stream(*Message, grpc.ServerStreamingServer[StreamEvent]) error { + return status.Errorf(codes.Unimplemented, "method Stream not implemented") +} +func (UnimplementedAIServiceServer) mustEmbedUnimplementedAIServiceServer() {} +func (UnimplementedAIServiceServer) testEmbeddedByValue() {} + +// UnsafeAIServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to AIServiceServer will +// result in compilation errors. +type UnsafeAIServiceServer interface { + mustEmbedUnimplementedAIServiceServer() +} + +func RegisterAIServiceServer(s grpc.ServiceRegistrar, srv AIServiceServer) { + // If the following call pancis, it indicates UnimplementedAIServiceServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. + if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&AIService_ServiceDesc, srv) +} + +func _AIService_Chat_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(Message) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AIServiceServer).Chat(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: AIService_Chat_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AIServiceServer).Chat(ctx, req.(*Message)) + } + return interceptor(ctx, in, info, handler) +} + +func _AIService_Stream_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(Message) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(AIServiceServer).Stream(m, &grpc.GenericServerStream[Message, StreamEvent]{ServerStream: stream}) +} + +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. +type AIService_StreamServer = grpc.ServerStreamingServer[StreamEvent] + +// AIService_ServiceDesc is the grpc.ServiceDesc for AIService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var AIService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "ai.v1.AIService", + HandlerType: (*AIServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Chat", + Handler: _AIService_Chat_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "Stream", + Handler: _AIService_Stream_Handler, + ServerStreams: true, + }, + }, + Metadata: "ai/v1/ai.proto", +} + +const ( + ConversationService_Create_FullMethodName = "/ai.v1.ConversationService/Create" + ConversationService_List_FullMethodName = "/ai.v1.ConversationService/List" + ConversationService_Chat_FullMethodName = "/ai.v1.ConversationService/Chat" + ConversationService_Detail_FullMethodName = "/ai.v1.ConversationService/Detail" + ConversationService_Stream_FullMethodName = "/ai.v1.ConversationService/Stream" +) + +// ConversationServiceClient is the client API for ConversationService service. 
+// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type ConversationServiceClient interface { + Create(ctx context.Context, in *Conversation, opts ...grpc.CallOption) (*Conversation, error) + List(ctx context.Context, in *ListReq, opts ...grpc.CallOption) (*ListResp, error) + Chat(ctx context.Context, in *LLMRequest, opts ...grpc.CallOption) (*ChatResponse, error) + Detail(ctx context.Context, in *DetailRequest, opts ...grpc.CallOption) (*DetailResponse, error) + Stream(ctx context.Context, in *LLMRequest, opts ...grpc.CallOption) (grpc.ServerStreamingClient[StreamEvent], error) +} + +type conversationServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewConversationServiceClient(cc grpc.ClientConnInterface) ConversationServiceClient { + return &conversationServiceClient{cc} +} + +func (c *conversationServiceClient) Create(ctx context.Context, in *Conversation, opts ...grpc.CallOption) (*Conversation, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Conversation) + err := c.cc.Invoke(ctx, ConversationService_Create_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *conversationServiceClient) List(ctx context.Context, in *ListReq, opts ...grpc.CallOption) (*ListResp, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ListResp) + err := c.cc.Invoke(ctx, ConversationService_List_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *conversationServiceClient) Chat(ctx context.Context, in *LLMRequest, opts ...grpc.CallOption) (*ChatResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ChatResponse) + err := c.cc.Invoke(ctx, ConversationService_Chat_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *conversationServiceClient) Detail(ctx context.Context, in *DetailRequest, opts ...grpc.CallOption) (*DetailResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(DetailResponse) + err := c.cc.Invoke(ctx, ConversationService_Detail_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *conversationServiceClient) Stream(ctx context.Context, in *LLMRequest, opts ...grpc.CallOption) (grpc.ServerStreamingClient[StreamEvent], error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + stream, err := c.cc.NewStream(ctx, &ConversationService_ServiceDesc.Streams[0], ConversationService_Stream_FullMethodName, cOpts...) + if err != nil { + return nil, err + } + x := &grpc.GenericClientStream[LLMRequest, StreamEvent]{ClientStream: stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. +type ConversationService_StreamClient = grpc.ServerStreamingClient[StreamEvent] + +// ConversationServiceServer is the server API for ConversationService service. +// All implementations must embed UnimplementedConversationServiceServer +// for forward compatibility. 
+type ConversationServiceServer interface { + Create(context.Context, *Conversation) (*Conversation, error) + List(context.Context, *ListReq) (*ListResp, error) + Chat(context.Context, *LLMRequest) (*ChatResponse, error) + Detail(context.Context, *DetailRequest) (*DetailResponse, error) + Stream(*LLMRequest, grpc.ServerStreamingServer[StreamEvent]) error + mustEmbedUnimplementedConversationServiceServer() +} + +// UnimplementedConversationServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. +type UnimplementedConversationServiceServer struct{} + +func (UnimplementedConversationServiceServer) Create(context.Context, *Conversation) (*Conversation, error) { + return nil, status.Errorf(codes.Unimplemented, "method Create not implemented") +} +func (UnimplementedConversationServiceServer) List(context.Context, *ListReq) (*ListResp, error) { + return nil, status.Errorf(codes.Unimplemented, "method List not implemented") +} +func (UnimplementedConversationServiceServer) Chat(context.Context, *LLMRequest) (*ChatResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Chat not implemented") +} +func (UnimplementedConversationServiceServer) Detail(context.Context, *DetailRequest) (*DetailResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Detail not implemented") +} +func (UnimplementedConversationServiceServer) Stream(*LLMRequest, grpc.ServerStreamingServer[StreamEvent]) error { + return status.Errorf(codes.Unimplemented, "method Stream not implemented") +} +func (UnimplementedConversationServiceServer) mustEmbedUnimplementedConversationServiceServer() {} +func (UnimplementedConversationServiceServer) testEmbeddedByValue() {} + +// UnsafeConversationServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to ConversationServiceServer will +// result in compilation errors. +type UnsafeConversationServiceServer interface { + mustEmbedUnimplementedConversationServiceServer() +} + +func RegisterConversationServiceServer(s grpc.ServiceRegistrar, srv ConversationServiceServer) { + // If the following call pancis, it indicates UnimplementedConversationServiceServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. 
+ if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&ConversationService_ServiceDesc, srv) +} + +func _ConversationService_Create_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(Conversation) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ConversationServiceServer).Create(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ConversationService_Create_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ConversationServiceServer).Create(ctx, req.(*Conversation)) + } + return interceptor(ctx, in, info, handler) +} + +func _ConversationService_List_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListReq) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ConversationServiceServer).List(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ConversationService_List_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ConversationServiceServer).List(ctx, req.(*ListReq)) + } + return interceptor(ctx, in, info, handler) +} + +func _ConversationService_Chat_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(LLMRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ConversationServiceServer).Chat(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ConversationService_Chat_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ConversationServiceServer).Chat(ctx, req.(*LLMRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ConversationService_Detail_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DetailRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ConversationServiceServer).Detail(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ConversationService_Detail_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ConversationServiceServer).Detail(ctx, req.(*DetailRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ConversationService_Stream_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(LLMRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(ConversationServiceServer).Stream(m, &grpc.GenericServerStream[LLMRequest, StreamEvent]{ServerStream: stream}) +} + +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. +type ConversationService_StreamServer = grpc.ServerStreamingServer[StreamEvent] + +// ConversationService_ServiceDesc is the grpc.ServiceDesc for ConversationService service. 
+// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var ConversationService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "ai.v1.ConversationService", + HandlerType: (*ConversationServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Create", + Handler: _ConversationService_Create_Handler, + }, + { + MethodName: "List", + Handler: _ConversationService_List_Handler, + }, + { + MethodName: "Chat", + Handler: _ConversationService_Chat_Handler, + }, + { + MethodName: "Detail", + Handler: _ConversationService_Detail_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "Stream", + Handler: _ConversationService_Stream_Handler, + ServerStreams: true, + }, + }, + Metadata: "ai/v1/ai.proto", +} diff --git a/go.mod b/go.mod index 0afdb9f..359cb29 100644 --- a/go.mod +++ b/go.mod @@ -10,6 +10,8 @@ require ( github.com/golang-jwt/jwt/v5 v5.2.2 github.com/google/uuid v1.6.0 github.com/gotomicro/ego v1.2.3 + github.com/openai/openai-go v1.8.2 + github.com/pkg/errors v0.9.1 github.com/redis/go-redis/v9 v9.3.0 github.com/stretchr/testify v1.10.0 github.com/yumosx/got v1.0.1 @@ -63,7 +65,6 @@ require ( github.com/modern-go/reflect2 v1.0.2 // indirect github.com/montanaflynn/stats v0.7.1 // indirect github.com/pelletier/go-toml/v2 v2.2.3 // indirect - github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/prometheus/client_golang v1.12.1 // indirect github.com/prometheus/client_model v0.2.0 // indirect @@ -73,6 +74,10 @@ require ( github.com/samber/lo v1.39.0 // indirect github.com/shirou/gopsutil/v3 v3.21.6 // indirect github.com/spf13/cast v1.4.1 // indirect + github.com/tidwall/gjson v1.14.4 // indirect + github.com/tidwall/match v1.1.1 // indirect + github.com/tidwall/pretty v1.2.1 // indirect + github.com/tidwall/sjson v1.2.5 // indirect github.com/tklauser/go-sysconf v0.3.6 // indirect github.com/tklauser/numcpus v0.2.2 // indirect github.com/twitchyliquid64/golang-asm v0.15.1 // indirect diff --git a/go.sum b/go.sum index d937eb4..4260d7b 100644 --- a/go.sum +++ b/go.sum @@ -362,6 +362,8 @@ github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+W github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk= +github.com/openai/openai-go v1.8.2 h1:UqSkJ1vCOPUpz9Ka5tS0324EJFEuOvMc+lA/EarJWP8= +github.com/openai/openai-go v1.8.2/go.mod h1:g461MYGXEXBVdV5SaR/5tNzNbSfwTBBefwc+LlDCK0Y= github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492/go.mod h1:Ngi6UdF0k5OKD5t5wlmGhe/EDKPoUM3BXZSSfIuJbis= github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74= github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= @@ -471,6 +473,16 @@ github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/gjson 
v1.14.4 h1:uo0p8EbA09J7RQaflQ1aBRffTR7xedD2bcIVSYxLnkM= +github.com/tidwall/gjson v1.14.4/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= +github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= +github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= +github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= github.com/tklauser/go-sysconf v0.3.6 h1:oc1sJWvKkmvIxhDHeKWvZS4f6AW+YcoguSfRF2/Hmo4= github.com/tklauser/go-sysconf v0.3.6/go.mod h1:MkWzOF4RMCshBAMXuhXJs64Rte09mITnppBXY/rYEFI= github.com/tklauser/numcpus v0.2.2 h1:oyhllyrScuYI6g+h/zUvNXNp1wy7x8qQy3t/piefldA= diff --git a/internal/service/llm/platform/openai/openai.go b/internal/service/llm/platform/openai/openai.go new file mode 100644 index 0000000..eb08e1e --- /dev/null +++ b/internal/service/llm/platform/openai/openai.go @@ -0,0 +1,174 @@ +// Copyright 2021 ecodeclub +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package openai + +import ( + "context" + "io" + "time" + + "github.com/ecodeclub/ai-gateway-go/internal/domain" + "github.com/ecodeclub/ekit/slice" + "github.com/openai/openai-go" + "github.com/openai/openai-go/option" + "github.com/openai/openai-go/packages/ssestream" + "github.com/openai/openai-go/shared/constant" + "github.com/pkg/errors" +) + +type Handler struct { + client *openai.Client + model string + toolCallID string + opts []option.RequestOption +} + +func NewHandler(client *openai.Client, model string, options ...Option) *Handler { + h := &Handler{ + client: client, + model: model, + } + + for _, o := range options { + o(h) + } + + return h +} + +type Option func(*Handler) + +func WithToolCallID(toolCallID string) Option { + return func(h *Handler) { + h.toolCallID = toolCallID + } +} + +func WithRequestOptions(opts ...option.RequestOption) Option { + return func(h *Handler) { + h.opts = opts + } +} + +func (h *Handler) Handle(ctx context.Context, req []domain.Message) (domain.ChatResponse, error) { + messages := h.toMessage(req) + request := openai.ChatCompletionNewParams{ + Messages: messages, + Model: h.model, + } + response, err := h.client.Chat.Completions.New(ctx, request, h.opts...) 
+ if err != nil { + return domain.ChatResponse{}, err + } + + if len(response.Choices) == 0 { + return domain.ChatResponse{}, errors.New("no response choices returned") + } + + message := domain.Message{ + Role: toDomainRole(response.Choices[0].Message.Role), + Content: response.Choices[0].Message.Content, + ReasoningContent: "", // OpenAI does not provide reasoning content in the response + } + + return domain.ChatResponse{Response: message}, nil +} + +func (h *Handler) StreamHandle(ctx context.Context, req []domain.Message) (chan domain.StreamEvent, error) { + request := openai.ChatCompletionNewParams{ + Model: h.model, + Messages: h.toMessage(req), + } + events := make(chan domain.StreamEvent, 10) + + go func() { + defer close(events) + + // Set the corresponding timeout for the streaming request + newCtx, cancel := context.WithTimeout(ctx, time.Minute*10) + defer cancel() + stream := h.client.Chat.Completions.NewStreaming(newCtx, request, h.opts...) + if stream.Err() != nil { + events <- domain.StreamEvent{Error: stream.Err()} + return + } + + h.recv(events, stream) + }() + + return events, nil +} + +func (h *Handler) recv(eventCh chan domain.StreamEvent, stream *ssestream.Stream[openai.ChatCompletionChunk]) { + for stream.Next() { + if err := stream.Err(); err != nil { + handleStreamError(eventCh, err) + return + } + + node := stream.Current() + if len(node.Choices) == 0 { + continue + } + + choice := node.Choices[0] + if choice.Delta.Content != "" { + eventCh <- domain.StreamEvent{Content: choice.Delta.Content, Error: nil} + } + } + + // Check for a final error after the stream ends + if err := stream.Err(); err != nil { + handleStreamError(eventCh, err) + } else { + eventCh <- domain.StreamEvent{Done: true} + } +} + +func handleStreamError(eventCh chan domain.StreamEvent, err error) { + if errors.Is(err, io.EOF) { + eventCh <- domain.StreamEvent{Done: true} + } else { + eventCh <- domain.StreamEvent{Error: err} + } +} + +func (h *Handler) toMessage(messages []domain.Message) []openai.ChatCompletionMessageParamUnion { + return slice.FilterMap(messages, func(idx int, src domain.Message) (openai.ChatCompletionMessageParamUnion, bool) { + var temp openai.ChatCompletionMessageParamUnion + switch src.Role { + case domain.TOOL: + return openai.ToolMessage(src.Content, h.toolCallID), true + case domain.SYSTEM: + return openai.SystemMessage(src.Content), true + case domain.USER: + return openai.UserMessage(src.Content), true + case domain.ASSISTANT: + return openai.AssistantMessage(src.Content), true + case domain.UNKNOWN: + return temp, false + } + return temp, false + }) +} + +func toDomainRole(c constant.Assistant) int32 { + switch c { + case "assistant": + return domain.ASSISTANT + default: + return domain.UNKNOWN + } +} diff --git a/internal/service/llm/platform/openai/openai_test.go b/internal/service/llm/platform/openai/openai_test.go new file mode 100644 index 0000000..a6fd51a --- /dev/null +++ b/internal/service/llm/platform/openai/openai_test.go @@ -0,0 +1,108 @@ +// Copyright 2021 ecodeclub +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
+ +//go:build onlyUsage + +package openai + +import ( + "context" + "os" + "testing" + + "github.com/ecodeclub/ai-gateway-go/internal/domain" + "github.com/openai/openai-go" + "github.com/openai/openai-go/option" + "github.com/stretchr/testify/require" +) + +func TestOpenai(t *testing.T) { + client := openai.NewClient( + option.WithAPIKey(os.Getenv("ALI_QIANWEN_DASHSCOPE_API_KEY")), + option.WithBaseURL("https://dashscope.aliyuncs.com/compatible-mode/v1/"), + ) + chatCompletion, err := client.Chat.Completions.New(context.TODO(), openai.ChatCompletionNewParams{ + Messages: []openai.ChatCompletionMessageParamUnion{ + openai.SystemMessage("Say this is a test"), + }, + Model: "qwen-plus", + }) + if err != nil { + panic(err.Error()) + } + + println(chatCompletion.Choices[0].Message.Content) +} + +func TestHandle(t *testing.T) { + client := openai.NewClient( + option.WithAPIKey(os.Getenv("ALI_QIANWEN_DASHSCOPE_API_KEY")), + option.WithBaseURL("https://dashscope.aliyuncs.com/compatible-mode/v1/"), + ) + h := NewHandler(&client, "qwen-plus") + handle, err := h.Handle(context.Background(), []domain.Message{ + { + Role: domain.SYSTEM, + Content: "你好", + }, + { + Role: domain.SYSTEM, + Content: "你是谁", + }, + }) + + require.NoError(t, err) + println(handle.Response.Content) + require.NotEmpty(t, handle.Response.Content) +} + +func TestStreamHandle(t *testing.T) { + client := openai.NewClient( + option.WithAPIKey(os.Getenv("ALI_QIANWEN_DASHSCOPE_API_KEY")), + option.WithBaseURL("https://dashscope.aliyuncs.com/compatible-mode/v1/"), + ) + h := NewHandler(&client, "qwen-plus") + s, err := h.StreamHandle(context.Background(), []domain.Message{ + { + Role: domain.SYSTEM, + Content: "你好", + }, + { + Role: domain.SYSTEM, + Content: "你是谁", + }, + }) + + require.NoError(t, err) + + for { + select { + case event, ok := <-s: + if !ok { + return + } + if event.Error != nil { + t.Errorf("stream error: %v", event.Error) + return + } + if event.Done { + println("stream done") + return + } + println(event.Content) + require.NotEmpty(t, event.Content) + } + } + +} diff --git a/internal/test/conversation_test.go b/internal/test/conversation_test.go index eca8946..b089b15 100644 --- a/internal/test/conversation_test.go +++ b/internal/test/conversation_test.go @@ -288,7 +288,7 @@ func (c *ConversationSuite) TestDetail() { conversationService := service.NewConversationService(repo, handler) server := grpc.NewConversationServer(conversationService) tc.before() - detail, err := server.Detail(context.Background(), &aiv1.MsgListReq{Sn: "1"}) + detail, err := server.Detail(context.Background(), &aiv1.DetailRequest{Sn: "1"}) require.NoError(t, err) assert.ElementsMatch(t, detail.Message, []*aiv1.Message{ {Role: aiv1.Role_USER, Content: "user1"},