Skip to content

Commit

Permalink
remove unused code
Browse files Browse the repository at this point in the history
  • Loading branch information
cr7258 committed Aug 9, 2024
1 parent 6b6be99 commit 161dd38
Show file tree
Hide file tree
Showing 4 changed files with 7 additions and 77 deletions.
32 changes: 0 additions & 32 deletions plugins/wasm-go/extensions/ai-proxy/provider/minimax.go
Original file line number Diff line number Diff line change
Expand Up @@ -245,42 +245,10 @@ func (m *minimaxProvider) OnStreamingResponseBody(ctx wrapper.HttpContext, name
// data: {"created":1689747645,"model":"abab6.5s-chat","reply":"I am from China.","choices":[{"finish_reason":"stop","messages":[{"sender_type":"BOT","sender_name":"MM智能助理","text":"I am from China."}]}],"usage":{"total_tokens":187},"input_sensitive":false,"output_sensitive":false,"id":"0106b3bc9fd844a9f3de1aa06004e2ab","base_resp":{"status_code":0,"status_msg":""}}
modifiedResponseChunk := processStreamEvent(ctx, chunk, isLastChunk, log, m.responseV2ToOpenAI)
return modifiedResponseChunk, nil

//responseBuilder := &strings.Builder{}
//lines := strings.Split(string(chunk), "\n")
//for _, data := range lines {
// if len(data) < 6 {
// // ignore blank line or wrong format
// continue
// }
// data = data[6:]
// var minimaxResp minimaxChatCompletionV2Resp
// if err := json.Unmarshal([]byte(data), &minimaxResp); err != nil {
// log.Errorf("unable to unmarshal minimax response: %v", err)
// continue
// }
// response := m.responseV2ToOpenAI(&minimaxResp)
// responseBody, err := json.Marshal(response)
// if err != nil {
// log.Errorf("unable to marshal response: %v", err)
// return nil, err
// }
// m.appendResponse(responseBuilder, string(responseBody))
//}
//modifiedResponseChunk := responseBuilder.String()
//log.Debugf("=== modified response chunk: %s", modifiedResponseChunk)
//return []byte(modifiedResponseChunk), nil
}

// OnResponseBody converts a complete (non-streaming) minimax v2 response
// body into the OpenAI-compatible format and replaces the HTTP response
// body with the converted JSON.
// NOTE(review): the previous comment described this as handling streaming
// responses; this callback receives the full buffered body — confirm against
// the wrapper's OnResponseBody contract.
func (m *minimaxProvider) OnResponseBody(ctx wrapper.HttpContext, apiName ApiName, body []byte, log wrapper.Log) (types.Action, error) {
	// Convert the provider-specific response into the OpenAI schema, then
	// swap it in as the outgoing response body.
	openAIResponse := m.responseV2ToOpenAI(ctx, body, log)
	return types.ActionContinue, replaceJsonResponseBody(openAIResponse, log)
}
Expand Down
35 changes: 1 addition & 34 deletions plugins/wasm-go/extensions/ai-proxy/provider/model.go
Original file line number Diff line number Diff line change
@@ -1,18 +1,7 @@
package provider

import "strings"

// Markers used when parsing server-sent event (SSE) stream chunks.
// The shown span declared streamDataItemKey twice (old and new diff lines),
// which cannot compile; only the single kept constant remains.
const (
	// streamDataItemKey is the prefix of an SSE data line: "data: ...".
	streamDataItemKey = "data:"
)

type chatCompletionRequest struct {
Expand Down Expand Up @@ -119,28 +108,6 @@ func (m *functionCall) IsEmpty() bool {
return m.Name == "" && m.Arguments == ""
}

// streamEvent is a single server-sent event (SSE) parsed from a streaming
// response body. Each field mirrors one SSE line item.
type streamEvent struct {
	Id         string `json:"id"`          // value of an "id:" line
	Event      string `json:"event"`       // value of an "event:" line
	Data       string `json:"data"`        // value of a "data:" line (the payload)
	HttpStatus string `json:"http_status"` // status extracted from a ":HTTP_STATUS/..." comment line
}

// setValue stores one parsed SSE line item on the event, dispatching on the
// item key ("data:", "id:", "event:", or the ":" comment marker).
func (e *streamEvent) setValue(key, value string) {
	switch key {
	case streamDataItemKey:
		e.Data = value
	case streamEventIdItemKey:
		e.Id = value
	case streamEventNameItemKey:
		e.Event = value
	case streamBuiltInItemKey:
		// A comment line may carry the upstream HTTP status,
		// e.g. ":HTTP_STATUS/200"; anything else is ignored.
		if strings.HasPrefix(value, streamHttpStatusValuePrefix) {
			e.HttpStatus = strings.TrimPrefix(value, streamHttpStatusValuePrefix)
		}
	}
}

type embeddingsRequest struct {
Input interface{} `json:"input"`
Model string `json:"model"`
Expand Down
6 changes: 0 additions & 6 deletions plugins/wasm-go/extensions/ai-proxy/provider/qwen.go
Original file line number Diff line number Diff line change
Expand Up @@ -448,12 +448,6 @@ func (m *qwenProvider) insertContextMessage(request *qwenTextGenRequest, content
}
}

// appendStreamEvent serializes event.Data back into SSE wire format
// ("data:<payload>\n\n") and appends it to sb.
func (m *qwenProvider) appendStreamEvent(sb *strings.Builder, event *streamEvent) {
	sb.WriteString(streamDataItemKey + event.Data + "\n\n")
}

func (m *qwenProvider) buildQwenTextEmbeddingRequest(request *embeddingsRequest) (*qwenTextEmbeddingRequest, error) {
var texts []string
if str, isString := request.Input.(string); isString {
Expand Down
11 changes: 6 additions & 5 deletions plugins/wasm-go/extensions/ai-proxy/provider/request_helper.go
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ func replaceJsonResponseBody(response interface{}, log wrapper.Log) error {

type chatCompletionResponseConverter interface{}

// processStreamEvent 从上下文中取出缓冲区,将新chunk追加到缓冲区,然后处理缓冲区中的完整事件
// processStreamEvent 从上下文中取出缓冲区,将新 chunk 追加到缓冲区,然后处理缓冲区中的完整事件
func processStreamEvent(
ctx wrapper.HttpContext,
chunk []byte, isLastChunk bool,
Expand All @@ -77,17 +77,17 @@ func processStreamEvent(
if isLastChunk || len(chunk) == 0 {
return nil
}
// 从上下文中取出缓冲区,将新chunk追加到缓冲区
// 从上下文中取出缓冲区,将新 chunk 追加到缓冲区
newBufferedBody := chunk
if bufferedBody, has := ctx.GetContext(ctxKeyStreamingBody).([]byte); has {
newBufferedBody = append(bufferedBody, chunk...)
}

// 初始化处理下标,以及将要返回的处理过的chunks
// 初始化处理下标,以及将要返回的处理过的 chunk
var newEventPivot = -1
var outputBuffer []byte

// 从buffer区取出若干完整的chunk,将其转为openAI格式后返回
// 从缓冲区取出若干完整的 chunk,将其转为 openAI 格式后返回
// 处理可能包含多个事件的缓冲区
for {
eventStartIndex := bytes.Index(newBufferedBody, []byte(streamDataItemKey))
Expand All @@ -101,7 +101,7 @@ func processStreamEvent(
// 查找事件结束的位置(即下一个事件的开始)
newEventPivot = bytes.Index(newBufferedBody, []byte("\n\n"))
if newEventPivot == -1 {
// 未找到事件结束标识,跳出循环等待更多数据,若是最后一个chunk,不一定有2个换行符
// 未找到事件结束标识,跳出循环等待更多数据,若是最后一个 chunk,不一定有 2 个换行符
break
}

Expand All @@ -119,6 +119,7 @@ func processStreamEvent(
}
outputBuffer = append(outputBuffer, convertedData...)
}
// qwen 的 chunk 中可能包含多个事件
case func(ctx wrapper.HttpContext, chunk []byte, log wrapper.Log) []*chatCompletionResponse:
if openAIResponses := fn(ctx, eventData, log); openAIResponses != nil {
for _, response := range openAIResponses {
Expand Down

0 comments on commit 161dd38

Please sign in to comment.