Beta116 (#114)
* beta1

* beta2

* beta3

* beta4

* beta5

* beta6

* beta7

* beta8

* beta9

* beta10

* beta11

* beta12

* beta13

* beta14

* beta15

* beta16

* beta19

* beta20

* beta21

* beta22

* beta23

* beta24

* beta25

* beta27

* beta28

* beta29

* beta30

* beta31

* beta33

* beta34

* beta35

* beta36

* beta37

* beta38

* beta39

* beta40

* beta41

* beta42

* beta43

* beta44

* beta45

* beta46

* beta48

* beta49

* beta50

* beta51

* beta52

* beta53

* beta54

* beta55

* beta57

* beta58

* beta59

* beta61

* beta62

* beta63

* beta64

* beta65

* beta66

* beta67

* beta70

* beta71

* beta72

* beta74

* beta75

* beta76

* beta77

* beta78

* beta79

* beta80

* beta81

* beta82

* beta83

* beta85

* beta86

* beta87

* beta88

* beta89

* beta90

* beta91

* beta92

* beta93

* beta94

* beta96

* beta97

* beta98

* beta99

* beta100

* beta101

* beta102

* beta104

* beta105

* beta106

* beta107

* beta108

* beta109

* beta110

* beta111

* beta112

* beta113

* beta115

* beta116
Hoshinonyaruko authored May 14, 2024
1 parent c8d606a commit d23d523
Showing 3 changed files with 405 additions and 93 deletions.
6 changes: 3 additions & 3 deletions applogic/chatglm.go
@@ -225,16 +225,16 @@ func (app *App) ChatHandlerGlm(w http.ResponseWriter, r *http.Request) {
 	}

 	// Fetch configuration values
-	apiKey := config.GetGlmApiKey()
+	apiKey := config.GetGlmApiKey(promptstr)
 	// Build the request body as a map
 	requestBody := map[string]interface{}{
-		"model":       config.GetGlmModel(),
+		"model":       config.GetGlmModel(promptstr),
 		"messages":    messages,
 		"do_sample":   config.GetGlmDoSample(),
 		"stream":      config.GetuseSse(promptstr),
 		"temperature": config.GetGlmTemperature(),
 		"top_p":       config.GetGlmTopP(),
-		"max_tokens":  config.GetGlmMaxTokens(),
+		"max_tokens":  config.GetGlmMaxTokens(promptstr),
 		"stop":        config.GetGlmStop(),
 		//"tools": config.GetGlmTools(), // parameter format unclear
 		"tool_choice": config.GetGlmToolChoice(),
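Every hunk in this commit follows one pattern: a config getter that used to read only the global setting now takes promptstr, so a per-prompt profile can override the global value. A minimal sketch of how such a getter could be structured, assuming a hypothetical override store; the maps below are illustrative, not the repository's actual config types:

package config

// Hypothetical storage, for illustration only.
var global = map[string]string{"glmApiKey": "default-key"}
var promptOverrides = map[string]map[string]string{} // promptstr -> setting -> value

// GetGlmApiKey returns the per-prompt API key when one is configured,
// falling back to the global default otherwise.
func GetGlmApiKey(promptstr string) string {
	if o, ok := promptOverrides[promptstr]; ok {
		if v, ok := o["glmApiKey"]; ok {
			return v
		}
	}
	return global["glmApiKey"]
}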
50 changes: 25 additions & 25 deletions applogic/tongyiqianwen.go
@@ -198,7 +198,7 @@ func (app *App) ChatHandlerTyqw(w http.ResponseWriter, r *http.Request) {
 	fmtf.Printf("Tyqw context history: %v\n", history)

 	// Build the request to the Tyqw API
-	apiURL := config.GetTyqwApiPath()
+	apiURL := config.GetTyqwApiPath(promptstr)

 	// Construct the message history and the current message
 	messages := []map[string]interface{}{}
@@ -216,29 +216,29 @@ func (app *App) ChatHandlerTyqw(w http.ResponseWriter, r *http.Request) {
 	})

 	var isIncrementalOutput bool
-	if config.GetTyqwSseType() == 1 {
+	if config.GetTyqwSseType(promptstr) == 1 {
 		isIncrementalOutput = true
 	}
 	// Fetch configuration values
 	useSSE := config.GetuseSse(promptstr)
-	apiKey := config.GetTyqwKey()
+	apiKey := config.GetTyqwKey(promptstr)
 	var requestBody map[string]interface{}
 	if useSSE {
 		// Build the request body, restructured according to the provided docs
 		requestBody = map[string]interface{}{
 			"parameters": map[string]interface{}{
-				"max_tokens":         config.GetTyqwMaxTokens(),         // maximum number of tokens to generate
-				"temperature":        config.GetTyqwTemperature(),       // temperature controlling randomness and diversity
-				"top_p":              config.GetTyqwTopP(),              // probability threshold for nucleus sampling
-				"top_k":              config.GetTyqwTopK(),              // size of the sampling candidate set
-				"repetition_penalty": config.GetTyqwRepetitionPenalty(), // penalty factor for repetition
-				"stop":               config.GetTyqwStopTokens(),        // stop tokens
-				"seed":               config.GetTyqwSeed(),              // random-number seed
-				"result_format":      "message",                         // format of the returned result
-				"enable_search":      config.GetTyqwEnableSearch(),      // whether to enable internet search
-				"incremental_output": isIncrementalOutput,               // incremental SSE mode; faster, but rwkv and openai do not support it
+				"max_tokens":         config.GetTyqwMaxTokens(promptstr),   // maximum number of tokens to generate
+				"temperature":        config.GetTyqwTemperature(promptstr), // temperature controlling randomness and diversity
+				"top_p":              config.GetTyqwTopP(promptstr),        // probability threshold for nucleus sampling
+				"top_k":              config.GetTyqwTopK(promptstr),        // size of the sampling candidate set
+				"repetition_penalty": config.GetTyqwRepetitionPenalty(),    // penalty factor for repetition
+				"stop":               config.GetTyqwStopTokens(),           // stop tokens
+				"seed":               config.GetTyqwSeed(),                 // random-number seed
+				"result_format":      "message",                            // format of the returned result
+				"enable_search":      config.GetTyqwEnableSearch(),         // whether to enable internet search
+				"incremental_output": isIncrementalOutput,                  // incremental SSE mode; faster, but rwkv and openai do not support it
 			},
-			"model": config.GetTyqwModel(), // specify the chat model
+			"model": config.GetTyqwModel(promptstr), // specify the chat model
 			"input": map[string]interface{}{
 				"messages": messages, // conversation history between user and model
 			},
@@ -251,17 +251,17 @@ func (app *App) ChatHandlerTyqw(w http.ResponseWriter, r *http.Request) {
 		// Build the request body, restructured according to the provided docs
 		requestBody = map[string]interface{}{
 			"parameters": map[string]interface{}{
-				"max_tokens":         config.GetTyqwMaxTokens(),         // maximum number of tokens to generate
-				"temperature":        config.GetTyqwTemperature(),       // temperature controlling randomness and diversity
-				"top_p":              config.GetTyqwTopP(),              // probability threshold for nucleus sampling
-				"top_k":              config.GetTyqwTopK(),              // size of the sampling candidate set
-				"repetition_penalty": config.GetTyqwRepetitionPenalty(), // penalty factor for repetition
-				"stop":               config.GetTyqwStopTokens(),        // stop tokens
-				"seed":               config.GetTyqwSeed(),              // random-number seed
-				"result_format":      "message",                         // format of the returned result
-				"enable_search":      config.GetTyqwEnableSearch(),      // whether to enable internet search
+				"max_tokens":         config.GetTyqwMaxTokens(promptstr),   // maximum number of tokens to generate
+				"temperature":        config.GetTyqwTemperature(promptstr), // temperature controlling randomness and diversity
+				"top_p":              config.GetTyqwTopP(promptstr),        // probability threshold for nucleus sampling
+				"top_k":              config.GetTyqwTopK(promptstr),        // size of the sampling candidate set
+				"repetition_penalty": config.GetTyqwRepetitionPenalty(),    // penalty factor for repetition
+				"stop":               config.GetTyqwStopTokens(),           // stop tokens
+				"seed":               config.GetTyqwSeed(),                 // random-number seed
+				"result_format":      "message",                            // format of the returned result
+				"enable_search":      config.GetTyqwEnableSearch(),         // whether to enable internet search
 			},
-			"model": config.GetTyqwModel(), // specify the chat model
+			"model": config.GetTyqwModel(promptstr), // specify the chat model
 			"input": map[string]interface{}{
 				"messages": messages, // conversation history between user and model
 			},
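Once requestBody is assembled it still has to be serialized and POSTed. A hedged sketch of that step, not taken from this file: postTyqw is an illustrative helper name, and the X-DashScope-SSE header is an assumption based on DashScope's documented SSE opt-in.

package main

import (
	"bytes"
	"encoding/json"
	"net/http"
)

// postTyqw shows one plausible way to send the assembled request body.
func postTyqw(apiURL, apiKey string, requestBody map[string]interface{}, useSSE bool) (*http.Response, error) {
	payload, err := json.Marshal(requestBody)
	if err != nil {
		return nil, err
	}
	req, err := http.NewRequest("POST", apiURL, bytes.NewReader(payload))
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+apiKey)
	if useSSE {
		// Assumption: DashScope enables SSE streaming via this header.
		req.Header.Set("X-DashScope-SSE", "enable")
	}
	return http.DefaultClient.Do(req)
}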
@@ -402,7 +402,7 @@ func (app *App) ChatHandlerTyqw(w http.ResponseWriter, r *http.Request) {

 	reader := bufio.NewReader(resp.Body)
 	var totalUsage structs.GPTUsageInfo
-	if config.GetTyqwSseType() == 1 {
+	if config.GetTyqwSseType(promptstr) == 1 {
 		for {
 			line, err := reader.ReadString('\n')
 			if err != nil {
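The hunk above gates the streaming read loop on the now prompt-aware GetTyqwSseType. For reference, a minimal sketch of an incremental SSE consumer, assuming standard "data:"-prefixed framing rather than this repository's exact parser; readSSE and onChunk are illustrative names.

package main

import (
	"bufio"
	"io"
	"net/http"
	"strings"
)

// readSSE reads the response line by line, keeps only "data:" payload
// lines, and hands each chunk to a callback.
func readSSE(resp *http.Response, onChunk func(string)) error {
	defer resp.Body.Close()
	reader := bufio.NewReader(resp.Body)
	for {
		line, err := reader.ReadString('\n')
		if err != nil {
			if err == io.EOF {
				return nil // stream ended normally
			}
			return err
		}
		line = strings.TrimSpace(line)
		if !strings.HasPrefix(line, "data:") {
			continue // skip event names, comments, and keep-alive blank lines
		}
		onChunk(strings.TrimSpace(strings.TrimPrefix(line, "data:")))
	}
}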