ChatGLM integration successful

aixianling
2023-05-17 18:16:53 +08:00
parent 31c75dd885
commit c179400d67
4 changed files with 120 additions and 46 deletions

View File

@@ -11,6 +11,8 @@
"dependencies": {
"@kangc/v-md-editor": "^2.3.15",
"element-plus": "^2.3.4",
"nanoid": "^4.0.2",
"node-forge": "^1.3.1",
"query-string": "^8.1.0",
"sass": "^1.62.1",
"sass-loader": "^13.2.2",

View File

@@ -65,7 +65,7 @@ export default {
}
this.chatHistory.push(aiMsg)
if (this.config.stream) {
ai.chatStream(this.chatHistory).then(reader => this.streamOutput(reader, this.chatHistory.at(-1))).finally(() => this.loading = false)
ai.chatStream(this.chatHistory).then(reader => ai.streamOutput(reader, this.chatHistory.at(-1))).finally(() => this.loading = false)
} else {
ai.chat(this.chatHistory).then(reply => {
const decodeArr = reply.split("")
@@ -75,40 +75,6 @@ export default {
} else {
this.$message.error("请不要发送空消息!")
}
},
streamOutput(reader, chat) {
return reader.read().then(({done, value}) => {
if (done) {
return;
}
if (!chat.reminder) {
chat.reminder = ""
}
let decode = new TextDecoder().decode(value)
decode = chat.reminder + decode
let decodedArray = decode.split("data: ");
let longstr = "";
decodedArray.forEach(decoded => {
decoded = decoded.trim();
try {
if (longstr != "") {
decoded = longstr + decoded;
longstr = "";
}
} catch (e) {
longstr = decoded;
decoded = "";
}
if (!!decoded && decoded !== "[DONE]") {
const choices = JSON.parse(decoded).choices
if (choices?.length > 0) {
const response = choices[0].delta.content || "";
chat.msg += response
}
}
})
return this.streamOutput(reader, chat)
})
}
}
}

View File

@@ -4,14 +4,17 @@
<el-form-item label="语言模型">
<el-row class="flexWrap">
<ai-model v-for="m in models" :model="m" small :class="{active:settings.model.id==m.id}"
@click="settings.model=new m()"/>
@click="initModel(m)"/>
</el-row>
</el-form-item>
<el-form-item label="流式输出">
<el-switch v-model="settings.stream" :active-value="true" :inactive-value="false"/>
</el-form-item>
<el-form-item label="API KEY">
<el-input v-model="settings.model.apiKey" clearable @change="v=>settings.model.setApiKey(v),getModelAccount()"/>
<el-row class="w100">
<el-input v-model="settings.model.apiKey" clearable class="fill mar-r8"/>
<el-button type="text" @click="getModelAccount">应用</el-button>
</el-row>
</el-form-item>
<el-row v-loading="loadingAccount" element-loading-background="#272A37">
<el-form-item label="账号用户" class="fill">{{ account.username }}</el-form-item>
@@ -46,6 +49,7 @@ export default {
immediate: true,
handler(v) {
this.settings = v
this.getModelAccount()
}
}
},
@@ -54,15 +58,23 @@ export default {
account: v => v.settings.account || {usage: 0, total: 0}
},
methods: {
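// Swap in the selected model: instantiate it, then poll until its apiKey is
// filled in (ChatGLM fetches a token asynchronously in its constructor) before applying it.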
initModel(model) {
const ins = new model()
const timer = setInterval(() => {
if (ins.apiKey) {
clearInterval(timer)
this.settings.model = ins
this.getModelAccount()
}
}, 500)
},
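// Refresh account/usage info; only runs for models that implement getAccount and already have an apiKey.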
getModelAccount() {
const ai = this.settings.model
if (ai.getAccount) {
if (ai.apiKey) {
this.loadingAccount = true
ai.getAccount().then(v => this.settings.account = v).finally(() => this.loadingAccount = false)
}
}
},
created() {
}
}
</script>

View File

@@ -1,4 +1,6 @@
import {dayjs} from "element-plus";
import {nanoid} from "nanoid";
import forge from 'node-forge';
import axios from "./axios";
import {AI_AVATAR, OPEN_AI_KEY} from "./env";
@@ -9,7 +11,7 @@ class BaseModel {
}
this.headers = {
"Content-Type": "application/json",
Accept: "application/json",
Accept: "application/json,text/event-stream",
}
}
@@ -19,6 +21,9 @@ class BaseModel {
}
}
/**
* ChatGPT gpt-3.5-turbo API.
*/
export class ChatGPT extends BaseModel {
static base = "https://chatwithai.pages.dev"
static avatar = AI_AVATAR
@@ -69,6 +74,41 @@ export class ChatGPT extends BaseModel {
});
} else return Promise.reject("没有权限或者网络异常,请重新尝试!")
}
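// Recursively read the SSE stream: split each chunk on "data: ", parse the JSON events,
// and append each delta's content to chat.msg until the stream ends or sends [DONE].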
streamOutput(reader, chat) {
return reader.read().then(({done, value}) => {
if (done) {
return;
}
if (!chat.reminder) {
chat.reminder = ""
}
let decode = new TextDecoder().decode(value)
decode = chat.reminder + decode
let decodedArray = decode.split("data: ");
let longstr = "";
decodedArray.forEach(decoded => {
decoded = decoded.trim();
// prepend any fragment carried over from the previous piece
if (longstr != "") {
decoded = longstr + decoded;
longstr = "";
}
if (!!decoded && decoded !== "[DONE]") {
try {
const choices = JSON.parse(decoded).choices
if (choices?.length > 0) {
const response = choices[0].delta.content || "";
chat.msg += response
}
} catch (e) {
// the JSON was cut off mid-chunk: keep it and prepend it to the next piece
longstr = decoded;
}
}
})
chat.reminder = longstr
return this.streamOutput(reader, chat)
})
}
}
/**
@@ -76,26 +116,80 @@ export class ChatGPT extends BaseModel {
*/
export class ChatGLM extends BaseModel {
static base = "https://maas.aminer.cn/api/paas"
"/model/v1/open/engines/chatGLM/chatGLM"
static avatar = "https://cdn.cunwuyun.cn/chat/chatglm.svg"
static name = "ChatGLM"
static id = "chatglm-130b"
static desc = "ChatGLM-130B所基于的模型"
static publicKey = "MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAMZXxmDh2Rs1lh3Ymud1eVBjds/9SfjczHJFpNe9+0FsUffILVMTBcTqmdPZxjC6M1Ad2EHaHMWXZuc0fIc4Lh8CAwEAAQ=="
constructor(params) {
const {avatar, name, desc, id} = ChatGLM
super({avatar, name, desc, id, ...params})
super({avatar, name, desc, id, taskId: nanoid(), ...params})
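// Fetch an API token as soon as the model is constructed; apiKey is filled in
// asynchronously, which is why the settings panel polls for it (see initModel).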
this.getToken().then(e => this.setApiKey(e))
}
getToken() {
const encrypted = "MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAMZXxmDh2Rs1lh3Ymud1eVBjds/9SfjczHJFpNe9+0FsUffILVMTBcTqmdPZxjC6M1Ad2EHaHMWXZuc0fIc4Lh8CAwEAAQ=="
return axios.post(ChatGLM.base + "/passApiToken/createApiToken", JSON.stringify({apiKey: "4e3ceff669c143dfa09e763663aa72cd", encrypted}), {
async getToken() {
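// Reuse the key if one is already set; otherwise RSA-encrypt the current timestamp
// with the ChatGLM public key and exchange it for a token via createApiToken.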
if (this.apiKey) return await this.apiKey
const timestamp = new TextEncoder().encode(Date.now().toFixed(0))
const encrypted = ChatGLM.encrypt(ChatGLM.publicKey, timestamp)
return await axios.post(ChatGLM.base + "/passApiToken/createApiToken", JSON.stringify({apiKey: "4e3ceff669c143dfa09e763663aa72cd", encrypted}), {
headers: this.headers,
}).then(res => res.json()).then(data => data?.token || "key无效或网络波动,请重新尝试");
}).then(res => res.json()).then(data => data?.data || "key无效或网络波动,请重新尝试");
}
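// Non-streaming chat: create a stream context first, then fetch the reply from the stream endpoint.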
async chat(history, callback) {
const context = await axios.post(ChatGLM.base + "/v1/stream_context").then(res => res.json());
return await axios.get(ChatGPT.base + "/v1/stream", {params: context.result})
}
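// Streaming chat: the last (empty) assistant placeholder is dropped, the latest user
// message becomes the prompt, the rest is sent as history; returns the SSE body reader.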
async chatStream(messages) {
const history = messages.map(e => e.msg)
history.pop()
const prompt = history.pop()
const url = ChatGLM.base + "/model/v1/open/engines/sse/chatGLM/chatGLM"
// const url = ChatGLM.base + "/model/v2/open/engines/chatglm_qa_6b/chatglm_6b"
return await axios.post(url, JSON.stringify({
history, prompt,
temperature: 1, top_p: 0.6, requestTaskNo: this.taskId
}), {
headers: this.headers,
}).then(res => res?.body?.getReader());
}
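// RSAES-PKCS1-v1_5-encrypt the timestamp with the base64 DER-encoded public key and return the result as base64.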
static encrypt(publicKey, timestamp) {
const public_key = forge.util.decode64(publicKey)
const decoded_key = forge.asn1.fromDer(public_key); // decode the DER-encoded key with fromDer
const key = forge.pki.publicKeyFromAsn1(decoded_key); // import the public key with publicKeyFromAsn1
const encrypted = key.encrypt(timestamp, 'RSAES-PKCS1-V1_5');
return forge.util.encode64(encrypted);
}
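// Query the bigmodel.ai billing report and map it to the {username, usage, total} shape the settings panel expects.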
async getAccount() {
const {headers} = this
const usages = await axios.get("https://open.bigmodel.ai/api/paas/account/query-customer-account-report", {headers}).then(res => res.json());
if (usages.code == 200) {
const {data} = usages
return {
...data, username: "Kubbo",
usage: data.totalSpendAmount?.toFixed(4),
total: data.rechargeAmount?.toFixed(4)
}
} else return Promise.reject("没有权限或者网络异常,请重新尝试!")
}
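// On every SSE chunk, keep only the part before "event:finish", strip the data: prefixes,
// and rebuild chat.msg from the remaining lines, then keep reading recursively.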
streamOutput(reader, chat) {
return reader.read().then(({done, value}) => {
if (done) {
return;
}
const decode = new TextDecoder().decode(value)
const contents = decode.split("event:finish")[0].split("\n")
if (contents.length > 0) {
console.log(contents)
chat.msg = contents.filter(e => e.startsWith("data:") && e.trim() != "data:")?.map(e => e.replace(/data:/, '')).join("\n") || ""
}
return this.streamOutput(reader, chat)
})
}
}
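For reference, a minimal usage sketch of the new ChatGLM streaming path (not part of the commit). It simply mirrors the calls made by the chat component and the settings panel above, and assumes that message entries are plain objects with a msg field and that setApiKey() populates apiKey once getToken() resolves:

const glm = new ChatGLM()                        // constructor kicks off getToken()
const waitForKey = setInterval(() => {           // same polling trick as initModel()
    if (!glm.apiKey) return
    clearInterval(waitForKey)
    const history = [{msg: "Hello"}, {msg: ""}]  // last entry is the empty assistant placeholder
    glm.chatStream(history)
        .then(reader => glm.streamOutput(reader, history.at(-1)))
        .then(() => console.log(history.at(-1).msg))
}, 500)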