diff --git a/src/services/translate/chatglm/Config.jsx b/src/services/translate/chatglm/Config.jsx
index 405e8f7d22..82ebde18a8 100644
--- a/src/services/translate/chatglm/Config.jsx
+++ b/src/services/translate/chatglm/Config.jsx
@@ -15,6 +15,9 @@ import { useToastStyle } from '../../../hooks';
import { translate } from './index';
import { Language } from './index';
+// https://open.bigmodel.cn/dev/api/normal-model/glm-4
+const availableModels = ['glm-4-plus', 'glm-4-0520', 'glm-4', 'glm-4-air', 'glm-4-airx', 'glm-4-long', 'glm-4-flash'];
+
export function Config(props) {
const { instanceKey, updateServiceList, onClose } = props;
const { t } = useTranslation();
@@ -96,7 +99,7 @@ export function Config(props) {
{t('services.translate.chatglm.model')}
-
+
-
- {t(`services.translate.chatglm.chatglm_turbo`)}
-
- {t(`services.translate.chatglm.chatglm_pro`)}
- {t(`services.translate.chatglm.chatglm_std`)}
-
- {t(`services.translate.chatglm.chatglm_lite`)}
-
+ {availableModels.map(it => (
+
+ {it}
+
+ ))}
diff --git a/src/services/translate/chatglm/index.jsx b/src/services/translate/chatglm/index.jsx
index 22b0840482..a6514c58a7 100644
--- a/src/services/translate/chatglm/index.jsx
+++ b/src/services/translate/chatglm/index.jsx
@@ -1,5 +1,6 @@
import { Language } from './info';
import * as jose from 'jose';
+import { info } from 'tauri-plugin-log-api';
export async function translate(text, from, to, options = {}) {
const { config, setResult, detect } = options;
@@ -8,7 +9,7 @@ export async function translate(text, from, to, options = {}) {
let [id, secret] = apiKey.split('.');
if (id === undefined || secret === undefined) {
- throw new Error('invalid apikey');
+ return Promise.reject('invalid apikey');
}
promptList = promptList.map((item) => {
return {
@@ -34,62 +35,64 @@ export async function translate(text, from, to, options = {}) {
const headers = {
'Content-Type': 'application/json',
- accept: 'text/event-stream',
- Authorization: token,
+ 'Authorization': token,
};
- let body = {
- prompt: promptList,
+ const body = {
+ model: model,
+ messages: promptList,
+ stream: true,
};
- const res = await window.fetch(`https://open.bigmodel.cn/api/paas/v3/model-api/${model}/sse-invoke`, {
- method: 'POST',
- headers: headers,
- body: JSON.stringify(body),
- });
- if (res.ok) {
- let target = '';
- let errRes = res.clone();
- const reader = res.body.getReader();
- try {
+ let result = '';
+ try {
+ const response = await fetch('https://open.bigmodel.cn/api/paas/v4/chat/completions', {
+ method: 'POST',
+ headers: headers,
+ body: JSON.stringify(body),
+ });
+ if (!response.ok) {
+ throw new Error(`Http Request Error\nHttp Status: ${response.status}\n${await response.text()}`);
+ }
+
+ let buffer = '';
+ // Function to process the stream data
+ const processChatStream = async (reader, decoder) => {
while (true) {
const { done, value } = await reader.read();
- if (done) {
- if (target === '') {
- let json = await errRes.json();
- if (json.msg) {
- throw json.msg;
- } else {
- throw JSON.stringify(json);
- }
- }
- setResult(target.trim());
- return target.trim();
- }
- const str = new TextDecoder().decode(value);
- let list = str.split('\n');
- for (let line of list) {
- if (line.startsWith('data:')) {
- let data = line.replace('data:', '');
- if (data === '') {
- target += '\n';
- } else {
- target += data;
+ if (done) break;
+
+ // Convert binary data to string
+ buffer += decoder.decode(value, { stream: true });
+
+ // Process complete events
+ const boundary = buffer.lastIndexOf('\n\n');
+ if (boundary !== -1) {
+ const event = buffer.slice(0, boundary);
+ buffer = buffer.slice(boundary + 2);
+ const chunks = event.split('\n\n');
+
+ for (const chunk of chunks) {
+                        const text = chunk.replace(/^data:/, '').trim();
+                        if (!text || text === '[DONE]') {
+                            continue;
+                        }
+ const data = JSON.parse(text);
+                        result += data.choices[0]?.delta?.content ?? '';
if (setResult) {
- setResult(target + '_');
- } else {
- return '[STREAM]';
+ setResult(result + '_');
}
}
}
}
- } finally {
- reader.releaseLock();
- }
- } else {
- throw `Http Request Error\nHttp Status: ${res.status}\n${JSON.stringify(res.data)}`;
+ };
+
+ await processChatStream(response.body.getReader(), new TextDecoder());
+ } catch (error) {
+ return Promise.reject(error);
}
+
+ return result;
}
export * from './Config';
diff --git a/src/services/translate/openai/index.jsx b/src/services/translate/openai/index.jsx
index fc36424e3f..b001716d87 100644
--- a/src/services/translate/openai/index.jsx
+++ b/src/services/translate/openai/index.jsx
@@ -10,13 +10,13 @@ export async function translate(text, from, to, options) {
if (!/https?:\/\/.+/.test(requestPath)) {
requestPath = `https://${requestPath}`;
}
- if (requestPath.endsWith('/')) {
- requestPath = requestPath.slice(0, -1);
- }
+ const apiUrl = new URL(requestPath);
- // /v1 is not required
- if (service === 'openai' && !requestPath.endsWith('/chat/completions')) {
- requestPath += '/v1/chat/completions';
+ // in openai like api, /v1 is not required
+ if (service === 'openai' && !apiUrl.pathname.endsWith('/chat/completions')) {
+ // not openai like, populate completion endpoint
+ apiUrl.pathname += apiUrl.pathname.endsWith('/') ? '' : '/';
+ apiUrl.pathname += 'v1/chat/completions';
}
// 兼容旧版
@@ -61,7 +61,7 @@ export async function translate(text, from, to, options) {
body['model'] = model;
}
if (stream) {
- const res = await window.fetch(requestPath, {
+ const res = await window.fetch(apiUrl.href, {
method: 'POST',
headers: headers,
body: JSON.stringify(body),
@@ -118,7 +118,7 @@ export async function translate(text, from, to, options) {
throw `Http Request Error\nHttp Status: ${res.status}\n${JSON.stringify(res.data)}`;
}
} else {
- let res = await fetch(requestPath, {
+ let res = await fetch(apiUrl.href, {
method: 'POST',
headers: headers,
body: Body.json(body),
diff --git a/src/window/Translate/components/TargetArea/index.jsx b/src/window/Translate/components/TargetArea/index.jsx
index 5ffeee562d..4d6ea56615 100644
--- a/src/window/Translate/components/TargetArea/index.jsx
+++ b/src/window/Translate/components/TargetArea/index.jsx
@@ -39,7 +39,7 @@ import { invoke_plugin } from '../../../../utils/invoke_plugin';
import * as builtinServices from '../../../../services/translate';
import * as builtinTtsServices from '../../../../services/tts';
-import { store } from '../../../../utils/store';
+import { info, error as logError } from 'tauri-plugin-log-api';
import {
INSTANCE_NAME_CONFIG_KEY,
ServiceSourceType,
@@ -86,6 +86,12 @@ export default function TargetArea(props) {
const speak = useVoice();
const theme = useTheme();
+ useEffect(() => {
+ if (error) {
+            logError(`[${currentTranslateServiceInstanceKey}] error: ` + error);
+ }
+ }, [error]);
+
// listen to translation
useEffect(() => {
setResult('');
@@ -184,6 +190,7 @@ export default function TargetArea(props) {
utils,
}).then(
(v) => {
+ info(`[${currentTranslateServiceInstanceKey}]resolve:` + v);
if (translateID[index] !== id) return;
setResult(typeof v === 'string' ? v.trim() : v);
setIsLoading(false);
@@ -224,6 +231,7 @@ export default function TargetArea(props) {
}
},
(e) => {
+ info(`[${currentTranslateServiceInstanceKey}]reject:` + e);
if (translateID[index] !== id) return;
setError(e.toString());
setIsLoading(false);
@@ -255,6 +263,7 @@ export default function TargetArea(props) {
})
.then(
(v) => {
+ info(`[${currentTranslateServiceInstanceKey}]resolve:` + v);
if (translateID[index] !== id) return;
setResult(typeof v === 'string' ? v.trim() : v);
setIsLoading(false);
@@ -295,6 +304,7 @@ export default function TargetArea(props) {
}
},
(e) => {
+ info(`[${currentTranslateServiceInstanceKey}]reject:` + e);
if (translateID[index] !== id) return;
setError(e.toString());
setIsLoading(false);