chatgpt优化

This commit is contained in:
xiaoqi.cxq 2023-05-23 10:44:26 +08:00
parent a40af9c545
commit c1232b59db
3 changed files with 37 additions and 5 deletions

View File

@ -17,6 +17,12 @@
<b>apiKey</b> 请到 <a href="https://platform.openai.com/account/api-keys" target="_blank">https://platform.openai.com/account/api-keys</a> <br>
</div>
</form-entry>
<form-entry label="采样温度" error="temperature">
<input slot="field" class="textfield" type="number" v-model.trim="temperature" @keydown.enter="resolve()">
<div class="form-entry__info">
<b>采样温度</b>介于 0 和 2 之间。较高的值(如 0.8)将使输出更加随机,而较低的值(如 0.2)将使输出更加集中和确定。<br>
</div>
</form-entry>
</div>
<div class="modal__button-bar">
<button class="button" @click="config.reject()">取消</button>
@ -32,21 +38,32 @@ export default modalTemplate({
data: () => ({
apiKey: null,
proxyHost: null,
temperature: 1,
}),
methods: {
resolve() {
if (!this.apiKey) {
this.setError('apiKey');
return;
}
if (this.temperature < 0 || this.temperature > 2) {
this.setError('temperature');
return;
}
if (this.proxyHost && this.proxyHost.endsWith('/')) {
this.proxyHost = this.proxyHost.substring(0, this.proxyHost.length - 1);
}
this.config.resolve({ apiKey: this.apiKey, proxyHost: this.proxyHost });
this.config.resolve({
apiKey: this.apiKey,
proxyHost: this.proxyHost,
temperature: parseFloat(this.temperature),
});
},
},
mounted() {
this.apiKey = this.config.apiKey;
this.proxyHost = this.config.proxyHost;
this.temperature = this.config.temperature || this.temperature;
},
});
</script>

View File

@ -73,7 +73,12 @@ export default modalTemplate({
this.generating = true;
this.result = '';
try {
this.xhr = chatGptSvc.chat(this.chatGptConfig.proxyHost, this.chatGptConfig.apiKey, `${this.content}\n(使用Markdown方式输出结果)`, this.process);
this.xhr = chatGptSvc.chat({
proxyHost: this.chatGptConfig.proxyHost,
apiKey: this.chatGptConfig.apiKey,
content: `${this.content}\n(使用Markdown方式输出结果)`,
temperature: this.chatGptConfig.temperature || 1,
}, this.process);
} catch (err) {
this.generating = false;
store.dispatch('notification/error', err);
@ -81,7 +86,12 @@ export default modalTemplate({
},
async openConfig() {
try {
const config = await store.dispatch('modal/open', { type: 'chatGptConfig', apiKey: this.chatGptConfig.apiKey, proxyHost: this.chatGptConfig.proxyHost });
const config = await store.dispatch('modal/open', {
type: 'chatGptConfig',
apiKey: this.chatGptConfig.apiKey,
proxyHost: this.chatGptConfig.proxyHost,
temperature: this.chatGptConfig.temperature,
});
store.dispatch('chatgpt/setCurrConfig', config);
} catch (e) { /* Cancel */ }
},

View File

@ -1,7 +1,12 @@
import store from '../store';
export default {
chat(proxyHost, apiKey, content, callback) {
chat({
proxyHost,
apiKey,
content,
temperature,
}, callback) {
const xhr = new XMLHttpRequest();
const url = `${proxyHost || 'https://api.openai.com'}/v1/chat/completions`;
xhr.open('POST', url);
@ -10,7 +15,7 @@ export default {
xhr.send(JSON.stringify({
model: 'gpt-3.5-turbo',
messages: [{ role: 'user', content }],
temperature: 1,
temperature: temperature || 1,
stream: true,
}));
let lastRespLen = 0;