chatgpt optimization (chatgpt优化)
This commit is contained in:
parent
a40af9c545
commit
c1232b59db
@@ -17,6 +17,12 @@
 <b>apiKey</b> 请到 <a href="https://platform.openai.com/account/api-keys" target="_blank">https://platform.openai.com/account/api-keys</a> 获取<br>
 </div>
 </form-entry>
+<form-entry label="采样温度" error="temperature">
+<input slot="field" class="textfield" type="number" v-model.trim="temperature" @keydown.enter="resolve()">
+<div class="form-entry__info">
+<b>采样温度</b>,介于 0 和 2 之间。较高的值(如 0.8)将使输出更加随机,而较低的值(如 0.2)将使输出更加集中和确定。<br>
+</div>
+</form-entry>
 </div>
 <div class="modal__button-bar">
 <button class="button" @click="config.reject()">取消</button>
@@ -32,21 +38,32 @@ export default modalTemplate({
 data: () => ({
 apiKey: null,
 proxyHost: null,
+temperature: 1,
 }),
 methods: {
 resolve() {
 if (!this.apiKey) {
 this.setError('apiKey');
+return;
+}
+if (this.temperature < 0 || this.temperature > 2) {
+this.setError('temperature');
+return;
 }
 if (this.proxyHost && this.proxyHost.endsWith('/')) {
 this.proxyHost = this.proxyHost.substring(0, this.proxyHost.length - 1);
 }
-this.config.resolve({ apiKey: this.apiKey, proxyHost: this.proxyHost });
+this.config.resolve({
+apiKey: this.apiKey,
+proxyHost: this.proxyHost,
+temperature: parseFloat(this.temperature),
+});
 },
 },
 mounted() {
 this.apiKey = this.config.apiKey;
 this.proxyHost = this.config.proxyHost;
+this.temperature = this.config.temperature || this.temperature;
 },
 },
 });
 </script>
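
Side note on the validation added above: the helper text in the first hunk already states that 采样温度 (sampling temperature) must lie between 0 and 2, which matches the range the OpenAI chat completions API accepts, and resolve() now rejects anything outside it before resolving. parseFloat(this.temperature) is there because the value bound to the input may arrive as a string. A standalone sketch of the same check; isValidTemperature is a hypothetical name, not part of this commit:

    // Hypothetical helper mirroring the range check added in resolve() above.
    function isValidTemperature(value) {
      const t = parseFloat(value);
      return Number.isFinite(t) && t >= 0 && t <= 2;
    }

    isValidTemperature('0.8'); // true
    isValidTemperature('2.5'); // false
    isValidTemperature('');    // false (parseFloat('') is NaN)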
@@ -73,7 +73,12 @@ export default modalTemplate({
 this.generating = true;
 this.result = '';
 try {
-this.xhr = chatGptSvc.chat(this.chatGptConfig.proxyHost, this.chatGptConfig.apiKey, `${this.content}\n(使用Markdown方式输出结果)`, this.process);
+this.xhr = chatGptSvc.chat({
+proxyHost: this.chatGptConfig.proxyHost,
+apiKey: this.chatGptConfig.apiKey,
+content: `${this.content}\n(使用Markdown方式输出结果)`,
+temperature: this.chatGptConfig.temperature || 1,
+}, this.process);
 } catch (err) {
 this.generating = false;
 store.dispatch('notification/error', err);
@@ -81,7 +86,12 @@ export default modalTemplate({
 },
 async openConfig() {
 try {
-const config = await store.dispatch('modal/open', { type: 'chatGptConfig', apiKey: this.chatGptConfig.apiKey, proxyHost: this.chatGptConfig.proxyHost });
+const config = await store.dispatch('modal/open', {
+type: 'chatGptConfig',
+apiKey: this.chatGptConfig.apiKey,
+proxyHost: this.chatGptConfig.proxyHost,
+temperature: this.chatGptConfig.temperature,
+});
 store.dispatch('chatgpt/setCurrConfig', config);
 } catch (e) { /* Cancel */ }
 },
@@ -1,7 +1,12 @@
 import store from '../store';

 export default {
-chat(proxyHost, apiKey, content, callback) {
+chat({
+proxyHost,
+apiKey,
+content,
+temperature,
+}, callback) {
 const xhr = new XMLHttpRequest();
 const url = `${proxyHost || 'https://api.openai.com'}/v1/chat/completions`;
 xhr.open('POST', url);
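
The service's chat() signature changes here from four positional arguments to an options object plus the streaming callback, which is the shape the modal call in the earlier hunk passes. An illustrative call, using placeholder values and an import path that are assumptions, not taken from the commit:

    import chatGptSvc from '../services/chatGptSvc'; // path is an assumption

    const xhr = chatGptSvc.chat({
      proxyHost: '',            // empty falls back to https://api.openai.com
      apiKey: 'sk-xxxx',        // placeholder key
      content: 'Summarize this note in Markdown',
      temperature: 0.2,         // if omitted, the request body defaults to 1
    }, (text) => {
      console.log(text);        // callback, assumed to receive incremental text
    });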
@@ -10,7 +15,7 @@ export default {
 xhr.send(JSON.stringify({
 model: 'gpt-3.5-turbo',
 messages: [{ role: 'user', content }],
-temperature: 1,
+temperature: temperature || 1,
 stream: true,
 }));
 let lastRespLen = 0;
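
Because the request body keeps stream: true, the completions endpoint answers with incremental `data: {...}` lines, and the lastRespLen context line above suggests xhr.responseText is consumed in slices. The following is only a sketch of how such a stream could be parsed, not the commit's actual handler; attachStreamParser is a hypothetical name:

    // Sketch: on each progress event, read only the newly arrived part of
    // xhr.responseText and forward the decoded text deltas to the callback.
    function attachStreamParser(xhr, callback) {
      let lastRespLen = 0;
      xhr.onprogress = () => {
        const newText = xhr.responseText.substring(lastRespLen);
        lastRespLen = xhr.responseText.length;
        newText.split('\n').forEach((line) => {
          if (!line.startsWith('data: ') || line === 'data: [DONE]') return;
          const delta = JSON.parse(line.slice(6)).choices[0].delta.content;
          if (delta) callback(delta);
        });
      };
    }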