
Commit

feat: fix stream + edit json settings
Kypaku committed Jul 3, 2023
1 parent 2d0453f commit 771d166
Showing 5 changed files with 96 additions and 53 deletions.
14 changes: 7 additions & 7 deletions package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
@@ -14,7 +14,7 @@
     "eventsource-parser": "^1.0.0",
     "gpt-simple-api-ts": "^1.0.35",
     "local-storage": "^2.0.0",
-    "openai": "^3.2.1",
+    "openai": "^3.3.0",
     "postcss": "^8",
     "request": "^2.88.2",
     "tailwindcss": "npm:@tailwindcss/postcss7-compat@^2.2.17",
8 changes: 8 additions & 0 deletions src/App.vue
@@ -58,6 +58,10 @@
                     class="w-full rounded-lg mt-8"
                     :rows="10"
                 />
+                <div class="tokens-left text-xs">
+                    <span>Tokens count (approximate): </span>
+                    <span>{{ tokensCount }}</span>
+                </div>
                 <button
                     v-if="tab === 'audio'"
                     :disabled="isTranscribing || !audioFile"
@@ -162,6 +166,10 @@
         };
     },
     computed: {
+        tokensCount(): number {
+            return this.prompt.length / 4
+        },
         langCodes(): InputTextSuggestion[] {
             return languageCodes.map((code) => ({
                 name: code,
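
Note: the new tokensCount computed property estimates the prompt's token count as prompt.length / 4, the common rough heuristic of about four characters per token for English text; it is a display-only approximation, not a real tokenizer count. A minimal standalone sketch of the same heuristic (the function name here is illustrative, not part of the app):

    // Rough display-only estimate, mirroring the tokensCount computed property:
    // roughly 4 characters per token. An exact count would require a real tokenizer.
    function approxTokenCount(text: string): number {
        return text.length / 4;
    }

    // Example: a 100-character prompt reads as roughly 25 tokens.
    console.log(approxTokenCount("a".repeat(100))); // 25
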
92 changes: 51 additions & 41 deletions src/api/openai.ts
@@ -23,6 +23,7 @@ export default class SimpleGPT {
     protected _configuration: Configuration | null
     protected _openai: OpenAIApi | null
     protected req: any
+    protected reader: any
 
     public get chatModels(): string[] {
         return ["gpt-3.5-turbo", "gpt-4"];
@@ -67,7 +68,7 @@
         return response.text;
     }
 
-    async getStream(prompt: string, fData: (raw: any, json: {[key: string]: any}, delta: string) => any, fEnd: any, opts?: Partial<CreateCompletionRequest & CreateChatCompletionRequest>): Promise<void> {
+    async getStream(prompt: string, fData: (delta: string, json: {[key: string]: any}, raw: any) => any, fEnd: any, opts?: Partial<CreateCompletionRequest & CreateChatCompletionRequest>): Promise<void> {
         return new Promise((resolve, reject) => {
             const model = opts?.model || this.defaultOptsGPT.model || "";
 
@@ -88,66 +89,75 @@
                 frequency_penalty: opts?.frequency_penalty || this.defaultOptsGPT.frequency_penalty,
                 presence_penalty: opts?.presence_penalty || this.defaultOptsGPT.presence_penalty,
                 stream: opts?.stream || true,
+                logit_bias: opts?.logit_bias || {},
+                function_call: opts?.function_call || undefined,
+                functions: opts?.functions || undefined,
             };
             const body = JSON.stringify(bodyRaw);
 
-            this.req = request({
-                url: "https://api.openai.com" + endpoint,
-                // hostname: "api.openai.com",
-                // port: 443,
-                // path: endpoint,
-                // signal: signal as any,
+            fetch("https://api.openai.com" + endpoint, {
                 method: "POST",
                 headers: {
                     "Content-Type": "application/json",
                     Authorization: "Bearer " + this._key
                 },
                 body: body,
-            });
+            }).then(async (response) => {
+                this.reader = response.body?.pipeThrough(new TextDecoderStream()).getReader();
 
-            this.req.on("error", (e: any) => {
-                console.error("problem with request:" + e.message);
-            });
+                const choices = [];
 
-            // req.write(body);
-            this.req.on("data", (chunk: any) => {
-                try {
-                    let delta = "";
-                    if (chunk?.toString().match(/^\{\n\s+\"error\"\:/)) {
-                        console.error("getStream error:", chunk.toString());
-                        reject(JSON.parse(chunk.toString().trim()));
-                        return;
-                    }
-                    const lines = chunk?.toString()?.split("\n") || [];
-                    const filtredLines = lines.filter((line: string) => line.trim());
-                    const line = filtredLines[filtredLines.length - 1];
-                    const data = line.toString().replace("data:", "").replace("[DONE]", "").replace("data: [DONE]", "").trim();
-                    if (data) {
-                        const json = JSON.parse(data);
-                        json.choices.forEach((choice: any) => {
-                            delta += choice.text || choice.message?.content || choice.delta?.content || "";
-                        });
-                        fData(delta, json, chunk.toString());
-                    }
-                } catch (e) {
-                    console.error("getStream handle chunk error:", e, chunk.toString());
-                }
-            });
+                const cancelled = false;
+                while (true) {
+                    if (!this.reader) break;
+                    const { value, done } = await this.reader.read();
 
-            this.req.on("end", () => {
-                fEnd?.();
-                resolve();
-            });
+                    if (done) {
+                        break;
+                    }
 
-            this.req.on("abort", () => {
-                fEnd?.();
-                resolve();
-            });
+                    if (value.startsWith("{")) {
+                        await this.reader.cancel();
+
+                        // As far as I can tell, if the response is an object, then it is an unrecoverable error.
+                        throw new Error(value);
+                    }
+
+                    const chunks = value.split("\n").map((chunk: any) => chunk.trim()).filter(Boolean);
+
+                    for (const chunk of chunks) {
+                        if (done) {
+                            break;
+                        }
+
+                        if (chunk === "") {
+                            continue;
+                        }
+
+                        if (chunk === "data: [DONE]") {
+                            await this.reader.cancel();
+
+                            break;
+                        }
+
+                        if (!chunk.startsWith("data: ")) {
+                            throw new Error(`Unexpected message: ${chunk}`);
+                        }
+
+                        try {
+                            const responseChunk = JSON.parse(chunk.toString().slice("data: ".length));
+
+                            fData(responseChunk.choices?.[0]?.delta?.content, responseChunk, chunk);
+                        } catch (e) {
+                            throw new Error(`Unexpected message: ${chunk}`);
+                        }
+                    }
+                }
+            });
         });
     }
 
     abortStream() {
-        const res = this.req.abort();
+        this.reader.cancel();
     }
 
     async get(prompt: string, opts?: Partial<CreateCompletionRequest & CreateChatCompletionRequest>): Promise<null | string[]> {
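
Note on the reworked streaming path: getStream now reads the fetch response body through a TextDecoderStream reader, splits each decoded chunk on newlines, parses every "data: ..." SSE line, and stops on "data: [DONE]"; the callback argument order also flips from (raw, json, delta) to (delta, json, raw). A minimal usage sketch, assuming `gpt` is a SimpleGPT instance from src/api/openai.ts already configured with an API key (the variable names are illustrative):

    let answer = "";

    gpt.getStream(
        "Write a haiku about streaming APIs",
        (delta, json, raw) => {
            // First argument is now the text delta; append it as it arrives.
            answer += delta ?? "";
        },
        // fEnd completion callback; note that the fetch-based path in the hunk
        // above does not visibly call it, so completion handling may need to
        // rely on the delta callback instead.
        () => console.log("stream finished:", answer),
        { model: "gpt-3.5-turbo", stream: true }
    );

    // A stream in flight can be stopped by cancelling the underlying reader:
    // gpt.abortStream();
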
33 changes: 29 additions & 4 deletions src/components/openai/OpenAITextSettings.vue
@@ -1,7 +1,10 @@
 <template>
     <div class="query-settings">
-        <b>GPT settings</b>
-        <div class="settingsWrapper flex-col">
+        <div class="flex-center-between">
+            <b>GPT settings</b>
+            <button class="underline text-sm" @click="editJson = !editJson"> {{ editJson ? 'Hide' : 'Edit' }} json </button>
+        </div>
+        <div class="settingsWrapper flex-col" v-if="!editJson">
             <InputText
                 class="mt-1"
                 name="model"
@@ -69,6 +72,9 @@
                 "
             /> -->
         </div>
+        <div v-else>
+            <textarea v-model="json" @input="ev => $emit('update:value', JSON.parse(ev?.target?.value))" rows="15" class="w-full"></textarea>
+        </div>
     </div>
 </template>
@@ -94,7 +100,10 @@
         ToggleSwitch
     },
     data() {
-        return {};
+        return {
+            json: "",
+            editJson: false,
+        };
     },
     computed: {
         modelsSuggestions(): InputTextSuggestion[] {
@@ -106,7 +115,17 @@
             console.log(this.value?.max_tokens);
         },
     },
-    // mounted() {},
+    // mounted() {},
+    watch: {
+        value: {
+            handler(newVal) {
+                this.json = JSON.stringify(newVal, null, 2);
+            },
+            deep: true,
+            immediate: true,
+        },
+    },
 });
 </script>
@@ -118,4 +137,10 @@
         display: flex;
     }
 }
+.flex-center-between {
+    display: flex;
+    justify-content: space-between;
+    align-items: center;
+}
 </style>
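
Note on the raw-JSON editor: the textarea's @input handler emits update:value through JSON.parse on every keystroke, which throws while the JSON is temporarily invalid mid-edit (for example right after deleting a comma). A possible hardening, sketched with a hypothetical handler name rather than the component's actual code, would emit only when the text parses:

    // Hypothetical guarded replacement for the inline @input handler:
    // emit the parsed settings object only when the textarea holds valid JSON.
    function onJsonInput(this: { $emit: (event: string, payload: unknown) => void }, ev: Event): void {
        const text = (ev.target as HTMLTextAreaElement).value;
        try {
            this.$emit("update:value", JSON.parse(text));
        } catch {
            // Ignore parse errors while the user is still typing; the last
            // valid settings object remains in effect.
        }
    }
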
