// server.ts
import express, {Request, Response} from 'express';
import {chatComplete} from "./openai/openAiApi";
import OpenAI from "openai";
import * as fs from "fs";
if (!process.env.OPEN_AI_KEY || !process.env.OPEN_AI_ORG) {
    console.error("env OPEN_AI_KEY or OPEN_AI_ORG not set!");
    process.exit(1);
}
const app = express();
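// Mock responses are loaded from disk so the UI can be exercised without calling the
// OpenAI API; `correspondence` holds the in-memory chat history for the current server process.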
const mockModelsResponse: string = fs.readFileSync('openai/mocks/modelResponseMock.json').toString();
const mockChatCompleteResponse: string = fs.readFileSync('openai/mocks/chatCompleteResponseMock.json').toString();
let correspondence: OpenAI.Chat.ChatCompletionMessageParam[] = [];
app.use(express.urlencoded({extended: true}));
app.use('/css', express.static(__dirname + '/node_modules/bulma/css'));
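// GET / and POST / both render the empty main page; the "Home" buttons post back to /.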
app.get('/', (req: Request, res: Response) => {
    res.send(mainPage('', false));
});
app.post('/', (req: Request, res: Response) => {
    res.send(mainPage('', false));
});
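// POST /listModels fetches (currently mocked) model metadata and renders it on a separate page.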
app.post('/listModels', async (req: Request, res: Response) => {
    const fetchedModelsInfo = await openAiListModelsRequest();
    res.send(modelsPage(fetchedModelsInfo));
});
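// POST /submit sends the user's prompt (plus any preserved history) to chat completion
// and re-renders the main page with the answer.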
app.post('/submit', async (req: Request, res: Response) => {
    const userInput = req.body.userPrompt;
    const preserveChat = !!req.body.preserve;
    updateMessagesStack(userInput, preserveChat);
    const chatGptAnswer = await chatComplete(correspondence); // 'This is a mock answer from ChatGPT';
    // Keep the assistant's reply in the history so follow-up prompts retain context.
    correspondence.push({role: 'assistant', content: chatGptAnswer});
    const resultHtml = `
        <div>
            <div style="padding-top: 10px; padding-bottom: 10px">Your question <strong>"${userInput}"</strong> has been answered by ChatGPT:</div>
            <div>
                <textarea class="textarea has-fixed-size" readonly id="result" style="width: 100%; height: 500px; resize: none">${chatGptAnswer}</textarea>
            </div>
        </div>
    `;
    const html = mainPage(resultHtml, preserveChat);
    res.send(html);
});
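// Note: the handler above interpolates userInput and chatGptAnswer into the page without
// escaping. A minimal helper along these lines could be applied to user-supplied text before
// interpolation (the name and approach here are illustrative, not part of the original flow):
const escapeHtml = (text: string): string =>
    text
        .replace(/&/g, '&amp;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;')
        .replace(/"/g, '&quot;')
        .replace(/'/g, '&#39;');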
// Reset the history unless the user chose to preserve it, then append the new user prompt.
const updateMessagesStack = (userInput: string, shouldPreserve: boolean) => {
    correspondence = shouldPreserve ? correspondence : [];
    correspondence.push({role: 'user', content: userInput});
};
// Render the main chat page; `result` is an optional HTML fragment with the latest answer.
const mainPage = (result = '', preserveChat: boolean): string => {
    const correspondenceString: string = readableCorrespondence();
    return `
    <html lang="en">
    <head>
        <link rel="stylesheet" href="/css/bulma.css" />
    </head>
    <body style="padding: 48px">
        <h1 class="title">OpenAI ChatGPT Client</h1>
        <div class="container">
            <div style="display: flex; gap: 15px">
                <form method="post" action="/">
                    <button class="button is-rounded" type="submit">Home</button>
                </form>
                <form method="post" action="/listModels">
                    <button class="button is-rounded" type="submit">List Models</button>
                </form>
            </div>
            <form method="post" action="/submit">
                <div class="columns">
                    <div class="column">
                        <textarea class="textarea has-fixed-size" name="userPrompt" style="height: 800px; resize: none" placeholder="ChatGPT prompt"></textarea>
                        <button
                            class="button is-info"
                            style="padding: 10px; margin: 10px 0 10px 0"
                            type="submit"
                            onclick="this.classList.add('is-loading')">
                            Submit
                        </button>
                    </div>
                    <div class="column" style="display: flex; flex-direction: column">
                        <pre class="textarea has-fixed-size has-text-info" id="correspondence" style="white-space: pre-wrap; max-width: 50%; height: 800px">${correspondenceString}</pre>
                        <div>
                            <label class="checkbox">
                                <input name="preserve" type="checkbox" ${preserveChat ? 'checked' : ''}/>
                                Preserve chat
                            </label>
                        </div>
                    </div>
                </div>
            </form>
            ${result}
        </div>
    </body>
    </html>`;
};
// Render the in-memory conversation as an HTML string, one colored role label per message.
const readableCorrespondence = (): string => {
    const res: string[] = [];
    for (const entry of correspondence) {
        res.push(`${coloredSpanForRole(entry.role)} ${entry.content}\n\n`);
    }
    // join('') concatenates the entries without the comma separators that Array.toString()
    // inserts, and without stripping commas that appear inside message content.
    return res.join('');
};
// Wrap the role label in Bulma color classes so each speaker is visually distinct.
const coloredSpanForRole = (role: string): string => {
    let roleColorClass: string;
    switch (role) {
        case 'system':
            roleColorClass = 'has-text-danger-dark has-background-danger-light';
            break;
        case 'assistant':
            roleColorClass = 'has-text-success has-background-success-light';
            break;
        case 'user':
            roleColorClass = 'has-text-warning-dark has-background-warning-light';
            break;
        default:
            roleColorClass = 'has-text-danger has-background-danger-light';
            break;
    }
    return `<span class="${roleColorClass}">${role.toLocaleUpperCase()}:</span>`;
};
// Render the models overview page with the (pretty-printed) models payload.
const modelsPage = (models = 'No answer received'): string => {
    return `
    <html lang="en">
    <head>
        <link rel="stylesheet" href="/css/bulma.css" />
    </head>
    <body style="padding: 48px">
        <h1 class="title">Available ChatGPT models</h1>
        <div class="container">
            <form method="post" action="/">
                <button class="button is-rounded" type="submit">Home</button>
            </form>
            <div>
                <textarea class="textarea has-fixed-size" readonly style="width: 100%; height: 80%; resize: none;">${models}</textarea>
            </div>
        </div>
    </body>
    </html>`;
};
// List available models. The live call is stubbed out with a mock payload; the JSON is
// pretty-printed for display, falling back to the raw string if parsing fails.
const openAiListModelsRequest = async () => {
    const response = mockModelsResponse; // await listModels();
    let formatted;
    try {
        formatted = JSON.stringify(JSON.parse(response), null, 1);
    } catch (e) {
        return response;
    }
    return formatted.trim();
};
const PORT = Number(process.env.PORT) || 3000;
app.listen(PORT, () => {
    console.log(`Server running at http://localhost:${PORT}`);
});
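// Example local run (assumes ts-node is available in the project; adjust to your toolchain):
//   OPEN_AI_KEY=... OPEN_AI_ORG=... npx ts-node server.ts
// then open http://localhost:3000 in a browser.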