Adds OpenAI customModel settings field and adds logic to support it
cmgriffing committed Apr 10, 2024
1 parent 1082cba commit 7a7989d
Showing 5 changed files with 39 additions and 9 deletions.
18 changes: 15 additions & 3 deletions package.json
@@ -3321,7 +3321,8 @@
         "gpt-3.5-turbo",
         "gpt-3.5-turbo-16k",
         "gpt-3.5-turbo-0613",
-        "gpt-3.5-turbo-1106"
+        "gpt-3.5-turbo-1106",
+        "custom"
       ],
       "enumDescriptions": [
         "GPT-4 Turbo (Latest)",
@@ -3334,7 +3335,8 @@
         "GPT-3.5 Turbo",
         "GPT-3.5 Turbo 16k",
         "GPT-3.5 Turbo (June 13)",
-        "GPT-3.5 Turbo (Nov 6)"
+        "GPT-3.5 Turbo (Nov 6)",
+        "Custom"
       ],
       "markdownDescription": "Specifies the OpenAI model to use for GitLens' experimental AI features",
       "scope": "window",
@@ -3346,7 +3348,17 @@
         "null"
       ],
       "default": null,
-      "markdownDescription": "Specifies a custom URL to use for access to an OpenAI model via Azure. Azure URLs should be in the following format: https://{your-resource-name}.openai.azure.com/openai/deployments/{deployment-id}/chat/completions?api-version={api-version}",
+      "markdownDescription": "Specifies a custom URL to use for access to an OpenAI-compatible API. URLs could be in any format as long as the API responds in the same way as OpenAI. eg: https://{serverHost}/v1/chat/completions \n\nAzure URLs should be in the following format: https://{your-resource-name}.openai.azure.com/openai/deployments/{deployment-id}/chat/completions?api-version={api-version}",
       "scope": "window",
       "order": 102
     },
+    "gitlens.ai.experimental.openai.customModel": {
+      "type": [
+        "string",
+        "null"
+      ],
+      "default": null,
+      "markdownDescription": "Specifies a custom model to use with an OpenAI-compatible API. Only used when the OpenAI model is set to 'Custom'.",
+      "scope": "window",
+      "order": 102
+    },
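As a point of reference, the three settings contributed above might be combined in a user's settings.json roughly as follows. This is an illustrative sketch only; the endpoint and model name are placeholders for a local OpenAI-compatible server, not defaults shipped by GitLens.

// Illustrative example (a TypeScript object literal mirroring settings.json); all values are placeholders.
const exampleSettings = {
    'gitlens.ai.experimental.openai.model': 'custom',
    'gitlens.ai.experimental.openai.url': 'http://localhost:11434/v1/chat/completions',
    'gitlens.ai.experimental.openai.customModel': 'llama3',
};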
2 changes: 1 addition & 1 deletion src/ai/aiProviderService.ts
@@ -218,7 +218,7 @@ async function confirmAIProviderToS(provider: AIProvider, storage: Storage): Pro
     return false;
 }
 
-export function getMaxCharacters(model: OpenAIModels | AnthropicModels, outputLength: number): number {
+export function getMaxCharacters(model: OpenAIModels | AnthropicModels | string, outputLength: number): number {
     const tokensPerCharacter = 3.1;
 
     let tokens;
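Widening the parameter to accept any string means a custom model name will not match any known OpenAI or Anthropic model, so the token-limit lookup presumably needs a fallback. A minimal sketch of that idea follows, assuming a conservative default context window; the limits, the 4096-token fallback, and the formula are illustrative assumptions, not the actual GitLens implementation.

// Illustrative sketch only: derive a prompt character budget for an arbitrary model name.
const knownTokenLimits: Record<string, number> = {
    'gpt-4-turbo-preview': 128000,
    'gpt-3.5-turbo-1106': 16385,
    // ...other known models would be listed here
};

function estimateMaxCharacters(model: string, outputLength: number): number {
    const tokensPerCharacter = 3.1; // same heuristic constant as in the diff above
    const tokens = knownTokenLimits[model] ?? 4096; // assumed fallback for unrecognized/custom models
    // Convert the context window to characters, then reserve room for the expected output (in characters).
    return Math.floor(tokens * tokensPerCharacter - outputLength);
}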
26 changes: 21 additions & 5 deletions src/ai/openaiProvider.ts
@@ -25,7 +25,7 @@ export class OpenAIProvider implements AIProvider<'openai'> {
         return configuration.get('ai.experimental.openai.url') || 'https://api.openai.com/v1/chat/completions';
     }
 
-    private async getOrChooseModel(): Promise<OpenAIModels | undefined> {
+    private async getOrChooseModel(): Promise<OpenAIModels | string | undefined> {
         const model = this.model;
         if (model != null) return model;
 
@@ -40,7 +40,14 @@
         const apiKey = await getApiKey(this.container.storage);
         if (apiKey == null) return undefined;
 
-        const model = await this.getOrChooseModel();
+        let model = await this.getOrChooseModel();
+        if (model == null) return undefined;
+
+        if (model === 'custom') {
+            const customModel = configuration.get('ai.experimental.openai.customModel') || '';
+            model = customModel ? `${customModel}` : undefined;
+        }
+        // Might need to notify the user that they need to set a custom model name
         if (model == null) return undefined;
 
         let retries = 0;
@@ -88,6 +95,7 @@ Follow the user's instructions carefully, don't repeat yourself, don't include t
         };
 
         const rsp = await this.fetch(apiKey, request);
+
         if (!rsp.ok) {
             if (rsp.status === 404) {
                 throw new Error(
@@ -135,7 +143,14 @@ Follow the user's instructions carefully, don't repeat yourself, don't include t
         const apiKey = await getApiKey(this.container.storage);
         if (apiKey == null) return undefined;
 
-        const model = await this.getOrChooseModel();
+        let model = await this.getOrChooseModel();
+        if (model == null) return undefined;
+
+        if (model === 'custom') {
+            const customModel = configuration.get('ai.experimental.openai.customModel') || '';
+            model = customModel ? `${customModel}` : undefined;
+        }
+        // Might need to notify the user that they need to set a custom model name
         if (model == null) return undefined;
 
         let retries = 0;
@@ -302,10 +317,11 @@ export type OpenAIModels =
     | 'gpt-3.5-turbo-1106'
     | 'gpt-3.5-turbo'
     | 'gpt-3.5-turbo-16k'
-    | 'gpt-3.5-turbo-0613';
+    | 'gpt-3.5-turbo-0613'
+    | 'custom';
 
 interface OpenAIChatCompletionRequest {
-    model: OpenAIModels;
+    model: OpenAIModels | string;
     messages: { role: 'system' | 'user' | 'assistant'; content: string }[];
     temperature?: number;
     top_p?: number;
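The 'custom' handling above is duplicated in both request paths, each followed by the same comment about notifying the user. A possible follow-up, sketched here as an assumption rather than anything in this commit, is a single private helper on OpenAIProvider that resolves the effective model and surfaces the missing-setting case. `configuration` is the same helper already used in the diff; `window` is assumed to come from 'vscode'.

// Illustrative follow-up sketch only; not part of this commit.
private async resolveModel(): Promise<OpenAIModels | string | undefined> {
    let model = await this.getOrChooseModel();
    if (model == null) return undefined;

    if (model === 'custom') {
        const customModel = configuration.get('ai.experimental.openai.customModel') || '';
        if (!customModel) {
            // Surfaces the case the comments above mention; assumes the standard VS Code notification API.
            void window.showErrorMessage(
                "The OpenAI model is set to 'Custom', but no custom model name (gitlens.ai.experimental.openai.customModel) is configured.",
            );
            return undefined;
        }
        model = customModel;
    }

    return model;
}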
1 change: 1 addition & 0 deletions src/config.ts
@@ -15,6 +15,7 @@ export interface Config {
     readonly openai: {
         readonly model: OpenAIModels | null;
         readonly url: string | null;
+        readonly customModel: string | null;
     };
     readonly anthropic: {
         readonly model: AnthropicModels | null;
1 change: 1 addition & 0 deletions src/quickpicks/aiModelPicker.ts
@@ -22,6 +22,7 @@ export async function showAIModelPicker(provider?: AIProviders): Promise<ModelQu
     { label: 'OpenAI', description: 'GPT-4', provider: 'openai', model: 'gpt-4' },
     { label: 'OpenAI', description: 'GPT-4 32k', provider: 'openai', model: 'gpt-4-32k' },
     { label: 'OpenAI', description: 'GPT-3.5 Turbo', provider: 'openai', model: 'gpt-3.5-turbo-1106' },
+    { label: 'OpenAI', description: 'Custom', provider: 'openai', model: 'custom' },
     { label: 'Anthropic', kind: QuickPickItemKind.Separator },
     { label: 'Anthropic', description: 'Claude 3 Opus', provider: 'anthropic', model: 'claude-3-opus-20240229' },
     {
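Taken together with the settings above, picking the new 'Custom' item stores the literal 'custom' in the model setting, while the actual model name has to come from customModel. A hypothetical illustration (not GitLens code) of setting that combination through the standard VS Code configuration API; the endpoint and model name are supplied by the caller, nothing here is a default.

import * as vscode from 'vscode';

// Hypothetical illustration only: point GitLens at an OpenAI-compatible server and model.
async function useCustomOpenAIModel(endpoint: string, modelName: string): Promise<void> {
    const gitlens = vscode.workspace.getConfiguration('gitlens');
    await gitlens.update('ai.experimental.openai.model', 'custom', vscode.ConfigurationTarget.Global);
    await gitlens.update('ai.experimental.openai.url', endpoint, vscode.ConfigurationTarget.Global);
    await gitlens.update('ai.experimental.openai.customModel', modelName, vscode.ConfigurationTarget.Global);
}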
