
Commit f115a24

Added ability to change models using dropdown menu
1 parent d5c25d7 commit f115a24

File tree: 5 files changed, +132 −38 lines changed


README.md

Lines changed: 4 additions & 1 deletion

@@ -36,12 +36,15 @@ You can install this plugin via [BRAT](https://obsidian.md/plugins?id=obsidian42
 4. Click on `+` icon and press hotkey (e.g. `⌘ + M`)

 ## Roadmap
-- [ ] Ability to select models from the list instead of typing their names
+- [x] Ability to select models from the list instead of typing their names
 - [ ] Ability to share and apply presets (system prompt + prompt + model)
 - [ ] Additional AI providers (OpenAI, etc...)
 - [ ] Changing order of the prompts
 - [ ] Accounting your local documents in results as described here https://ollama.ai/blog/llms-in-obsidian

+## Other AI providers
+If you would like to use other providers, please let me know [in the discussions](https://github.com/pfrankov/obsidian-local-gpt/discussions/1).
+
 ## My other Obsidian plugins
 - [Colored Tags](https://github.com/pfrankov/obsidian-colored-tags) that colorizes tags in distinguishable colors.

src/LocalGPTSettingTab.ts

Lines changed: 85 additions & 30 deletions

@@ -1,4 +1,4 @@
-import {App, Notice, PluginSettingTab, Setting} from "obsidian";
+import {App, Notice, PluginSettingTab, requestUrl, Setting} from "obsidian";
 import {DEFAULT_SETTINGS} from "defaultSettings";
 import LocalGPT from "./main";
 import {LocalGPTAction} from "./interfaces";
@@ -7,6 +7,7 @@ export class LocalGPTSettingTab extends PluginSettingTab {
     plugin: LocalGPT;
     editEnabled: boolean = false;
     isNew: boolean = false;
+    modelsOptions: any = {};

     constructor(app: App, plugin: LocalGPT) {
         super(app, plugin);
@@ -18,31 +19,79 @@

         containerEl.empty();

-        new Setting(containerEl)
-            .setName("Ollama URL")
-            .setDesc("Default is http://localhost:11434")
-            .addText((text) =>
-                text
-                    .setPlaceholder("http://localhost:11434")
-                    .setValue(this.plugin.settings.ollamaUrl)
+        const aiProvider = new Setting(containerEl)
+            .setName('AI provider')
+            .setDesc('')
+            .addDropdown(dropdown =>
+                dropdown.addOptions({
+                    'ollama': 'Ollama',
+                })
+                    .setValue(String(this.plugin.settings.selectedProvider))
                     .onChange(async (value) => {
-                        this.plugin.settings.ollamaUrl = value;
+                        this.plugin.settings.selectedProvider = value;
                         await this.plugin.saveSettings();
+                        this.display()
                     })
-            );
+            )

-        new Setting(containerEl)
-            .setName("Default model")
-            .setDesc("Name of the default Ollama model to use for prompts")
-            .addText((text) =>
-                text
-                    .setPlaceholder("llama2")
-                    .setValue(this.plugin.settings.defaultModel)
-                    .onChange(async (value) => {
-                        this.plugin.settings.defaultModel = value;
-                        await this.plugin.saveSettings();
+        aiProvider.descEl.innerHTML = `If you would like to use other providers, please let me know <a href="https://github.com/pfrankov/obsidian-local-gpt/discussions/1">in the discussions</a>`
+
+        if (this.plugin.settings.selectedProvider === 'ollama') {
+            new Setting(containerEl)
+                .setName("Ollama URL")
+                .setDesc("Default is http://localhost:11434")
+                .addText((text) =>
+                    text
+                        .setPlaceholder("http://localhost:11434")
+                        .setValue(this.plugin.settings.providers.ollama.ollamaUrl)
+                        .onChange(async (value) => {
+                            this.plugin.settings.providers.ollama.ollamaUrl = value;
+                            await this.plugin.saveSettings();
+                        })
+                );
+
+            const ollamaDefaultModel = new Setting(containerEl)
+                .setName("Default model")
+                .setDesc("Name of the default Ollama model to use for prompts")
+            if (this.plugin.settings.providers.ollama.ollamaUrl) {
+                requestUrl(`${this.plugin.settings.providers.ollama.ollamaUrl}/api/tags`)
+                    .then(({json}) => {
+                        if (!json.models || json.models.length === 0) {
+                            return Promise.reject();
+                        }
+                        this.modelsOptions = json.models.reduce((acc: any, el: any) => {
+                            const name = el.name.replace(":latest", "");
+                            acc[name] = name;
+                            return acc;
+                        }, {})
+
+                        ollamaDefaultModel
+                            .addDropdown(dropdown =>
+                                dropdown.addOptions(this.modelsOptions)
+                                    .setValue(String(this.plugin.settings.providers.ollama.defaultModel))
+                                    .onChange(async (value) => {
+                                        this.plugin.settings.providers.ollama.defaultModel = value;
+                                        await this.plugin.saveSettings();
+                                    })
+
+                            )
+                            .addButton((button) =>
+                                button.setIcon('refresh-cw').onClick(async () => {
+                                    this.display()
+                                })
+                            )
                     })
-            );
+                    .catch(() => {
+                        ollamaDefaultModel.descEl.innerHTML = `Get the models from <a href="https://ollama.ai/library">Ollama library</a> or check that Ollama URL is correct.`
+                        ollamaDefaultModel.addButton((button) =>
+                            button.setIcon('refresh-cw').onClick(async () => {
+                                this.display()
+                            })
+                        )
+                    })
+            }
+        }
+

         const editingAction: LocalGPTAction = {
             name: "",
@@ -97,15 +146,21 @@
             });
         });

-        new Setting(containerEl)
-            .setName("Model")
-            .setDesc('Optional')
-            .addText((text) => {
-                text.setPlaceholder(this.plugin.settings.defaultModel);
-                text.onChange(async (value) => {
-                    editingAction.model = value;
-                });
-            });
+        if (this.plugin.settings.selectedProvider === 'ollama') {
+            new Setting(containerEl)
+                .setName("Model")
+                .setDesc('Optional')
+                .addDropdown(dropdown =>
+                    dropdown
+                        .addOption('', 'Default model')
+                        .addOptions(this.modelsOptions)
+                        .onChange(async (value) => {
+                            this.plugin.settings.providers.ollama.defaultModel = value;
+                            await this.plugin.saveSettings();
+                        })
+
+                )
+        }

         new Setting(containerEl)
             .setName("Replace selected text")

src/defaultSettings.ts

Lines changed: 8 additions & 2 deletions

@@ -1,8 +1,13 @@
 import {LocalGPTSettings} from "./interfaces";

 export const DEFAULT_SETTINGS: LocalGPTSettings = {
-    ollamaUrl: "http://localhost:11434",
-    defaultModel: "orca-mini",
+    providers: {
+        ollama: {
+            ollamaUrl: "http://localhost:11434",
+            defaultModel: "orca-mini",
+        }
+    },
+    selectedProvider: 'ollama',
     actions: [
         {
             name: "🪄 General help",
@@ -31,4 +36,5 @@ export const DEFAULT_SETTINGS: LocalGPTSettings = {
             system: "You are an AI assistant that follows instruction extremely well. Help as much as you can."
         }
     ],
+    _version: 1
 };

src/interfaces.ts

Lines changed: 10 additions & 2 deletions

@@ -1,7 +1,15 @@
 export interface LocalGPTSettings {
-    ollamaUrl: string;
-    defaultModel: string;
+    selectedProvider: string;
+    providers: OllamaProvider;
     actions: LocalGPTAction[];
+    _version: number;
+}
+
+export interface OllamaProvider {
+    ollama: {
+        ollamaUrl: string;
+        defaultModel: string;
+    }
 }

 export interface LocalGPTAction {
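
The settings schema is now nested per provider rather than flat, with `_version` reserved for migrations. For reference, a settings object conforming to the new interfaces looks roughly like this (values are examples only):

```ts
import { LocalGPTSettings } from "./interfaces";

const example: LocalGPTSettings = {
    selectedProvider: "ollama",
    providers: {
        ollama: {
            ollamaUrl: "http://localhost:11434",
            defaultModel: "orca-mini",
        },
    },
    actions: [],
    _version: 1,
};

// Call sites change accordingly:
//   before: this.settings.defaultModel
//   after:  this.settings.providers.ollama.defaultModel
```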

src/main.ts

Lines changed: 25 additions & 3 deletions

@@ -33,7 +33,7 @@
         this.settings.actions.forEach((action) => {
             const requestBody: OllamaRequestBody = {
                 prompt: action.prompt + "\n\n" + text,
-                model: action.model || this.settings.defaultModel,
+                model: action.model || this.settings.providers.ollama.defaultModel,
                 options: {
                     temperature: action.temperature || 0.2,
                 },
@@ -54,7 +54,7 @@

             requestUrl({
                 method: "POST",
-                url: `${this.settings.ollamaUrl}/api/generate`,
+                url: `${this.settings.providers.ollama.ollamaUrl}/api/generate`,
                 body: JSON.stringify(requestBody)
             })
                 .then(({json}) => {
@@ -98,7 +98,29 @@
     }

     async loadSettings() {
-        this.settings = Object.assign({}, DEFAULT_SETTINGS, await this.loadData());
+        const loadedData:LocalGPTSettings = await this.loadData();
+        let needToSave = false;
+
+        if (loadedData && !loadedData._version || loadedData._version < 1) {
+            needToSave = true;
+
+            loadedData.providers = DEFAULT_SETTINGS.providers;
+            // @ts-ignore
+            loadedData.providers.ollama.ollamaUrl = loadedData.ollamaUrl;
+            // @ts-ignore
+            delete loadedData.ollamaUrl;
+            // @ts-ignore
+            loadedData.providers.ollama.defaultModel = loadedData.defaultModel;
+            // @ts-ignore
+            delete loadedData.defaultModel;
+            loadedData.selectedProvider = DEFAULT_SETTINGS.selectedProvider;
+            loadedData._version = 2;
+        }
+        this.settings = Object.assign({}, DEFAULT_SETTINGS, loadedData);
+
+        if (needToSave) {
+            await this.saveData(this.settings);
+        }
     }

     reload() {
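
`loadSettings()` now migrates data saved by older plugin versions: when the stored object has no `_version`, the flat `ollamaUrl`/`defaultModel` fields are moved under `providers.ollama`, a provider is selected, `_version` is stamped, and the result is written back via `saveData`. A simplified sketch of that migration as a standalone function, assuming the shapes shown in the diff; the `LegacyData` type and `migrateSettings` name are illustrative, not part of the plugin:

```ts
import { LocalGPTSettings } from "./interfaces";
import { DEFAULT_SETTINGS } from "./defaultSettings";

// Shape written by plugin versions before this commit (no _version field).
interface LegacyData {
    ollamaUrl: string;
    defaultModel: string;
    actions: LocalGPTSettings["actions"];
}

function migrateSettings(loaded: LegacyData | LocalGPTSettings | null): LocalGPTSettings {
    if (loaded && !("_version" in loaded)) {
        const legacy = loaded as LegacyData;
        // Move the flat fields under the new per-provider namespace.
        return {
            ...DEFAULT_SETTINGS,
            providers: {
                ollama: {
                    ollamaUrl: legacy.ollamaUrl,
                    defaultModel: legacy.defaultModel,
                },
            },
            actions: legacy.actions,
            _version: 2,
        };
    }
    // Already migrated (or a fresh install): fill any gaps from the defaults.
    return Object.assign({}, DEFAULT_SETTINGS, loaded);
}
```

The actual `loadSettings()` mutates the loaded object in place and tracks a `needToSave` flag so the migrated settings are persisted immediately.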
