feat: Add dynamic Ollama model selection dropdown (#154)
* feat: Add dynamic Ollama model selection dropdown

- Replace text input for Ollama model with dropdown populated from API
- Add automatic model list fetching when base URL changes

* refactor: move updateModelOptions to class method
kevin-on authored Dec 2, 2024
1 parent 4cc0fa1 commit 160377c
Showing 2 changed files with 95 additions and 21 deletions.
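For orientation before the diff: the commit replaces the free-text model field with a dropdown populated from the Ollama API and refreshed whenever the base URL changes. Below is a condensed sketch of that flow using the names introduced in the diff (getOllamaModels, DropdownComponent); the Obsidian Setting plumbing and settings persistence are omitted, so treat it as an illustration rather than the exact implementation.

```ts
import { DropdownComponent } from 'obsidian'

import { getOllamaModels } from '../utils/ollama'

// Refresh the model dropdown from the Ollama instance at `baseUrl`.
// Called once on initial render and again from the base URL field's
// onChange handler (see the diff below).
async function refreshOllamaModelDropdown(
  baseUrl: string,
  dropdown: DropdownComponent,
): Promise<void> {
  const models = await getOllamaModels(baseUrl) // returns [] on fetch failure

  dropdown.selectEl.empty()
  if (models.length > 0) {
    // One option per model name; select the first model by default.
    models.forEach((name) => dropdown.addOption(name, name))
    dropdown.setValue(models[0])
  } else {
    // No models reachable: show a placeholder so the user knows to fix the URL.
    dropdown.addOption('', 'No models found - check base URL')
    dropdown.setValue('')
  }
}
```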
104 changes: 83 additions & 21 deletions src/settings/SettingTab.tsx
@@ -1,4 +1,11 @@
import { App, Modal, PluginSettingTab, Setting, TFile } from 'obsidian'
import {
App,
DropdownComponent,
Modal,
PluginSettingTab,
Setting,
TFile,
} from 'obsidian'

import {
APPLY_MODEL_OPTIONS,
@@ -7,6 +14,7 @@ import {
} from '../constants'
import SmartCopilotPlugin from '../main'
import { findFilesMatchingPatterns } from '../utils/globUtils'
import { getOllamaModels } from '../utils/ollama'

export class SmartCopilotSettingTab extends PluginSettingTab {
plugin: SmartCopilotPlugin
@@ -203,13 +211,15 @@ export class SmartCopilotSettingTab extends PluginSettingTab {
const ollamaContainer = containerEl.createDiv(
'smtcmp-settings-model-container',
)
let modelDropdown: DropdownComponent | null = null // Store reference to the dropdown

// Base URL Setting
new Setting(ollamaContainer)
.setName('Base URL')
.setDesc(
'The API endpoint for your Ollama service (e.g., http://127.0.0.1:11434)',
)
.addText((text) =>
.addText((text) => {
text
.setPlaceholder('http://127.0.0.1:11434')
.setValue(this.plugin.settings.ollamaChatModel.baseUrl || '')
@@ -221,28 +231,23 @@
baseUrl: value,
},
})
}),
)
if (modelDropdown) {
await this.updateOllamaModelOptions(value, modelDropdown)
}
})
})

// Model Setting
new Setting(ollamaContainer)
.setName('Model Name')
.setDesc(
'The specific model to use with your service (e.g., llama-3.1-70b, mixtral-8x7b)',
)
.addText((text) =>
text
.setPlaceholder('llama-3.1-70b')
.setValue(this.plugin.settings.ollamaChatModel.model || '')
.onChange(async (value) => {
await this.plugin.setSettings({
...this.plugin.settings,
ollamaChatModel: {
...this.plugin.settings.ollamaChatModel,
model: value,
},
})
}),
)
.setDesc('Select a model from your Ollama instance')
.addDropdown(async (dropdown) => {
modelDropdown = dropdown
await this.updateOllamaModelOptions(
this.plugin.settings.ollamaChatModel.baseUrl,
dropdown,
)
})
}

renderOpenAICompatibleChatModelSettings(containerEl: HTMLElement): void {
@@ -582,6 +587,63 @@ export class SmartCopilotSettingTab extends PluginSettingTab {
}),
)
}

private async updateOllamaModelOptions(
baseUrl: string,
dropdown: DropdownComponent,
): Promise<void> {
const currentValue = dropdown.getValue()
dropdown.selectEl.empty()

try {
const models = await getOllamaModels(baseUrl)
if (models.length > 0) {
const modelOptions = models.reduce<Record<string, string>>(
(acc, model) => {
acc[model] = model
return acc
},
{},
)

dropdown.addOptions(modelOptions)

if (models.includes(currentValue)) {
dropdown.setValue(currentValue)
} else {
dropdown.setValue(models[0])
await this.plugin.setSettings({
...this.plugin.settings,
ollamaChatModel: {
...this.plugin.settings.ollamaChatModel,
model: models[0],
},
})
}
} else {
dropdown.addOption('', 'No models found - check base URL')
dropdown.setValue('')
await this.plugin.setSettings({
...this.plugin.settings,
ollamaChatModel: {
...this.plugin.settings.ollamaChatModel,
model: '',
},
})
}
} catch (error) {
console.error('Failed to fetch Ollama models:', error)
dropdown.addOption('', 'No models found - check base URL')
dropdown.setValue('')
await this.plugin.setSettings({
...this.plugin.settings,
ollamaChatModel: {
...this.plugin.settings.ollamaChatModel,
model: '',
},
})
}
}
}

class ExcludedFilesModal extends Modal {
12 changes: 12 additions & 0 deletions src/utils/ollama.ts
@@ -0,0 +1,12 @@
import { requestUrl } from 'obsidian'

export async function getOllamaModels(ollamaUrl: string) {
try {
const response = (await requestUrl(`${ollamaUrl}/api/tags`)).json as {
models: { name: string }[]
}
return response.models.map((model) => model.name)
} catch (error) {
return []
}
}
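For reference, a hypothetical, trimmed example of the payload getOllamaModels expects from Ollama's /api/tags endpoint. Only models[].name is read, and the model names shown are placeholders; the real response also carries additional fields such as modified_at and size, which are ignored here.

```ts
// Hypothetical response from GET `${ollamaUrl}/api/tags`, trimmed to the one
// field getOllamaModels reads.
const exampleTagsResponse = {
  models: [{ name: 'llama3.1:70b' }, { name: 'mixtral:8x7b' }],
}

// getOllamaModels would return ['llama3.1:70b', 'mixtral:8x7b'] for this payload.
```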
