Feature: Config Panel #1

Open · wants to merge 1 commit into main
@@ -11,6 +11,14 @@ import java.util.concurrent.TimeUnit
* tailored for the caching of code completion suggestions.
*/
object AICCCache {

/**
* Clears all entries from the cache.
*/
fun clear() {
cache.invalidateAll()
}

/**
* Checks if the cache contains a value for the specified key.
*
7 changes: 7 additions & 0 deletions src/main/kotlin/com/aicc/aicodecompletionideaplugin/LLM.kt
@@ -13,4 +13,11 @@ interface LLM {
* @return The generated completion suggestion, or null if no suggestion could be generated.
*/
fun call(prefix: String, suffix: String): String?

/**
* This method changes the current model used by the LLM.
*
* @param model The name of the model to be used.
*/
fun changeModel(model: String)
}
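
To illustrate the extended interface contract, here is a minimal sketch of a caller switching models at runtime. FakeLLM is a hypothetical stand-in used only for illustration and is not part of this change:

import com.aicc.aicodecompletionideaplugin.LLM

// Hypothetical implementation that echoes its inputs; illustration only.
class FakeLLM : LLM {
    private var model = "default-model"

    override fun call(prefix: String, suffix: String): String? =
        "[$model] completion between '$prefix' and '$suffix'"

    override fun changeModel(model: String) {
        this.model = model
    }
}

fun main() {
    val llm: LLM = FakeLLM()
    println(llm.call("fun add(a: Int, b: Int) = ", ""))
    llm.changeModel("codellama:13b-code")   // swap the model without recreating the LLM
    println(llm.call("fun add(a: Int, b: Int) = ", ""))
}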
22 changes: 16 additions & 6 deletions src/main/kotlin/com/aicc/aicodecompletionideaplugin/OllamaLLM.kt
@@ -12,6 +12,12 @@ import java.net.http.HttpTimeoutException
* and suffix.
*/
object OllamaLLM : LLM {

/**
* The model used for code generation in the Ollama API.
*/
private var model = "codellama:7b-code"

/**
* Attempts to generate a code completion suggestion by querying the Ollama API.
* It constructs a request with a combination of prefix and suffix, handling retries
@@ -28,7 +34,7 @@ object OllamaLLM : LLM {
val suggestion = try {
OllamaAPI(HOST).apply {
setRequestTimeoutSeconds(4)
}.generate(MODEL, "<PRE> $prefix <SUF>$suffix <MID>", options).response.let {
}.generate(model, "<PRE> $prefix <SUF>$suffix <MID>", options).response.let {
if (it.endsWith(END)) it.substring(0, it.length - END.length).trim(' ', '\t', '\n') else it
}
} catch (e: HttpTimeoutException) {
@@ -44,6 +50,15 @@
return null
}

/**
* Changes the model used by the LLM.
* It also clears the cache so suggestions from the previous model are not reused.
*/
override fun changeModel(model: String) {
this.model = model
AICCCache.clear()
}

/**
* Lazily initialized options for the Ollama API call.
* These options include settings such as the temperature for the generation process.
@@ -74,11 +89,6 @@
*/
private const val HOST = "http://localhost:11434/"

/**
* The model used for code generation in the Ollama API.
*/
private const val MODEL = "codellama:7b-code"

/**
* The end of text marker used in the responses from the Ollama API.
*/
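Because cache entries are keyed by the surrounding code rather than by the model that produced them, clearing the cache on a model switch prevents stale suggestions. A short sketch of the intended behavior, assuming a reachable Ollama server at the default host:

import com.aicc.aicodecompletionideaplugin.OllamaLLM

fun main() {
    // Served by the default model, codellama:7b-code.
    println(OllamaLLM.call("fun fib(n: Int): Int = ", ""))

    // changeModel() swaps the model and calls AICCCache.clear(), so the next
    // request is answered by the new model rather than by a cached suggestion.
    OllamaLLM.changeModel("codellama:13b-code")
    println(OllamaLLM.call("fun fib(n: Int): Int = ", ""))
}
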
@@ -0,0 +1,41 @@
package com.aicc.aicodecompletionideaplugin.config

import com.aicc.aicodecompletionideaplugin.OllamaLLM
import com.intellij.openapi.options.SearchableConfigurable
import javax.swing.JComponent

class AICCConfig : SearchableConfigurable {

private var panel: AICCSettingsPanel? = null

override fun createComponent(): JComponent {
return AICCSettingsPanel().also { panel = it }.mainPanel
}

override fun isModified(): Boolean {
val panel = this.panel ?: return false
val state = AICCState.getInstance()
return panel.modelField.text != state.model
}

override fun reset() {
val panel = this.panel ?: return
val state = AICCState.getInstance()
panel.modelField.text = state.model
OllamaLLM.changeModel(state.model)
}

override fun apply() {
val panel = this.panel ?: return
val state = AICCState.getInstance()
state.model = panel.modelField.text
OllamaLLM.changeModel(state.model)
}

override fun disposeUIResources() {
this.panel = null
}

override fun getDisplayName() = "AI code completion idea"
override fun getId() = "com.aicc.aicodecompletionideaplugin.config.AICCConfig"
}
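
For reference, a rough sketch of the order in which the settings dialog drives these callbacks (illustrative only; the platform, not plugin code, performs these calls):

// Illustrative driver for the SearchableConfigurable lifecycle implemented above.
fun simulateSettingsDialog(config: AICCConfig) {
    config.createComponent()        // build the Swing panel and remember it
    config.reset()                  // copy AICCState.model into the "Model" text field
    // ... the user edits the text field ...
    if (config.isModified()) {      // true once the field differs from the stored model
        config.apply()              // write the field back to AICCState and update OllamaLLM
    }
    config.disposeUIResources()     // release the panel reference
}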
@@ -0,0 +1,34 @@
<?xml version="1.0" encoding="UTF-8"?>
<form xmlns="http://www.intellij.com/uidesigner/form/" version="1" bind-to-class="com.aicc.aicodecompletionideaplugin.config.AICCSettingsPanel">
<grid id="27dc6" binding="mainPanel" layout-manager="GridLayoutManager" row-count="2" column-count="2" same-size-horizontally="false" same-size-vertically="false" hgap="-1" vgap="-1">
<margin top="0" left="0" bottom="0" right="0"/>
<constraints>
<xy x="20" y="20" width="500" height="400"/>
</constraints>
<properties/>
<border type="none"/>
<children>
<component id="1f787" class="javax.swing.JTextField" binding="modelField">
<constraints>
<grid row="0" column="1" row-span="1" col-span="1" vsize-policy="0" hsize-policy="6" anchor="8" fill="1" indent="0" use-parent-layout="false">
<preferred-size width="150" height="-1"/>
</grid>
</constraints>
<properties/>
</component>
<vspacer id="e63c3">
<constraints>
<grid row="1" column="1" row-span="1" col-span="1" vsize-policy="6" hsize-policy="1" anchor="0" fill="2" indent="0" use-parent-layout="false"/>
</constraints>
</vspacer>
<component id="c9628" class="javax.swing.JLabel">
<constraints>
<grid row="0" column="0" row-span="1" col-span="1" vsize-policy="0" hsize-policy="0" anchor="8" fill="0" indent="0" use-parent-layout="false"/>
</constraints>
<properties>
<text value="Model"/>
</properties>
</component>
</children>
</grid>
</form>
@@ -0,0 +1,15 @@
package com.aicc.aicodecompletionideaplugin.config

import com.intellij.ui.IdeBorderFactory
import javax.swing.JPanel
import javax.swing.JTextField

class AICCSettingsPanel {

lateinit var mainPanel: JPanel
lateinit var modelField: JTextField

init {
mainPanel.border = IdeBorderFactory.createTitledBorder("Plugin Settings")
}
}
@@ -0,0 +1,31 @@
package com.aicc.aicodecompletionideaplugin.config

import com.aicc.aicodecompletionideaplugin.OllamaLLM
import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.components.PersistentStateComponent
import com.intellij.openapi.components.Storage
import com.intellij.openapi.components.State
import com.intellij.util.xmlb.XmlSerializerUtil

@State(
name = "com.aicc.aicodecompletionideaplugin.config.AICCState",
storages = [Storage("aicodecompletionideaplugin.xml")]
)
class AICCState : PersistentStateComponent<AICCState> {

@JvmField
var model: String = "codellama:7b-code"

override fun getState(): AICCState = this

override fun loadState(state: AICCState) {
XmlSerializerUtil.copyBean(state, this)
OllamaLLM.changeModel(state.model)
}

companion object {
fun getInstance(): AICCState {
return ApplicationManager.getApplication().getService(AICCState::class.java)
}
}
}
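
As a usage note, any plugin code can read or update the persisted model through this application service. A small sketch follows; the helper functions are hypothetical and not part of this change:

import com.aicc.aicodecompletionideaplugin.OllamaLLM
import com.aicc.aicodecompletionideaplugin.config.AICCState

// Hypothetical helpers showing how the persisted state is consumed.
fun currentModel(): String = AICCState.getInstance().model

fun updateModel(newModel: String) {
    val state = AICCState.getInstance()
    state.model = newModel             // serialized to aicodecompletionideaplugin.xml by the platform
    OllamaLLM.changeModel(newModel)    // keep the runtime LLM in sync, as AICCConfig.apply() does
}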
6 changes: 6 additions & 0 deletions src/main/resources/META-INF/plugin.xml
@@ -24,6 +24,12 @@
<extensions defaultExtensionNs="com.intellij">
<inline.completion.provider implementation="com.aicc.aicodecompletionideaplugin.AICCInlineCompletionProvider"/>
<statusBarWidgetFactory implementation="com.aicc.aicodecompletionideaplugin.AICCStatusBarWidgetFactory" id="AICCStatusBarWidgetFactory"/>
<applicationConfigurable
parentId="tools"
instance="com.aicc.aicodecompletionideaplugin.config.AICCConfig"
id="com.aicc.aicodecompletionideaplugin.config.AICCConfig"
displayName="AI code completion idea"/>
<applicationService serviceImplementation="com.aicc.aicodecompletionideaplugin.config.AICCState"/>
</extensions>
<actions>
<action id="com.aicc.aicodecompletionideaplugin.AICCStatsPopupDialogAction"