Skip to content

Commit

Permalink
fix(settings): Test connection failed without applying settings
Browse files Browse the repository at this point in the history
  • Loading branch information
lkk214 committed Oct 4, 2024
1 parent 3f81002 commit e883e38
Show file tree
Hide file tree
Showing 4 changed files with 32 additions and 18 deletions.
3 changes: 3 additions & 0 deletions core/src/main/kotlin/com/phodal/shirecore/llm/LlmConfig.kt
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,9 @@ class LlmConfig(
val responseFormat: String = "\$.choices[0].delta.content",
val messageKeys: Map<String, String> = mapOf(),
) {

/**
 * Reports whether this config carries enough information to reach an LLM:
 * both the API key and the model name must be non-blank. Used e.g. to
 * validate an unsaved configuration before a "test connection" attempt.
 */
fun checkAvailable(): Boolean = !(apiKey.isBlank() || model.isBlank())

companion object {
fun fromJson(modelConfig: JsonObject): LlmConfig? {
val title = modelConfig.findString("title") ?: return null
Expand Down
11 changes: 8 additions & 3 deletions core/src/main/kotlin/com/phodal/shirecore/llm/LlmProvider.kt
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,12 @@ interface LlmProvider {
* Checks if the given project is applicable for some operation.
*
* @param project the project to check for applicability
* @param llmConfig an optional configuration used to verify whether this LlmProvider is available —
* for example, an unsaved configuration used to test an LLM connection.
*
* @return true if the project is applicable, false otherwise
*/
fun isApplicable(project: Project): Boolean
fun isApplicable(project: Project, llmConfig: LlmConfig? = null): Boolean

/**
* Streams chat completion responses from the service.
Expand Down Expand Up @@ -80,10 +83,12 @@ interface LlmProvider {
* Returns an instance of LlmProvider based on the given Project.
*
* @param project the Project for which to find a suitable LlmProvider
* @param llmConfig an optional configuration used as a criterion when selecting a suitable LlmProvider.
*
* @return an instance of LlmProvider if a suitable provider is found, null otherwise
*/
fun provider(project: Project): LlmProvider? {
val providers = EP_NAME.extensions.filter { it.isApplicable(project) }
fun provider(project: Project, llmConfig: LlmConfig? = null): LlmProvider? {
val providers = EP_NAME.extensions.filter { it.isApplicable(project, llmConfig) }
return if (providers.isEmpty()) {
ShirelangNotifications.error(project, ShireCoreBundle.message("shire.llm.notfound"))
null
Expand Down
7 changes: 5 additions & 2 deletions src/main/kotlin/com/phodal/shire/llm/OpenAILikeProvider.kt
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,10 @@ class OpenAILikeProvider : CustomSSEHandler(), LlmProvider {
override var project: Project? = null
override fun clearMessage() = messages.clear()

override fun isApplicable(project: Project): Boolean {
/**
 * Availability check with three tiers, in priority order:
 * an explicitly supplied (possibly unsaved) [llmConfig], then the stored
 * configRunLlm() configuration, then the persisted settings API token.
 */
override fun isApplicable(project: Project, llmConfig: LlmConfig?): Boolean {
// An explicit config (e.g. unsaved settings being test-connected) is
// validated on its own and decides the result outright.
if (llmConfig != null) return llmConfig.checkAvailable()
// If a configRunLlm configuration exists and is usable, the provider is available
if (configRunLlm().let { it?.checkAvailable() == true }) return true
this.project = project
// dynamic check for the API key and model name
return ShireSettingsState.getInstance().apiToken.isNotEmpty()
Expand All @@ -66,7 +69,7 @@ class OpenAILikeProvider : CustomSSEHandler(), LlmProvider {
llmConfig: LlmConfig?
): Flow<String> {
(llmConfig ?: configRunLlm()).let {
if (it != null) {
if (it != null && it.checkAvailable()) {
modelName = it.model
temperature = it.temperature.toFloat()
key = it.apiKey
Expand Down
29 changes: 16 additions & 13 deletions src/main/kotlin/com/phodal/shire/settings/ShireSettingUi.kt
Original file line number Diff line number Diff line change
Expand Up @@ -132,20 +132,23 @@ class ShireSettingUi : ConfigurableUi<ShireSettingsState> {
testResultField.text = throwable.message ?: "Unknown error"
}) {
val flowString: Flow<String> =
LlmProvider.provider(project)
?.stream(
promptText = "hi",
systemPrompt = "",
keepHistory = false,
llmConfig = LlmConfig(
model = modelName.text,
apiKey = engineToken.text,
apiBase = apiHost.text,
temperature = temperatureField.text.toDoubleOrNull() ?: 0.0,
title = modelName.text,
LlmConfig(
model = modelName.text,
apiKey = engineToken.text,
apiBase = apiHost.text,
temperature = temperatureField.text.toDoubleOrNull() ?: 0.0,
title = modelName.text,
).let {
LlmProvider.provider(project, it)
?.stream(
promptText = "hi",
systemPrompt = "",
keepHistory = false,
llmConfig = it
)
)
?: throw Exception(ShireCoreBundle.message("shire.llm.notfound"))
?: throw Exception(ShireCoreBundle.message("shire.llm.notfound"))
}

flowString.collect {
testResultField.text += it
}
Expand Down

0 comments on commit e883e38

Please sign in to comment.