diff --git a/src/LLM-API/LLMAPI.class.st b/src/LLM-API/LLMAPI.class.st
index 8a481e2..3c3008a 100644
--- a/src/LLM-API/LLMAPI.class.st
+++ b/src/LLM-API/LLMAPI.class.st
@@ -73,7 +73,7 @@ LLMAPI class >> llmSettingOn: aBuilder [
 
 	(aBuilder group: #LLMAPI)
 		parent: #tools;
-		name: 'LLM API';
+		name: self settingName;
 		with: [
 			(aBuilder setting: #apiKey)
 				order: -100000;
@@ -112,6 +112,13 @@ LLMAPI class >> port: aPortNumber [
 	port := aPortNumber
 ]
 
+{ #category : 'accessing' }
+LLMAPI class >> settingName [
+	"Display name of the LLM API settings group, shared with subclasses."
+
+	^ 'LLM API'
+]
+
 { #category : 'accessing' }
 LLMAPI >> apiKey [
 
diff --git a/src/LLM-API/LLMAPIChat.class.st b/src/LLM-API/LLMAPIChat.class.st
index 9bbf317..72e6d33 100644
--- a/src/LLM-API/LLMAPIChat.class.st
+++ b/src/LLM-API/LLMAPIChat.class.st
@@ -23,10 +23,58 @@ result := api performRequest.
 Class {
 	#name : 'LLMAPIChat',
 	#superclass : 'LLMAPI',
+	#classInstVars : [
+		'chatModel'
+	],
 	#category : 'LLM-API',
 	#package : 'LLM-API'
 }
 
+{ #category : 'accessing' }
+LLMAPIChat class >> chatModel [
+	"Answer the chat model name to use by default, falling back to the
+	 built-in default when the setting has never been written."
+
+	^ chatModel ifNil: [ self defaultChatModel ]
+]
+
+{ #category : 'accessing' }
+LLMAPIChat class >> chatModel: anObject [
+
+	chatModel := anObject
+]
+
+{ #category : 'accessing' }
+LLMAPIChat class >> defaultChatModel [
+	"Model name used when no explicit setting has been stored."
+
+	^ 'devstral:latest'
+]
+
+{ #category : 'settings' }
+LLMAPIChat class >> llmSettingOn: aBuilder [
+	"Declare the chat-specific settings, nested under the LLM API group."
+
+	<systemsettings>
+	(aBuilder group: #LLMChat)
+		parent: #LLMAPI;
+		name: 'LLM Chat';
+		with: [
+			(aBuilder setting: #chatModel)
+				order: 0;
+				label: 'LLM Chat default model';
+				target: self;
+				default: self defaultChatModel;
+				ghostHelp: 'devstral:latest' ]
+]
+
+{ #category : 'initialization' }
+LLMAPIChat >> initialize [
+
+	super initialize.
+	self payload model: self class chatModel
+]
+
 { #category : 'accessing' }
 LLMAPIChat >> path [
 
diff --git a/src/LLM-Spec/LLMAPISpecChat.class.st b/src/LLM-Spec/LLMAPISpecChat.class.st
index 369024d..003ad20 100644
--- a/src/LLM-Spec/LLMAPISpecChat.class.st
+++ b/src/LLM-Spec/LLMAPISpecChat.class.st
@@ -17,19 +17,16 @@ Class {
 	#package : 'LLM-Spec'
 }
 
-{ #category : 'initialization' }
+{ #category : 'api' }
 LLMAPISpecChat >> askLLM [
 
 	| api result resultContent |
 	history addMessage: (LLMAPIChatObjectMessage role: 'user' content: inputField text).
 
 	api := LLMAPI chat.
-	api host: 'api.mistral.ai'.
 	api payload
 		temperature: 0.5;
-		model: 'mistral-small-latest';
 		top_p: 1;
-		max_tokens: 250;
 		tools: (LLMAPIChatObjectTool allSubclasses collect: [ :clazz | clazz new ]);
 		messages: history model.
 
@@ -61,6 +58,13 @@ LLMAPISpecChat >> defaultLayout [
 		yourself
 ]
 
+{ #category : 'accessing' }
+LLMAPISpecChat >> history [
+	"Answer the conversation history."
+
+	^ history
+]
+
 { #category : 'initialization' }
 LLMAPISpecChat >> initializePresenters [
 