diff --git a/src/BaselineOfLLMAPI/BaselineOfLLMAPI.class.st b/src/BaselineOfLLMAPI/BaselineOfLLMAPI.class.st index 2e2c7f3..b60f91a 100644 --- a/src/BaselineOfLLMAPI/BaselineOfLLMAPI.class.st +++ b/src/BaselineOfLLMAPI/BaselineOfLLMAPI.class.st @@ -20,7 +20,11 @@ BaselineOfLLMAPI >> defineDependencies: spec [ spec baseline: 'NeoJSON' - with: [ spec repository: 'github://svenvc/NeoJSON/repository' ] + with: [ spec repository: 'github://svenvc/NeoJSON/repository' ]. + + spec + baseline: 'Mocketry' + with: [ spec repository: 'github://dionisiydk/Mocketry' ] ] { #category : 'baselines' } @@ -31,6 +35,7 @@ BaselineOfLLMAPI >> defineGroups: spec [ BaselineOfLLMAPI >> definePackages: spec [ spec package: 'LLM-API' with: [ spec requires: #( 'NeoJSON' ) ]. + spec package: 'LLM-API-Tests' with: [ spec requires: #( 'LLM-API' 'Mocketry' ) ]. spec package: 'LLM-API-Example' with: [ spec requires: #( 'LLM-API' ) ]. spec package: 'LLM-Spec' with: [ spec requires: #( 'LLM-API' ) ] ] diff --git a/src/LLM-API-Tests/LLMAPIChatTest.class.st b/src/LLM-API-Tests/LLMAPIChatTest.class.st new file mode 100644 index 0000000..7b26e95 --- /dev/null +++ b/src/LLM-API-Tests/LLMAPIChatTest.class.st @@ -0,0 +1,32 @@ +" +A LLMAPIChatTest is a test class for testing the behavior of LLMAPIChat +" +Class { + #name : 'LLMAPIChatTest', + #superclass : 'TestCase', + #instVars : [ + 'api' + ], + #category : 'LLM-API-Tests', + #package : 'LLM-API-Tests' +} + +{ #category : 'running' } +LLMAPIChatTest >> setUp [ + + super setUp. 
+ + api := LLMAPI chat +] + +{ #category : 'running' } +LLMAPIChatTest >> testCheckModelExist [ + + self should: [ api performRequest ] raise: LLMPayloadNoModelError +] + +{ #category : 'running' } +LLMAPIChatTest >> testCorrectInstance [ + + self assert: api class equals: LLMAPIChat +] diff --git a/src/LLM-API-Tests/LLMAPIFimTest.class.st b/src/LLM-API-Tests/LLMAPIFimTest.class.st new file mode 100644 index 0000000..30ae864 --- /dev/null +++ b/src/LLM-API-Tests/LLMAPIFimTest.class.st @@ -0,0 +1,29 @@ +Class { + #name : 'LLMAPIFimTest', + #superclass : 'TestCase', + #instVars : [ + 'api' + ], + #category : 'LLM-API-Tests', + #package : 'LLM-API-Tests' +} + +{ #category : 'running' } +LLMAPIFimTest >> setUp [ + + super setUp. + + api := LLMAPI fim +] + +{ #category : 'running' } +LLMAPIFimTest >> testCheckModelExist [ + + self should: [ api performRequest ] raise: LLMPayloadNoModelError +] + +{ #category : 'running' } +LLMAPIFimTest >> testCorrectInstance [ + + self assert: api class equals: LLMAPIFim +] diff --git a/src/LLM-API-Tests/package.st b/src/LLM-API-Tests/package.st new file mode 100644 index 0000000..206696e --- /dev/null +++ b/src/LLM-API-Tests/package.st @@ -0,0 +1 @@ +Package { #name : 'LLM-API-Tests' } diff --git a/src/LLM-API/LLMAPI.class.st b/src/LLM-API/LLMAPI.class.st index 841eb16..8a481e2 100644 --- a/src/LLM-API/LLMAPI.class.st +++ b/src/LLM-API/LLMAPI.class.st @@ -6,11 +6,14 @@ Class { 'port', 'client', 'content', - 'apiKey' + 'apiKey', + 'https' ], #classInstVars : [ 'apiKey', - 'host' + 'host', + 'port', + 'https' ], #category : 'LLM-API', #package : 'LLM-API' @@ -52,6 +55,18 @@ LLMAPI class >> host: anObject [ host := anObject ] +{ #category : 'accessing' } +LLMAPI class >> https [ + + ^ https +] + +{ #category : 'accessing' } +LLMAPI class >> https: aBoolean [ + + https := aBoolean +] + { #category : 'as yet unclassified' } LLMAPI class >> llmSettingOn: aBuilder [ @@ -60,18 +75,41 @@ LLMAPI class >> llmSettingOn: aBuilder [ parent: #tools; 
name: 'LLM API'; with: [ - (aBuilder setting: #apiKey) - order: -100000; - label: 'LLM API Key'; - target: self; - default: ''; - ghostHelp: 'My key'. - (aBuilder setting: #host) - order: 0; - label: 'LLM host'; - target: self; - default: 'api.mistral.ai'; - ghostHelp: 'api.mistral.ai' ] + (aBuilder setting: #apiKey) + order: -100000; + label: 'LLM API Key'; + target: self; + default: ''; + ghostHelp: 'My key'. + (aBuilder setting: #host) + order: 0; + label: 'LLM host'; + target: self; + default: 'api.mistral.ai'; + ghostHelp: 'api.mistral.ai'. + (aBuilder setting: #port) + order: 0; + label: 'LLM port'; + target: self; + default: 443; + ghostHelp: '443'. + (aBuilder setting: #https) + order: 0; + label: 'LLM host uses ssl (https)'; + target: self; + default: true ] +] + +{ #category : 'accessing' } +LLMAPI class >> port [ + + ^ port +] + +{ #category : 'accessing' } +LLMAPI class >> port: aPortNumber [ + + port := aPortNumber ] { #category : 'accessing' } @@ -122,6 +160,18 @@ LLMAPI >> host: anObject [ host := anObject ] +{ #category : 'accessing' } +LLMAPI >> https [ + + ^ https +] + +{ #category : 'accessing' } +LLMAPI >> https: aBoolean [ + + https := aBoolean +] + { #category : 'initialization' } LLMAPI >> initialize [ @@ -129,6 +179,9 @@ LLMAPI >> initialize [ client := ZnClient new. self apiKey: LLMAPI apiKey. self host: LLMAPI host. + self port: LLMAPI port. + self https: LLMAPI https. + ] { #category : 'accessing' } @@ -172,11 +225,18 @@ LLMAPI >> port: anObject [ { #category : 'accessing' } LLMAPI >> prepareRequest [ + "check some properties" + (self payload model isNil or: [ self payload model isEmpty ]) + ifTrue: [ LLMPayloadNoModelError signal ]. + "Prepare request" self apiKey ifNotNil: [ :_apiKey | client setBearerAuthentication: _apiKey ]. client forJsonREST. - client https. + (self https isNil or: [ self https ]) + ifTrue: [ client https ] + ifFalse: [ client http ]. client host: self host. + client port: self port. 
client path: self path ] diff --git a/src/LLM-API/LLMAPIChat.class.st b/src/LLM-API/LLMAPIChat.class.st index 2df7f40..9bbf317 100644 --- a/src/LLM-API/LLMAPIChat.class.st +++ b/src/LLM-API/LLMAPIChat.class.st @@ -1,3 +1,25 @@ +" +# Example with ollama + +```st +api := LLMAPI chat. +api host: '127.0.0.1'. +api port: 11434. +api apiKey: nil. + +api payload + temperature: 0.5; + model: 'devstral'; + top_p: 1; + max_tokens: 250; + messages: { + LLMAPIChatObjectMessage role: 'system' content: 'You are a useful assistant'. + LLMAPIChatObjectMessage role: 'user' content: 'How to write hello world in Pharo?'. + }. + +result := api performRequest. +``` +" Class { #name : 'LLMAPIChat', #superclass : 'LLMAPI', @@ -16,7 +38,7 @@ LLMAPIChat >> payloadClass [ ^ LLMAPIChatObjectPayload ] -{ #category : 'accessing' } +{ #category : 'api' } LLMAPIChat >> performRequest [ | intermediateResult | diff --git a/src/LLM-API/LLMAPIFim.class.st b/src/LLM-API/LLMAPIFim.class.st index eddccd5..3ca049c 100644 --- a/src/LLM-API/LLMAPIFim.class.st +++ b/src/LLM-API/LLMAPIFim.class.st @@ -16,7 +16,7 @@ LLMAPIFim >> payloadClass [ ^ LLMAPIFimObjectPayload ] -{ #category : 'accessing' } +{ #category : 'api' } LLMAPIFim >> performRequest [ self prepareRequest. diff --git a/src/LLM-API/LLMPayloadNoModelError.class.st b/src/LLM-API/LLMPayloadNoModelError.class.st new file mode 100644 index 0000000..4803d39 --- /dev/null +++ b/src/LLM-API/LLMPayloadNoModelError.class.st @@ -0,0 +1,13 @@ +Class { + #name : 'LLMPayloadNoModelError', + #superclass : 'Error', + #category : 'LLM-API', + #package : 'LLM-API' +} + +{ #category : 'default' } +LLMPayloadNoModelError >> defaultDescription [ + "Return a textual description of the exception." + + ^ 'No model set in the payload. Add a model before calling the API.' +]