Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 6 additions & 1 deletion src/BaselineOfLLMAPI/BaselineOfLLMAPI.class.st
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,11 @@ BaselineOfLLMAPI >> defineDependencies: spec [

spec
baseline: 'NeoJSON'
with: [ spec repository: 'github://svenvc/NeoJSON/repository' ]
with: [ spec repository: 'github://svenvc/NeoJSON/repository' ].

spec
baseline: 'Mocketry'
with: [ spec repository: 'github://dionisiydk/Mocketry' ]
]

{ #category : 'baselines' }
Expand All @@ -31,6 +35,7 @@ BaselineOfLLMAPI >> defineGroups: spec [
BaselineOfLLMAPI >> definePackages: spec [
	"Declare every package of the project and the packages it requires to load"

spec package: 'LLM-API' with: [ spec requires: #( 'NeoJSON' ) ].
spec package: 'LLM-API-Tests' with: [ spec requires: #( 'LLM-API' 'Mocketry' ) ].
spec package: 'LLM-API-Example' with: [ spec requires: #( 'LLM-API' ) ].
spec package: 'LLM-Spec' with: [ spec requires: #( 'LLM-API' ) ]
]
32 changes: 32 additions & 0 deletions src/LLM-API-Tests/LLMAPIChatTest.class.st
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
"
A LLMAPIChatTest is a test class for testing the behavior of LLMAPIChat
"
Class {
#name : 'LLMAPIChatTest',
#superclass : 'TestCase',
#instVars : [
'api'
],
#category : 'LLM-API-Tests',
#package : 'LLM-API-Tests'
}

{ #category : 'running' }
LLMAPIChatTest >> setUp [

super setUp.

api := LLMAPI chat
]

{ #category : 'running' }
LLMAPIChatTest >> testCheckModelExist [

self should: [ api performRequest ] raise: LLMPayloadNoModelError
]

{ #category : 'running' }
LLMAPIChatTest >> testCorrectInstance [

self assert: api class equals: LLMAPIChat
]
29 changes: 29 additions & 0 deletions src/LLM-API-Tests/LLMAPIFimTest.class.st
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
"
A LLMAPIFimTest is a test class for testing the behavior of LLMAPIFim
"
Class {
	#name : 'LLMAPIFimTest',
	#superclass : 'TestCase',
	#instVars : [
		'api'
	],
	#category : 'LLM-API-Tests',
	#package : 'LLM-API-Tests'
}

{ #category : 'running' }
LLMAPIFimTest >> setUp [

	super setUp.

	"Each test works on a fresh fim API client"
	api := LLMAPI fim
]

{ #category : 'tests' }
LLMAPIFimTest >> testCheckModelExist [
	"Performing a request while the payload has no model must signal LLMPayloadNoModelError"

	self should: [ api performRequest ] raise: LLMPayloadNoModelError
]

{ #category : 'tests' }
LLMAPIFimTest >> testCorrectInstance [
	"LLMAPI class>>fim must answer an LLMAPIFim instance"

	self assert: api class equals: LLMAPIFim
]
1 change: 1 addition & 0 deletions src/LLM-API-Tests/package.st
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Package { #name : 'LLM-API-Tests' }
90 changes: 75 additions & 15 deletions src/LLM-API/LLMAPI.class.st
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,14 @@ Class {
'port',
'client',
'content',
'apiKey'
'apiKey',
'https'
],
#classInstVars : [
'apiKey',
'host'
'host',
'port',
'https'
],
#category : 'LLM-API',
#package : 'LLM-API'
Expand Down Expand Up @@ -52,6 +55,18 @@ LLMAPI class >> host: anObject [
host := anObject
]

{ #category : 'accessing' }
LLMAPI class >> https [
	"Answer the default https flag applied to new instances (true, false, or nil when never configured)"

^ https
]

{ #category : 'accessing' }
LLMAPI class >> https: aBoolean [
	"Set the default https flag applied to new instances (see LLMAPI>>initialize)"

https := aBoolean
]

{ #category : 'as yet unclassified' }
LLMAPI class >> llmSettingOn: aBuilder [

Expand All @@ -60,18 +75,41 @@ LLMAPI class >> llmSettingOn: aBuilder [
parent: #tools;
name: 'LLM API';
with: [
(aBuilder setting: #apiKey)
order: -100000;
label: 'LLM API Key';
target: self;
default: '';
ghostHelp: 'My key'.
(aBuilder setting: #host)
order: 0;
label: 'LLM host';
target: self;
default: 'api.mistral.ai';
ghostHelp: 'api.mistral.ai' ]
(aBuilder setting: #apiKey)
order: -100000;
label: 'LLM API Key';
target: self;
default: '';
ghostHelp: 'My key'.
(aBuilder setting: #host)
order: 0;
label: 'LLM host';
target: self;
default: 'api.mistral.ai';
ghostHelp: 'api.mistral.ai'.
(aBuilder setting: #port)
order: 0;
label: 'LLM port';
target: self;
default: 443;
ghostHelp: '443'.
(aBuilder setting: #https)
order: 0;
label: 'LLM host uses ssl (https)';
target: self;
default: true ]
]

{ #category : 'accessing' }
LLMAPI class >> port [
	"Answer the default port applied to new instances (nil when never configured; the settings default is 443)"

^ port
]

{ #category : 'accessing' }
LLMAPI class >> port: aPortNumber [
	"Set the default port applied to new instances (see LLMAPI>>initialize)"

port := aPortNumber
]

{ #category : 'accessing' }
Expand Down Expand Up @@ -122,13 +160,28 @@ LLMAPI >> host: anObject [
host := anObject
]

{ #category : 'accessing' }
LLMAPI >> https [
	"Answer this client's https flag; nil means 'not set' and is treated as https by prepareRequest"

^ https
]

{ #category : 'accessing' }
LLMAPI >> https: aBoolean [
	"Set whether requests use https (true) or plain http (false)"

https := aBoolean
]

{ #category : 'initialization' }
LLMAPI >> initialize [
	"Create the underlying HTTP client and seed connection settings from the class-side defaults
	(which are backed by the system settings browser); any of them may be nil when unconfigured"

super initialize.
client := ZnClient new.
self apiKey: LLMAPI apiKey.
self host: LLMAPI host.
self port: LLMAPI port.
self https: LLMAPI https.

]

{ #category : 'accessing' }
Expand Down Expand Up @@ -172,11 +225,18 @@ LLMAPI >> port: anObject [

{ #category : 'accessing' }
LLMAPI >> prepareRequest [
"Validate the payload, then configure the HTTP client (auth, JSON, scheme, host, port, path).
Signals LLMPayloadNoModelError when the payload has no model."

(self payload model isNil or: [ self payload model isEmpty ])
ifTrue: [ LLMPayloadNoModelError signal ].
"Configure the client for the request"
self apiKey ifNotNil: [ :_apiKey |
client setBearerAuthentication: _apiKey ].
client forJsonREST.
client https.
"nil https means 'not configured' and defaults to https; only an explicit false selects http"
(self https isNil or: [ self https ])
ifTrue: [ client https ]
ifFalse: [ client http ].
client host: self host.
client port: self port.
client path: self path
]
24 changes: 23 additions & 1 deletion src/LLM-API/LLMAPIChat.class.st
Original file line number Diff line number Diff line change
@@ -1,3 +1,25 @@
"
# Example with ollama

```st
api := LLMAPI chat.
api host: '127.0.0.1'.
api port: 11434.
api apiKey: nil.

api payload
temperature: 0.5;
model: 'devstral';
top_p: 1;
max_tokens: 250;
messages: {
LLMAPIChatObjectMessage role: 'system' content: 'You are a useful assistant'.
LLMAPIChatObjectMessage role: 'user' content: 'How to write hello world in Pharo?'.
}.

result := api performRequest.
```
"
Class {
#name : 'LLMAPIChat',
#superclass : 'LLMAPI',
Expand All @@ -16,7 +38,7 @@ LLMAPIChat >> payloadClass [
^ LLMAPIChatObjectPayload
]

{ #category : 'accessing' }
{ #category : 'api' }
LLMAPIChat >> performRequest [

| intermediateResult |
Expand Down
2 changes: 1 addition & 1 deletion src/LLM-API/LLMAPIFim.class.st
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ LLMAPIFim >> payloadClass [
^ LLMAPIFimObjectPayload
]

{ #category : 'accessing' }
{ #category : 'api' }
LLMAPIFim >> performRequest [

self prepareRequest.
Expand Down
13 changes: 13 additions & 0 deletions src/LLM-API/LLMPayloadNoModelError.class.st
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
"
I am signaled when an LLMAPI request is attempted while the payload has no model set.
See LLMAPI>>prepareRequest.
"
Class {
#name : 'LLMPayloadNoModelError',
#superclass : 'Error',
#category : 'LLM-API',
#package : 'LLM-API'
}

{ #category : 'default' }
LLMPayloadNoModelError >> defaultDescription [
"Return a textual description of the exception."

^ 'No model set in the payload. Add a model before calling the API.'
]