@@ -1,5 +1,10 @@
+//! AI Core client for Anthropic (Claude) API
+//!
+//! This client wraps the Anthropic client with AI Core authentication.
+
 use crate::{
     anthropic::{AnthropicClient, AuthProvider, DefaultMessageConverter, RequestCustomizer},
+    auth::TokenManager,
     types::*,
     LLMProvider, StreamingCallback,
 };
@@ -8,9 +13,7 @@ use async_trait::async_trait;
 use serde_json::Value;
 use std::sync::Arc;
 
-use super::auth::TokenManager;
-
-/// AiCore authentication provider using TokenManager
+/// AI Core authentication provider using TokenManager
 pub struct AiCoreAuthProvider {
     token_manager: Arc<TokenManager>,
 }
@@ -32,10 +35,10 @@ impl AuthProvider for AiCoreAuthProvider {
     }
 }
 
-/// AiCore request customizer
-pub struct AiCoreRequestCustomizer;
+/// AI Core request customizer for Anthropic API
+pub struct AiCoreAnthropicRequestCustomizer;
 
-impl RequestCustomizer for AiCoreRequestCustomizer {
+impl RequestCustomizer for AiCoreAnthropicRequestCustomizer {
     fn customize_request(&self, request: &mut serde_json::Value) -> Result<()> {
         if let Value::Object(ref mut map) = request {
             // Remove stream and model fields after URL routing is done
@@ -70,18 +73,19 @@ impl RequestCustomizer for AiCoreRequestCustomizer {
     }
 }
 
-pub struct AiCoreClient {
+/// AI Core client for Anthropic (Claude) models
+pub struct AiCoreAnthropicClient {
     anthropic_client: AnthropicClient,
     custom_config: Option<serde_json::Value>,
 }
 
-impl AiCoreClient {
+impl AiCoreAnthropicClient {
     fn create_anthropic_client(
         token_manager: Arc<TokenManager>,
         base_url: String,
     ) -> AnthropicClient {
         let auth_provider = Box::new(AiCoreAuthProvider::new(token_manager));
-        let request_customizer = Box::new(AiCoreRequestCustomizer);
+        let request_customizer = Box::new(AiCoreAnthropicRequestCustomizer);
         let message_converter = Box::new(DefaultMessageConverter::new());
 
         AnthropicClient::with_customization(
@@ -127,7 +131,7 @@ impl AiCoreClient {
 }
 
 #[async_trait]
-impl LLMProvider for AiCoreClient {
+impl LLMProvider for AiCoreAnthropicClient {
     async fn send_message(
         &mut self,
         request: LLMRequest,
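For orientation, a construction sketch (not part of the diff). It assumes the crate is named llm after crates/llm, that the auth module is public, and that a TokenManager instance is obtained elsewhere; the deployment URL is a placeholder. Only the renamed constructor, as called from mod.rs below, is used.

use std::sync::Arc;

use llm::aicore::AiCoreAnthropicClient;
use llm::auth::TokenManager;

fn build_client(token_manager: Arc<TokenManager>) -> AiCoreAnthropicClient {
    // Placeholder URL: real AI Core base URLs are instance- and
    // deployment-specific.
    AiCoreAnthropicClient::new(
        token_manager,
        "https://my-instance.ai-core.example/v2/inference/deployments/d0".to_string(),
    )
}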
69 changes: 69 additions & 0 deletions crates/llm/src/aicore/mod.rs
@@ -0,0 +1,69 @@
//! AI Core provider module
//!
//! AI Core acts as a proxy service that can route to different backend vendors.
//! This module provides support for multiple vendor API types:
//! - Anthropic (Claude models via Bedrock-style API)
//! - OpenAI (Chat Completions API)
//! - Vertex (Google Gemini API)

mod anthropic;
mod openai;
mod types;
mod vertex;

pub use anthropic::AiCoreAnthropicClient;
pub use openai::AiCoreOpenAIClient;
pub use types::AiCoreApiType;
pub use vertex::AiCoreVertexClient;

use crate::auth::TokenManager;
use crate::LLMProvider;
use std::path::Path;
use std::sync::Arc;

/// Create an AI Core client based on the API type
pub fn create_aicore_client(
    api_type: AiCoreApiType,
    token_manager: Arc<TokenManager>,
    base_url: String,
    model_id: String,
) -> Box<dyn LLMProvider> {
    match api_type {
        AiCoreApiType::Anthropic => Box::new(AiCoreAnthropicClient::new(token_manager, base_url)),
        AiCoreApiType::OpenAI => {
            Box::new(AiCoreOpenAIClient::new(token_manager, base_url, model_id))
        }
        AiCoreApiType::Vertex => {
            Box::new(AiCoreVertexClient::new(token_manager, base_url, model_id))
        }
    }
}

/// Create an AI Core client with recording capability
pub fn create_aicore_client_with_recorder<P: AsRef<Path>>(
    api_type: AiCoreApiType,
    token_manager: Arc<TokenManager>,
    base_url: String,
    model_id: String,
    recording_path: P,
) -> Box<dyn LLMProvider> {
    match api_type {
        AiCoreApiType::Anthropic => Box::new(AiCoreAnthropicClient::new_with_recorder(
            token_manager,
            base_url,
            recording_path,
        )),
        AiCoreApiType::OpenAI => Box::new(AiCoreOpenAIClient::new_with_recorder(
            token_manager,
            base_url,
            model_id,
            recording_path,
        )),
        AiCoreApiType::Vertex => Box::new(AiCoreVertexClient::new_with_recorder(
            token_manager,
            base_url,
            model_id,
            recording_path,
        )),
    }
}
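A sketch of calling these factory functions, under the same assumptions as above (llm crate name, a token manager from elsewhere); the URL, model IDs, and recording path are all made up for illustration.

use std::sync::Arc;

use llm::aicore::{create_aicore_client, create_aicore_client_with_recorder, AiCoreApiType};
use llm::auth::TokenManager;
use llm::LLMProvider;

fn providers(token_manager: Arc<TokenManager>) -> (Box<dyn LLMProvider>, Box<dyn LLMProvider>) {
    let base_url = "https://my-instance.ai-core.example/v2/inference/deployments/d0".to_string();
    // Route to the OpenAI-compatible backend; Anthropic is the default variant.
    let plain = create_aicore_client(
        AiCoreApiType::OpenAI,
        token_manager.clone(),
        base_url.clone(),
        "gpt-4o".to_string(),
    );
    // Same routing, with a recording path threaded through to the vendor client.
    let recorded = create_aicore_client_with_recorder(
        AiCoreApiType::Vertex,
        token_manager,
        base_url,
        "gemini-1.5-pro".to_string(),
        "/tmp/aicore-session.json",
    );
    (plain, recorded)
}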
119 changes: 119 additions & 0 deletions crates/llm/src/aicore/openai.rs
@@ -0,0 +1,119 @@
//! AI Core client for OpenAI Chat Completions API
//!
//! This client wraps the OpenAI client with AI Core authentication.

use crate::{
    auth::TokenManager,
    openai::{AuthProvider, OpenAIClient, RequestCustomizer},
    types::*,
    LLMProvider, StreamingCallback,
};
use anyhow::Result;
use async_trait::async_trait;
use std::sync::Arc;

/// AI Core authentication provider for OpenAI-style API
struct AiCoreOpenAIAuthProvider {
    token_manager: Arc<TokenManager>,
}

impl AiCoreOpenAIAuthProvider {
    fn new(token_manager: Arc<TokenManager>) -> Self {
        Self { token_manager }
    }
}

#[async_trait]
impl AuthProvider for AiCoreOpenAIAuthProvider {
    async fn get_auth_headers(&self) -> Result<Vec<(String, String)>> {
        let token = self.token_manager.get_valid_token().await?;
        Ok(vec![(
            "Authorization".to_string(),
            format!("Bearer {token}"),
        )])
    }
}

/// AI Core request customizer for OpenAI Chat Completions API
struct AiCoreOpenAIRequestCustomizer;

impl RequestCustomizer for AiCoreOpenAIRequestCustomizer {
    fn customize_request(&self, _request: &mut serde_json::Value) -> Result<()> {
        // No additional customization is needed for OpenAI-style requests.
        Ok(())
    }

    fn get_additional_headers(&self) -> Vec<(String, String)> {
        vec![
            ("AI-Resource-Group".to_string(), "default".to_string()),
            ("Content-Type".to_string(), "application/json".to_string()),
        ]
    }

    fn customize_url(&self, base_url: &str, _streaming: bool) -> String {
        // AI Core uses the /chat/completions endpoint for OpenAI-compatible models.
        format!("{base_url}/chat/completions")
    }
}

/// AI Core client for OpenAI Chat Completions API
pub struct AiCoreOpenAIClient {
    openai_client: OpenAIClient,
    custom_config: Option<serde_json::Value>,
}

impl AiCoreOpenAIClient {
    fn create_openai_client(
        token_manager: Arc<TokenManager>,
        base_url: String,
        model_id: String,
    ) -> OpenAIClient {
        let auth_provider = Box::new(AiCoreOpenAIAuthProvider::new(token_manager));
        let request_customizer = Box::new(AiCoreOpenAIRequestCustomizer);

        OpenAIClient::with_customization(model_id, base_url, auth_provider, request_customizer)
    }

    pub fn new(token_manager: Arc<TokenManager>, base_url: String, model_id: String) -> Self {
        let openai_client = Self::create_openai_client(token_manager, base_url, model_id);
        Self {
            openai_client,
            custom_config: None,
        }
    }

    /// Create a new client with recording capability
    ///
    /// Note: Recording is not yet implemented for the OpenAI client.
    /// This constructor exists for API consistency.
    pub fn new_with_recorder<P: AsRef<std::path::Path>>(
        token_manager: Arc<TokenManager>,
        base_url: String,
        model_id: String,
        _recording_path: P,
    ) -> Self {
        // TODO: Add recording support to OpenAIClient
        Self::new(token_manager, base_url, model_id)
    }

    /// Set custom model configuration to be merged into API requests
    pub fn with_custom_config(mut self, custom_config: serde_json::Value) -> Self {
        self.openai_client = self.openai_client.with_custom_config(custom_config.clone());
        self.custom_config = Some(custom_config);
        self
    }
}

#[async_trait]
impl LLMProvider for AiCoreOpenAIClient {
    async fn send_message(
        &mut self,
        request: LLMRequest,
        streaming_callback: Option<&StreamingCallback>,
    ) -> Result<LLMResponse> {
        // Delegate to the wrapped OpenAIClient
        self.openai_client
            .send_message(request, streaming_callback)
            .await
    }
}
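To make customize_url concrete: a test sketch that could sit at the bottom of this file (the customizer struct is module-private, so the test must live in-module; the base URL is invented).

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn appends_chat_completions_path() {
        let customizer = AiCoreOpenAIRequestCustomizer;
        let url = customizer.customize_url("https://host/v2/inference/deployments/d0", false);
        assert_eq!(url, "https://host/v2/inference/deployments/d0/chat/completions");
    }
}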
42 changes: 42 additions & 0 deletions crates/llm/src/aicore/types.rs
@@ -0,0 +1,42 @@
//! Types for AI Core provider configuration

use serde::{Deserialize, Serialize};

/// Specifies which vendor API type to use for an AI Core deployment
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum AiCoreApiType {
    /// Anthropic Claude API (Bedrock-style invoke/converse endpoints)
    #[default]
    Anthropic,
    /// OpenAI Chat Completions API
    OpenAI,
    /// Google Vertex AI / Gemini API
    Vertex,
}

impl std::fmt::Display for AiCoreApiType {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            AiCoreApiType::Anthropic => write!(f, "anthropic"),
            AiCoreApiType::OpenAI => write!(f, "openai"),
            AiCoreApiType::Vertex => write!(f, "vertex"),
        }
    }
}

impl std::str::FromStr for AiCoreApiType {
    type Err = anyhow::Error;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s.to_lowercase().as_str() {
            "anthropic" => Ok(AiCoreApiType::Anthropic),
            "openai" => Ok(AiCoreApiType::OpenAI),
            "vertex" => Ok(AiCoreApiType::Vertex),
            _ => Err(anyhow::anyhow!(
                "Unknown AI Core API type: '{}'. Expected one of: anthropic, openai, vertex",
                s
            )),
        }
    }
}
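Because AiCoreApiType derives lowercase serde (de)serialization and implements FromStr and Display, the same three names work for config files, CLI flags, and logging. A small sketch, usable as an in-module test body (serde_json is already used elsewhere in this diff):

use std::str::FromStr;

fn api_type_examples() -> anyhow::Result<()> {
    // FromStr is case-insensitive thanks to to_lowercase().
    let from_flag = AiCoreApiType::from_str("OpenAI")?;
    assert_eq!(from_flag, AiCoreApiType::OpenAI);

    // serde uses the lowercase representation via rename_all.
    let from_json: AiCoreApiType = serde_json::from_str("\"vertex\"")?;
    assert_eq!(from_json, AiCoreApiType::Vertex);

    // Display round-trips to the same lowercase form.
    assert_eq!(from_flag.to_string(), "openai");
    Ok(())
}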