pub struct OpenAIClientBuilder { /* private fields */ }
Implementations§
impl OpenAIClientBuilder
impl OpenAIClientBuilder
pub fn new() -> Self
pub fn with_api_key(self, api_key: impl Into<String>) -> Self
pub fn with_api_key(self, api_key: impl Into<String>) -> Self
Examples found in repository?
examples/audio_translations.rs (line 8)
6async fn main() -> Result<(), Box<dyn std::error::Error>> {
7 let api_key = env::var("OPENAI_API_KEY").unwrap().to_string();
8 let mut client = OpenAIClient::builder().with_api_key(api_key).build()?;
9
10 let req = AudioTranslationRequest::new(
11 "examples/data/problem_cn.mp3".to_string(),
12 WHISPER_1.to_string(),
13 );
14
15 let result = client.audio_translation(req).await?;
16 println!("{:?}", result);
17
18 Ok(())
19}
More examples
examples/embedding.rs (line 9)
7async fn main() -> Result<(), Box<dyn std::error::Error>> {
8 let api_key = env::var("OPENAI_API_KEY").unwrap().to_string();
9 let mut client = OpenAIClient::builder().with_api_key(api_key).build()?;
10
11 let mut req = EmbeddingRequest::new(
12 TEXT_EMBEDDING_3_SMALL.to_string(),
13 vec!["story time".to_string(), "Once upon a time".to_string()],
14 );
15 req.dimensions = Some(10);
16
17 let result = client.embedding(req).await?;
18 println!("{:?}", result.data);
19
20 Ok(())
21}
examples/model.rs (line 7)
5async fn main() -> Result<(), Box<dyn std::error::Error>> {
6 let api_key = env::var("OPENAI_API_KEY").unwrap().to_string();
7 let mut client = OpenAIClient::builder().with_api_key(api_key).build()?;
8
9 let result = client.list_models().await?;
10 let models = result.data;
11
12 for model in models {
13 println!("Model id: {:?}", model.id);
14 }
15
16 let result = client.retrieve_model("gpt-4.1".to_string()).await?;
17 println!("Model id: {:?}", result.id);
18 println!("Model object: {:?}", result.object);
19
20 Ok(())
21}
examples/audio_speech.rs (line 8)
6async fn main() -> Result<(), Box<dyn std::error::Error>> {
7 let api_key = env::var("OPENAI_API_KEY").unwrap().to_string();
8 let mut client = OpenAIClient::builder().with_api_key(api_key).build()?;
9
10 let req = AudioSpeechRequest::new(
11 TTS_1.to_string(),
12 String::from("Money is not the problem, the problem is no money."),
13 audio::VOICE_ALLOY.to_string(),
14 String::from("examples/data/problem.mp3"),
15 );
16
17 let result = client.audio_speech(req).await?;
18 println!("{:?}", result);
19
20 Ok(())
21}
examples/completion.rs (line 8)
6async fn main() -> Result<(), Box<dyn std::error::Error>> {
7 let api_key = env::var("OPENAI_API_KEY").unwrap().to_string();
8 let mut client = OpenAIClient::builder().with_api_key(api_key).build()?;
9
10 let req = CompletionRequest::new(
11 completion::GPT3_TEXT_DAVINCI_003.to_string(),
12 String::from("What is Bitcoin?"),
13 )
14 .max_tokens(3000)
15 .temperature(0.9)
16 .top_p(1.0)
17 .stop(vec![String::from(" Human:"), String::from(" AI:")])
18 .presence_penalty(0.6)
19 .frequency_penalty(0.0);
20
21 let result = client.completion(req).await?;
22 println!("{:}", result.choices[0].text);
23
24 Ok(())
25}
examples/openrouter.rs (line 11)
7async fn main() -> Result<(), Box<dyn std::error::Error>> {
8 let api_key = env::var("OPENROUTER_API_KEY").unwrap().to_string();
9 let mut client = OpenAIClient::builder()
10 .with_endpoint("https://openrouter.ai/api/v1")
11 .with_api_key(api_key)
12 .build()?;
13
14 let req = ChatCompletionRequest::new(
15 GPT4_O_MINI.to_string(),
16 vec![chat_completion::ChatCompletionMessage {
17 role: chat_completion::MessageRole::user,
18 content: chat_completion::Content::Text(String::from("What is bitcoin?")),
19 name: None,
20 tool_calls: None,
21 tool_call_id: None,
22 }],
23 );
24
25 let result = client.chat_completion(req).await?;
26 println!("Content: {:?}", result.choices[0].message.content);
27 println!("Response Headers: {:?}", client.response_headers);
28
29 Ok(())
30}
pub fn with_endpoint(self, endpoint: impl Into<String>) -> Self
pub fn with_endpoint(self, endpoint: impl Into<String>) -> Self
Examples found in repository?
examples/openrouter.rs (line 10)
7async fn main() -> Result<(), Box<dyn std::error::Error>> {
8 let api_key = env::var("OPENROUTER_API_KEY").unwrap().to_string();
9 let mut client = OpenAIClient::builder()
10 .with_endpoint("https://openrouter.ai/api/v1")
11 .with_api_key(api_key)
12 .build()?;
13
14 let req = ChatCompletionRequest::new(
15 GPT4_O_MINI.to_string(),
16 vec![chat_completion::ChatCompletionMessage {
17 role: chat_completion::MessageRole::user,
18 content: chat_completion::Content::Text(String::from("What is bitcoin?")),
19 name: None,
20 tool_calls: None,
21 tool_call_id: None,
22 }],
23 );
24
25 let result = client.chat_completion(req).await?;
26 println!("Content: {:?}", result.choices[0].message.content);
27 println!("Response Headers: {:?}", client.response_headers);
28
29 Ok(())
30}
More examples
examples/openrouter_reasoning.rs (line 11)
8async fn main() -> Result<(), Box<dyn std::error::Error>> {
9 let api_key = env::var("OPENROUTER_API_KEY").unwrap().to_string();
10 let mut client = OpenAIClient::builder()
11 .with_endpoint("https://openrouter.ai/api/v1")
12 .with_api_key(api_key)
13 .build()?;
14
15 // Example 1: Using reasoning with effort
16 let mut req = ChatCompletionRequest::new(
17 "x-ai/grok-3-mini".to_string(), // Grok model that supports reasoning
18 vec![chat_completion::ChatCompletionMessage {
19 role: chat_completion::MessageRole::user,
20 content: chat_completion::Content::Text(String::from(
21 "Explain quantum computing in simple terms.",
22 )),
23 name: None,
24 tool_calls: None,
25 tool_call_id: None,
26 }],
27 );
28
29 // Set reasoning with high effort
30 req.reasoning = Some(Reasoning {
31 mode: Some(ReasoningMode::Effort {
32 effort: ReasoningEffort::High,
33 }),
34 exclude: Some(false), // Include reasoning in response
35 enabled: None,
36 });
37
38 let result = client.chat_completion(req).await?;
39 println!("Content: {:?}", result.choices[0].message.content);
40
41 // Example 2: Using reasoning with max_tokens
42 let mut req2 = ChatCompletionRequest::new(
43 "anthropic/claude-4-sonnet".to_string(), // Claude model that supports max_tokens
44 vec![chat_completion::ChatCompletionMessage {
45 role: chat_completion::MessageRole::user,
46 content: chat_completion::Content::Text(String::from(
47 "What's the most efficient sorting algorithm?",
48 )),
49 name: None,
50 tool_calls: None,
51 tool_call_id: None,
52 }],
53 );
54
55 // Set reasoning with max_tokens
56 req2.reasoning = Some(Reasoning {
57 mode: Some(ReasoningMode::MaxTokens { max_tokens: 2000 }),
58 exclude: None,
59 enabled: None,
60 });
61
62 let result2 = client.chat_completion(req2).await?;
63 println!("Content: {:?}", result2.choices[0].message.content);
64
65 Ok(())
66}
pub fn with_organization(self, organization: impl Into<String>) -> Self
pub fn with_proxy(self, proxy: impl Into<String>) -> Self
pub fn with_timeout(self, timeout: u64) -> Self
pub fn with_header( self, key: impl Into<String>, value: impl Into<String>, ) -> Self
pub fn build(self) -> Result<OpenAIClient, Box<dyn Error>>
pub fn build(self) -> Result<OpenAIClient, Box<dyn Error>>
Examples found in repository?
examples/audio_translations.rs (line 8)
6async fn main() -> Result<(), Box<dyn std::error::Error>> {
7 let api_key = env::var("OPENAI_API_KEY").unwrap().to_string();
8 let mut client = OpenAIClient::builder().with_api_key(api_key).build()?;
9
10 let req = AudioTranslationRequest::new(
11 "examples/data/problem_cn.mp3".to_string(),
12 WHISPER_1.to_string(),
13 );
14
15 let result = client.audio_translation(req).await?;
16 println!("{:?}", result);
17
18 Ok(())
19}
More examples
examples/embedding.rs (line 9)
7async fn main() -> Result<(), Box<dyn std::error::Error>> {
8 let api_key = env::var("OPENAI_API_KEY").unwrap().to_string();
9 let mut client = OpenAIClient::builder().with_api_key(api_key).build()?;
10
11 let mut req = EmbeddingRequest::new(
12 TEXT_EMBEDDING_3_SMALL.to_string(),
13 vec!["story time".to_string(), "Once upon a time".to_string()],
14 );
15 req.dimensions = Some(10);
16
17 let result = client.embedding(req).await?;
18 println!("{:?}", result.data);
19
20 Ok(())
21}
examples/model.rs (line 7)
5async fn main() -> Result<(), Box<dyn std::error::Error>> {
6 let api_key = env::var("OPENAI_API_KEY").unwrap().to_string();
7 let mut client = OpenAIClient::builder().with_api_key(api_key).build()?;
8
9 let result = client.list_models().await?;
10 let models = result.data;
11
12 for model in models {
13 println!("Model id: {:?}", model.id);
14 }
15
16 let result = client.retrieve_model("gpt-4.1".to_string()).await?;
17 println!("Model id: {:?}", result.id);
18 println!("Model object: {:?}", result.object);
19
20 Ok(())
21}
examples/audio_speech.rs (line 8)
6async fn main() -> Result<(), Box<dyn std::error::Error>> {
7 let api_key = env::var("OPENAI_API_KEY").unwrap().to_string();
8 let mut client = OpenAIClient::builder().with_api_key(api_key).build()?;
9
10 let req = AudioSpeechRequest::new(
11 TTS_1.to_string(),
12 String::from("Money is not the problem, the problem is no money."),
13 audio::VOICE_ALLOY.to_string(),
14 String::from("examples/data/problem.mp3"),
15 );
16
17 let result = client.audio_speech(req).await?;
18 println!("{:?}", result);
19
20 Ok(())
21}
examples/completion.rs (line 8)
6async fn main() -> Result<(), Box<dyn std::error::Error>> {
7 let api_key = env::var("OPENAI_API_KEY").unwrap().to_string();
8 let mut client = OpenAIClient::builder().with_api_key(api_key).build()?;
9
10 let req = CompletionRequest::new(
11 completion::GPT3_TEXT_DAVINCI_003.to_string(),
12 String::from("What is Bitcoin?"),
13 )
14 .max_tokens(3000)
15 .temperature(0.9)
16 .top_p(1.0)
17 .stop(vec![String::from(" Human:"), String::from(" AI:")])
18 .presence_penalty(0.6)
19 .frequency_penalty(0.0);
20
21 let result = client.completion(req).await?;
22 println!("{:}", result.choices[0].text);
23
24 Ok(())
25}
examples/openrouter.rs (line 12)
7async fn main() -> Result<(), Box<dyn std::error::Error>> {
8 let api_key = env::var("OPENROUTER_API_KEY").unwrap().to_string();
9 let mut client = OpenAIClient::builder()
10 .with_endpoint("https://openrouter.ai/api/v1")
11 .with_api_key(api_key)
12 .build()?;
13
14 let req = ChatCompletionRequest::new(
15 GPT4_O_MINI.to_string(),
16 vec![chat_completion::ChatCompletionMessage {
17 role: chat_completion::MessageRole::user,
18 content: chat_completion::Content::Text(String::from("What is bitcoin?")),
19 name: None,
20 tool_calls: None,
21 tool_call_id: None,
22 }],
23 );
24
25 let result = client.chat_completion(req).await?;
26 println!("Content: {:?}", result.choices[0].message.content);
27 println!("Response Headers: {:?}", client.response_headers);
28
29 Ok(())
30}
Trait Implementations§
impl Default for OpenAIClientBuilder
impl Default for OpenAIClientBuilder
fn default() -> OpenAIClientBuilder
fn default() -> OpenAIClientBuilder
Returns the “default value” for a type. Read more
Auto Trait Implementations§
impl Freeze for OpenAIClientBuilder
impl RefUnwindSafe for OpenAIClientBuilder
impl Send for OpenAIClientBuilder
impl Sync for OpenAIClientBuilder
impl Unpin for OpenAIClientBuilder
impl UnwindSafe for OpenAIClientBuilder
Blanket Implementations§
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more