chore(examples): refactor examples/openai -> examples/chatgpt (#299)
kanarus authored Dec 14, 2024
1 parent b93fc60 commit b564516
Showing 9 changed files with 58 additions and 122 deletions.
2 changes: 1 addition & 1 deletion examples/Cargo.toml
@@ -4,7 +4,7 @@ members = [
"sse",
"form",
"hello",
"openai",
"chatgpt",
"websocket",
"realworld",
"basic_auth",
2 changes: 1 addition & 1 deletion examples/openai/Cargo.toml → examples/chatgpt/Cargo.toml
@@ -1,5 +1,5 @@
[package]
name = "openai"
name = "chatgpt"
version = "0.1.0"
edition = "2021"

11 changes: 3 additions & 8 deletions examples/openai/src/error.rs → examples/chatgpt/src/error.rs
@@ -1,32 +1,27 @@
use ohkami::prelude::*;


#[derive(Debug)]
pub enum Error {
Fetch(reqwest::Error),
}

impl IntoResponse for Error {
fn into_response(self) -> Response {
println!("{self}");
match self {
Self::Fetch(e) => Response::InternalServerError().with_text(e.to_string()),
Self::Fetch(_) => Response::InternalServerError(),
}
}
}

const _: () = {
impl std::error::Error for Error {}

impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Fetch(e) => e.fmt(f)
}
std::fmt::Debug::fmt(self, f)
}
}
};

const _: () = {
impl From<reqwest::Error> for Error {
fn from(e: reqwest::Error) -> Self {
Self::Fetch(e)
Expand Down
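For reference, a std-only sketch of the error pattern this file lands on after the change: `Display` simply defers to `Debug`, and the `From` impl is what lets the handler use `?` on upstream failures. The `String` payload below is a stand-in for `reqwest::Error` so the snippet compiles on its own, and the ohkami `IntoResponse` half is omitted; it is an illustration, not the example's exact code.

```rust
// Minimal sketch: Display defers to Debug, and From<String>
// stands in for From<reqwest::Error>.
#[derive(Debug)]
enum Error {
    Fetch(String),
}

impl std::error::Error for Error {}

impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Debug::fmt(self, f)
    }
}

impl From<String> for Error {
    fn from(e: String) -> Self {
        Self::Fetch(e)
    }
}

fn fetch() -> Result<&'static str, String> {
    Err("connection refused".to_string())
}

fn handler() -> Result<&'static str, Error> {
    Ok(fetch()?) // String -> Error via the From impl above
}

fn main() {
    if let Err(e) = handler() {
        println!("{e}"); // Display falls through to Debug: Fetch("connection refused")
    }
}
```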
35 changes: 35 additions & 0 deletions examples/chatgpt/src/fangs.rs
@@ -0,0 +1,35 @@
use std::env;
use std::sync::OnceLock;
use ohkami::prelude::*;

#[derive(Clone)]
pub struct APIKey(pub &'static str);

impl APIKey {
pub fn from_env() -> Self {
static API_KEY: OnceLock<Option<String>> = OnceLock::new();

let api_key = API_KEY.get_or_init(|| {
match env::args().nth(1).as_deref() {
Some("--api-key") => env::args().nth(2),
_ => env::var("OPENAI_API_KEY").ok()
}
}).as_deref().expect("\
OpenAI API key is not found\n\
\n\
[USAGE]\n\
Run `cargo run` with one of \n\
a. Set an environment variable `OPENAI_API_KEY` to your API key\n\
b. Pass your API key by command line arguments `-- --api-key <here>`\n\
");

Self(api_key)
}
}

impl FangAction for APIKey {
async fn fore<'a>(&'a self, req: &'a mut Request) -> Result<(), Response> {
req.memorize(self.clone());
Ok(())
}
}
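The lookup in `from_env` boils down to: a `--api-key` command-line argument wins, otherwise fall back to the `OPENAI_API_KEY` environment variable, and panic with the usage text if neither is set. Below is a std-only sketch of that precedence with the `OnceLock` caching and the ohkami fang machinery stripped out; `resolve_api_key` is a name made up for this sketch.

```rust
// Sketch of the same precedence used by APIKey::from_env:
// `cargo run -- --api-key <KEY>` wins, else the OPENAI_API_KEY env var.
use std::env;

fn resolve_api_key() -> Option<String> {
    match env::args().nth(1).as_deref() {
        Some("--api-key") => env::args().nth(2),
        _ => env::var("OPENAI_API_KEY").ok(),
    }
}

fn main() {
    match resolve_api_key() {
        Some(key) => println!("found an API key ({} chars)", key.len()),
        None => eprintln!("no API key: pass `-- --api-key <KEY>` or set OPENAI_API_KEY"),
    }
}
```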
38 changes: 18 additions & 20 deletions examples/openai/src/main.rs → examples/chatgpt/src/main.rs
@@ -2,6 +2,7 @@ pub mod error;
pub mod fangs;
pub mod models;

use fangs::APIKey;
use error::Error;
use models::{ChatMessage, ChatCompletions, Role};

@@ -10,26 +11,22 @@ use ohkami::format::Text;
use ohkami::sse::DataStream;
use ohkami::util::StreamExt;


#[tokio::main]
async fn main() {
Ohkami::with((
fangs::WithAPIKey::from_env().expect("\
OpenAI API key is not found. \n\
\n\
[USAGE]\n\
Run `cargo run` with one of \n\
a. Set an environment variable `OPENAI_API_KEY` to your API key\n\
b. Pass your API key by command line arguments `-- --api-key <here>`\n\
"),
), (
let api_key = APIKey::from_env();

println!("Try:\n\
curl -v 'http://localhost:5050/chat-once' -H 'Content-Type: text/plain' -d '<your question>'\n\
");

Ohkami::with(api_key, (
"/chat-once".POST(relay_chat_completion),
)).howl("localhost:5050").await
}

pub async fn relay_chat_completion(
Memory(api_key): Memory<'_, &'static str>,
Text(message): Text<String>,
Memory(APIKey(api_key)): Memory<'_, APIKey>,
Text(content): Text<String>,
) -> Result<DataStream, Error> {
let mut gpt_response = reqwest::Client::new()
.post("https://api.openai.com/v1/chat/completions")
@@ -39,21 +36,22 @@ pub async fn relay_chat_completion(
stream: true,
messages: vec![
ChatMessage {
role: Role::user,
content: message,
role: Role::user,
content,
}
],
})
.send().await?
.bytes_stream();

Ok(DataStream::new(|mut s| async move {
let mut push_line = |mut line: String| {
let mut send_line = |mut line: String| {
#[cfg(debug_assertions)] {
assert!(line.ends_with("\n\n"))
}

line.truncate(line.len() - 2);
if line.ends_with("\n\n") {
line.truncate(line.len() - 2);
}

#[cfg(debug_assertions)] {
if line != "[DONE]" {
@@ -77,15 +75,15 @@
{
if let Some(data) = line.strip_prefix("data: ") {
if data.ends_with("\n\n") {
push_line(data.to_string())
send_line(data.to_string())
} else {
remaining = data.into()
}
} else {
#[cfg(debug_assertions)] {
assert!(line.ends_with("\n\n"))
}
push_line(std::mem::take(&mut remaining) + line)
send_line(std::mem::take(&mut remaining) + line)
}
}
}
Expand Down
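The subtle part of the new handler is the buffering around `send_line`: a chunk from reqwest's `bytes_stream()` can cut an SSE `data:` line in half, so the partial tail is parked in `remaining`, and the trailing "\n\n" is now trimmed only when present rather than asserted. The following std-only sketch reproduces that buffering with made-up chunk boundaries; how the example actually splits a chunk into lines is not shown in this hunk, so `split_inclusive` is used here only to keep the sketch self-contained, and the real handler runs inside a `DataStream` and forwards each line to the client instead of printing it.

```rust
// Sketch of the SSE line reassembly in relay_chat_completion, with
// hard-coded chunks standing in for reqwest's bytes_stream().
fn main() {
    // The second `data:` line is split across the two chunks on purpose.
    let chunks = ["data: {\"id\":1}\n\ndata: {\"id\"", ":2}\n\ndata: [DONE]\n\n"];

    let mut remaining = String::new();
    let send_line = |mut line: String| {
        if line.ends_with("\n\n") {
            line.truncate(line.len() - 2); // trim the SSE terminator if present
        }
        println!("relay: {line}");
    };

    for chunk in chunks {
        for line in chunk.split_inclusive("\n\n") {
            if let Some(data) = line.strip_prefix("data: ") {
                if data.ends_with("\n\n") {
                    send_line(data.to_string()); // complete line: forward it
                } else {
                    remaining = data.into(); // partial line: keep it for the next chunk
                }
            } else {
                // continuation of a previously buffered partial line
                send_line(std::mem::take(&mut remaining) + line);
            }
        }
    }
}
```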
1 change: 0 additions & 1 deletion examples/openai/src/models.rs → examples/chatgpt/src/models.rs
@@ -1,6 +1,5 @@
use ohkami::serde::{Deserialize, Serialize};


#[derive(Serialize)]
pub struct ChatCompletions {
pub model: &'static str,
61 changes: 0 additions & 61 deletions examples/openai/src/bin/reqwest_chat_completion.rs

This file was deleted.

29 changes: 0 additions & 29 deletions examples/openai/src/fangs.rs

This file was deleted.

1 change: 0 additions & 1 deletion examples/openai/src/lib.rs

This file was deleted.
