From b1b6c5fe9a88db4166f3908d080a71bdba2ace56 Mon Sep 17 00:00:00 2001
From: Scott Lessans
Date: Tue, 5 Aug 2025 11:18:34 -0700
Subject: [PATCH] fixed readme rust sample, added readme to lib.rs docs for doctest, bumped version

---
 Cargo.lock |  2 +-
 Cargo.toml |  7 +++++--
 README.md  | 14 +++++++-------
 src/lib.rs |  2 ++
 4 files changed, 15 insertions(+), 10 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 6dae6d9..6b774e8 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1317,7 +1317,7 @@ checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
 
 [[package]]
 name = "openai-harmony"
-version = "0.0.2-alpha.1"
+version = "0.0.2"
 dependencies = [
  "anyhow",
  "base64",
diff --git a/Cargo.toml b/Cargo.toml
index f57dd14..12ebc27 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "openai-harmony"
-version = "0.0.2-alpha.1"
+version = "0.0.2"
 edition = "2021"
 license = "Apache-2.0"
 repository = "https://github.com/openai/harmony"
@@ -42,7 +42,10 @@ reqwest = { version = "0.12.5", default-features = false, features = [
 ] }
 futures = "0.3"
 clap = { version = "4", features = ["derive"] }
-pyo3 = { version = "0.25.0", optional = true, features = ["extension-module", "abi3-py38"] }
+pyo3 = { version = "0.25.0", optional = true, features = [
+    "extension-module",
+    "abi3-py38",
+] }
 wasm-bindgen = { version = "0.2.100", optional = true, features = [
     "serde-serialize",
 ] }
diff --git a/README.md b/README.md
index 73cc841..d9c5f13 100644
--- a/README.md
+++ b/README.md
@@ -12,7 +12,7 @@ The [gpt-oss models][gpt-oss] were trained on the [harmony response format][harm
 
 The format enables the model to output to multiple different channels for chain of thought, and tool calling preambles along with regular responses. It also enables specifying various tool namespaces, and structured outputs along with a clear instruction hierarchy. [Check out the guide][harmony-format] to learn more about the format itself.
 
-```
+```text
 <|start|>system<|message|>You are ChatGPT, a large language model trained by OpenAI.
 Knowledge cutoff: 2024-06
 Current date: 2025-06-28
@@ -114,12 +114,12 @@ openai-harmony = { git = "https://github.com/openai/harmony" }
 ```rust
 use openai_harmony::chat::{Message, Role, Conversation};
 use openai_harmony::{HarmonyEncodingName, load_harmony_encoding};
+
 fn main() -> anyhow::Result<()> {
     let enc = load_harmony_encoding(HarmonyEncodingName::HarmonyGptOss)?;
-    let convo = Conversation::from_messages([
-        Message::from_role_and_content(Role::User, "Hello there!"),
-    ]);
-    let tokens = enc.render_conversation_for_completion(&convo, Role::Assistant)?;
+    let convo =
+        Conversation::from_messages([Message::from_role_and_content(Role::User, "Hello there!")]);
+    let tokens = enc.render_conversation_for_completion(&convo, Role::Assistant, None)?;
     println!("{:?}", tokens);
     Ok(())
 }
@@ -130,7 +130,7 @@ fn main() -> anyhow::Result<()> {
 The majority of the rendering and parsing is built in Rust for performance and exposed to Python
 through thin [`pyo3`](https://pyo3.rs/) bindings.
 
-```
+```text
 ┌──────────────────┐      ┌───────────────────────────┐
 │  Python code     │      │   Rust core (this repo)   │
 │  (dataclasses,   │────► │  • chat / encoding logic  │
@@ -140,7 +140,7 @@
 
 ### Repository layout
 
-```
+```text
 .
 ├── src/                  # Rust crate
 │   ├── chat.rs           # High-level data-structures (Role, Message, …)
diff --git a/src/lib.rs b/src/lib.rs
index 2535e1c..acd572a 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,3 +1,5 @@
+#![doc = include_str!("../README.md")]
+
 pub mod chat;
 mod encoding;
 mod registry;
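
A note for reviewers on how the README and lib.rs changes fit together: embedding the README as crate-level docs means rustdoc compiles its Rust snippet as a doctest, so `cargo test` should now fail whenever the sample drifts from the API, which is how the previously missing third argument to `render_conversation_for_completion` gets caught going forward. The sketch below is just the patched sample copied out on its own; the comments are editorial additions, not part of the diff.

```rust
use openai_harmony::chat::{Message, Role, Conversation};
use openai_harmony::{HarmonyEncodingName, load_harmony_encoding};

fn main() -> anyhow::Result<()> {
    // Load the harmony encoding used by the gpt-oss models.
    let enc = load_harmony_encoding(HarmonyEncodingName::HarmonyGptOss)?;
    // A one-message conversation from the user.
    let convo =
        Conversation::from_messages([Message::from_role_and_content(Role::User, "Hello there!")]);
    // Render prompt tokens for the assistant's next turn; the trailing `None`
    // is the argument the old README sample omitted.
    let tokens = enc.render_conversation_for_completion(&convo, Role::Assistant, None)?;
    println!("{:?}", tokens);
    Ok(())
}
```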