Mirror of https://github.com/openai/harmony.git (synced 2025-08-22 16:17:08 -04:00)

Merge pull request #17 from openai/dev/scl/add-docs-to-cargo
Fix readme rust sample, version bump

Commit 64bca4cf32
4 changed files with 15 additions and 10 deletions

Cargo.lock (generated): 2 changes

@@ -1317,7 +1317,7 @@ checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
 [[package]]
 name = "openai-harmony"
-version = "0.0.2-alpha.1"
+version = "0.0.2"
 dependencies = [
  "anyhow",
  "base64",

Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "openai-harmony"
-version = "0.0.2-alpha.1"
+version = "0.0.2"
 edition = "2021"
 license = "Apache-2.0"
 repository = "https://github.com/openai/harmony"

@@ -42,7 +42,10 @@ reqwest = { version = "0.12.5", default-features = false, features = [
 ] }
 futures = "0.3"
 clap = { version = "4", features = ["derive"] }
-pyo3 = { version = "0.25.0", optional = true, features = ["extension-module", "abi3-py38"] }
+pyo3 = { version = "0.25.0", optional = true, features = [
+    "extension-module",
+    "abi3-py38",
+] }
 wasm-bindgen = { version = "0.2.100", optional = true, features = [
     "serde-serialize",
 ] }
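
Both pyo3 and wasm-bindgen are declared with `optional = true`, so the bindings they support are only compiled when the matching Cargo feature is enabled. Below is a minimal sketch of how such gating typically looks on the Rust side; the feature name `python` is a placeholder assumption, not a name taken from this Cargo.toml.

```rust
// Sketch of feature-gated code behind an optional dependency. The feature
// name "python" is hypothetical; the real crate may wire its optional pyo3
// dependency to a differently named feature.
#[cfg(feature = "python")]
mod py_bindings {
    // pyo3-backed glue code would live behind this gate, so builds that do
    // not enable the feature never compile or link against pyo3.
}
```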

README.md: 14 changes

@@ -12,7 +12,7 @@ The [gpt-oss models][gpt-oss] were trained on the [harmony response format][harm
 
 The format enables the model to output to multiple different channels for chain of thought, and tool calling preambles along with regular responses. It also enables specifying various tool namespaces, and structured outputs along with a clear instruction hierarchy. [Check out the guide][harmony-format] to learn more about the format itself.
 
-```
+```text
 <|start|>system<|message|>You are ChatGPT, a large language model trained by OpenAI.
 Knowledge cutoff: 2024-06
 Current date: 2025-06-28

@@ -114,12 +114,12 @@ openai-harmony = { git = "https://github.com/openai/harmony" }
 ```rust
 use openai_harmony::chat::{Message, Role, Conversation};
 use openai_harmony::{HarmonyEncodingName, load_harmony_encoding};
 
 fn main() -> anyhow::Result<()> {
     let enc = load_harmony_encoding(HarmonyEncodingName::HarmonyGptOss)?;
-    let convo = Conversation::from_messages([
-        Message::from_role_and_content(Role::User, "Hello there!"),
-    ]);
-    let tokens = enc.render_conversation_for_completion(&convo, Role::Assistant)?;
+    let convo =
+        Conversation::from_messages([Message::from_role_and_content(Role::User, "Hello there!")]);
+    let tokens = enc.render_conversation_for_completion(&convo, Role::Assistant, None)?;
     println!("{:?}", tokens);
     Ok(())
 }
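
For reference, here is the Rust sample as it reads after this fix, assembled from the hunk above. The visible behavioural change is the extra third argument to `render_conversation_for_completion`, passed as `None` here; the argument's type is not shown in this diff.

```rust
use openai_harmony::chat::{Message, Role, Conversation};
use openai_harmony::{HarmonyEncodingName, load_harmony_encoding};

fn main() -> anyhow::Result<()> {
    // Load the harmony encoding used by the gpt-oss models.
    let enc = load_harmony_encoding(HarmonyEncodingName::HarmonyGptOss)?;
    // A single-message conversation, now built in one (wrapped) statement.
    let convo =
        Conversation::from_messages([Message::from_role_and_content(Role::User, "Hello there!")]);
    // The call now takes a third argument; the README sample passes None.
    let tokens = enc.render_conversation_for_completion(&convo, Role::Assistant, None)?;
    println!("{:?}", tokens);
    Ok(())
}
```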

@@ -130,7 +130,7 @@ fn main() -> anyhow::Result<()> {
 The majority of the rendering and parsing is built in Rust for performance and exposed to Python
 through thin [`pyo3`](https://pyo3.rs/) bindings.
 
-```
+```text
 ┌──────────────────┐      ┌───────────────────────────┐
 │  Python code     │      │   Rust core (this repo)   │
 │  (dataclasses,   │────► │  • chat / encoding logic  │
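
The "thin pyo3 bindings" in the diagram are typically a small amount of glue of the following shape. This is a generic sketch of the pattern, not this repository's actual binding code; the module and function names are invented for illustration.

```rust
use pyo3::prelude::*;

/// Hypothetical Rust function doing the real work in the core crate.
fn render_len(text: &str) -> usize {
    text.len()
}

/// Thin wrapper exposing the Rust function to Python.
#[pyfunction]
fn render_len_py(text: &str) -> usize {
    render_len(text)
}

/// Module definition; Python would see `import example_bindings`.
#[pymodule]
fn example_bindings(m: &Bound<'_, PyModule>) -> PyResult<()> {
    m.add_function(wrap_pyfunction!(render_len_py, m)?)?;
    Ok(())
}
```

Once built as an extension module (for example with maturin or a similar tool), the Python side imports the compiled module like any other package.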

@@ -140,7 +140,7 @@ through thin [`pyo3`](https://pyo3.rs/) bindings.
 
 ### Repository layout
 
-```
+```text
 .
 ├── src/            # Rust crate
 │   ├── chat.rs     # High-level data-structures (Role, Message, …)

src/lib.rs

@@ -1,3 +1,5 @@
+#![doc = include_str!("../README.md")]
+
 pub mod chat;
 mod encoding;
 mod registry;
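
The added attribute embeds README.md as the crate-level rustdoc, so `cargo doc` and docs.rs render the README as the crate's front page. A side effect is that the README's Rust code block is compiled as a doctest by `cargo test`, which is presumably why the sample had to be fixed in the same commit. A minimal sketch of the idiom in a hypothetical crate:

```rust
// src/lib.rs of a hypothetical crate: reuse the README as crate-level docs.
// rustdoc renders the included Markdown as the crate front page, and
// `cargo test` compiles any Rust code blocks in it as doctests, so a stale
// README sample shows up as a test failure.
#![doc = include_str!("../README.md")]

/// Ordinary items follow as usual; only the attribute above affects the docs.
pub fn hello() -> &'static str {
    "hello"
}
```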