diff --git a/async-openai/src/lib.rs b/async-openai/src/lib.rs
index 266a860f..73ee9d35 100644
--- a/async-openai/src/lib.rs
+++ b/async-openai/src/lib.rs
@@ -56,7 +56,7 @@
 //!     // Create request using builder pattern
 //!     // Every request struct has companion builder struct with same name + Args suffix
 //!     let request = CreateCompletionRequestArgs::default()
-//!         .model("text-davinci-003")
+//!         .model("gpt-3.5-turbo-instruct")
 //!         .prompt("Tell me the recipe of alfredo pasta")
 //!         .max_tokens(40_u16)
 //!         .build()
diff --git a/async-openai/src/types/impls.rs b/async-openai/src/types/impls.rs
index 991dfbc8..d98f39ab 100644
--- a/async-openai/src/types/impls.rs
+++ b/async-openai/src/types/impls.rs
@@ -117,7 +117,7 @@ impl Default for InputSource {
 }
 
 /// for `impl_input!(Struct)` where
-/// ```
+/// ```text
 /// Struct {
 ///     source: InputSource
 /// }
diff --git a/async-openai/tests/boxed_future.rs b/async-openai/tests/boxed_future.rs
index 8ec03891..9eae1105 100644
--- a/async-openai/tests/boxed_future.rs
+++ b/async-openai/tests/boxed_future.rs
@@ -27,7 +27,7 @@ async fn boxed_future_test() {
     let client = Client::new();
 
     let request = CreateCompletionRequestArgs::default()
-        .model("text-babbage-001")
+        .model("gpt-3.5-turbo-instruct")
         .n(1)
         .prompt("does 2 and 2 add to four? (yes/no):\n")
         .stream(true)
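
For reference, a minimal end-to-end sketch of the updated doc example, assuming the crate version targeted by this patch (where `Client::new()` reads the `OPENAI_API_KEY` environment variable and `max_tokens` still takes a `u16`). The deprecated `text-davinci-003` / `text-babbage-001` completions models are swapped for `gpt-3.5-turbo-instruct`, as in the diff above:

```rust
use async_openai::{types::CreateCompletionRequestArgs, Client};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Client::new() picks up the API key from OPENAI_API_KEY.
    let client = Client::new();

    // Build the request with the replacement completions model.
    let request = CreateCompletionRequestArgs::default()
        .model("gpt-3.5-turbo-instruct")
        .prompt("Tell me the recipe of alfredo pasta")
        .max_tokens(40_u16)
        .build()?;

    // Send the completion request and print the first returned choice.
    let response = client.completions().create(request).await?;
    if let Some(choice) = response.choices.first() {
        println!("{}", choice.text);
    }
    Ok(())
}
```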