From e5f0b5de5e073bf708d885d8bc11f7da156403e6 Mon Sep 17 00:00:00 2001
From: Anders429
Date: Sun, 17 Dec 2023 22:30:45 -0800
Subject: [PATCH] Update documentation.

---
 src/de.rs    | 12 ++++++------
 src/lib.rs   | 19 ++++++++++---------
 src/ser.rs   |  2 +-
 src/token.rs | 39 +++++++++++++++++++++++++++------------
 4 files changed, 44 insertions(+), 28 deletions(-)

diff --git a/src/de.rs b/src/de.rs
index 8898bbb..a8dd315 100644
--- a/src/de.rs
+++ b/src/de.rs
@@ -43,9 +43,9 @@ use serde::{
 
 /// Deserializer for testing [`Deserialize`] implementations.
 ///
-/// A deserializer is constructed using [`Tokens`] representing the serialized value to be
-/// deserialized. The value that is output can be compared against an expected value to ensure
-/// deserialization works correctly.
+/// A deserializer is constructed from a sequence of [`Token`]s representing the serialized value
+/// to be deserialized. The value that is output can be compared against an expected value to
+/// ensure deserialization works correctly.
 ///
 /// # Configuration
 /// The following options can be configured on the [`Builder`]:
@@ -1177,7 +1177,7 @@ impl<'a, 'de> de::Deserializer<'de> for EnumDeserializer<'a, 'de> {
 /// Construction of a `Deserializer` follows the builder pattern. Configuration options can be set
 /// on the `Builder`, and then the actual `Deserializer` is constructed by calling [`build()`].
 ///
-/// Note that providing [`Tokens`] using the [`tokens()`] method is required.
+/// Note that providing a sequence of [`Token`]s using the [`tokens()`] method is required.
 ///
 /// # Example
 /// ``` rust
@@ -1205,7 +1205,7 @@ pub struct Builder {
 }
 
 impl Builder {
-    /// Provides the [`Tokens`] to be used as the input source during deserialization.
+    /// Provides the sequence of [`Token`]s to be used as the input source during deserialization.
     ///
     /// Calling this method before [`build()`] is required.
     ///
@@ -1371,7 +1371,7 @@ impl Default for Builder {
 /// ```
 #[derive(Debug, PartialEq)]
 pub enum Error {
-    /// The [`Deserializer`] reached the end of the input [`Tokens`] before deserialization was
+    /// The [`Deserializer`] reached the end of the input [`Token`]s before deserialization was
     /// completed.
     EndOfTokens,
 
diff --git a/src/lib.rs b/src/lib.rs
index 1c4bda5..9cd4140 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -2,14 +2,15 @@
 //!
 //! This library provides a [`Serializer`] and [`Deserializer`] to be used in writing unit tests to
 //! assert the behavior of manual [`Serialize`] and [`Deserialize`] implementations, respectively.
-//! The implementation behavior can be verified by using [`Tokens`] representing an arbitrary
-//! serialized state.
+//! The implementation behavior can be verified by using a sequence of [`Token`]s representing an
+//! arbitrary serialized state.
 //!
 //! # Testing Serialization
-//! The [`Serializer`] returns [`Tokens`] representing the serialization of a value. The returned
-//! `Tokens` can be checked to be equal to an expected value. Since [`Serialize::serialize()`]
-//! returns a `Result`, it is recommended to use the [`claims`] crate to check that
-//! the returned value is both `Ok` and equal to the expected `Tokens`.
+//! The [`Serializer`] returns a sequence of [`Token`]s representing the serialization of a value.
+//! The returned `Token`s can be checked to be equal to an expected value. Since
+//! [`Serialize::serialize()`] returns a `Result`, it is recommended to use the
+//! [`claims`] crate to check that the returned value is both `Ok` and equal to the expected
+//! sequence of `Token`s.
 //!
 //! ```
 //! use claims::assert_ok_eq;
@@ -56,9 +57,9 @@
 //! ```
 //!
 //! # Testing Deserialization
-//! A [`Deserializer`] is constructed by providing [`Tokens`] to be deserialized into a value.
-//! During testing, the [`claims`] crate can be used to assert that deserialization succeeds and
-//! returns the expected value.
+//! A [`Deserializer`] is constructed by providing a sequence of [`Token`]s to be deserialized into
+//! a value. During testing, the [`claims`] crate can be used to assert that deserialization
+//! succeeds and returns the expected value.
 //!
 //! ```
 //! use claims::assert_ok_eq;
diff --git a/src/ser.rs b/src/ser.rs
index bf4f405..db65d4b 100644
--- a/src/ser.rs
+++ b/src/ser.rs
@@ -102,7 +102,7 @@ pub enum SerializeStructAs {
 /// Serializer for testing [`Serialize`] implementations.
 ///
 /// This serializer outputs [`Tokens`] representing the serialized value. The `Tokens` can be
-/// compared against expected `Tokens` to ensure the serialization is correct.
+/// compared against an expected sequence of [`Token`]s to ensure the serialization is correct.
 ///
 /// # Configuration
 /// The following options can be configured on the [`Builder`]:
diff --git a/src/token.rs b/src/token.rs
index a8873b8..468f507 100644
--- a/src/token.rs
+++ b/src/token.rs
@@ -1,8 +1,11 @@
 //! Tokens representing a serialized object.
 //!
 //! This module provides a [`Token`] type for representing a serialized value, as well as a
-//! [`Tokens`] type containing a set of `Token`s. `Tokens` can be compared for equality, which is
-//! useful for asserting whether serialized `Tokens` are as expected.
+//! [`Tokens`] type containing a set of `Token`s. `Tokens` are returned by a [`Serializer`] and can
+//! be compared against a sequence of `Token`s to verify equality, which is useful for asserting
+//! whether serialized `Tokens` are as expected.
+//!
+//! [`Serializer`]: crate::Serializer
 
 use alloc::{
     slice,
@@ -29,10 +32,10 @@ use serde::de::Unexpected;
 /// A `Token` is a single serialization output produced by the [`Serializer`]. The one exception to
 /// this is the [`Unordered`] variant, which contains multiple sets of tokens that can be in any
 /// order. This is never produced by the `Serializer`, and is for use when comparing equality of
-/// [`Tokens`].
+/// sequences of [`Token`]s.
 ///
-/// Normally, `Token`s are used within the [`Tokens`] struct to either compare against the output
-/// of a [`Serializer`] or to be used as input to a [`Deserializer`].
+/// Normally, a sequence of `Token`s is used either to compare against the output of a
+/// [`Serializer`] or to serve as input to a [`Deserializer`].
 ///
 /// [`Deserializer`]: crate::Deserializer
 /// [`Serializer`]: crate::Serializer
@@ -1069,14 +1072,14 @@ impl<'a> From<&'a Token> for Unexpected<'a> {
     }
 }
 
-/// An ordered set of [`Token`]s.
+/// A sequence of [`Token`]s output by a [`Serializer`].
 ///
-/// This is simply a wrapper around a [`Vec`], providing a custom [`PartialEq`]
-/// implementation for comparing with tokens output from a [`Serializer`].
+/// `Tokens` can be compared with any other sequence of `Token`s to assert that the serialized
+/// values are as expected.
 ///
 /// # Examples
 ///
-/// `Tokens` are output from a [`Serializer`] and are used as input to a [`Deserializer`].
+/// `Tokens` are output from a [`Serializer`] and can be compared against a sequence of `Token`s.
 ///
 /// ## Serialization
 ///
@@ -1095,15 +1098,27 @@ impl<'a> From<&'a Token> for Unexpected<'a> {
 ///
 /// ## Deserialization
 ///
+/// `Tokens` obtained from a [`Serializer`] can be used as input to a [`Deserializer`].
+///
 /// ``` rust
-/// use claims::assert_ok_eq;
-/// use serde::Deserialize;
+/// use claims::{
+///     assert_ok,
+///     assert_ok_eq,
+/// };
+/// use serde::{
+///     Deserialize,
+///     Serialize,
+/// };
 /// use serde_assert::{
 ///     Deserializer,
+///     Serializer,
 ///     Token,
 /// };
 ///
-/// let mut deserializer = Deserializer::builder().tokens([Token::Bool(true)]).build();
+/// let serializer = Serializer::builder().build();
+/// let mut deserializer = Deserializer::builder()
+///     .tokens(assert_ok!(true.serialize(&serializer)))
+///     .build();
 ///
 /// assert_ok_eq!(bool::deserialize(&mut deserializer), true);
 /// ```
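
The updated examples exercise serialization and deserialization separately; the two flows combine naturally into a single round-trip test. The sketch below uses only the API surface that appears in this patch (`Serializer::builder()`, `Deserializer::builder().tokens(...)`, `Token::Bool`, and the `claims` macros). The direct comparison against `[Token::Bool(true)]` assumes, per the updated `token.rs` docs, that `Tokens` can be compared with a plain sequence of `Token`s; the test name is illustrative.

``` rust
use claims::{
    assert_ok,
    assert_ok_eq,
};
use serde::{
    Deserialize,
    Serialize,
};
use serde_assert::{
    Deserializer,
    Serializer,
    Token,
};

#[test]
fn bool_round_trip() {
    let serializer = Serializer::builder().build();

    // Serialize into a sequence of `Token`s.
    let tokens = assert_ok!(true.serialize(&serializer));

    // Compare the serialized output against the expected sequence of `Token`s.
    // Assumes `Tokens` compares directly with a plain sequence of `Token`s, as
    // described in the updated `token.rs` documentation.
    assert_eq!(tokens, [Token::Bool(true)]);

    // Reuse the same sequence as input when testing deserialization.
    let mut deserializer = Deserializer::builder().tokens(tokens).build();
    assert_ok_eq!(bool::deserialize(&mut deserializer), true);
}
```

Binding the serializer output to `tokens` before handing it to `tokens()` keeps the expected-sequence assertion and the deserialization input in sync, mirroring how the updated deserialization example feeds the serializer output straight into the builder.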