From 45f9884c10df391a150a92a74f53930c51960db1 Mon Sep 17 00:00:00 2001
From: Joseph Sinclair <121976561+jsync-swirlds@users.noreply.github.com>
Date: Thu, 9 Jan 2025 13:22:05 -0700
Subject: [PATCH 1/3] fix: Simple bug fixes.

* Fixes #145
  * Renamed `intergration` to `integration` globally
* Fixes #217
  * Added skippedDocComment before the `syntax`, `import`, `package`,
    `option`, and `reserved` keywords in the grammar
  * The root cause is the change that made docComment non-skipped, so we
    have to add skippedDocComment everywhere. Skipped comments (the `//`
    type) can be absolutely anywhere because the grammar _skips_ them.
  * Note: this _might_ also fix #319

Signed-off-by: Joseph Sinclair <121976561+jsync-swirlds@users.noreply.github.com>
---
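For illustration, here is a minimal .proto fragment of the shape issue #217 appears to describe: a doc comment (`/** ... */`) ahead of the `syntax`, `import`, or `package` keyword, which the new skippedDocComment rule is intended to let the parser tolerate. The fragment is hypothetical and not taken from the repository's test protos:

    /** A file-level doc comment; previously this tripped the parser before `syntax`. */
    syntax = "proto3";

    /** Doc comments before import/package/option/reserved statements are now skipped too. */
    import "example/timestamp.proto";  // hypothetical import path

    package com.example.sketch;  // hypothetical package name

The point is only where the doc comments sit; the names are placeholders.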
 ...hedera.pbj.spotless-conventions.gradle.kts | 35 ++-----------------
 ...a.pbj.spotless-java-conventions.gradle.kts | 35 ++-----------------
 ...pbj.spotless-kotlin-conventions.gradle.kts | 33 ++---------------
 .../protoparser/grammar/Protobuf3.g4          | 20 ++++++-----
 pbj-integration-tests/build.gradle.kts        |  2 +-
 .../jmh/ComplexEqualsHashCodeBench.java       |  4 +--
 .../jmh/EqualsHashCodeBench.java              |  4 +--
 .../jmh/HashBench.java                        |  6 ++--
 .../jmh/JsonBench.java                        |  2 +-
 .../jmh/ProtobufObjectBench.java              |  2 +-
 .../jmh/VarIntBench.java                      |  2 +-
 .../test/CompareToNegativeTest.java           |  2 +-
 .../test/CompareToTest.java                   |  2 +-
 .../ExtendedUtf8MessageWithStringTest.java    |  2 +-
 .../test/FieldsNonNullTest.java               |  2 +-
 .../test/HashEqualsTest.java                  |  4 +--
 .../test/JsonCodecTest.java                   |  2 +-
 .../test/MalformedMessageTest.java            |  2 +-
 .../test/MaxDepthTest.java                    |  2 +-
 .../test/MaxSizeTest.java                     |  2 +-
 .../test/ParserNeverWrapsTest.java            |  2 +-
 .../test/SampleFuzzTest.java                  |  4 +--
 .../test/TestHashFunctions.java               |  4 +--
 .../test/TimestampTestTest.java               |  4 +--
 .../test/TruncatedDataTests.java              |  4 +--
 25 files changed, 50 insertions(+), 133 deletions(-)
 rename pbj-integration-tests/src/jmh/java/com/hedera/pbj/{intergration => integration}/jmh/ComplexEqualsHashCodeBench.java (99%)
 rename pbj-integration-tests/src/jmh/java/com/hedera/pbj/{intergration => integration}/jmh/EqualsHashCodeBench.java (98%)
 rename pbj-integration-tests/src/jmh/java/com/hedera/pbj/{intergration => integration}/jmh/HashBench.java (94%)
 rename pbj-integration-tests/src/jmh/java/com/hedera/pbj/{intergration => integration}/jmh/JsonBench.java (99%)
 rename pbj-integration-tests/src/jmh/java/com/hedera/pbj/{intergration => integration}/jmh/ProtobufObjectBench.java (99%)
 rename pbj-integration-tests/src/jmh/java/com/hedera/pbj/{intergration => integration}/jmh/VarIntBench.java (99%)
 rename pbj-integration-tests/src/test/java/com/hedera/pbj/{intergration => integration}/test/CompareToNegativeTest.java (97%)
 rename pbj-integration-tests/src/test/java/com/hedera/pbj/{intergration => integration}/test/CompareToTest.java (99%)
 rename pbj-integration-tests/src/test/java/com/hedera/pbj/{intergration => integration}/test/ExtendedUtf8MessageWithStringTest.java (99%)
 rename pbj-integration-tests/src/test/java/com/hedera/pbj/{intergration => integration}/test/FieldsNonNullTest.java (98%)
 rename pbj-integration-tests/src/test/java/com/hedera/pbj/{intergration => integration}/test/HashEqualsTest.java (98%)
 rename pbj-integration-tests/src/test/java/com/hedera/pbj/{intergration => integration}/test/JsonCodecTest.java (99%)
 rename pbj-integration-tests/src/test/java/com/hedera/pbj/{intergration => integration}/test/MalformedMessageTest.java (98%)
 rename pbj-integration-tests/src/test/java/com/hedera/pbj/{intergration => integration}/test/MaxDepthTest.java (98%)
 rename pbj-integration-tests/src/test/java/com/hedera/pbj/{intergration => integration}/test/MaxSizeTest.java (97%)
 rename pbj-integration-tests/src/test/java/com/hedera/pbj/{intergration => integration}/test/ParserNeverWrapsTest.java (99%)
 rename pbj-integration-tests/src/test/java/com/hedera/pbj/{intergration => integration}/test/SampleFuzzTest.java (98%)
 rename pbj-integration-tests/src/test/java/com/hedera/pbj/{intergration => integration}/test/TestHashFunctions.java (99%)
 rename pbj-integration-tests/src/test/java/com/hedera/pbj/{intergration => integration}/test/TimestampTestTest.java (98%)
 rename pbj-integration-tests/src/test/java/com/hedera/pbj/{intergration => integration}/test/TruncatedDataTests.java (99%)

diff --git a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.spotless-conventions.gradle.kts b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.spotless-conventions.gradle.kts
index f40e9dd2..f8ee303b 100644
--- a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.spotless-conventions.gradle.kts
+++ b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.spotless-conventions.gradle.kts
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2022-2023 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 plugins { id("com.diffplug.spotless") }
 
 spotless {
@@ -51,22 +36,8 @@ spotless {
 
         licenseHeader(
             """
-            ##
-            # Copyright (C) ${'$'}YEAR Hedera Hashgraph, LLC
-            #
-            # Licensed under the Apache License, Version 2.0 (the "License");
-            # you may not use this file except in compliance with the License.
-            # You may obtain a copy of the License at
-            #
-            #      http://www.apache.org/licenses/LICENSE-2.0
-            #
-            # Unless required by applicable law or agreed to in writing, software
-            # distributed under the License is distributed on an "AS IS" BASIS,
-            # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-            # See the License for the specific language governing permissions and
-            # limitations under the License.
-            ##
-        """
+            # SPDX-License-Identifier: Apache-2.0
+            """
                 .trimIndent(),
             "(name|on)"
         )
diff --git a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.spotless-java-conventions.gradle.kts b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.spotless-java-conventions.gradle.kts
index ae83aed1..3cc45136 100644
--- a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.spotless-java-conventions.gradle.kts
+++ b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.spotless-java-conventions.gradle.kts
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2022-2023 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 plugins { id("com.diffplug.spotless") }
 
 spotless {
@@ -31,22 +16,8 @@ spotless {
         // of our test classes which are in the default package.
         licenseHeader(
             """
-           /*
-            * Copyright (C) ${'$'}YEAR Hedera Hashgraph, LLC
-            *
-            * Licensed under the Apache License, Version 2.0 (the "License");
-            * you may not use this file except in compliance with the License.
-            * You may obtain a copy of the License at
-            *
-            *      http://www.apache.org/licenses/LICENSE-2.0
-            *
-            * Unless required by applicable law or agreed to in writing, software
-            * distributed under the License is distributed on an "AS IS" BASIS,
-            * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-            * See the License for the specific language governing permissions and
-            * limitations under the License.
-            */${"\n\n"}
-        """
+           // SPDX-License-Identifier: Apache-2.0
+            """
                 .trimIndent(),
             "(package|import)"
         )
diff --git a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.spotless-kotlin-conventions.gradle.kts b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.spotless-kotlin-conventions.gradle.kts
index ef2755ec..40092e17 100644
--- a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.spotless-kotlin-conventions.gradle.kts
+++ b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.spotless-kotlin-conventions.gradle.kts
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2022-2023 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 plugins { id("com.diffplug.spotless") }
 
 spotless {
@@ -22,21 +7,7 @@ spotless {
 
         licenseHeader(
             """
-           /*
-            * Copyright (C) ${'$'}YEAR Hedera Hashgraph, LLC
-            *
-            * Licensed under the Apache License, Version 2.0 (the "License");
-            * you may not use this file except in compliance with the License.
-            * You may obtain a copy of the License at
-            *
-            *      http://www.apache.org/licenses/LICENSE-2.0
-            *
-            * Unless required by applicable law or agreed to in writing, software
-            * distributed under the License is distributed on an "AS IS" BASIS,
-            * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-            * See the License for the specific language governing permissions and
-            * limitations under the License.
-            */${"\n\n"}
+           // SPDX-License-Identifier: Apache-2.0
             """
                 .trimIndent(),
             "(import|plugins|repositories)",
diff --git a/pbj-core/pbj-compiler/src/main/antlr/com/hedera/hashgraph/protoparser/grammar/Protobuf3.g4 b/pbj-core/pbj-compiler/src/main/antlr/com/hedera/hashgraph/protoparser/grammar/Protobuf3.g4
index 1f9fbc74..b4e66ce8 100644
--- a/pbj-core/pbj-compiler/src/main/antlr/com/hedera/hashgraph/protoparser/grammar/Protobuf3.g4
+++ b/pbj-core/pbj-compiler/src/main/antlr/com/hedera/hashgraph/protoparser/grammar/Protobuf3.g4
@@ -6,10 +6,9 @@
  * Original source is published under Apache License 2.0.
  *
  * Changes from the source above:
- * - rewrite to antlr
+ * - rewrite to antlr.
  * - extract some group to rule.
- *
- * @author anatawa12
+ * - Allow document comments in a few added places.
  */
 
 grammar Protobuf3;
@@ -29,30 +28,32 @@ proto
 
 docComment: DOC_COMMENT*;
 
+skippedDocComment: (WS | SKIPPED_DOC_COMMENT | LINE_COMMENT | COMMENT)*;
+
 optionComment: OPTION_LINE_COMMENT;
 
 // Syntax
 
 syntax
-  : SYNTAX EQ (PROTO3_LIT_SINGLE | PROTO3_LIT_DOBULE) SEMI
+  : skippedDocComment SYNTAX EQ (PROTO3_LIT_SINGLE | PROTO3_LIT_DOBULE) SEMI
   ;
 
 // Import Statement
 
 importStatement
-  : IMPORT ( WEAK | PUBLIC )? strLit SEMI
+  : skippedDocComment IMPORT ( WEAK | PUBLIC )? strLit SEMI
   ;
 
 // Package
 
 packageStatement
-  : PACKAGE fullIdent SEMI
+  : skippedDocComment PACKAGE fullIdent SEMI
   ;
 
 // Option
 
 optionStatement
-  : OPTION optionName EQ constant SEMI
+  : skippedDocComment OPTION optionName EQ constant SEMI
   ;
 
 optionName
@@ -80,6 +81,8 @@ fieldNumber
 
 // Oneof and oneof field
 
+// Note, oneOf isn't a message or field, so docComment is broken here, but the current
+// PBJ compiler requires docComment.
 oneof
   : docComment ONEOF oneofName LC ( optionStatement | oneofField | emptyStatement_ )* RC
   ;
@@ -134,7 +137,7 @@ type_
 // Reserved
 
 reserved
-  : RESERVED ( ranges | reservedFieldNames ) SEMI
+  : skippedDocComment RESERVED ( ranges | reservedFieldNames ) SEMI
   ;
 
 ranges
@@ -348,6 +351,7 @@ WS  :   [ \t\r\n\u000C]+ -> skip;
 OPTION_LINE_COMMENT: '//' WS? '<<<' .*? '>>>' ~[\r\n]*;
 LINE_COMMENT: '//' ~[\r\n]* -> skip;
 DOC_COMMENT: '/**' .*? '*/';
+SKIPPED_DOC_COMMENT: '/**' .*? '*/' -> skip;
 COMMENT: '/*' .*? '*/' -> skip;
 
 keywords
diff --git a/pbj-integration-tests/build.gradle.kts b/pbj-integration-tests/build.gradle.kts
index 4ae478b5..fc7d7657 100644
--- a/pbj-integration-tests/build.gradle.kts
+++ b/pbj-integration-tests/build.gradle.kts
@@ -104,7 +104,7 @@ testing {
 
             useJUnitPlatform { includeTags("FUZZ_TEST") }
             enableAssertions = false
-            systemProperties["com.hedera.pbj.intergration.test.fuzz.useRandomSeed"] = true
+            systemProperties["com.hedera.pbj.integration.test.fuzz.useRandomSeed"] = true
         }
     }
 }
diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/ComplexEqualsHashCodeBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/ComplexEqualsHashCodeBench.java
similarity index 99%
rename from pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/ComplexEqualsHashCodeBench.java
rename to pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/ComplexEqualsHashCodeBench.java
index a2041d01..faceae06 100644
--- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/ComplexEqualsHashCodeBench.java
+++ b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/ComplexEqualsHashCodeBench.java
@@ -1,4 +1,4 @@
-package com.hedera.pbj.intergration.jmh;
+package com.hedera.pbj.integration.jmh;
 
 import com.hedera.pbj.runtime.io.buffer.Bytes;
 import com.hedera.pbj.test.proto.pbj.Hasheval;
@@ -152,4 +152,4 @@ public void benchJavaRecordNotEquals(Blackhole blackhole) {
             blackhole.consume(hashevalJavaRecord.equals(hashevalJavaRecordDifferent));
         }
     }
-}
\ No newline at end of file
+}
diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/EqualsHashCodeBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/EqualsHashCodeBench.java
similarity index 98%
rename from pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/EqualsHashCodeBench.java
rename to pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/EqualsHashCodeBench.java
index d5b42b3e..b09870fc 100644
--- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/EqualsHashCodeBench.java
+++ b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/EqualsHashCodeBench.java
@@ -1,4 +1,4 @@
-package com.hedera.pbj.intergration.jmh;
+package com.hedera.pbj.integration.jmh;
 
 import com.hedera.pbj.test.proto.pbj.TimestampTest;
 import org.openjdk.jmh.annotations.Benchmark;
@@ -102,4 +102,4 @@ public void benchJavaRecordNotEquals(Blackhole blackhole) {
             blackhole.consume(timestampStandardRecord.equals(timestampStandardRecordDifferent));
         }
     }
-}
\ No newline at end of file
+}
diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/HashBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/HashBench.java
similarity index 94%
rename from pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/HashBench.java
rename to pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/HashBench.java
index f2fa35f4..88a0d43b 100644
--- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/HashBench.java
+++ b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/HashBench.java
@@ -1,6 +1,6 @@
-package com.hedera.pbj.intergration.jmh;
+package com.hedera.pbj.integration.jmh;
 
-import com.hedera.pbj.intergration.test.TestHashFunctions;
+import com.hedera.pbj.integration.test.TestHashFunctions;
 import com.hedera.pbj.runtime.io.buffer.Bytes;
 import com.hedera.pbj.test.proto.pbj.Hasheval;
 import com.hedera.pbj.test.proto.pbj.Suit;
@@ -53,4 +53,4 @@ public void hashBenchFieldWise(Blackhole blackhole) throws IOException {
             TestHashFunctions.hash2(hasheval);
         }
     }
-}
\ No newline at end of file
+}
diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/JsonBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/JsonBench.java
similarity index 99%
rename from pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/JsonBench.java
rename to pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/JsonBench.java
index 5a258027..3a4f6fdf 100644
--- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/JsonBench.java
+++ b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/JsonBench.java
@@ -1,4 +1,4 @@
-package com.hedera.pbj.intergration.jmh;
+package com.hedera.pbj.integration.jmh;
 
 import com.google.protobuf.GeneratedMessage;
 import com.google.protobuf.GeneratedMessageV3;
diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/ProtobufObjectBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/ProtobufObjectBench.java
similarity index 99%
rename from pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/ProtobufObjectBench.java
rename to pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/ProtobufObjectBench.java
index 18268a90..d5fd2466 100644
--- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/ProtobufObjectBench.java
+++ b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/ProtobufObjectBench.java
@@ -1,4 +1,4 @@
-package com.hedera.pbj.intergration.jmh;
+package com.hedera.pbj.integration.jmh;
 
 import com.google.protobuf.CodedOutputStream;
 import com.google.protobuf.GeneratedMessage;
diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/VarIntBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/VarIntBench.java
similarity index 99%
rename from pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/VarIntBench.java
rename to pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/VarIntBench.java
index 9a5018c2..43cf97b2 100644
--- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/intergration/jmh/VarIntBench.java
+++ b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/VarIntBench.java
@@ -1,4 +1,4 @@
-package com.hedera.pbj.intergration.jmh;
+package com.hedera.pbj.integration.jmh;
 
 import com.google.protobuf.CodedInputStream;
 import com.google.protobuf.CodedOutputStream;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/CompareToNegativeTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/CompareToNegativeTest.java
similarity index 97%
rename from pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/CompareToNegativeTest.java
rename to pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/CompareToNegativeTest.java
index d51ca2c7..a1d3c232 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/CompareToNegativeTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/CompareToNegativeTest.java
@@ -1,4 +1,4 @@
-package com.hedera.pbj.intergration.test;
+package com.hedera.pbj.integration.test;
 
 import static com.hedera.pbj.compiler.PbjCompilerTask.compileFilesIn;
 import static org.junit.jupiter.api.Assertions.assertEquals;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/CompareToTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/CompareToTest.java
similarity index 99%
rename from pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/CompareToTest.java
rename to pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/CompareToTest.java
index e426f4ec..769484d0 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/CompareToTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/CompareToTest.java
@@ -1,4 +1,4 @@
-package com.hedera.pbj.intergration.test;
+package com.hedera.pbj.integration.test;
 
 import static java.util.Collections.shuffle;
 import static java.util.Collections.sort;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/ExtendedUtf8MessageWithStringTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/ExtendedUtf8MessageWithStringTest.java
similarity index 99%
rename from pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/ExtendedUtf8MessageWithStringTest.java
rename to pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/ExtendedUtf8MessageWithStringTest.java
index 2f417889..26138e21 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/ExtendedUtf8MessageWithStringTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/ExtendedUtf8MessageWithStringTest.java
@@ -1,4 +1,4 @@
-package com.hedera.pbj.intergration.test;
+package com.hedera.pbj.integration.test;
 
 import com.google.protobuf.CodedOutputStream;
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/FieldsNonNullTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/FieldsNonNullTest.java
similarity index 98%
rename from pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/FieldsNonNullTest.java
rename to pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/FieldsNonNullTest.java
index 622bddd9..3308f488 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/FieldsNonNullTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/FieldsNonNullTest.java
@@ -1,4 +1,4 @@
-package com.hedera.pbj.intergration.test;
+package com.hedera.pbj.integration.test;
 
 import com.hedera.hapi.node.base.FeeSchedule;
 import com.hedera.hapi.node.base.TransactionFeeSchedule;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/HashEqualsTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/HashEqualsTest.java
similarity index 98%
rename from pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/HashEqualsTest.java
rename to pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/HashEqualsTest.java
index 46933674..34d0d06f 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/HashEqualsTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/HashEqualsTest.java
@@ -1,4 +1,4 @@
-package com.hedera.pbj.intergration.test;
+package com.hedera.pbj.integration.test;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
@@ -89,4 +89,4 @@ void differentObjectsWithNoDefaulHashCode4() {
 
         assertNotEquals(tst.hashCode(), tst2.hashCode());
     }
-}
\ No newline at end of file
+}
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/JsonCodecTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/JsonCodecTest.java
similarity index 99%
rename from pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/JsonCodecTest.java
rename to pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/JsonCodecTest.java
index df82df4b..3141a25f 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/JsonCodecTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/JsonCodecTest.java
@@ -1,4 +1,4 @@
-package com.hedera.pbj.intergration.test;
+package com.hedera.pbj.integration.test;
 
 import com.google.protobuf.ByteString;
 import com.google.protobuf.util.JsonFormat;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/MalformedMessageTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MalformedMessageTest.java
similarity index 98%
rename from pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/MalformedMessageTest.java
rename to pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MalformedMessageTest.java
index 6dd84130..b2829d99 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/MalformedMessageTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MalformedMessageTest.java
@@ -1,4 +1,4 @@
-package com.hedera.pbj.intergration.test;
+package com.hedera.pbj.integration.test;
 
 import static org.junit.jupiter.api.Assertions.assertThrows;
 
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/MaxDepthTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MaxDepthTest.java
similarity index 98%
rename from pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/MaxDepthTest.java
rename to pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MaxDepthTest.java
index 5bd03e88..55a2acd4 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/MaxDepthTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MaxDepthTest.java
@@ -1,4 +1,4 @@
-package com.hedera.pbj.intergration.test;
+package com.hedera.pbj.integration.test;
 
 import com.hedera.pbj.runtime.ParseException;
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/MaxSizeTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MaxSizeTest.java
similarity index 97%
rename from pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/MaxSizeTest.java
rename to pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MaxSizeTest.java
index dfa92aae..0182d28e 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/MaxSizeTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MaxSizeTest.java
@@ -1,4 +1,4 @@
-package com.hedera.pbj.intergration.test;
+package com.hedera.pbj.integration.test;
 
 import com.hedera.pbj.runtime.ParseException;
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/ParserNeverWrapsTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/ParserNeverWrapsTest.java
similarity index 99%
rename from pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/ParserNeverWrapsTest.java
rename to pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/ParserNeverWrapsTest.java
index 055529f8..a99f1967 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/ParserNeverWrapsTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/ParserNeverWrapsTest.java
@@ -1,4 +1,4 @@
-package com.hedera.pbj.intergration.test;
+package com.hedera.pbj.integration.test;
 
 import com.hedera.pbj.runtime.Codec;
 import com.hedera.pbj.runtime.io.WritableSequentialData;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/SampleFuzzTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/SampleFuzzTest.java
similarity index 98%
rename from pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/SampleFuzzTest.java
rename to pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/SampleFuzzTest.java
index 64b81ac5..4ac22e84 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/SampleFuzzTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/SampleFuzzTest.java
@@ -1,4 +1,4 @@
-package com.hedera.pbj.intergration.test;
+package com.hedera.pbj.integration.test;
 
 import com.hedera.hapi.node.base.tests.AccountIDTest;
 import com.hedera.hapi.node.base.tests.ContractIDTest;
@@ -199,7 +199,7 @@ void fuzzTest() {
 
     private Random buildRandom() {
         final boolean useRandomSeed
-                = Boolean.valueOf(System.getProperty("com.hedera.pbj.intergration.test.fuzz.useRandomSeed"));
+                = Boolean.valueOf(System.getProperty("com.hedera.pbj.integration.test.fuzz.useRandomSeed"));
         final long seed = useRandomSeed ? new Random().nextLong() : FIXED_RANDOM_SEED;
 
         System.out.println("Fuzz tests are configured to use a "
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/TestHashFunctions.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TestHashFunctions.java
similarity index 99%
rename from pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/TestHashFunctions.java
rename to pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TestHashFunctions.java
index 46751c57..4e2d8688 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/TestHashFunctions.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TestHashFunctions.java
@@ -1,4 +1,4 @@
-package com.hedera.pbj.intergration.test;
+package com.hedera.pbj.integration.test;
 
 import com.hedera.pbj.test.proto.pbj.Hasheval;
 import com.hedera.pbj.test.proto.pbj.TimestampTest;
@@ -99,4 +99,4 @@ private static int processForBetterDistribution(int val) {
         val += val << 30;
         return val;
     }
-}
\ No newline at end of file
+}
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/TimestampTestTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TimestampTestTest.java
similarity index 98%
rename from pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/TimestampTestTest.java
rename to pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TimestampTestTest.java
index d75fa417..c3309e5b 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/TimestampTestTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TimestampTestTest.java
@@ -1,4 +1,4 @@
-package com.hedera.pbj.intergration.test;
+package com.hedera.pbj.integration.test;
 
 import com.google.protobuf.CodedOutputStream;
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
@@ -101,4 +101,4 @@ public static Stream<NoToStringWrapper<TimestampTest>> createModelTestArguments(
 		return ARGUMENTS.stream().map(NoToStringWrapper::new);
 	}
 
-}
\ No newline at end of file
+}
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/TruncatedDataTests.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TruncatedDataTests.java
similarity index 99%
rename from pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/TruncatedDataTests.java
rename to pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TruncatedDataTests.java
index 71b81801..8c7f38eb 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/intergration/test/TruncatedDataTests.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TruncatedDataTests.java
@@ -1,4 +1,4 @@
-package com.hedera.pbj.intergration.test;
+package com.hedera.pbj.integration.test;
 
 import com.google.protobuf.InvalidProtocolBufferException;
 import com.hedera.pbj.runtime.ProtoParserTools;
@@ -335,4 +335,4 @@ static Stream<Integer> createTruncateTestArguments() {
     }
 
     public static final String testData = "0a190a1266696c65732e6665655363686564756c657312033131310a310a29636f6e7472616374732e707265636f6d70696c652e687473456e61626c65546f6b656e4372656174651204747275650a230a1c746f6b656e732e6d6178546f6b656e4e616d6555746638427974657312033130300a1f0a16746f6b656e732e73746f726552656c734f6e4469736b120566616c73650a260a2072617465732e696e7472616461794368616e67654c696d697450657263656e74120232350a230a1e7374616b696e672e72657761726442616c616e63655468726573686f6c641201300a2a0a24636f6e7472616374732e6d6178526566756e6450657263656e744f664761734c696d6974120232300a2d0a267374616b696e672e726577617264486973746f72792e6e756d53746f726564506572696f647312033336350a1a0a146163636f756e74732e73797374656d41646d696e120235300a280a21666565732e746f6b656e5472616e7366657255736167654d756c7469706c69657212033338300a1c0a146175746f4372656174696f6e2e656e61626c65641204747275650a1e0a18666565732e6d696e436f6e67657374696f6e506572696f64120236300a1a0a1366696c65732e65786368616e6765526174657312033131320a280a1a626f6f7473747261702e72617465732e6e657874457870697279120a343130323434343830300a1a0a146163636f756e74732e667265657a6541646d696e120235380a1e0a166865646572612e666972737455736572456e746974791204313030310a370a1f636f6e7472616374732e73746f72616765536c6f745072696365546965727312143074696c3130304d2c3230303074696c3435304d0a270a2174726163656162696c6974792e6d61784578706f727473506572436f6e73536563120231300a220a1c6163636f756e74732e73797374656d556e64656c65746541646d696e120236300a280a1f636f6e7472616374732e616c6c6f774175746f4173736f63696174696f6e73120566616c73650a320a2b6865646572612e7265636f726453747265616d2e6e756d4f66426c6f636b486173686573496e537461746512033235360a2e0a256865646572612e776f726b666c6f772e766572696669636174696f6e54696d656f75744d53120532303030300a1c0a146163636f756e74732e73746f72654f6e4469736b1204747275650a280a216865646572612e616c6c6f77616e6365732e6d61784163636f756e744c696d697412033130300a2b0a256865646572612e616c6c6f77616e6365732e6d61785472616e73616374696f6e4c696d6974120232300a2b0a25636f6e73656e7375732e6d6573736167652e6d6178466f6c6c6f77696e675265636f726473120235300a2a0a236865646572612e7472616e73616374696f6e2e6d617856616c69644475726174696f6e12033138300a490a0c76657273696f6e2e68617069123953656d616e74696356657273696f6e5b6d616a6f723d302c206d696e6f723d34302c2070617463683d302c207072653d2c206275696c643d5d0a240a1d6163636f756e74732e7374616b696e675265776172644163636f756e7412033830300a310a2c6175746f72656e65772e6d61784e756d6265724f66456e746974696573546f52656e65774f7244656c6574651201320a380a217374616b696e672e6d61784461696c795374616b655265776172645468506572481213393232333337323033363835343737353830370a2b0a1f636f6e7472616374732e7265666572656e6365536c6f744c69666574696d65120833313533363030300a2d0a226c65646765722e6175746f52656e6577506572696f642e6d696e4475726174696f6e1207323539323030300a4d0a1076657273696f6e2e7365727669636573123953656d616e74696356657273696f6e5b6d616a6f723d302c206d696e6f723d34302c2070617463683d302c207072653d2c206275696c643d5d0a3a0a31636f6e7472616374732e707265636f6d70696c652e61746f6d696343727970746f5472616e736665722e656e61626c6564120566616c73650a220a14656e7469746965732e6d61784c69666574696d65120a333135333630303030300a260a1d636f6e7472616374732e65766d2e76657273696f6e2e64796e616d6963120566616c73650a2b0a22636f6e7472616374732e7369646563617256616c69646174696f6e456e61626c6564120566616c73650a210a1a6163636f756e74732e6e6f64655265776172644163636f756e7412033830310a180a11636f6e7472616374732e636861696e496412033239350a270a216c65646765722e6368616e6765486973746f7269616e2e6d656d6f727953656373120232300a290a21636f6e73656e7375732e6d657373616
7652e6d61784279746573416c6c6f7765641204313032340a180a1166696c65732e61646472657373426f6f6b12033130310a200a1a6163636f756e74732e73797374656d44656c65746541646d696e120235390a380a30636f6e7472616374732e707265636f6d70696c652e6872634661636164652e6173736f63696174652e656e61626c65641204747275650a220a1b6163636f756e74732e6c6173745468726f74746c654578656d707412033130300a1e0a16746f6b656e732e6e6674732e617265456e61626c65641204747275650a1b0a10746f706963732e6d61784e756d6265721207313030303030300a200a1a6c65646765722e6e66745472616e73666572732e6d61784c656e120231300a2a0a25636f6e73656e7375732e6d6573736167652e6d6178507265636564696e675265636f7264731201330a190a117374616b696e672e6973456e61626c65641204747275650a260a1b746f6b656e732e6e6674732e6d6178416c6c6f7765644d696e74731207353030303030300a2f0a187374616b696e672e6d61785374616b6552657761726465641213353030303030303030303030303030303030300a2b0a1d626f6f7473747261702e72617465732e63757272656e74457870697279120a343130323434343830300a1e0a1766696c65732e7570677261646546696c654e756d62657212033135300a240a19636f6e7472616374732e64656661756c744c69666574696d651207373839303030300a260a217374616b696e672e666565732e6e6f646552657761726450657263656e746167651201300a200a19746f6b656e732e6d617853796d626f6c55746638427974657312033130300a250a1d736967732e657870616e6446726f6d496d6d757461626c6553746174651204747275650a170a127374616b696e672e726577617264526174651201300a2b0a1d626f6f7473747261702e73797374656d2e656e74697479457870697279120a313831323633373638360a1f0a196163636f756e74732e61646472657373426f6f6b41646d696e120235350a2b0a246865646572612e7265636f726453747265616d2e736964656361724d617853697a654d6212033235360a300a257363686564756c696e672e6d617845787069726174696f6e4675747572655365636f6e64731207353335363830300a2a0a21636f6e7472616374732e656e666f7263654372656174696f6e5468726f74746c65120566616c73650a1c0a14746f6b656e732e6d61785065724163636f756e741204313030300a1c0a1566696c65732e686170695065726d697373696f6e7312033132320a2d0a286865646572612e7265636f726453747265616d2e7369676e617475726546696c6556657273696f6e1201360a200a19746f6b656e732e6e6674732e6d6178517565727952616e676512033130300a1d0a176c65646765722e7472616e73666572732e6d61784c656e120231300a230a1a6163636f756e74732e626c6f636b6c6973742e656e61626c6564120566616c73650a200a1b72617465732e6d69646e69676874436865636b496e74657276616c1201310a2f0a2a74726163656162696c6974792e6d696e46726565546f557365644761735468726f74746c65526174696f1201390a340a266865646572612e7265636f726453747265616d2e73747265616d46696c6550726f6475636572120a636f6e63757272656e740a220a1c746f6b656e732e6e6674732e6d6178426174636853697a6557697065120231300a330a2b6865646572612e7265636f726453747265616d2e636f6d707265737346696c65734f6e4372656174696f6e1204747275650a1a0a127374616b696e672e706572696f644d696e731204313434300a240a1b6175746f72656e65772e6772616e744672656552656e6577616c73120566616c73650a2b0a1e636f6e7472616374732e6d61784b7650616972732e61676772656761746512093530303030303030300a220a1c746f6b656e732e6e6674732e6d6178426174636853697a654d696e74120231300a240a1d7374616b696e672e73756d4f66436f6e73656e7375735765696768747312033530300a210a1b746f6b656e732e6d6178437573746f6d46656573416c6c6f776564120231300a1c0a146c617a794372656174696f6e2e656e61626c65641204747275650a1b0a10746f6b656e732e6d61784e756d6265721207313030303030300a1d0a126163636f756e74732e6d61784e756d6265721207353030303030300a240a1c636f6e7472616374732e6974656d697a6553746f72616765466565731204747275650a230a1b6865646572612e616c6c6f77616e6365732e6973456e61626c65641204747275650a380a23626f6f7473747261702e6665655363686564756c65734a736f6e2e7265736f7572636512116665655363686564756c65732e6a
736f6e0a2b0a246c65646765722e7265636f7264732e6d6178517565727961626c6542794163636f756e7412033138300a220a16636f6e7472616374732e6d6178476173506572536563120831353030303030300a300a28636f6e7472616374732e707265636f6d70696c652e6578706f72745265636f7264526573756c74731204747275650a1b0a156175746f52656e65772e746172676574547970657312025b5d0a270a22636f6e7472616374732e6d61784e756d5769746848617069536967734163636573731201300a280a20636f6e7472616374732e7468726f74746c652e7468726f74746c6542794761731204747275650a230a17746f6b656e732e6d617841676772656761746552656c73120831303030303030300a260a20626f6f7473747261702e72617465732e63757272656e7443656e744571756976120231320a290a236865646572612e7472616e73616374696f6e2e6d696e56616c69644475726174696f6e120231350a510a12636f6e7472616374732e7369646563617273123b5b434f4e54524143545f53544154455f4348414e47452c20434f4e54524143545f414354494f4e2c20434f4e54524143545f42595445434f44455d0a1b0a156c65646765722e66756e64696e674163636f756e74120239380a230a1a7363686564756c696e672e6c6f6e675465726d456e61626c6564120566616c73650a220a1a6c65646765722e6d61784175746f4173736f63696174696f6e731204353030300a1e0a16636f6e7472616374";
-}
\ No newline at end of file
+}

From 688c981750418e26889ffda9a88648ae0b56610b Mon Sep 17 00:00:00 2001
From: Joseph Sinclair <121976561+jsync-swirlds@users.noreply.github.com>
Date: Thu, 9 Jan 2025 14:31:15 -0700
Subject: [PATCH 2/3] Change to SPDX identifiers

* Replaced copyright notices with SPDX license identifiers
* Added SPDX identifiers where needed
  * In particular, the integration tests had no headers, so headers were
    added to all of those files.

Signed-off-by: Joseph Sinclair <121976561+jsync-swirlds@users.noreply.github.com>
---
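For reference, the standardized header is a single SPDX comment line at the top of each file, written in that file's comment syntax (`//` for Java, Kotlin, and proto sources; `#` for properties files). A hypothetical .proto example of the header placement:

    // SPDX-License-Identifier: Apache-2.0
    syntax = "proto3";

    package com.example.sketch;  // hypothetical package, included only to complete the fragment

This matches the one-line identifier the spotless convention plugins now enforce.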
 pbj-core/build.gradle.kts                     | 17 +----------------
 pbj-core/gradle.properties                    |  1 +
 pbj-core/gradle/modules.properties            |  1 +
 pbj-core/gradle/plugins/build.gradle.kts      | 17 +----------------
 ...om.hedera.pbj.aggregate-reports.gradle.kts | 17 +----------------
 .../com.hedera.pbj.conventions.gradle.kts     | 17 +----------------
 .../com.hedera.pbj.gradle-plugin.gradle.kts   | 17 +----------------
 .../kotlin/com.hedera.pbj.helidon.gradle.kts  | 17 +----------------
 .../com.hedera.pbj.maven-publish.gradle.kts   | 17 +----------------
 .../kotlin/com.hedera.pbj.protoc.gradle.kts   | 17 +----------------
 .../com.hedera.pbj.repositories.gradle.kts    | 17 +----------------
 .../kotlin/com.hedera.pbj.root.gradle.kts     | 17 +----------------
 .../kotlin/com.hedera.pbj.runtime.gradle.kts  | 17 +----------------
 pbj-core/pbj-compiler/build.gradle.kts        | 17 +----------------
 .../DefaultPbjSourceDirectorySet.java         | 17 +----------------
 .../pbj/compiler/PbjCompilerPlugin.java       | 17 +----------------
 .../hedera/pbj/compiler/PbjCompilerTask.java  | 17 +----------------
 .../pbj/compiler/PbjSourceDirectorySet.java   | 17 +----------------
 .../com/hedera/pbj/compiler/impl/Common.java  |  1 +
 .../compiler/impl/ContextualLookupHelper.java |  1 +
 .../com/hedera/pbj/compiler/impl/Field.java   |  1 +
 .../impl/FileAndPackageNamesConfig.java       |  1 +
 .../hedera/pbj/compiler/impl/FileType.java    |  1 +
 .../pbj/compiler/impl/LookupHelper.java       | 17 +----------------
 .../hedera/pbj/compiler/impl/MapField.java    |  1 +
 .../hedera/pbj/compiler/impl/OneOfField.java  |  1 +
 .../compiler/impl/PbjCompilerException.java   |  1 +
 .../hedera/pbj/compiler/impl/SingleField.java |  1 +
 .../impl/generators/EnumGenerator.java        |  1 +
 .../compiler/impl/generators/Generator.java   |  1 +
 .../impl/generators/ModelGenerator.java       |  1 +
 .../impl/generators/SchemaGenerator.java      |  1 +
 .../impl/generators/TestGenerator.java        |  1 +
 .../generators/json/JsonCodecGenerator.java   |  1 +
 .../json/JsonCodecParseMethodGenerator.java   |  1 +
 .../json/JsonCodecWriteMethodGenerator.java   |  1 +
 .../CodecFastEqualsMethodGenerator.java       |  1 +
 .../generators/protobuf/CodecGenerator.java   |  1 +
 .../CodecMeasureDataMethodGenerator.java      |  1 +
 .../CodecMeasureRecordMethodGenerator.java    |  1 +
 .../protobuf/CodecParseMethodGenerator.java   |  1 +
 .../protobuf/CodecWriteMethodGenerator.java   |  1 +
 .../hedera/pbj/compiler/impl/CommonTest.java  |  3 ++-
 .../pbj/compiler/impl/LookupHelperTest.java   |  3 ++-
 .../pbj-grpc-helidon-config/build.gradle.kts  | 17 +----------------
 .../helidon/config/PbjConfigBlueprint.java    | 17 +----------------
 .../src/main/java/module-info.java            |  1 +
 pbj-core/pbj-grpc-helidon/build.gradle.kts    | 17 +----------------
 .../pbj/grpc/helidon/DeadlineDetector.java    | 17 +----------------
 .../hedera/pbj/grpc/helidon/GrpcHeaders.java  | 17 +----------------
 .../pbj/grpc/helidon/PbjMethodRoute.java      | 17 +----------------
 .../helidon/PbjProtocolConfigProvider.java    | 17 +----------------
 .../pbj/grpc/helidon/PbjProtocolHandler.java  | 17 +----------------
 .../pbj/grpc/helidon/PbjProtocolProvider.java | 17 +----------------
 .../pbj/grpc/helidon/PbjProtocolSelector.java | 17 +----------------
 .../com/hedera/pbj/grpc/helidon/PbjRoute.java | 17 +----------------
 .../hedera/pbj/grpc/helidon/PbjRouting.java   | 17 +----------------
 .../pbj/grpc/helidon/PbjServiceRoute.java     | 17 +----------------
 .../grpc/helidon/RouteNotFoundHandler.java    |  1 +
 .../hedera/pbj/grpc/helidon/package-info.java |  1 +
 .../src/main/java/module-info.java            |  1 +
 .../pbj/grpc/helidon/GreeterService.java      | 17 +----------------
 .../pbj/grpc/helidon/GreeterServiceImpl.java  | 19 ++-----------------
 .../grpc/helidon/PbjProtocolHandlerTest.java  | 17 +----------------
 .../com/hedera/pbj/grpc/helidon/PbjTest.java  | 17 +----------------
 .../test/proto/greeter/greeter_service.proto  |  1 +
 .../src/test/proto/greeter/hello_reply.proto  |  1 +
 .../test/proto/greeter/hello_request.proto    |  3 ++-
 pbj-core/pbj-runtime/build.gradle.kts         | 17 +----------------
 .../pbj/runtime/io/BufferedDataGetBytes.java  |  1 +
 .../pbj/runtime/io/ByteBufferGetByte.java     |  1 +
 .../pbj/runtime/io/ByteBufferGetBytes.java    |  1 +
 .../pbj/runtime/io/ByteOrderEquals.java       |  1 +
 .../hedera/pbj/runtime/io/BytesGetLong.java   |  1 +
 .../io/WritableStreamingDataBench.java        |  1 +
 .../runtime/io/WriteBufferedDataBench.java    |  1 +
 .../pbj/runtime/io/WriteBytesBench.java       |  1 +
 .../java/com/hedera/pbj/runtime/Codec.java    |  1 +
 .../hedera/pbj/runtime/ComparableOneOf.java   |  1 +
 .../pbj/runtime/EnumWithProtoMetadata.java    |  1 +
 .../hedera/pbj/runtime/FieldDefinition.java   |  1 +
 .../com/hedera/pbj/runtime/FieldType.java     |  1 +
 .../com/hedera/pbj/runtime/JsonCodec.java     |  1 +
 .../com/hedera/pbj/runtime/JsonTools.java     |  1 +
 .../runtime/MalformedProtobufException.java   |  1 +
 .../java/com/hedera/pbj/runtime/OneOf.java    |  1 +
 .../hedera/pbj/runtime/ParseException.java    |  1 +
 .../java/com/hedera/pbj/runtime/PbjMap.java   |  1 +
 .../hedera/pbj/runtime/ProtoConstants.java    |  1 +
 .../hedera/pbj/runtime/ProtoParserTools.java  |  1 +
 .../hedera/pbj/runtime/ProtoTestTools.java    |  1 +
 .../com/hedera/pbj/runtime/ProtoWriter.java   |  1 +
 .../hedera/pbj/runtime/ProtoWriterTools.java  |  1 +
 .../pbj/runtime/RpcMethodDefinition.java      |  1 +
 .../pbj/runtime/RpcServiceDefinition.java     |  1 +
 .../java/com/hedera/pbj/runtime/Schema.java   |  1 +
 .../pbj/runtime/UncheckedParseException.java  |  1 +
 .../pbj/runtime/UnknownFieldException.java    |  1 +
 .../com/hedera/pbj/runtime/Utf8Tools.java     | 17 +----------------
 .../pbj/runtime/grpc/GrpcException.java       | 17 +----------------
 .../hedera/pbj/runtime/grpc/GrpcStatus.java   | 17 +----------------
 .../com/hedera/pbj/runtime/grpc/Pipeline.java | 17 +----------------
 .../hedera/pbj/runtime/grpc/Pipelines.java    | 17 +----------------
 .../pbj/runtime/grpc/ServiceInterface.java    | 17 +----------------
 .../pbj/runtime/io/DataEncodingException.java |  1 +
 .../runtime/io/ReadableSequentialData.java    |  1 +
 .../hedera/pbj/runtime/io/SequentialData.java |  1 +
 .../hedera/pbj/runtime/io/UnsafeUtils.java    |  1 +
 .../runtime/io/WritableSequentialData.java    | 17 +----------------
 .../pbj/runtime/io/buffer/BufferedData.java   |  1 +
 .../io/buffer/BufferedSequentialData.java     |  1 +
 .../io/buffer/ByteArrayBufferedData.java      | 17 +----------------
 .../hedera/pbj/runtime/io/buffer/Bytes.java   |  1 +
 .../runtime/io/buffer/DirectBufferedData.java |  1 +
 .../runtime/io/buffer/RandomAccessData.java   |  1 +
 .../buffer/RandomAccessSequenceAdapter.java   |  1 +
 .../pbj/runtime/io/stream/EOFException.java   |  1 +
 .../io/stream/ReadableStreamingData.java      |  1 +
 .../io/stream/WritableStreamingData.java      |  1 +
 .../CharBufferToWritableSequentialData.java   |  1 +
 .../pbj/runtime/test/NoToStringWrapper.java   |  3 ++-
 .../com/hedera/pbj/runtime/test/Sneaky.java   |  1 +
 .../test/UncheckedThrowingFunction.java       |  1 +
 .../src/main/java/module-info.java            |  1 +
 .../pbj/runtime/ProtoParserToolsTest.java     |  3 ++-
 .../pbj/runtime/ProtoWriterToolsTest.java     |  3 ++-
 .../com/hedera/pbj/runtime/Utf8ToolsTest.java |  1 +
 .../pbj/runtime/grpc/GrpcExceptionTest.java   |  1 +
 .../pbj/runtime/grpc/GrpcStatusTest.java      |  1 +
 .../pbj/runtime/grpc/PipelinesTest.java       | 17 +----------------
 .../com/hedera/pbj/runtime/io/DataTest.java   |  1 +
 .../io/ReadableSequentialDataTest.java        |  1 +
 .../io/ReadableSequentialTestBase.java        |  1 +
 .../pbj/runtime/io/ReadableTestBase.java      |  1 +
 .../pbj/runtime/io/SequentialDataTest.java    |  1 +
 .../pbj/runtime/io/SequentialTestBase.java    |  1 +
 .../pbj/runtime/io/UnsafeUtilsTest.java       |  1 +
 .../io/WritableSequentialDataTest.java        |  1 +
 .../pbj/runtime/io/WritableTestBase.java      |  1 +
 .../runtime/io/buffer/BufferedDataTest.java   |  1 +
 .../io/buffer/BufferedDataTestBase.java       |  1 +
 .../io/buffer/ByteArrayBufferedDataTest.java  |  1 +
 .../pbj/runtime/io/buffer/BytesTest.java      |  1 +
 .../io/buffer/DirectBufferedDataTest.java     |  1 +
 .../io/buffer/RandomAccessTestBase.java       |  1 +
 .../buffer/StubbedRandomAccessDataTest.java   |  1 +
 .../io/stream/ReadableStreamingDataTest.java  |  1 +
 .../io/stream/WritableStreamingDataTest.java  |  1 +
 .../test/java/tests/ComparableOneOfTest.java  |  1 +
 .../test/java/tests/FieldDefinitionTest.java  |  1 +
 .../src/test/java/tests/FuzzTest.java         |  1 +
 .../src/test/java/tests/NegativeTest.java     |  1 +
 .../src/test/java/tests/OneOfTest.java        |  1 +
 .../test/java/tests/ProtoConstantsTest.java   |  1 +
 .../pbj-runtime/src/test/proto/omnibus.proto  |  1 +
 pbj-core/settings.gradle.kts                  | 17 +----------------
 pbj-integration-tests/build.gradle.kts        |  1 +
 pbj-integration-tests/gradle.properties       |  1 +
 pbj-integration-tests/settings.gradle.kts     |  1 +
 .../jmh/ComplexEqualsHashCodeBench.java       |  1 +
 .../integration/jmh/EqualsHashCodeBench.java  |  1 +
 .../hedera/pbj/integration/jmh/HashBench.java |  1 +
 .../hedera/pbj/integration/jmh/JsonBench.java |  1 +
 .../integration/jmh/ProtobufObjectBench.java  |  1 +
 .../pbj/integration/jmh/VarIntBench.java      |  1 +
 .../pbj/integration/AccountDetailsPbj.java    |  1 +
 .../pbj/integration/AccountDetailsWriter.java |  1 +
 .../pbj/integration/EverythingTestData.java   |  1 +
 .../integration/EverythingWriterPerfTest.java |  1 +
 .../NonSynchronizedByteArrayInputStream.java  |  1 +
 .../NonSynchronizedByteArrayOutputStream.java |  1 +
 .../java/com/hedera/pbj/integration/Test.java |  1 +
 .../hedera/pbj/integration/fuzz/Elapsed.java  |  1 +
 .../hedera/pbj/integration/fuzz/FuzzTest.java |  1 +
 .../integration/fuzz/FuzzTestException.java   |  1 +
 .../pbj/integration/fuzz/FuzzTestResult.java  |  1 +
 .../hedera/pbj/integration/fuzz/FuzzUtil.java |  1 +
 .../pbj/integration/fuzz/SingleFuzzTest.java  |  1 +
 .../fuzz/SingleFuzzTestResult.java            |  1 +
 .../src/main/proto/bytes.proto                |  3 ++-
 .../src/main/proto/comparable.proto           |  1 +
 .../src/main/proto/everything.proto           |  3 ++-
 .../main/proto/extendedUtf8StingTest.proto    |  3 ++-
 .../src/main/proto/hasheval.proto             |  1 +
 .../src/main/proto/map.proto                  |  3 ++-
 .../src/main/proto/message.proto              |  3 ++-
 .../src/main/proto/timestampTest.proto        |  1 +
 .../src/main/proto/timestampTest2.proto       |  1 +
 .../test/CompareToNegativeTest.java           |  1 +
 .../pbj/integration/test/CompareToTest.java   |  1 +
 .../ExtendedUtf8MessageWithStringTest.java    |  1 +
 .../integration/test/FieldsNonNullTest.java   |  1 +
 .../pbj/integration/test/HashEqualsTest.java  |  1 +
 .../pbj/integration/test/JsonCodecTest.java   |  1 +
 .../test/MalformedMessageTest.java            |  1 +
 .../pbj/integration/test/MaxDepthTest.java    |  1 +
 .../pbj/integration/test/MaxSizeTest.java     |  1 +
 .../test/ParserNeverWrapsTest.java            |  1 +
 .../pbj/integration/test/SampleFuzzTest.java  |  1 +
 .../integration/test/TestHashFunctions.java   |  1 +
 .../integration/test/TimestampTestTest.java   |  1 +
 .../integration/test/TruncatedDataTests.java  |  1 +
 .../non_compilable_comparable_oneOf.proto     |  1 +
 .../non_compilable_comparable_repeated.proto  |  1 +
 .../non_compilable_comparable_sub_obj.proto   |  1 +
 205 files changed, 217 insertions(+), 732 deletions(-)

diff --git a/pbj-core/build.gradle.kts b/pbj-core/build.gradle.kts
index bd4c357a..6df59b01 100644
--- a/pbj-core/build.gradle.kts
+++ b/pbj-core/build.gradle.kts
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2022-2023 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 plugins {
     id("com.hedera.pbj.root")
 }
diff --git a/pbj-core/gradle.properties b/pbj-core/gradle.properties
index 88c7182e..499f34dd 100644
--- a/pbj-core/gradle.properties
+++ b/pbj-core/gradle.properties
@@ -1,3 +1,4 @@
+# SPDX-License-Identifier: Apache-2.0
 # Version number
 version=0.9.13-SNAPSHOT
 
diff --git a/pbj-core/gradle/modules.properties b/pbj-core/gradle/modules.properties
index 0bedbc1a..26b49448 100644
--- a/pbj-core/gradle/modules.properties
+++ b/pbj-core/gradle/modules.properties
@@ -1,3 +1,4 @@
+# SPDX-License-Identifier: Apache-2.0
 com.google.protobuf=com.google.protobuf:protobuf-java
 com.google.protobuf.util=com.google.protobuf:protobuf-java-util
 io.grpc.netty=io.grpc:grpc-netty
diff --git a/pbj-core/gradle/plugins/build.gradle.kts b/pbj-core/gradle/plugins/build.gradle.kts
index 6041c61c..8d0f651c 100644
--- a/pbj-core/gradle/plugins/build.gradle.kts
+++ b/pbj-core/gradle/plugins/build.gradle.kts
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2023-2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 plugins { `kotlin-dsl` }
 
 repositories { gradlePluginPortal() }
diff --git a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.aggregate-reports.gradle.kts b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.aggregate-reports.gradle.kts
index 24347792..aa723c1a 100644
--- a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.aggregate-reports.gradle.kts
+++ b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.aggregate-reports.gradle.kts
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2022-2023 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 import java.io.BufferedOutputStream
 import net.swiftzer.semver.SemVer
 
diff --git a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.conventions.gradle.kts b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.conventions.gradle.kts
index 6487e31e..5f90cbd9 100644
--- a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.conventions.gradle.kts
+++ b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.conventions.gradle.kts
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2022-2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 import com.adarshr.gradle.testlogger.theme.ThemeType
 
 plugins {
diff --git a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.gradle-plugin.gradle.kts b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.gradle-plugin.gradle.kts
index 34218a22..eca30e2f 100644
--- a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.gradle-plugin.gradle.kts
+++ b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.gradle-plugin.gradle.kts
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2022-2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 import com.autonomousapps.DependencyAnalysisSubExtension
 
 plugins {
diff --git a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.helidon.gradle.kts b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.helidon.gradle.kts
index 6cad4fd9..82ae5542 100644
--- a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.helidon.gradle.kts
+++ b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.helidon.gradle.kts
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2022-2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 plugins {
     id("java-library")
     id("com.hedera.pbj.conventions")
diff --git a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.maven-publish.gradle.kts b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.maven-publish.gradle.kts
index 5cacbdc2..296eb4b1 100644
--- a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.maven-publish.gradle.kts
+++ b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.maven-publish.gradle.kts
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2022-2023 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 plugins {
     id("java")
     id("maven-publish")
diff --git a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.protoc.gradle.kts b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.protoc.gradle.kts
index b7893882..b17ab19e 100644
--- a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.protoc.gradle.kts
+++ b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.protoc.gradle.kts
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 plugins { id("com.google.protobuf") }
 
 protobuf {
diff --git a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.repositories.gradle.kts b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.repositories.gradle.kts
index ac4586a4..2e4e0bca 100644
--- a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.repositories.gradle.kts
+++ b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.repositories.gradle.kts
@@ -1,17 +1,2 @@
-/*
- * Copyright (C) 2022-2023 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 repositories { mavenCentral() }
diff --git a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.root.gradle.kts b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.root.gradle.kts
index 4884da2f..fcf6b476 100644
--- a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.root.gradle.kts
+++ b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.root.gradle.kts
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2022-2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 import io.github.gradlenexus.publishplugin.CloseNexusStagingRepository
 
 plugins {
diff --git a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.runtime.gradle.kts b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.runtime.gradle.kts
index aaba1944..39726888 100644
--- a/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.runtime.gradle.kts
+++ b/pbj-core/gradle/plugins/src/main/kotlin/com.hedera.pbj.runtime.gradle.kts
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2022-2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 plugins {
     id("java-library")
     id("com.hedera.pbj.conventions")
diff --git a/pbj-core/pbj-compiler/build.gradle.kts b/pbj-core/pbj-compiler/build.gradle.kts
index 7ecc2d8a..cb6d26aa 100644
--- a/pbj-core/pbj-compiler/build.gradle.kts
+++ b/pbj-core/pbj-compiler/build.gradle.kts
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2022-2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 plugins { id("com.hedera.pbj.gradle-plugin") }
 
 // This project does not have a module-info.java, as Gradle does not support plugins that are
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/DefaultPbjSourceDirectorySet.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/DefaultPbjSourceDirectorySet.java
index b7807983..79c236e3 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/DefaultPbjSourceDirectorySet.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/DefaultPbjSourceDirectorySet.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2023 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler;
 
 import javax.inject.Inject;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjCompilerPlugin.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjCompilerPlugin.java
index caa48d2a..d7466202 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjCompilerPlugin.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjCompilerPlugin.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2023 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler;
 
 import javax.inject.Inject;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjCompilerTask.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjCompilerTask.java
index 47a684af..fdde432b 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjCompilerTask.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjCompilerTask.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2023 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler;
 
 import com.hedera.pbj.compiler.impl.ContextualLookupHelper;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjSourceDirectorySet.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjSourceDirectorySet.java
index 52cc3d77..73585fb9 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjSourceDirectorySet.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjSourceDirectorySet.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2023 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler;
 
 import org.gradle.api.file.SourceDirectorySet;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Common.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Common.java
index 23006654..35ef8b8d 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Common.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Common.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl;
 
 import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/ContextualLookupHelper.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/ContextualLookupHelper.java
index 849f53fd..87903f75 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/ContextualLookupHelper.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/ContextualLookupHelper.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl;
 
 import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser.*;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Field.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Field.java
index 1dcc3ec8..bc2c86d1 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Field.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Field.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl;
 
 import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/FileAndPackageNamesConfig.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/FileAndPackageNamesConfig.java
index 331cb53c..50ea5653 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/FileAndPackageNamesConfig.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/FileAndPackageNamesConfig.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl;
 
 /**
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/FileType.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/FileType.java
index 65682e8b..5e6ce0db 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/FileType.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/FileType.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl;
 
 /**
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/LookupHelper.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/LookupHelper.java
index 8fa666be..ff8b4b92 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/LookupHelper.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/LookupHelper.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2023 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl;
 
 import static java.util.Collections.emptyList;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/MapField.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/MapField.java
index e092a918..342119ff 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/MapField.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/MapField.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl;
 
 import java.util.Set;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/OneOfField.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/OneOfField.java
index 4a2d8a51..92389d61 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/OneOfField.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/OneOfField.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl;
 
 import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/PbjCompilerException.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/PbjCompilerException.java
index 1fce888f..ddcca5a6 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/PbjCompilerException.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/PbjCompilerException.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl;
 
 /**
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/SingleField.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/SingleField.java
index dbde204a..54f6aa7b 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/SingleField.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/SingleField.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl;
 
 import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/EnumGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/EnumGenerator.java
index c35a41db..6d14325d 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/EnumGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/EnumGenerator.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl.generators;
 
 import com.hedera.pbj.compiler.impl.ContextualLookupHelper;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/Generator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/Generator.java
index afc16eba..e8ef58e0 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/Generator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/Generator.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl.generators;
 
 import com.hedera.pbj.compiler.impl.ContextualLookupHelper;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/ModelGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/ModelGenerator.java
index 8eb8d95e..912ecc2e 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/ModelGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/ModelGenerator.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl.generators;
 
 import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/SchemaGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/SchemaGenerator.java
index 5f9ae52d..ed61adb6 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/SchemaGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/SchemaGenerator.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl.generators;
 
 import com.hedera.pbj.compiler.impl.*;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/TestGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/TestGenerator.java
index 46c6f8e1..24b5e606 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/TestGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/TestGenerator.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl.generators;
 
 import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecGenerator.java
index 74fbda6f..b572ae0f 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecGenerator.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl.generators.json;
 
 import com.hedera.pbj.compiler.impl.*;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecParseMethodGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecParseMethodGenerator.java
index dfcde8c1..f28afe99 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecParseMethodGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecParseMethodGenerator.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl.generators.json;
 
 import com.hedera.pbj.compiler.impl.Common;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecWriteMethodGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecWriteMethodGenerator.java
index d8d6bf31..22a9eb8a 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecWriteMethodGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecWriteMethodGenerator.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl.generators.json;
 
 import com.hedera.pbj.compiler.impl.Common;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecFastEqualsMethodGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecFastEqualsMethodGenerator.java
index 376c9026..2c4312d3 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecFastEqualsMethodGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecFastEqualsMethodGenerator.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl.generators.protobuf;
 
 import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecGenerator.java
index 5e0fb10d..abb55a19 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecGenerator.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl.generators.protobuf;
 
 import com.hedera.pbj.compiler.impl.*;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureDataMethodGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureDataMethodGenerator.java
index 3c84b83f..011a05f1 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureDataMethodGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureDataMethodGenerator.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl.generators.protobuf;
 
 import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureRecordMethodGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureRecordMethodGenerator.java
index 622aaaa6..a263b38d 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureRecordMethodGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureRecordMethodGenerator.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl.generators.protobuf;
 
 import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecParseMethodGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecParseMethodGenerator.java
index 284d8c00..4f69601b 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecParseMethodGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecParseMethodGenerator.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl.generators.protobuf;
 
 import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT;
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecWriteMethodGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecWriteMethodGenerator.java
index 415e0a94..685f95d2 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecWriteMethodGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecWriteMethodGenerator.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl.generators.protobuf;
 
 import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT;
diff --git a/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/CommonTest.java b/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/CommonTest.java
index c03343eb..17788d1a 100644
--- a/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/CommonTest.java
+++ b/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/CommonTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl;
 
 import org.junit.jupiter.api.DisplayName;
@@ -175,4 +176,4 @@ void oneParamsThrowsAndReturnsWithMore3() {
                 "@throws DataAccessException if an I/O error occurs";
         assertEquals(expected, result);
     }
-}
\ No newline at end of file
+}
diff --git a/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/LookupHelperTest.java b/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/LookupHelperTest.java
index c15910eb..9004d1f1 100644
--- a/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/LookupHelperTest.java
+++ b/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/LookupHelperTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl;
 
 import static com.hedera.pbj.compiler.impl.LookupHelper.extractComparableFields;
@@ -128,4 +129,4 @@ private static MessageElementContext createMessageElement(final String fieldName
     }
 
 
-}
\ No newline at end of file
+}
diff --git a/pbj-core/pbj-grpc-helidon-config/build.gradle.kts b/pbj-core/pbj-grpc-helidon-config/build.gradle.kts
index 520efc6b..bfb9cccd 100644
--- a/pbj-core/pbj-grpc-helidon-config/build.gradle.kts
+++ b/pbj-core/pbj-grpc-helidon-config/build.gradle.kts
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2022-2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 plugins { id("com.hedera.pbj.helidon") }
 
 mainModuleInfo {
diff --git a/pbj-core/pbj-grpc-helidon-config/src/main/java/com/hedera/pbj/grpc/helidon/config/PbjConfigBlueprint.java b/pbj-core/pbj-grpc-helidon-config/src/main/java/com/hedera/pbj/grpc/helidon/config/PbjConfigBlueprint.java
index 0781dbe6..ba14b2cb 100644
--- a/pbj-core/pbj-grpc-helidon-config/src/main/java/com/hedera/pbj/grpc/helidon/config/PbjConfigBlueprint.java
+++ b/pbj-core/pbj-grpc-helidon-config/src/main/java/com/hedera/pbj/grpc/helidon/config/PbjConfigBlueprint.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.grpc.helidon.config;
 
 import io.helidon.builder.api.Option;
diff --git a/pbj-core/pbj-grpc-helidon-config/src/main/java/module-info.java b/pbj-core/pbj-grpc-helidon-config/src/main/java/module-info.java
index 1ad9b4fe..ba14277b 100644
--- a/pbj-core/pbj-grpc-helidon-config/src/main/java/module-info.java
+++ b/pbj-core/pbj-grpc-helidon-config/src/main/java/module-info.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 import io.helidon.common.features.api.Feature;
 import io.helidon.common.features.api.HelidonFlavor;
 import io.helidon.common.features.api.Preview;
diff --git a/pbj-core/pbj-grpc-helidon/build.gradle.kts b/pbj-core/pbj-grpc-helidon/build.gradle.kts
index 596bc589..28d57102 100644
--- a/pbj-core/pbj-grpc-helidon/build.gradle.kts
+++ b/pbj-core/pbj-grpc-helidon/build.gradle.kts
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2022-2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 plugins {
     id("com.hedera.pbj.helidon")
     id("com.hedera.pbj.protoc") // protobuf plugin is only used for tests
diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/DeadlineDetector.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/DeadlineDetector.java
index d6fa4489..4cd3f62c 100644
--- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/DeadlineDetector.java
+++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/DeadlineDetector.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.grpc.helidon;
 
 import com.hedera.pbj.runtime.grpc.GrpcStatus;
diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/GrpcHeaders.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/GrpcHeaders.java
index ba02c620..2e10573b 100644
--- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/GrpcHeaders.java
+++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/GrpcHeaders.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.grpc.helidon;
 
 import static com.hedera.pbj.runtime.grpc.ServiceInterface.RequestOptions.APPLICATION_GRPC;
diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjMethodRoute.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjMethodRoute.java
index 72b4e210..53620d61 100644
--- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjMethodRoute.java
+++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjMethodRoute.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.grpc.helidon;
 
 import static java.util.Objects.requireNonNull;
diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolConfigProvider.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolConfigProvider.java
index d8403303..6181721d 100644
--- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolConfigProvider.java
+++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolConfigProvider.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2023-2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.grpc.helidon;
 
 import com.hedera.pbj.grpc.helidon.config.PbjConfig;
diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandler.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandler.java
index 8bd00bd0..61aed6e5 100644
--- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandler.java
+++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandler.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.grpc.helidon;
 
 import static com.hedera.pbj.grpc.helidon.GrpcHeaders.APPLICATION_GRPC_PROTO_TYPE;
diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolProvider.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolProvider.java
index 4f71583f..7d5df767 100644
--- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolProvider.java
+++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolProvider.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.grpc.helidon;
 
 import com.hedera.pbj.grpc.helidon.config.PbjConfig;
diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolSelector.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolSelector.java
index 7d5ba3ed..338b5f3b 100644
--- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolSelector.java
+++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolSelector.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.grpc.helidon;
 
 import static java.util.Objects.requireNonNull;
diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjRoute.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjRoute.java
index e17482d9..bff1df63 100644
--- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjRoute.java
+++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjRoute.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.grpc.helidon;
 
 import com.hedera.pbj.runtime.grpc.ServiceInterface;
diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjRouting.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjRouting.java
index 32f12930..4b22c294 100644
--- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjRouting.java
+++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjRouting.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.grpc.helidon;
 
 import com.hedera.pbj.runtime.grpc.ServiceInterface;
diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjServiceRoute.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjServiceRoute.java
index 819a89a3..6e618e12 100644
--- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjServiceRoute.java
+++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjServiceRoute.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.grpc.helidon;
 
 import static java.util.Objects.requireNonNull;
diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/RouteNotFoundHandler.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/RouteNotFoundHandler.java
index 0694adfa..d3075e58 100644
--- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/RouteNotFoundHandler.java
+++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/RouteNotFoundHandler.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.grpc.helidon;
 
 import edu.umd.cs.findbugs.annotations.NonNull;
diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/package-info.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/package-info.java
index c850fda3..8810a986 100644
--- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/package-info.java
+++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/package-info.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 /**
  * An implementation of gRPC for the Helidon webserver based on PBJ.
  *
diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/module-info.java b/pbj-core/pbj-grpc-helidon/src/main/java/module-info.java
index bbf8121a..08651b9a 100644
--- a/pbj-core/pbj-grpc-helidon/src/main/java/module-info.java
+++ b/pbj-core/pbj-grpc-helidon/src/main/java/module-info.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 import io.helidon.common.features.api.Feature;
 import io.helidon.common.features.api.HelidonFlavor;
 import io.helidon.common.features.api.Preview;
diff --git a/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/GreeterService.java b/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/GreeterService.java
index 7cd7d1a0..fcfd640c 100644
--- a/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/GreeterService.java
+++ b/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/GreeterService.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.grpc.helidon;
 
 import com.google.protobuf.InvalidProtocolBufferException;
diff --git a/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/GreeterServiceImpl.java b/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/GreeterServiceImpl.java
index 49b8733e..a1b653ee 100644
--- a/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/GreeterServiceImpl.java
+++ b/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/GreeterServiceImpl.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.grpc.helidon;
 
 import com.hedera.pbj.runtime.grpc.GrpcException;
@@ -117,4 +102,4 @@ public void onComplete() {
             }
         };
     }
-}
\ No newline at end of file
+}
diff --git a/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandlerTest.java b/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandlerTest.java
index f5a878a3..7f24e03a 100644
--- a/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandlerTest.java
+++ b/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandlerTest.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.grpc.helidon;
 
 import static org.assertj.core.api.Assertions.assertThat;
diff --git a/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/PbjTest.java b/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/PbjTest.java
index 0979694e..c6eea83d 100644
--- a/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/PbjTest.java
+++ b/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/PbjTest.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.grpc.helidon;
 
 import static org.assertj.core.api.Assertions.assertThat;
diff --git a/pbj-core/pbj-grpc-helidon/src/test/proto/greeter/greeter_service.proto b/pbj-core/pbj-grpc-helidon/src/test/proto/greeter/greeter_service.proto
index 51b0e11a..b9cc017a 100644
--- a/pbj-core/pbj-grpc-helidon/src/test/proto/greeter/greeter_service.proto
+++ b/pbj-core/pbj-grpc-helidon/src/test/proto/greeter/greeter_service.proto
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 syntax = "proto3";
 import "greeter/hello_reply.proto";
 import "greeter/hello_request.proto";
diff --git a/pbj-core/pbj-grpc-helidon/src/test/proto/greeter/hello_reply.proto b/pbj-core/pbj-grpc-helidon/src/test/proto/greeter/hello_reply.proto
index e9202541..d87cec56 100644
--- a/pbj-core/pbj-grpc-helidon/src/test/proto/greeter/hello_reply.proto
+++ b/pbj-core/pbj-grpc-helidon/src/test/proto/greeter/hello_reply.proto
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 syntax = "proto3";
 
 package greeter;
diff --git a/pbj-core/pbj-grpc-helidon/src/test/proto/greeter/hello_request.proto b/pbj-core/pbj-grpc-helidon/src/test/proto/greeter/hello_request.proto
index 86eff26c..35850826 100644
--- a/pbj-core/pbj-grpc-helidon/src/test/proto/greeter/hello_request.proto
+++ b/pbj-core/pbj-grpc-helidon/src/test/proto/greeter/hello_request.proto
@@ -1,7 +1,8 @@
+// SPDX-License-Identifier: Apache-2.0
 syntax = "proto3";
 
 package greeter;
 option java_multiple_files = true;
 message HelloRequest {
   string name = 1;
-}
\ No newline at end of file
+}
diff --git a/pbj-core/pbj-runtime/build.gradle.kts b/pbj-core/pbj-runtime/build.gradle.kts
index e65bc024..5b81d00e 100644
--- a/pbj-core/pbj-runtime/build.gradle.kts
+++ b/pbj-core/pbj-runtime/build.gradle.kts
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2022-2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 plugins { id("com.hedera.pbj.runtime") }
 
 testModuleInfo {
diff --git a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BufferedDataGetBytes.java b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BufferedDataGetBytes.java
index 2d751d21..11a98271 100644
--- a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BufferedDataGetBytes.java
+++ b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BufferedDataGetBytes.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
diff --git a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/ByteBufferGetByte.java b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/ByteBufferGetByte.java
index 3b4dd6e0..aa9b22b3 100644
--- a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/ByteBufferGetByte.java
+++ b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/ByteBufferGetByte.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
 import java.nio.ByteBuffer;
diff --git a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/ByteBufferGetBytes.java b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/ByteBufferGetBytes.java
index 0478c2ad..f5a2e41a 100644
--- a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/ByteBufferGetBytes.java
+++ b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/ByteBufferGetBytes.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
 import java.nio.ByteBuffer;
diff --git a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/ByteOrderEquals.java b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/ByteOrderEquals.java
index 9be722c6..08f9682b 100644
--- a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/ByteOrderEquals.java
+++ b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/ByteOrderEquals.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
 import java.nio.ByteOrder;
diff --git a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BytesGetLong.java b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BytesGetLong.java
index 7638bd9c..6c8ebe39 100644
--- a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BytesGetLong.java
+++ b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BytesGetLong.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
 import com.hedera.pbj.runtime.io.buffer.Bytes;
diff --git a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WritableStreamingDataBench.java b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WritableStreamingDataBench.java
index 050aa467..8080952c 100644
--- a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WritableStreamingDataBench.java
+++ b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WritableStreamingDataBench.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
diff --git a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBufferedDataBench.java b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBufferedDataBench.java
index 458e6822..55cc0a6d 100644
--- a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBufferedDataBench.java
+++ b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBufferedDataBench.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
 import com.hedera.pbj.runtime.FieldDefinition;
diff --git a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBytesBench.java b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBytesBench.java
index aca18db0..c8779bb9 100644
--- a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBytesBench.java
+++ b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBytesBench.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
 import com.hedera.pbj.runtime.FieldDefinition;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Codec.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Codec.java
index f7996b3f..d19cd86d 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Codec.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Codec.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 import java.io.ByteArrayOutputStream;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ComparableOneOf.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ComparableOneOf.java
index a336041e..9c0326a5 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ComparableOneOf.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ComparableOneOf.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 import java.util.Objects;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/EnumWithProtoMetadata.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/EnumWithProtoMetadata.java
index f6169e70..927b0fa9 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/EnumWithProtoMetadata.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/EnumWithProtoMetadata.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 /**
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldDefinition.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldDefinition.java
index 01c37eb7..4683bfa7 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldDefinition.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldDefinition.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 /**
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldType.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldType.java
index 1f596a9f..14b703a9 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldType.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldType.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 /**
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonCodec.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonCodec.java
index 84e3b10a..242ea055 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonCodec.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonCodec.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 import com.hedera.pbj.runtime.io.ReadableSequentialData;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonTools.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonTools.java
index 6a242874..eefc6bb2 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonTools.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonTools.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 import com.hedera.pbj.runtime.io.ReadableSequentialData;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/MalformedProtobufException.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/MalformedProtobufException.java
index 0635b923..8f91de88 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/MalformedProtobufException.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/MalformedProtobufException.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 import java.io.IOException;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/OneOf.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/OneOf.java
index 20d1905a..1efdc720 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/OneOf.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/OneOf.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 import java.util.Objects;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ParseException.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ParseException.java
index 0e02f841..0f9bea01 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ParseException.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ParseException.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 /**
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/PbjMap.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/PbjMap.java
index 34d42262..457f7b36 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/PbjMap.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/PbjMap.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 import java.util.Collection;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoConstants.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoConstants.java
index c4ffb60d..45612948 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoConstants.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoConstants.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 /**
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoParserTools.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoParserTools.java
index c5570536..76c2c3be 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoParserTools.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoParserTools.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 import com.hedera.pbj.runtime.io.buffer.Bytes;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoTestTools.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoTestTools.java
index 691f6bfd..0dfb397f 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoTestTools.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoTestTools.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriter.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriter.java
index d562692a..dd8db3dd 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriter.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriter.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 import com.hedera.pbj.runtime.io.WritableSequentialData;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriterTools.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriterTools.java
index b7ffa004..71b0894d 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriterTools.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriterTools.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 import static com.hedera.pbj.runtime.ProtoConstants.WIRE_TYPE_DELIMITED;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcMethodDefinition.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcMethodDefinition.java
index 86002aa5..2a44ab2d 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcMethodDefinition.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcMethodDefinition.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 /**
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcServiceDefinition.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcServiceDefinition.java
index a57e3095..e47b2cc5 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcServiceDefinition.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcServiceDefinition.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 import edu.umd.cs.findbugs.annotations.NonNull;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Schema.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Schema.java
index da808dc6..357e96f8 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Schema.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Schema.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 /**
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/UncheckedParseException.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/UncheckedParseException.java
index 63827c26..8ecdcbaa 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/UncheckedParseException.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/UncheckedParseException.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 /**
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/UnknownFieldException.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/UnknownFieldException.java
index f0021abe..5aeb1733 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/UnknownFieldException.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/UnknownFieldException.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 import edu.umd.cs.findbugs.annotations.NonNull;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Utf8Tools.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Utf8Tools.java
index af62cf62..1ba66f94 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Utf8Tools.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Utf8Tools.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 import static java.lang.Character.*;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/GrpcException.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/GrpcException.java
index 49741d14..afabaddb 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/GrpcException.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/GrpcException.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.grpc;
 
 import static java.util.Objects.requireNonNull;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/GrpcStatus.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/GrpcStatus.java
index 0cffc1f7..f587ee48 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/GrpcStatus.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/GrpcStatus.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.grpc;
 
 /**
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipeline.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipeline.java
index fd3d163b..7a01d63d 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipeline.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipeline.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.grpc;
 
 import java.util.concurrent.Flow;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipelines.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipelines.java
index 5a329666..e3564686 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipelines.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipelines.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.grpc;
 
 import static java.util.Objects.requireNonNull;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/ServiceInterface.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/ServiceInterface.java
index f3d9a4cf..6d9cc0df 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/ServiceInterface.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/ServiceInterface.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.grpc;
 
 import com.hedera.pbj.runtime.io.buffer.Bytes;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/DataEncodingException.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/DataEncodingException.java
index f150ddc8..ad06c932 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/DataEncodingException.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/DataEncodingException.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
 /**
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/ReadableSequentialData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/ReadableSequentialData.java
index 4e87284b..751274b1 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/ReadableSequentialData.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/ReadableSequentialData.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/SequentialData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/SequentialData.java
index 98e25688..9b05f31e 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/SequentialData.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/SequentialData.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
 import java.io.UncheckedIOException;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/UnsafeUtils.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/UnsafeUtils.java
index 8a1be181..48f554a5 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/UnsafeUtils.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/UnsafeUtils.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
 import java.lang.reflect.Field;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/WritableSequentialData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/WritableSequentialData.java
index 28c82ab9..311b73b9 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/WritableSequentialData.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/WritableSequentialData.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/BufferedData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/BufferedData.java
index 23fecd5f..af9c8fb3 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/BufferedData.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/BufferedData.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.buffer;
 
 import static java.nio.ByteOrder.BIG_ENDIAN;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/BufferedSequentialData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/BufferedSequentialData.java
index 8a4d9560..2e61736e 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/BufferedSequentialData.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/BufferedSequentialData.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.buffer;
 
 import com.hedera.pbj.runtime.io.SequentialData;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/ByteArrayBufferedData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/ByteArrayBufferedData.java
index 500d3464..ab2b6b28 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/ByteArrayBufferedData.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/ByteArrayBufferedData.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.buffer;
 
 import com.hedera.pbj.runtime.io.DataEncodingException;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/Bytes.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/Bytes.java
index ab5370a9..8a5ccbeb 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/Bytes.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/Bytes.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.buffer;
 
 import com.hedera.pbj.runtime.io.DataEncodingException;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/DirectBufferedData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/DirectBufferedData.java
index adbcbede..6359f566 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/DirectBufferedData.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/DirectBufferedData.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.buffer;
 
 import com.hedera.pbj.runtime.io.DataEncodingException;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessData.java
index 12caad20..ae95948f 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessData.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessData.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.buffer;
 
 import com.hedera.pbj.runtime.io.DataEncodingException;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessSequenceAdapter.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessSequenceAdapter.java
index d78af838..179690c3 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessSequenceAdapter.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessSequenceAdapter.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.buffer;
 
 import com.hedera.pbj.runtime.io.ReadableSequentialData;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/EOFException.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/EOFException.java
index f86939a5..84bb308b 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/EOFException.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/EOFException.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.stream;
 
 import java.nio.BufferUnderflowException;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingData.java
index bde6b93b..30c37070 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingData.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingData.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.stream;
 
 import com.hedera.pbj.runtime.io.DataEncodingException;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/WritableStreamingData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/WritableStreamingData.java
index ee495356..3f4824df 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/WritableStreamingData.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/WritableStreamingData.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.stream;
 
 import com.hedera.pbj.runtime.io.WritableSequentialData;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/CharBufferToWritableSequentialData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/CharBufferToWritableSequentialData.java
index e2eca6c8..b1749d24 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/CharBufferToWritableSequentialData.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/CharBufferToWritableSequentialData.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.test;
 
 import com.hedera.pbj.runtime.io.ReadableSequentialData;
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/NoToStringWrapper.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/NoToStringWrapper.java
index 4a042cec..ea378578 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/NoToStringWrapper.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/NoToStringWrapper.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.test;
 
 import java.util.Objects;
@@ -56,4 +57,4 @@ public boolean equals(Object o) {
     public int hashCode() {
         return Objects.hash(value);
     }
-}
\ No newline at end of file
+}
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/Sneaky.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/Sneaky.java
index 53e72116..f423c000 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/Sneaky.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/Sneaky.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.test;
 
 /**
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/UncheckedThrowingFunction.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/UncheckedThrowingFunction.java
index c7883ca4..1c5365c8 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/UncheckedThrowingFunction.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/UncheckedThrowingFunction.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.test;
 
 import java.util.function.Function;
diff --git a/pbj-core/pbj-runtime/src/main/java/module-info.java b/pbj-core/pbj-runtime/src/main/java/module-info.java
index 73d414e8..6c85816f 100644
--- a/pbj-core/pbj-runtime/src/main/java/module-info.java
+++ b/pbj-core/pbj-runtime/src/main/java/module-info.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 /** Runtime module of code needed by PBJ generated code at runtime. */
 module com.hedera.pbj.runtime {
 
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoParserToolsTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoParserToolsTest.java
index 218247a8..02470ac9 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoParserToolsTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoParserToolsTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 import static com.hedera.pbj.runtime.FieldType.FIXED32;
@@ -331,4 +332,4 @@ private static <T> void testRead(final Supplier<? extends T> valueSupplier,
         assertEquals(value, reader.apply(data));
     }
 
-}
\ No newline at end of file
+}
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoWriterToolsTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoWriterToolsTest.java
index 7efa375c..5456bdae 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoWriterToolsTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoWriterToolsTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 import static com.hedera.pbj.runtime.FieldType.BOOL;
@@ -1449,4 +1450,4 @@ static FieldDefinition createRepeatedFieldDefinition(FieldType fieldType) {
         return new FieldDefinition(RANDOM_STRING.nextString(), fieldType, true, RNG.nextInt(1, 16));
     }
 
-}
\ No newline at end of file
+}
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/Utf8ToolsTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/Utf8ToolsTest.java
index 72084d32..4ee92f2e 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/Utf8ToolsTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/Utf8ToolsTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/GrpcExceptionTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/GrpcExceptionTest.java
index 0bba9abe..a4f4d68e 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/GrpcExceptionTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/GrpcExceptionTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.grpc;
 
 import static org.assertj.core.api.Assertions.assertThat;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/GrpcStatusTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/GrpcStatusTest.java
index f9230845..2a7a1c98 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/GrpcStatusTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/GrpcStatusTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.grpc;
 
 import static org.assertj.core.api.Assertions.assertThat;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/PipelinesTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/PipelinesTest.java
index ea4b95b0..00fd6b96 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/PipelinesTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/PipelinesTest.java
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2024 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.grpc;
 
 import static org.assertj.core.api.Assertions.assertThat;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/DataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/DataTest.java
index 126618a1..80d3dba6 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/DataTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/DataTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
 import com.google.protobuf.CodedInputStream;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialDataTest.java
index 38da52dd..2ec6eee6 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialDataTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialDataTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
 import com.hedera.pbj.runtime.io.stream.EOFException;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialTestBase.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialTestBase.java
index d988802e..2ddba1d2 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialTestBase.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialTestBase.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
 import static org.assertj.core.api.Assertions.assertThat;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableTestBase.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableTestBase.java
index e84644ce..84e3a5f4 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableTestBase.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableTestBase.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialDataTest.java
index fb5dcd94..d4eb7b91 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialDataTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialDataTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
 import org.junit.jupiter.params.ParameterizedTest;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialTestBase.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialTestBase.java
index 1f077d03..913f3288 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialTestBase.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialTestBase.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/UnsafeUtilsTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/UnsafeUtilsTest.java
index bd8fb02f..50affdcb 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/UnsafeUtilsTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/UnsafeUtilsTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableSequentialDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableSequentialDataTest.java
index fc5f53a9..e349ec24 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableSequentialDataTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableSequentialDataTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
 import edu.umd.cs.findbugs.annotations.NonNull;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableTestBase.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableTestBase.java
index 0755a635..e67c106d 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableTestBase.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableTestBase.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTest.java
index 401319b9..9bf3c503 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.buffer;
 
 import edu.umd.cs.findbugs.annotations.NonNull;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTestBase.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTestBase.java
index 79e01f78..f9c5d170 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTestBase.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTestBase.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.buffer;
 
 import static org.assertj.core.api.Assertions.assertThat;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/ByteArrayBufferedDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/ByteArrayBufferedDataTest.java
index 6c509351..2e1c28ee 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/ByteArrayBufferedDataTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/ByteArrayBufferedDataTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.buffer;
 
 import edu.umd.cs.findbugs.annotations.NonNull;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BytesTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BytesTest.java
index 01472499..6625b1c2 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BytesTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BytesTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.buffer;
 
 import static org.assertj.core.api.Assertions.assertThat;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/DirectBufferedDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/DirectBufferedDataTest.java
index d41c4820..598b4a17 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/DirectBufferedDataTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/DirectBufferedDataTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.buffer;
 
 import edu.umd.cs.findbugs.annotations.NonNull;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/RandomAccessTestBase.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/RandomAccessTestBase.java
index 44654424..05331ae8 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/RandomAccessTestBase.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/RandomAccessTestBase.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.buffer;
 
 import static org.assertj.core.api.Assertions.assertThat;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/StubbedRandomAccessDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/StubbedRandomAccessDataTest.java
index 5f0cbc57..0923ec7c 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/StubbedRandomAccessDataTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/StubbedRandomAccessDataTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.buffer;
 
 import com.hedera.pbj.runtime.io.ReadableSequentialData;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingDataTest.java
index fc48ab86..02557276 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingDataTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingDataTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.stream;
 
 import com.hedera.pbj.runtime.io.ReadableSequentialData;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/WritableStreamingDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/WritableStreamingDataTest.java
index 3c1134ee..a9714f74 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/WritableStreamingDataTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/WritableStreamingDataTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.stream;
 
 import com.hedera.pbj.runtime.io.WritableSequentialData;
diff --git a/pbj-core/pbj-runtime/src/test/java/tests/ComparableOneOfTest.java b/pbj-core/pbj-runtime/src/test/java/tests/ComparableOneOfTest.java
index 5d3e8717..b15516d2 100644
--- a/pbj-core/pbj-runtime/src/test/java/tests/ComparableOneOfTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/tests/ComparableOneOfTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package tests;
 
 import com.hedera.pbj.runtime.ComparableOneOf;
diff --git a/pbj-core/pbj-runtime/src/test/java/tests/FieldDefinitionTest.java b/pbj-core/pbj-runtime/src/test/java/tests/FieldDefinitionTest.java
index 2a127752..25069987 100644
--- a/pbj-core/pbj-runtime/src/test/java/tests/FieldDefinitionTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/tests/FieldDefinitionTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package tests;
 
 import com.hedera.pbj.runtime.FieldDefinition;
diff --git a/pbj-core/pbj-runtime/src/test/java/tests/FuzzTest.java b/pbj-core/pbj-runtime/src/test/java/tests/FuzzTest.java
index de1a38a9..e200715f 100644
--- a/pbj-core/pbj-runtime/src/test/java/tests/FuzzTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/tests/FuzzTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package tests;
 
 public class FuzzTest {
diff --git a/pbj-core/pbj-runtime/src/test/java/tests/NegativeTest.java b/pbj-core/pbj-runtime/src/test/java/tests/NegativeTest.java
index 38552a22..a91dbe6e 100644
--- a/pbj-core/pbj-runtime/src/test/java/tests/NegativeTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/tests/NegativeTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package tests;
 
 public class NegativeTest {
diff --git a/pbj-core/pbj-runtime/src/test/java/tests/OneOfTest.java b/pbj-core/pbj-runtime/src/test/java/tests/OneOfTest.java
index 6f63c25f..5e3055c7 100644
--- a/pbj-core/pbj-runtime/src/test/java/tests/OneOfTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/tests/OneOfTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package tests;
 
 import com.hedera.pbj.runtime.EnumWithProtoMetadata;
diff --git a/pbj-core/pbj-runtime/src/test/java/tests/ProtoConstantsTest.java b/pbj-core/pbj-runtime/src/test/java/tests/ProtoConstantsTest.java
index 3a47f13a..e878e13e 100644
--- a/pbj-core/pbj-runtime/src/test/java/tests/ProtoConstantsTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/tests/ProtoConstantsTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package tests;
 
 import com.hedera.pbj.runtime.ProtoConstants;
diff --git a/pbj-core/pbj-runtime/src/test/proto/omnibus.proto b/pbj-core/pbj-runtime/src/test/proto/omnibus.proto
index c6d035ce..6b5b6350 100644
--- a/pbj-core/pbj-runtime/src/test/proto/omnibus.proto
+++ b/pbj-core/pbj-runtime/src/test/proto/omnibus.proto
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 syntax = "proto3";
 
 package test.proto;
diff --git a/pbj-core/settings.gradle.kts b/pbj-core/settings.gradle.kts
index 47fda821..653fd4b1 100644
--- a/pbj-core/settings.gradle.kts
+++ b/pbj-core/settings.gradle.kts
@@ -1,19 +1,4 @@
-/*
- * Copyright (C) 2022-2023 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
+// SPDX-License-Identifier: Apache-2.0
 pluginManagement { includeBuild("gradle/plugins") }
 
 plugins {
diff --git a/pbj-integration-tests/build.gradle.kts b/pbj-integration-tests/build.gradle.kts
index fc7d7657..0b49929a 100644
--- a/pbj-integration-tests/build.gradle.kts
+++ b/pbj-integration-tests/build.gradle.kts
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 plugins {
     id("java")
     id("jacoco")
diff --git a/pbj-integration-tests/gradle.properties b/pbj-integration-tests/gradle.properties
index 0a77e57b..325bc7f9 100644
--- a/pbj-integration-tests/gradle.properties
+++ b/pbj-integration-tests/gradle.properties
@@ -1,3 +1,4 @@
+# SPDX-License-Identifier: Apache-2.0
 # Need increased heap for running Gradle itself, or SonarQube will run the JVM out of metaspace
 org.gradle.jvmargs=-Xmx2048m
 
diff --git a/pbj-integration-tests/settings.gradle.kts b/pbj-integration-tests/settings.gradle.kts
index e67b086c..aa2cb590 100644
--- a/pbj-integration-tests/settings.gradle.kts
+++ b/pbj-integration-tests/settings.gradle.kts
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 import me.champeau.gradle.igp.gitRepositories
 
 pluginManagement {
diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/ComplexEqualsHashCodeBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/ComplexEqualsHashCodeBench.java
index faceae06..fd0ebfcf 100644
--- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/ComplexEqualsHashCodeBench.java
+++ b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/ComplexEqualsHashCodeBench.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.jmh;
 
 import com.hedera.pbj.runtime.io.buffer.Bytes;
diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/EqualsHashCodeBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/EqualsHashCodeBench.java
index b09870fc..8705997f 100644
--- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/EqualsHashCodeBench.java
+++ b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/EqualsHashCodeBench.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.jmh;
 
 import com.hedera.pbj.test.proto.pbj.TimestampTest;
diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/HashBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/HashBench.java
index 88a0d43b..ebdd9530 100644
--- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/HashBench.java
+++ b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/HashBench.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.jmh;
 
 import com.hedera.pbj.integration.test.TestHashFunctions;
diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/JsonBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/JsonBench.java
index 3a4f6fdf..f6ada547 100644
--- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/JsonBench.java
+++ b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/JsonBench.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.jmh;
 
 import com.google.protobuf.GeneratedMessage;
diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/ProtobufObjectBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/ProtobufObjectBench.java
index d5fd2466..adbd182d 100644
--- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/ProtobufObjectBench.java
+++ b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/ProtobufObjectBench.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.jmh;
 
 import com.google.protobuf.CodedOutputStream;
diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/VarIntBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/VarIntBench.java
index 43cf97b2..4ae4a8f2 100644
--- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/VarIntBench.java
+++ b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/VarIntBench.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.jmh;
 
 import com.google.protobuf.CodedInputStream;
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/AccountDetailsPbj.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/AccountDetailsPbj.java
index b8214be5..eac128b8 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/AccountDetailsPbj.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/AccountDetailsPbj.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration;
 
 import com.hedera.hapi.node.base.*;
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/AccountDetailsWriter.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/AccountDetailsWriter.java
index d39392bc..1809fcab 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/AccountDetailsWriter.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/AccountDetailsWriter.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration;
 
 import com.hedera.hapi.node.token.AccountDetails;
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingTestData.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingTestData.java
index 8f2c5261..e36391bd 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingTestData.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingTestData.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration;
 
 import com.hedera.pbj.runtime.io.buffer.Bytes;
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingWriterPerfTest.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingWriterPerfTest.java
index df468109..c516cb98 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingWriterPerfTest.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingWriterPerfTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration;
 
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/NonSynchronizedByteArrayInputStream.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/NonSynchronizedByteArrayInputStream.java
index a65d45f9..1af80d4d 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/NonSynchronizedByteArrayInputStream.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/NonSynchronizedByteArrayInputStream.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration;
 
 import java.io.IOException;
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/NonSynchronizedByteArrayOutputStream.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/NonSynchronizedByteArrayOutputStream.java
index 36a616e6..9108d01a 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/NonSynchronizedByteArrayOutputStream.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/NonSynchronizedByteArrayOutputStream.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration;
 
 import java.io.OutputStream;
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/Test.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/Test.java
index 4dec7166..b20a30d8 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/Test.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/Test.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration;
 
 import com.google.protobuf.ByteString;
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/Elapsed.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/Elapsed.java
index 02d3ef1d..f3a583e3 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/Elapsed.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/Elapsed.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.fuzz;
 
 import com.hedera.pbj.runtime.test.Sneaky;
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzTest.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzTest.java
index cdf3da2b..3732c8bd 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzTest.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.fuzz;
 
 import com.hedera.pbj.runtime.Codec;
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzTestException.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzTestException.java
index fa8b4e36..cc5546d0 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzTestException.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzTestException.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.fuzz;
 
 public class FuzzTestException extends RuntimeException {
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzTestResult.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzTestResult.java
index b92a83e7..1663697b 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzTestResult.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzTestResult.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.fuzz;
 
 import java.text.NumberFormat;
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzUtil.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzUtil.java
index 93142b42..0dcf4243 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzUtil.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzUtil.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.fuzz;
 
 import java.lang.reflect.Field;
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/SingleFuzzTest.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/SingleFuzzTest.java
index 7c2373d4..b7e617ad 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/SingleFuzzTest.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/SingleFuzzTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.fuzz;
 
 import com.hedera.pbj.runtime.Codec;
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/SingleFuzzTestResult.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/SingleFuzzTestResult.java
index b52b950b..bf4eaabf 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/SingleFuzzTestResult.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/SingleFuzzTestResult.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.fuzz;
 
 /**
diff --git a/pbj-integration-tests/src/main/proto/bytes.proto b/pbj-integration-tests/src/main/proto/bytes.proto
index 2fd09a80..be48d255 100644
--- a/pbj-integration-tests/src/main/proto/bytes.proto
+++ b/pbj-integration-tests/src/main/proto/bytes.proto
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 syntax = "proto3";
 
 package proto;
@@ -11,4 +12,4 @@ option java_multiple_files = true;
  */
 message MessageWithBytes {
   bytes bytesField = 1;
-}
\ No newline at end of file
+}
diff --git a/pbj-integration-tests/src/main/proto/comparable.proto b/pbj-integration-tests/src/main/proto/comparable.proto
index 75c3aa94..c0b205d2 100644
--- a/pbj-integration-tests/src/main/proto/comparable.proto
+++ b/pbj-integration-tests/src/main/proto/comparable.proto
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 syntax = "proto3";
 
 package proto;
diff --git a/pbj-integration-tests/src/main/proto/everything.proto b/pbj-integration-tests/src/main/proto/everything.proto
index 57fd251b..0a96c05b 100644
--- a/pbj-integration-tests/src/main/proto/everything.proto
+++ b/pbj-integration-tests/src/main/proto/everything.proto
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 syntax = "proto3";
 
 package proto;
@@ -189,4 +190,4 @@ message InnerEverything {
     google.protobuf.BytesValue bytesBoxedOneOf = 100025;
     google.protobuf.StringValue stringBoxedOneOf = 100026;
   }
-}
\ No newline at end of file
+}
diff --git a/pbj-integration-tests/src/main/proto/extendedUtf8StingTest.proto b/pbj-integration-tests/src/main/proto/extendedUtf8StingTest.proto
index afbd3ec1..bd1a249c 100644
--- a/pbj-integration-tests/src/main/proto/extendedUtf8StingTest.proto
+++ b/pbj-integration-tests/src/main/proto/extendedUtf8StingTest.proto
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 syntax = "proto3";
 
 package proto;
@@ -34,4 +35,4 @@ message MessageWithString {
      * A single string for extended testing
      */
     string aTestString = 1;
-}
\ No newline at end of file
+}
diff --git a/pbj-integration-tests/src/main/proto/hasheval.proto b/pbj-integration-tests/src/main/proto/hasheval.proto
index 0d9b01f7..b819a142 100644
--- a/pbj-integration-tests/src/main/proto/hasheval.proto
+++ b/pbj-integration-tests/src/main/proto/hasheval.proto
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 syntax = "proto3";
 
 package proto;
diff --git a/pbj-integration-tests/src/main/proto/map.proto b/pbj-integration-tests/src/main/proto/map.proto
index a87ff84e..cf1df985 100644
--- a/pbj-integration-tests/src/main/proto/map.proto
+++ b/pbj-integration-tests/src/main/proto/map.proto
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 syntax = "proto3";
 
 package proto;
@@ -23,4 +24,4 @@ message MessageWithManyMaps {
   map<string, MessageWithMaps> mapStringToMessage = 3;
   map<uint64, bytes> mapUInt64ToBytes = 4;
   map<int64, bool> mapInt64ToBool = 5;
-}
\ No newline at end of file
+}
diff --git a/pbj-integration-tests/src/main/proto/message.proto b/pbj-integration-tests/src/main/proto/message.proto
index 59151c3e..cae623da 100644
--- a/pbj-integration-tests/src/main/proto/message.proto
+++ b/pbj-integration-tests/src/main/proto/message.proto
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 syntax = "proto3";
 
 package proto;
@@ -11,4 +12,4 @@ option java_multiple_files = true;
  */
 message MessageWithMessage {
   MessageWithMessage message = 1;
-}
\ No newline at end of file
+}
diff --git a/pbj-integration-tests/src/main/proto/timestampTest.proto b/pbj-integration-tests/src/main/proto/timestampTest.proto
index 87392695..b866ede4 100644
--- a/pbj-integration-tests/src/main/proto/timestampTest.proto
+++ b/pbj-integration-tests/src/main/proto/timestampTest.proto
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 syntax = "proto3";
 
 package proto;
diff --git a/pbj-integration-tests/src/main/proto/timestampTest2.proto b/pbj-integration-tests/src/main/proto/timestampTest2.proto
index 1eb2c3da..dcffcc11 100644
--- a/pbj-integration-tests/src/main/proto/timestampTest2.proto
+++ b/pbj-integration-tests/src/main/proto/timestampTest2.proto
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 syntax = "proto3";
 
 package proto;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/CompareToNegativeTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/CompareToNegativeTest.java
index a1d3c232..11faa73d 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/CompareToNegativeTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/CompareToNegativeTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
 import static com.hedera.pbj.compiler.PbjCompilerTask.compileFilesIn;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/CompareToTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/CompareToTest.java
index 769484d0..f4f7419e 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/CompareToTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/CompareToTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
 import static java.util.Collections.shuffle;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/ExtendedUtf8MessageWithStringTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/ExtendedUtf8MessageWithStringTest.java
index 26138e21..64987db4 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/ExtendedUtf8MessageWithStringTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/ExtendedUtf8MessageWithStringTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
 import com.google.protobuf.CodedOutputStream;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/FieldsNonNullTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/FieldsNonNullTest.java
index 3308f488..fc086b02 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/FieldsNonNullTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/FieldsNonNullTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
 import com.hedera.hapi.node.base.FeeSchedule;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/HashEqualsTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/HashEqualsTest.java
index 34d0d06f..edf02637 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/HashEqualsTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/HashEqualsTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/JsonCodecTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/JsonCodecTest.java
index 3141a25f..7097e1ea 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/JsonCodecTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/JsonCodecTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
 import com.google.protobuf.ByteString;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MalformedMessageTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MalformedMessageTest.java
index b2829d99..a6f9e233 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MalformedMessageTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MalformedMessageTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
 import static org.junit.jupiter.api.Assertions.assertThrows;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MaxDepthTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MaxDepthTest.java
index 55a2acd4..711299a1 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MaxDepthTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MaxDepthTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
 import com.hedera.pbj.runtime.ParseException;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MaxSizeTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MaxSizeTest.java
index 0182d28e..04743dfc 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MaxSizeTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MaxSizeTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
 import com.hedera.pbj.runtime.ParseException;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/ParserNeverWrapsTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/ParserNeverWrapsTest.java
index a99f1967..a269a210 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/ParserNeverWrapsTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/ParserNeverWrapsTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
 import com.hedera.pbj.runtime.Codec;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/SampleFuzzTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/SampleFuzzTest.java
index 4ac22e84..03832cc0 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/SampleFuzzTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/SampleFuzzTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
 import com.hedera.hapi.node.base.tests.AccountIDTest;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TestHashFunctions.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TestHashFunctions.java
index 4e2d8688..48be11a6 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TestHashFunctions.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TestHashFunctions.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
 import com.hedera.pbj.test.proto.pbj.Hasheval;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TimestampTestTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TimestampTestTest.java
index c3309e5b..4b5b5255 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TimestampTestTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TimestampTestTest.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
 import com.google.protobuf.CodedOutputStream;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TruncatedDataTests.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TruncatedDataTests.java
index 8c7f38eb..d8dd1ff2 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TruncatedDataTests.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TruncatedDataTests.java
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
 import com.google.protobuf.InvalidProtocolBufferException;
diff --git a/pbj-integration-tests/src/test/resources/non_compilable_comparable_oneOf.proto b/pbj-integration-tests/src/test/resources/non_compilable_comparable_oneOf.proto
index 9bfb0cc3..d36b64b0 100644
--- a/pbj-integration-tests/src/test/resources/non_compilable_comparable_oneOf.proto
+++ b/pbj-integration-tests/src/test/resources/non_compilable_comparable_oneOf.proto
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 syntax = "proto3";
 
 package proto;
diff --git a/pbj-integration-tests/src/test/resources/non_compilable_comparable_repeated.proto b/pbj-integration-tests/src/test/resources/non_compilable_comparable_repeated.proto
index d3a74453..4eb43230 100644
--- a/pbj-integration-tests/src/test/resources/non_compilable_comparable_repeated.proto
+++ b/pbj-integration-tests/src/test/resources/non_compilable_comparable_repeated.proto
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 syntax = "proto3";
 
 package proto;
diff --git a/pbj-integration-tests/src/test/resources/non_compilable_comparable_sub_obj.proto b/pbj-integration-tests/src/test/resources/non_compilable_comparable_sub_obj.proto
index 305f2bad..67e4046f 100644
--- a/pbj-integration-tests/src/test/resources/non_compilable_comparable_sub_obj.proto
+++ b/pbj-integration-tests/src/test/resources/non_compilable_comparable_sub_obj.proto
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: Apache-2.0
 syntax = "proto3";
 
 package proto;

From a9f6300052548b8b80714a51d00709250fa0b785 Mon Sep 17 00:00:00 2001
From: Joseph Sinclair <121976561+jsync-swirlds@users.noreply.github.com>
Date: Thu, 9 Jan 2025 16:18:11 -0700
Subject: [PATCH 3/3] Added the Spotless plugin to the integration tests build
 * In pbj-integration-tests
    * Also ran spotlessApply to fix _many_ formatting issues.
 * In pbj-core
    * Ran spotlessApply, which clearly hadn't been done in a very long time.

Signed-off-by: Joseph Sinclair <121976561+jsync-swirlds@users.noreply.github.com>
---
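Note: for reference, a minimal sketch of what applying Spotless in a Gradle
Kotlin DSL build script can look like. This is illustrative only; the plugin
version, formatter choice, target pattern, and license-header step below are
assumptions, not the project's actual convention configuration:

    // build.gradle.kts (sketch, not the committed configuration)
    plugins {
        java
        id("com.diffplug.spotless") version "6.25.0" // version is illustrative
    }

    spotless {
        java {
            target("src/**/*.java")           // format all Java sources (assumed pattern)
            googleJavaFormat().aosp()         // 4-space AOSP style (assumed formatter)
            licenseHeader("// SPDX-License-Identifier: Apache-2.0") // assumed header step
            trimTrailingWhitespace()
            endWithNewline()
        }
    }

With a configuration along these lines, running spotlessApply (e.g.
./gradlew spotlessApply) rewrites the sources in place, which is how the
bulk of the reformatting in this patch was produced.
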
 .../hedera/pbj/compiler/PbjCompilerTask.java  |   17 +-
 .../com/hedera/pbj/compiler/impl/Common.java  | 1147 ++++----
 .../compiler/impl/ContextualLookupHelper.java |   37 +-
 .../com/hedera/pbj/compiler/impl/Field.java   |  808 +++---
 .../impl/FileAndPackageNamesConfig.java       |    4 +-
 .../hedera/pbj/compiler/impl/FileType.java    |    4 +-
 .../pbj/compiler/impl/LookupHelper.java       |   84 +-
 .../hedera/pbj/compiler/impl/MapField.java    |   97 +-
 .../hedera/pbj/compiler/impl/OneOfField.java  |  383 +--
 .../compiler/impl/PbjCompilerException.java   |    4 +-
 .../hedera/pbj/compiler/impl/SingleField.java |  632 ++--
 .../impl/generators/EnumGenerator.java        |  297 +-
 .../compiler/impl/generators/Generator.java   |   38 +-
 .../impl/generators/ModelGenerator.java       | 1379 +++++----
 .../impl/generators/SchemaGenerator.java      |  166 +-
 .../impl/generators/TestGenerator.java        |  517 ++--
 .../generators/json/JsonCodecGenerator.java   |  196 +-
 .../json/JsonCodecParseMethodGenerator.java   |  148 +-
 .../json/JsonCodecWriteMethodGenerator.java   |  171 +-
 .../CodecFastEqualsMethodGenerator.java       |    5 +-
 .../generators/protobuf/CodecGenerator.java   |  155 +-
 .../CodecMeasureDataMethodGenerator.java      |    4 +-
 .../CodecMeasureRecordMethodGenerator.java    |  209 +-
 .../protobuf/CodecParseMethodGenerator.java   |  307 +-
 .../protobuf/CodecWriteMethodGenerator.java   |  219 +-
 .../hedera/pbj/compiler/impl/CommonTest.java  |  308 +-
 .../pbj/compiler/impl/LookupHelperTest.java   |   35 +-
 .../pbj/grpc/helidon/PbjProtocolHandler.java  |   63 +-
 .../pbj/grpc/helidon/PbjProtocolSelector.java |    8 -
 .../grpc/helidon/RouteNotFoundHandler.java    |   12 +-
 .../pbj/grpc/helidon/GreeterServiceImpl.java  |    6 +-
 .../grpc/helidon/PbjProtocolHandlerTest.java  |  242 +-
 .../pbj/runtime/io/BufferedDataGetBytes.java  |    1 -
 .../pbj/runtime/io/ByteBufferGetByte.java     |    1 -
 .../hedera/pbj/runtime/io/BytesGetLong.java   |    1 -
 .../io/WritableStreamingDataBench.java        |    1 -
 .../runtime/io/WriteBufferedDataBench.java    |  143 +-
 .../pbj/runtime/io/WriteBytesBench.java       |  126 +-
 .../java/com/hedera/pbj/runtime/Codec.java    |  110 +-
 .../hedera/pbj/runtime/ComparableOneOf.java   |   25 +-
 .../pbj/runtime/EnumWithProtoMetadata.java    |    4 +-
 .../hedera/pbj/runtime/FieldDefinition.java   |   38 +-
 .../com/hedera/pbj/runtime/FieldType.java     |   97 +-
 .../com/hedera/pbj/runtime/JsonCodec.java     |   45 +-
 .../com/hedera/pbj/runtime/JsonTools.java     |  173 +-
 .../runtime/MalformedProtobufException.java   |   26 +-
 .../java/com/hedera/pbj/runtime/OneOf.java    |   27 +-
 .../hedera/pbj/runtime/ParseException.java    |    6 +-
 .../java/com/hedera/pbj/runtime/PbjMap.java   |   22 +-
 .../hedera/pbj/runtime/ProtoConstants.java    |    8 +-
 .../hedera/pbj/runtime/ProtoParserTools.java  |   71 +-
 .../hedera/pbj/runtime/ProtoTestTools.java    |   83 +-
 .../com/hedera/pbj/runtime/ProtoWriter.java   |    1 -
 .../hedera/pbj/runtime/ProtoWriterTools.java  |  333 ++-
 .../pbj/runtime/RpcMethodDefinition.java      |    7 +-
 .../pbj/runtime/RpcServiceDefinition.java     |   15 +-
 .../java/com/hedera/pbj/runtime/Schema.java   |   19 +-
 .../pbj/runtime/UncheckedParseException.java  |    4 +-
 .../pbj/runtime/UnknownFieldException.java    |    4 +-
 .../com/hedera/pbj/runtime/Utf8Tools.java     |   23 +-
 .../pbj/runtime/grpc/GrpcException.java       |   17 +-
 .../hedera/pbj/runtime/grpc/GrpcStatus.java   |  119 +-
 .../com/hedera/pbj/runtime/grpc/Pipeline.java |    7 +-
 .../hedera/pbj/runtime/grpc/Pipelines.java    |  200 +-
 .../pbj/runtime/grpc/ServiceInterface.java    |   68 +-
 .../pbj/runtime/io/DataEncodingException.java |    7 +-
 .../runtime/io/ReadableSequentialData.java    |  314 +-
 .../hedera/pbj/runtime/io/SequentialData.java |   59 +-
 .../hedera/pbj/runtime/io/UnsafeUtils.java    |   94 +-
 .../runtime/io/WritableSequentialData.java    |  221 +-
 .../pbj/runtime/io/buffer/BufferedData.java   |  231 +-
 .../io/buffer/BufferedSequentialData.java     |   28 +-
 .../io/buffer/ByteArrayBufferedData.java      |  103 +-
 .../hedera/pbj/runtime/io/buffer/Bytes.java   |  278 +-
 .../runtime/io/buffer/DirectBufferedData.java |   71 +-
 .../runtime/io/buffer/RandomAccessData.java   |  391 +--
 .../buffer/RandomAccessSequenceAdapter.java   |   34 +-
 .../pbj/runtime/io/stream/EOFException.java   |    4 +-
 .../io/stream/ReadableStreamingData.java      |   16 +-
 .../io/stream/WritableStreamingData.java      |   64 +-
 .../CharBufferToWritableSequentialData.java   |    9 +-
 .../pbj/runtime/test/NoToStringWrapper.java   |   12 +-
 .../com/hedera/pbj/runtime/test/Sneaky.java   |   18 +-
 .../test/UncheckedThrowingFunction.java       |    5 +-
 .../src/main/java/module-info.java            |    2 -
 .../pbj/runtime/ProtoParserToolsTest.java     |  128 +-
 .../pbj/runtime/ProtoWriterToolsTest.java     |  616 ++--
 .../com/hedera/pbj/runtime/Utf8ToolsTest.java |   41 +-
 .../pbj/runtime/grpc/GrpcExceptionTest.java   |    7 +-
 .../pbj/runtime/grpc/GrpcStatusTest.java      |    7 +-
 .../pbj/runtime/grpc/PipelinesTest.java       |  430 +--
 .../com/hedera/pbj/runtime/io/DataTest.java   |  290 +-
 .../io/ReadableSequentialDataTest.java        |   20 +-
 .../io/ReadableSequentialTestBase.java        |    4 +-
 .../pbj/runtime/io/ReadableTestBase.java      |  593 ++--
 .../pbj/runtime/io/SequentialDataTest.java    |   11 +-
 .../pbj/runtime/io/SequentialTestBase.java    | 2567 +++++++++--------
 .../pbj/runtime/io/UnsafeUtilsTest.java       |   17 +-
 .../io/WritableSequentialDataTest.java        |    2 -
 .../pbj/runtime/io/WritableTestBase.java      |  723 +++--
 .../runtime/io/buffer/BufferedDataTest.java   |    2 +-
 .../io/buffer/BufferedDataTestBase.java       |  127 +-
 .../io/buffer/ByteArrayBufferedDataTest.java  |    2 +-
 .../pbj/runtime/io/buffer/BytesTest.java      |  459 +--
 .../io/buffer/RandomAccessTestBase.java       |  179 +-
 .../buffer/StubbedRandomAccessDataTest.java   |    6 +-
 .../io/stream/ReadableStreamingDataTest.java  |  197 +-
 .../io/stream/WritableStreamingDataTest.java  |   42 +-
 .../test/java/tests/ComparableOneOfTest.java  |   17 +-
 .../test/java/tests/FieldDefinitionTest.java  |   17 +-
 .../src/test/java/tests/FuzzTest.java         |    4 +-
 .../src/test/java/tests/NegativeTest.java     |   39 +-
 .../src/test/java/tests/OneOfTest.java        |   15 +-
 pbj-integration-tests/build.gradle.kts        |   48 +-
 .../jmh/ComplexEqualsHashCodeBench.java       |  154 +-
 .../integration/jmh/EqualsHashCodeBench.java  |    8 +-
 .../hedera/pbj/integration/jmh/HashBench.java |   28 +-
 .../hedera/pbj/integration/jmh/JsonBench.java |  275 +-
 .../integration/jmh/ProtobufObjectBench.java  |  540 ++--
 .../pbj/integration/jmh/VarIntBench.java      |  422 +--
 .../pbj/integration/AccountDetailsPbj.java    |  234 +-
 .../pbj/integration/AccountDetailsWriter.java |   18 +-
 .../pbj/integration/EverythingTestData.java   |  220 +-
 .../integration/EverythingWriterPerfTest.java |   12 +-
 .../NonSynchronizedByteArrayInputStream.java  |    4 +-
 .../NonSynchronizedByteArrayOutputStream.java |   17 +-
 .../java/com/hedera/pbj/integration/Test.java |   99 +-
 .../hedera/pbj/integration/fuzz/Elapsed.java  |   23 +-
 .../hedera/pbj/integration/fuzz/FuzzTest.java |   88 +-
 .../pbj/integration/fuzz/FuzzTestResult.java  |   30 +-
 .../hedera/pbj/integration/fuzz/FuzzUtil.java |    7 +-
 .../pbj/integration/fuzz/SingleFuzzTest.java  |  108 +-
 .../fuzz/SingleFuzzTestResult.java            |   61 +-
 .../test/CompareToNegativeTest.java           |   38 +-
 .../pbj/integration/test/CompareToTest.java   |  300 +-
 .../ExtendedUtf8MessageWithStringTest.java    |  209 +-
 .../integration/test/FieldsNonNullTest.java   |   13 +-
 .../pbj/integration/test/HashEqualsTest.java  |    3 +-
 .../pbj/integration/test/JsonCodecTest.java   |   42 +-
 .../test/MalformedMessageTest.java            |   18 +-
 .../pbj/integration/test/MaxDepthTest.java    |   59 +-
 .../pbj/integration/test/MaxSizeTest.java     |   13 +-
 .../test/ParserNeverWrapsTest.java            |   33 +-
 .../pbj/integration/test/SampleFuzzTest.java  |  231 +-
 .../integration/test/TestHashFunctions.java   |   17 +-
 .../integration/test/TimestampTestTest.java   |  160 +-
 .../integration/test/TruncatedDataTests.java  |  207 +-
 147 files changed, 12357 insertions(+), 9432 deletions(-)

diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjCompilerTask.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjCompilerTask.java
index fdde432b..3a4480dd 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjCompilerTask.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/PbjCompilerTask.java
@@ -54,21 +54,21 @@ public abstract class PbjCompilerTask extends SourceTask {
     public void perform() throws Exception {
         // Clean output directories
         getFileOperations().delete(getJavaMainOutputDirectory(), getJavaTestOutputDirectory());
-        compileFilesIn(getSource(),
+        compileFilesIn(
+                getSource(),
                 getJavaMainOutputDirectory().get().getAsFile(),
                 getJavaTestOutputDirectory().get().getAsFile());
     }
 
     /**
      * Compile all the proto files in the given source directories
+     *
      * @param sourceFiles The source files to compile
      * @param mainOutputDir The main output directory
      * @param testOutputDir The test output directory
      */
-
-    public static void compileFilesIn(Iterable<File> sourceFiles,
-                                       File mainOutputDir,
-                                       File testOutputDir) throws Exception {
+    public static void compileFilesIn(
+            Iterable<File> sourceFiles, File mainOutputDir, File testOutputDir) throws Exception {
         // first we do a scan of files to build lookup tables for imports, packages etc.
         final LookupHelper lookupHelper = new LookupHelper(sourceFiles);
         // for each proto src directory generate code
@@ -100,9 +100,7 @@ public static void compileFilesIn(Iterable<File> sourceFiles,
                         if (enumDef != null) {
                             // run just enum generators for enum
                             EnumGenerator.generateEnumFile(
-                                    enumDef,
-                                    mainOutputDir,
-                                    contextualLookupHelper);
+                                    enumDef, mainOutputDir, contextualLookupHelper);
                         }
                     }
                 } catch (Exception e) {
@@ -112,7 +110,8 @@ public static void compileFilesIn(Iterable<File> sourceFiles,
                     var trace = e.getStackTrace();
                     int count = 0;
                     for (var element : trace) {
-                        if (count++ < MAX_TRACE_FRAMES) System.err.println(STACK_ELEMENT_INDENT + element);
+                        if (count++ < MAX_TRACE_FRAMES)
+                            System.err.println(STACK_ELEMENT_INDENT + element);
                     }
                     throw e;
                 }
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Common.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Common.java
index 35ef8b8d..f8b492dd 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Common.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Common.java
@@ -3,7 +3,6 @@
 
 import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser;
 import edu.umd.cs.findbugs.annotations.NonNull;
-
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileReader;
@@ -14,331 +13,374 @@
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 
-/**
- * Common functions and constants for code generation
- */
+/** Common functions and constants for code generation */
 @SuppressWarnings({"DuplicatedCode", "EscapedSpace"})
 public final class Common {
-	/** The indent for fields, default 4 spaces */
-	public static final String FIELD_INDENT = " ".repeat(4);
+    /** The indent for fields, default 4 spaces */
+    public static final String FIELD_INDENT = " ".repeat(4);
+
+    public static final int DEFAULT_INDENT = 4;
+
+    /** Number of bits used to represent the tag type */
+    static final int TAG_TYPE_BITS = 3;
+
+    /** Wire format code for var int */
+    public static final int TYPE_VARINT = 0;
 
-	public static final int DEFAULT_INDENT = 4;
-	/** Number of bits used to represent the tag type */
-	static final int TAG_TYPE_BITS = 3;
+    /** Wire format code for fixed 64bit number */
+    public static final int TYPE_FIXED64 = 1;
 
-	/** Wire format code for var int */
-	public static final int TYPE_VARINT = 0;
-	/** Wire format code for fixed 64bit number */
-	public static final int TYPE_FIXED64 = 1;
-	/** Wire format code for length delimited, all the complex types */
-	public static final int TYPE_LENGTH_DELIMITED = 2;
-	/** Wire format code for fixed 32bit number */
-	public static final int TYPE_FIXED32 = 5;
-    private static final Pattern COMPARABLE_PATTERN = Pattern.compile("[)] implements Comparable<\\w+> [{]");
+    /** Wire format code for length delimited, all the complex types */
+    public static final int TYPE_LENGTH_DELIMITED = 2;
 
+    /** Wire format code for fixed 32bit number */
+    public static final int TYPE_FIXED32 = 5;
+
+    private static final Pattern COMPARABLE_PATTERN =
+            Pattern.compile("[)] implements Comparable<\\w+> [{]");
 
     /**
-	 * Makes a tag value given a field number and wire type.
-	 *
-	 * @param wireType the wire type part of tag
-	 * @param fieldNumber the field number part of tag
-	 * @return packed encoded tag
-	 */
-	public static int getTag(final int wireType, final int fieldNumber) {
-		return (fieldNumber << TAG_TYPE_BITS) | wireType;
-	}
-
-	/**
-	 * Make sure first character of a string is upper case
-	 *
-	 * @param name string input who's first character can be upper or lower case
-	 * @return name with first character converted to upper case
-	 */
-	public static String capitalizeFirstLetter(String name) {
-		if (!name.isEmpty()) {
-			if (name.chars().allMatch(Character::isUpperCase)) {
-				return Character.toUpperCase(name.charAt(0)) + name.substring(1).toLowerCase();
-			} else {
-				return Character.toUpperCase(name.charAt(0)) + name.substring(1);
-			}
-		}
-		return name;
-	}
-
-	/**
-	 * Convert names like "hello_world" to "HelloWorld" or "helloWorld" depending on firstUpper. Also handles special case
-	 * like "HELLO_WORLD" to same output as "hello_world", while "HelloWorld_Two" still becomes "helloWorldTwo".
-	 *
-	 * @param name input name in snake case
-	 * @param firstUpper if true then first char is upper case otherwise it is lower
-	 * @return out name in camel case
-	 */
-	@NonNull
-	public static String snakeToCamel(@NonNull String name, boolean firstUpper) {
-		final String out =  Arrays.stream(name.split("_")).map(Common::capitalizeFirstLetter).collect(
-				Collectors.joining(""));
-		return (firstUpper ? Character.toUpperCase(out.charAt(0)) : Character.toLowerCase(out.charAt(0)) )
-				+ out.substring(1);
-	}
-
-	/**
-	 * Convert a camel case name to upper case snake case
-	 *
-	 * @param name the input name in camel case
-	 * @return output name in upper snake case
-	 */
-	public static String camelToUpperSnake(String name) {
-		// check if already camel upper
-		if (name.chars().allMatch(c -> Character.isUpperCase(c) || Character.isDigit(c) || c == '_')) return name;
-		// check if already has underscores, then just capitalize
-		if (name.contains("_")) return name.toUpperCase();
-		// else convert
-		final StringBuilder buf = new StringBuilder();
-		for (int i = 0; i < name.length(); i++) {
-			final char c = name.charAt(i);
-			if (Character.isUpperCase(c) && i > 0) {
-				buf.append("_");
-				buf.append(c);
-			} else {
-				buf.append(Character.toUpperCase(c));
-			}
-		}
-		// fix special case for capital ID
-		return buf.toString().replaceAll("_I_D", "_ID");
-	}
-
-	/**
-	 * Build a clean java doc comment for a field
-	 *
-	 * @param fieldNumber The field proto number
-	 * @param docContext The parsed field comment contact
-	 * @return clean comment
-	 */
-	public static String buildCleanFieldJavaDoc(int fieldNumber, Protobuf3Parser.DocCommentContext docContext) {
-		final String cleanedComment = docContext == null ? "" : cleanJavaDocComment(docContext.getText());
-		final String fieldNumComment = "<b>("+fieldNumber+")</b> ";
-		return fieldNumComment + cleanedComment;
-	}
-
-	/**
-	 * Build a clean java doc comment for an oneof field
-	 *
-	 * @param fieldNumbers The field proto numbers for all fields in oneof
-	 * @param docContext The parsed field comment contact
-	 * @return clean comment
-	 */
-	public static String buildCleanFieldJavaDoc(List<Integer> fieldNumbers, Protobuf3Parser.DocCommentContext docContext) {
-		final String cleanedComment = docContext == null ? "" : cleanJavaDocComment(docContext.getText());
-		final String fieldNumComment =
-				"<b>("+fieldNumbers.stream().map(Objects::toString).collect(Collectors.joining(", "))+")</b> ";
-		return fieldNumComment + cleanedComment;
-	}
-
-	/**
-	 * Clean up a java doc style comment removing all the "*" etc.
-	 *
-	 * @param fieldComment raw Java doc style comment
-	 * @return clean multi-line content of the comment
-	 */
-	public static String cleanJavaDocComment(String fieldComment) {
-		return cleanDocStr(fieldComment
-				.replaceAll("/\\*\\*[\n\r\s\t]*\\*[\t\s]*|[\n\r\s\t]*\\*/","") // remove java doc
-				.replaceAll("\n\s+\\*\s+","\n") // remove indenting and *
-				.replaceAll("/\\*\\*","") // remove indenting and /** at beginning of comment.
-				.trim() // Remove leading and trailing spaces.
-		);
-	}
-
-	/**
-	 * Clean a string so that it can be included in JavaDoc. Does things like replace unsupported HTML tags.
-	 *
-	 * @param docStr The string to clean
-	 * @return cleaned output
-	 */
-	public static String cleanDocStr(String docStr) {
-		return docStr
-				.replaceAll("<(/?)tt>", "<$1code>") // tt tags are not supported in javadoc
-				.replaceAll(" < ", " &lt; ") // escape loose less than
-				.replaceAll(" > ", " &gt; ") // escape loose less than
-				.replaceAll(" & ", " &amp; ") // escape loose less than
-		;
-	}
-
-	/**
-	 * Convert a field type like "long" to the Java object wrapper type "Long", or pass though if not java primitive
-	 *
-	 * @param primitiveFieldType java field type like "int" etc
-	 * @return java object wrapper type like "Integer" or pass though
-	 */
-	public static String javaPrimitiveToObjectType(String primitiveFieldType) {
-		return switch(primitiveFieldType){
-			case "boolean" -> "Boolean";
-			case "int" -> "Integer";
-			case "long" -> "Long";
-			case "float" -> "Float";
-			case "double" -> "Double";
-			default -> primitiveFieldType;
-		};
-	}
-
-	/**
-	 * Recursively calculates the hashcode for a message fields.
-	 *
-	 * @param fields The fields of this object.
-	 * @param generatedCodeSoFar The accumulated hash code so far.
-	 * @return The generated code for getting the hashCode value.
-	 */
-	public static String getFieldsHashCode(final List<Field> fields, String generatedCodeSoFar) {
-		for (Field f : fields) {
-			if (f.parent() != null) {
-				final OneOfField oneOfField = f.parent();
-				generatedCodeSoFar += getFieldsHashCode(oneOfField.fields(), generatedCodeSoFar);
-			} else if (f.optionalValueType()) {
-				generatedCodeSoFar = getPrimitiveWrapperHashCodeGeneration(generatedCodeSoFar, f);
-			} else if (f.repeated()) {
-				generatedCodeSoFar = getRepeatedHashCodeGeneration(generatedCodeSoFar, f);
-			} else {
-                if (f.type() == Field.FieldType.FIXED32 ||
-                        f.type() == Field.FieldType.INT32 ||
-                        f.type() == Field.FieldType.SFIXED32 ||
-                        f.type() == Field.FieldType.SINT32 ||
-                        f.type() == Field.FieldType.UINT32) {
-                    generatedCodeSoFar += (
-                            """
+     * Makes a tag value given a field number and wire type.
+     *
+     * @param wireType the wire type part of tag
+     * @param fieldNumber the field number part of tag
+     * @return packed encoded tag
+     */
+    public static int getTag(final int wireType, final int fieldNumber) {
+        return (fieldNumber << TAG_TYPE_BITS) | wireType;
+    }
+
+    /**
+     * Make sure first character of a string is upper case
+     *
+     * @param name string input who's first character can be upper or lower case
+     * @return name with first character converted to upper case
+     */
+    public static String capitalizeFirstLetter(String name) {
+        if (!name.isEmpty()) {
+            if (name.chars().allMatch(Character::isUpperCase)) {
+                return Character.toUpperCase(name.charAt(0)) + name.substring(1).toLowerCase();
+            } else {
+                return Character.toUpperCase(name.charAt(0)) + name.substring(1);
+            }
+        }
+        return name;
+    }
+
+    /**
+     * Convert names like "hello_world" to "HelloWorld" or "helloWorld" depending on firstUpper.
+     * Also handles special case like "HELLO_WORLD" to same output as "hello_world", while
+     * "HelloWorld_Two" still becomes "helloWorldTwo".
+     *
+     * @param name input name in snake case
+     * @param firstUpper if true then first char is upper case otherwise it is lower
+     * @return out name in camel case
+     */
+    @NonNull
+    public static String snakeToCamel(@NonNull String name, boolean firstUpper) {
+        final String out =
+                Arrays.stream(name.split("_"))
+                        .map(Common::capitalizeFirstLetter)
+                        .collect(Collectors.joining(""));
+        return (firstUpper
+                        ? Character.toUpperCase(out.charAt(0))
+                        : Character.toLowerCase(out.charAt(0)))
+                + out.substring(1);
+    }
+
+    /**
+     * Convert a camel case name to upper case snake case
+     *
+     * @param name the input name in camel case
+     * @return output name in upper snake case
+     */
+    public static String camelToUpperSnake(String name) {
+        // check if already camel upper
+        if (name.chars()
+                .allMatch(c -> Character.isUpperCase(c) || Character.isDigit(c) || c == '_'))
+            return name;
+        // check if already has underscores, then just capitalize
+        if (name.contains("_")) return name.toUpperCase();
+        // else convert
+        final StringBuilder buf = new StringBuilder();
+        for (int i = 0; i < name.length(); i++) {
+            final char c = name.charAt(i);
+            if (Character.isUpperCase(c) && i > 0) {
+                buf.append("_");
+                buf.append(c);
+            } else {
+                buf.append(Character.toUpperCase(c));
+            }
+        }
+        // fix special case for capital ID
+        return buf.toString().replaceAll("_I_D", "_ID");
+    }
+
+    /**
+     * Build a clean java doc comment for a field
+     *
+     * @param fieldNumber The field proto number
+     * @param docContext The parsed field comment contact
+     * @return clean comment
+     */
+    public static String buildCleanFieldJavaDoc(
+            int fieldNumber, Protobuf3Parser.DocCommentContext docContext) {
+        final String cleanedComment =
+                docContext == null ? "" : cleanJavaDocComment(docContext.getText());
+        final String fieldNumComment = "<b>(" + fieldNumber + ")</b> ";
+        return fieldNumComment + cleanedComment;
+    }
+
+    /**
+     * Build a clean java doc comment for an oneof field
+     *
+     * @param fieldNumbers The field proto numbers for all fields in oneof
+     * @param docContext The parsed field comment contact
+     * @return clean comment
+     */
+    public static String buildCleanFieldJavaDoc(
+            List<Integer> fieldNumbers, Protobuf3Parser.DocCommentContext docContext) {
+        final String cleanedComment =
+                docContext == null ? "" : cleanJavaDocComment(docContext.getText());
+        final String fieldNumComment =
+                "<b>("
+                        + fieldNumbers.stream()
+                                .map(Objects::toString)
+                                .collect(Collectors.joining(", "))
+                        + ")</b> ";
+        return fieldNumComment + cleanedComment;
+    }
+
+    /**
+     * Clean up a java doc style comment removing all the "*" etc.
+     *
+     * @param fieldComment raw Java doc style comment
+     * @return clean multi-line content of the comment
+     */
+    public static String cleanJavaDocComment(String fieldComment) {
+        return cleanDocStr(
+                fieldComment
+                        .replaceAll(
+                                "/\\*\\*[\n\r\s\t]*\\*[\t\s]*|[\n\r\s\t]*\\*/",
+                                "") // remove java doc
+                        .replaceAll("\n\s+\\*\s+", "\n") // remove indenting and *
+                        .replaceAll(
+                                "/\\*\\*", "") // remove indenting and /** at beginning of comment.
+                        .trim() // Remove leading and trailing spaces.
+                );
+    }
+
+    /**
+     * Clean a string so that it can be included in JavaDoc. Does things like replace unsupported
+     * HTML tags.
+     *
+     * @param docStr The string to clean
+     * @return cleaned output
+     */
+    public static String cleanDocStr(String docStr) {
+        return docStr.replaceAll("<(/?)tt>", "<$1code>") // tt tags are not supported in javadoc
+                .replaceAll(" < ", " &lt; ") // escape loose less than
+                .replaceAll(" > ", " &gt; ") // escape loose less than
+                .replaceAll(" & ", " &amp; ") // escape loose less than
+        ;
+    }
+
+    /**
+     * Convert a field type like "long" to the Java object wrapper type "Long", or pass though if
+     * not java primitive
+     *
+     * @param primitiveFieldType java field type like "int" etc
+     * @return java object wrapper type like "Integer" or pass though
+     */
+    public static String javaPrimitiveToObjectType(String primitiveFieldType) {
+        return switch (primitiveFieldType) {
+            case "boolean" -> "Boolean";
+            case "int" -> "Integer";
+            case "long" -> "Long";
+            case "float" -> "Float";
+            case "double" -> "Double";
+            default -> primitiveFieldType;
+        };
+    }
+
+    /**
+     * Recursively calculates the hashcode for a message fields.
+     *
+     * @param fields The fields of this object.
+     * @param generatedCodeSoFar The accumulated hash code so far.
+     * @return The generated code for getting the hashCode value.
+     */
+    public static String getFieldsHashCode(final List<Field> fields, String generatedCodeSoFar) {
+        for (Field f : fields) {
+            if (f.parent() != null) {
+                final OneOfField oneOfField = f.parent();
+                generatedCodeSoFar += getFieldsHashCode(oneOfField.fields(), generatedCodeSoFar);
+            } else if (f.optionalValueType()) {
+                generatedCodeSoFar = getPrimitiveWrapperHashCodeGeneration(generatedCodeSoFar, f);
+            } else if (f.repeated()) {
+                generatedCodeSoFar = getRepeatedHashCodeGeneration(generatedCodeSoFar, f);
+            } else {
+                if (f.type() == Field.FieldType.FIXED32
+                        || f.type() == Field.FieldType.INT32
+                        || f.type() == Field.FieldType.SFIXED32
+                        || f.type() == Field.FieldType.SINT32
+                        || f.type() == Field.FieldType.UINT32) {
+                    generatedCodeSoFar +=
+                            ("""
                              if ($fieldName != DEFAULT.$fieldName) {
                                  result = 31 * result + Integer.hashCode($fieldName);
                              }
-                             """).replace("$fieldName", f.nameCamelFirstLower());
-                } else if (f.type() == Field.FieldType.FIXED64 ||
-                        f.type() == Field.FieldType.INT64 ||
-                        f.type() == Field.FieldType.SFIXED64 ||
-                        f.type() == Field.FieldType.SINT64 ||
-                        f.type() == Field.FieldType.UINT64) {
-                    generatedCodeSoFar += (
-                            """
+                             """)
+                                    .replace("$fieldName", f.nameCamelFirstLower());
+                } else if (f.type() == Field.FieldType.FIXED64
+                        || f.type() == Field.FieldType.INT64
+                        || f.type() == Field.FieldType.SFIXED64
+                        || f.type() == Field.FieldType.SINT64
+                        || f.type() == Field.FieldType.UINT64) {
+                    generatedCodeSoFar +=
+                            ("""
                              if ($fieldName != DEFAULT.$fieldName) {
                                  result = 31 * result + Long.hashCode($fieldName);
                              }
-                             """).replace("$fieldName", f.nameCamelFirstLower());
+                             """)
+                                    .replace("$fieldName", f.nameCamelFirstLower());
                 } else if (f.type() == Field.FieldType.BOOL) {
-                    generatedCodeSoFar += (
-                            """
+                    generatedCodeSoFar +=
+                            ("""
                             if ($fieldName != DEFAULT.$fieldName) {
                                result = 31 * result + Boolean.hashCode($fieldName);
                             }
-                            """).replace("$fieldName", f.nameCamelFirstLower());
+                            """)
+                                    .replace("$fieldName", f.nameCamelFirstLower());
                 } else if (f.type() == Field.FieldType.FLOAT) {
-                    generatedCodeSoFar += (
-                            """
+                    generatedCodeSoFar +=
+                            ("""
                             if ($fieldName != DEFAULT.$fieldName) {
                                result = 31 * result + Float.hashCode($fieldName);
                             }
-                            """).replace("$fieldName", f.nameCamelFirstLower());
+                            """)
+                                    .replace("$fieldName", f.nameCamelFirstLower());
                 } else if (f.type() == Field.FieldType.DOUBLE) {
-                    generatedCodeSoFar += (
-							"""
+                    generatedCodeSoFar +=
+                            ("""
                             if ($fieldName != DEFAULT.$fieldName) {
                                result = 31 * result + Double.hashCode($fieldName);
                             }
-                            """).replace("$fieldName", f.nameCamelFirstLower());
+                            """)
+                                    .replace("$fieldName", f.nameCamelFirstLower());
                 } else if (f.type() == Field.FieldType.BYTES) {
-                    generatedCodeSoFar += (
-                            """
+                    generatedCodeSoFar +=
+                            ("""
                              if ($fieldName != null && !$fieldName.equals(DEFAULT.$fieldName)) {
                                 result = 31 * result + $fieldName.hashCode();
                              }
-                             """).replace("$fieldName", f.nameCamelFirstLower());
+                             """)
+                                    .replace("$fieldName", f.nameCamelFirstLower());
                 } else if (f.type() == Field.FieldType.ENUM) {
-                    generatedCodeSoFar += (
-                            """
+                    generatedCodeSoFar +=
+                            ("""
                              if ($fieldName != null && !$fieldName.equals(DEFAULT.$fieldName)) {
                                 result = 31 * result + Integer.hashCode($fieldName.protoOrdinal());
                              }
-                             """).replace("$fieldName", f.nameCamelFirstLower());
-				} else if (f.type() == Field.FieldType.MAP) {
-					generatedCodeSoFar += getMapHashCodeGeneration(generatedCodeSoFar, f);
-				} else if (f.type() == Field.FieldType.STRING ||
-						f.parent() == null) { // process sub message
-					generatedCodeSoFar += (
-							"""
+                             """)
+                                    .replace("$fieldName", f.nameCamelFirstLower());
+                } else if (f.type() == Field.FieldType.MAP) {
+                    generatedCodeSoFar += getMapHashCodeGeneration(generatedCodeSoFar, f);
+                } else if (f.type() == Field.FieldType.STRING
+                        || f.parent() == null) { // process sub message
+                    generatedCodeSoFar +=
+                            ("""
                              if ($fieldName != null && !$fieldName.equals(DEFAULT.$fieldName)) {
                                 result = 31 * result + $fieldName.hashCode();
                              }
-                             """).replace("$fieldName", f.nameCamelFirstLower());
+                             """)
+                                    .replace("$fieldName", f.nameCamelFirstLower());
                 } else {
-                    throw new RuntimeException("Unexpected field type for getting HashCode - " + f.type().toString());
+                    throw new RuntimeException(
+                            "Unexpected field type for getting HashCode - " + f.type().toString());
                 }
             }
-		}
-		return generatedCodeSoFar.indent(DEFAULT_INDENT * 2);
-	}
-
-	/**
-	 * Get the hashcode codegen for a optional field.
-	 * @param generatedCodeSoFar The string that the codegen is generated into.
-	 * @param f The field for which to generate the hash code.
-	 * @return Updated codegen string.
-	 */
-	@NonNull
-	private static String getPrimitiveWrapperHashCodeGeneration(String generatedCodeSoFar, Field f) {
-		switch (f.messageType()) {
-			case "StringValue" -> generatedCodeSoFar += (
-     			"""
+        }
+        return generatedCodeSoFar.indent(DEFAULT_INDENT * 2);
+    }
+
+    /**
+     * Get the hashcode codegen for a optional field.
+     *
+     * @param generatedCodeSoFar The string that the codegen is generated into.
+     * @param f The field for which to generate the hash code.
+     * @return Updated codegen string.
+     */
+    @NonNull
+    private static String getPrimitiveWrapperHashCodeGeneration(
+            String generatedCodeSoFar, Field f) {
+        switch (f.messageType()) {
+            case "StringValue" -> generatedCodeSoFar +=
+                    ("""
 				if ($fieldName != null && !$fieldName.equals(DEFAULT.$fieldName)) {
 				    result = 31 * result + $fieldName.hashCode();
 				}
-				""").replace("$fieldName", f.nameCamelFirstLower());
-			case "BoolValue" -> generatedCodeSoFar += (
-				"""
+				""")
+                            .replace("$fieldName", f.nameCamelFirstLower());
+            case "BoolValue" -> generatedCodeSoFar +=
+                    ("""
 				if ($fieldName != null && !$fieldName.equals(DEFAULT.$fieldName)) {
 				    result = 31 * result + Boolean.hashCode($fieldName);
 				}
-				""").replace("$fieldName", f.nameCamelFirstLower());
-			case "Int32Value", "UInt32Value" -> generatedCodeSoFar += (
-     			"""
+				""")
+                            .replace("$fieldName", f.nameCamelFirstLower());
+            case "Int32Value", "UInt32Value" -> generatedCodeSoFar +=
+                    ("""
 				if ($fieldName != null && !$fieldName.equals(DEFAULT.$fieldName)) {
 				    result = 31 * result + Integer.hashCode($fieldName);
 				}
-				""").replace("$fieldName", f.nameCamelFirstLower());
-			case "Int64Value", "UInt64Value" -> generatedCodeSoFar += (
-     			"""
+				""")
+                            .replace("$fieldName", f.nameCamelFirstLower());
+            case "Int64Value", "UInt64Value" -> generatedCodeSoFar +=
+                    ("""
 				if ($fieldName != null && !$fieldName.equals(DEFAULT.$fieldName)) {
 				    result = 31 * result + Long.hashCode($fieldName);
 				}
-				""").replace("$fieldName", f.nameCamelFirstLower());
-			case "FloatValue" -> generatedCodeSoFar += (
-     			"""
+				""")
+                            .replace("$fieldName", f.nameCamelFirstLower());
+            case "FloatValue" -> generatedCodeSoFar +=
+                    ("""
 				if ($fieldName != null && !$fieldName.equals(DEFAULT.$fieldName)) {
 				    result = 31 * result + Float.hashCode($fieldName);
 				}
-				""").replace("$fieldName", f.nameCamelFirstLower());
-			case "DoubleValue" -> generatedCodeSoFar += (
-     			"""
+				""")
+                            .replace("$fieldName", f.nameCamelFirstLower());
+            case "DoubleValue" -> generatedCodeSoFar +=
+                    ("""
 				if ($fieldName != null && !$fieldName.equals(DEFAULT.$fieldName)) {
 				    result = 31 * result + Double.hashCode($fieldName);
 				}
-				""").replace("$fieldName", f.nameCamelFirstLower());
-			case "BytesValue" -> generatedCodeSoFar += (
-     			"""
+				""")
+                            .replace("$fieldName", f.nameCamelFirstLower());
+            case "BytesValue" -> generatedCodeSoFar +=
+                    ("""
 				if ($fieldName != null && !$fieldName.equals(DEFAULT.$fieldName)) {
 				    result = 31 * result + ($fieldName == null ? 0 : $fieldName.hashCode());
 				}
-				""").replace("$fieldName", f.nameCamelFirstLower());
-			default -> throw new UnsupportedOperationException("Unhandled optional message type:" + f.messageType());
-		}
-		return generatedCodeSoFar;
-	}
-
-	/**
-	 * Get the hashcode codegen for a repeated field.
-	 * @param generatedCodeSoFar The string that the codegen is generated into.
-	 * @param f The field for which to generate the hash code.
-	 * @return Updated codegen string.
-	 */
-	@NonNull
-	private static String getRepeatedHashCodeGeneration(String generatedCodeSoFar, Field f) {
-		generatedCodeSoFar += (
-            """
+				""")
+                            .replace("$fieldName", f.nameCamelFirstLower());
+            default -> throw new UnsupportedOperationException(
+                    "Unhandled optional message type:" + f.messageType());
+        }
+        return generatedCodeSoFar;
+    }
+
+    /**
+     * Get the hashcode codegen for a repeated field.
+     *
+     * @param generatedCodeSoFar The string that the codegen is generated into.
+     * @param f The field for which to generate the hash code.
+     * @return Updated codegen string.
+     */
+    @NonNull
+    private static String getRepeatedHashCodeGeneration(String generatedCodeSoFar, Field f) {
+        generatedCodeSoFar +=
+                ("""
             java.util.List list$$fieldName = $fieldName;
             if (list$$fieldName != null) {
                 for (Object o : list$$fieldName) {
@@ -349,20 +391,22 @@ private static String getRepeatedHashCodeGeneration(String generatedCodeSoFar, F
                     }
                }
             }
-            """).replace("$fieldName", f.nameCamelFirstLower());
-		return generatedCodeSoFar;
-	}
-
-	/**
-	 * Get the hashcode codegen for a map field.
-	 * @param generatedCodeSoFar The string that the codegen is generated into.
-	 * @param f The field for which to generate the hash code.
-	 * @return Updated codegen string.
-	 */
-	@NonNull
-	private static String getMapHashCodeGeneration(String generatedCodeSoFar, final Field f) {
-		generatedCodeSoFar += (
-				"""
+            """)
+                        .replace("$fieldName", f.nameCamelFirstLower());
+        return generatedCodeSoFar;
+    }
+
+    /**
+     * Get the hashcode codegen for a map field.
+     *
+     * @param generatedCodeSoFar The string that the codegen is generated into.
+     * @param f The field for which to generate the hash code.
+     * @return Updated codegen string.
+     */
+    @NonNull
+    private static String getMapHashCodeGeneration(String generatedCodeSoFar, final Field f) {
+        generatedCodeSoFar +=
+                ("""
 				for (Object k : ((PbjMap) $fieldName).getSortedKeys()) {
 					if (k != null) {
 						result = 31 * result + k.hashCode();
@@ -376,162 +420,178 @@ private static String getMapHashCodeGeneration(String generatedCodeSoFar, final
 						result = 31 * result;
 					}
 				}
-				""").replace("$fieldName", f.nameCamelFirstLower());
-		return generatedCodeSoFar;
-	}
-
-	/**
-	 * Recursively calculates `equals` statement for a message fields.
-	 *
-	 * @param fields The fields of this object.
-	 * @param generatedCodeSoFar The accumulated hash code so far.
-	 * @return The generated code for getting the object equality
-	 */
-	public static String getFieldsEqualsStatements(final List<Field> fields, String generatedCodeSoFar) {
-		for (Field f : fields) {
-			if (f.parent() != null) {
-				final OneOfField oneOfField = f.parent();
-				generatedCodeSoFar += getFieldsEqualsStatements(oneOfField.fields(), generatedCodeSoFar);
-			}
-
-			if (f.optionalValueType()) {
-				generatedCodeSoFar = getPrimitiveWrapperEqualsGeneration(generatedCodeSoFar, f);
-			}
-			else if (f.repeated()) {
-				generatedCodeSoFar = getRepeatedEqualsGeneration(generatedCodeSoFar, f);
-			} else {
+				""")
+                        .replace("$fieldName", f.nameCamelFirstLower());
+        return generatedCodeSoFar;
+    }
+
+    /**
+     * Recursively calculates the `equals` statement for a message's fields.
+     *
+     * @param fields The fields of this object.
+     * @param generatedCodeSoFar The accumulated generated code so far.
+     * @return The generated code for checking object equality
+     */
+    public static String getFieldsEqualsStatements(
+            final List<Field> fields, String generatedCodeSoFar) {
+        for (Field f : fields) {
+            if (f.parent() != null) {
+                final OneOfField oneOfField = f.parent();
+                generatedCodeSoFar +=
+                        getFieldsEqualsStatements(oneOfField.fields(), generatedCodeSoFar);
+            }
+
+            if (f.optionalValueType()) {
+                generatedCodeSoFar = getPrimitiveWrapperEqualsGeneration(generatedCodeSoFar, f);
+            } else if (f.repeated()) {
+                generatedCodeSoFar = getRepeatedEqualsGeneration(generatedCodeSoFar, f);
+            } else {
                 f.nameCamelFirstLower();
-                if (f.type() == Field.FieldType.FIXED32 ||
-                        f.type() == Field.FieldType.INT32 ||
-                        f.type() == Field.FieldType.SFIXED32 ||
-                        f.type() == Field.FieldType.SINT32 ||
-                        f.type() == Field.FieldType.UINT32) {
+                if (f.type() == Field.FieldType.FIXED32
+                        || f.type() == Field.FieldType.INT32
+                        || f.type() == Field.FieldType.SFIXED32
+                        || f.type() == Field.FieldType.SINT32
+                        || f.type() == Field.FieldType.UINT32) {
                     generatedCodeSoFar +=
-                             """
+                            """
                              if ($fieldName != thatObj.$fieldName) {
                                  return false;
                              }
-                             """.replace("$fieldName", f.nameCamelFirstLower());
-                } else if (f.type() == Field.FieldType.FIXED64 ||
-                        f.type() == Field.FieldType.INT64 ||
-                        f.type() == Field.FieldType.SFIXED64 ||
-                        f.type() == Field.FieldType.SINT64 ||
-                        f.type() == Field.FieldType.UINT64) {
+                             """
+                                    .replace("$fieldName", f.nameCamelFirstLower());
+                } else if (f.type() == Field.FieldType.FIXED64
+                        || f.type() == Field.FieldType.INT64
+                        || f.type() == Field.FieldType.SFIXED64
+                        || f.type() == Field.FieldType.SINT64
+                        || f.type() == Field.FieldType.UINT64) {
                     generatedCodeSoFar +=
                             """
                             if ($fieldName != thatObj.$fieldName) {
                                 return false;
                             }
-                            """.replace("$fieldName", f.nameCamelFirstLower());
+                            """
+                                    .replace("$fieldName", f.nameCamelFirstLower());
                 } else if (f.type() == Field.FieldType.BOOL) {
                     generatedCodeSoFar +=
                             """
                             if ($fieldName != thatObj.$fieldName) {
                                 return false;
                             }
-                             """.replace("$fieldName", f.nameCamelFirstLower());
+                             """
+                                    .replace("$fieldName", f.nameCamelFirstLower());
                 } else if (f.type() == Field.FieldType.FLOAT) {
                     generatedCodeSoFar +=
                             """
                             if ($fieldName != thatObj.$fieldName) {
                                 return false;
                             }
-                             """.replace("$fieldName", f.nameCamelFirstLower());
+                             """
+                                    .replace("$fieldName", f.nameCamelFirstLower());
                 } else if (f.type() == Field.FieldType.DOUBLE) {
                     generatedCodeSoFar +=
                             """
                             if ($fieldName != thatObj.$fieldName) {
                                 return false;
                             }
-                             """.replace("$fieldName", f.nameCamelFirstLower());
-                } else if (f.type() == Field.FieldType.STRING ||
-                        f.type() == Field.FieldType.BYTES ||
-                        f.type() == Field.FieldType.ENUM ||
-                        f.type() == Field.FieldType.MAP ||
-                        f.parent() == null /* Process a sub-message */) {
-                    generatedCodeSoFar += (
-                            """
+                             """
+                                    .replace("$fieldName", f.nameCamelFirstLower());
+                } else if (f.type() == Field.FieldType.STRING
+                        || f.type() == Field.FieldType.BYTES
+                        || f.type() == Field.FieldType.ENUM
+                        || f.type() == Field.FieldType.MAP
+                        || f.parent() == null /* Process a sub-message */) {
+                    generatedCodeSoFar +=
+                            ("""
                             if ($fieldName == null && thatObj.$fieldName != null) {
                                 return false;
                             }
                             if ($fieldName != null && !$fieldName.equals(thatObj.$fieldName)) {
                                 return false;
                             }
-                            """).replace("$fieldName", f.nameCamelFirstLower());
+                            """)
+                                    .replace("$fieldName", f.nameCamelFirstLower());
                 } else {
-                    throw new IllegalArgumentException("Unexpected field type for getting Equals - " + f.type().toString());
+                    throw new IllegalArgumentException(
+                            "Unexpected field type for getting Equals - " + f.type().toString());
                 }
             }
-		}
-		return generatedCodeSoFar.indent(DEFAULT_INDENT);
-	}
-
-	/**
-	 * Get the equals codegen for a optional field.
-	 * @param generatedCodeSoFar The string that the codegen is generated into.
-	 * @param f The field for which to generate the equals code.
-	 * @return Updated codegen string.
-	 */
-	@NonNull
-	private static String getPrimitiveWrapperEqualsGeneration(String generatedCodeSoFar, Field f) {
-		switch (f.messageType()) {
-			case "StringValue" ->
-				generatedCodeSoFar += (
-                """
+        }
+        return generatedCodeSoFar.indent(DEFAULT_INDENT);
+    }
+
+    /**
+     * Get the equals codegen for an optional field.
+     *
+     * @param generatedCodeSoFar The string that the codegen is generated into.
+     * @param f The field for which to generate the equals code.
+     * @return Updated codegen string.
+     */
+    @NonNull
+    private static String getPrimitiveWrapperEqualsGeneration(String generatedCodeSoFar, Field f) {
+        switch (f.messageType()) {
+            case "StringValue" -> generatedCodeSoFar +=
+                    ("""
                 if (this.$fieldName == null && thatObj.$fieldName != null) {
                     return false;
                 }
                 if (this.$fieldName != null && !$fieldName.equals(thatObj.$fieldName)) {
                     return false;
                 }
-                """).replace("$fieldName", f.nameCamelFirstLower());
-			case "BoolValue" ->
-
-				generatedCodeSoFar += (
-                """
+                """)
+                            .replace("$fieldName", f.nameCamelFirstLower());
+            case "BoolValue" -> generatedCodeSoFar +=
+                    ("""
                 if (this.$fieldName == null && thatObj.$fieldName != null) {
                     return false;
                 }
                 if (this.$fieldName != null && !$fieldName.equals(thatObj.$fieldName)) {
                     return false;
                 }
-                """).replace("$fieldName", f.nameCamelFirstLower());
-			case "Int32Value", "UInt32Value", "Int64Value", "UInt64Value", "FloatValue", "DoubleValue" ->
-				generatedCodeSoFar += (
-                """
+                """)
+                            .replace("$fieldName", f.nameCamelFirstLower());
+            case "Int32Value",
+                    "UInt32Value",
+                    "Int64Value",
+                    "UInt64Value",
+                    "FloatValue",
+                    "DoubleValue" -> generatedCodeSoFar +=
+                    ("""
                 if (this.$fieldName == null && thatObj.$fieldName != null) {
                     return false;
                 }
                 if (this.$fieldName != null && !$fieldName.equals(thatObj.$fieldName)) {
                     return false;
                 }
-                """).replace("$fieldName", f.nameCamelFirstLower());
-            case "BytesValue" ->
-				generatedCodeSoFar += (
-                """
+                """)
+                            .replace("$fieldName", f.nameCamelFirstLower());
+            case "BytesValue" -> generatedCodeSoFar +=
+                    ("""
                 if (this.$fieldName == null && thatObj.$fieldName != null) {
                     return false;
                 }
                 if (this.$fieldName != null && !$fieldName.equals(thatObj.$fieldName)) {
                     return false;
                 }
-                """).replace("$fieldName", f.nameCamelFirstLower());
-			default -> throw new UnsupportedOperationException("Unhandled optional message type:" + f.messageType());
-		}
-		;
-		return generatedCodeSoFar;
-	}
-
-	/**
-	 * Get the equals codegen for a repeated field.
-	 * @param generatedCodeSoFar The string that the codegen is generated into.
-	 * @param f The field for which to generate the equals code.
-	 * @return Updated codegen string.
-	 */
-	@NonNull
-	private static String getRepeatedEqualsGeneration(String generatedCodeSoFar, Field f) {
-		generatedCodeSoFar += (
-    	"""
+                """)
+                            .replace("$fieldName", f.nameCamelFirstLower());
+            default -> throw new UnsupportedOperationException(
+                    "Unhandled optional message type:" + f.messageType());
+        }
+        ;
+        return generatedCodeSoFar;
+    }
+
+    /**
+     * Get the equals codegen for a repeated field.
+     *
+     * @param generatedCodeSoFar The string that the codegen is generated into.
+     * @param f The field for which to generate the equals code.
+     * @return Updated codegen string.
+     */
+    @NonNull
+    private static String getRepeatedEqualsGeneration(String generatedCodeSoFar, Field f) {
+        generatedCodeSoFar +=
+                ("""
 		if (this.$fieldName == null && thatObj.$fieldName != null) {
 		    return false;
 		}
@@ -539,107 +599,118 @@ private static String getRepeatedEqualsGeneration(String generatedCodeSoFar, Fie
 		if (this.$fieldName != null && !$fieldName.equals(thatObj.$fieldName)) {
 		    return false;
 		}
-		""").replace("$fieldName", f.nameCamelFirstLower());
-		return generatedCodeSoFar;
-	}
-
-
-	/**
-	 * Generate the compareTo method content for the provided fields
-	 *
-	 * @param fields                The fields of this object.
-	 * @param generatedCodeSoFar    the generated code so far (non-empty in case of nested objects)
-	 * @param destinationSrcDir a directory where the previously generated code is saved
-	 * @return The generated code for compareTo method body
-	 */
-	public static String getFieldsCompareToStatements(final List<Field> fields, String generatedCodeSoFar, File destinationSrcDir) {
-		for (Field f : fields) {
-			if (f.optionalValueType()) {
-				generatedCodeSoFar += getPrimitiveWrapperCompareToGeneration(f);
-			} else if (f.repeated()) {
-				throw new UnsupportedOperationException("Repeated fields are not supported in compareTo method");
-			} else {
-				if (f.type() == Field.FieldType.FIXED32 ||
-						f.type() == Field.FieldType.INT32 ||
-						f.type() == Field.FieldType.SFIXED32 ||
-						f.type() == Field.FieldType.SINT32) {
-					generatedCodeSoFar +=
-							"""
+		""")
+                        .replace("$fieldName", f.nameCamelFirstLower());
+        return generatedCodeSoFar;
+    }
+
+    /**
+     * Generate the compareTo method content for the provided fields
+     *
+     * @param fields The fields of this object.
+     * @param generatedCodeSoFar the generated code so far (non-empty in case of nested objects)
+     * @param destinationSrcDir a directory where the previously generated code is saved
+     * @return The generated code for compareTo method body
+     */
+    public static String getFieldsCompareToStatements(
+            final List<Field> fields, String generatedCodeSoFar, File destinationSrcDir) {
+        for (Field f : fields) {
+            if (f.optionalValueType()) {
+                generatedCodeSoFar += getPrimitiveWrapperCompareToGeneration(f);
+            } else if (f.repeated()) {
+                throw new UnsupportedOperationException(
+                        "Repeated fields are not supported in compareTo method");
+            } else {
+                if (f.type() == Field.FieldType.FIXED32
+                        || f.type() == Field.FieldType.INT32
+                        || f.type() == Field.FieldType.SFIXED32
+                        || f.type() == Field.FieldType.SINT32) {
+                    generatedCodeSoFar +=
+                            """
 							result = Integer.compare($fieldName, thatObj.$fieldName);
 							if (result != 0) {
 							    return result;
 							}
-							""".replace("$fieldName", f.nameCamelFirstLower());
-				} else if (f.type() == Field.FieldType.UINT32) {
-						generatedCodeSoFar +=
-       						"""
+							"""
+                                    .replace("$fieldName", f.nameCamelFirstLower());
+                } else if (f.type() == Field.FieldType.UINT32) {
+                    generatedCodeSoFar +=
+                            """
 							result = Integer.compareUnsigned($fieldName, thatObj.$fieldName);
 							if (result != 0) {
 							    return result;
 							}
-							""".replace("$fieldName", f.nameCamelFirstLower());
-
-				} else if (f.type() == Field.FieldType.FIXED64 ||
-						f.type() == Field.FieldType.INT64 ||
-						f.type() == Field.FieldType.SFIXED64 ||
-						f.type() == Field.FieldType.SINT64) {
-					generatedCodeSoFar +=
 							"""
+                                    .replace("$fieldName", f.nameCamelFirstLower());
+
+                } else if (f.type() == Field.FieldType.FIXED64
+                        || f.type() == Field.FieldType.INT64
+                        || f.type() == Field.FieldType.SFIXED64
+                        || f.type() == Field.FieldType.SINT64) {
+                    generatedCodeSoFar +=
+                            """
 							result = Long.compare($fieldName, thatObj.$fieldName);
 							if (result != 0) {
 							    return result;
 							}
-							""".replace("$fieldName", f.nameCamelFirstLower());
-				} else if (f.type() == Field.FieldType.UINT64) {
-					generatedCodeSoFar +=
 							"""
+                                    .replace("$fieldName", f.nameCamelFirstLower());
+                } else if (f.type() == Field.FieldType.UINT64) {
+                    generatedCodeSoFar +=
+                            """
 							result = Long.compareUnsigned($fieldName, thatObj.$fieldName);
 							if (result != 0) {
 							    return result;
 							}
-							""".replace("$fieldName", f.nameCamelFirstLower());
-				} else if (f.type() == Field.FieldType.BOOL) {
-					generatedCodeSoFar +=
 							"""
+                                    .replace("$fieldName", f.nameCamelFirstLower());
+                } else if (f.type() == Field.FieldType.BOOL) {
+                    generatedCodeSoFar +=
+                            """
 							result = Boolean.compare($fieldName, thatObj.$fieldName);
 							if (result != 0) {
 							    return result;
 							}
-							""".replace("$fieldName", f.nameCamelFirstLower());
-				} else if (f.type() == Field.FieldType.FLOAT) {
-					generatedCodeSoFar +=
 							"""
+                                    .replace("$fieldName", f.nameCamelFirstLower());
+                } else if (f.type() == Field.FieldType.FLOAT) {
+                    generatedCodeSoFar +=
+                            """
 							result = Float.compare($fieldName, thatObj.$fieldName);
 							if (result != 0) {
 							    return result;
 							}
-							""".replace("$fieldName", f.nameCamelFirstLower());
-				} else if (f.type() == Field.FieldType.DOUBLE) {
-					generatedCodeSoFar +=
 							"""
+                                    .replace("$fieldName", f.nameCamelFirstLower());
+                } else if (f.type() == Field.FieldType.DOUBLE) {
+                    generatedCodeSoFar +=
+                            """
 							result = Double.compare($fieldName, thatObj.$fieldName);
 							if (result != 0) {
 							     return result;
 							}
-							""".replace("$fieldName", f.nameCamelFirstLower());
-				} else if (f.type() == Field.FieldType.STRING ||
-						f.type() == Field.FieldType.BYTES ||
-						f.type() == Field.FieldType.ENUM) {
-					generatedCodeSoFar += generateCompareToForObject(f);
-				} else if (f.type() == Field.FieldType.MESSAGE || f.type() == Field.FieldType.ONE_OF) {
-					verifyComparable(f, destinationSrcDir);
-					generatedCodeSoFar += generateCompareToForObject(f);
-				} else {
-					throw new IllegalArgumentException("Unexpected field type for getting CompareTo - " + f.type().toString());
-				}
-			}
-		}
-		return generatedCodeSoFar.indent(DEFAULT_INDENT * 2);
-	}
+							"""
+                                    .replace("$fieldName", f.nameCamelFirstLower());
+                } else if (f.type() == Field.FieldType.STRING
+                        || f.type() == Field.FieldType.BYTES
+                        || f.type() == Field.FieldType.ENUM) {
+                    generatedCodeSoFar += generateCompareToForObject(f);
+                } else if (f.type() == Field.FieldType.MESSAGE
+                        || f.type() == Field.FieldType.ONE_OF) {
+                    verifyComparable(f, destinationSrcDir);
+                    generatedCodeSoFar += generateCompareToForObject(f);
+                } else {
+                    throw new IllegalArgumentException(
+                            "Unexpected field type for getting CompareTo - " + f.type().toString());
+                }
+            }
+        }
+        return generatedCodeSoFar.indent(DEFAULT_INDENT * 2);
+    }
 
-	@NonNull
-	private static String generateCompareToForObject(Field f) {
-		return """
+    @NonNull
+    private static String generateCompareToForObject(Field f) {
+        return """
 				if ($fieldName == null && thatObj.$fieldName != null) {
 				    return -1;
 				}
@@ -652,50 +723,58 @@ private static String generateCompareToForObject(Field f) {
 				if (result != 0) {
 				    return result;
 				}
-				""".replace("$fieldName", f.nameCamelFirstLower());
-	}
-
-	/**
-	 * Verify that the field is comparable.
-	 * @param field The field to verify.
-	 * @param destinationSrcDir The directory where the previously generated code is saved.
-	 */
-	private static void verifyComparable(final Field field, File destinationSrcDir) {
-		if (field instanceof final SingleField singleField) {
-			if (singleField.type() != Field.FieldType.MESSAGE) {
-				// everything else except message and bytes is comparable for sure
-				return;
-			}
-			// let's check if the message implements Comparable
-			final String className = singleField.javaFieldType();
-			final File javaFile = getJavaFile(destinationSrcDir, singleField.messageTypeModelPackage(), className);
-			try (BufferedReader reader = new BufferedReader(new FileReader(javaFile))) {
-				String line;
-				while ((line = reader.readLine()) != null) {
-					if (COMPARABLE_PATTERN.matcher(line).matches()) {
-						return;
-					}
-				}
-				throw new IllegalArgumentException(("Field %s.%s specified in `pbj.comparable` option must implement " +
-						"`Comparable` interface but it doesn't.").formatted(className, field.nameCamelFirstLower()));
-			} catch (IOException e) {
-				throw new RuntimeException(e);
-			}
-        } if (field instanceof final OneOfField oneOfField) {
-			oneOfField.fields().forEach(v -> verifyComparable(v, destinationSrcDir));
-		} else {
-			throw new UnsupportedOperationException("Unexpected field type - " + field.getClass());
-		}
-	}
-
-	/**
-	 * Generates the compareTo code for a primitive wrapper field.
-	 * @param f The field for which to generate the compareTo code.
-	 * @return The generated code for compareTo method body
-	 */
-	private static String getPrimitiveWrapperCompareToGeneration(Field f) {
-		final String template =
 				"""
+                .replace("$fieldName", f.nameCamelFirstLower());
+    }
+
+    /**
+     * Verify that the field is comparable.
+     *
+     * @param field The field to verify.
+     * @param destinationSrcDir The directory where the previously generated code is saved.
+     */
+    private static void verifyComparable(final Field field, File destinationSrcDir) {
+        if (field instanceof final SingleField singleField) {
+            if (singleField.type() != Field.FieldType.MESSAGE) {
+                // everything else except message and bytes is comparable for sure
+                return;
+            }
+            // let's check if the message implements Comparable
+            final String className = singleField.javaFieldType();
+            final File javaFile =
+                    getJavaFile(
+                            destinationSrcDir, singleField.messageTypeModelPackage(), className);
+            try (BufferedReader reader = new BufferedReader(new FileReader(javaFile))) {
+                String line;
+                while ((line = reader.readLine()) != null) {
+                    if (COMPARABLE_PATTERN.matcher(line).matches()) {
+                        return;
+                    }
+                }
+                throw new IllegalArgumentException(
+                        ("Field %s.%s specified in `pbj.comparable` option must implement "
+                                        + "`Comparable` interface but it doesn't.")
+                                .formatted(className, field.nameCamelFirstLower()));
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            }
+        }
+        if (field instanceof final OneOfField oneOfField) {
+            oneOfField.fields().forEach(v -> verifyComparable(v, destinationSrcDir));
+        } else {
+            throw new UnsupportedOperationException("Unexpected field type - " + field.getClass());
+        }
+    }
+
+    /**
+     * Generates the compareTo code for a primitive wrapper field.
+     *
+     * @param f The field for which to generate the compareTo code.
+     * @return The generated code for compareTo method body
+     */
+    private static String getPrimitiveWrapperCompareToGeneration(Field f) {
+        final String template =
+                """
                     if ($fieldName == null && thatObj.$fieldName != null) {
                         return -1;
                     } else if ($fieldName != null && thatObj.$fieldName == null) {
@@ -708,50 +787,58 @@ private static String getPrimitiveWrapperCompareToGeneration(Field f) {
                     }
                     """;
 
+        final String compareStatement =
+                switch (f.messageType()) {
+                    case "StringValue", "BytesValue" -> "$fieldName.compareTo(thatObj.$fieldName)";
+                    case "BoolValue" -> "java.lang.Boolean.compare($fieldName, thatObj.$fieldName)";
+                    case "Int32Value" -> "java.lang.Integer.compare($fieldName,"
+                            + " thatObj.$fieldName)";
+                    case "UInt32Value" -> "java.lang.Integer.compareUnsigned($fieldName,"
+                            + " thatObj.$fieldName)";
+                    case "Int64Value" -> "java.lang.Long.compare($fieldName, thatObj.$fieldName)";
+                    case "UInt64Value" -> "java.lang.Long.compareUnsigned($fieldName,"
+                            + " thatObj.$fieldName)";
+                    case "FloatValue" -> "java.lang.Float.compare($fieldName, thatObj.$fieldName)";
+                    case "DoubleValue" -> "java.lang.Double.compare($fieldName,"
+                            + " thatObj.$fieldName)";
+                    default -> throw new UnsupportedOperationException(
+                            "Unhandled optional message type:" + f.messageType());
+                };
+
+        return template.replace("$compareStatement", compareStatement)
+                .replace("$fieldName", f.nameCamelFirstLower());
+    }
 
-		final String compareStatement = switch (f.messageType()) {
-			case "StringValue", "BytesValue" -> "$fieldName.compareTo(thatObj.$fieldName)";
-			case "BoolValue" -> "java.lang.Boolean.compare($fieldName, thatObj.$fieldName)";
-			case "Int32Value" -> "java.lang.Integer.compare($fieldName, thatObj.$fieldName)";
-			case "UInt32Value" -> "java.lang.Integer.compareUnsigned($fieldName, thatObj.$fieldName)";
-			case "Int64Value" -> "java.lang.Long.compare($fieldName, thatObj.$fieldName)";
-			case "UInt64Value" -> "java.lang.Long.compareUnsigned($fieldName, thatObj.$fieldName)";
-			case "FloatValue" -> "java.lang.Float.compare($fieldName, thatObj.$fieldName)";
-			case "DoubleValue" -> "java.lang.Double.compare($fieldName, thatObj.$fieldName)";
-            default -> throw new UnsupportedOperationException("Unhandled optional message type:" + f.messageType());
-		};
-
-		return template
-				.replace("$compareStatement", compareStatement)
-				.replace("$fieldName", f.nameCamelFirstLower());
-	}
-
-	/**
-	 * Remove leading dot from a string so ".a.b.c" becomes "a.b.c"
-	 *
-	 * @param text text to remove leading dot from
-	 * @return  text without a leading dot
-	 */
-	public static String removingLeadingDot(String text) {
-		if (!text.isEmpty() & text.charAt(0) == '.') {
-			return text.substring(1);
-		}
-		return text;
-	}
-
-	/**
-	 * Get the java file for a src directory, package and classname with optional suffix. All parent directories will
-	 * also be created.
-	 *
-	 * @param srcDir The src dir root of all java src
-	 * @param javaPackage the java package with '.' deliminators
-	 * @param className the camel case class name
-	 * @return File object for java file
-	 */
-	public static File getJavaFile(File srcDir, String javaPackage, String className) {
-		File packagePath = new File(srcDir.getPath() + File.separatorChar + javaPackage.replaceAll("\\.","\\" + File.separator));
-		//noinspection ResultOfMethodCallIgnored
-		packagePath.mkdirs();
-		return new File(packagePath,className+".java");
-	}
+    /**
+     * Remove leading dot from a string so ".a.b.c" becomes "a.b.c"
+     *
+     * @param text text to remove leading dot from
+     * @return text without a leading dot
+     */
+    public static String removingLeadingDot(String text) {
+        if (!text.isEmpty() && text.charAt(0) == '.') {
+            return text.substring(1);
+        }
+        return text;
+    }
+
+    /**
+     * Get the java file for a src directory, package and class name. All parent directories will
+     * also be created.
+     *
+     * @param srcDir The src dir root of all java src
+     * @param javaPackage the java package with '.' delimiters
+     * @param className the camel case class name
+     * @return File object for java file
+     */
+    public static File getJavaFile(File srcDir, String javaPackage, String className) {
+        File packagePath =
+                new File(
+                        srcDir.getPath()
+                                + File.separatorChar
+                                + javaPackage.replaceAll("\\.", "\\" + File.separator));
+        //noinspection ResultOfMethodCallIgnored
+        packagePath.mkdirs();
+        return new File(packagePath, className + ".java");
+    }
 }
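
As a rough illustration of what the string templates above expand to: for a hypothetical model with a single String field named "memo", the hashCode and equals fragments would look roughly like the minimal sketch below once "$fieldName" has been substituted via nameCamelFirstLower(); the class and field names here are invented for illustration only, not produced by this patch.

// Hypothetical sketch of a model body assembled from the templates above.
public final class MemoSketch {
    /** Default instance referenced by the hashCode template's DEFAULT.$fieldName check. */
    private static final MemoSketch DEFAULT = new MemoSketch(null);

    private final String memo;

    public MemoSketch(String memo) {
        this.memo = memo;
    }

    @Override
    public int hashCode() {
        int result = 0;
        // hashCode fragment for a STRING / sub-message field, with $fieldName -> memo
        if (memo != null && !memo.equals(DEFAULT.memo)) {
            result = 31 * result + memo.hashCode();
        }
        return result;
    }

    @Override
    public boolean equals(Object o) {
        if (!(o instanceof MemoSketch thatObj)) {
            return false;
        }
        // equals fragment for a STRING field, with $fieldName -> memo
        if (memo == null && thatObj.memo != null) {
            return false;
        }
        if (memo != null && !memo.equals(thatObj.memo)) {
            return false;
        }
        return true;
    }
}

In the generator itself each fragment is additionally indented with indent(DEFAULT_INDENT * 2) (hashCode/compareTo) or indent(DEFAULT_INDENT) (equals) before being spliced into the generated class.
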
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/ContextualLookupHelper.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/ContextualLookupHelper.java
index 87903f75..30240231 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/ContextualLookupHelper.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/ContextualLookupHelper.java
@@ -2,22 +2,23 @@
 package com.hedera.pbj.compiler.impl;
 
 import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser.*;
-
 import java.io.File;
 import java.util.List;
 
 /**
- * Wrapper around LookupHelper adding the context of which protobuf source file the lookup is happening within. This
- * makes it easy to carry the source files context for lookup so that the package and imports are correctly understood.
+ * Wrapper around LookupHelper adding the context of which protobuf source file the lookup is
+ * happening within. This makes it easy to carry the source files context for lookup so that the
+ * package and imports are correctly understood.
  */
 public class ContextualLookupHelper {
     /** Lookup helper that we are delegating to */
     private final LookupHelper lookupHelper;
+
     /** The proto source file for context */
     private final File srcProtoFileContext;
 
     /**
-     * Create a new ContextualLookupHelper delegating to {@code  lookupHelper} with the context of
+     * Create a new ContextualLookupHelper delegating to {@code lookupHelper} with the context of
      * {@code srcProtoFileContext}.
      *
      * @param lookupHelper Lookup helper that we are delegating to
@@ -35,23 +36,27 @@ public ContextualLookupHelper(LookupHelper lookupHelper, File srcProtoFileContex
      * @param context Parser Context, a message or enum
      * @return java package to put model class in
      */
-    public String getUnqualifiedClassForMessage(final FileType fileType, final MessageDefContext context) {
+    public String getUnqualifiedClassForMessage(
+            final FileType fileType, final MessageDefContext context) {
         return lookupHelper.getUnqualifiedClass(srcProtoFileContext, fileType, context);
     }
 
     /**
-     * Get the fully qualified class name for a msgDef with given fileType that would be generated by PBJ.
+     * Get the fully qualified class name for a msgDef with given fileType that would be generated
+     * by PBJ.
      *
      * @param fileType The type of file we want the fully qualified class name for
      * @param message The msgDef to get fully qualified class name for
      * @return fully qualified class name
      */
-    public String getFullyQualifiedMessageClassname(final FileType fileType, final MessageDefContext message) {
+    public String getFullyQualifiedMessageClassname(
+            final FileType fileType, final MessageDefContext message) {
         return lookupHelper.getFullyQualifiedClass(srcProtoFileContext, fileType, message);
     }
 
     /**
      * Get the set of fields that are comparable for a given message.
+     *
      * @param message The message to get comparable fields for
      * @return set of field names that are comparable
      */
@@ -88,8 +93,10 @@ public String getPackageForEnum(FileType fileType, EnumDefContext enumDef) {
      * @param fieldContext The field to get package for message type for
      * @return java package to put model class in
      */
-    public String getPackageFieldMessageType(final FileType fileType, final FieldContext fieldContext) {
-        return lookupHelper.getPackage(srcProtoFileContext, fileType, fieldContext.type_().messageType());
+    public String getPackageFieldMessageType(
+            final FileType fileType, final FieldContext fieldContext) {
+        return lookupHelper.getPackage(
+                srcProtoFileContext, fileType, fieldContext.type_().messageType());
     }
 
     /**
@@ -99,19 +106,23 @@ public String getPackageFieldMessageType(final FileType fileType, final FieldCon
      * @param typeContext The field to get package for message type for
      * @return java package to put model class in
      */
-    public String getPackageFieldMessageType(final FileType fileType, final Type_Context typeContext) {
+    public String getPackageFieldMessageType(
+            final FileType fileType, final Type_Context typeContext) {
         return lookupHelper.getPackage(srcProtoFileContext, fileType, typeContext.messageType());
     }
 
     /**
-     * Get the PBJ Java package a class should be generated into for a given fieldContext and file type.
+     * Get the PBJ Java package a class should be generated into for a given fieldContext and file
+     * type.
      *
      * @param fileType The type of file we want the package for
      * @param fieldContext The field to get package for message type for
      * @return java package to put model class in
      */
-    public String getPackageOneofFieldMessageType(final FileType fileType, final OneofFieldContext fieldContext) {
-        return lookupHelper.getPackage(srcProtoFileContext, fileType, fieldContext.type_().messageType());
+    public String getPackageOneofFieldMessageType(
+            final FileType fileType, final OneofFieldContext fieldContext) {
+        return lookupHelper.getPackage(
+                srcProtoFileContext, fileType, fieldContext.type_().messageType());
     }
 
     /**
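
As a usage illustration: the sketch below shows how a generation step might resolve class names through the contextual wrapper. Only the constructor and the two message lookups visible in this diff are used; the surrounding class, method, and variable names are hypothetical, and the sketch is assumed to live in the com.hedera.pbj.compiler.impl package so package-level types resolve.

package com.hedera.pbj.compiler.impl;

import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser.MessageDefContext;
import java.io.File;

// Hypothetical helper, for illustration only.
final class LookupSketch {
    /** Resolve the simple and fully qualified class names for one parsed message definition. */
    static String describeMessage(
            final LookupHelper lookupHelper,
            final File srcProtoFile,
            final FileType fileType,
            final MessageDefContext msgDef) {
        // The wrapper carries the source .proto file so package and import context apply to lookups.
        final ContextualLookupHelper helper = new ContextualLookupHelper(lookupHelper, srcProtoFile);
        final String simpleName = helper.getUnqualifiedClassForMessage(fileType, msgDef);
        final String qualifiedName = helper.getFullyQualifiedMessageClassname(fileType, msgDef);
        return simpleName + " -> " + qualifiedName;
    }
}
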
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Field.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Field.java
index bc2c86d1..44e0d877 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Field.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/Field.java
@@ -1,405 +1,425 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl;
 
+import static com.hedera.pbj.compiler.impl.Common.*;
+
 import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser;
 import edu.umd.cs.findbugs.annotations.NonNull;
-
 import java.util.Set;
 
-import static com.hedera.pbj.compiler.impl.Common.*;
-
-/**
- * Interface for SingleFields and OneOfFields
- */
+/** Interface for SingleFields and OneOfFields */
 @SuppressWarnings("unused")
 public interface Field {
 
-	/** The default maximum size of a repeated or length-encoded field (Bytes, String, Message, etc.). */
-	public static final long DEFAULT_MAX_SIZE = 2 * 1024 * 1024;
-
-	/**
-	 * Is this field a repeated field. Repeated fields are lists of values rather than a single value.
-	 *
-	 * @return true if this field is a list and false if it is a single value
-	 */
-	boolean repeated();
-
-	/**
-	 * Returns the field's max size relevant to repeated or length-encoded fields.
-	 * The returned value has no meaning for scalar fields (BOOL, INT, etc.).
-	 */
-	default long maxSize() {
-		return DEFAULT_MAX_SIZE;
-	}
-
-	/**
-	 * Get the field number, the number of the field in the parent message
-	 *
-	 * @return this fields number
-	 */
-	int fieldNumber();
-
-	/**
-	 * Get this fields name in original case and format
-	 *
-	 * @return this fields name
-	 */
-	String name();
-
-	/**
-	 * Get this fields name converted to camel case with the first letter upper case
-	 *
-	 * @return this fields name converted
-	 */
-	default String nameCamelFirstUpper() {
-		return snakeToCamel(name(),true);
-	}
-
-	/**
-	 * Get this fields name converted to camel case with the first letter lower case
-	 *
-	 * @return this fields name converted
-	 */
-	@NonNull
-	default String nameCamelFirstLower() {
-		return snakeToCamel(name(),false);
-	}
-
-	/**
-	 * Get the field type for this field, the field type is independent of repeated
-	 *
-	 * @return this fields type
-	 */
-	FieldType type();
-
-	/**
-	 * Get the protobuf field type for this field
-	 *
-	 * @return this fields type in protobuf format
-	 */
-	String protobufFieldType();
-
-	/**
-	 * Get the Java field type for this field
-	 *
-	 * @return this fields type in Java format
-	 */
-	String javaFieldType();
-
-	/**
-	 * Get the Java field type for this field.
-	 * Unlike {@link #javaFieldType()}, this method returns the base type for repeated and oneOf fields.
-	 *
-	 * @return this fields type in Java format
-	 */
-	String javaFieldTypeBase();
-
-	/**
-	 * Get the name for this type that is added to write/sizeof etc. methods.
-	 *
-	 * @return Name for type used in method names
-	 */
-	String methodNameType();
-
-	/**
-	 * Add all the needed imports for this field to the supplied set.
-	 *
-	 * @param imports      set of imports to add to, this contains packages not classes. They are always imported as ".*".
-	 * @param modelImports if imports for this field's generated model classes should be added
-	 * @param codecImports if imports for this field's generated codec classes should be added
-	 * @param testImports  if imports for this field's generated test classes should be added
-	 */
-	void addAllNeededImports(Set<String> imports, boolean modelImports,
-							 boolean codecImports, final boolean testImports);
-
-	/**
-	 * Get the java code to parse the value for this field from input
-	 *
-	 * @return java source code to parse
-	 */
-	String parseCode();
-
-	/**
-	 * Get the java code default value for this field, "null" for object types
-	 *
-	 * @return code for default value
-	 */
-	String javaDefault();
-
-	/**
-	 * Get the field definitions line of code for schema file for this field. One line for single fields and multiple
-	 * for oneofs.
-	 *
-	 * @return field definition lines of code
-	 */
-	String schemaFieldsDef();
-
-	/**
-	 * Get the schema case statement for getting the field definition by field number
-	 *
-	 * @return java source code for case statement to get field def for field number
-	 */
-	String schemaGetFieldsDefCase();
-
-	/**
-	 * Get the case statement for setting this method to go in parser set method code
-	 *
-	 * @return java source code for case statement setting this field
-	 */
-	String parserFieldsSetMethodCase();
-
-	/**
-	 * Get the java doc comment for this field, cleaned and ready to insert in output
-	 *
-	 * @return java doc comment
-	 */
-	String comment();
-
-	/**
-	 * Get if this field is deprecated or not
-	 *
-	 * @return true if field is deprecated, otherwise false
-	 */
-	boolean deprecated();
-
-	/**
-	 * Get the message type for this field if it is of type message otherwise null
-	 *
-	 * @return message type or null if not a message type field
-	 */
-	default String messageType() {
-		return null;
-	}
-
-	/**
-	 * Get if this field is an optional value type, optionals are handled in protobuf by value type objects for
-	 * primitives
-	 *
-	 * @return true if this field is option by use of a protobuf value type, otherwise false
-	 */
-	default boolean optionalValueType() {
-		return false;
-	}
-
-	/**
-	 * Get the parent field for this field, null if there is no parent like in the case of a single field.
-	 *
-	 * @return this fields parent field for oneof fields
-	 */
-	default OneOfField parent() {
-		return null;
-	}
-
-	/**
-	 * Extract the name of the Java model class for a message type,
-	 * or null if the type is not a message.
-	 */
-	static String extractMessageTypeName(final Protobuf3Parser.Type_Context typeContext) {
-		return typeContext.messageType() == null ? null : typeContext.messageType().messageName().getText();
-	}
-
-	/**
-	 * Extract the name of the Java package for a given FileType for a message type,
-	 * or null if the type is not a message.
-	 */
-	static String extractMessageTypePackage(
-			final Protobuf3Parser.Type_Context typeContext,
-			final FileType fileType,
-			final ContextualLookupHelper lookupHelper) {
-		return typeContext.messageType() == null || typeContext.messageType().messageName().getText() == null ? null :
-				lookupHelper.getPackageFieldMessageType(fileType, typeContext);
-	}
-
-	/**
-	 * Field type enum for use in field classes
-	 */
-	enum FieldType {
-		/** Protobuf message field type */
-		MESSAGE("Object", "Object", "null", TYPE_LENGTH_DELIMITED),
-		/** Protobuf enum(unsigned varint encoded int of ordinal) field type */
-		ENUM("int", "Integer", "null", TYPE_VARINT),
-		/** Protobuf int32(signed varint encoded int) field type */
-		INT32("int", "Integer", "0", TYPE_VARINT),
-		/** Protobuf uint32(unsigned varint encoded int) field type */
-		UINT32("int", "Integer", "0", TYPE_VARINT),
-		/** Protobuf sint32(signed zigzag varint encoded int) field type */
-		SINT32("int", "Integer", "0", TYPE_VARINT),
-		/** Protobuf int64(signed varint encoded long) field type */
-		INT64("long", "Long", "0", TYPE_VARINT),
-		/** Protobuf uint64(unsigned varint encoded long)  field type */
-		UINT64("long", "Long", "0", TYPE_VARINT),
-		/** Protobuf sint64(signed zigzag varint encoded long) field type */
-		SINT64("long", "Long", "0", TYPE_VARINT),
-		/** Protobuf float field type */
-		FLOAT("float", "Float", "0", TYPE_FIXED32),
-		/** Protobuf fixed int32(fixed encoding int) field type */
-		FIXED32("int", "Integer", "0", TYPE_FIXED32),
-		/** Protobuf sfixed int32(signed fixed encoding int) field type */
-		SFIXED32("int", "Integer", "0", TYPE_FIXED32),
-		/** Protobuf double field type */
-		DOUBLE("double", "Double", "0", TYPE_FIXED64),
-		/** Protobuf sfixed64(fixed encoding long) field type */
-		FIXED64("long", "Long", "0", TYPE_FIXED64),
-		/** Protobuf sfixed64(signed fixed encoding long) field type */
-		SFIXED64("long", "Long", "0", TYPE_FIXED64),
-		/** Protobuf string field type */
-		STRING("String", "String", "\"\"", TYPE_LENGTH_DELIMITED),
-		/** Protobuf bool(boolean) field type */
-		BOOL("boolean", "Boolean", "false", TYPE_VARINT),
-		/** Protobuf bytes field type */
-		BYTES("Bytes", "Bytes", "Bytes.EMPTY", TYPE_LENGTH_DELIMITED),
-		/** Protobuf oneof field type, this is not a true field type in protobuf. Needed here for a few edge cases */
-		ONE_OF("OneOf", "OneOf", "null", 0 ),// BAD TYPE
-		// On the wire, a map is a repeated Message {key, value}, sorted in the natural order of keys for determinism.
-		MAP("Map", "Map", "Collections.EMPTY_MAP", TYPE_LENGTH_DELIMITED );
-
-		/** The type of field type in Java code */
-		public final String javaType;
-		/** The type of boxed field type in Java code */
-		public final String boxedType;
-		/** The field type default value in Java code */
-		public final String javaDefault;
-		/** The protobuf wire type for field type */
-		public final int wireType;
-
-		/**
-		 * Construct a new FieldType enum
-		 *
-		 * @param javaType The type of field type in Java code
-		 * @param boxedType The boxed type of the field type, e.g. Integer for an int field.
-		 * @param javaDefault The field type default value in Java code
-		 * @param wireType The protobuf wire type for field type
-		 */
-		FieldType(String javaType, final String boxedType, final String javaDefault, int wireType) {
-			this.javaType = javaType;
-			this.boxedType = boxedType;
-			this.javaDefault = javaDefault;
-			this.wireType = wireType;
-		}
-
-		/**
-		 * Get the field type string = the enum name
-		 *
-		 * @return Field type string
-		 */
-		String fieldType() {
-			return name();
-		}
-
-		/**
-		 * Get the protobuf wire type for field type
-		 *
-		 * @return protobuf wire type for field type
-		 */
-		public int wireType() {
-			return wireType;
-		}
-
-		/**
-		 * Get the type of field type in Java code
-		 *
-		 * @param repeated if the field is repeated or not, java types are different for repeated field
-		 * @return The type of field type in Java code
-		 */
-		@SuppressWarnings("DuplicatedCode")
-		public String javaType(boolean repeated) {
-			if (repeated) {
-				return switch (javaType) {
-					case "int" -> "List<Integer>";
-					case "long" -> "List<Long>";
-					case "float" -> "List<Float>";
-					case "double" -> "List<Double>";
-					case "boolean" -> "List<Boolean>";
-					default -> "List<" + javaType + ">";
-				};
-			} else {
-				return javaType;
-			}
-		}
-
-		/**
-		 * Get the field type for a given parser context
-		 *
-		 * @param typeContext The parser context to get field type for
-		 * @param lookupHelper Lookup helper with global context
-		 * @return The field type enum for parser context
-		 */
-		static FieldType of(Protobuf3Parser.Type_Context typeContext,  final ContextualLookupHelper lookupHelper) {
-			if (typeContext.enumType() != null) {
-				return FieldType.ENUM;
-			} else if (typeContext.messageType() != null) {
-				if (lookupHelper.isEnum(typeContext.messageType())) return FieldType.ENUM;
-				return FieldType.MESSAGE;
-			} else if (typeContext.INT32() != null) {
-				return FieldType.INT32;
-			} else if (typeContext.UINT32() != null) {
-				return FieldType.UINT32;
-			} else if (typeContext.SINT32() != null) {
-				return FieldType.SINT32;
-			} else if (typeContext.INT64() != null) {
-				return FieldType.INT64;
-			} else if (typeContext.UINT64() != null) {
-				return FieldType.UINT64;
-			} else if (typeContext.SINT64() != null) {
-				return FieldType.SINT64;
-			} else if (typeContext.FLOAT() != null) {
-				return FieldType.FLOAT;
-			} else if (typeContext.FIXED32() != null) {
-				return FieldType.FIXED32;
-			} else if (typeContext.SFIXED32() != null) {
-				return FieldType.SFIXED32;
-			} else if (typeContext.DOUBLE() != null) {
-				return FieldType.DOUBLE;
-			} else if (typeContext.FIXED64() != null) {
-				return FieldType.FIXED64;
-			} else if (typeContext.SFIXED64() != null) {
-				return FieldType.SFIXED64;
-			} else if (typeContext.STRING() != null) {
-				return FieldType.STRING;
-			} else if (typeContext.BOOL() != null) {
-				return FieldType.BOOL;
-			} else if (typeContext.BYTES() != null) {
-				return FieldType.BYTES;
-			} else {
-				throw new IllegalArgumentException("Unknown field type: "+typeContext);
-			}
-		}
-
-		/**
-		 * Get the field type for a given map key type parser context
-		 *
-		 * @param typeContext The parser context to get field type for
-		 * @param lookupHelper Lookup helper with global context
-		 * @return The field type enum for parser context
-		 */
-		static FieldType of(Protobuf3Parser.KeyTypeContext typeContext,  final ContextualLookupHelper lookupHelper) {
-			if (typeContext.INT32() != null) {
-				return FieldType.INT32;
-			} else if (typeContext.UINT32() != null) {
-				return FieldType.UINT32;
-			} else if (typeContext.SINT32() != null) {
-				return FieldType.SINT32;
-			} else if (typeContext.INT64() != null) {
-				return FieldType.INT64;
-			} else if (typeContext.UINT64() != null) {
-				return FieldType.UINT64;
-			} else if (typeContext.SINT64() != null) {
-				return FieldType.SINT64;
-			} else if (typeContext.FIXED32() != null) {
-				return FieldType.FIXED32;
-			} else if (typeContext.SFIXED32() != null) {
-				return FieldType.SFIXED32;
-			} else if (typeContext.FIXED64() != null) {
-				return FieldType.FIXED64;
-			} else if (typeContext.SFIXED64() != null) {
-				return FieldType.SFIXED64;
-			} else if (typeContext.STRING() != null) {
-				return FieldType.STRING;
-			} else if (typeContext.BOOL() != null) {
-				return FieldType.BOOL;
-			} else {
-				throw new IllegalArgumentException("Unknown map key type: " + typeContext);
-			}
-		}
-	}
+    /**
+     * The default maximum size of a repeated or length-encoded field (Bytes, String, Message,
+     * etc.).
+     */
+    public static final long DEFAULT_MAX_SIZE = 2 * 1024 * 1024;
+
+    /**
+     * Is this field a repeated field. Repeated fields are lists of values rather than a single
+     * value.
+     *
+     * @return true if this field is a list and false if it is a single value
+     */
+    boolean repeated();
+
+    /**
+     * Returns the field's max size relevant to repeated or length-encoded fields. The returned
+     * value has no meaning for scalar fields (BOOL, INT, etc.).
+     */
+    default long maxSize() {
+        return DEFAULT_MAX_SIZE;
+    }
+
+    /**
+     * Get the field number, the number of the field in the parent message
+     *
+     * @return this field's number
+     */
+    int fieldNumber();
+
+    /**
+     * Get this field's name in original case and format
+     *
+     * @return this field's name
+     */
+    String name();
+
+    /**
+     * Get this field's name converted to camel case with the first letter upper case
+     *
+     * @return this field's name converted
+     */
+    default String nameCamelFirstUpper() {
+        return snakeToCamel(name(), true);
+    }
+
+    /**
+     * Get this field's name converted to camel case with the first letter lower case
+     *
+     * @return this field's name converted
+     */
+    @NonNull
+    default String nameCamelFirstLower() {
+        return snakeToCamel(name(), false);
+    }
+
+    /**
+     * Get the field type for this field; the type is independent of whether it is repeated
+     *
+     * @return this field's type
+     */
+    FieldType type();
+
+    /**
+     * Get the protobuf field type for this field
+     *
+     * @return this field's type in protobuf format
+     */
+    String protobufFieldType();
+
+    /**
+     * Get the Java field type for this field
+     *
+     * @return this field's type in Java format
+     */
+    String javaFieldType();
+
+    /**
+     * Get the Java field type for this field. Unlike {@link #javaFieldType()}, this method returns
+     * the base type for repeated and oneOf fields.
+     *
+     * @return this field's type in Java format
+     */
+    String javaFieldTypeBase();
+
+    /**
+     * Get the name for this type that is added to write/sizeof etc. methods.
+     *
+     * @return Name for type used in method names
+     */
+    String methodNameType();
+
+    /**
+     * Add all the needed imports for this field to the supplied set.
+     *
+     * @param imports set of imports to add to; this contains packages, not classes. They are
+     *     always imported as ".*".
+     * @param modelImports if imports for this field's generated model classes should be added
+     * @param codecImports if imports for this field's generated codec classes should be added
+     * @param testImports if imports for this field's generated test classes should be added
+     */
+    void addAllNeededImports(
+            Set<String> imports,
+            boolean modelImports,
+            boolean codecImports,
+            final boolean testImports);
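A hedged sketch of how callers use the imports set: it accumulates package names rather than class names, and the generators emit each entry as a wildcard import. The `field` variable is hypothetical; the package shown is the one SingleField adds for BYTES fields elsewhere in this patch.

    // Sketch only: collect model imports for a hypothetical BYTES field.
    final Set<String> imports = new HashSet<>();
    field.addAllNeededImports(imports, true, false, false);
    // imports now contains "com.hedera.pbj.runtime.io.buffer", which a generator
    // writes out as: import com.hedera.pbj.runtime.io.buffer.*;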
+
+    /**
+     * Get the java code to parse the value for this field from input
+     *
+     * @return java source code to parse
+     */
+    String parseCode();
+
+    /**
+     * Get the java code default value for this field, "null" for object types
+     *
+     * @return code for default value
+     */
+    String javaDefault();
+
+    /**
+     * Get the field definition line(s) of code for the schema file for this field. One line for
+     * single fields and multiple lines for oneofs.
+     *
+     * @return field definition lines of code
+     */
+    String schemaFieldsDef();
+
+    /**
+     * Get the schema case statement for getting the field definition by field number
+     *
+     * @return java source code for case statement to get field def for field number
+     */
+    String schemaGetFieldsDefCase();
+
+    /**
+     * Get the case statement for setting this field, to go in the parser's set method code
+     *
+     * @return java source code for case statement setting this field
+     */
+    String parserFieldsSetMethodCase();
+
+    /**
+     * Get the java doc comment for this field, cleaned and ready to insert in output
+     *
+     * @return java doc comment
+     */
+    String comment();
+
+    /**
+     * Get if this field is deprecated or not
+     *
+     * @return true if field is deprecated, otherwise false
+     */
+    boolean deprecated();
+
+    /**
+     * Get the message type for this field if it is of type message, otherwise null
+     *
+     * @return message type or null if not a message type field
+     */
+    default String messageType() {
+        return null;
+    }
+
+    /**
+     * Get if this field is an optional value type; optionals are handled in protobuf by value type
+     * objects for primitives
+     *
+     * @return true if this field is optional by use of a protobuf value type, otherwise false
+     */
+    default boolean optionalValueType() {
+        return false;
+    }
+
+    /**
+     * Get the parent field for this field, or null if there is no parent, as in the case of a
+     * single field.
+     *
+     * @return this field's parent field for oneof fields, otherwise null
+     */
+    default OneOfField parent() {
+        return null;
+    }
+
+    /**
+     * Extract the name of the Java model class for a message type, or null if the type is not a
+     * message.
+     */
+    static String extractMessageTypeName(final Protobuf3Parser.Type_Context typeContext) {
+        return typeContext.messageType() == null
+                ? null
+                : typeContext.messageType().messageName().getText();
+    }
+
+    /**
+     * Extract the name of the Java package for a given FileType for a message type, or null if the
+     * type is not a message.
+     */
+    static String extractMessageTypePackage(
+            final Protobuf3Parser.Type_Context typeContext,
+            final FileType fileType,
+            final ContextualLookupHelper lookupHelper) {
+        return typeContext.messageType() == null
+                        || typeContext.messageType().messageName().getText() == null
+                ? null
+                : lookupHelper.getPackageFieldMessageType(fileType, typeContext);
+    }
+
+    /** Field type enum for use in field classes */
+    enum FieldType {
+        /** Protobuf message field type */
+        MESSAGE("Object", "Object", "null", TYPE_LENGTH_DELIMITED),
+        /** Protobuf enum(unsigned varint encoded int of ordinal) field type */
+        ENUM("int", "Integer", "null", TYPE_VARINT),
+        /** Protobuf int32(signed varint encoded int) field type */
+        INT32("int", "Integer", "0", TYPE_VARINT),
+        /** Protobuf uint32(unsigned varint encoded int) field type */
+        UINT32("int", "Integer", "0", TYPE_VARINT),
+        /** Protobuf sint32(signed zigzag varint encoded int) field type */
+        SINT32("int", "Integer", "0", TYPE_VARINT),
+        /** Protobuf int64(signed varint encoded long) field type */
+        INT64("long", "Long", "0", TYPE_VARINT),
+        /** Protobuf uint64(unsigned varint encoded long) field type */
+        UINT64("long", "Long", "0", TYPE_VARINT),
+        /** Protobuf sint64(signed zigzag varint encoded long) field type */
+        SINT64("long", "Long", "0", TYPE_VARINT),
+        /** Protobuf float field type */
+        FLOAT("float", "Float", "0", TYPE_FIXED32),
+        /** Protobuf fixed32(fixed encoding int) field type */
+        FIXED32("int", "Integer", "0", TYPE_FIXED32),
+        /** Protobuf sfixed32(signed fixed encoding int) field type */
+        SFIXED32("int", "Integer", "0", TYPE_FIXED32),
+        /** Protobuf double field type */
+        DOUBLE("double", "Double", "0", TYPE_FIXED64),
+        /** Protobuf fixed64(fixed encoding long) field type */
+        FIXED64("long", "Long", "0", TYPE_FIXED64),
+        /** Protobuf sfixed64(signed fixed encoding long) field type */
+        SFIXED64("long", "Long", "0", TYPE_FIXED64),
+        /** Protobuf string field type */
+        STRING("String", "String", "\"\"", TYPE_LENGTH_DELIMITED),
+        /** Protobuf bool(boolean) field type */
+        BOOL("boolean", "Boolean", "false", TYPE_VARINT),
+        /** Protobuf bytes field type */
+        BYTES("Bytes", "Bytes", "Bytes.EMPTY", TYPE_LENGTH_DELIMITED),
+        /**
+         * Protobuf oneof field type; this is not a true field type in protobuf. Needed here for a
+         * few edge cases
+         */
+        ONE_OF("OneOf", "OneOf", "null", 0), // BAD TYPE
+        // On the wire, a map is a repeated Message {key, value}, sorted in the natural order of
+        // keys for determinism.
+        MAP("Map", "Map", "Collections.EMPTY_MAP", TYPE_LENGTH_DELIMITED);
+
+        /** The Java type for this field type */
+        public final String javaType;
+
+        /** The boxed Java type for this field type */
+        public final String boxedType;
+
+        /** The field type default value in Java code */
+        public final String javaDefault;
+
+        /** The protobuf wire type for field type */
+        public final int wireType;
+
+        /**
+         * Construct a new FieldType enum
+         *
+         * @param javaType The Java type for this field type
+         * @param boxedType The boxed type of the field type, e.g. Integer for an int field.
+         * @param javaDefault The field type default value in Java code
+         * @param wireType The protobuf wire type for field type
+         */
+        FieldType(String javaType, final String boxedType, final String javaDefault, int wireType) {
+            this.javaType = javaType;
+            this.boxedType = boxedType;
+            this.javaDefault = javaDefault;
+            this.wireType = wireType;
+        }
+
+        /**
+         * Get the field type string, which is the enum name
+         *
+         * @return Field type string
+         */
+        String fieldType() {
+            return name();
+        }
+
+        /**
+         * Get the protobuf wire type for field type
+         *
+         * @return protobuf wire type for field type
+         */
+        public int wireType() {
+            return wireType;
+        }
+
+        /**
+         * Get the Java type for this field type
+         *
+         * @param repeated if the field is repeated or not; Java types are different for repeated
+         *     fields
+         * @return The Java type for this field type
+         */
+        @SuppressWarnings("DuplicatedCode")
+        public String javaType(boolean repeated) {
+            if (repeated) {
+                return switch (javaType) {
+                    case "int" -> "List<Integer>";
+                    case "long" -> "List<Long>";
+                    case "float" -> "List<Float>";
+                    case "double" -> "List<Double>";
+                    case "boolean" -> "List<Boolean>";
+                    default -> "List<" + javaType + ">";
+                };
+            } else {
+                return javaType;
+            }
+        }
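A few worked examples, read directly from the switch above (expected values shown as comments):

        // Sketch only:
        FieldType.INT64.javaType(false);   // "long"
        FieldType.INT64.javaType(true);    // "List<Long>"
        FieldType.STRING.javaType(true);   // "List<String>"
        FieldType.MESSAGE.javaType(true);  // "List<Object>"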
+
+        /**
+         * Get the field type for a given parser context
+         *
+         * @param typeContext The parser context to get field type for
+         * @param lookupHelper Lookup helper with global context
+         * @return The field type enum for parser context
+         */
+        static FieldType of(
+                Protobuf3Parser.Type_Context typeContext,
+                final ContextualLookupHelper lookupHelper) {
+            if (typeContext.enumType() != null) {
+                return FieldType.ENUM;
+            } else if (typeContext.messageType() != null) {
+                if (lookupHelper.isEnum(typeContext.messageType())) return FieldType.ENUM;
+                return FieldType.MESSAGE;
+            } else if (typeContext.INT32() != null) {
+                return FieldType.INT32;
+            } else if (typeContext.UINT32() != null) {
+                return FieldType.UINT32;
+            } else if (typeContext.SINT32() != null) {
+                return FieldType.SINT32;
+            } else if (typeContext.INT64() != null) {
+                return FieldType.INT64;
+            } else if (typeContext.UINT64() != null) {
+                return FieldType.UINT64;
+            } else if (typeContext.SINT64() != null) {
+                return FieldType.SINT64;
+            } else if (typeContext.FLOAT() != null) {
+                return FieldType.FLOAT;
+            } else if (typeContext.FIXED32() != null) {
+                return FieldType.FIXED32;
+            } else if (typeContext.SFIXED32() != null) {
+                return FieldType.SFIXED32;
+            } else if (typeContext.DOUBLE() != null) {
+                return FieldType.DOUBLE;
+            } else if (typeContext.FIXED64() != null) {
+                return FieldType.FIXED64;
+            } else if (typeContext.SFIXED64() != null) {
+                return FieldType.SFIXED64;
+            } else if (typeContext.STRING() != null) {
+                return FieldType.STRING;
+            } else if (typeContext.BOOL() != null) {
+                return FieldType.BOOL;
+            } else if (typeContext.BYTES() != null) {
+                return FieldType.BYTES;
+            } else {
+                throw new IllegalArgumentException("Unknown field type: " + typeContext);
+            }
+        }
+
+        /**
+         * Get the field type for a given map key type parser context
+         *
+         * @param typeContext The parser context to get field type for
+         * @param lookupHelper Lookup helper with global context
+         * @return The field type enum for parser context
+         */
+        static FieldType of(
+                Protobuf3Parser.KeyTypeContext typeContext,
+                final ContextualLookupHelper lookupHelper) {
+            if (typeContext.INT32() != null) {
+                return FieldType.INT32;
+            } else if (typeContext.UINT32() != null) {
+                return FieldType.UINT32;
+            } else if (typeContext.SINT32() != null) {
+                return FieldType.SINT32;
+            } else if (typeContext.INT64() != null) {
+                return FieldType.INT64;
+            } else if (typeContext.UINT64() != null) {
+                return FieldType.UINT64;
+            } else if (typeContext.SINT64() != null) {
+                return FieldType.SINT64;
+            } else if (typeContext.FIXED32() != null) {
+                return FieldType.FIXED32;
+            } else if (typeContext.SFIXED32() != null) {
+                return FieldType.SFIXED32;
+            } else if (typeContext.FIXED64() != null) {
+                return FieldType.FIXED64;
+            } else if (typeContext.SFIXED64() != null) {
+                return FieldType.SFIXED64;
+            } else if (typeContext.STRING() != null) {
+                return FieldType.STRING;
+            } else if (typeContext.BOOL() != null) {
+                return FieldType.BOOL;
+            } else {
+                throw new IllegalArgumentException("Unknown map key type: " + typeContext);
+            }
+        }
+    }
 }
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/FileAndPackageNamesConfig.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/FileAndPackageNamesConfig.java
index 50ea5653..dc0bf8e7 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/FileAndPackageNamesConfig.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/FileAndPackageNamesConfig.java
@@ -1,9 +1,7 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl;
 
-/**
- * All constants used in the naming of class files and packages
- */
+/** All constants used in the naming of class files and packages */
 public final class FileAndPackageNamesConfig {
 
     /** Suffix for schema java classes */
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/FileType.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/FileType.java
index 5e6ce0db..9c6a1736 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/FileType.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/FileType.java
@@ -1,9 +1,7 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl;
 
-/**
- * Enum for the different types of files that are generated
- */
+/** Enum for the different types of files that are generated */
 public enum FileType {
     /** Generated model record object */
     MODEL,
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/LookupHelper.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/LookupHelper.java
index ff8b4b92..3d517d8c 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/LookupHelper.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/LookupHelper.java
@@ -13,10 +13,6 @@
 import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser.TopLevelDefContext;
 import edu.umd.cs.findbugs.annotations.NonNull;
 import edu.umd.cs.findbugs.annotations.Nullable;
-import org.antlr.v4.runtime.CharStreams;
-import org.antlr.v4.runtime.CommonTokenStream;
-import org.antlr.v4.runtime.ParserRuleContext;
-
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
@@ -31,6 +27,9 @@
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 import java.util.stream.StreamSupport;
+import org.antlr.v4.runtime.CharStreams;
+import org.antlr.v4.runtime.CommonTokenStream;
+import org.antlr.v4.runtime.ParserRuleContext;
 
 /**
  * Class that manages packages and enum names that are used more than one place in code generation
@@ -40,10 +39,13 @@ public final class LookupHelper {
     /** REGEX pattern to match options in special option comments */
     private static final Pattern OPTION_COMMENT =
             Pattern.compile("//\\s+<<<\\s*([\\w.]+)\\s*=\\s*\"([^\"]+)\"\\s*>>>");
+
     /** The option name for PBJ package at file level */
     private static final String PBJ_PACKAGE_OPTION_NAME = "pbj.java_package";
+
     /** The option name for PBJ package at msgDef level */
     private static final String PBJ_MESSAGE_PACKAGE_OPTION_NAME = "pbj.message_java_package";
+
     /** The option name for PBJ package at msgDef level */
     private static final String PBJ_ENUM_PACKAGE_OPTION_NAME = "pbj.enum_java_package";
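For readers unfamiliar with the special option comments, here is a hedged, self-contained sketch (the package name is hypothetical) of the kind of line OPTION_COMMENT matches and the two groups it captures; the pattern below simply mirrors the constant above.

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class OptionCommentExample {
        // Same pattern as LookupHelper.OPTION_COMMENT above.
        private static final Pattern OPTION_COMMENT =
                Pattern.compile("//\\s+<<<\\s*([\\w.]+)\\s*=\\s*\"([^\"]+)\"\\s*>>>");

        public static void main(String[] args) {
            // Hypothetical special option comment from a .proto file.
            Matcher m = OPTION_COMMENT.matcher("// <<< pbj.java_package = \"com.example.generated\" >>>");
            if (m.find()) {
                System.out.println(m.group(1)); // pbj.java_package
                System.out.println(m.group(2)); // com.example.generated
            }
        }
    }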
 
@@ -70,8 +72,10 @@ public final class LookupHelper {
     /** Map from proto file path to list of other proto files it imports */
     private final Map<String, Set<String>> protoFileImports = new HashMap<>();
 
-    /** Map from fully qualified msgDef name to a list of field names that are comparable.
-     * We use a list here, because the order of the field matters. */
+    /**
+     * Map from fully qualified msgDef name to a list of field names that are comparable. We use a
+     * list here because the order of the fields matters.
+     */
     private final Map<String, List<String>> comparableFieldsByMsg = new HashMap<>();
 
     /**
@@ -149,7 +153,8 @@ public String getFullyQualifiedProtoName(
                 return nameFoundInLocalFile;
             }
             // message type is not from local file so check imported files
-            for (final var importedProtoFilePath : protoFileImports.get(protoSrcFile.getAbsolutePath())) {
+            for (final var importedProtoFilePath :
+                    protoFileImports.get(protoSrcFile.getAbsolutePath())) {
                 final var messageMap = msgAndEnumByFile.get(importedProtoFilePath);
                 if (messageMap == null) {
                     throw new PbjCompilerException(
@@ -174,7 +179,8 @@ public String getFullyQualifiedProtoName(
                                             .get(protoSrcFile.getAbsolutePath())
                                             .toArray()));
         } else if (context instanceof MessageDefContext || context instanceof EnumDefContext) {
-            final Map<String, String> fileMap = msgAndEnumByFile.get(protoSrcFile.getAbsolutePath());
+            final Map<String, String> fileMap =
+                    msgAndEnumByFile.get(protoSrcFile.getAbsolutePath());
             if (fileMap == null) {
                 throw new PbjCompilerException(
                         "Failed to find messageMapLocal for proto file [" + protoSrcFile + "]");
@@ -540,7 +546,8 @@ private void buildMessage(
         String messagePbjPackage = fileLevelPbjJavaPackage;
 
         final String fullyQualifiedMessage = getFullyQualifiedProtoNameForMsgOrEnum(msgDef);
-        comparableFieldsByMsg.computeIfAbsent(fullyQualifiedMessage, v -> extractComparableFields(msgDef));
+        comparableFieldsByMsg.computeIfAbsent(
+                fullyQualifiedMessage, v -> extractComparableFields(msgDef));
         for (final var element : msgDef.messageBody().messageElement()) {
             final var option = element.optionStatement();
             if (option != null) {
@@ -589,6 +596,7 @@ private void buildMessage(
 
     /**
      * Extract the set of fields that are comparable for a given message.
+     *
      * @param msgDef The message definition to get comparable fields for
      * @return a list of field names that are comparable
      */
@@ -602,36 +610,43 @@ static List<String> extractComparableFields(final MessageDefContext msgDef) {
             final String optionValue = matcher.group(2);
             if (optionName.equals(PBJ_COMPARABLE_OPTION_NAME)) {
                 final Set<String> repeatedFields = new HashSet<>();
-                final Set<String> regularFieldNames = msgDef.messageBody().messageElement().stream()
-                        .filter(v -> v.field() != null)
-                        .filter(v -> {
-                            if(v.field().REPEATED() != null){
-                                repeatedFields.add(v.field().fieldName().getText());
-                                return false;
-                            } else {
-                                return true;
-                            }
-                        })
-                        .map(v -> v.field().fieldName().getText()).collect(Collectors.toSet());
-                final Set<String> oneOfFieldNames = msgDef.messageBody().messageElement().stream()
-                        .filter(v -> v.oneof() != null)
-                        .map(v -> v.oneof().oneofName().getText())
-                        .collect(Collectors.toSet());
+                final Set<String> regularFieldNames =
+                        msgDef.messageBody().messageElement().stream()
+                                .filter(v -> v.field() != null)
+                                .filter(
+                                        v -> {
+                                            if (v.field().REPEATED() != null) {
+                                                repeatedFields.add(v.field().fieldName().getText());
+                                                return false;
+                                            } else {
+                                                return true;
+                                            }
+                                        })
+                                .map(v -> v.field().fieldName().getText())
+                                .collect(Collectors.toSet());
+                final Set<String> oneOfFieldNames =
+                        msgDef.messageBody().messageElement().stream()
+                                .filter(v -> v.oneof() != null)
+                                .map(v -> v.oneof().oneofName().getText())
+                                .collect(Collectors.toSet());
                 final Set<String> allFieldNames = new HashSet<>();
                 allFieldNames.addAll(regularFieldNames);
                 allFieldNames.addAll(oneOfFieldNames);
                 return Arrays.stream(optionValue.split(","))
                         .map(String::trim)
-                        .peek(v -> {
-                            if(repeatedFields.contains(v)){
-                                throw new IllegalArgumentException("Field `%s` specified in `%s` option is repeated. Repeated fields are not supported by this option."
-                                        .formatted(v, PBJ_COMPARABLE_OPTION_NAME));
-                            }
-                            if (!allFieldNames.contains(v)) {
-                                throw new IllegalArgumentException(
-                                        "Field '%s' specified in %s option is not found.".formatted(v, PBJ_COMPARABLE_OPTION_NAME));
-                            }
-                       })
+                        .peek(
+                                v -> {
+                                    if (repeatedFields.contains(v)) {
+                                        throw new IllegalArgumentException(
+                                                "Field `%s` specified in `%s` option is repeated. Repeated fields are not supported by this option."
+                                                        .formatted(v, PBJ_COMPARABLE_OPTION_NAME));
+                                    }
+                                    if (!allFieldNames.contains(v)) {
+                                        throw new IllegalArgumentException(
+                                                "Field '%s' specified in %s option is not found."
+                                                        .formatted(v, PBJ_COMPARABLE_OPTION_NAME));
+                                    }
+                                })
                         .collect(Collectors.toList());
             }
         }
@@ -723,6 +738,7 @@ private static String getFullyQualifiedProtoNameForMsgOrEnum(
 
     /**
      * Get a list of fields that are comparable for a given message.
+     *
      * @param message The message to get comparable fields for
      * @return a list of field names that are comparable
      */
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/MapField.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/MapField.java
index 342119ff..a573f82e 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/MapField.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/MapField.java
@@ -1,21 +1,22 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl;
 
-import java.util.Set;
-import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser;
 import static com.hedera.pbj.compiler.impl.SingleField.getDeprecatedOption;
 
+import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser;
+import java.util.Set;
+
 /**
  * A field of type map.
- * <p>
- * In protobuf, a map is essentially a repeated map entry message with two fields: key and value.
- * However, we don't model the map entry message explicitly for performance reasons. Instead,
- * we deal with the keys and values directly, and define synthetic Field objects for them here
- * for convenience, so that we can reuse the majority of the code generation code.
- * <p>
- * In model implementations we use a custom implementation of the Map interface named PbjMap
- * which is an immutable map that exposes a SortedKeys list which allows one to iterate
- * the map deterministically which is useful for serializing, computing the hash code, etc.
+ *
+ * <p>In protobuf, a map is essentially a repeated map entry message with two fields: key and value.
+ * However, we don't model the map entry message explicitly for performance reasons. Instead, we
+ * deal with the keys and values directly, and define synthetic Field objects for them here for
+ * convenience, so that we can reuse the majority of the code generation code.
+ *
+ * <p>In model implementations we use a custom implementation of the Map interface named PbjMap
+ * which is an immutable map that exposes a SortedKeys list which allows one to iterate the map
+ * deterministically which is useful for serializing, computing the hash code, etc.
  */
 public record MapField(
         /** A synthetic "key" field in a map entry. */
@@ -34,13 +35,12 @@ public record MapField(
         String javaDefault,
         String parserFieldsSetMethodCase,
         String comment,
-        boolean deprecated
-) implements Field {
+        boolean deprecated)
+        implements Field {
 
-    /**
-     * Construct a MapField instance out of a MapFieldContext and a lookup helper.
-     */
-    public MapField(Protobuf3Parser.MapFieldContext mapContext, final ContextualLookupHelper lookupHelper) {
+    /** Construct a MapField instance out of a MapFieldContext and a lookup helper. */
+    public MapField(
+            Protobuf3Parser.MapFieldContext mapContext, final ContextualLookupHelper lookupHelper) {
         this(
                 new SingleField(
                         false,
@@ -60,13 +60,16 @@ public MapField(Protobuf3Parser.MapFieldContext mapContext, final ContextualLook
                         2,
                         "___" + mapContext.mapName().getText() + "__value",
                         Field.extractMessageTypeName(mapContext.type_()),
-                        Field.extractMessageTypePackage(mapContext.type_(), FileType.MODEL, lookupHelper),
-                        Field.extractMessageTypePackage(mapContext.type_(), FileType.CODEC, lookupHelper),
-                        Field.extractMessageTypePackage(mapContext.type_(), FileType.TEST, lookupHelper),
-                        "An internal, private map entry value for " + mapContext.mapName().getText(),
+                        Field.extractMessageTypePackage(
+                                mapContext.type_(), FileType.MODEL, lookupHelper),
+                        Field.extractMessageTypePackage(
+                                mapContext.type_(), FileType.CODEC, lookupHelper),
+                        Field.extractMessageTypePackage(
+                                mapContext.type_(), FileType.TEST, lookupHelper),
+                        "An internal, private map entry value for "
+                                + mapContext.mapName().getText(),
                         false,
                         null),
-
                 false, // maps cannot be repeated
                 Integer.parseInt(mapContext.fieldNumber().getText()),
                 mapContext.mapName().getText(),
@@ -77,43 +80,51 @@ public MapField(Protobuf3Parser.MapFieldContext mapContext, final ContextualLook
                 null,
                 "PbjMap.EMPTY",
                 "",
-                Common.buildCleanFieldJavaDoc(Integer.parseInt(mapContext.fieldNumber().getText()), mapContext.docComment()),
-                getDeprecatedOption(mapContext.fieldOptions())
-        );
+                Common.buildCleanFieldJavaDoc(
+                        Integer.parseInt(mapContext.fieldNumber().getText()),
+                        mapContext.docComment()),
+                getDeprecatedOption(mapContext.fieldOptions()));
     }
 
     /**
-     * Composes the Java generic type of the map field, e.g. "&lt;Integer, String&gt;" for a Map&lt;Integer, String&gt;.
+     * Composes the Java generic type of the map field, e.g. "&lt;Integer, String&gt;" for a
+     * Map&lt;Integer, String&gt;.
      */
     public String javaGenericType() {
-        return "<" + keyField.type().boxedType + ", " +
-                (valueField().type() == FieldType.MESSAGE ? ((SingleField)valueField()).messageType() : valueField().type().boxedType)
+        return "<"
+                + keyField.type().boxedType
+                + ", "
+                + (valueField().type() == FieldType.MESSAGE
+                        ? ((SingleField) valueField()).messageType()
+                        : valueField().type().boxedType)
                 + ">";
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public String javaFieldType() {
         return "Map" + javaGenericType();
     }
 
     private void composeFieldDef(StringBuilder sb, Field field) {
-        sb.append("""
+        sb.append(
+                """
                     /**
                      * $doc
                      */
                 """
-                .replace("$doc", field.comment().replaceAll("\n","\n     * "))
-        );
-        sb.append("    public static final FieldDefinition %s = new FieldDefinition(\"%s\", FieldType.%s, %s, false, false, %d);\n"
-                .formatted(Common.camelToUpperSnake(field.name()), field.name(), field.type().fieldType(), field.repeated(), field.fieldNumber()));
+                        .replace("$doc", field.comment().replaceAll("\n", "\n     * ")));
+        sb.append(
+                "    public static final FieldDefinition %s = new FieldDefinition(\"%s\", FieldType.%s, %s, false, false, %d);\n"
+                        .formatted(
+                                Common.camelToUpperSnake(field.name()),
+                                field.name(),
+                                field.type().fieldType(),
+                                field.repeated(),
+                                field.fieldNumber()));
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public String schemaFieldsDef() {
         StringBuilder sb = new StringBuilder();
@@ -123,17 +134,13 @@ public String schemaFieldsDef() {
         return sb.toString();
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public String schemaGetFieldsDefCase() {
         return "case %d -> %s;".formatted(fieldNumber, Common.camelToUpperSnake(name));
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void addAllNeededImports(
             final Set<String> imports,
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/OneOfField.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/OneOfField.java
index 92389d61..269e404b 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/OneOfField.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/OneOfField.java
@@ -2,201 +2,202 @@
 package com.hedera.pbj.compiler.impl;
 
 import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser;
-
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;
 
-/**
- * An implementation of Field for OneOf fields
- */
+/** An implementation of Field for OneOf fields */
 public record OneOfField(
-		String parentMessageName,
-		String name,
-		String comment,
-		List<Field> fields,
-		boolean repeated,
-		boolean deprecated,
-		boolean comparable
-) implements Field {
-	/**
-	 * Create a OneOf field from parser context
-	 *
-	 * @param oneOfContext the parsed one of field
-	 * @param parentMessageName the name of the parent message
-	 * @param lookupHelper helper for accessing global context
-	 */
-	public OneOfField(final Protobuf3Parser.OneofContext oneOfContext, final String parentMessageName, final ContextualLookupHelper lookupHelper) {
-		this(parentMessageName,
-			oneOfContext.oneofName().getText(),
-			Common.buildCleanFieldJavaDoc(
-					oneOfContext.oneofField().stream().map(field -> Integer.parseInt(field.fieldNumber().getText())).toList(),
-					oneOfContext.docComment()),
-			new ArrayList<>(oneOfContext.oneofField().size()),
-			false,
-			getDeprecatedOption(oneOfContext.optionStatement()),
-			isComparable(oneOfContext, lookupHelper)
-		);
-		for (var field: oneOfContext.oneofField()) {
-			fields.add(new SingleField(field, this, lookupHelper));
-		}
-	}
-
-	private static boolean isComparable(Protobuf3Parser.OneofContext oneOfContext, ContextualLookupHelper lookupHelper) {
-		final boolean comparable;
-		final List<String> comparableFields = lookupHelper.getComparableFields(((Protobuf3Parser.MessageDefContext)
-				oneOfContext.getParent().getParent().getParent()));
-		comparable = comparableFields.contains(oneOfContext.oneofName().getText());
-		return comparable;
-	}
-
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public FieldType type() {
-		return FieldType.ONE_OF;
-	}
-
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public int fieldNumber() {
-		return fields.get(0).fieldNumber();
-	}
-
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public String protobufFieldType() {
-		return "oneof";
-	}
-
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public String javaFieldType() {
-		return "%s<%s>".formatted(className(), getEnumClassRef());
-	}
-
-	public String className() {
-		return comparable ? "ComparableOneOf" : "OneOf";
-	}
-
-	@Override
-	public String javaFieldTypeBase() {
-		return getEnumClassRef();
-	}
-
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public String methodNameType() {
-		throw new UnsupportedOperationException("mapToWriteMethod can not handle "+type());
-	}
-
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public void addAllNeededImports(final Set<String> imports, boolean modelImports,
-									boolean codecImports, final boolean testImports) {
-		imports.add("com.hedera.pbj.runtime");
-		for (var field:fields) {
-			field.addAllNeededImports(imports, modelImports, codecImports, testImports);
-		}
-	}
-
-	/**
-	 * N/A for OneOfField
-	 */
-	@Override
-	public String parseCode() {
-		return null;
-	}
-
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public String javaDefault() {
-		return Common.camelToUpperSnake(name)+"_UNSET";
-	}
-
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public String schemaFieldsDef() {
-		return fields.stream().map(Field::schemaFieldsDef).collect(Collectors.joining("\n"));
-	}
-
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public String schemaGetFieldsDefCase() {
-		return fields.stream().map(Field::schemaGetFieldsDefCase).collect(Collectors.joining("\n            "));
-	}
-
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public String parserFieldsSetMethodCase() {
-		return fields.stream().map(Field::parserFieldsSetMethodCase).collect(Collectors.joining("\n"));
-	}
-
-	/**
-	 * Get reference to enum class in Java code
-	 *
-	 * @return enum class reference
-	 */
-	public String getEnumClassRef() {
-		return parentMessageName+"."+ nameCamelFirstUpper()+"OneOfType";
-	}
-
-	/**
-	 * Helpful debug toString
-	 *
-	 * @return debug toString
-	 */
-	@Override
-	public String toString() {
-		return "OneOfField{" +
-				"parentMessageName='" + parentMessageName + '\'' +
-				", name='" + name + '\'' +
-				", comment='" + comment + '\'' +
-				", fields.size=" + fields.size() +
-				", repeated=" + repeated +
-				", deprecated=" + deprecated +
-				'}';
-	}
-
-	// ====== Static Utility Methods ============================
-
-	/**
-	 * Extract if a field is deprecated or not from the protobuf options on the field
-	 *
-	 * @param optionContext protobuf options from parser
-	 * @return true if field has deprecated option, otherwise false
-	 */
-	private static boolean getDeprecatedOption(List<Protobuf3Parser.OptionStatementContext> optionContext) {
-		boolean deprecated = false;
-		if (optionContext != null) {
-			for (var option : optionContext) {
-				if ("deprecated".equals(option.optionName().getText())) {
-					deprecated = true;
-				} else {
-					System.err.println("Unhandled Option on oneof: "+option.optionName().getText());
-				}
-			}
-		}
-		return deprecated;
-	}
+        String parentMessageName,
+        String name,
+        String comment,
+        List<Field> fields,
+        boolean repeated,
+        boolean deprecated,
+        boolean comparable)
+        implements Field {
+    /**
+     * Create a OneOf field from parser context
+     *
+     * @param oneOfContext the parsed oneof field
+     * @param parentMessageName the name of the parent message
+     * @param lookupHelper helper for accessing global context
+     */
+    public OneOfField(
+            final Protobuf3Parser.OneofContext oneOfContext,
+            final String parentMessageName,
+            final ContextualLookupHelper lookupHelper) {
+        this(
+                parentMessageName,
+                oneOfContext.oneofName().getText(),
+                Common.buildCleanFieldJavaDoc(
+                        oneOfContext.oneofField().stream()
+                                .map(field -> Integer.parseInt(field.fieldNumber().getText()))
+                                .toList(),
+                        oneOfContext.docComment()),
+                new ArrayList<>(oneOfContext.oneofField().size()),
+                false,
+                getDeprecatedOption(oneOfContext.optionStatement()),
+                isComparable(oneOfContext, lookupHelper));
+        for (var field : oneOfContext.oneofField()) {
+            fields.add(new SingleField(field, this, lookupHelper));
+        }
+    }
+
+    private static boolean isComparable(
+            Protobuf3Parser.OneofContext oneOfContext, ContextualLookupHelper lookupHelper) {
+        final boolean comparable;
+        final List<String> comparableFields =
+                lookupHelper.getComparableFields(
+                        ((Protobuf3Parser.MessageDefContext)
+                                oneOfContext.getParent().getParent().getParent()));
+        comparable = comparableFields.contains(oneOfContext.oneofName().getText());
+        return comparable;
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public FieldType type() {
+        return FieldType.ONE_OF;
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public int fieldNumber() {
+        return fields.get(0).fieldNumber();
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public String protobufFieldType() {
+        return "oneof";
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public String javaFieldType() {
+        return "%s<%s>".formatted(className(), getEnumClassRef());
+    }
+
+    public String className() {
+        return comparable ? "ComparableOneOf" : "OneOf";
+    }
+
+    @Override
+    public String javaFieldTypeBase() {
+        return getEnumClassRef();
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public String methodNameType() {
+        throw new UnsupportedOperationException("mapToWriteMethod can not handle " + type());
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public void addAllNeededImports(
+            final Set<String> imports,
+            boolean modelImports,
+            boolean codecImports,
+            final boolean testImports) {
+        imports.add("com.hedera.pbj.runtime");
+        for (var field : fields) {
+            field.addAllNeededImports(imports, modelImports, codecImports, testImports);
+        }
+    }
+
+    /** N/A for OneOfField */
+    @Override
+    public String parseCode() {
+        return null;
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public String javaDefault() {
+        return Common.camelToUpperSnake(name) + "_UNSET";
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public String schemaFieldsDef() {
+        return fields.stream().map(Field::schemaFieldsDef).collect(Collectors.joining("\n"));
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public String schemaGetFieldsDefCase() {
+        return fields.stream()
+                .map(Field::schemaGetFieldsDefCase)
+                .collect(Collectors.joining("\n            "));
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public String parserFieldsSetMethodCase() {
+        return fields.stream()
+                .map(Field::parserFieldsSetMethodCase)
+                .collect(Collectors.joining("\n"));
+    }
+
+    /**
+     * Get reference to enum class in Java code
+     *
+     * @return enum class reference
+     */
+    public String getEnumClassRef() {
+        return parentMessageName + "." + nameCamelFirstUpper() + "OneOfType";
+    }
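A hedged example of the reference this method composes, using hypothetical names: for a parentMessageName of "Transaction" and a oneof named "body_data":

    // Sketch only:
    getEnumClassRef();  // "Transaction.BodyDataOneOfType"
    javaFieldType();    // "OneOf<Transaction.BodyDataOneOfType>" (ComparableOneOf<...> when comparable)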
+
+    /**
+     * Helpful debug toString
+     *
+     * @return debug toString
+     */
+    @Override
+    public String toString() {
+        return "OneOfField{"
+                + "parentMessageName='"
+                + parentMessageName
+                + '\''
+                + ", name='"
+                + name
+                + '\''
+                + ", comment='"
+                + comment
+                + '\''
+                + ", fields.size="
+                + fields.size()
+                + ", repeated="
+                + repeated
+                + ", deprecated="
+                + deprecated
+                + '}';
+    }
+
+    // ====== Static Utility Methods ============================
+
+    /**
+     * Extract if a field is deprecated or not from the protobuf options on the field
+     *
+     * @param optionContext protobuf options from parser
+     * @return true if field has deprecated option, otherwise false
+     */
+    private static boolean getDeprecatedOption(
+            List<Protobuf3Parser.OptionStatementContext> optionContext) {
+        boolean deprecated = false;
+        if (optionContext != null) {
+            for (var option : optionContext) {
+                if ("deprecated".equals(option.optionName().getText())) {
+                    deprecated = true;
+                } else {
+                    System.err.println(
+                            "Unhandled Option on oneof: " + option.optionName().getText());
+                }
+            }
+        }
+        return deprecated;
+    }
 }
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/PbjCompilerException.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/PbjCompilerException.java
index ddcca5a6..9f58e8be 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/PbjCompilerException.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/PbjCompilerException.java
@@ -1,9 +1,7 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl;
 
-/**
- * Exception thrown when compiler hits errors that are not recoverable
- */
+/** Exception thrown when compiler hits errors that are not recoverable */
 public class PbjCompilerException extends RuntimeException {
 
     /**
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/SingleField.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/SingleField.java
index 54f6aa7b..76051a3e 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/SingleField.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/SingleField.java
@@ -8,326 +8,390 @@
 /**
  * Record for Field in Protobuf file. Contains all logic and special cases for fields
  *
- * @param repeated                If this is a repeated field, ie protobuf equivalent of array
- * @param type                    The type of this single field
- * @param fieldNumber             The protobuf field number
- * @param name                    The name of this filed
- * @param messageType             The message type of this field is of type message
+ * @param repeated If this is a repeated field, ie protobuf equivalent of array
+ * @param type The type of this single field
+ * @param fieldNumber The protobuf field number
+ * @param name The name of this field
+ * @param messageType The message type if this field is of type message
  * @param messageTypeCodecPackage
  */
 @SuppressWarnings("DuplicatedCode")
-public record SingleField(boolean repeated, FieldType type, int fieldNumber, String name, String messageType,
-						  String messageTypeModelPackage,
-						  String messageTypeCodecPackage, String messageTypeTestPackage,
-						  String comment, boolean deprecated, OneOfField parent) implements Field {
+public record SingleField(
+        boolean repeated,
+        FieldType type,
+        int fieldNumber,
+        String name,
+        String messageType,
+        String messageTypeModelPackage,
+        String messageTypeCodecPackage,
+        String messageTypeTestPackage,
+        String comment,
+        boolean deprecated,
+        OneOfField parent)
+        implements Field {
 
+    /**
+     * Construct a SingleField from a parsed field context
+     *
+     * @param fieldContext the field context to extract field data from
+     * @param lookupHelper lookup helper for finding packages and other global context data
+     */
+    public SingleField(
+            Protobuf3Parser.FieldContext fieldContext, final ContextualLookupHelper lookupHelper) {
+        this(
+                fieldContext.REPEATED() != null,
+                FieldType.of(fieldContext.type_(), lookupHelper),
+                Integer.parseInt(fieldContext.fieldNumber().getText()),
+                fieldContext.fieldName().getText(),
+                Field.extractMessageTypeName(fieldContext.type_()),
+                Field.extractMessageTypePackage(fieldContext.type_(), FileType.MODEL, lookupHelper),
+                Field.extractMessageTypePackage(fieldContext.type_(), FileType.CODEC, lookupHelper),
+                Field.extractMessageTypePackage(fieldContext.type_(), FileType.TEST, lookupHelper),
+                Common.buildCleanFieldJavaDoc(
+                        Integer.parseInt(fieldContext.fieldNumber().getText()),
+                        fieldContext.docComment()),
+                getDeprecatedOption(fieldContext.fieldOptions()),
+                null);
+    }
 
-	/**
-	 * Construct a SingleField from a parsed field context
-	 *
-	 * @param fieldContext the field context to extra field data from
-	 * @param lookupHelper lookup helper for finding packages and other global context data
-	 */
-	public SingleField(Protobuf3Parser.FieldContext fieldContext, final ContextualLookupHelper lookupHelper) {
-		this(fieldContext.REPEATED() != null,
-				FieldType.of(fieldContext.type_(), lookupHelper),
-				Integer.parseInt(fieldContext.fieldNumber().getText()),
-				fieldContext.fieldName().getText(),
-				Field.extractMessageTypeName(fieldContext.type_()),
-				Field.extractMessageTypePackage(fieldContext.type_(), FileType.MODEL, lookupHelper),
-				Field.extractMessageTypePackage(fieldContext.type_(), FileType.CODEC, lookupHelper),
-				Field.extractMessageTypePackage(fieldContext.type_(), FileType.TEST, lookupHelper),
-				Common.buildCleanFieldJavaDoc(Integer.parseInt(fieldContext.fieldNumber().getText()), fieldContext.docComment()),
-				getDeprecatedOption(fieldContext.fieldOptions()),
-				null
-		);
-	}
+    /**
+     * Construct a SingleField from a parsed oneof subfield context
+     *
+     * @param fieldContext the field context to extract field data from
+     * @param parent the parent oneof field that this field belongs to
+     * @param lookupHelper lookup helper for finding packages and other global context data
+     */
+    public SingleField(
+            Protobuf3Parser.OneofFieldContext fieldContext,
+            final OneOfField parent,
+            final ContextualLookupHelper lookupHelper) {
+        this(
+                false,
+                FieldType.of(fieldContext.type_(), lookupHelper),
+                Integer.parseInt(fieldContext.fieldNumber().getText()),
+                fieldContext.fieldName().getText(),
+                (fieldContext.type_().messageType() == null)
+                        ? null
+                        : fieldContext.type_().messageType().messageName().getText(),
+                (fieldContext.type_().messageType() == null)
+                        ? null
+                        : lookupHelper.getPackageOneofFieldMessageType(
+                                FileType.MODEL, fieldContext),
+                (fieldContext.type_().messageType() == null)
+                        ? null
+                        : lookupHelper.getPackageOneofFieldMessageType(
+                                FileType.CODEC, fieldContext),
+                (fieldContext.type_().messageType() == null)
+                        ? null
+                        : lookupHelper.getPackageOneofFieldMessageType(FileType.TEST, fieldContext),
+                Common.buildCleanFieldJavaDoc(
+                        Integer.parseInt(fieldContext.fieldNumber().getText()),
+                        fieldContext.docComment()),
+                getDeprecatedOption(fieldContext.fieldOptions()),
+                parent);
+    }
 
-	/**
-	 * Construct a SingleField from a parsed oneof subfield context
-	 *
-	 * @param fieldContext the field context to extra field data from
-	 * @param lookupHelper lookup helper for finding packages and other global context data
-	 */
-	public SingleField(Protobuf3Parser.OneofFieldContext fieldContext, final OneOfField parent,  final ContextualLookupHelper lookupHelper) {
-		this(false,
-				FieldType.of(fieldContext.type_(), lookupHelper),
-				Integer.parseInt(fieldContext.fieldNumber().getText()), fieldContext.fieldName().getText(),
-				(fieldContext.type_().messageType() == null) ? null :
-						fieldContext.type_().messageType().messageName().getText(),
-				(fieldContext.type_().messageType() == null) ? null :
-						lookupHelper.getPackageOneofFieldMessageType(FileType.MODEL, fieldContext),
-				(fieldContext.type_().messageType() == null) ? null :
-						lookupHelper.getPackageOneofFieldMessageType(FileType.CODEC, fieldContext), (fieldContext.type_().messageType() == null) ? null :
-						lookupHelper.getPackageOneofFieldMessageType(FileType.TEST, fieldContext),
-				Common.buildCleanFieldJavaDoc(Integer.parseInt(fieldContext.fieldNumber().getText()), fieldContext.docComment()),
-				getDeprecatedOption(fieldContext.fieldOptions()),
-				parent
-		);
-	}
+    /** {@inheritDoc} */
+    @Override
+    public boolean optionalValueType() { // Move logic for checking built in types to common
+        return type == SingleField.FieldType.MESSAGE
+                && (messageType.equals("StringValue")
+                        || messageType.equals("Int32Value")
+                        || messageType.equals("UInt32Value")
+                        || messageType.equals("Int64Value")
+                        || messageType.equals("UInt64Value")
+                        || messageType.equals("FloatValue")
+                        || messageType.equals("DoubleValue")
+                        || messageType.equals("BoolValue")
+                        || messageType.equals("BytesValue"));
+    }
 
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public boolean optionalValueType() { // Move logic for checking built in types to common
-		return type == SingleField.FieldType.MESSAGE && (
-				messageType.equals("StringValue") ||
-				messageType.equals("Int32Value") ||
-				messageType.equals("UInt32Value") ||
-				messageType.equals("Int64Value") ||
-				messageType.equals("UInt64Value") ||
-				messageType.equals("FloatValue") ||
-				messageType.equals("DoubleValue") ||
-				messageType.equals("BoolValue") ||
-				messageType.equals("BytesValue")
-		);
-	}
+    /** {@inheritDoc} */
+    @Override
+    public String protobufFieldType() {
+        return type == SingleField.FieldType.MESSAGE ? messageType : type.javaType;
+    }
 
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public String protobufFieldType() {
-		return type == SingleField.FieldType.MESSAGE ? messageType : type.javaType;
-	}
+    /** {@inheritDoc} */
+    @Override
+    public String javaFieldType() {
+        return javaFieldType(true);
+    }
 
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public String javaFieldType() {
-		return javaFieldType(true);
-	}
+    @Override
+    public String javaFieldTypeBase() {
+        return javaFieldType(false);
+    }
 
-	@Override
-	public String javaFieldTypeBase() {
-		return javaFieldType(false);
-	}
+    @NonNull
+    private String javaFieldType(boolean considerRepeated) {
+        String fieldType =
+                switch (type) {
+                    case MESSAGE -> messageType;
+                    case ENUM -> Common.snakeToCamel(messageType, true);
+                    default -> type.javaType;
+                };
+        fieldType =
+                switch (fieldType) {
+                    case "StringValue" -> "String";
+                    case "Int32Value", "UInt32Value" -> "Integer";
+                    case "Int64Value", "UInt64Value" -> "Long";
+                    case "FloatValue" -> "Float";
+                    case "DoubleValue" -> "Double";
+                    case "BoolValue" -> "Boolean";
+                    case "BytesValue" -> "Bytes";
+                    default -> fieldType;
+                };
+        if (considerRepeated && repeated) {
+            fieldType =
+                    switch (fieldType) {
+                        case "int" -> "List<Integer>";
+                        case "long" -> "List<Long>";
+                        case "float" -> "List<Float>";
+                        case "double" -> "List<Double>";
+                        case "boolean" -> "List<Boolean>";
+                        default -> "List<" + fieldType + ">";
+                    };
+        }
+        return fieldType;
+    }
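A few hedged examples of the mapping implemented above; the field configurations are assumed, and the wrapper type name is one of the standard protobuf well-known types recognized by optionalValueType():

    // Sketch only: type == MESSAGE, messageType == "UInt64Value", repeated == false
    javaFieldTypeBase();  // "Long"
    javaFieldType();      // "Long"

    // Sketch only: type == INT32, repeated == true
    javaFieldType();      // "List<Integer>"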
 
-	@NonNull
-	private String javaFieldType(boolean considerRepeated) {
-		String fieldType = switch(type) {
-			case MESSAGE -> messageType;
-			case ENUM -> Common.snakeToCamel(messageType, true);
-			default -> type.javaType;
-		};
-		fieldType = switch (fieldType) {
-			case "StringValue" -> "String";
-			case "Int32Value", "UInt32Value" -> "Integer";
-			case "Int64Value", "UInt64Value" -> "Long";
-			case "FloatValue" -> "Float";
-			case "DoubleValue" -> "Double";
-			case "BoolValue" -> "Boolean";
-			case "BytesValue" -> "Bytes";
-			default -> fieldType;
-		};
-		if (considerRepeated && repeated) {
-			fieldType = switch (fieldType) {
-				case "int" -> "List<Integer>";
-				case "long" -> "List<Long>";
-				case "float" -> "List<Float>";
-				case "double" -> "List<Double>";
-				case "boolean" -> "List<Boolean>";
-				default -> "List<" + fieldType + ">";
-			};
-		}
-		return fieldType;
-	}
+    public String javaFieldTypeForTest() {
+        return switch (type) {
+            case MESSAGE -> messageType;
+            case ENUM -> Common.snakeToCamel(messageType, true);
+            default -> type.javaType;
+        };
+    }
 
-	public String javaFieldTypeForTest() {
-		return switch(type) {
-			case MESSAGE -> messageType;
-			case ENUM -> Common.snakeToCamel(messageType, true);
-			default -> type.javaType;
-		};
-	}
+    /** {@inheritDoc} */
+    @Override
+    public String methodNameType() {
+        return switch (type()) {
+            case BOOL -> "Boolean";
+            case INT32, UINT32, SINT32, FIXED32, SFIXED32 -> "Integer";
+            case INT64, SINT64, UINT64, FIXED64, SFIXED64 -> "Long";
+            case FLOAT -> "Float";
+            case DOUBLE -> "Double";
+            case MESSAGE -> "Message";
+            case STRING -> "String";
+            case ENUM -> "Enum";
+            case BYTES -> "Bytes";
+            default -> throw new UnsupportedOperationException(
+                    "mapToWriteMethod can not handle " + type());
+        };
+    }
 
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public String methodNameType() {
-		return switch(type()) {
-			case BOOL -> "Boolean";
-			case INT32, UINT32, SINT32, FIXED32, SFIXED32 -> "Integer";
-			case INT64, SINT64, UINT64, FIXED64, SFIXED64 -> "Long";
-			case FLOAT -> "Float";
-			case DOUBLE -> "Double";
-			case MESSAGE -> "Message";
-			case STRING -> "String";
-			case ENUM -> "Enum";
-			case BYTES -> "Bytes";
-			default -> throw new UnsupportedOperationException("mapToWriteMethod can not handle "+type());
-		};
-	}
+    /** {@inheritDoc} */
+    @Override
+    public void addAllNeededImports(
+            Set<String> imports,
+            boolean modelImports,
+            boolean codecImports,
+            final boolean testImports) {
+        if (repeated || optionalValueType()) imports.add("java.util");
+        if (type == FieldType.BYTES) imports.add("com.hedera.pbj.runtime.io.buffer");
+        if (messageTypeModelPackage != null && modelImports) imports.add(messageTypeModelPackage);
+        if (messageTypeCodecPackage != null && codecImports) imports.add(messageTypeCodecPackage);
+        if (messageTypeTestPackage != null && testImports) imports.add(messageTypeTestPackage);
+    }
 
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public void addAllNeededImports(Set<String> imports, boolean modelImports, boolean codecImports, final boolean testImports) {
-		if (repeated || optionalValueType()) imports.add("java.util");
-		if (type == FieldType.BYTES) imports.add("com.hedera.pbj.runtime.io.buffer");
-		if (messageTypeModelPackage != null && modelImports) imports.add(messageTypeModelPackage);
-		if (messageTypeCodecPackage != null && codecImports) imports.add(messageTypeCodecPackage);
-		if (messageTypeTestPackage != null && testImports) imports.add(messageTypeTestPackage);
-	}
+    /** {@inheritDoc} */
+    @Override
+    public String parseCode() {
+        if (type == FieldType.MESSAGE) {
+            return "%s.PROTOBUF.parse(input, strictMode, maxDepth - 1)"
+                    .formatted(messageType);
+        } else {
+            return "input";
+        }
+    }
 
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public String parseCode() {
-		if (type == FieldType.MESSAGE) {
-			return "%s.PROTOBUF.parse(input, strictMode, maxDepth - 1)"
-					.formatted(messageType, messageType);
-		} else {
-			return "input";
-		}
-	}
+    /** {@inheritDoc} */
+    @Override
+    public String javaDefault() {
+        if (optionalValueType()) {
+            return "null";
+        } else if (repeated) {
+            return "Collections.emptyList()";
+        } else if (type == FieldType.ENUM) {
+            return messageType + ".fromProtobufOrdinal(0)";
+        } else {
+            return type.javaDefault;
+        }
+    }
 
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public String javaDefault() {
-		if (optionalValueType()) {
-			return "null";
-		} else if (repeated) {
-			return "Collections.emptyList()";
-		} else if (type == FieldType.ENUM) {
-			return messageType+".fromProtobufOrdinal(0)";
-		} else {
-			return type.javaDefault;
-		}
-	}
-
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public String schemaFieldsDef() {
-		final String javaDocComment =
-				"""
+    /** {@inheritDoc} */
+    @Override
+    public String schemaFieldsDef() {
+        final String javaDocComment =
+                """
                     /**
                      * $doc
                      */
                 """
-				.replace("$doc", comment().replaceAll("\n","\n     * "));
-		boolean isPartOfOneOf = parent != null;
-		if (optionalValueType()) {
-			final String optionalBaseFieldType = switch (messageType) {
-				case "StringValue" -> "STRING";
-				case "Int32Value" -> "INT32";
-				case "UInt32Value" -> "UINT32";
-				case "Int64Value" -> "INT64";
-				case "UInt64Value" -> "UINT64";
-				case "FloatValue" -> "FLOAT";
-				case "DoubleValue" -> "DOUBLE";
-				case "BoolValue" -> "BOOL";
-				case "BytesValue" -> "BYTES";
-				default -> throw new UnsupportedOperationException("Unsupported optional field type found: "+type.javaType+" in "+this);
-			};
-			return javaDocComment + "    public static final FieldDefinition %s = new FieldDefinition(\"%s\", FieldType.%s, %s, true, %s, %d);\n"
-					.formatted(Common.camelToUpperSnake(name), name, optionalBaseFieldType, repeated, isPartOfOneOf, fieldNumber);
-		} else {
-			return javaDocComment + "    public static final FieldDefinition %s = new FieldDefinition(\"%s\", FieldType.%s, %s, false, %s, %d);\n"
-					.formatted(Common.camelToUpperSnake(name), name, type.fieldType(), repeated, isPartOfOneOf, fieldNumber);
-		}
-	}
+                        .replace("$doc", comment().replaceAll("\n", "\n     * "));
+        boolean isPartOfOneOf = parent != null;
+        if (optionalValueType()) {
+            final String optionalBaseFieldType =
+                    switch (messageType) {
+                        case "StringValue" -> "STRING";
+                        case "Int32Value" -> "INT32";
+                        case "UInt32Value" -> "UINT32";
+                        case "Int64Value" -> "INT64";
+                        case "UInt64Value" -> "UINT64";
+                        case "FloatValue" -> "FLOAT";
+                        case "DoubleValue" -> "DOUBLE";
+                        case "BoolValue" -> "BOOL";
+                        case "BytesValue" -> "BYTES";
+                        default -> throw new UnsupportedOperationException(
+                                "Unsupported optional field type found: "
+                                        + type.javaType
+                                        + " in "
+                                        + this);
+                    };
+            return javaDocComment
+                    + "    public static final FieldDefinition %s = new FieldDefinition(\"%s\", FieldType.%s, %s, true, %s, %d);\n"
+                            .formatted(
+                                    Common.camelToUpperSnake(name),
+                                    name,
+                                    optionalBaseFieldType,
+                                    repeated,
+                                    isPartOfOneOf,
+                                    fieldNumber);
+        } else {
+            return javaDocComment
+                    + "    public static final FieldDefinition %s = new FieldDefinition(\"%s\", FieldType.%s, %s, false, %s, %d);\n"
+                            .formatted(
+                                    Common.camelToUpperSnake(name),
+                                    name,
+                                    type.fieldType(),
+                                    repeated,
+                                    isPartOfOneOf,
+                                    fieldNumber);
+        }
+    }
 
-	/**
-	 * {@inheritDoc}
-	 */
-	public String schemaGetFieldsDefCase() {
-		return "case %d -> %s;".formatted(fieldNumber, Common.camelToUpperSnake(name));
-	}
+    /** {@inheritDoc} */
+    public String schemaGetFieldsDefCase() {
+        return "case %d -> %s;".formatted(fieldNumber, Common.camelToUpperSnake(name));
+    }
 
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public String parserFieldsSetMethodCase() {
-		final String fieldNameToSet = parent != null ? parent.name() : name;
-		if (optionalValueType()) {
-			if (parent != null) { // one of
-				return "case %d -> this.%s = new %s<>(%s.%sOneOfType.%s, input);"
-						.formatted(fieldNumber, fieldNameToSet, parent.className(), parent.parentMessageName(),
-								Common.snakeToCamel(parent.name(), true), Common.camelToUpperSnake(name));
-			} else {
-				return "case %d -> this.%s = input;".formatted(fieldNumber, fieldNameToSet);
-			}
-		} else if (type == FieldType.MESSAGE) {
-			final String valueToSet = parent != null ?
-					"new $className<>($parentMessageName.$parentNameOneOfType.$parentName, %modelClass.PROTOBUF.parse(input))"
-							.replace("$className", parent.className())
-							.replace("$parentMessageName", parent.parentMessageName())
-							.replace("$parentName", Common.snakeToCamel(parent.name(), true))
-							.replace("$parseCode", parseCode())
-							: parseCode();
-			if (repeated) {
-				return """
+    /** {@inheritDoc} */
+    @Override
+    public String parserFieldsSetMethodCase() {
+        final String fieldNameToSet = parent != null ? parent.name() : name;
+        if (optionalValueType()) {
+            if (parent != null) { // one of
+                return "case %d -> this.%s = new %s<>(%s.%sOneOfType.%s, input);"
+                        .formatted(
+                                fieldNumber,
+                                fieldNameToSet,
+                                parent.className(),
+                                parent.parentMessageName(),
+                                Common.snakeToCamel(parent.name(), true),
+                                Common.camelToUpperSnake(name));
+            } else {
+                return "case %d -> this.%s = input;".formatted(fieldNumber, fieldNameToSet);
+            }
+        } else if (type == FieldType.MESSAGE) {
+            final String valueToSet =
+                    parent != null
+                            ? "new $className<>($parentMessageName.$parentNameOneOfType.$parentName, %modelClass.PROTOBUF.parse(input))"
+                                    .replace("$className", parent.className())
+                                    .replace("$parentMessageName", parent.parentMessageName())
+                                    .replace(
+                                            "$parentName", Common.snakeToCamel(parent.name(), true))
+                                    .replace("$parseCode", parseCode())
+                            : parseCode();
+            if (repeated) {
+                return """
 					case %d -> {
 									if (this.%s.equals(Collections.emptyList())) {
 										this.%s = new ArrayList<>();
 									}
 									this.%s.add(%s);
 								}"""
-						.formatted(fieldNumber, fieldNameToSet, fieldNameToSet, fieldNameToSet, valueToSet);
+                        .formatted(
+                                fieldNumber,
+                                fieldNameToSet,
+                                fieldNameToSet,
+                                fieldNameToSet,
+                                valueToSet);
 
-			} else {
-				return "case %d -> this.%s = %s;".formatted(fieldNumber, fieldNameToSet,valueToSet);
-			}
-		} else if (type == FieldType.ENUM) {
-			if (repeated) {
-				return "case %d -> this.%s = input.stream().map(%s::fromProtobufOrdinal).toList();".formatted(fieldNumber, fieldNameToSet,
-						Common.snakeToCamel(messageType, true));
-			} else {
-				return "case %d -> this.%s = %s.fromProtobufOrdinal(input);".formatted(fieldNumber, fieldNameToSet,
-						Common.snakeToCamel(messageType, true));
-			}
-		} else if (repeated && (type == FieldType.STRING || type == FieldType.BYTES)) {
-			final String valueToSet = parent != null ?
-					"new %s<>(%s.%sOneOfType.%s,input)".formatted(parent.className(), parent.parentMessageName(),
-							Common.snakeToCamel(parent.name(), true), Common.camelToUpperSnake(name)) :
-					"input";
-			return """
+            } else {
+                return "case %d -> this.%s = %s;"
+                        .formatted(fieldNumber, fieldNameToSet, valueToSet);
+            }
+        } else if (type == FieldType.ENUM) {
+            if (repeated) {
+                return "case %d -> this.%s = input.stream().map(%s::fromProtobufOrdinal).toList();"
+                        .formatted(
+                                fieldNumber,
+                                fieldNameToSet,
+                                Common.snakeToCamel(messageType, true));
+            } else {
+                return "case %d -> this.%s = %s.fromProtobufOrdinal(input);"
+                        .formatted(
+                                fieldNumber,
+                                fieldNameToSet,
+                                Common.snakeToCamel(messageType, true));
+            }
+        } else if (repeated && (type == FieldType.STRING || type == FieldType.BYTES)) {
+            final String valueToSet =
+                    parent != null
+                            ? "new %s<>(%s.%sOneOfType.%s,input)"
+                                    .formatted(
+                                            parent.className(),
+                                            parent.parentMessageName(),
+                                            Common.snakeToCamel(parent.name(), true),
+                                            Common.camelToUpperSnake(name))
+                            : "input";
+            return """
 				case %d -> {
 								if (this.%s.equals(Collections.emptyList())) {
 									this.%s = new ArrayList<>();
 								}
 								this.%s.add(%s);
 							}"""
-					.formatted(fieldNumber, fieldNameToSet, fieldNameToSet, fieldNameToSet, valueToSet);
+                    .formatted(
+                            fieldNumber,
+                            fieldNameToSet,
+                            fieldNameToSet,
+                            fieldNameToSet,
+                            valueToSet);
 
-		} else {
-			final String valueToSet = parent != null ?
-					"new %s<>(%s.%sOneOfType.%s,input)".formatted(parent.className(), parent.parentMessageName(),
-							Common.snakeToCamel(parent.name(), true), Common.camelToUpperSnake(name)) :
-					"input";
-			return "case %d -> this.%s = %s;".formatted(fieldNumber, fieldNameToSet,valueToSet);
-		}
-	}
+        } else {
+            final String valueToSet =
+                    parent != null
+                            ? "new %s<>(%s.%sOneOfType.%s,input)"
+                                    .formatted(
+                                            parent.className(),
+                                            parent.parentMessageName(),
+                                            Common.snakeToCamel(parent.name(), true),
+                                            Common.camelToUpperSnake(name))
+                            : "input";
+            return "case %d -> this.%s = %s;".formatted(fieldNumber, fieldNameToSet, valueToSet);
+        }
+    }
 
-	// ====== Static Utility Methods ============================
+    // ====== Static Utility Methods ============================
 
-	/**
-	 * Extract if a field is deprecated or not from the protobuf options on the field
-	 *
-	 * @param optionContext protobuf options from parser
-	 * @return true if field has deprecated option, otherwise false
-	 */
-	static boolean getDeprecatedOption(Protobuf3Parser.FieldOptionsContext optionContext) {
-		if (optionContext != null) {
-			for (var option : optionContext.fieldOption()) {
-				if ("deprecated".equals(option.optionName().getText())) {
-					return true;
-				} else {
-					System.err.println("Unhandled Option: " + optionContext.getText());
-				}
-			}
-		}
-		return false;
-	}
+    /**
+     * Extract whether a field is deprecated from the protobuf options on the field.
+     *
+     * @param optionContext protobuf options from parser
+     * @return true if the field has the deprecated option, otherwise false
+     */
+    static boolean getDeprecatedOption(Protobuf3Parser.FieldOptionsContext optionContext) {
+        if (optionContext != null) {
+            for (var option : optionContext.fieldOption()) {
+                if ("deprecated".equals(option.optionName().getText())) {
+                    return true;
+                } else {
+                    System.err.println("Unhandled Option: " + optionContext.getText());
+                }
+            }
+        }
+        return false;
+    }
 }
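
For orientation, here is a minimal, self-contained sketch (illustrative names, not part of the patch) of the two-stage mapping that javaFieldType above performs: protobuf wrapper types are first mapped to their boxed Java equivalents, and repeated fields are then wrapped in List<...>.

    final class FieldTypeMappingSketch {

        // Map a protobuf wrapper type name to its Java type, mirroring the first switch above.
        static String unwrap(String fieldType) {
            return switch (fieldType) {
                case "StringValue" -> "String";
                case "Int32Value", "UInt32Value" -> "Integer";
                case "Int64Value", "UInt64Value" -> "Long";
                case "FloatValue" -> "Float";
                case "DoubleValue" -> "Double";
                case "BoolValue" -> "Boolean";
                case "BytesValue" -> "Bytes";
                default -> fieldType;
            };
        }

        // Wrap a (possibly primitive) Java type name in List<...> for repeated fields.
        static String asRepeated(String fieldType) {
            return switch (fieldType) {
                case "int" -> "List<Integer>";
                case "long" -> "List<Long>";
                case "float" -> "List<Float>";
                case "double" -> "List<Double>";
                case "boolean" -> "List<Boolean>";
                default -> "List<" + fieldType + ">";
            };
        }

        public static void main(String[] args) {
            System.out.println(asRepeated(unwrap("UInt64Value"))); // List<Long>
            System.out.println(asRepeated("int"));                 // List<Integer>
        }
    }

A repeated google.protobuf.UInt64Value field therefore ends up typed as List<Long> in the generated record.
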
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/EnumGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/EnumGenerator.java
index 6d14325d..5f97aeda 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/EnumGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/EnumGenerator.java
@@ -1,10 +1,12 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl.generators;
 
+import static com.hedera.pbj.compiler.impl.Common.*;
+import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT;
+
 import com.hedera.pbj.compiler.impl.ContextualLookupHelper;
 import com.hedera.pbj.compiler.impl.FileType;
 import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser;
-
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
@@ -14,120 +16,155 @@
 import java.util.Map;
 import java.util.stream.Collectors;
 
-import static com.hedera.pbj.compiler.impl.Common.*;
-import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT;
-
-/**
- * Code for generating enum code
- */
+/** Code for generating enum code */
 @SuppressWarnings({"InconsistentTextBlockIndent", "EscapedSpace"})
 public final class EnumGenerator {
 
-	/** Record for an enum value temporary storage */
-	record EnumValue(String name, boolean deprecated, String javaDoc) {}
+    /** Record for an enum value temporary storage */
+    record EnumValue(String name, boolean deprecated, String javaDoc) {}
 
-	/**
-	 * Generate a Java enum from protobuf enum
-	 *
-	 * @param enumDef the parsed enum def
-	 * @param destinationSrcDir The destination source directory to generate into
-	 * @param lookupHelper Lookup helper for package information
-	 * @throws IOException if there was a problem writing generated code
-	 */
-	public static void generateEnumFile(Protobuf3Parser.EnumDefContext enumDef, File destinationSrcDir,
-								 final ContextualLookupHelper lookupHelper) throws IOException {
-		final String enumName = enumDef.enumName().getText();
-		final String modelPackage = lookupHelper.getPackageForEnum(FileType.MODEL, enumDef);
-		final String javaDocComment = (enumDef.docComment()== null) ? "" :
-				cleanDocStr(enumDef.docComment().getText().replaceAll("\n \\*\s*\n","\n * <p>\n"));
-		String deprecated = "";
-		final Map<Integer, EnumValue> enumValues = new HashMap<>();
-		int maxIndex = 0;
-		for (var item: enumDef.enumBody().enumElement()) {
-			if (item.enumField() != null && item.enumField().ident() != null) {
-				final var enumValueName = item.enumField().ident().getText();
-				final var enumNumber = Integer.parseInt(item.enumField().intLit().getText());
-				final String enumValueJavaDoc = cleanDocStr(
-						(item.enumField().docComment() == null || item.enumField().docComment().getText().isBlank()) ?
-								enumValueName :
-						item.enumField().docComment().getText()
-								.replaceAll("[\t ]*/\\*\\*([\n\t ]+\\*\s+)?","") // remove doc start indenting
-								.replaceAll("/\\*\\*","") //  remove doc start
-								.replaceAll("[\n\t ]+\\*/","") //  remove doc end
-								.replaceAll("\n[\t\s]+\\*\\*?","\n") // remove doc indenting
-								.replaceAll("/n\s*/n","/n") //  remove empty lines
-				);
-				maxIndex = Math.max(maxIndex, enumNumber);
-				enumValues.put(enumNumber, new EnumValue(enumValueName, false,enumValueJavaDoc));
-			} else if (item.optionStatement() != null){
-				if ("deprecated".equals(item.optionStatement().optionName().getText())) {
-					deprecated = "@Deprecated ";
-				} else {
-					System.err.println("Unhandled Option: "+item.optionStatement().getText());
-				}
-			} else {
-				System.err.println("EnumGenerator Warning - Unknown element: "+item+" -- "+item.getText());
-			}
-		}
-		try (FileWriter javaWriter = new FileWriter(getJavaFile(destinationSrcDir, modelPackage, enumName))) {
-			javaWriter.write(
-					"package "+modelPackage+";\n\n"+
-							createEnum(javaDocComment, deprecated, enumName,
-									maxIndex, enumValues, false)
-			);
-		}
-	}
+    /**
+     * Generate a Java enum from protobuf enum
+     *
+     * @param enumDef the parsed enum def
+     * @param destinationSrcDir The destination source directory to generate into
+     * @param lookupHelper Lookup helper for package information
+     * @throws IOException if there was a problem writing generated code
+     */
+    public static void generateEnumFile(
+            Protobuf3Parser.EnumDefContext enumDef,
+            File destinationSrcDir,
+            final ContextualLookupHelper lookupHelper)
+            throws IOException {
+        final String enumName = enumDef.enumName().getText();
+        final String modelPackage = lookupHelper.getPackageForEnum(FileType.MODEL, enumDef);
+        final String javaDocComment =
+                (enumDef.docComment() == null)
+                        ? ""
+                        : cleanDocStr(
+                                enumDef.docComment()
+                                        .getText()
+                                        .replaceAll("\n \\*\s*\n", "\n * <p>\n"));
+        String deprecated = "";
+        final Map<Integer, EnumValue> enumValues = new HashMap<>();
+        int maxIndex = 0;
+        for (var item : enumDef.enumBody().enumElement()) {
+            if (item.enumField() != null && item.enumField().ident() != null) {
+                final var enumValueName = item.enumField().ident().getText();
+                final var enumNumber = Integer.parseInt(item.enumField().intLit().getText());
+                final String enumValueJavaDoc =
+                        cleanDocStr(
+                                (item.enumField().docComment() == null
+                                                || item.enumField()
+                                                        .docComment()
+                                                        .getText()
+                                                        .isBlank())
+                                        ? enumValueName
+                                        : item.enumField()
+                                                .docComment()
+                                                .getText()
+                                                .replaceAll(
+                                                        "[\t ]*/\\*\\*([\n\t ]+\\*\s+)?",
+                                                        "") // remove doc start indenting
+                                                .replaceAll("/\\*\\*", "") //  remove doc start
+                                                .replaceAll("[\n\t ]+\\*/", "") //  remove doc end
+                                                .replaceAll(
+                                                        "\n[\t\s]+\\*\\*?",
+                                                        "\n") // remove doc indenting
+                                                .replaceAll("/n\s*/n", "/n") //  remove empty lines
+                                );
+                maxIndex = Math.max(maxIndex, enumNumber);
+                enumValues.put(enumNumber, new EnumValue(enumValueName, false, enumValueJavaDoc));
+            } else if (item.optionStatement() != null) {
+                if ("deprecated".equals(item.optionStatement().optionName().getText())) {
+                    deprecated = "@Deprecated ";
+                } else {
+                    System.err.println("Unhandled Option: " + item.optionStatement().getText());
+                }
+            } else {
+                System.err.println(
+                        "EnumGenerator Warning - Unknown element: "
+                                + item
+                                + " -- "
+                                + item.getText());
+            }
+        }
+        try (FileWriter javaWriter =
+                new FileWriter(getJavaFile(destinationSrcDir, modelPackage, enumName))) {
+            javaWriter.write(
+                    "package "
+                            + modelPackage
+                            + ";\n\n"
+                            + createEnum(
+                                    javaDocComment,
+                                    deprecated,
+                                    enumName,
+                                    maxIndex,
+                                    enumValues,
+                                    false));
+        }
+    }
 
-	/**
-	 * Generate code for a enum
-	 *
-	 * @param javaDocComment either enum javadoc comment or empty string
-	 * @param deprecated either @deprecated string or empty string
-	 * @param enumName the name for enum
-	 * @param maxIndex the max ordinal for enum
-	 * @param enumValues map of ordinal to enum value
-	 * @param addUnknown when true we add an enum value for one of
-	 * @return string code for enum
-	 */
-	static String createEnum(String javaDocComment, String deprecated, String enumName,
-							 int maxIndex, Map<Integer, EnumValue> enumValues, boolean addUnknown) {
-		final List<String> enumValuesCode = new ArrayList<>(maxIndex);
-		if (addUnknown) {
-			enumValuesCode.add(
-      				"""
+    /**
+     * Generate code for an enum
+     *
+     * @param javaDocComment the enum javadoc comment, or an empty string
+     * @param deprecated the @Deprecated annotation string, or an empty string
+     * @param enumName the name of the enum
+     * @param maxIndex the maximum ordinal used by the enum
+     * @param enumValues map of ordinal to enum value
+     * @param addUnknown when true, add an UNSET value for use by OneOf fields
+     * @return the generated enum source code
+     */
+    static String createEnum(
+            String javaDocComment,
+            String deprecated,
+            String enumName,
+            int maxIndex,
+            Map<Integer, EnumValue> enumValues,
+            boolean addUnknown) {
+        final List<String> enumValuesCode = new ArrayList<>(maxIndex);
+        if (addUnknown) {
+            enumValuesCode.add(
+                    """
 					/**
 					 * Enum value for a unset OneOf, to avoid null OneOfs
 					 */
 					UNSET(-1, "UNSET")""");
-		}
-		for (int i = 0; i <= maxIndex; i++) {
-			final EnumValue enumValue = enumValues.get(i);
-			if (enumValue != null) {
-				final String cleanedEnumComment =
-				   """
+        }
+        for (int i = 0; i <= maxIndex; i++) {
+            final EnumValue enumValue = enumValues.get(i);
+            if (enumValue != null) {
+                final String cleanedEnumComment =
+                        """
 					/**$enumJavadoc
 					*/
 					"""
-					.replace("$enumJavadoc", enumValue.javaDoc);
-				final String deprecatedText = enumValue.deprecated ? "@Deprecated\n" : "";
-				enumValuesCode.add(
-						cleanedEnumComment
-								+ deprecatedText+ camelToUpperSnake(enumValue.name) +
-								"("+i+", \""+enumValue.name+"\")");
-			}
-		}
-		return """
+                                .replace("$enumJavadoc", enumValue.javaDoc);
+                final String deprecatedText = enumValue.deprecated ? "@Deprecated\n" : "";
+                enumValuesCode.add(
+                        cleanedEnumComment
+                                + deprecatedText
+                                + camelToUpperSnake(enumValue.name)
+                                + "("
+                                + i
+                                + ", \""
+                                + enumValue.name
+                                + "\")");
+            }
+        }
+        return """
 				$javaDocComment
-				$deprecated$public enum $enumName 
+				$deprecated$public enum $enumName
 				        implements com.hedera.pbj.runtime.EnumWithProtoMetadata {
 				$enumValues;
-				    
+
 				    /** The field ordinal in protobuf for this type */
 				    private final int protoOrdinal;
-				    
+
 				    /** The original field name in protobuf for this type */
 				    private final String protoName;
-				    
+
 				    /**
 				     * OneOf Type Enum Constructor
 				     *
@@ -138,7 +175,7 @@ static String createEnum(String javaDocComment, String deprecated, String enumNa
 				        this.protoOrdinal = protoOrdinal;
 				        this.protoName = protoName;
 				    }
-				    
+
 				    /**
 				     * Get the oneof field ordinal in protobuf for this type
 				     *
@@ -147,7 +184,7 @@ static String createEnum(String javaDocComment, String deprecated, String enumNa
 				    public int protoOrdinal() {
 				        return protoOrdinal;
 				    }
-				    
+
 				    /**
 				     * Get the original field name in protobuf for this type
 				     *
@@ -156,7 +193,7 @@ public int protoOrdinal() {
 				    public String protoName() {
 				        return protoName;
 				    }
-				    
+
 				    /**
 				     * Get enum from protobuf ordinal
 				     *
@@ -170,7 +207,7 @@ public String protoName() {
 				            default -> throw new IllegalArgumentException("Unknown protobuf ordinal "+ordinal);
 				        };
 				    }
-				    
+
 				    /**
 				     * Get enum from string name, supports the enum or protobuf format name
 				     *
@@ -185,25 +222,43 @@ public String protoName() {
 				    }
 				}
 				"""
-				.replace("$javaDocComment", javaDocComment)
-				.replace("$deprecated$", deprecated)
-				.replace("$enumName", enumName)
-				.replace("$enumValues", String.join(",\n\n", enumValuesCode).indent(DEFAULT_INDENT))
-				.replace("$caseStatements", enumValues.entrySet()
-						.stream()
-						.map((entry) -> "case %s -> %s;".formatted(entry.getKey(), camelToUpperSnake(entry.getValue().name))
-								.indent(DEFAULT_INDENT * 3))
-						.collect(Collectors.joining("\n")))
-				.replace("$fromStringCaseStatements", enumValues.values().stream().map(enumValue -> {
-					if (camelToUpperSnake(enumValue.name).equals(enumValue.name)) {
-						return "case \"%s\" -> %s;"
-								.formatted(enumValue.name, camelToUpperSnake(enumValue.name))
-								.indent(DEFAULT_INDENT * 3);
-					} else {
-						return "case \"%s\", \"%s\" -> %s;"
-								.formatted(enumValue.name, camelToUpperSnake(enumValue.name), camelToUpperSnake(enumValue.name))
-								.indent(DEFAULT_INDENT * 3);
-					}
-				}).collect(Collectors.joining("\n")));
-	}
+                .replace("$javaDocComment", javaDocComment)
+                .replace("$deprecated$", deprecated)
+                .replace("$enumName", enumName)
+                .replace("$enumValues", String.join(",\n\n", enumValuesCode).indent(DEFAULT_INDENT))
+                .replace(
+                        "$caseStatements",
+                        enumValues.entrySet().stream()
+                                .map(
+                                        (entry) ->
+                                                "case %s -> %s;"
+                                                        .formatted(
+                                                                entry.getKey(),
+                                                                camelToUpperSnake(
+                                                                        entry.getValue().name))
+                                                        .indent(DEFAULT_INDENT * 3))
+                                .collect(Collectors.joining("\n")))
+                .replace(
+                        "$fromStringCaseStatements",
+                        enumValues.values().stream()
+                                .map(
+                                        enumValue -> {
+                                            if (camelToUpperSnake(enumValue.name)
+                                                    .equals(enumValue.name)) {
+                                                return "case \"%s\" -> %s;"
+                                                        .formatted(
+                                                                enumValue.name,
+                                                                camelToUpperSnake(enumValue.name))
+                                                        .indent(DEFAULT_INDENT * 3);
+                                            } else {
+                                                return "case \"%s\", \"%s\" -> %s;"
+                                                        .formatted(
+                                                                enumValue.name,
+                                                                camelToUpperSnake(enumValue.name),
+                                                                camelToUpperSnake(enumValue.name))
+                                                        .indent(DEFAULT_INDENT * 3);
+                                            }
+                                        })
+                                .collect(Collectors.joining("\n")));
+    }
 }
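
For the $fromStringCaseStatements replacement above, each enum value yields either one case label (when the protobuf name is already UPPER_SNAKE) or two (the protobuf spelling plus the enum-constant spelling). A small sketch of that decision, using a simplified stand-in for Common.camelToUpperSnake (the real helper may handle more cases); class and method names here are illustrative only:

    import java.util.List;
    import java.util.stream.Collectors;

    final class FromStringCasesSketch {

        // Simplified stand-in for Common.camelToUpperSnake.
        static String camelToUpperSnake(String name) {
            return name.replaceAll("([a-z0-9])([A-Z])", "$1_$2").toUpperCase();
        }

        // Emit one case label when the protobuf name is already UPPER_SNAKE, two otherwise.
        static String caseFor(String protoName) {
            final String constant = camelToUpperSnake(protoName);
            return constant.equals(protoName)
                    ? "case \"%s\" -> %s;".formatted(protoName, constant)
                    : "case \"%s\", \"%s\" -> %s;".formatted(protoName, constant, constant);
        }

        public static void main(String[] args) {
            // "ACCOUNT_ID" is already upper snake, so only one label is emitted;
            // "AccountId" gets both the protobuf spelling and the constant spelling.
            System.out.println(
                    List.of("ACCOUNT_ID", "AccountId").stream()
                            .map(FromStringCasesSketch::caseFor)
                            .collect(Collectors.joining("\n")));
        }
    }
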
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/Generator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/Generator.java
index e8ef58e0..16cb60a9 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/Generator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/Generator.java
@@ -5,37 +5,35 @@
 import com.hedera.pbj.compiler.impl.generators.json.JsonCodecGenerator;
 import com.hedera.pbj.compiler.impl.generators.protobuf.CodecGenerator;
 import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser;
-
 import java.io.File;
 import java.io.IOException;
 import java.util.List;
 
-/**
- * Interface for a code generator from protobuf message definition
- */
+/** Interface for a code generator from protobuf message definition */
 public interface Generator {
 
-    /**
-     * List of all generator classes
-     */
-    List<Class<? extends Generator>> GENERATORS = List.of(
-            ModelGenerator.class,
-            SchemaGenerator.class,
-            CodecGenerator.class,
-            JsonCodecGenerator.class,
-            TestGenerator.class
-    );
+    /** List of all generator classes */
+    List<Class<? extends Generator>> GENERATORS =
+            List.of(
+                    ModelGenerator.class,
+                    SchemaGenerator.class,
+                    CodecGenerator.class,
+                    JsonCodecGenerator.class,
+                    TestGenerator.class);
 
     /**
      * Generate a code from protobuf message type
      *
-     * @param msgDef                the parsed message
-     * @param destinationSrcDir     the destination source directory to generate into
+     * @param msgDef the parsed message
+     * @param destinationSrcDir the destination source directory to generate into
      * @param destinationTestSrcDir the destination source directory to generate test files into
-     * @param lookupHelper          Lookup helper for global context lookups
+     * @param lookupHelper Lookup helper for global context lookups
      * @throws IOException if there was a problem writing generated code
      */
-    void generate(final Protobuf3Parser.MessageDefContext msgDef, final File destinationSrcDir,
-                  File destinationTestSrcDir, final ContextualLookupHelper lookupHelper) throws IOException;
-
+    void generate(
+            final Protobuf3Parser.MessageDefContext msgDef,
+            final File destinationSrcDir,
+            File destinationTestSrcDir,
+            final ContextualLookupHelper lookupHelper)
+            throws IOException;
 }
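
Any implementation of the reformatted Generator interface has to accept the four-argument generate(...) shown above. A minimal, hypothetical sketch follows; it assumes the pbj-compiler classes are on the classpath and that MessageDefContext exposes the usual messageName() accessor generated from the Protobuf3 grammar, and it is not one of the GENERATORS listed above.

    package com.hedera.pbj.compiler.impl.generators;

    import com.hedera.pbj.compiler.impl.ContextualLookupHelper;
    import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser;
    import java.io.File;
    import java.io.IOException;

    /** Hypothetical generator that only logs which message it was asked to handle. */
    public final class LoggingGenerator implements Generator {

        @Override
        public void generate(
                final Protobuf3Parser.MessageDefContext msgDef,
                final File destinationSrcDir,
                File destinationTestSrcDir,
                final ContextualLookupHelper lookupHelper)
                throws IOException {
            // A real generator writes Java sources under destinationSrcDir / destinationTestSrcDir.
            System.out.println("Would generate code for " + msgDef.messageName().getText());
        }
    }
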
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/ModelGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/ModelGenerator.java
index 912ecc2e..4bf2f7b5 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/ModelGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/ModelGenerator.java
@@ -34,16 +34,16 @@
 import java.util.stream.Collectors;
 
 /**
- * Code generator that parses protobuf files and generates nice Java source for record files for each message type and
- * enum.
+ * Code generator that parses protobuf files and generates nice Java source for record files for
+ * each message type and enum.
  */
 @SuppressWarnings({"StringConcatenationInLoop", "EscapedSpace"})
 public final class ModelGenerator implements Generator {
 
     private static final String NON_NULL_ANNOTATION = "@NonNull";
 
-	private static final String HASH_CODE_MANIPULATION =
-		"""
+    private static final String HASH_CODE_MANIPULATION =
+            """
 		// Shifts: 30, 27, 16, 20, 5, 18, 10, 24, 30
 		hashCode += hashCode << 30;
 		hashCode ^= hashCode >>> 27;
@@ -54,158 +54,194 @@ public final class ModelGenerator implements Generator {
 		hashCode += hashCode << 10;
 		hashCode ^= hashCode >>> 24;
 		hashCode += hashCode << 30;
-		""".indent(DEFAULT_INDENT);
+		"""
+                    .indent(DEFAULT_INDENT);
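
The HASH_CODE_MANIPULATION text block above is spliced into every generated hashCode() implementation. This hunk shows only its first two and last three steps; the leading "Shifts: 30, 27, 16, 20, 5, 18, 10, 24, 30" comment implies the full alternating add/xor sequence. A standalone sketch under that assumption (the middle steps and the use of long are inferred, not copied from the template):

    final class HashMixSketch {

        // Avalanche mixing matching the shift list 30, 27, 16, 20, 5, 18, 10, 24, 30;
        // steps marked "inferred" are not visible in this hunk and are assumptions.
        static long mix(long hashCode) {
            hashCode += hashCode << 30;
            hashCode ^= hashCode >>> 27;
            hashCode += hashCode << 16; // inferred
            hashCode ^= hashCode >>> 20; // inferred
            hashCode += hashCode << 5; // inferred
            hashCode ^= hashCode >>> 18; // inferred
            hashCode += hashCode << 10;
            hashCode ^= hashCode >>> 24;
            hashCode += hashCode << 30;
            return hashCode;
        }

        public static void main(String[] args) {
            // Nearby inputs diverge widely after mixing, which is the point of the sequence.
            System.out.println(mix(1L));
            System.out.println(mix(2L));
        }
    }
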
 
-	/**
-	 * {@inheritDoc}
-	 *
-	 * <p>Generates a new model object, as a Java Record type.
-	 */
-	public void generate(final MessageDefContext msgDef,
-                         final File destinationSrcDir,
-                         final File destinationTestSrcDir, final ContextualLookupHelper lookupHelper) throws IOException {
-
-
-		// The javaRecordName will be something like "AccountID".
-		final var javaRecordName = lookupHelper.getUnqualifiedClassForMessage(FileType.MODEL, msgDef);
-		// The modelPackage is the Java package to put the model class into.
-		final String modelPackage = lookupHelper.getPackageForMessage(FileType.MODEL, msgDef);
-		// The File to write the sources that we generate into
-		final File javaFile = getJavaFile(destinationSrcDir, modelPackage, javaRecordName);
-		// The javadoc comment to use for the model class, which comes **directly** from the protobuf schema,
-		// but is cleaned up and formatted for use in JavaDoc.
-		String javaDocComment = (msgDef.docComment()== null) ? "" :
-				cleanDocStr(msgDef.docComment().getText().replaceAll("\n \\*\s*\n","\n * <p>\n"));
-		// The Javadoc "@Deprecated" tag, which is set if the protobuf schema says the field is deprecated
-		String deprecated = "";
-		// The list of fields, as defined in the protobuf schema
-		final List<Field> fields = new ArrayList<>();
-		// The generated Java code for an enum field if OneOf is used
-		final List<String> oneofEnums = new ArrayList<>();
-		// The generated Java code for getters if OneOf is used
-		final List<String> oneofGetters = new ArrayList<>();
-		// The generated Java code for has methods for normal fields
-		final List<String> hasMethods = new ArrayList<>();
-		// The generated Java import statements. We'll build this up as we go.
-		final Set<String> imports = new TreeSet<>();
-		imports.add("com.hedera.pbj.runtime");
-		imports.add("com.hedera.pbj.runtime.io");
-		imports.add("com.hedera.pbj.runtime.io.buffer");
-		imports.add("com.hedera.pbj.runtime.io.stream");
-		imports.add("edu.umd.cs.findbugs.annotations");
-
-		// Iterate over all the items in the protobuf schema
-		for (final var item : msgDef.messageBody().messageElement()) {
-			if (item.messageDef() != null) { // process sub messages
-				generate(item.messageDef(), destinationSrcDir, destinationTestSrcDir, lookupHelper);
-			} else if (item.oneof() != null) { // process one ofs
-				oneofGetters.addAll(generateCodeForOneOf(lookupHelper, item, javaRecordName, imports, oneofEnums, fields));
-			} else if (item.mapField() != null) { // process map fields
-				final MapField field = new MapField(item.mapField(), lookupHelper);
-				fields.add(field);
-				field.addAllNeededImports(imports, true, false, false);
-			} else if (item.field() != null && item.field().fieldName() != null) {
-				generateCodeForField(lookupHelper, item, fields, imports, hasMethods);
-			} else if (item.optionStatement() != null){
-				if ("deprecated".equals(item.optionStatement().optionName().getText())) {
-					deprecated = "@Deprecated ";
-				} else {
-					System.err.println("Unhandled Option: "+item.optionStatement().getText());
-				}
-			} else if (item.reserved() == null){ // ignore reserved and warn about anything else
-				System.err.println("ModelGenerator Warning - Unknown element: "+item+" -- "+item.getText());
-			}
-		}
+    /**
+     * {@inheritDoc}
+     *
+     * <p>Generates a new model object, as a Java Record type.
+     */
+    public void generate(
+            final MessageDefContext msgDef,
+            final File destinationSrcDir,
+            final File destinationTestSrcDir,
+            final ContextualLookupHelper lookupHelper)
+            throws IOException {
 
-		// process field java doc and insert into record java doc
-		if (!fields.isEmpty()) {
-			String recordJavaDoc = javaDocComment.isEmpty() ? "/**\n * " + javaRecordName :
-					javaDocComment.replaceAll("\n\s*\\*/", "");
-			recordJavaDoc += "\n *";
-			for (final var field : fields) {
-				recordJavaDoc += "\n * @param "+field.nameCamelFirstLower()+" "+
-							field.comment()
-								.replaceAll("\n", "\n *         "+" ".repeat(field.nameCamelFirstLower().length()));
-			}
-			recordJavaDoc += "\n */";
-			javaDocComment = cleanDocStr(recordJavaDoc);
-		}
+        // The javaRecordName will be something like "AccountID".
+        final var javaRecordName =
+                lookupHelper.getUnqualifiedClassForMessage(FileType.MODEL, msgDef);
+        // The modelPackage is the Java package to put the model class into.
+        final String modelPackage = lookupHelper.getPackageForMessage(FileType.MODEL, msgDef);
+        // The File to write the sources that we generate into
+        final File javaFile = getJavaFile(destinationSrcDir, modelPackage, javaRecordName);
+        // The javadoc comment to use for the model class, which comes **directly**
+        // from the protobuf schema, but is cleaned up and formatted
+        // for use in JavaDoc.
+        String javaDocComment =
+                (msgDef.docComment() == null)
+                        ? ""
+                        : cleanDocStr(
+                                msgDef.docComment()
+                                        .getText()
+                                        .replaceAll("\n \\*\s*\n", "\n * <p>\n"));
+        // The Javadoc "@Deprecated" tag, which is set if the protobuf schema says the message
+        // is deprecated.
+        String deprecated = "";
+        // The list of fields, as defined in the protobuf schema
+        final List<Field> fields = new ArrayList<>();
+        // The generated Java code for an enum field if OneOf is used
+        final List<String> oneofEnums = new ArrayList<>();
+        // The generated Java code for getters if OneOf is used
+        final List<String> oneofGetters = new ArrayList<>();
+        // The generated Java code for has methods for normal fields
+        final List<String> hasMethods = new ArrayList<>();
+        // The generated Java import statements. We'll build this up as we go.
+        final Set<String> imports = new TreeSet<>();
+        imports.add("com.hedera.pbj.runtime");
+        imports.add("com.hedera.pbj.runtime.io");
+        imports.add("com.hedera.pbj.runtime.io.buffer");
+        imports.add("com.hedera.pbj.runtime.io.stream");
+        imports.add("edu.umd.cs.findbugs.annotations");
 
-		// === Build Body Content
-		String bodyContent = "";
+        // Iterate over all the items in the protobuf schema
+        for (final var item : msgDef.messageBody().messageElement()) {
+            if (item.messageDef() != null) { // process sub messages
+                generate(item.messageDef(), destinationSrcDir, destinationTestSrcDir, lookupHelper);
+            } else if (item.oneof() != null) { // process one ofs
+                oneofGetters.addAll(
+                        generateCodeForOneOf(
+                                lookupHelper, item, javaRecordName, imports, oneofEnums, fields));
+            } else if (item.mapField() != null) { // process map fields
+                final MapField field = new MapField(item.mapField(), lookupHelper);
+                fields.add(field);
+                field.addAllNeededImports(imports, true, false, false);
+            } else if (item.field() != null && item.field().fieldName() != null) {
+                generateCodeForField(lookupHelper, item, fields, imports, hasMethods);
+            } else if (item.optionStatement() != null) {
+                if ("deprecated".equals(item.optionStatement().optionName().getText())) {
+                    deprecated = "@Deprecated ";
+                } else {
+                    System.err.println("Unhandled Option: " + item.optionStatement().getText());
+                }
+            } else if (item.reserved() == null) { // ignore reserved and warn about anything else
+                System.err.println(
+                        "ModelGenerator Warning - Unknown element: "
+                                + item
+                                + " -- "
+                                + item.getText());
+            }
+        }
 
-		// static codec and default instance
-		bodyContent +=
-				generateCodecFields(msgDef, lookupHelper, javaRecordName);
+        // process field java doc and insert into record java doc
+        if (!fields.isEmpty()) {
+            String recordJavaDoc =
+                    javaDocComment.isEmpty()
+                            ? "/**\n * " + javaRecordName
+                            : javaDocComment.replaceAll("\n\s*\\*/", "");
+            recordJavaDoc += "\n *";
+            for (final var field : fields) {
+                recordJavaDoc +=
+                        "\n * @param "
+                                + field.nameCamelFirstLower()
+                                + " "
+                                + field.comment()
+                                        .replaceAll(
+                                                "\n",
+                                                "\n *         "
+                                                        + " "
+                                                                .repeat(
+                                                                        field.nameCamelFirstLower()
+                                                                                .length()));
+            }
+            recordJavaDoc += "\n */";
+            javaDocComment = cleanDocStr(recordJavaDoc);
+        }
 
-		// constructor
-		bodyContent += generateConstructor(javaRecordName, fields, true, msgDef, lookupHelper);
+        // === Build Body Content
+        String bodyContent = "";
 
-		bodyContent += generateHashCode(fields);
+        // static codec and default instance
+        bodyContent += generateCodecFields(msgDef, lookupHelper, javaRecordName);
 
-		bodyContent += generateEquals(fields, javaRecordName);
+        // constructor
+        bodyContent += generateConstructor(javaRecordName, fields, true, msgDef, lookupHelper);
 
-		final List<Field> comparableFields = filterComparableFields(msgDef, lookupHelper, fields);
-		final boolean hasComparableFields = !comparableFields.isEmpty();
-		if (hasComparableFields) {
-			bodyContent += generateCompareTo(comparableFields, javaRecordName, destinationSrcDir);
-		}
+        bodyContent += generateHashCode(fields);
 
-		// Has methods
-		bodyContent += String.join("\n", hasMethods);
-		bodyContent += "\n";
+        bodyContent += generateEquals(fields, javaRecordName);
 
-		// oneof getters
-		bodyContent += String.join("\n    ", oneofGetters);
-		bodyContent += "\n";
+        final List<Field> comparableFields = filterComparableFields(msgDef, lookupHelper, fields);
+        final boolean hasComparableFields = !comparableFields.isEmpty();
+        if (hasComparableFields) {
+            bodyContent += generateCompareTo(comparableFields, javaRecordName, destinationSrcDir);
+        }
 
-		// builder copy & new builder methods
-		bodyContent = genrateBuilderFactoryMethods(bodyContent, fields);
+        // Has methods
+        bodyContent += String.join("\n", hasMethods);
+        bodyContent += "\n";
 
-		// generate builder
-		bodyContent += generateBuilder(msgDef, fields, lookupHelper);
-		bodyContent += "\n";
+        // oneof getters
+        bodyContent += String.join("\n    ", oneofGetters);
+        bodyContent += "\n";
 
-		// oneof enums
-		bodyContent += String.join("\n    ", oneofEnums);
+        // builder copy & new builder methods
+        bodyContent = genrateBuilderFactoryMethods(bodyContent, fields);
 
-		// === Build file
-		try (final FileWriter javaWriter = new FileWriter(javaFile)) {
-			javaWriter.write(
-					generateClass(modelPackage, imports, javaDocComment, deprecated, javaRecordName, fields, bodyContent, hasComparableFields)
-			);
-		}
-	}
-
-	/**
-	 * Generating method that assembles all the previously generated pieces together
-	 * @param modelPackage the model package to use for the code generation
-	 * @param imports the imports to use for the code generation
-	 * @param javaDocComment the java doc comment to use for the code generation
-	 * @param deprecated the deprecated annotation to add
-	 * @param javaRecordName the name of the class
-	 * @param fields the fields to use for the code generation
-	 * @param bodyContent the body content to use for the code generation
-	 * @return the generated code
-	 */
-	@NonNull
-	private static String generateClass(final String modelPackage,
-										final Set<String> imports,
-										final String javaDocComment,
-										final String deprecated,
-										final String javaRecordName,
-										final List<Field> fields,
-										final String bodyContent,
-										final boolean isComparable) {
-		final String implementsComparable;
-		if (isComparable) {
-			imports.add("java.lang.Comparable");
-			implementsComparable = "implements Comparable<$javaRecordName> ";
-		} else {
-			implementsComparable = "";
-		}
+        // generate builder
+        bodyContent += generateBuilder(msgDef, fields, lookupHelper);
+        bodyContent += "\n";
+
+        // oneof enums
+        bodyContent += String.join("\n    ", oneofEnums);
+
+        // === Build file
+        try (final FileWriter javaWriter = new FileWriter(javaFile)) {
+            javaWriter.write(
+                    generateClass(
+                            modelPackage,
+                            imports,
+                            javaDocComment,
+                            deprecated,
+                            javaRecordName,
+                            fields,
+                            bodyContent,
+                            hasComparableFields));
+        }
+    }
 
-		return """
+    /**
+     * Assembles all the previously generated pieces into the final class source
+     *
+     * @param modelPackage the model package to use for the code generation
+     * @param imports the imports to use for the code generation
+     * @param javaDocComment the java doc comment to use for the code generation
+     * @param deprecated the deprecated annotation to add
+     * @param javaRecordName the name of the class
+     * @param fields the fields to use for the code generation
+     * @param bodyContent the body content to use for the code generation
+     * @return the generated code
+     */
+    @NonNull
+    private static String generateClass(
+            final String modelPackage,
+            final Set<String> imports,
+            final String javaDocComment,
+            final String deprecated,
+            final String javaRecordName,
+            final List<Field> fields,
+            final String bodyContent,
+            final boolean isComparable) {
+        final String implementsComparable;
+        if (isComparable) {
+            imports.add("java.lang.Comparable");
+            implementsComparable = "implements Comparable<$javaRecordName> ";
+        } else {
+            implementsComparable = "";
+        }
+
+        return """
 				package $package;
 				$imports
 				import com.hedera.pbj.runtime.Codec;
@@ -213,68 +249,86 @@ private static String generateClass(final String modelPackage,
 				import edu.umd.cs.findbugs.annotations.Nullable;
 				import edu.umd.cs.findbugs.annotations.NonNull;
 				import static java.util.Objects.requireNonNull;
-									
+
 				$javaDocComment$deprecated
 				public record $javaRecordName(
 				$fields) $implementsComparable{
 				$bodyContent}
 				"""
-				.replace("$package", modelPackage)
-				.replace("$imports", imports.isEmpty() ? "" : imports.stream().collect(Collectors.joining(".*;\nimport ", "\nimport ", ".*;\n")))
-				.replace("$javaDocComment", javaDocComment)
-				.replace("$deprecated", deprecated)
-				.replace("$implementsComparable", implementsComparable)
-				.replace("$javaRecordName", javaRecordName)
-				.replace("$fields", fields.stream().map(field ->
-						getFieldAnnotations(field)
-								+ field.javaFieldType() + " " + field.nameCamelFirstLower()
-				).collect(Collectors.joining(",\n")).indent(DEFAULT_INDENT))
-				.replace("$bodyContent", bodyContent);
-	}
-
-	/**
-	 * Returns a set of annotations for a given field.
-	 * @param field a field
-	 * @return an empty string, or a string with Java annotations ending with a space
-	 */
-	private static String getFieldAnnotations(final Field field) {
-		if (field.repeated()) return NON_NULL_ANNOTATION + " ";
-
-		return switch (field.type()) {
-			case MESSAGE -> "@Nullable ";
-			case BYTES, STRING -> NON_NULL_ANNOTATION + " ";
-			default -> "";
-		};
-	}
-
-	/**
-	 * Filter the fields to only include those that are comparable
-	 * @param msgDef The message definition
-	 * @param lookupHelper The lookup helper
-	 * @param fields The fields to filter
-	 * @return the filtered fields
-	 */
-	@NonNull
-	private static List<Field> filterComparableFields(final MessageDefContext msgDef,
-													final ContextualLookupHelper lookupHelper,
-													final List<Field> fields) {
-		final Map<String, Field> fieldByName = fields.stream().collect(toMap(Field::name, f -> f));
-		final List<String> comparableFields = lookupHelper.getComparableFields(msgDef);
-		return comparableFields.stream().map(fieldByName::get).collect(Collectors.toList());
-	}
-
-	/**
-	 * Generates the compareTo method
-	 *
-	 * @param fields                the fields to use for the code generation
-	 * @param javaRecordName        the name of the class
-	 * @param destinationSrcDir
-	 * @return the generated code
-	 */
-	@NonNull
-	private static String generateCompareTo(final List<Field> fields, final String javaRecordName, final File destinationSrcDir) {
-		String bodyContent =
-			"""
+                .replace("$package", modelPackage)
+                .replace(
+                        "$imports",
+                        imports.isEmpty()
+                                ? ""
+                                : imports.stream()
+                                        .collect(
+                                                Collectors.joining(
+                                                        ".*;\nimport ", "\nimport ", ".*;\n")))
+                .replace("$javaDocComment", javaDocComment)
+                .replace("$deprecated", deprecated)
+                .replace("$implementsComparable", implementsComparable)
+                .replace("$javaRecordName", javaRecordName)
+                .replace(
+                        "$fields",
+                        fields.stream()
+                                .map(
+                                        field ->
+                                                getFieldAnnotations(field)
+                                                        + field.javaFieldType()
+                                                        + " "
+                                                        + field.nameCamelFirstLower())
+                                .collect(Collectors.joining(",\n"))
+                                .indent(DEFAULT_INDENT))
+                .replace("$bodyContent", bodyContent);
+    }
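
The $imports replacement in generateClass above turns the sorted package set into wildcard import statements via Collectors.joining with a prefix and suffix. A small standalone sketch (illustrative only) showing what that expression produces for a two-package set:

    import java.util.Set;
    import java.util.TreeSet;
    import java.util.stream.Collectors;

    final class ImportJoiningSketch {
        public static void main(String[] args) {
            final Set<String> imports =
                    new TreeSet<>(Set.of("com.hedera.pbj.runtime", "com.hedera.pbj.runtime.io"));
            // Delimiter, prefix and suffix supply the ".*;" endings and the "import " keywords.
            final String joined =
                    imports.isEmpty()
                            ? ""
                            : imports.stream()
                                    .collect(
                                            Collectors.joining(
                                                    ".*;\nimport ", "\nimport ", ".*;\n"));
            System.out.print(joined);
            // Prints (after a leading newline):
            // import com.hedera.pbj.runtime.*;
            // import com.hedera.pbj.runtime.io.*;
        }
    }
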
+
+    /**
+     * Returns a set of annotations for a given field.
+     *
+     * @param field a field
+     * @return an empty string, or a string with Java annotations ending with a space
+     */
+    private static String getFieldAnnotations(final Field field) {
+        if (field.repeated()) return NON_NULL_ANNOTATION + " ";
+
+        return switch (field.type()) {
+            case MESSAGE -> "@Nullable ";
+            case BYTES, STRING -> NON_NULL_ANNOTATION + " ";
+            default -> "";
+        };
+    }
+
+    /**
+     * Filter the fields to only include those that are comparable
+     *
+     * @param msgDef The message definition
+     * @param lookupHelper The lookup helper
+     * @param fields The fields to filter
+     * @return the filtered fields
+     */
+    @NonNull
+    private static List<Field> filterComparableFields(
+            final MessageDefContext msgDef,
+            final ContextualLookupHelper lookupHelper,
+            final List<Field> fields) {
+        final Map<String, Field> fieldByName = fields.stream().collect(toMap(Field::name, f -> f));
+        final List<String> comparableFields = lookupHelper.getComparableFields(msgDef);
+        return comparableFields.stream().map(fieldByName::get).collect(Collectors.toList());
+    }
+
+    /**
+     * Generates the compareTo method
+     *
+     * @param fields the fields to use for the code generation
+     * @param javaRecordName the name of the class
+     * @param destinationSrcDir the destination source directory
+     * @return the generated code
+     */
+    @NonNull
+    private static String generateCompareTo(
+            final List<Field> fields, final String javaRecordName, final File destinationSrcDir) {
+        String bodyContent =
+                """
 			/**
 			 * Implementation of Comparable interface
 			 */
@@ -284,33 +338,35 @@ public int compareTo($javaRecordName thatObj) {
 					return 1;
 				}
 				int result = 0;
-			""".replace("$javaRecordName", javaRecordName).indent(DEFAULT_INDENT);
+			"""
+                        .replace("$javaRecordName", javaRecordName)
+                        .indent(DEFAULT_INDENT);
 
-		bodyContent += Common.getFieldsCompareToStatements(fields, "", destinationSrcDir);
+        bodyContent += Common.getFieldsCompareToStatements(fields, "", destinationSrcDir);
 
-		bodyContent +=
-			"""
+        bodyContent += """
 				return result;
 			}
 			""".indent(DEFAULT_INDENT);
-		return bodyContent;
-	}
-
-	/**
-	 * Generates the equals method
-	 * @param fields the fields to use for the code generation
-	 * @param javaRecordName the name of the class
-	 * @return the generated code
-	 */
-	@NonNull
-	private static String generateEquals(final List<Field> fields, final String javaRecordName) {
-		String equalsStatements = "";
-		// Generate a call to private method that iterates through fields
-		// and calculates the hashcode.
-		equalsStatements = Common.getFieldsEqualsStatements(fields, equalsStatements);
-
-		String bodyContent =
-		"""
+        return bodyContent;
+    }
+
+    /**
+     * Generates the equals method
+     *
+     * @param fields the fields to use for the code generation
+     * @param javaRecordName the name of the class
+     * @return the generated code
+     */
+    @NonNull
+    private static String generateEquals(final List<Field> fields, final String javaRecordName) {
+        String equalsStatements = "";
+        // Generate the per-field equality comparison statements.
+        equalsStatements = Common.getFieldsEqualsStatements(fields, equalsStatements);
+
+        String bodyContent =
+                """
 		/**
 		* Override the default equals method for
 		*/
@@ -320,29 +376,32 @@ public boolean equals(Object that) {
 		        return false;
 		    }
 		    $javaRecordName thatObj = ($javaRecordName)that;
-		""".replace("$javaRecordName", javaRecordName).indent(DEFAULT_INDENT);
-
-		bodyContent += equalsStatements.indent(DEFAULT_INDENT);
-		bodyContent +=
 		"""
+                        .replace("$javaRecordName", javaRecordName)
+                        .indent(DEFAULT_INDENT);
+
+        bodyContent += equalsStatements.indent(DEFAULT_INDENT);
+        bodyContent += """
 		    return true;
 		}
 		""".indent(DEFAULT_INDENT);
-		return bodyContent;
-	}
-
-	/**
-	 * Generates the hashCode method
-	 * @param fields the fields to use for the code generation
-	 * @return the generated code
-	 */
-	@NonNull
-	private static String generateHashCode(final List<Field> fields) {
-		// Generate a call to private method that iterates through fields and calculates the hashcode
-		final String statements = getFieldsHashCode(fields, "");
-
-		String bodyContent =
-			"""
+        return bodyContent;
+    }
+
+    /**
+     * Generates the hashCode method
+     *
+     * @param fields the fields to use for the code generation
+     * @return the generated code
+     */
+    @NonNull
+    private static String generateHashCode(final List<Field> fields) {
+        // Generate a call to a private method that iterates through the fields and
+        // calculates the hashcode.
+        final String statements = getFieldsHashCode(fields, "");
+
+        String bodyContent =
+                """
 			/**
 			* Override the default hashCode method for
 			* all other objects to make hashCode
@@ -350,36 +409,39 @@ private static String generateHashCode(final List<Field> fields) {
 			@Override
 			public int hashCode() {
 				int result = 1;
-			""".indent(DEFAULT_INDENT);
+			"""
+                        .indent(DEFAULT_INDENT);
 
-		bodyContent += statements;
+        bodyContent += statements;
 
-		bodyContent +=
-			"""
+        bodyContent +=
+                """
 				long hashCode = result;
 			$hashCodeManipulation
 				return (int)hashCode;
 			}
-			""".replace("$hashCodeManipulation", HASH_CODE_MANIPULATION)
-				.indent(DEFAULT_INDENT);
-		return bodyContent;
-	}
-
-	/**
-	 * Generates a pre-populated constructor for a class.
-	 * @param fields the fields to use for the code generation
-	 * @return the generated code
-	 */
-	private static String generateConstructor(
-			final String constructorName,
-			final List<Field> fields,
-			final boolean shouldThrowOnOneOfNull,
-			final MessageDefContext msgDef,
-			final ContextualLookupHelper lookupHelper) {
-		if (fields.isEmpty()) {
-			return "";
-		}
-		return """
+			"""
+                        .replace("$hashCodeManipulation", HASH_CODE_MANIPULATION)
+                        .indent(DEFAULT_INDENT);
+        return bodyContent;
+    }
+
+    /**
+     * Generates a pre-populated constructor for a class.
+     *
+     * @param constructorName the name of the constructor to generate
+     * @param fields the fields to use for the code generation
+     * @param shouldThrowOnOneOfNull whether the constructor should throw on a null OneOf field
+     * @param msgDef the message definition
+     * @param lookupHelper the lookup helper
+     * @return the generated code
+     */
+    private static String generateConstructor(
+            final String constructorName,
+            final List<Field> fields,
+            final boolean shouldThrowOnOneOfNull,
+            final MessageDefContext msgDef,
+            final ContextualLookupHelper lookupHelper) {
+        if (fields.isEmpty()) {
+            return "";
+        }
+        return """
 			    /**
 			     * Create a pre-populated $constructorName.
 			     * $constructorParamDocs
@@ -387,55 +449,107 @@ private static String generateConstructor(
 			    public $constructorName($constructorParams) {
 			$constructorCode    }
 			"""
-				.replace("$constructorParamDocs",fields.stream().map(field ->
-						"\n     * @param "+field.nameCamelFirstLower()+" "+
-								field.comment().replaceAll("\n", "\n     *         "+" ".repeat(field.nameCamelFirstLower().length()))
-				).collect(Collectors.joining(", ")))
-				.replace("$constructorName", constructorName)
-				.replace("$constructorParams",fields.stream().map(field ->
-						field.javaFieldType() + " " + field.nameCamelFirstLower()
-				).collect(Collectors.joining(", ")))
-				.replace("$constructorCode",fields.stream().map(field -> {
-					StringBuilder sb = new StringBuilder();
-					if (shouldThrowOnOneOfNull && field instanceof OneOfField) {
-						sb.append(generateConstructorCodeForField(field)).append('\n');
-					}
-					switch (field.type()) {
-						case BYTES, STRING: {
-							sb.append("this.$name = $name != null ? $name : $default;"
-									.replace("$name", field.nameCamelFirstLower())
-									.replace("$default", getDefaultValue(field, msgDef, lookupHelper))
-							);
-							break;
-						}
-						case MAP: {
-							sb.append("this.$name = PbjMap.of($name);"
-									.replace("$name", field.nameCamelFirstLower())
-							);
-							break;
-						}
-						default:
-							if (field.repeated()) {
-								sb.append("this.$name = $name == null ? Collections.emptyList() : $name;".replace("$name", field.nameCamelFirstLower()));
-							} else {
-								sb.append("this.$name = $name;".replace("$name", field.nameCamelFirstLower()));
-							}
-							break;
-					}
-					return sb.toString();
-				}).collect(Collectors.joining("\n")).indent(DEFAULT_INDENT * 2));
-	}
-
-	/**
-	 * Generates constructor code for the class
-	 * @param fields the fields to use for the code generation
-	 * @param javaRecordName the name of the class
-	 * @return the generated code
-	 */
-	@NonNull
-	private static String generateConstructor(final List<Field> fields, final String javaRecordName) {
-		return """
-				     
+                .replace(
+                        "$constructorParamDocs",
+                        fields.stream()
+                                .map(
+                                        field ->
+                                                "\n     * @param "
+                                                        + field.nameCamelFirstLower()
+                                                        + " "
+                                                        + field.comment()
+                                                                .replaceAll(
+                                                                        "\n",
+                                                                        "\n     *         "
+                                                                                + " "
+                                                                                        .repeat(
+                                                                                                field.nameCamelFirstLower()
+                                                                                                        .length())))
+                                .collect(Collectors.joining(", ")))
+                .replace("$constructorName", constructorName)
+                .replace(
+                        "$constructorParams",
+                        fields.stream()
+                                .map(
+                                        field ->
+                                                field.javaFieldType()
+                                                        + " "
+                                                        + field.nameCamelFirstLower())
+                                .collect(Collectors.joining(", ")))
+                .replace(
+                        "$constructorCode",
+                        fields.stream()
+                                .map(
+                                        field -> {
+                                            StringBuilder sb = new StringBuilder();
+                                            if (shouldThrowOnOneOfNull
+                                                    && field instanceof OneOfField) {
+                                                sb.append(generateConstructorCodeForField(field))
+                                                        .append('\n');
+                                            }
+                                            switch (field.type()) {
+                                                case BYTES, STRING:
+                                                    {
+                                                        sb.append(
+                                                                "this.$name = $name != null ? $name : $default;"
+                                                                        .replace(
+                                                                                "$name",
+                                                                                field
+                                                                                        .nameCamelFirstLower())
+                                                                        .replace(
+                                                                                "$default",
+                                                                                getDefaultValue(
+                                                                                        field,
+                                                                                        msgDef,
+                                                                                        lookupHelper)));
+                                                        break;
+                                                    }
+                                                case MAP:
+                                                    {
+                                                        sb.append(
+                                                                "this.$name = PbjMap.of($name);"
+                                                                        .replace(
+                                                                                "$name",
+                                                                                field
+                                                                                        .nameCamelFirstLower()));
+                                                        break;
+                                                    }
+                                                default:
+                                                    if (field.repeated()) {
+                                                        sb.append(
+                                                                "this.$name = $name == null ? Collections.emptyList() : $name;"
+                                                                        .replace(
+                                                                                "$name",
+                                                                                field
+                                                                                        .nameCamelFirstLower()));
+                                                    } else {
+                                                        sb.append(
+                                                                "this.$name = $name;"
+                                                                        .replace(
+                                                                                "$name",
+                                                                                field
+                                                                                        .nameCamelFirstLower()));
+                                                    }
+                                                    break;
+                                            }
+                                            return sb.toString();
+                                        })
+                                .collect(Collectors.joining("\n"))
+                                .indent(DEFAULT_INDENT * 2));
+    }
+
+    /**
+     * Generates constructor code for the class
+     *
+     * @param fields the fields to use for the code generation
+     * @param javaRecordName the name of the class
+     * @return the generated code
+     */
+    @NonNull
+    private static String generateConstructor(
+            final List<Field> fields, final String javaRecordName) {
+        return """
+
 				/**
 				 * Override the default constructor adding input validation
 				 * %s
@@ -444,94 +558,123 @@ private static String generateConstructor(final List<Field> fields, final String
 				%s
 				}
 				"""
-				.formatted(
-						fields.stream().map(field -> "\n * @param " + field.nameCamelFirstLower() + " " +
-								field.comment()
-										.replaceAll("\n", "\n *         " + " ".repeat(field.nameCamelFirstLower().length()))
-						).collect(Collectors.joining()),
-						javaRecordName,
-						fields.stream()
-								.filter(f -> f instanceof OneOfField)
-								.map(ModelGenerator::generateConstructorCodeForField)
-								.collect(Collectors.joining("\n"))
-				)
-				.indent(DEFAULT_INDENT);
-	}
-
-	/**
-	 * Generates the constructor code for the class
-	 * @param f the field to use for the code generation
-	 * @return the generated code
-	 */
-	private static String generateConstructorCodeForField(final Field f) {
-		final StringBuilder sb = new StringBuilder("""
+                .formatted(
+                        fields.stream()
+                                .map(
+                                        field ->
+                                                "\n * @param "
+                                                        + field.nameCamelFirstLower()
+                                                        + " "
+                                                        + field.comment()
+                                                                .replaceAll(
+                                                                        "\n",
+                                                                        "\n *         "
+                                                                                + " "
+                                                                                        .repeat(
+                                                                                                field.nameCamelFirstLower()
+                                                                                                        .length())))
+                                .collect(Collectors.joining()),
+                        javaRecordName,
+                        fields.stream()
+                                .filter(f -> f instanceof OneOfField)
+                                .map(ModelGenerator::generateConstructorCodeForField)
+                                .collect(Collectors.joining("\n")))
+                .indent(DEFAULT_INDENT);
+    }
+
+    /**
+     * Generates the constructor code for the class
+     *
+     * @param f the field to use for the code generation
+     * @return the generated code
+     */
+    private static String generateConstructorCodeForField(final Field f) {
+        final StringBuilder sb =
+                new StringBuilder(
+                        """
 								if ($fieldName == null) {
 								    throw new NullPointerException("Parameter '$fieldName' must be supplied and can not be null");
-								}""".replace("$fieldName", f.nameCamelFirstLower()));
-		if (f instanceof final OneOfField oof) {
-			for (final Field subField: oof.fields()) {
-				if (subField.optionalValueType()) {
-					sb.append("""
-       
+								}"""
+                                .replace("$fieldName", f.nameCamelFirstLower()));
+        if (f instanceof final OneOfField oof) {
+            for (final Field subField : oof.fields()) {
+                if (subField.optionalValueType()) {
+                    sb.append(
+                            """
+
 							// handle special case where protobuf does not have destination between a OneOf with optional
 							// value of empty vs an unset OneOf.
 							if ($fieldName.kind() == $fieldUpperNameOneOfType.$subFieldNameUpper && $fieldName.value() == null) {
 								$fieldName = new $className<>($fieldUpperNameOneOfType.UNSET, null);
 							}"""
-							.replace("$className", oof.className())
-							.replace("$fieldName", f.nameCamelFirstLower())
-							.replace("$fieldUpperName", f.nameCamelFirstUpper())
-							.replace("$subFieldNameUpper", camelToUpperSnake(subField.name()))
-					);
-				}
-			}
-		}
-		return sb.toString().indent(DEFAULT_INDENT);
-	}
-
-	/**
-	 * Generates codec fields for the calss
-	 * @param msgDef the message definition
-	 * @param lookupHelper the lookup helper
-	 * @param javaRecordName the name of the class
-	 * @return the generated code
-	 */
-	@NonNull
-	private static String generateCodecFields(final MessageDefContext msgDef, final ContextualLookupHelper lookupHelper, final String javaRecordName) {
-		return """
+                                    .replace("$className", oof.className())
+                                    .replace("$fieldName", f.nameCamelFirstLower())
+                                    .replace("$fieldUpperName", f.nameCamelFirstUpper())
+                                    .replace(
+                                            "$subFieldNameUpper",
+                                            camelToUpperSnake(subField.name())));
+                }
+            }
+        }
+        return sb.toString().indent(DEFAULT_INDENT);
+    }
+
+    /**
+     * Generates codec fields for the class
+     *
+     * @param msgDef the message definition
+     * @param lookupHelper the lookup helper
+     * @param javaRecordName the name of the class
+     * @return the generated code
+     */
+    @NonNull
+    private static String generateCodecFields(
+            final MessageDefContext msgDef,
+            final ContextualLookupHelper lookupHelper,
+            final String javaRecordName) {
+        return """
 				/** Protobuf codec for reading and writing in protobuf format */
 				public static final Codec<$modelClass> PROTOBUF = new $qualifiedCodecClass();
 				/** JSON codec for reading and writing in JSON format */
 				public static final JsonCodec<$modelClass> JSON = new $qualifiedJsonCodecClass();
-								
+
 				/** Default instance with all fields set to default values */
 				public static final $modelClass DEFAULT = newBuilder().build();
 				"""
-				.replace("$modelClass", javaRecordName)
-				.replace("$qualifiedCodecClass", lookupHelper.getFullyQualifiedMessageClassname(FileType.CODEC, msgDef))
-				.replace("$qualifiedJsonCodecClass", lookupHelper.getFullyQualifiedMessageClassname(FileType.JSON_CODEC, msgDef))
-				.indent(DEFAULT_INDENT);
-	}
-
-	/**
-	 * Generates accessor fields for the class
-	 * @param item message element context provided by the parser
-	 * @param fields the fields to use for the code generation
-	 * @param imports the imports to use for the code generation
-	 * @param hasMethods the has methods to use for the code generation
-	 */
-	private static void generateCodeForField(final ContextualLookupHelper lookupHelper,
-                                             final Protobuf3Parser.MessageElementContext item,
-                                             final List<Field> fields,
-                                             final Set<String> imports,
-                                             final List<String> hasMethods) {
-		final SingleField field = new SingleField(item.field(), lookupHelper);
-		fields.add(field);
-		field.addAllNeededImports(imports, true, false, false);
-		// Note that repeated fields default to empty list, so technically they always have a non-null value,
-		// and therefore the additional convenience methods, especially when they throw an NPE, don't make sense.
-		if (field.type() == FieldType.MESSAGE && !field.repeated()) {
-			hasMethods.add("""
+                .replace("$modelClass", javaRecordName)
+                .replace(
+                        "$qualifiedCodecClass",
+                        lookupHelper.getFullyQualifiedMessageClassname(FileType.CODEC, msgDef))
+                .replace(
+                        "$qualifiedJsonCodecClass",
+                        lookupHelper.getFullyQualifiedMessageClassname(FileType.JSON_CODEC, msgDef))
+                .indent(DEFAULT_INDENT);
+    }
+
+    /**
+     * Generates accessor fields for the class
+     *
+     * @param lookupHelper the lookup helper
+     * @param item message element context provided by the parser
+     * @param fields the fields to use for the code generation
+     * @param imports the imports to use for the code generation
+     * @param hasMethods the has methods to use for the code generation
+     */
+    private static void generateCodeForField(
+            final ContextualLookupHelper lookupHelper,
+            final Protobuf3Parser.MessageElementContext item,
+            final List<Field> fields,
+            final Set<String> imports,
+            final List<String> hasMethods) {
+        final SingleField field = new SingleField(item.field(), lookupHelper);
+        fields.add(field);
+        field.addAllNeededImports(imports, true, false, false);
+        // Note that repeated fields default to an empty list, so technically they always have a
+        // non-null value, and therefore the additional convenience methods, especially when they
+        // throw an NPE, don't make sense.
+        if (field.type() == FieldType.MESSAGE && !field.repeated()) {
+            hasMethods.add(
+                    """
 					/**
 					 * Convenience method to check if the $fieldName has a value
 					 *
@@ -540,7 +683,7 @@ private static void generateCodeForField(final ContextualLookupHelper lookupHelp
 					public boolean has$fieldNameUpperFirst() {
 						return $fieldName != null;
 					}
-					
+
 					/**
 					 * Gets the value for $fieldName if it has a value, or else returns the default
 					 * value for the type.
@@ -551,7 +694,7 @@ private static void generateCodeForField(final ContextualLookupHelper lookupHelp
 					public $javaFieldType $fieldNameOrElse(@NonNull final $javaFieldType defaultValue) {
 						return has$fieldNameUpperFirst() ? $fieldName : defaultValue;
 					}
-					
+
 					/**
 					 * Gets the value for $fieldName if it has a value, or else throws an NPE.
 					 * value for the type.
@@ -562,7 +705,7 @@ private static void generateCodeForField(final ContextualLookupHelper lookupHelp
 					public @NonNull $javaFieldType $fieldNameOrThrow() {
 						return requireNonNull($fieldName, "Field $fieldName is null");
 					}
-					
+
 					/**
 					 * Executes the supplied {@link Consumer} if, and only if, the $fieldName has a value
 					 *
@@ -574,45 +717,50 @@ private static void generateCodeForField(final ContextualLookupHelper lookupHelp
 						}
 					}
 					"""
-					.replace("$fieldNameUpperFirst", field.nameCamelFirstUpper())
-					.replace("$javaFieldType", field.javaFieldType())
-					.replace("$fieldName", field.nameCamelFirstLower())
-					.indent(DEFAULT_INDENT)
-			);
-		}
-	}
-
-	/**
-	 * Generates the code related to the oneof field
-	 * @param lookupHelper the lookup helper
-	 * @param item message element context provided by the parser
-	 * @param javaRecordName the name of the class
-	 * @param imports the imports to use for the code generation
-	 * @param oneofEnums the oneof enums to use for the code generation
-	 * @param fields the fields to use for the code generation
-	 * @return the generated code
-	 */
-	private static List<String>  generateCodeForOneOf(final ContextualLookupHelper lookupHelper,
-                                                      final Protobuf3Parser.MessageElementContext item,
-                                                      final String javaRecordName,
-                                                      final Set<String> imports,
-                                                      final List<String> oneofEnums,
-                                                      final List<Field> fields) {
-		final List<String> oneofGetters = new ArrayList<>();
-		final var oneOfField = new OneOfField(item.oneof(), javaRecordName, lookupHelper);
-		final var enumName = oneOfField.nameCamelFirstUpper() + "OneOfType";
-		final int maxIndex = oneOfField.fields().get(oneOfField.fields().size() - 1).fieldNumber();
-		final Map<Integer, EnumValue> enumValues = new HashMap<>();
-		for (final var field : oneOfField.fields()) {
-			final String javaFieldType = javaPrimitiveToObjectType(field.javaFieldType());
-			final String enumComment = cleanDocStr(field.comment())
-				.replaceAll("[\t\s]*/\\*\\*","") // remove doc start indenting
-				.replaceAll("\n[\t\s]+\\*","\n") // remove doc indenting
-				.replaceAll("/\\*\\*","") //  remove doc start
-				.replaceAll("\\*\\*/",""); //  remove doc end
-			enumValues.put(field.fieldNumber(), new EnumValue(field.name(), field.deprecated(), enumComment));
-			// generate getters for one ofs
-			oneofGetters.add("""
+                            .replace("$fieldNameUpperFirst", field.nameCamelFirstUpper())
+                            .replace("$javaFieldType", field.javaFieldType())
+                            .replace("$fieldName", field.nameCamelFirstLower())
+                            .indent(DEFAULT_INDENT));
+        }
+    }
+
+    /**
+     * Generates the code related to the oneof field
+     *
+     * @param lookupHelper the lookup helper
+     * @param item message element context provided by the parser
+     * @param javaRecordName the name of the class
+     * @param imports the imports to use for the code generation
+     * @param oneofEnums the oneof enums to use for the code generation
+     * @param fields the fields to use for the code generation
+     * @return the generated code
+     */
+    private static List<String> generateCodeForOneOf(
+            final ContextualLookupHelper lookupHelper,
+            final Protobuf3Parser.MessageElementContext item,
+            final String javaRecordName,
+            final Set<String> imports,
+            final List<String> oneofEnums,
+            final List<Field> fields) {
+        final List<String> oneofGetters = new ArrayList<>();
+        final var oneOfField = new OneOfField(item.oneof(), javaRecordName, lookupHelper);
+        final var enumName = oneOfField.nameCamelFirstUpper() + "OneOfType";
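+        // Field number of the last oneof child, used below when creating the oneof type enum.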
+        final int maxIndex = oneOfField.fields().get(oneOfField.fields().size() - 1).fieldNumber();
+        final Map<Integer, EnumValue> enumValues = new HashMap<>();
+        for (final var field : oneOfField.fields()) {
+            final String javaFieldType = javaPrimitiveToObjectType(field.javaFieldType());
+            final String enumComment =
+                    cleanDocStr(field.comment())
+                            .replaceAll("[\t\s]*/\\*\\*", "") // remove doc start indenting
+                            .replaceAll("\n[\t\s]+\\*", "\n") // remove doc indenting
+                            .replaceAll("/\\*\\*", "") //  remove doc start
+                            .replaceAll("\\*\\*/", ""); //  remove doc end
+            enumValues.put(
+                    field.fieldNumber(),
+                    new EnumValue(field.name(), field.deprecated(), enumComment));
+            // generate getters for one ofs
+            oneofGetters.add(
+                    """
 					/**
 					 * Direct typed getter for one of field $fieldName.
 					 *
@@ -621,7 +769,7 @@ private static List<String>  generateCodeForOneOf(final ContextualLookupHelper l
 					public @Nullable $javaFieldType $fieldName() {
 						return $oneOfField.kind() == $enumName.$enumValue ? ($javaFieldType)$oneOfField.value() : null;
 					}
-					
+
 					/**
 					 * Convenience method to check if the $oneOfField has a one-of with type $enumValue
 					 *
@@ -630,7 +778,7 @@ private static List<String>  generateCodeForOneOf(final ContextualLookupHelper l
 					public boolean has$fieldNameUpperFirst() {
 						return $oneOfField.kind() == $enumName.$enumValue;
 					}
-					
+
 					/**
 					 * Gets the value for $fieldName if it has a value, or else returns the default
 					 * value for the type.
@@ -641,7 +789,7 @@ private static List<String>  generateCodeForOneOf(final ContextualLookupHelper l
 					public $javaFieldType $fieldNameOrElse(@NonNull final $javaFieldType defaultValue) {
 						return has$fieldNameUpperFirst() ? $fieldName() : defaultValue;
 					}
-					
+
 					/**
 					 * Gets the value for $fieldName if it was set, or throws a NullPointerException if it was not set.
 					 *
@@ -652,34 +800,37 @@ private static List<String>  generateCodeForOneOf(final ContextualLookupHelper l
 						return requireNonNull($fieldName(), "Field $fieldName is null");
 					}
 					"""
-					.replace("$fieldNameUpperFirst",field.nameCamelFirstUpper())
-					.replace("$fieldName",field.nameCamelFirstLower())
-					.replace("$javaFieldType",javaFieldType)
-					.replace("$oneOfField",oneOfField.nameCamelFirstLower())
-					.replace("$enumName",enumName)
-					.replace("$enumValue",camelToUpperSnake(field.name()))
-					.indent(DEFAULT_INDENT)
-			);
-			if (field.type() == FieldType.MESSAGE) {
-				field.addAllNeededImports(imports, true, false, false);
-			}
-		}
-		final String enumComment = """
+                            .replace("$fieldNameUpperFirst", field.nameCamelFirstUpper())
+                            .replace("$fieldName", field.nameCamelFirstLower())
+                            .replace("$javaFieldType", javaFieldType)
+                            .replace("$oneOfField", oneOfField.nameCamelFirstLower())
+                            .replace("$enumName", enumName)
+                            .replace("$enumValue", camelToUpperSnake(field.name()))
+                            .indent(DEFAULT_INDENT));
+            if (field.type() == FieldType.MESSAGE) {
+                field.addAllNeededImports(imports, true, false, false);
+            }
+        }
+        final String enumComment =
+                """
 							/**
 							 * Enum for the type of "%s" oneof value
-							 */""".formatted(oneOfField.name());
-		final String enumString = createEnum(enumComment ,"",enumName,maxIndex,enumValues, true)
-				.indent(DEFAULT_INDENT * 2);
-		oneofEnums.add(enumString);
-		fields.add(oneOfField);
-		imports.add("com.hedera.pbj.runtime");
-		return oneofGetters;
-	}
-
-	@NonNull
-	private static String genrateBuilderFactoryMethods(String bodyContent, final List<Field> fields) {
-		bodyContent +=
-    		"""
+							 */"""
+                        .formatted(oneOfField.name());
+        final String enumString =
+                createEnum(enumComment, "", enumName, maxIndex, enumValues, true)
+                        .indent(DEFAULT_INDENT * 2);
+        oneofEnums.add(enumString);
+        fields.add(oneOfField);
+        imports.add("com.hedera.pbj.runtime");
+        return oneofGetters;
+    }
+
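+    /**
+     * Appends the copyBuilder() and newBuilder() factory methods to the body content.
+     *
+     * @param bodyContent the body content generated so far
+     * @param fields the fields to use for the code generation
+     * @return the body content with the builder factory methods appended
+     */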
+    @NonNull
+    private static String genrateBuilderFactoryMethods(
+            String bodyContent, final List<Field> fields) {
+        bodyContent +=
+                """
 			/**
 			 * Return a builder for building a copy of this model object. It will be pre-populated with all the data from this
 			 * model object.
@@ -689,7 +840,7 @@ private static String genrateBuilderFactoryMethods(String bodyContent, final Lis
 			public Builder copyBuilder() {
 			    return new Builder(%s);
 			}
-			
+
 			/**
 			 * Return a new builder for building a model object. This is just a shortcut for <code>new Model.Builder()</code>.
 			 *
@@ -699,35 +850,44 @@ public static Builder newBuilder() {
 			    return new Builder();
 			}
 			"""
-			.formatted(fields.stream().map(Field::nameCamelFirstLower).collect(Collectors.joining(", ")))
-			.indent(DEFAULT_INDENT);
-		return bodyContent;
-	}
-
-	private static void generateBuilderMethods(
-			final List<String> builderMethods,
-			final MessageDefContext msgDef,
-			final Field field,
-			final ContextualLookupHelper lookupHelper) {
-		final String prefix, postfix, fieldToSet;
-		final String fieldAnnotations = getFieldAnnotations(field);
-		final OneOfField parentOneOfField = field.parent();
-		final String fieldName = field.nameCamelFirstLower();
-		if (parentOneOfField != null) {
-			final String oneOfEnumValue = parentOneOfField.getEnumClassRef() + "." + camelToUpperSnake(field.name());
-			prefix = " new %s<>(".formatted(parentOneOfField.className()) + oneOfEnumValue + ",";
-			postfix = ")";
-			fieldToSet = parentOneOfField.nameCamelFirstLower();
-		} else if (fieldAnnotations.contains(NON_NULL_ANNOTATION)) {
-			prefix = "";
-			postfix = " != null ? " + fieldName + " : " + getDefaultValue(field, msgDef, lookupHelper);
-			fieldToSet = fieldName;
-		} else {
-			prefix = "";
-			postfix = "";
-			fieldToSet = fieldName;
-		}
-		builderMethods.add("""
+                        .formatted(
+                                fields.stream()
+                                        .map(Field::nameCamelFirstLower)
+                                        .collect(Collectors.joining(", ")))
+                        .indent(DEFAULT_INDENT);
+        return bodyContent;
+    }
+
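+    /**
+     * Generates builder setter methods for a single field and adds them to the given list,
+     * including convenience overloads for message-typed and repeated fields.
+     *
+     * @param builderMethods the list of builder methods to append to
+     * @param msgDef the message definition
+     * @param field the field to generate setter methods for
+     * @param lookupHelper the lookup helper
+     */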
+    private static void generateBuilderMethods(
+            final List<String> builderMethods,
+            final MessageDefContext msgDef,
+            final Field field,
+            final ContextualLookupHelper lookupHelper) {
+        final String prefix, postfix, fieldToSet;
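+        // prefix and postfix wrap the value assigned in the generated setter (e.g. a OneOf wrapper
+        // or a null-to-default check), and fieldToSet is the builder field the setter assigns to.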
+        final String fieldAnnotations = getFieldAnnotations(field);
+        final OneOfField parentOneOfField = field.parent();
+        final String fieldName = field.nameCamelFirstLower();
+        if (parentOneOfField != null) {
+            final String oneOfEnumValue =
+                    parentOneOfField.getEnumClassRef() + "." + camelToUpperSnake(field.name());
+            prefix = " new %s<>(".formatted(parentOneOfField.className()) + oneOfEnumValue + ",";
+            postfix = ")";
+            fieldToSet = parentOneOfField.nameCamelFirstLower();
+        } else if (fieldAnnotations.contains(NON_NULL_ANNOTATION)) {
+            prefix = "";
+            postfix =
+                    " != null ? "
+                            + fieldName
+                            + " : "
+                            + getDefaultValue(field, msgDef, lookupHelper);
+            fieldToSet = fieldName;
+        } else {
+            prefix = "";
+            postfix = "";
+            fieldToSet = fieldName;
+        }
+        builderMethods.add(
+                """
 						/**
 						 * $fieldDoc
 						 *
@@ -738,19 +898,20 @@ private static void generateBuilderMethods(
 						    this.$fieldToSet = $prefix$fieldName$postfix;
 						    return this;
 						}"""
-				.replace("$fieldDoc",field.comment()
-						.replaceAll("\n", "\n * "))
-				.replace("$fieldName", fieldName)
-				.replace("$fieldToSet",fieldToSet)
-				.replace("$prefix",prefix)
-				.replace("$postfix",postfix)
-				.replace("$fieldAnnotations", fieldAnnotations)
-				.replace("$fieldType",field.javaFieldType())
-				.indent(DEFAULT_INDENT)
-		);
-		// add nice method for simple message fields so can just set using un-built builder
-		if (field.type() == Field.FieldType.MESSAGE && !field.optionalValueType() && !field.repeated()) {
-			builderMethods.add("""
+                        .replace("$fieldDoc", field.comment().replaceAll("\n", "\n * "))
+                        .replace("$fieldName", fieldName)
+                        .replace("$fieldToSet", fieldToSet)
+                        .replace("$prefix", prefix)
+                        .replace("$postfix", postfix)
+                        .replace("$fieldAnnotations", fieldAnnotations)
+                        .replace("$fieldType", field.javaFieldType())
+                        .indent(DEFAULT_INDENT));
+        // Add a convenience method for simple message fields so they can be set
+        // using an un-built builder.
+        if (field.type() == Field.FieldType.MESSAGE
+                && !field.optionalValueType()
+                && !field.repeated()) {
+            builderMethods.add(
+                    """
 						/**
 						 * $fieldDoc
 						 *
@@ -761,35 +922,41 @@ private static void generateBuilderMethods(
 						    this.$fieldToSet =$prefix builder.build() $postfix;
 						    return this;
 						}"""
-					.replace("$messageClass",field.messageType())
-					.replace("$fieldDoc",field.comment()
-							.replaceAll("\n", "\n * "))
-					.replace("$fieldName", fieldName)
-					.replace("$fieldToSet",fieldToSet)
-					.replace("$prefix",prefix)
-					.replace("$postfix",postfix)
-					.replace("$fieldType",field.javaFieldType())
-					.indent(DEFAULT_INDENT)
-			);
-		}
+                            .replace("$messageClass", field.messageType())
+                            .replace("$fieldDoc", field.comment().replaceAll("\n", "\n * "))
+                            .replace("$fieldName", fieldName)
+                            .replace("$fieldToSet", fieldToSet)
+                            .replace("$prefix", prefix)
+                            .replace("$postfix", postfix)
+                            .replace("$fieldType", field.javaFieldType())
+                            .indent(DEFAULT_INDENT));
+        }
 
-		// add nice method for message fields with list types for varargs
-		if (field.repeated()) {
-			// Need to re-define the prefix and postfix for repeated fields because they don't use `values` directly
-			// but wrap it in List.of(values) instead, so the simple definitions above don't work here.
-			final String repeatedPrefix;
-			final String repeatedPostfix;
-			if (parentOneOfField != null) {
-				repeatedPrefix = prefix + " values == null ? " + getDefaultValue(field, msgDef, lookupHelper) + " : ";
-				repeatedPostfix = postfix;
-			} else if (fieldAnnotations.contains(NON_NULL_ANNOTATION)) {
-				repeatedPrefix = "values == null ? " + getDefaultValue(field, msgDef, lookupHelper) + " : ";
-				repeatedPostfix = "";
-			} else {
-				repeatedPrefix = prefix;
-				repeatedPostfix = postfix;
-			}
-			builderMethods.add("""
+        // Add a varargs convenience method for repeated (list-typed) fields.
+        if (field.repeated()) {
+            // Need to re-define the prefix and postfix for repeated fields because they don't use
+            // `values` directly but wrap it in List.of(values) instead, so the simple definitions
+            // above don't work here.
+            final String repeatedPrefix;
+            final String repeatedPostfix;
+            if (parentOneOfField != null) {
+                repeatedPrefix =
+                        prefix
+                                + " values == null ? "
+                                + getDefaultValue(field, msgDef, lookupHelper)
+                                + " : ";
+                repeatedPostfix = postfix;
+            } else if (fieldAnnotations.contains(NON_NULL_ANNOTATION)) {
+                repeatedPrefix =
+                        "values == null ? " + getDefaultValue(field, msgDef, lookupHelper) + " : ";
+                repeatedPostfix = "";
+            } else {
+                repeatedPrefix = prefix;
+                repeatedPostfix = postfix;
+            }
+            builderMethods.add(
+                    """
 						/**
 						 * $fieldDoc
 						 *
@@ -800,54 +967,61 @@ private static void generateBuilderMethods(
 						    this.$fieldToSet = $repeatedPrefix List.of(values) $repeatedPostfix;
 						    return this;
 						}"""
-					.replace("$baseType",field.javaFieldType().substring("List<".length(),field.javaFieldType().length()-1))
-					.replace("$fieldDoc",field.comment()
-							.replaceAll("\n", "\n * "))
-					.replace("$fieldName", fieldName)
-					.replace("$fieldToSet",fieldToSet)
-					.replace("$fieldType",field.javaFieldType())
-					.replace("$repeatedPrefix",repeatedPrefix)
-					.replace("$repeatedPostfix",repeatedPostfix)
-					.indent(DEFAULT_INDENT)
-			);
-		}
-	}
-
-	/**
-	 * Generates the builder for the class
-	 * @param msgDef the message definition
-	 * @param fields the fields to use for the code generation
-	 * @param lookupHelper the lookup helper
-	 * @return the generated code
-	 */
-	private static String generateBuilder(final MessageDefContext msgDef, final List<Field> fields, final ContextualLookupHelper lookupHelper) {
-		final String javaRecordName = msgDef.messageName().getText();
-		final List<String> builderMethods = new ArrayList<>();
-		for (final Field field: fields) {
-			if (field.type() == Field.FieldType.ONE_OF) {
-				final OneOfField oneOfField = (OneOfField) field;
-				for (final Field subField: oneOfField.fields()) {
-					generateBuilderMethods(builderMethods, msgDef, subField, lookupHelper);
-				}
-			} else {
-				generateBuilderMethods(builderMethods, msgDef, field, lookupHelper);
-			}
-		}
-		return """
+                            .replace(
+                                    "$baseType",
+                                    field.javaFieldType()
+                                            .substring(
+                                                    "List<".length(),
+                                                    field.javaFieldType().length() - 1))
+                            .replace("$fieldDoc", field.comment().replaceAll("\n", "\n * "))
+                            .replace("$fieldName", fieldName)
+                            .replace("$fieldToSet", fieldToSet)
+                            .replace("$fieldType", field.javaFieldType())
+                            .replace("$repeatedPrefix", repeatedPrefix)
+                            .replace("$repeatedPostfix", repeatedPostfix)
+                            .indent(DEFAULT_INDENT));
+        }
+    }
+
+    /**
+     * Generates the builder for the class
+     *
+     * @param msgDef the message definition
+     * @param fields the fields to use for the code generation
+     * @param lookupHelper the lookup helper
+     * @return the generated code
+     */
+    private static String generateBuilder(
+            final MessageDefContext msgDef,
+            final List<Field> fields,
+            final ContextualLookupHelper lookupHelper) {
+        final String javaRecordName = msgDef.messageName().getText();
+        final List<String> builderMethods = new ArrayList<>();
+        for (final Field field : fields) {
+            if (field.type() == Field.FieldType.ONE_OF) {
+                final OneOfField oneOfField = (OneOfField) field;
+                for (final Field subField : oneOfField.fields()) {
+                    generateBuilderMethods(builderMethods, msgDef, subField, lookupHelper);
+                }
+            } else {
+                generateBuilderMethods(builderMethods, msgDef, field, lookupHelper);
+            }
+        }
+        return """
 			/**
 			 * Builder class for easy creation, ideal for clean code where performance is not critical. In critical performance
 			 * paths use the constructor directly.
 			 */
 			public static final class Builder {
 			    $fields;
-		
+
 			    /**
 			     * Create an empty builder
 			     */
 			    public Builder() {}
-			    
+
 			    $prePopulatedBuilder
-		
+
 			    /**
 			     * Build a new model record with data set on builder
 			     *
@@ -856,34 +1030,53 @@ public Builder() {}
 			    public $javaRecordName build() {
 			        return new $javaRecordName($recordParams);
 			    }
-		
+
 			    $builderMethods}"""
-				.replace("$fields", fields.stream().map(field ->
-						getFieldAnnotations(field)
-								+ "private " + field.javaFieldType()
-								+ " " + field.nameCamelFirstLower()
-								+ " = " + getDefaultValue(field, msgDef, lookupHelper)
-						).collect(Collectors.joining(";\n    ")))
-				.replace("$prePopulatedBuilder", generateConstructor("Builder", fields, false, msgDef, lookupHelper))
-				.replace("$javaRecordName",javaRecordName)
-				.replace("$recordParams",fields.stream().map(Field::nameCamelFirstLower).collect(Collectors.joining(", ")))
-				.replace("$builderMethods", String.join("\n", builderMethods))
-				.indent(DEFAULT_INDENT);
-	}
-
-	/**
-	 * Gets the default value for the field
-	 * @param field the field to use for the code generation
-	 * @param msgDef the message definition
-	 * @param lookupHelper the lookup helper
-	 * @return the generated code
-	 */
-	private static String getDefaultValue(final Field field, final MessageDefContext msgDef, final ContextualLookupHelper lookupHelper) {
-		if (field.type() == Field.FieldType.ONE_OF) {
-			return lookupHelper.getFullyQualifiedMessageClassname(FileType.CODEC, msgDef)+"."+field.javaDefault();
-		} else {
-			return field.javaDefault();
-		}
-	}
+                .replace(
+                        "$fields",
+                        fields.stream()
+                                .map(
+                                        field ->
+                                                getFieldAnnotations(field)
+                                                        + "private "
+                                                        + field.javaFieldType()
+                                                        + " "
+                                                        + field.nameCamelFirstLower()
+                                                        + " = "
+                                                        + getDefaultValue(
+                                                                field, msgDef, lookupHelper))
+                                .collect(Collectors.joining(";\n    ")))
+                .replace(
+                        "$prePopulatedBuilder",
+                        generateConstructor("Builder", fields, false, msgDef, lookupHelper))
+                .replace("$javaRecordName", javaRecordName)
+                .replace(
+                        "$recordParams",
+                        fields.stream()
+                                .map(Field::nameCamelFirstLower)
+                                .collect(Collectors.joining(", ")))
+                .replace("$builderMethods", String.join("\n", builderMethods))
+                .indent(DEFAULT_INDENT);
+    }
 
+    /**
+     * Gets the default value for the field
+     *
+     * @param field the field to use for the code generation
+     * @param msgDef the message definition
+     * @param lookupHelper the lookup helper
+     * @return the default value expression for the field
+     */
+    private static String getDefaultValue(
+            final Field field,
+            final MessageDefContext msgDef,
+            final ContextualLookupHelper lookupHelper) {
+        if (field.type() == Field.FieldType.ONE_OF) {
+            return lookupHelper.getFullyQualifiedMessageClassname(FileType.CODEC, msgDef)
+                    + "."
+                    + field.javaDefault();
+        } else {
+            return field.javaDefault();
+        }
+    }
 }
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/SchemaGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/SchemaGenerator.java
index ed61adb6..d4df4d0f 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/SchemaGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/SchemaGenerator.java
@@ -3,7 +3,6 @@
 
 import com.hedera.pbj.compiler.impl.*;
 import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser;
-
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
@@ -14,67 +13,78 @@
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
-/**
- * Code generator that parses protobuf files and generates schemas for each message type.
- */
+/** Code generator that parses protobuf files and generates schemas for each message type. */
 public final class SchemaGenerator implements Generator {
 
-	/**
-	 * {@inheritDoc}
-	 */
-	public void generate(final Protobuf3Parser.MessageDefContext msgDef, final File destinationSrcDir,
-						 File destinationTestSrcDir, final ContextualLookupHelper lookupHelper) throws IOException {
-		final String modelClassName = lookupHelper.getUnqualifiedClassForMessage(FileType.MODEL, msgDef);
-		final String schemaClassName = lookupHelper.getUnqualifiedClassForMessage(FileType.SCHEMA, msgDef);
-		final String schemaPackage = lookupHelper.getPackageForMessage(FileType.SCHEMA, msgDef);
-		final File javaFile = Common.getJavaFile(destinationSrcDir, schemaPackage, schemaClassName);
-		final List<Field> fields = new ArrayList<>();
-		final Set<String> imports = new TreeSet<>();
-		for (final var item : msgDef.messageBody().messageElement()) {
-			if (item.messageDef() != null) { // process sub messages
-				generate(item.messageDef(), destinationSrcDir, destinationTestSrcDir, lookupHelper);
-			} else if (item.oneof() != null) { // process one ofs
-				final var field = new OneOfField(item.oneof(), modelClassName, lookupHelper);
-				fields.add(field);
-				field.addAllNeededImports(imports, true, false, false);
-			} else if (item.mapField() != null) { // process map flattenedFields
-				final MapField field = new MapField(item.mapField(), lookupHelper);
-				fields.add(field);
-				field.addAllNeededImports(imports, true, false, false);
-			} else if (item.field() != null && item.field().fieldName() != null) {
-				final var field = new SingleField(item.field(), lookupHelper);
-				fields.add(field);
-			} else if (item.reserved() == null && item.optionStatement() == null) {
-				// we can ignore reserved and option statements for now
-				System.err.println("SchemaGenerator Warning - Unknown element: "+item+" -- "+item.getText());
-			}
-		}
+    /** {@inheritDoc} */
+    public void generate(
+            final Protobuf3Parser.MessageDefContext msgDef,
+            final File destinationSrcDir,
+            File destinationTestSrcDir,
+            final ContextualLookupHelper lookupHelper)
+            throws IOException {
+        final String modelClassName =
+                lookupHelper.getUnqualifiedClassForMessage(FileType.MODEL, msgDef);
+        final String schemaClassName =
+                lookupHelper.getUnqualifiedClassForMessage(FileType.SCHEMA, msgDef);
+        final String schemaPackage = lookupHelper.getPackageForMessage(FileType.SCHEMA, msgDef);
+        final File javaFile = Common.getJavaFile(destinationSrcDir, schemaPackage, schemaClassName);
+        final List<Field> fields = new ArrayList<>();
+        final Set<String> imports = new TreeSet<>();
+        for (final var item : msgDef.messageBody().messageElement()) {
+            if (item.messageDef() != null) { // process sub messages
+                generate(item.messageDef(), destinationSrcDir, destinationTestSrcDir, lookupHelper);
+            } else if (item.oneof() != null) { // process one ofs
+                final var field = new OneOfField(item.oneof(), modelClassName, lookupHelper);
+                fields.add(field);
+                field.addAllNeededImports(imports, true, false, false);
+            } else if (item.mapField() != null) { // process map flattenedFields
+                final MapField field = new MapField(item.mapField(), lookupHelper);
+                fields.add(field);
+                field.addAllNeededImports(imports, true, false, false);
+            } else if (item.field() != null && item.field().fieldName() != null) {
+                final var field = new SingleField(item.field(), lookupHelper);
+                fields.add(field);
+            } else if (item.reserved() == null && item.optionStatement() == null) {
+                // we can ignore reserved and option statements for now
+                System.err.println(
+                        "SchemaGenerator Warning - Unknown element: "
+                                + item
+                                + " -- "
+                                + item.getText());
+            }
+        }
 
-		final List<Field> flattenedFields = fields.stream()
-				.flatMap(field -> field instanceof OneOfField ? ((OneOfField)field).fields().stream() :
-						Stream.of(field))
-				.collect(Collectors.toList());
+        final List<Field> flattenedFields =
+                fields.stream()
+                        .flatMap(
+                                field ->
+                                        field instanceof OneOfField
+                                                ? ((OneOfField) field).fields().stream()
+                                                : Stream.of(field))
+                        .collect(Collectors.toList());
 
-		try (FileWriter javaWriter = new FileWriter(javaFile)) {
-			javaWriter.write("""
+        try (FileWriter javaWriter = new FileWriter(javaFile)) {
+            javaWriter.write(
+                    """
 					package $schemaPackage;
-										
+
 					import com.hedera.pbj.runtime.FieldDefinition;
 					import com.hedera.pbj.runtime.FieldType;
 					import com.hedera.pbj.runtime.Schema;
 					$imports
-										
+
 					/**
 					 * Schema for $modelClassName model object. Generate based on protobuf schema.
 					 */
 					public final class $schemaClassName implements Schema {
-					
+
 					    // -- FIELD DEFINITIONS ---------------------------------------------
-					    
+
 					$fields
-					    
+
 					    // -- OTHER METHODS -------------------------------------------------
-					    
+
 					    /**
 					     * Check if a field definition belongs to this schema.
 					     *
@@ -84,32 +94,41 @@ public final class $schemaClassName implements Schema {
 					    public static boolean valid(FieldDefinition f) {
 					    	return f != null && getField(f.number()) == f;
 					    }
-					    
+
 					$getMethods
 					}
 					"""
-					.replace("$schemaPackage", schemaPackage)
-					.replace("$imports", imports.isEmpty() ? "" : imports.stream()
-							.filter(input -> !input.equals(schemaPackage))
-							.collect(Collectors.joining(".*;\nimport ","\nimport ",".*;\n")))
-					.replace("$modelClassName", modelClassName)
-					.replace("$schemaClassName", schemaClassName)
-					.replace("$fields", fields.stream().map(
-							Field::schemaFieldsDef
-					).collect(Collectors.joining("\n\n")))
-					.replace("$getMethods", generateGetField(flattenedFields))
-			);
-		}
-	}
+                            .replace("$schemaPackage", schemaPackage)
+                            .replace(
+                                    "$imports",
+                                    imports.isEmpty()
+                                            ? ""
+                                            : imports.stream()
+                                                    .filter(input -> !input.equals(schemaPackage))
+                                                    .collect(
+                                                            Collectors.joining(
+                                                                    ".*;\nimport ",
+                                                                    "\nimport ",
+                                                                    ".*;\n")))
+                            .replace("$modelClassName", modelClassName)
+                            .replace("$schemaClassName", schemaClassName)
+                            .replace(
+                                    "$fields",
+                                    fields.stream()
+                                            .map(Field::schemaFieldsDef)
+                                            .collect(Collectors.joining("\n\n")))
+                            .replace("$getMethods", generateGetField(flattenedFields)));
+        }
+    }
 
-	/**
-	 * Generate getField method to get a field definition given a field number
-	 *
-	 * @param flattenedFields flattened list of all fields, with oneofs flattened
-	 * @return source code string for getField method
-	 */
-	private static String generateGetField(final List<Field> flattenedFields) {
-		return 	"""		
+    /**
+     * Generate getField method to get a field definition given a field number
+     *
+     * @param flattenedFields flattened list of all fields, with oneofs flattened
+     * @return source code string for getField method
+     */
+    private static String generateGetField(final List<Field> flattenedFields) {
+        return """
 					/**
 					 * Get a field definition given a field number
 					 *
@@ -122,9 +141,10 @@ public static FieldDefinition getField(final int fieldNumber) {
 					        default -> null;
 					    };
 					}
-				""".formatted(flattenedFields.stream()
-											.map(Field::schemaGetFieldsDefCase)
-											.collect(Collectors.joining("\n            ")));
-	}
-
+				"""
+                .formatted(
+                        flattenedFields.stream()
+                                .map(Field::schemaGetFieldsDefCase)
+                                .collect(Collectors.joining("\n            ")));
+    }
 }
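
For orientation, the generator flattens oneof fields before emitting getField because every concrete sub-field needs its own case in the generated switch, while the oneof wrapper itself does not. A minimal, self-contained sketch of that flattening step, using simplified stand-in types (not the compiler's real Field, OneOfField, or SingleField classes):

import java.util.List;
import java.util.stream.Stream;

/** Self-contained sketch: flatten oneof fields so each sub-field gets its own getField case. */
class FlattenSketch {
    // Simplified stand-ins for the compiler's field model types.
    interface F { String name(); int number(); }
    record Single(String name, int number) implements F {}
    record OneOf(String name, List<F> subFields) implements F {
        public int number() { return -1; } // the oneof wrapper itself has no field number
    }

    public static void main(String[] args) {
        final List<F> fields = List.of(
                new Single("memo", 1),
                new OneOf("body", List.of(new Single("transfer", 2), new Single("burn", 3))));

        // Same shape as the generator: expand each oneof into its sub-fields.
        final List<F> flattened = fields.stream()
                .flatMap(f -> f instanceof OneOf o ? o.subFields().stream() : Stream.of(f))
                .toList();

        // Each flattened entry would back one case in the generated getField(int) switch.
        flattened.forEach(f -> System.out.println("case " + f.number() + " -> " + f.name().toUpperCase() + ";"));
    }
}
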
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/TestGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/TestGenerator.java
index 24b5e606..8b72d054 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/TestGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/TestGenerator.java
@@ -5,67 +5,72 @@
 
 import com.hedera.pbj.compiler.impl.*;
 import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser;
-import kotlin.reflect.jvm.internal.impl.protobuf.CodedOutputStream;
-
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
 import java.util.TreeSet;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
-/**
- * Code generator that parses protobuf files and generates unit tests for each message type.
- */
+/** Code generator that parses protobuf files and generates unit tests for each message type. */
 public final class TestGenerator implements Generator {
 
-	/**
-	 * {@inheritDoc}
-	 */
-	public void generate(Protobuf3Parser.MessageDefContext msgDef, File destinationSrcDir,
-						 File destinationTestSrcDir, final ContextualLookupHelper lookupHelper) throws IOException {
-		final var modelClassName = lookupHelper.getUnqualifiedClassForMessage(FileType.MODEL, msgDef);
-		final var testClassName = lookupHelper.getUnqualifiedClassForMessage(FileType.TEST, msgDef);
-		final String testPackage = lookupHelper.getPackageForMessage(FileType.TEST, msgDef);
-		final String protoCJavaFullQualifiedClass = lookupHelper.getFullyQualifiedMessageClassname(FileType.PROTOC,msgDef);
-		final File javaFile = Common.getJavaFile(destinationTestSrcDir, testPackage, testClassName);
-		final List<Field> fields = new ArrayList<>();
-		final Set<String> imports = new TreeSet<>();
-		imports.add("com.hedera.pbj.runtime.io.buffer");
-		imports.add(lookupHelper.getPackageForMessage(FileType.MODEL, msgDef));
-		for (final var item: msgDef.messageBody().messageElement()) {
-			if (item.messageDef() != null) { // process sub messages
-				generate(item.messageDef(), destinationSrcDir, destinationTestSrcDir, lookupHelper);
-			} else if (item.oneof() != null) { // process one ofs
-				final var field = new OneOfField(item.oneof(), modelClassName, lookupHelper);
-				fields.add(field);
-				field.addAllNeededImports(imports, true, false, true);
-				for(var subField : field.fields()) {
-					subField.addAllNeededImports(imports, true, false, true);
-				}
-			} else if (item.mapField() != null) { // process map fields
-				final MapField field = new MapField(item.mapField(), lookupHelper);
-				fields.add(field);
-				field.addAllNeededImports(imports, true, false, true);
-			} else if (item.field() != null && item.field().fieldName() != null) {
-				final var field = new SingleField(item.field(), lookupHelper);
-				fields.add(field);
-				if (field.type() == Field.FieldType.MESSAGE || field.type() == Field.FieldType.ENUM) {
-					field.addAllNeededImports(imports, true, false, true);
-				}
-			} else if (item.reserved() == null && item.optionStatement() == null) {
-				System.err.println("TestGenerator Warning - Unknown element: "+item+" -- "+item.getText());
-			}
-		}
-		imports.add("java.util");
-		try (FileWriter javaWriter = new FileWriter(javaFile)) {
-			javaWriter.write("""
+    /** {@inheritDoc} */
+    public void generate(
+            Protobuf3Parser.MessageDefContext msgDef,
+            File destinationSrcDir,
+            File destinationTestSrcDir,
+            final ContextualLookupHelper lookupHelper)
+            throws IOException {
+        final var modelClassName =
+                lookupHelper.getUnqualifiedClassForMessage(FileType.MODEL, msgDef);
+        final var testClassName = lookupHelper.getUnqualifiedClassForMessage(FileType.TEST, msgDef);
+        final String testPackage = lookupHelper.getPackageForMessage(FileType.TEST, msgDef);
+        final String protoCJavaFullQualifiedClass =
+                lookupHelper.getFullyQualifiedMessageClassname(FileType.PROTOC, msgDef);
+        final File javaFile = Common.getJavaFile(destinationTestSrcDir, testPackage, testClassName);
+        final List<Field> fields = new ArrayList<>();
+        final Set<String> imports = new TreeSet<>();
+        imports.add("com.hedera.pbj.runtime.io.buffer");
+        imports.add(lookupHelper.getPackageForMessage(FileType.MODEL, msgDef));
+        for (final var item : msgDef.messageBody().messageElement()) {
+            if (item.messageDef() != null) { // process sub messages
+                generate(item.messageDef(), destinationSrcDir, destinationTestSrcDir, lookupHelper);
+            } else if (item.oneof() != null) { // process one ofs
+                final var field = new OneOfField(item.oneof(), modelClassName, lookupHelper);
+                fields.add(field);
+                field.addAllNeededImports(imports, true, false, true);
+                for (var subField : field.fields()) {
+                    subField.addAllNeededImports(imports, true, false, true);
+                }
+            } else if (item.mapField() != null) { // process map fields
+                final MapField field = new MapField(item.mapField(), lookupHelper);
+                fields.add(field);
+                field.addAllNeededImports(imports, true, false, true);
+            } else if (item.field() != null && item.field().fieldName() != null) {
+                final var field = new SingleField(item.field(), lookupHelper);
+                fields.add(field);
+                if (field.type() == Field.FieldType.MESSAGE
+                        || field.type() == Field.FieldType.ENUM) {
+                    field.addAllNeededImports(imports, true, false, true);
+                }
+            } else if (item.reserved() == null && item.optionStatement() == null) {
+                System.err.println(
+                        "TestGenerator Warning - Unknown element: "
+                                + item
+                                + " -- "
+                                + item.getText());
+            }
+        }
+        imports.add("java.util");
+        try (FileWriter javaWriter = new FileWriter(javaFile)) {
+            javaWriter.write(
+                    """
 					package %s;
-									
+
 					import com.google.protobuf.util.JsonFormat;
 					import com.google.protobuf.CodedOutputStream;
 					import com.hedera.pbj.runtime.io.buffer.BufferedData;
@@ -79,15 +84,15 @@ public void generate(Protobuf3Parser.MessageDefContext msgDef, File destinationS
 					import java.nio.ByteBuffer;
 					import java.nio.CharBuffer;
 					%s
-												
+
 					import com.google.protobuf.CodedInputStream;
 					import com.google.protobuf.WireFormat;
 					import java.io.IOException;
 					import java.nio.charset.StandardCharsets;
-										
+
 					import static com.hedera.pbj.runtime.ProtoTestTools.*;
 					import static org.junit.jupiter.api.Assertions.*;
-										
+
 					/**
 					 * Unit Test for %s model object. Generated based on protobuf schema.
 					 */
@@ -95,29 +100,35 @@ public final class %s {
 					%s
 					%s
 					}
-					""".formatted(
-							testPackage,
-						imports.isEmpty() ? "" : imports.stream()
-								.filter(input -> !input.equals(testPackage))
-								.collect(Collectors.joining(".*;\nimport ","\nimport ",".*;\n")),
-						modelClassName,
-						testClassName,
-						generateTestMethod(modelClassName, protoCJavaFullQualifiedClass)
-								.indent(DEFAULT_INDENT),
-						generateModelTestArgumentsMethod(modelClassName, fields)
-								.indent(DEFAULT_INDENT)
-					)
-			);
-		}
-	}
-
-	private static String generateModelTestArgumentsMethod(final String modelClassName, final List<Field> fields) {
-		return """	
+					"""
+                            .formatted(
+                                    testPackage,
+                                    imports.isEmpty()
+                                            ? ""
+                                            : imports.stream()
+                                                    .filter(input -> !input.equals(testPackage))
+                                                    .collect(
+                                                            Collectors.joining(
+                                                                    ".*;\nimport ",
+                                                                    "\nimport ",
+                                                                    ".*;\n")),
+                                    modelClassName,
+                                    testClassName,
+                                    generateTestMethod(modelClassName, protoCJavaFullQualifiedClass)
+                                            .indent(DEFAULT_INDENT),
+                                    generateModelTestArgumentsMethod(modelClassName, fields)
+                                            .indent(DEFAULT_INDENT)));
+        }
+    }
+
+    private static String generateModelTestArgumentsMethod(
+            final String modelClassName, final List<Field> fields) {
+        return """
 				/**
 				 * List of all valid arguments for testing, built as a static list, so we can reuse it.
 				 */
 				public static final List<%s> ARGUMENTS;
-				
+
 				static {
 				%s
 				    // work out the longest of all the lists of args as that is how many test cases we need
@@ -130,7 +141,7 @@ private static String generateModelTestArgumentsMethod(final String modelClassNa
 				%s
 				            )).toList();
 				}
-				
+
 				/**
 				 * Create a stream of all test permutations of the %s class we are testing. This is reused by other tests
 				 * as well that have model objects with fields of this type.
@@ -140,95 +151,148 @@ private static String generateModelTestArgumentsMethod(final String modelClassNa
 				public static Stream<NoToStringWrapper<%s>> createModelTestArguments() {
 					return ARGUMENTS.stream().map(NoToStringWrapper::new);
 				}
-				""".formatted(
-					modelClassName,
-					fields.stream()
-							.filter(field -> !field.javaFieldType().equals(modelClassName))
-							.map(f -> "final var %sList = %s;".formatted(f.nameCamelFirstLower(), generateTestData(modelClassName, f, f.optionalValueType(), f.repeated())))
-							.collect(Collectors.joining("\n")).indent(DEFAULT_INDENT),
-					fields.stream()
-							.filter(field -> !field.javaFieldType().equals(modelClassName))
-							.map(f -> f.nameCamelFirstLower()+"List.size()")
-							.collect(Collectors.collectingAndThen(
-									Collectors.toList(),
-									list -> list.isEmpty() ? Stream.of("0") : list.stream()
-							))
-							.collect(Collectors.joining(",\n")).indent(DEFAULT_INDENT * 2),
-					modelClassName,
-					fields.stream().map(field ->
-							field.javaFieldType().equals(modelClassName)
-									? field.javaFieldType() + ".newBuilder().build()"
-									: "$nameList.get(Math.min(i, $nameList.size()-1))".replace("$name", field.nameCamelFirstLower())
-					).collect(Collectors.joining(",\n")).indent(DEFAULT_INDENT * 4),
-					modelClassName,
-					modelClassName
-				);
-	}
-
-	private static String generateTestData(String modelClassName, Field field, boolean optional, boolean repeated) {
-		if (optional) {
-
-			Field.FieldType convertedFieldType = getOptionalConvertedFieldType(field);
-			return """
+				"""
+                .formatted(
+                        modelClassName,
+                        fields.stream()
+                                .filter(field -> !field.javaFieldType().equals(modelClassName))
+                                .map(
+                                        f ->
+                                                "final var %sList = %s;"
+                                                        .formatted(
+                                                                f.nameCamelFirstLower(),
+                                                                generateTestData(
+                                                                        modelClassName,
+                                                                        f,
+                                                                        f.optionalValueType(),
+                                                                        f.repeated())))
+                                .collect(Collectors.joining("\n"))
+                                .indent(DEFAULT_INDENT),
+                        fields.stream()
+                                .filter(field -> !field.javaFieldType().equals(modelClassName))
+                                .map(f -> f.nameCamelFirstLower() + "List.size()")
+                                .collect(
+                                        Collectors.collectingAndThen(
+                                                Collectors.toList(),
+                                                list ->
+                                                        list.isEmpty()
+                                                                ? Stream.of("0")
+                                                                : list.stream()))
+                                .collect(Collectors.joining(",\n"))
+                                .indent(DEFAULT_INDENT * 2),
+                        modelClassName,
+                        fields.stream()
+                                .map(
+                                        field ->
+                                                field.javaFieldType().equals(modelClassName)
+                                                        ? field.javaFieldType()
+                                                                + ".newBuilder().build()"
+                                                        : "$nameList.get(Math.min(i, $nameList.size()-1))"
+                                                                .replace(
+                                                                        "$name",
+                                                                        field
+                                                                                .nameCamelFirstLower()))
+                                .collect(Collectors.joining(",\n"))
+                                .indent(DEFAULT_INDENT * 4),
+                        modelClassName,
+                        modelClassName);
+    }
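
The arguments method assembled above sizes the test set by the longest per-field option list and clamps shorter lists with get(Math.min(i, size - 1)) so every index still yields a value. A self-contained sketch of just that combination strategy, using a hypothetical two-field record in place of a generated PBJ model:

import java.util.List;
import java.util.stream.IntStream;

/** Sketch of how generated test arguments combine per-field candidate lists by index. */
class TestArgsSketch {
    record Sample(int count, String label) {} // hypothetical model stand-in

    public static void main(String[] args) {
        final List<Integer> countList = List.of(0, 1, Integer.MAX_VALUE);
        final List<String> labelList = List.of("", "hello");

        // The number of test cases is the longest option list; shorter lists are clamped,
        // mirroring $nameList.get(Math.min(i, $nameList.size()-1)) in the generated code.
        final int maxValues = Math.max(countList.size(), labelList.size());
        final List<Sample> arguments = IntStream.range(0, maxValues)
                .mapToObj(i -> new Sample(
                        countList.get(Math.min(i, countList.size() - 1)),
                        labelList.get(Math.min(i, labelList.size() - 1))))
                .toList();

        arguments.forEach(System.out::println); // 3 cases: (0,""), (1,"hello"), (MAX,"hello")
    }
}
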
+
+    private static String generateTestData(
+            String modelClassName, Field field, boolean optional, boolean repeated) {
+        if (optional) {
+
+            Field.FieldType convertedFieldType = getOptionalConvertedFieldType(field);
+            return """
 					addNull(%s)"""
-					.formatted(getOptionsForFieldType(convertedFieldType, convertedFieldType.javaType));
-		} else if (repeated) {
-			final String optionsList = generateTestData(modelClassName, field, field.optionalValueType(), false);
-			return """
+                    .formatted(
+                            getOptionsForFieldType(
+                                    convertedFieldType, convertedFieldType.javaType));
+        } else if (repeated) {
+            final String optionsList =
+                    generateTestData(modelClassName, field, field.optionalValueType(), false);
+            return """
 					generateListArguments(%s)""".formatted(optionsList);
-		} else if (field instanceof final OneOfField oneOf) {
-			final List<String> options = new ArrayList<>();
-			for (var subField : oneOf.fields()) {
-				if (subField instanceof SingleField) {
-					final String enumValueName = Common.camelToUpperSnake(subField.name());
-					// special cases to break cyclic dependencies
-					if (!("THRESHOLD_KEY".equals(enumValueName) || "KEY_LIST".equals(enumValueName)
-							|| "THRESHOLD_SIGNATURE".equals(enumValueName) || "SIGNATURE_LIST".equals(enumValueName))) {
-						final String listStr;
-						if (subField.optionalValueType()) {
-							Field.FieldType convertedSubFieldType = getOptionalConvertedFieldType(subField);
-							listStr = getOptionsForFieldType(convertedSubFieldType, convertedSubFieldType.javaType);
-						} else {
-							listStr = getOptionsForFieldType(subField.type(), ((SingleField) subField).javaFieldTypeForTest());
-						}
-						options.add(listStr + ("\n.stream()\n" +
-								"""
+        } else if (field instanceof final OneOfField oneOf) {
+            final List<String> options = new ArrayList<>();
+            for (var subField : oneOf.fields()) {
+                if (subField instanceof SingleField) {
+                    final String enumValueName = Common.camelToUpperSnake(subField.name());
+                    // special cases to break cyclic dependencies
+                    if (!("THRESHOLD_KEY".equals(enumValueName)
+                            || "KEY_LIST".equals(enumValueName)
+                            || "THRESHOLD_SIGNATURE".equals(enumValueName)
+                            || "SIGNATURE_LIST".equals(enumValueName))) {
+                        final String listStr;
+                        if (subField.optionalValueType()) {
+                            Field.FieldType convertedSubFieldType =
+                                    getOptionalConvertedFieldType(subField);
+                            listStr =
+                                    getOptionsForFieldType(
+                                            convertedSubFieldType, convertedSubFieldType.javaType);
+                        } else {
+                            listStr =
+                                    getOptionsForFieldType(
+                                            subField.type(),
+                                            ((SingleField) subField).javaFieldTypeForTest());
+                        }
+                        options.add(
+                                listStr
+                                        + ("\n.stream()\n"
+                                                        + """
 										.map(value -> new %s<>(%sOneOfType.%s, value))
-										.toList()""".formatted(
-										((OneOfField) field).className(),
-										modelClassName + "." + field.nameCamelFirstUpper(),
-										enumValueName
-								)).indent(DEFAULT_INDENT)
-						);
-					}
-				} else {
-					System.err.println("Did not expect a OneOfField in a OneOfField. In " +
-							"modelClassName=" + modelClassName + " field=" + field + " subField=" + subField);
-				}
-			}
-			return """
+										.toList()"""
+                                                                .formatted(
+                                                                        ((OneOfField) field)
+                                                                                .className(),
+                                                                        modelClassName
+                                                                                + "."
+                                                                                + field
+                                                                                        .nameCamelFirstUpper(),
+                                                                        enumValueName))
+                                                .indent(DEFAULT_INDENT));
+                    }
+                } else {
+                    System.err.println(
+                            "Did not expect a OneOfField in a OneOfField. In "
+                                    + "modelClassName="
+                                    + modelClassName
+                                    + " field="
+                                    + field
+                                    + " subField="
+                                    + subField);
+                }
+            }
+            return """
 					Stream.of(
 					    List.of(new %s<>(%sOneOfType.UNSET, null)),
 					    %s
-					).flatMap(List::stream).toList()""".formatted(
-					((OneOfField) field).className(),
-					modelClassName + "." + field.nameCamelFirstUpper(),
-					String.join(",\n", options).indent(DEFAULT_INDENT)
-					).indent(DEFAULT_INDENT * 2);
-		} else if (field instanceof final MapField mapField) {
-			// e.g. INTEGER_TESTS_LIST
-			final String keyOptions = getOptionsForFieldType(mapField.keyField().type(), mapField.keyField().javaFieldType());
-			// e.g. STRING_TESTS_LIST, or, say, CustomMessageTest.ARGUMENTS
-			final String valueOptions = getOptionsForFieldType(mapField.valueField().type(), mapField.valueField().javaFieldType());
-
-			// A cartesian product is nice to use, but it doesn't seem reasonable from the performance perspective.
-			// Instead, we want to test three cases:
-			// 1. Empty map
-			// 2. Map with a single entry
-			// 3. Map with multiple (e.g. two) entries
-			// Note that keys and value options lists may be pretty small. E.g. Boolean would only have 2 elements. So we use mod.
-			// Also note that we assume there's at least one element in each list.
-			return """
+					).flatMap(List::stream).toList()"""
+                    .formatted(
+                            ((OneOfField) field).className(),
+                            modelClassName + "." + field.nameCamelFirstUpper(),
+                            String.join(",\n", options).indent(DEFAULT_INDENT))
+                    .indent(DEFAULT_INDENT * 2);
+        } else if (field instanceof final MapField mapField) {
+            // e.g. INTEGER_TESTS_LIST
+            final String keyOptions =
+                    getOptionsForFieldType(
+                            mapField.keyField().type(), mapField.keyField().javaFieldType());
+            // e.g. STRING_TESTS_LIST, or, say, CustomMessageTest.ARGUMENTS
+            final String valueOptions =
+                    getOptionsForFieldType(
+                            mapField.valueField().type(), mapField.valueField().javaFieldType());
+
+            // A cartesian product is nice to use, but it doesn't seem reasonable from the
+            // performance perspective.
+            // Instead, we want to test three cases:
+            // 1. Empty map
+            // 2. Map with a single entry
+            // 3. Map with multiple (e.g. two) entries
+            // Note that keys and value options lists may be pretty small. E.g. Boolean would only
+            // have 2 elements. So we use mod.
+            // Also note that we assume there's at least one element in each list.
+            return """
          			List.of(
      					Map.$javaGenericTypeof(),
      					Map.$javaGenericTypeof($keyOptions.get(0), $valueOptions.get(0)),
@@ -237,60 +301,68 @@ private static String generateTestData(String modelClassName, Field field, boole
      						$keyOptions.get(2 % $keyOptions.size()), $valueOptions.get(2 % $valueOptions.size())
      					)
 					)"""
-					.replace("$javaGenericType", mapField.javaGenericType())
-					.replace("$keyOptions", keyOptions)
-					.replace("$valueOptions", valueOptions)
-					;
-		} else {
-			return getOptionsForFieldType(field.type(), ((SingleField)field).javaFieldTypeForTest());
-		}
-	}
-
-	private static Field.FieldType getOptionalConvertedFieldType(final Field field) {
-		return switch (field.messageType()) {
-			case "StringValue" -> Field.FieldType.STRING;
-			case "Int32Value" -> Field.FieldType.INT32;
-			case "UInt32Value" -> Field.FieldType.UINT32;
-			case "Int64Value" -> Field.FieldType.INT64;
-			case "UInt64Value" -> Field.FieldType.UINT64;
-			case "FloatValue" -> Field.FieldType.FLOAT;
-			case "DoubleValue" -> Field.FieldType.DOUBLE;
-			case "BoolValue" -> Field.FieldType.BOOL;
-			case "BytesValue" -> Field.FieldType.BYTES;
-			default -> Field.FieldType.MESSAGE;
-		};
-	}
-
-	private static String getOptionsForFieldType(Field.FieldType fieldType, String javaFieldType) {
-		return switch (fieldType) {
-			case INT32, SINT32, SFIXED32 -> "INTEGER_TESTS_LIST";
-			case UINT32, FIXED32 -> "UNSIGNED_INTEGER_TESTS_LIST";
-			case INT64, SINT64, SFIXED64 -> "LONG_TESTS_LIST";
-			case UINT64, FIXED64 -> "UNSIGNED_LONG_TESTS_LIST";
-			case FLOAT -> "FLOAT_TESTS_LIST";
-			case DOUBLE -> "DOUBLE_TESTS_LIST";
-			case BOOL -> "BOOLEAN_TESTS_LIST";
-			case STRING -> "STRING_TESTS_LIST";
-			case BYTES -> "BYTES_TESTS_LIST";
-			case ENUM -> "Arrays.asList(" + javaFieldType + ".values())";
-			case ONE_OF -> throw new RuntimeException("Should never happen, should have been caught in generateTestData()");
-			case MESSAGE -> javaFieldType + FileAndPackageNamesConfig.TEST_JAVA_FILE_SUFFIX + ".ARGUMENTS";
-			case MAP -> throw new RuntimeException("Should never happen, should have been caught in generateTestData()");
-		};
-	}
-
-	/**
-	 * Generate code for test method. The test method is designed to reuse thread local buffers. This is
-	 * very important for performance as without this the tests quickly overwhelm the garbage collector.
-	 *
-	 * This method also adds a public static final reference to the ProtoC class for this model object.
-	 *
-	 * @param modelClassName The class name of the model object we are creating a test for
-	 * @param protoCJavaFullQualifiedClass The qualified class name of the protoc generated object class
-	 * @return Code for test method
-	 */
-	private static String generateTestMethod(final String modelClassName, final String protoCJavaFullQualifiedClass) {
-		return """
+                    .replace("$javaGenericType", mapField.javaGenericType())
+                    .replace("$keyOptions", keyOptions)
+                    .replace("$valueOptions", valueOptions);
+        } else {
+            return getOptionsForFieldType(
+                    field.type(), ((SingleField) field).javaFieldTypeForTest());
+        }
+    }
+
+    private static Field.FieldType getOptionalConvertedFieldType(final Field field) {
+        return switch (field.messageType()) {
+            case "StringValue" -> Field.FieldType.STRING;
+            case "Int32Value" -> Field.FieldType.INT32;
+            case "UInt32Value" -> Field.FieldType.UINT32;
+            case "Int64Value" -> Field.FieldType.INT64;
+            case "UInt64Value" -> Field.FieldType.UINT64;
+            case "FloatValue" -> Field.FieldType.FLOAT;
+            case "DoubleValue" -> Field.FieldType.DOUBLE;
+            case "BoolValue" -> Field.FieldType.BOOL;
+            case "BytesValue" -> Field.FieldType.BYTES;
+            default -> Field.FieldType.MESSAGE;
+        };
+    }
+
+    private static String getOptionsForFieldType(Field.FieldType fieldType, String javaFieldType) {
+        return switch (fieldType) {
+            case INT32, SINT32, SFIXED32 -> "INTEGER_TESTS_LIST";
+            case UINT32, FIXED32 -> "UNSIGNED_INTEGER_TESTS_LIST";
+            case INT64, SINT64, SFIXED64 -> "LONG_TESTS_LIST";
+            case UINT64, FIXED64 -> "UNSIGNED_LONG_TESTS_LIST";
+            case FLOAT -> "FLOAT_TESTS_LIST";
+            case DOUBLE -> "DOUBLE_TESTS_LIST";
+            case BOOL -> "BOOLEAN_TESTS_LIST";
+            case STRING -> "STRING_TESTS_LIST";
+            case BYTES -> "BYTES_TESTS_LIST";
+            case ENUM -> "Arrays.asList(" + javaFieldType + ".values())";
+            case ONE_OF -> throw new RuntimeException(
+                    "Should never happen, should have been caught in generateTestData()");
+            case MESSAGE -> javaFieldType
+                    + FileAndPackageNamesConfig.TEST_JAVA_FILE_SUFFIX
+                    + ".ARGUMENTS";
+            case MAP -> throw new RuntimeException(
+                    "Should never happen, should have been caught in generateTestData()");
+        };
+    }
+
+    /**
+     * Generate code for test method. The test method is designed to reuse thread local buffers.
+     * This is very important for performance as without this the tests quickly overwhelm the
+     * garbage collector.
+     *
+     * <p>This method also adds a public static final reference to the ProtoC class for this model
+     * object.
+     *
+     * @param modelClassName The class name of the model object we are creating a test for
+     * @param protoCJavaFullQualifiedClass The qualified class name of the protoc generated object
+     *     class
+     * @return Code for test method
+     */
+    private static String generateTestMethod(
+            final String modelClassName, final String protoCJavaFullQualifiedClass) {
+        return """
 				/** A reference to the protoc generated object class. */
 				public static final Class<$protocModelClass> PROTOC_MODEL_CLASS
 						= $protocModelClass.class;
@@ -305,29 +377,29 @@ private static String generateTestMethod(final String modelClassName, final Stri
 				    final var byteBuffer = getThreadLocalByteBuffer();
 				    final var charBuffer = getThreadLocalCharBuffer();
 				    final var charBuffer2 = getThreadLocalCharBuffer2();
-				    
+
 				    // model to bytes with PBJ
 				    $modelClassName.PROTOBUF.write(modelObj, dataBuffer);
 				    // clamp limit to bytes written
 				    dataBuffer.limit(dataBuffer.position());
-				    
+
 				    // copy bytes to ByteBuffer
 				    dataBuffer.resetPosition();
 				    final int protoBufByteCount = (int)dataBuffer.remaining();
 				    dataBuffer.readBytes(byteBuffer);
 				    byteBuffer.flip();
-				    
+
 				    // read proto bytes with ProtoC to make sure it is readable and no parse exceptions are thrown
 				    final $protocModelClass protoCModelObj = $protocModelClass.parseFrom(byteBuffer);
-				    
+
 				    // read proto bytes with PBJ parser
 				    dataBuffer.resetPosition();
 				    final $modelClassName modelObj2 = $modelClassName.PROTOBUF.parse(dataBuffer);
-				    
+
 				    // check the read back object is equal to written original one
 				    //assertEquals(modelObj.toString(), modelObj2.toString());
 				    assertEquals(modelObj, modelObj2);
-				    
+
 				    // model to bytes with ProtoC writer
 				    byteBuffer.clear();
 				    final CodedOutputStream codedOutput = CodedOutputStream.newInstance(byteBuffer);
@@ -337,7 +409,7 @@ private static String generateTestMethod(final String modelClassName, final Stri
 				    // copy to a data buffer
 				    dataBuffer2.writeBytes(byteBuffer);
 				    dataBuffer2.flip();
-				    
+
 				    // compare written bytes
 				    assertEquals(dataBuffer, dataBuffer2);
 
@@ -350,7 +422,7 @@ private static String generateTestMethod(final String modelClassName, final Stri
 				    dataBuffer2.resetPosition();
 				    assertEquals(protoBufByteCount, $modelClassName.PROTOBUF.measure(dataBuffer2));
 				    assertEquals(protoBufByteCount, $modelClassName.PROTOBUF.measureRecord(modelObj));
-				    		
+
 				    // check fast equals
 				    dataBuffer2.resetPosition();
 				    assertTrue($modelClassName.PROTOBUF.fastEquals(modelObj, dataBuffer2));
@@ -370,12 +442,12 @@ private static String generateTestMethod(final String modelClassName, final Stri
 				    JsonFormat.printer().appendTo(protoCModelObj, charBuffer2);
 				    charBuffer2.flip();
 				    assertEquals(charBuffer2, charBuffer);
-				    
+
 				    // Test JSON Reading
 				    final $modelClassName jsonReadPbj = $modelClassName.JSON.parse(JsonTools.parseJson(charBuffer), false, Integer.MAX_VALUE);
 				    assertEquals(modelObj, jsonReadPbj);
 				}
-				
+
 				@SuppressWarnings("EqualsWithItself")
 				@Test
 				public void testTestEqualsAndHashCode() throws Exception {
@@ -396,9 +468,8 @@ public void testTestEqualsAndHashCode() throws Exception {
 				    }
 				}
 				"""
-				.replace("$modelClassName",modelClassName)
-				.replace("$protocModelClass",protoCJavaFullQualifiedClass)
-				.replace("$modelClassName",modelClassName)
-		;
-	}
+                .replace("$modelClassName", modelClassName)
+                .replace("$protocModelClass", protoCJavaFullQualifiedClass)
+                .replace("$modelClassName", modelClassName);
+    }
 }
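
As the javadoc above notes, the generated test method leans on thread-local buffers so repeated test runs do not flood the garbage collector. A minimal sketch of that reuse pattern follows; it is not PBJ's actual ProtoTestTools implementation, whose buffer types and sizes differ, only an illustration of the idea:

import java.nio.ByteBuffer;

/** Minimal sketch of the thread-local buffer reuse the generated tests depend on. */
class ThreadLocalBufferSketch {
    // One buffer per test thread, allocated once and reused by every test case on that thread.
    private static final ThreadLocal<ByteBuffer> BYTE_BUFFER =
            ThreadLocal.withInitial(() -> ByteBuffer.allocate(1024 * 1024));

    static ByteBuffer getThreadLocalByteBuffer() {
        final ByteBuffer buf = BYTE_BUFFER.get();
        buf.clear(); // reset position and limit so each test starts from a clean state
        return buf;
    }

    public static void main(String[] args) {
        final ByteBuffer first = getThreadLocalByteBuffer();
        final ByteBuffer second = getThreadLocalByteBuffer();
        System.out.println(first == second); // true: same instance reused, no per-test allocation
    }
}
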
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecGenerator.java
index b572ae0f..2f020438 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecGenerator.java
@@ -4,7 +4,6 @@
 import com.hedera.pbj.compiler.impl.*;
 import com.hedera.pbj.compiler.impl.generators.Generator;
 import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser;
-
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
@@ -14,52 +13,60 @@
 import java.util.TreeSet;
 import java.util.stream.Collectors;
 
-/**
- * Code generator that parses protobuf files and generates writers for each message type.
- */
+/** Code generator that parses protobuf files and generates writers for each message type. */
 @SuppressWarnings("DuplicatedCode")
 public final class JsonCodecGenerator implements Generator {
 
-	/**
-	 * {@inheritDoc}
-	 */
-	public void generate(Protobuf3Parser.MessageDefContext msgDef, final File destinationSrcDir,
-						 File destinationTestSrcDir, final ContextualLookupHelper lookupHelper) throws IOException {
-		final String modelClassName = lookupHelper.getUnqualifiedClassForMessage(FileType.MODEL, msgDef);
-		final String codecClassName = lookupHelper.getUnqualifiedClassForMessage(FileType.JSON_CODEC, msgDef);
-		final String codecPackage = lookupHelper.getPackageForMessage(FileType.JSON_CODEC, msgDef);
-		final File javaFile = Common.getJavaFile(destinationSrcDir, codecPackage, codecClassName);
+    /** {@inheritDoc} */
+    public void generate(
+            Protobuf3Parser.MessageDefContext msgDef,
+            final File destinationSrcDir,
+            File destinationTestSrcDir,
+            final ContextualLookupHelper lookupHelper)
+            throws IOException {
+        final String modelClassName =
+                lookupHelper.getUnqualifiedClassForMessage(FileType.MODEL, msgDef);
+        final String codecClassName =
+                lookupHelper.getUnqualifiedClassForMessage(FileType.JSON_CODEC, msgDef);
+        final String codecPackage = lookupHelper.getPackageForMessage(FileType.JSON_CODEC, msgDef);
+        final File javaFile = Common.getJavaFile(destinationSrcDir, codecPackage, codecClassName);
 
-		final List<Field> fields = new ArrayList<>();
-		final Set<String> imports = new TreeSet<>();
-		imports.add(lookupHelper.getPackageForMessage(FileType.MODEL, msgDef));
-		imports.add(lookupHelper.getPackageForMessage(FileType.SCHEMA, msgDef));
+        final List<Field> fields = new ArrayList<>();
+        final Set<String> imports = new TreeSet<>();
+        imports.add(lookupHelper.getPackageForMessage(FileType.MODEL, msgDef));
+        imports.add(lookupHelper.getPackageForMessage(FileType.SCHEMA, msgDef));
 
-		for(var item: msgDef.messageBody().messageElement()) {
-			if (item.messageDef() != null) { // process sub messages
-				generate(item.messageDef(), destinationSrcDir, destinationTestSrcDir, lookupHelper);
-			} else if (item.oneof() != null) { // process one ofs
-				final var field = new OneOfField(item.oneof(), modelClassName, lookupHelper);
-				fields.add(field);
-				field.addAllNeededImports(imports, true, true, false);
-			} else if (item.mapField() != null) { // process map fields
-				final MapField field = new MapField(item.mapField(), lookupHelper);
-				fields.add(field);
-				field.addAllNeededImports(imports, true, true, false);
-			} else if (item.field() != null && item.field().fieldName() != null) {
-				final var field = new SingleField(item.field(), lookupHelper);
-				fields.add(field);
-				field.addAllNeededImports(imports, true, true, false);
-			} else if (item.reserved() == null && item.optionStatement() == null) {
-				System.err.println("WriterGenerator Warning - Unknown element: "+item+" -- "+item.getText());
-			}
-		}
-		final String writeMethod = JsonCodecWriteMethodGenerator.generateWriteMethod(modelClassName, fields);
+        for (var item : msgDef.messageBody().messageElement()) {
+            if (item.messageDef() != null) { // process sub messages
+                generate(item.messageDef(), destinationSrcDir, destinationTestSrcDir, lookupHelper);
+            } else if (item.oneof() != null) { // process one ofs
+                final var field = new OneOfField(item.oneof(), modelClassName, lookupHelper);
+                fields.add(field);
+                field.addAllNeededImports(imports, true, true, false);
+            } else if (item.mapField() != null) { // process map fields
+                final MapField field = new MapField(item.mapField(), lookupHelper);
+                fields.add(field);
+                field.addAllNeededImports(imports, true, true, false);
+            } else if (item.field() != null && item.field().fieldName() != null) {
+                final var field = new SingleField(item.field(), lookupHelper);
+                fields.add(field);
+                field.addAllNeededImports(imports, true, true, false);
+            } else if (item.reserved() == null && item.optionStatement() == null) {
+                System.err.println(
+                        "WriterGenerator Warning - Unknown element: "
+                                + item
+                                + " -- "
+                                + item.getText());
+            }
+        }
+        final String writeMethod =
+                JsonCodecWriteMethodGenerator.generateWriteMethod(modelClassName, fields);
 
-		try (FileWriter javaWriter = new FileWriter(javaFile)) {
-			javaWriter.write("""
+        try (FileWriter javaWriter = new FileWriter(javaFile)) {
+            javaWriter.write(
+                    """
 					package $package;
-									
+
 					import com.hedera.pbj.runtime.*;
 					import com.hedera.pbj.runtime.io.*;
 					import com.hedera.pbj.runtime.io.buffer.*;
@@ -69,13 +76,13 @@ public void generate(Protobuf3Parser.MessageDefContext msgDef, final File destin
 					import java.util.*;
 					import edu.umd.cs.findbugs.annotations.NonNull;
 					import edu.umd.cs.findbugs.annotations.Nullable;
-					
+
 					import $qualifiedModelClass;
 					$imports
 					import com.hedera.pbj.runtime.jsonparser.*;
 					import static $schemaClass.*;
 					import static com.hedera.pbj.runtime.JsonTools.*;
-										
+
 					/**
 					 * JSON Codec for $modelClass model object. Generated based on protobuf schema.
 					 */
@@ -85,48 +92,67 @@ public final class $codecClass implements JsonCodec<$modelClass> {
 					    $writeMethod
 					}
 					"""
-					.replace("$package", codecPackage)
-					.replace("$imports", imports.isEmpty() ? "" : imports.stream()
-							.filter(input -> !input.equals(codecPackage))
-							.collect(Collectors.joining(".*;\nimport ","\nimport ",".*;\n")))
-					.replace("$schemaClass", lookupHelper.getFullyQualifiedMessageClassname(FileType.SCHEMA, msgDef))
-					.replace("$modelClass", modelClassName)
-					.replace("$qualifiedModelClass", lookupHelper.getFullyQualifiedMessageClassname(FileType.MODEL, msgDef))
-					.replace("$codecClass", codecClassName)
-					.replace("$unsetOneOfConstants", JsonCodecParseMethodGenerator.generateUnsetOneOfConstants(fields))
-					.replace("$writeMethod", writeMethod)
-					.replace("$parseObject", JsonCodecParseMethodGenerator.generateParseObjectMethod(modelClassName, fields))
-			);
-		}
-	}
+                            .replace("$package", codecPackage)
+                            .replace(
+                                    "$imports",
+                                    imports.isEmpty()
+                                            ? ""
+                                            : imports.stream()
+                                                    .filter(input -> !input.equals(codecPackage))
+                                                    .collect(
+                                                            Collectors.joining(
+                                                                    ".*;\nimport ",
+                                                                    "\nimport ",
+                                                                    ".*;\n")))
+                            .replace(
+                                    "$schemaClass",
+                                    lookupHelper.getFullyQualifiedMessageClassname(
+                                            FileType.SCHEMA, msgDef))
+                            .replace("$modelClass", modelClassName)
+                            .replace(
+                                    "$qualifiedModelClass",
+                                    lookupHelper.getFullyQualifiedMessageClassname(
+                                            FileType.MODEL, msgDef))
+                            .replace("$codecClass", codecClassName)
+                            .replace(
+                                    "$unsetOneOfConstants",
+                                    JsonCodecParseMethodGenerator.generateUnsetOneOfConstants(
+                                            fields))
+                            .replace("$writeMethod", writeMethod)
+                            .replace(
+                                    "$parseObject",
+                                    JsonCodecParseMethodGenerator.generateParseObjectMethod(
+                                            modelClassName, fields)));
+        }
+    }
 
-	/**
-	 * Converts a field name to a JSON field name.
-	 *
-	 * @param fieldName the field name
-	 * @return the JSON field name
-	 */
-	static String toJsonFieldName(String fieldName) {
-		// based directly on protoc so output matches
-		final int length = fieldName.length();
-		StringBuilder result = new StringBuilder(length);
-		boolean isNextUpperCase = false;
-		for (int i = 0; i < length; i++) {
-			char ch = fieldName.charAt(i);
-			if (ch == '_') {
-				isNextUpperCase = true;
-			} else if (isNextUpperCase) {
-				// This closely matches the logic for ASCII characters in:
-				// http://google3/google/protobuf/descriptor.cc?l=249-251&rcl=228891689
-				if ('a' <= ch && ch <= 'z') {
-					ch = (char) (ch - 'a' + 'A');
-				}
-				result.append(ch);
-				isNextUpperCase = false;
-			} else {
-				result.append(ch);
-			}
-		}
-		return result.toString();
-	}
+    /**
+     * Converts a field name to a JSON field name.
+     *
+     * @param fieldName the field name
+     * @return the JSON field name
+     */
+    static String toJsonFieldName(String fieldName) {
+        // based directly on protoc so output matches
+        final int length = fieldName.length();
+        StringBuilder result = new StringBuilder(length);
+        boolean isNextUpperCase = false;
+        for (int i = 0; i < length; i++) {
+            char ch = fieldName.charAt(i);
+            if (ch == '_') {
+                isNextUpperCase = true;
+            } else if (isNextUpperCase) {
+                // This closely matches the logic for ASCII characters in:
+                // http://google3/google/protobuf/descriptor.cc?l=249-251&rcl=228891689
+                if ('a' <= ch && ch <= 'z') {
+                    ch = (char) (ch - 'a' + 'A');
+                }
+                result.append(ch);
+                isNextUpperCase = false;
+            } else {
+                result.append(ch);
+            }
+        }
+        return result.toString();
+    }
 }
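
Since toJsonFieldName mirrors protoc's ASCII camel-casing, a quick illustrative snippet (placed in the same package because the method is package-private; the sample inputs are hypothetical) shows the expected mappings:

package com.hedera.pbj.compiler.impl.generators.json;

/** Illustrates the protoc-style JSON field name mapping implemented above. */
class JsonFieldNameExample {
    public static void main(String[] args) {
        System.out.println(JsonCodecGenerator.toJsonFieldName("contract_id")); // contractId
        System.out.println(JsonCodecGenerator.toJsonFieldName("gas_used_2"));  // gasUsed2
        System.out.println(JsonCodecGenerator.toJsonFieldName("memo"));        // memo (no underscores, unchanged)
    }
}
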
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecParseMethodGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecParseMethodGenerator.java
index f28afe99..477a930e 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecParseMethodGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecParseMethodGenerator.java
@@ -1,52 +1,54 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl.generators.json;
 
+import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT;
+import static com.hedera.pbj.compiler.impl.generators.json.JsonCodecGenerator.toJsonFieldName;
+
 import com.hedera.pbj.compiler.impl.Common;
 import com.hedera.pbj.compiler.impl.Field;
 import com.hedera.pbj.compiler.impl.MapField;
 import com.hedera.pbj.compiler.impl.OneOfField;
-
 import java.util.List;
 import java.util.stream.Collectors;
 
-import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT;
-import static com.hedera.pbj.compiler.impl.generators.json.JsonCodecGenerator.toJsonFieldName;
-
-/**
- * Code to generate the parse method for Codec classes.
- */
+/** Code to generate the parse method for Codec classes. */
 @SuppressWarnings("StringConcatenationInsideStringBufferAppend")
 class JsonCodecParseMethodGenerator {
 
     /**
-     * Because all UNSET OneOf values are the same and we use them often we create a static constant for them and just
-     * reuse it throughout codec code.
+     * Because all UNSET OneOf values are the same and we use them often, we create a static
+     * constant for them and just reuse it throughout codec code.
      *
      * @param fields the fields to generate for
      * @return code for constants
      */
     static String generateUnsetOneOfConstants(final List<Field> fields) {
-        return "\n" + fields.stream()
-            .filter(f -> f instanceof OneOfField)
-            .map(f -> {
-                final OneOfField field = (OneOfField)f;
-                return """
+        return "\n"
+                + fields.stream()
+                        .filter(f -> f instanceof OneOfField)
+                        .map(
+                                f -> {
+                                    final OneOfField field = (OneOfField) f;
+                                    return """
                            /** Constant for an unset oneof for $fieldName */
                            public static final $className<$enum> $unsetFieldName = new $className<>($enum.UNSET,null);
                        """
-                        .replace("$className", field.className())
-                        .replace("$enum", field.getEnumClassRef())
-                        .replace("$fieldName", field.name())
-                        .replace("$unsetFieldName", Common.camelToUpperSnake(field.name())+"_UNSET")
-                        .replace("$unsetFieldName", field.getEnumClassRef());
-            })
-            .collect(Collectors.joining("\n"));
+                                            .replace("$className", field.className())
+                                            .replace("$enum", field.getEnumClassRef())
+                                            .replace("$fieldName", field.name())
+                                            .replace(
+                                                    "$unsetFieldName",
+                                                    Common.camelToUpperSnake(field.name())
+                                                            + "_UNSET")
+                                            .replace("$unsetFieldName", field.getEnumClassRef());
+                                })
+                        .collect(Collectors.joining("\n"));
     }
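
To make the template above concrete, here is a stand-alone sketch that performs the same placeholder substitution for a hypothetical oneof field; the class name, enum reference, and field name are invented for illustration, whereas the real generator reads them from the parsed OneOfField:

/** Stand-alone sketch of how the UNSET-constant template above expands for one oneof field. */
class UnsetConstantSketch {
    public static void main(String[] args) {
        // Hypothetical inputs; the generator takes these from the parsed OneOfField.
        final String className = "OneOf";
        final String enumClassRef = "HashObject.HashAlgorithmOneOfType";
        final String fieldName = "hashAlgorithm";

        final String code = """
                   /** Constant for an unset oneof for $fieldName */
                   public static final $className<$enum> $unsetFieldName = new $className<>($enum.UNSET,null);
               """
                .replace("$className", className)
                .replace("$enum", enumClassRef)
                .replace("$fieldName", fieldName)
                // camelToUpperSnake("hashAlgorithm") + "_UNSET" in the real generator
                .replace("$unsetFieldName", "HASH_ALGORITHM_UNSET");

        System.out.println(code);
    }
}
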
 
     static String generateParseObjectMethod(final String modelClassName, final List<Field> fields) {
         return """
                 /**
-                 * Parses a HashObject object from JSON parse tree for object JSONParser.ObjContext. 
+                 * Parses a $modelClassName object from JSON parse tree for object JSONParser.ObjContext.
                  * Throws an UnknownFieldException wrapped in a ParseException if in strict mode ONLY.
                  *
                  * @param root The JSON parsed object tree to parse data from
@@ -84,36 +86,65 @@ static String generateParseObjectMethod(final String modelClassName, final List<
                     }
                 }
                 """
-        .replace("$modelClassName",modelClassName)
-        .replace("$fieldDefs",fields.stream().map(field -> "    %s temp_%s = %s;".formatted(field.javaFieldType(),
-                field.name(), field.javaDefault())).collect(Collectors.joining("\n")))
-        .replace("$fieldsList",fields.stream().map(field -> "temp_"+field.name()).collect(Collectors.joining(", ")))
-        .replace("$caseStatements",generateCaseStatements(fields))
-        .indent(DEFAULT_INDENT);
+                .replace("$modelClassName", modelClassName)
+                .replace(
+                        "$fieldDefs",
+                        fields.stream()
+                                .map(
+                                        field ->
+                                                "    %s temp_%s = %s;"
+                                                        .formatted(
+                                                                field.javaFieldType(),
+                                                                field.name(),
+                                                                field.javaDefault()))
+                                .collect(Collectors.joining("\n")))
+                .replace(
+                        "$fieldsList",
+                        fields.stream()
+                                .map(field -> "temp_" + field.name())
+                                .collect(Collectors.joining(", ")))
+                .replace("$caseStatements", generateCaseStatements(fields))
+                .indent(DEFAULT_INDENT);
     }
 
     /**
-     * Generate switch case statements for each tag (field & wire type pair). For repeated numeric value types we
-     * generate 2 case statements for packed and unpacked encoding.
+     * Generate switch case statements for each tag (field & wire type pair). For repeated numeric
+     * value types we generate 2 case statements for packed and unpacked encoding.
      *
      * @param fields list of all fields in record
      * @return string of case statement code
      */
     private static String generateCaseStatements(final List<Field> fields) {
         StringBuilder sb = new StringBuilder();
-        for(Field field: fields) {
+        for (Field field : fields) {
             if (field instanceof final OneOfField oneOfField) {
-                for(final Field subField: oneOfField.fields()) {
-                    sb.append("case \"" + toJsonFieldName(subField.name()) +"\" /* [" + subField.fieldNumber() + "] */ " +
-                            ": temp_" + oneOfField.name() + " = new %s<>(\n".formatted(oneOfField.className()) +
-                            oneOfField.getEnumClassRef().indent(DEFAULT_INDENT) +"."+Common.camelToUpperSnake(subField.name())+
-                            ", \n".indent(DEFAULT_INDENT));
+                for (final Field subField : oneOfField.fields()) {
+                    sb.append(
+                            "case \""
+                                    + toJsonFieldName(subField.name())
+                                    + "\" /* ["
+                                    + subField.fieldNumber()
+                                    + "] */ "
+                                    + ": temp_"
+                                    + oneOfField.name()
+                                    + " = new %s<>(\n".formatted(oneOfField.className())
+                                    + oneOfField.getEnumClassRef().indent(DEFAULT_INDENT)
+                                    + "."
+                                    + Common.camelToUpperSnake(subField.name())
+                                    + ", \n".indent(DEFAULT_INDENT));
                     generateFieldCaseStatement(sb, subField, "kvPair.value()");
                     sb.append("); break;\n");
                 }
             } else {
-                sb.append("case \"" + toJsonFieldName(field.name()) +"\" /* [" + field.fieldNumber() + "] */ " +
-                        ": temp_" + field.name()+" = ");
+                sb.append(
+                        "case \""
+                                + toJsonFieldName(field.name())
+                                + "\" /* ["
+                                + field.fieldNumber()
+                                + "] */ "
+                                + ": temp_"
+                                + field.name()
+                                + " = ");
                 generateFieldCaseStatement(sb, field, "kvPair.value()");
                 sb.append("; break;\n");
             }
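
Taken together, the string building above produces one case per JSON field name. Assuming a simple field such as `int64 value = 5;` (so the parse expression would be parseLong and the value getter is "kvPair.value()"), the emitted statement looks roughly like this stand-alone reconstruction:

/** Stand-alone sketch of the case-statement text assembled above for a plain int64 field. */
class CaseStatementSketch {
    public static void main(String[] args) {
        final StringBuilder sb = new StringBuilder();
        // Mirrors the appends in generateCaseStatements for a non-oneof field named "value", number 5.
        sb.append("case \"" + "value" + "\" /* [" + 5 + "] */ " + ": temp_" + "value" + " = ");
        // generateFieldCaseStatement would contribute the parse expression for an INT64 value.
        sb.append("parseLong(kvPair.value())");
        sb.append("; break;\n");
        System.out.print(sb); // case "value" /* [5] */ : temp_value = parseLong(kvPair.value()); break;
    }
}
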
@@ -128,15 +159,20 @@ private static String generateCaseStatements(final List<Field> fields) {
      * @param origSB StringBuilder to append code to
      * @param valueGetter normally a "kvPair.value()", but may be different e.g. for maps parsing
      */
-    private static void generateFieldCaseStatement(final StringBuilder origSB, final Field field, final String valueGetter) {
+    private static void generateFieldCaseStatement(
+            final StringBuilder origSB, final Field field, final String valueGetter) {
         final StringBuilder sb = new StringBuilder();
         if (field.repeated()) {
             if (field.type() == Field.FieldType.MESSAGE) {
-                sb.append("parseObjArray($valueGetter.arr(), "+field.messageType()+".JSON, maxDepth - 1)");
+                sb.append(
+                        "parseObjArray($valueGetter.arr(), "
+                                + field.messageType()
+                                + ".JSON, maxDepth - 1)");
             } else {
                 sb.append("$valueGetter.arr().value().stream().map(v -> ");
                 switch (field.type()) {
-                    case ENUM -> sb.append(field.messageType() + ".fromString(v.STRING().getText())");
+                    case ENUM -> sb.append(
+                            field.messageType() + ".fromString(v.STRING().getText())");
                     case INT32, UINT32, SINT32, FIXED32, SFIXED32 -> sb.append("parseInteger(v)");
                     case INT64, UINT64, SINT64, FIXED64, SFIXED64 -> sb.append("parseLong(v)");
                     case FLOAT -> sb.append("parseFloat(v)");
@@ -144,7 +180,8 @@ private static void generateFieldCaseStatement(final StringBuilder origSB, final
                     case STRING -> sb.append("unescape(v.STRING().getText())");
                     case BOOL -> sb.append("parseBoolean(v)");
                     case BYTES -> sb.append("Bytes.fromBase64(v.STRING().getText())");
-                    default -> throw new RuntimeException("Unknown field type [" + field.type() + "]");
+                    default -> throw new RuntimeException(
+                            "Unknown field type [" + field.type() + "]");
                 }
                 sb.append(").toList()");
             }
@@ -157,7 +194,8 @@ private static void generateFieldCaseStatement(final StringBuilder origSB, final
                 case "StringValue" -> sb.append("unescape($valueGetter.STRING().getText())");
                 case "BoolValue" -> sb.append("parseBoolean($valueGetter)");
                 case "BytesValue" -> sb.append("Bytes.fromBase64($valueGetter.STRING().getText())");
-                default -> throw new RuntimeException("Unknown message type [" + field.messageType() + "]");
+                default -> throw new RuntimeException(
+                        "Unknown message type [" + field.messageType() + "]");
             }
         } else if (field.type() == Field.FieldType.MAP) {
             final MapField mapField = (MapField) field;
@@ -168,27 +206,33 @@ private static void generateFieldCaseStatement(final StringBuilder origSB, final
             generateFieldCaseStatement(keySB, mapField.keyField(), "mapKV");
             generateFieldCaseStatement(valueSB, mapField.valueField(), "mapKV.value()");
 
-            sb.append("""
+            sb.append(
+                    """
                     $valueGetter.getChild(JSONParser.ObjContext.class, 0).pair().stream()
                                         .collect(Collectors.toMap(
                                             mapKV -> $mapEntryKey,
                                             new UncheckedThrowingFunction<>(mapKV -> $mapEntryValue)
                                         ))"""
-                    .replace("$mapEntryKey", keySB.toString())
-                    .replace("$mapEntryValue", valueSB.toString())
-            );
+                            .replace("$mapEntryKey", keySB.toString())
+                            .replace("$mapEntryValue", valueSB.toString()));
         } else {
             switch (field.type()) {
-                case MESSAGE -> sb.append(field.javaFieldType() + ".JSON.parse($valueGetter.getChild(JSONParser.ObjContext.class, 0), false, maxDepth - 1)");
-                case ENUM -> sb.append(field.javaFieldType() + ".fromString($valueGetter.STRING().getText())");
-                case INT32, UINT32, SINT32, FIXED32, SFIXED32 -> sb.append("parseInteger($valueGetter)");
-                case INT64, UINT64, SINT64, FIXED64, SFIXED64 -> sb.append("parseLong($valueGetter)");
+                case MESSAGE -> sb.append(
+                        field.javaFieldType()
+                                + ".JSON.parse($valueGetter.getChild(JSONParser.ObjContext.class,"
+                                + " 0), false, maxDepth - 1)");
+                case ENUM -> sb.append(
+                        field.javaFieldType() + ".fromString($valueGetter.STRING().getText())");
+                case INT32, UINT32, SINT32, FIXED32, SFIXED32 -> sb.append(
+                        "parseInteger($valueGetter)");
+                case INT64, UINT64, SINT64, FIXED64, SFIXED64 -> sb.append(
+                        "parseLong($valueGetter)");
                 case FLOAT -> sb.append("parseFloat($valueGetter)");
                 case DOUBLE -> sb.append("parseDouble($valueGetter)");
                 case STRING -> sb.append("unescape($valueGetter.STRING().getText())");
                 case BOOL -> sb.append("parseBoolean($valueGetter)");
                 case BYTES -> sb.append("Bytes.fromBase64($valueGetter.STRING().getText())");
-                default -> throw new RuntimeException("Unknown field type ["+field.type()+"]");
+                default -> throw new RuntimeException("Unknown field type [" + field.type() + "]");
             }
         }
         origSB.append(sb.toString().replace("$valueGetter", valueGetter));
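
As context for the hunk above: the generator builds each parsing expression against a literal "$valueGetter" token and only substitutes the real accessor in the final origSB.append(...) call, which is what lets the same logic serve both top-level fields ("kvPair.value()") and map entries ("mapKV.value()"). A minimal, self-contained sketch of that substitution idea follows; the class and template names are illustrative, not PBJ code.

    // Minimal sketch (names are illustrative, not PBJ code): build the expression against a
    // "$valueGetter" placeholder, then substitute the concrete accessor, as the method above does.
    public final class PlaceholderDemo {
        static String withGetter(final String template, final String valueGetter) {
            return template.replace("$valueGetter", valueGetter);
        }

        public static void main(String[] args) {
            final String template = "parseInteger($valueGetter)";
            // Top-level field: the value comes straight from the JSON key/value pair.
            System.out.println(withGetter(template, "kvPair.value()")); // parseInteger(kvPair.value())
            // Map entry value: same template, different accessor.
            System.out.println(withGetter(template, "mapKV.value()")); // parseInteger(mapKV.value())
        }
    }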
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecWriteMethodGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecWriteMethodGenerator.java
index 22a9eb8a..baa1ed53 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecWriteMethodGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/json/JsonCodecWriteMethodGenerator.java
@@ -1,37 +1,46 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl.generators.json;
 
+import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT;
+import static com.hedera.pbj.compiler.impl.generators.json.JsonCodecGenerator.toJsonFieldName;
+
 import com.hedera.pbj.compiler.impl.Common;
 import com.hedera.pbj.compiler.impl.Field;
 import com.hedera.pbj.compiler.impl.MapField;
 import com.hedera.pbj.compiler.impl.OneOfField;
 import com.hedera.pbj.compiler.impl.SingleField;
 import edu.umd.cs.findbugs.annotations.NonNull;
-
 import java.util.Comparator;
 import java.util.List;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
-import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT;
-import static com.hedera.pbj.compiler.impl.generators.json.JsonCodecGenerator.toJsonFieldName;
-
-/**
- * Code to generate the write method for Codec classes.
- */
+/** Code to generate the write method for Codec classes. */
 @SuppressWarnings("SwitchStatementWithTooFewBranches")
 final class JsonCodecWriteMethodGenerator {
 
     static String generateWriteMethod(final String modelClassName, final List<Field> fields) {
-        final List<Field> fieldsToWrite = fields.stream()
-                .flatMap(field -> field.type() == Field.FieldType.ONE_OF ? ((OneOfField)field).fields().stream() : Stream.of(field))
-                .sorted(Comparator.comparingInt(Field::fieldNumber))
-                .toList();
-        final String fieldWriteLines = fieldsToWrite.stream()
-                .map(field -> generateFieldWriteLines(field, modelClassName, "data.%s()".formatted(field.nameCamelFirstLower())))
-                .collect(Collectors.joining("\n")).indent(DEFAULT_INDENT);
+        final List<Field> fieldsToWrite =
+                fields.stream()
+                        .flatMap(
+                                field ->
+                                        field.type() == Field.FieldType.ONE_OF
+                                                ? ((OneOfField) field).fields().stream()
+                                                : Stream.of(field))
+                        .sorted(Comparator.comparingInt(Field::fieldNumber))
+                        .toList();
+        final String fieldWriteLines =
+                fieldsToWrite.stream()
+                        .map(
+                                field ->
+                                        generateFieldWriteLines(
+                                                field,
+                                                modelClassName,
+                                                "data.%s()".formatted(field.nameCamelFirstLower())))
+                        .collect(Collectors.joining("\n"))
+                        .indent(DEFAULT_INDENT);
 
-        return """     
+        return """
                 /**
                  * Returns JSON string representing an item.
                  *
@@ -60,60 +69,108 @@ public String toJSON(@NonNull $modelClass data, String indent, boolean inline) {
                     return sb.toString();
                 }
                 """
-            .replace("$modelClass", modelClassName)
-            .replace("$fieldWriteLines", fieldWriteLines)
-            .indent(DEFAULT_INDENT);
+                .replace("$modelClass", modelClassName)
+                .replace("$fieldWriteLines", fieldWriteLines)
+                .indent(DEFAULT_INDENT);
     }
 
-
     /**
      * Generate lines of code for writing field
      *
      * @param field The field to generate writing line of code for
-     * @param modelClassName The model class name for model class for message type we are generating writer for
+     * @param modelClassName The name of the model class for the message type we are generating a
+     *     writer for
      * @param getValueCode java code to get the value of field
      * @return java code to write field to output
      */
-    private static String generateFieldWriteLines(final Field field, final String modelClassName, String getValueCode) {
+    private static String generateFieldWriteLines(
+            final Field field, final String modelClassName, String getValueCode) {
         final String fieldDef = Common.camelToUpperSnake(field.name());
         final String fieldName = '\"' + toJsonFieldName(field.name()) + '\"';
-        final String basicFieldCode = generateBasicFieldLines(field, getValueCode, fieldDef, fieldName, "childIndent");
-        String prefix = "// ["+field.fieldNumber()+"] - "+field.name() + "\n";
+        final String basicFieldCode =
+                generateBasicFieldLines(field, getValueCode, fieldDef, fieldName, "childIndent");
+        String prefix = "// [" + field.fieldNumber() + "] - " + field.name() + "\n";
 
         if (field.parent() != null) {
             final OneOfField oneOfField = field.parent();
-            final String oneOfType = modelClassName+"."+oneOfField.nameCamelFirstUpper()+"OneOfType";
-            prefix += "if (data."+oneOfField.nameCamelFirstLower()+"().kind() == "+ oneOfType +"."+
-                    Common.camelToUpperSnake(field.name())+")";
+            final String oneOfType =
+                    modelClassName + "." + oneOfField.nameCamelFirstUpper() + "OneOfType";
+            prefix +=
+                    "if (data."
+                            + oneOfField.nameCamelFirstLower()
+                            + "().kind() == "
+                            + oneOfType
+                            + "."
+                            + Common.camelToUpperSnake(field.name())
+                            + ")";
             prefix += "\n";
             return prefix + "fieldLines.add(" + basicFieldCode + ");";
         } else {
             if (field.repeated()) {
-                return prefix + "if (!data." + field.nameCamelFirstLower() + "().isEmpty()) fieldLines.add(" + basicFieldCode + ");";
+                return prefix
+                        + "if (!data."
+                        + field.nameCamelFirstLower()
+                        + "().isEmpty()) fieldLines.add("
+                        + basicFieldCode
+                        + ");";
             } else if (field.type() == Field.FieldType.BYTES) {
-                return prefix + "if (data." + field.nameCamelFirstLower() + "() != " + field.javaDefault() +
-                        " && data." + field.nameCamelFirstLower() + "() != null" +
-                        " && data." + field.nameCamelFirstLower() + "().length() > 0) fieldLines.add(" + basicFieldCode + ");";
+                return prefix
+                        + "if (data."
+                        + field.nameCamelFirstLower()
+                        + "() != "
+                        + field.javaDefault()
+                        + " && data."
+                        + field.nameCamelFirstLower()
+                        + "() != null"
+                        + " && data."
+                        + field.nameCamelFirstLower()
+                        + "().length() > 0) fieldLines.add("
+                        + basicFieldCode
+                        + ");";
             } else if (field.type() == Field.FieldType.MAP) {
-                return prefix + "if (data." + field.nameCamelFirstLower() + "() != " + field.javaDefault() +
-                        " && !data." + field.nameCamelFirstLower() + "().isEmpty()) fieldLines.add(" + basicFieldCode + ");";
+                return prefix
+                        + "if (data."
+                        + field.nameCamelFirstLower()
+                        + "() != "
+                        + field.javaDefault()
+                        + " && !data."
+                        + field.nameCamelFirstLower()
+                        + "().isEmpty()) fieldLines.add("
+                        + basicFieldCode
+                        + ");";
             } else {
-                return prefix + "if (data." + field.nameCamelFirstLower() + "() != " + field.javaDefault() + ") fieldLines.add(" + basicFieldCode + ");";
+                return prefix
+                        + "if (data."
+                        + field.nameCamelFirstLower()
+                        + "() != "
+                        + field.javaDefault()
+                        + ") fieldLines.add("
+                        + basicFieldCode
+                        + ");";
             }
         }
     }
 
     @NonNull
-    private static String generateBasicFieldLines(Field field, String getValueCode, String fieldDef, String fieldName, String childIndent) {
+    private static String generateBasicFieldLines(
+            Field field,
+            String getValueCode,
+            String fieldDef,
+            String fieldName,
+            String childIndent) {
         if (field.optionalValueType()) {
             return switch (field.messageType()) {
-                case "StringValue", "BoolValue", "Int32Value",
-                        "UInt32Value", "FloatValue",
-                        "DoubleValue", "BytesValue" -> "field(%s, %s)"
-                        .formatted(fieldName, getValueCode);
+                case "StringValue",
+                        "BoolValue",
+                        "Int32Value",
+                        "UInt32Value",
+                        "FloatValue",
+                        "DoubleValue",
+                        "BytesValue" -> "field(%s, %s)".formatted(fieldName, getValueCode);
                 case "Int64Value", "UInt64Value" -> "field(%s, %s, true)"
                         .formatted(fieldName, getValueCode);
-                default -> throw new UnsupportedOperationException("Unhandled optional message type:" + field.messageType());
+                default -> throw new UnsupportedOperationException(
+                        "Unhandled optional message type:" + field.messageType());
             };
         } else if (field.repeated()) {
             return switch (field.type()) {
@@ -121,8 +178,12 @@ private static String generateBasicFieldLines(Field field, String getValueCode,
                         .replace("$fieldName", fieldName)
                         .replace("$fieldDef", fieldDef)
                         .replace("$valueCode", getValueCode)
-                        .replace("$codec", ((SingleField) field).messageTypeModelPackage() + "." +
-                                Common.capitalizeFirstLetter(field.messageType()) + ".JSON");
+                        .replace(
+                                "$codec",
+                                ((SingleField) field).messageTypeModelPackage()
+                                        + "."
+                                        + Common.capitalizeFirstLetter(field.messageType())
+                                        + ".JSON");
                 default -> "arrayField($fieldName, $fieldDef, $valueCode)"
                         .replace("$fieldName", fieldName)
                         .replace("$fieldDef", fieldDef)
@@ -130,16 +191,17 @@ private static String generateBasicFieldLines(Field field, String getValueCode,
             };
         } else if (field.type() == Field.FieldType.MAP) {
             final MapField mapField = (MapField) field;
-            final String vComposerMethod = generateBasicFieldLines(
-                    mapField.valueField(),
-                    "v",
-                    Common.camelToUpperSnake(mapField.valueField().name()),
-                    "n",
-                    "indent"
-            );
+            final String vComposerMethod =
+                    generateBasicFieldLines(
+                            mapField.valueField(),
+                            "v",
+                            Common.camelToUpperSnake(mapField.valueField().name()),
+                            "n",
+                            "indent");
             return "field(%s, %s, $kEncoder, $vComposer)"
                     .formatted(fieldName, getValueCode)
-                    // Maps in protobuf can only have simple scalar and not floating keys, so toString should do a good job.
+                    // Map keys in protobuf can only be simple scalar types (never floating point),
+                    // so toString should do a good job.
                     // Also see https://protobuf.dev/programming-guides/proto3/#json
                     .replace("$kEncoder", "k -> escape(k.toString())")
                     .replace("$vComposer", "(n, v) -> " + vComposerMethod);
@@ -154,10 +216,13 @@ private static String generateBasicFieldLines(Field field, String getValueCode,
                         .replace("$fieldName", fieldName)
                         .replace("$fieldDef", fieldDef)
                         .replace("$valueCode", getValueCode)
-                        .replace("$codec", ((SingleField) field).messageTypeModelPackage() + "." +
-                                Common.capitalizeFirstLetter(field.messageType()) + ".JSON");
-                default -> "field(%s, %s)"
-                        .formatted(fieldName, getValueCode);
+                        .replace(
+                                "$codec",
+                                ((SingleField) field).messageTypeModelPackage()
+                                        + "."
+                                        + Common.capitalizeFirstLetter(field.messageType())
+                                        + ".JSON");
+                default -> "field(%s, %s)".formatted(fieldName, getValueCode);
             };
         }
     }
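
The map-composer comment above leans on the proto3 JSON rule that map keys are always emitted as JSON strings (keys can only be integral or string scalars), which is why the generated key encoder is simply "k -> escape(k.toString())". A small stand-alone sketch of what that looks like for a long-keyed map follows; the class and data are illustrative only.

    import java.util.Map;
    import java.util.stream.Collectors;

    // Illustrative only: per the proto3 JSON mapping, map keys are always written as JSON strings.
    public final class JsonMapKeyDemo {
        public static void main(String[] args) {
            final Map<Long, String> names = Map.of(7L, "seven", 42L, "forty-two"); // hypothetical data
            final String json = names.entrySet().stream()
                    .map(e -> "\"" + e.getKey() + "\": \"" + e.getValue() + "\"")
                    .collect(Collectors.joining(", ", "{", "}"));
            System.out.println(json); // e.g. {"42": "forty-two", "7": "seven"}
        }
    }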
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecFastEqualsMethodGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecFastEqualsMethodGenerator.java
index 2c4312d3..65bd3e4e 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecFastEqualsMethodGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecFastEqualsMethodGenerator.java
@@ -4,12 +4,11 @@
 import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT;
 
 import com.hedera.pbj.compiler.impl.Field;
-
 import java.util.List;
 
 /**
- * Code to generate the fast equals method for Codec classes. The idea of fast equals is to parse and compare at same
- * time and fail fast as soon as parsed bytes do not match.
+ * Code to generate the fast equals method for Codec classes. The idea of fast equals is to parse
+ * and compare at the same time and fail fast as soon as the parsed bytes do not match.
  */
 @SuppressWarnings("unused")
 class CodecFastEqualsMethodGenerator {
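
To make the fail-fast idea in the class comment concrete, here is a minimal, hypothetical sketch (not the generated code): check each decoded field against the expected object as soon as it is available and stop at the first mismatch, instead of building a full record and calling equals() afterwards.

    import java.util.Objects;

    // Hypothetical sketch of the fail-fast idea, not PBJ's generated fastEquals implementation.
    public final class FastEqualsIdea {
        record Expected(int id, String name) {}

        // decodedId / decodedName stand in for values read incrementally from the input.
        static boolean fastEquals(final Expected expected, final int decodedId, final String decodedName) {
            if (expected.id() != decodedId) {
                return false; // fail fast: later fields never get parsed or compared
            }
            return Objects.equals(expected.name(), decodedName);
        }
    }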
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecGenerator.java
index abb55a19..3ce69dfe 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecGenerator.java
@@ -4,59 +4,66 @@
 import com.hedera.pbj.compiler.impl.*;
 import com.hedera.pbj.compiler.impl.generators.Generator;
 import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser;
-
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.util.*;
 import java.util.stream.Collectors;
 
-/**
- * Code generator that parses protobuf files and generates writers for each message type.
- */
+/** Code generator that parses protobuf files and generates writers for each message type. */
 @SuppressWarnings("DuplicatedCode")
 public final class CodecGenerator implements Generator {
 
-	/**
-	 * {@inheritDoc}
-	 */
-	public void generate(Protobuf3Parser.MessageDefContext msgDef, final File destinationSrcDir,
-						 File destinationTestSrcDir, final ContextualLookupHelper lookupHelper) throws IOException {
-		final String modelClassName = lookupHelper.getUnqualifiedClassForMessage(FileType.MODEL, msgDef);
-		final String codecClassName = lookupHelper.getUnqualifiedClassForMessage(FileType.CODEC, msgDef);
-		final String codecPackage = lookupHelper.getPackageForMessage(FileType.CODEC, msgDef);
-		final File javaFile = Common.getJavaFile(destinationSrcDir, codecPackage, codecClassName);
+    /** {@inheritDoc} */
+    public void generate(
+            Protobuf3Parser.MessageDefContext msgDef,
+            final File destinationSrcDir,
+            File destinationTestSrcDir,
+            final ContextualLookupHelper lookupHelper)
+            throws IOException {
+        final String modelClassName =
+                lookupHelper.getUnqualifiedClassForMessage(FileType.MODEL, msgDef);
+        final String codecClassName =
+                lookupHelper.getUnqualifiedClassForMessage(FileType.CODEC, msgDef);
+        final String codecPackage = lookupHelper.getPackageForMessage(FileType.CODEC, msgDef);
+        final File javaFile = Common.getJavaFile(destinationSrcDir, codecPackage, codecClassName);
 
-		final List<Field> fields = new ArrayList<>();
-		final Set<String> imports = new TreeSet<>();
-		imports.add(lookupHelper.getPackageForMessage(FileType.MODEL, msgDef));
-		imports.add(lookupHelper.getPackageForMessage(FileType.SCHEMA, msgDef));
+        final List<Field> fields = new ArrayList<>();
+        final Set<String> imports = new TreeSet<>();
+        imports.add(lookupHelper.getPackageForMessage(FileType.MODEL, msgDef));
+        imports.add(lookupHelper.getPackageForMessage(FileType.SCHEMA, msgDef));
 
-		for(var item: msgDef.messageBody().messageElement()) {
-			if (item.messageDef() != null) { // process sub messages
-				generate(item.messageDef(), destinationSrcDir, destinationTestSrcDir, lookupHelper);
-			} else if (item.oneof() != null) { // process one ofs
-				final var field = new OneOfField(item.oneof(), modelClassName, lookupHelper);
-				fields.add(field);
-				field.addAllNeededImports(imports, true, true, false);
-			} else if (item.mapField() != null) { // process map fields
-				final MapField field = new MapField(item.mapField(), lookupHelper);
-				fields.add(field);
-				field.addAllNeededImports(imports, true, true, false);
-			} else if (item.field() != null && item.field().fieldName() != null) {
-				final var field = new SingleField(item.field(), lookupHelper);
-				fields.add(field);
-				field.addAllNeededImports(imports, true, true, false);
-			} else if (item.reserved() == null && item.optionStatement() == null) {
-				System.err.println("WriterGenerator Warning - Unknown element: "+item+" -- "+item.getText());
-			}
-		}
-		final String writeMethod = CodecWriteMethodGenerator.generateWriteMethod(modelClassName, fields);
+        for (var item : msgDef.messageBody().messageElement()) {
+            if (item.messageDef() != null) { // process sub messages
+                generate(item.messageDef(), destinationSrcDir, destinationTestSrcDir, lookupHelper);
+            } else if (item.oneof() != null) { // process one ofs
+                final var field = new OneOfField(item.oneof(), modelClassName, lookupHelper);
+                fields.add(field);
+                field.addAllNeededImports(imports, true, true, false);
+            } else if (item.mapField() != null) { // process map fields
+                final MapField field = new MapField(item.mapField(), lookupHelper);
+                fields.add(field);
+                field.addAllNeededImports(imports, true, true, false);
+            } else if (item.field() != null && item.field().fieldName() != null) {
+                final var field = new SingleField(item.field(), lookupHelper);
+                fields.add(field);
+                field.addAllNeededImports(imports, true, true, false);
+            } else if (item.reserved() == null && item.optionStatement() == null) {
+                System.err.println(
+                        "WriterGenerator Warning - Unknown element: "
+                                + item
+                                + " -- "
+                                + item.getText());
+            }
+        }
+        final String writeMethod =
+                CodecWriteMethodGenerator.generateWriteMethod(modelClassName, fields);
 
-		try (FileWriter javaWriter = new FileWriter(javaFile)) {
-			javaWriter.write("""
+        try (FileWriter javaWriter = new FileWriter(javaFile)) {
+            javaWriter.write(
+                    """
 					package $package;
-									
+
 					import com.hedera.pbj.runtime.*;
 					import com.hedera.pbj.runtime.io.*;
 					import com.hedera.pbj.runtime.io.buffer.*;
@@ -66,14 +73,14 @@ public void generate(Protobuf3Parser.MessageDefContext msgDef, final File destin
 					import java.nio.charset.*;
 					import java.util.*;
 					import edu.umd.cs.findbugs.annotations.NonNull;
-					
+
 					import $qualifiedModelClass;
 					$imports
 					import static $schemaClass.*;
 					import static com.hedera.pbj.runtime.ProtoWriterTools.*;
 					import static com.hedera.pbj.runtime.ProtoParserTools.*;
 					import static com.hedera.pbj.runtime.ProtoConstants.*;
-				
+
 					/**
 					 * Protobuf Codec for $modelClass model object. Generated based on protobuf schema.
 					 */
@@ -86,24 +93,48 @@ public final class $codecClass implements Codec<$modelClass> {
 					    $fastEqualsMethod
 					}
 					"""
-					.replace("$package", codecPackage)
-					.replace("$imports", imports.isEmpty() ? "" : imports.stream()
-							.filter(input -> !input.equals(codecPackage))
-							.collect(Collectors.joining(".*;\nimport ","\nimport ",".*;\n")))
-					.replace("$schemaClass", lookupHelper.getFullyQualifiedMessageClassname(FileType.SCHEMA, msgDef))
-					.replace("$modelClass", modelClassName)
-					.replace("$qualifiedModelClass", lookupHelper.getFullyQualifiedMessageClassname(FileType.MODEL, msgDef))
-					.replace("$codecClass", codecClassName)
-					.replace("$unsetOneOfConstants", CodecParseMethodGenerator.generateUnsetOneOfConstants(fields))
-					.replace("$parseMethod", CodecParseMethodGenerator.generateParseMethod(modelClassName, fields))
-					.replace("$writeMethod", writeMethod)
-					.replace("$measureDataMethod", CodecMeasureDataMethodGenerator.generateMeasureMethod(modelClassName, fields))
-					.replace("$measureRecordMethod", CodecMeasureRecordMethodGenerator.generateMeasureMethod(modelClassName, fields))
-					.replace("$fastEqualsMethod", CodecFastEqualsMethodGenerator.generateFastEqualsMethod(modelClassName, fields))
-			);
-		}
-	}
-
-
-
+                            .replace("$package", codecPackage)
+                            .replace(
+                                    "$imports",
+                                    imports.isEmpty()
+                                            ? ""
+                                            : imports.stream()
+                                                    .filter(input -> !input.equals(codecPackage))
+                                                    .collect(
+                                                            Collectors.joining(
+                                                                    ".*;\nimport ",
+                                                                    "\nimport ",
+                                                                    ".*;\n")))
+                            .replace(
+                                    "$schemaClass",
+                                    lookupHelper.getFullyQualifiedMessageClassname(
+                                            FileType.SCHEMA, msgDef))
+                            .replace("$modelClass", modelClassName)
+                            .replace(
+                                    "$qualifiedModelClass",
+                                    lookupHelper.getFullyQualifiedMessageClassname(
+                                            FileType.MODEL, msgDef))
+                            .replace("$codecClass", codecClassName)
+                            .replace(
+                                    "$unsetOneOfConstants",
+                                    CodecParseMethodGenerator.generateUnsetOneOfConstants(fields))
+                            .replace(
+                                    "$parseMethod",
+                                    CodecParseMethodGenerator.generateParseMethod(
+                                            modelClassName, fields))
+                            .replace("$writeMethod", writeMethod)
+                            .replace(
+                                    "$measureDataMethod",
+                                    CodecMeasureDataMethodGenerator.generateMeasureMethod(
+                                            modelClassName, fields))
+                            .replace(
+                                    "$measureRecordMethod",
+                                    CodecMeasureRecordMethodGenerator.generateMeasureMethod(
+                                            modelClassName, fields))
+                            .replace(
+                                    "$fastEqualsMethod",
+                                    CodecFastEqualsMethodGenerator.generateFastEqualsMethod(
+                                            modelClassName, fields)));
+        }
+    }
 }
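
One detail in the template above that is easy to misread is the three-argument Collectors.joining used for "$imports": the prefix, delimiter, and suffix together turn a set of package names into complete import lines. A tiny stand-alone sketch with hypothetical package names shows the exact output shape.

    import java.util.List;
    import java.util.stream.Collectors;

    // Stand-alone sketch with hypothetical package names: the same prefix/delimiter/suffix
    // combination used for "$imports" turns each package into an "import <pkg>.*;" line.
    public final class ImportJoinDemo {
        public static void main(String[] args) {
            final List<String> packages = List.of("com.example.model", "com.example.schema");
            final String block = packages.stream()
                    .collect(Collectors.joining(".*;\nimport ", "\nimport ", ".*;\n"));
            System.out.print(block);
            // Prints (after a leading newline):
            // import com.example.model.*;
            // import com.example.schema.*;
        }
    }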
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureDataMethodGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureDataMethodGenerator.java
index 011a05f1..64be0b85 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureDataMethodGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureDataMethodGenerator.java
@@ -4,11 +4,11 @@
 import static com.hedera.pbj.compiler.impl.Common.DEFAULT_INDENT;
 
 import com.hedera.pbj.compiler.impl.Field;
-
 import java.util.List;
 
 /**
- * Code to generate the measure data method for Codec classes. This measures the size of bytes of data in the input to be parsed.
+ * Code to generate the measure data method for Codec classes. This measures the size in bytes of
+ * the data in the input to be parsed.
  */
 @SuppressWarnings("unused")
 class CodecMeasureDataMethodGenerator {
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureRecordMethodGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureRecordMethodGenerator.java
index a263b38d..04941211 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureRecordMethodGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecMeasureRecordMethodGenerator.java
@@ -8,7 +8,6 @@
 import com.hedera.pbj.compiler.impl.MapField;
 import com.hedera.pbj.compiler.impl.OneOfField;
 import com.hedera.pbj.compiler.impl.SingleField;
-
 import java.util.Comparator;
 import java.util.List;
 import java.util.function.Function;
@@ -16,17 +15,18 @@
 import java.util.stream.Stream;
 
 /**
- * Code to generate the measure record method for Codec classes. This measures the number of bytes that would be
- * written if the record was serialized in protobuf format.
+ * Code to generate the measure record method for Codec classes. This measures the number of bytes
+ * that would be written if the record was serialized in protobuf format.
  */
 class CodecMeasureRecordMethodGenerator {
 
     static String generateMeasureMethod(final String modelClassName, final List<Field> fields) {
-        final String fieldSizeOfLines = buildFieldSizeOfLines(
-                modelClassName,
-                fields,
-                field -> "data.%s()".formatted(field.nameCamelFirstLower()),
-                true);
+        final String fieldSizeOfLines =
+                buildFieldSizeOfLines(
+                        modelClassName,
+                        fields,
+                        field -> "data.%s()".formatted(field.nameCamelFirstLower()),
+                        true);
         return """
                 /**
                  * Compute number of bytes that would be written when calling {@code write()} method.
@@ -51,77 +51,110 @@ static String buildFieldSizeOfLines(
             final Function<Field, String> getValueBuilder,
             boolean skipDefault) {
         return fields.stream()
-                .flatMap(field -> field.type() == Field.FieldType.ONE_OF ? ((OneOfField)field).fields().stream() : Stream.of(field))
+                .flatMap(
+                        field ->
+                                field.type() == Field.FieldType.ONE_OF
+                                        ? ((OneOfField) field).fields().stream()
+                                        : Stream.of(field))
                 .sorted(Comparator.comparingInt(Field::fieldNumber))
-                .map(field -> generateFieldSizeOfLines(field, modelClassName, getValueBuilder.apply(field), skipDefault))
-                .collect(Collectors.joining("\n")).indent(DEFAULT_INDENT);
+                .map(
+                        field ->
+                                generateFieldSizeOfLines(
+                                        field,
+                                        modelClassName,
+                                        getValueBuilder.apply(field),
+                                        skipDefault))
+                .collect(Collectors.joining("\n"))
+                .indent(DEFAULT_INDENT);
     }
 
     /**
-     * Generate lines of code for measure method, that measure the size of each field and add to "size" variable.
+     * Generate lines of code for the measure method that measure the size of each field and add it
+     *     to the "size" variable.
      *
      * @param field The field to generate size of line
-     * @param modelClassName The model class name for model class for message type we are generating writer for
+     * @param modelClassName The name of the model class for the message type we are generating a
+     *     writer for
      * @param getValueCode java code to get the value of field
      * @param skipDefault true if default value of the field should result in size zero
      * @return java code for adding fields size to "size" variable
      */
-    private static String generateFieldSizeOfLines(final Field field, final String modelClassName, String getValueCode, boolean skipDefault) {
+    private static String generateFieldSizeOfLines(
+            final Field field,
+            final String modelClassName,
+            String getValueCode,
+            boolean skipDefault) {
         final String fieldDef = Common.camelToUpperSnake(field.name());
-        String prefix = "// ["+field.fieldNumber()+"] - "+field.name();
+        String prefix = "// [" + field.fieldNumber() + "] - " + field.name();
         prefix += "\n";
 
         if (field.parent() != null) {
             final OneOfField oneOfField = field.parent();
-            final String oneOfType = modelClassName+"."+oneOfField.nameCamelFirstUpper()+"OneOfType";
-            getValueCode = "data."+oneOfField.nameCamelFirstLower()+"().as()";
-            prefix += "if (data."+oneOfField.nameCamelFirstLower()+"().kind() == "+ oneOfType +"."+
-                    Common.camelToUpperSnake(field.name())+")";
+            final String oneOfType =
+                    modelClassName + "." + oneOfField.nameCamelFirstUpper() + "OneOfType";
+            getValueCode = "data." + oneOfField.nameCamelFirstLower() + "().as()";
+            prefix +=
+                    "if (data."
+                            + oneOfField.nameCamelFirstLower()
+                            + "().kind() == "
+                            + oneOfType
+                            + "."
+                            + Common.camelToUpperSnake(field.name())
+                            + ")";
             prefix += "\n";
         }
 
         final String writeMethodName = field.methodNameType();
         if (field.optionalValueType()) {
-            return prefix + switch (field.messageType()) {
-                case "StringValue" -> "size += sizeOfOptionalString(%s, %s);"
-                        .formatted(fieldDef,getValueCode);
-                case "BoolValue" -> "size += sizeOfOptionalBoolean(%s, %s);"
-                        .formatted(fieldDef, getValueCode);
-                case "Int32Value","UInt32Value" -> "size += sizeOfOptionalInteger(%s, %s);"
-                        .formatted(fieldDef, getValueCode);
-                case "Int64Value","UInt64Value" -> "size += sizeOfOptionalLong(%s, %s);"
-                        .formatted(fieldDef, getValueCode);
-                case "FloatValue" -> "size += sizeOfOptionalFloat(%s, %s);"
-                        .formatted(fieldDef, getValueCode);
-                case "DoubleValue" -> "size += sizeOfOptionalDouble(%s, %s);"
-                        .formatted(fieldDef, getValueCode);
-                case "BytesValue" -> "size += sizeOfOptionalBytes(%s, %s);"
-                        .formatted(fieldDef, getValueCode);
-                default -> throw new UnsupportedOperationException("Unhandled optional message type:"+field.messageType());
-            };
+            return prefix
+                    + switch (field.messageType()) {
+                        case "StringValue" -> "size += sizeOfOptionalString(%s, %s);"
+                                .formatted(fieldDef, getValueCode);
+                        case "BoolValue" -> "size += sizeOfOptionalBoolean(%s, %s);"
+                                .formatted(fieldDef, getValueCode);
+                        case "Int32Value", "UInt32Value" -> "size += sizeOfOptionalInteger(%s, %s);"
+                                .formatted(fieldDef, getValueCode);
+                        case "Int64Value", "UInt64Value" -> "size += sizeOfOptionalLong(%s, %s);"
+                                .formatted(fieldDef, getValueCode);
+                        case "FloatValue" -> "size += sizeOfOptionalFloat(%s, %s);"
+                                .formatted(fieldDef, getValueCode);
+                        case "DoubleValue" -> "size += sizeOfOptionalDouble(%s, %s);"
+                                .formatted(fieldDef, getValueCode);
+                        case "BytesValue" -> "size += sizeOfOptionalBytes(%s, %s);"
+                                .formatted(fieldDef, getValueCode);
+                        default -> throw new UnsupportedOperationException(
+                                "Unhandled optional message type:" + field.messageType());
+                    };
         } else if (field.repeated()) {
-            return prefix + switch (field.type()) {
-                case ENUM -> "size += sizeOfEnumList(%s, %s);"
-                        .formatted(fieldDef, getValueCode);
-                case MESSAGE -> "size += sizeOfMessageList($fieldDef, $valueCode, $codec::measureRecord);"
-                        .replace("$fieldDef", fieldDef)
-                        .replace("$valueCode", getValueCode)
-                        .replace("$codec", ((SingleField) field).messageTypeModelPackage() + "." +
-                                Common.capitalizeFirstLetter(field.messageType()) + ".PROTOBUF");
-                default -> "size += sizeOf%sList(%s, %s);"
-                        .formatted(writeMethodName, fieldDef, getValueCode);
-            };
+            return prefix
+                    + switch (field.type()) {
+                        case ENUM -> "size += sizeOfEnumList(%s, %s);"
+                                .formatted(fieldDef, getValueCode);
+                        case MESSAGE -> "size += sizeOfMessageList($fieldDef, $valueCode, $codec::measureRecord);"
+                                .replace("$fieldDef", fieldDef)
+                                .replace("$valueCode", getValueCode)
+                                .replace(
+                                        "$codec",
+                                        ((SingleField) field).messageTypeModelPackage()
+                                                + "."
+                                                + Common.capitalizeFirstLetter(field.messageType())
+                                                + ".PROTOBUF");
+                        default -> "size += sizeOf%sList(%s, %s);"
+                                .formatted(writeMethodName, fieldDef, getValueCode);
+                    };
         } else if (field.type() == Field.FieldType.MAP) {
             final MapField mapField = (MapField) field;
             final List<Field> mapEntryFields = List.of(mapField.keyField(), mapField.valueField());
-            final Function<Field, String> getValueBuilder = mapEntryField ->
-                    mapEntryField == mapField.keyField() ? "k" : (mapEntryField == mapField.valueField() ? "v" : null);
-            final String fieldSizeOfLines = CodecMeasureRecordMethodGenerator.buildFieldSizeOfLines(
-                    field.name(),
-                    mapEntryFields,
-                    getValueBuilder,
-                    false);
-            return prefix + """
+            final Function<Field, String> getValueBuilder =
+                    mapEntryField ->
+                            mapEntryField == mapField.keyField()
+                                    ? "k"
+                                    : (mapEntryField == mapField.valueField() ? "v" : null);
+            final String fieldSizeOfLines =
+                    CodecMeasureRecordMethodGenerator.buildFieldSizeOfLines(
+                            field.name(), mapEntryFields, getValueBuilder, false);
+            return prefix
+                    + """
                         if (!$map.isEmpty()) {
                             final Pbj$javaFieldType pbjMap = (Pbj$javaFieldType) $map;
                             final int mapSize = pbjMap.size();
@@ -135,31 +168,49 @@ private static String generateFieldSizeOfLines(final Field field, final String m
                             }
                         }
                         """
-                    .replace("$fieldDef", fieldDef)
-                    .replace("$map", getValueCode)
-                    .replace("$javaFieldType", mapField.javaFieldType())
-                    .replace("$K", mapField.keyField().type().boxedType)
-                    .replace("$V", mapField.valueField().type() == Field.FieldType.MESSAGE ? ((SingleField)mapField.valueField()).messageType() : mapField.valueField().type().boxedType)
-                    .replace("$fieldSizeOfLines", fieldSizeOfLines.indent(DEFAULT_INDENT))
-                    ;
+                            .replace("$fieldDef", fieldDef)
+                            .replace("$map", getValueCode)
+                            .replace("$javaFieldType", mapField.javaFieldType())
+                            .replace("$K", mapField.keyField().type().boxedType)
+                            .replace(
+                                    "$V",
+                                    mapField.valueField().type() == Field.FieldType.MESSAGE
+                                            ? ((SingleField) mapField.valueField()).messageType()
+                                            : mapField.valueField().type().boxedType)
+                            .replace("$fieldSizeOfLines", fieldSizeOfLines.indent(DEFAULT_INDENT));
         } else {
-            return prefix + switch(field.type()) {
-                case ENUM -> "size += sizeOfEnum(%s, %s);"
-                        .formatted(fieldDef, getValueCode);
-                case STRING -> "size += sizeOfString(%s, %s, %s);"
-                        .formatted(fieldDef, getValueCode, skipDefault);
-                case MESSAGE -> "size += sizeOfMessage($fieldDef, $valueCode, $codec::measureRecord);"
-                        .replace("$fieldDef", fieldDef)
-                        .replace("$valueCode", getValueCode)
-                        .replace("$codec", ((SingleField)field).messageTypeModelPackage() + "." +
-                                Common.capitalizeFirstLetter(field.messageType())+ ".PROTOBUF");
-                case BOOL -> "size += sizeOfBoolean(%s, %s, %s);"
-                        .formatted(fieldDef, getValueCode, skipDefault);
-                case INT32, UINT32, SINT32, FIXED32, SFIXED32, INT64, SINT64, UINT64, FIXED64, SFIXED64, BYTES -> "size += sizeOf%s(%s, %s, %s);"
-                        .formatted(writeMethodName, fieldDef, getValueCode, skipDefault);
-                default -> "size += sizeOf%s(%s, %s);"
-                        .formatted(writeMethodName, fieldDef, getValueCode);
-            };
+            return prefix
+                    + switch (field.type()) {
+                        case ENUM -> "size += sizeOfEnum(%s, %s);"
+                                .formatted(fieldDef, getValueCode);
+                        case STRING -> "size += sizeOfString(%s, %s, %s);"
+                                .formatted(fieldDef, getValueCode, skipDefault);
+                        case MESSAGE -> "size += sizeOfMessage($fieldDef, $valueCode, $codec::measureRecord);"
+                                .replace("$fieldDef", fieldDef)
+                                .replace("$valueCode", getValueCode)
+                                .replace(
+                                        "$codec",
+                                        ((SingleField) field).messageTypeModelPackage()
+                                                + "."
+                                                + Common.capitalizeFirstLetter(field.messageType())
+                                                + ".PROTOBUF");
+                        case BOOL -> "size += sizeOfBoolean(%s, %s, %s);"
+                                .formatted(fieldDef, getValueCode, skipDefault);
+                        case INT32,
+                                UINT32,
+                                SINT32,
+                                FIXED32,
+                                SFIXED32,
+                                INT64,
+                                SINT64,
+                                UINT64,
+                                FIXED64,
+                                SFIXED64,
+                                BYTES -> "size += sizeOf%s(%s, %s, %s);"
+                                .formatted(writeMethodName, fieldDef, getValueCode, skipDefault);
+                        default -> "size += sizeOf%s(%s, %s);"
+                                .formatted(writeMethodName, fieldDef, getValueCode);
+                    };
         }
     }
 }
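
The measure methods above compute a serialized size without writing any bytes. As background, here is a minimal sketch of the standard protobuf varint length calculation (7 payload bits per byte); it is illustrative arithmetic only, not PBJ's ProtoWriterTools API.

    // Illustrative only (standard protobuf varint layout, 7 payload bits per byte).
    public final class VarIntSizeDemo {
        static int sizeOfUnsignedVarInt(long value) {
            int bytes = 1;
            while ((value & ~0x7FL) != 0) { // more than 7 bits of payload remain
                bytes++;
                value >>>= 7;
            }
            return bytes;
        }

        public static void main(String[] args) {
            System.out.println(sizeOfUnsignedVarInt(1));        // 1
            System.out.println(sizeOfUnsignedVarInt(300));      // 2
            System.out.println(sizeOfUnsignedVarInt(1L << 28)); // 5
        }
    }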
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecParseMethodGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecParseMethodGenerator.java
index 4f69601b..d706fb5a 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecParseMethodGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecParseMethodGenerator.java
@@ -9,39 +9,41 @@
 import com.hedera.pbj.compiler.impl.OneOfField;
 import com.hedera.pbj.compiler.impl.PbjCompilerException;
 import edu.umd.cs.findbugs.annotations.NonNull;
-
 import java.util.List;
 import java.util.stream.Collectors;
 
-/**
- * Code to generate the parse method for Codec classes.
- */
+/** Code to generate the parse method for Codec classes. */
 @SuppressWarnings("StringConcatenationInsideStringBufferAppend")
 class CodecParseMethodGenerator {
 
     /**
-     * Because all UNSET OneOf values are the same and we use them often we create a static constant for them and just
-     * reuse it throughout codec code.
+     * Because all UNSET OneOf values are the same and we use them often, we create a static
+     * constant for them and reuse it throughout the codec code.
      *
      * @param fields the fields to generate for
      * @return code for constants
      */
     static String generateUnsetOneOfConstants(final List<Field> fields) {
-        return "\n" + fields.stream()
-            .filter(f -> f instanceof OneOfField)
-            .map(f -> {
-                final OneOfField field = (OneOfField)f;
-                return """
+        return "\n"
+                + fields.stream()
+                        .filter(f -> f instanceof OneOfField)
+                        .map(
+                                f -> {
+                                    final OneOfField field = (OneOfField) f;
+                                    return """
                            /** Constant for an unset oneof for $fieldName */
                            public static final $className<$enum> $unsetFieldName = new $className<>($enum.UNSET,null);
                        """
-                        .replace("$className", field.className())
-                        .replace("$enum", field.getEnumClassRef())
-                        .replace("$fieldName", field.name())
-                        .replace("$unsetFieldName", Common.camelToUpperSnake(field.name())+"_UNSET")
-                        .replace("$unsetFieldName", field.getEnumClassRef());
-            })
-            .collect(Collectors.joining("\n"));
+                                            .replace("$className", field.className())
+                                            .replace("$enum", field.getEnumClassRef())
+                                            .replace("$fieldName", field.name())
+                                            .replace(
+                                                    "$unsetFieldName",
+                                                    Common.camelToUpperSnake(field.name())
+                                                            + "_UNSET")
+                                            .replace("$unsetFieldName", field.getEnumClassRef());
+                                })
+                        .collect(Collectors.joining("\n"));
     }
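
To show what the template in generateUnsetOneOfConstants expands to, here is a small runnable sketch that performs the same placeholder substitution with hypothetical inputs ("OneOf", "ValueOneOfType", "value"); the real generator takes these from field.className(), field.getEnumClassRef(), and field.name(), and uses Common.camelToUpperSnake rather than the simple toUpperCase stand-in below.

    import java.util.Locale;

    // Runnable sketch with hypothetical inputs; not the generator itself.
    public final class UnsetConstantDemo {
        public static void main(String[] args) {
            final String template =
                    "/** Constant for an unset oneof for $fieldName */\n"
                    + "public static final $className<$enum> $unsetFieldName = new $className<>($enum.UNSET,null);";
            final String generated = template
                    .replace("$className", "OneOf")
                    .replace("$enum", "ValueOneOfType")
                    .replace("$fieldName", "value")
                    .replace("$unsetFieldName", "value".toUpperCase(Locale.ROOT) + "_UNSET");
            System.out.println(generated);
        }
    }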
 
     static String generateParseMethod(final String modelClassName, final List<Field> fields) {
@@ -79,13 +81,26 @@ static String generateParseMethod(final String modelClassName, final List<Field>
                     }
                 }
                 """
-        .replace("$modelClassName",modelClassName)
-        .replace("$fieldDefs",fields.stream().map(field -> "    %s temp_%s = %s;".formatted(field.javaFieldType(),
-                field.name(), field.javaDefault())).collect(Collectors.joining("\n")))
-        .replace("$fieldsList",fields.stream().map(field -> "temp_"+field.name()).collect(Collectors.joining(", ")))
-        .replace("$parseLoop", generateParseLoop(generateCaseStatements(fields), ""))
-        .replace("$skipMaxSize", String.valueOf(Field.DEFAULT_MAX_SIZE))
-        .indent(DEFAULT_INDENT);
+                .replace("$modelClassName", modelClassName)
+                .replace(
+                        "$fieldDefs",
+                        fields.stream()
+                                .map(
+                                        field ->
+                                                "    %s temp_%s = %s;"
+                                                        .formatted(
+                                                                field.javaFieldType(),
+                                                                field.name(),
+                                                                field.javaDefault()))
+                                .collect(Collectors.joining("\n")))
+                .replace(
+                        "$fieldsList",
+                        fields.stream()
+                                .map(field -> "temp_" + field.name())
+                                .collect(Collectors.joining(", ")))
+                .replace("$parseLoop", generateParseLoop(generateCaseStatements(fields), ""))
+                .replace("$skipMaxSize", String.valueOf(Field.DEFAULT_MAX_SIZE))
+                .indent(DEFAULT_INDENT);
     }
 
     // prefix is pre-pended to variable names to support a nested parsing loop.
@@ -151,28 +166,30 @@ static String generateParseLoop(final String caseStatements, @NonNull final Stri
                             }
                         }
                 """
-                .replace("$caseStatements",caseStatements)
-                .replace("$prefix",prefix)
+                .replace("$caseStatements", caseStatements)
+                .replace("$prefix", prefix)
                 .replace("$skipMaxSize", String.valueOf(Field.DEFAULT_MAX_SIZE))
                 .indent(DEFAULT_INDENT);
     }
 
     /**
-     * Generate switch case statements for each tag (field & wire type pair). For repeated numeric value types we
-     * generate 2 case statements for packed and unpacked encoding.
+     * Generate switch case statements for each tag (field & wire type pair). For repeated numeric
+     * value types we generate 2 case statements for packed and unpacked encoding.
      *
      * @param fields list of all fields in record
      * @return string of case statement code
      */
     private static String generateCaseStatements(final List<Field> fields) {
         StringBuilder sb = new StringBuilder();
-        for (Field field: fields) {
+        for (Field field : fields) {
             if (field instanceof final OneOfField oneOfField) {
-                for (final Field subField: oneOfField.fields()) {
-                    generateFieldCaseStatement(sb,subField);
+                for (final Field subField : oneOfField.fields()) {
+                    generateFieldCaseStatement(sb, subField);
                 }
-            } else if (field.repeated() && field.type().wireType() != Common.TYPE_LENGTH_DELIMITED) {
-                // for repeated fields that are not length encoded there are 2 forms they can be stored in file.
+            } else if (field.repeated()
+                    && field.type().wireType() != Common.TYPE_LENGTH_DELIMITED) {
+                // for repeated fields that are not length encoded there are two forms in which
+                // they can be stored in the file:
                 // "packed" and repeated primitive fields
                 generateFieldCaseStatement(sb, field);
                 generateFieldCaseStatementPacked(sb, field);
@@ -190,13 +207,26 @@ private static String generateCaseStatements(final List<Field> fields) {
      * @param sb StringBuilder to append code to
      */
     @SuppressWarnings("StringConcatenationInsideStringBufferAppend")
-    private static void generateFieldCaseStatementPacked(final StringBuilder sb, final Field field) {
+    private static void generateFieldCaseStatementPacked(
+            final StringBuilder sb, final Field field) {
         final int wireType = Common.TYPE_LENGTH_DELIMITED;
         final int fieldNum = field.fieldNumber();
         final int tag = Common.getTag(wireType, fieldNum);
-        sb.append("case " + tag +" /* type=" + wireType + " [" + field.type() + "] packed-repeated " +
-                "field=" + fieldNum + " [" + field.name() + "] */ -> {\n");
-        sb.append("""
+        sb.append(
+                "case "
+                        + tag
+                        + " /* type="
+                        + wireType
+                        + " ["
+                        + field.type()
+                        + "] packed-repeated "
+                        + "field="
+                        + fieldNum
+                        + " ["
+                        + field.name()
+                        + "] */ -> {\n");
+        sb.append(
+                """
 				// Read the length of packed repeated field data
 				final long length = input.readVarInt(false);
 				if (length > $maxSize) {
@@ -215,12 +245,11 @@ private static void generateFieldCaseStatementPacked(final StringBuilder sb, fin
 				if (input.position() != beforePosition + length) {
 				    throw new BufferUnderflowException();
 				}"""
-                .replace("$tempFieldName", "temp_" + field.name())
-                .replace("$readMethod", readMethod(field))
-                .replace("$maxSize", String.valueOf(field.maxSize()))
-                .replace("$fieldName", field.name())
-                .indent(DEFAULT_INDENT)
-        );
+                        .replace("$tempFieldName", "temp_" + field.name())
+                        .replace("$readMethod", readMethod(field))
+                        .replace("$maxSize", String.valueOf(field.maxSize()))
+                        .replace("$fieldName", field.name())
+                        .indent(DEFAULT_INDENT));
         sb.append("\n}\n");
     }
 
@@ -231,13 +260,26 @@ private static void generateFieldCaseStatementPacked(final StringBuilder sb, fin
      * @param sb StringBuilder to append code to
      */
     private static void generateFieldCaseStatement(final StringBuilder sb, final Field field) {
-        final int wireType = field.optionalValueType() ? Common.TYPE_LENGTH_DELIMITED : field.type().wireType();
+        final int wireType =
+                field.optionalValueType() ? Common.TYPE_LENGTH_DELIMITED : field.type().wireType();
         final int fieldNum = field.fieldNumber();
         final int tag = Common.getTag(wireType, fieldNum);
-        sb.append("case " + tag +" /* type=" + wireType + " [" + field.type() + "] " +
-                "field=" + fieldNum + " [" + field.name() + "] */ -> {\n");
+        sb.append(
+                "case "
+                        + tag
+                        + " /* type="
+                        + wireType
+                        + " ["
+                        + field.type()
+                        + "] "
+                        + "field="
+                        + fieldNum
+                        + " ["
+                        + field.name()
+                        + "] */ -> {\n");
         if (field.optionalValueType()) {
-            sb.append("""
+            sb.append(
+                    """
 							// Read the message size, it is not needed
 							final var valueTypeMessageSize = input.readVarInt(false);
 							final $fieldType value;
@@ -256,32 +298,46 @@ private static void generateFieldCaseStatement(final StringBuilder sb, final Fie
 							    // means optional is default value
 							    value = $defaultValue;
 							}"""
-                    .replace("$fieldType", field.javaFieldType())
-                    .replace("$readMethod", readMethod(field))
-                    .replace("$defaultValue",
-                            switch (field.messageType()) {
-                                case "Int32Value", "UInt32Value" -> "0";
-                                case "Int64Value", "UInt64Value" -> "0l";
-                                case "FloatValue" -> "0f";
-                                case "DoubleValue" -> "0d";
-                                case "BoolValue" -> "false";
-                                case "BytesValue" -> "Bytes.EMPTY";
-                                case "StringValue" -> "\"\"";
-                                default -> throw new PbjCompilerException("Unexpected and unknown field type " + field.type() + " cannot be parsed");
-                            })
-                    .replace("$valueTypeWireType", Integer.toString(
-                            switch (field.messageType()) {
-                                case "StringValue", "BytesValue" -> Common.TYPE_LENGTH_DELIMITED;
-                                case "Int32Value", "UInt32Value", "Int64Value", "UInt64Value", "BoolValue" -> Common.TYPE_VARINT;
-                                case "FloatValue" -> Common.TYPE_FIXED32;
-                                case "DoubleValue" -> Common.TYPE_FIXED64;
-                                default -> throw new PbjCompilerException("Unexpected and unknown field type " + field.type() + " cannot be parsed");
-                            }))
-                    .indent(DEFAULT_INDENT)
-            );
+                            .replace("$fieldType", field.javaFieldType())
+                            .replace("$readMethod", readMethod(field))
+                            .replace(
+                                    "$defaultValue",
+                                    switch (field.messageType()) {
+                                        case "Int32Value", "UInt32Value" -> "0";
+                                        case "Int64Value", "UInt64Value" -> "0l";
+                                        case "FloatValue" -> "0f";
+                                        case "DoubleValue" -> "0d";
+                                        case "BoolValue" -> "false";
+                                        case "BytesValue" -> "Bytes.EMPTY";
+                                        case "StringValue" -> "\"\"";
+                                        default -> throw new PbjCompilerException(
+                                                "Unexpected and unknown field type "
+                                                        + field.type()
+                                                        + " cannot be parsed");
+                                    })
+                            .replace(
+                                    "$valueTypeWireType",
+                                    Integer.toString(
+                                            switch (field.messageType()) {
+                                                case "StringValue", "BytesValue" -> Common
+                                                        .TYPE_LENGTH_DELIMITED;
+                                                case "Int32Value",
+                                                        "UInt32Value",
+                                                        "Int64Value",
+                                                        "UInt64Value",
+                                                        "BoolValue" -> Common.TYPE_VARINT;
+                                                case "FloatValue" -> Common.TYPE_FIXED32;
+                                                case "DoubleValue" -> Common.TYPE_FIXED64;
+                                                default -> throw new PbjCompilerException(
+                                                        "Unexpected and unknown field type "
+                                                                + field.type()
+                                                                + " cannot be parsed");
+                                            }))
+                            .indent(DEFAULT_INDENT));
             sb.append('\n');
         } else if (field.type() == Field.FieldType.MESSAGE) {
-            sb.append("""
+            sb.append(
+                    """
 						final var messageLength = input.readVarInt(false);
 						final $fieldType value;
 						if (messageLength == 0) {
@@ -311,20 +367,22 @@ private static void generateFieldCaseStatement(final StringBuilder sb, final Fie
 							}
 						}
 						"""
-                    .replace("$readMethod", readMethod(field))
-                    .replace("$fieldType", field.javaFieldTypeBase())
-                    .replace("$fieldName", field.name())
-                    .replace("$maxSize", String.valueOf(field.maxSize()))
-                    .indent(DEFAULT_INDENT)
-            );
+                            .replace("$readMethod", readMethod(field))
+                            .replace("$fieldType", field.javaFieldTypeBase())
+                            .replace("$fieldName", field.name())
+                            .replace("$maxSize", String.valueOf(field.maxSize()))
+                            .indent(DEFAULT_INDENT));
         } else if (field.type() == Field.FieldType.MAP) {
-            // This is almost like reading a message above because that's how Protobuf encodes map entries.
-            // However(!), we read the key and value fields explicitly to avoid creating temporary entry objects.
+            // This is almost like reading a message above because that's how Protobuf encodes map
+            // entries.
+            // However(!), we read the key and value fields explicitly to avoid creating temporary
+            // entry objects.
             final MapField mapField = (MapField) field;
             final List<Field> mapEntryFields = List.of(mapField.keyField(), mapField.valueField());
-            sb.append("""
+            sb.append(
+                    """
 						final var __map_messageLength = input.readVarInt(false);
-						
+
 						$fieldDefs
 						if (__map_messageLength != 0) {
 							if (__map_messageLength > $maxSize) {
@@ -351,39 +409,80 @@ private static void generateFieldCaseStatement(final StringBuilder sb, final Fie
 							}
 						}
 						"""
-                    .replace("$fieldName", field.name())
-                    .replace("$fieldDefs",mapEntryFields.stream().map(mapEntryField -> "%s temp_%s = %s;".formatted(mapEntryField.javaFieldType(),
-                            mapEntryField.name(), mapEntryField.javaDefault())).collect(Collectors.joining("\n")))
-                    .replace("$mapParseLoop", generateParseLoop(generateCaseStatements(mapEntryFields), "map_entry_").indent(-DEFAULT_INDENT))
-                    .replace("$maxSize", String.valueOf(field.maxSize()))
-            );
+                            .replace("$fieldName", field.name())
+                            .replace(
+                                    "$fieldDefs",
+                                    mapEntryFields.stream()
+                                            .map(
+                                                    mapEntryField ->
+                                                            "%s temp_%s = %s;"
+                                                                    .formatted(
+                                                                            mapEntryField
+                                                                                    .javaFieldType(),
+                                                                            mapEntryField.name(),
+                                                                            mapEntryField
+                                                                                    .javaDefault()))
+                                            .collect(Collectors.joining("\n")))
+                            .replace(
+                                    "$mapParseLoop",
+                                    generateParseLoop(
+                                                    generateCaseStatements(mapEntryFields),
+                                                    "map_entry_")
+                                            .indent(-DEFAULT_INDENT))
+                            .replace("$maxSize", String.valueOf(field.maxSize())));
         } else {
             sb.append(("final var value = " + readMethod(field) + ";\n").indent(DEFAULT_INDENT));
         }
         // set value to temp var
         sb.append(Common.FIELD_INDENT);
         if (field.parent() != null && field.repeated()) {
-            throw new PbjCompilerException("Fields can not be oneof and repeated ["+field+"]");
+            throw new PbjCompilerException("Fields can not be oneof and repeated [" + field + "]");
         } else if (field.parent() != null) {
             final var oneOfField = field.parent();
-            sb.append("temp_" + oneOfField.name() + " =  new %s<>(".formatted(oneOfField.className()) +
-                    oneOfField.getEnumClassRef() + '.' + Common.camelToUpperSnake(field.name()) + ", value);\n");
+            sb.append(
+                    "temp_"
+                            + oneOfField.name()
+                            + " =  new %s<>(".formatted(oneOfField.className())
+                            + oneOfField.getEnumClassRef()
+                            + '.'
+                            + Common.camelToUpperSnake(field.name())
+                            + ", value);\n");
         } else if (field.repeated()) {
             sb.append("if (temp_" + field.name() + ".size() >= " + field.maxSize() + ") {\n");
-            sb.append("		throw new ParseException(\"" + field.name() + " size \" + temp_" + field.name() + ".size() + \" is greater than max \" + " + field.maxSize() + ");\n");
+            sb.append(
+                    "		throw new ParseException(\""
+                            + field.name()
+                            + " size \" + temp_"
+                            + field.name()
+                            + ".size() + \" is greater than max \" + "
+                            + field.maxSize()
+                            + ");\n");
             sb.append("	}\n");
             sb.append("	temp_" + field.name() + " = addToList(temp_" + field.name() + ",value);\n");
         } else if (field.type() == Field.FieldType.MAP) {
             final MapField mapField = (MapField) field;
 
             sb.append("if (__map_messageLength != 0) {\n");
-            sb.append("		if (temp_" + field.name() + ".size() >= " + field.maxSize() + ") {\n");
-            sb.append("				throw new ParseException(\"" + field.name() + " size \" + temp_" + field.name() + ".size() + \" is greater than max \" + " + field.maxSize() + ");\n");
+            sb.append(
+                    "		if (temp_" + field.name() + ".size()"
+                                                          + " >= " + field.maxSize() + ") {\n");
+            sb.append(
+                    "				throw new ParseException(\""
+                            + field.name()
+                            + " size \" + temp_"
+                            + field.name()
+                            + ".size() + \" is greater than max \" + "
+                            + field.maxSize()
+                            + ");\n");
             sb.append("			}\n");
-            sb.append("			temp_" + field.name() + " = addToMap(temp_" + field.name() + ", temp_$key, temp_$value);\n"
-                    .replace("$key", mapField.keyField().name())
-                    .replace("$value", mapField.valueField().name())
-            );
+            sb.append(
+                    "			temp_"
+                            + field.name()
+                            + " = addToMap(temp_"
+                            + field.name()
+                            + ", temp_$key, temp_$value);\n"
+                                    .replace("$key", mapField.keyField().name())
+                                    .replace("$value", mapField.valueField().name()));
             sb.append("		}\n");
         } else {
             sb.append("temp_" + field.name() + " = value;\n");
@@ -403,11 +502,13 @@ static String readMethod(Field field) {
                 case "DoubleValue" -> "readDouble(input)";
                 case "BoolValue" -> "readBool(input)";
                 case "BytesValue" -> "readBytes(input, " + field.maxSize() + ")";
-                default -> throw new PbjCompilerException("Optional message type [" + field.messageType() + "] not supported");
+                default -> throw new PbjCompilerException(
+                        "Optional message type [" + field.messageType() + "] not supported");
             };
         }
         return switch (field.type()) {
-            case ENUM ->  Common.snakeToCamel(field.messageType(), true) + ".fromProtobufOrdinal(readEnum(input))";
+            case ENUM -> Common.snakeToCamel(field.messageType(), true)
+                    + ".fromProtobufOrdinal(readEnum(input))";
             case INT32 -> "readInt32(input)";
             case UINT32 -> "readUint32(input)";
             case SINT32 -> "readSignedInt32(input)";
@@ -424,8 +525,10 @@ static String readMethod(Field field) {
             case BOOL -> "readBool(input)";
             case BYTES -> "readBytes(input, " + field.maxSize() + ")";
             case MESSAGE -> field.parseCode();
-            case ONE_OF -> throw new PbjCompilerException("Should never happen, oneOf handled elsewhere");
-            case MAP -> throw new PbjCompilerException("Should never happen, map handled elsewhere");
+            case ONE_OF -> throw new PbjCompilerException(
+                    "Should never happen, oneOf handled elsewhere");
+            case MAP -> throw new PbjCompilerException(
+                    "Should never happen, map handled elsewhere");
         };
     }
 }
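
Aside (not part of the patch): the parse-method generator above builds its output by filling `$placeholder` tokens inside Java text blocks with String.replace and then re-indenting the result with String.indent, exactly as in the `.replace("$fieldName", ...)...indent(DEFAULT_INDENT)` chains reformatted in the hunks above. A minimal, self-contained sketch of that pattern follows; the field name, the readInt64 call inside the template, and the indent width are assumptions made purely for illustration.

// Illustrative sketch only -- not part of the patch. Shows the text-block +
// String.replace + String.indent code-generation pattern used above.
public class TemplateSketch {
    // Assumed indent width for this sketch only.
    private static final int DEFAULT_INDENT = 4;

    public static void main(String[] args) {
        final String fieldName = "timestamp"; // hypothetical field name
        final String caseBody =
                """
                // Read the field value and assign it to the temporary variable
                final var value = readInt64(input);
                temp_$fieldName = value;"""
                        .replace("$fieldName", fieldName)
                        .indent(DEFAULT_INDENT);
        System.out.print(caseBody);
    }
}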
diff --git a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecWriteMethodGenerator.java b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecWriteMethodGenerator.java
index 685f95d2..07e59cfb 100644
--- a/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecWriteMethodGenerator.java
+++ b/pbj-core/pbj-compiler/src/main/java/com/hedera/pbj/compiler/impl/generators/protobuf/CodecWriteMethodGenerator.java
@@ -8,26 +8,24 @@
 import com.hedera.pbj.compiler.impl.MapField;
 import com.hedera.pbj.compiler.impl.OneOfField;
 import com.hedera.pbj.compiler.impl.SingleField;
-
 import java.util.Comparator;
 import java.util.List;
 import java.util.function.Function;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
-/**
- * Code to generate the write method for Codec classes.
- */
+/** Code to generate the write method for Codec classes. */
 final class CodecWriteMethodGenerator {
 
     static String generateWriteMethod(final String modelClassName, final List<Field> fields) {
-        final String fieldWriteLines = buildFieldWriteLines(
-                modelClassName,
-                fields,
-                field -> "data.%s()".formatted(field.nameCamelFirstLower()),
-                true);
+        final String fieldWriteLines =
+                buildFieldWriteLines(
+                        modelClassName,
+                        fields,
+                        field -> "data.%s()".formatted(field.nameCamelFirstLower()),
+                        true);
 
-        return """     
+        return """
             /**
              * Write out a $modelClass model to output stream in protobuf format.
              *
@@ -39,9 +37,9 @@ public void write(@NonNull $modelClass data, @NonNull final WritableSequentialDa
                 $fieldWriteLines
             }
             """
-            .replace("$modelClass", modelClassName)
-            .replace("$fieldWriteLines", fieldWriteLines)
-            .indent(DEFAULT_INDENT);
+                .replace("$modelClass", modelClassName)
+                .replace("$fieldWriteLines", fieldWriteLines)
+                .indent(DEFAULT_INDENT);
     }
 
     private static String buildFieldWriteLines(
@@ -50,9 +48,19 @@ private static String buildFieldWriteLines(
             final Function<Field, String> getValueBuilder,
             final boolean skipDefault) {
         return fields.stream()
-                .flatMap(field -> field.type() == Field.FieldType.ONE_OF ? ((OneOfField)field).fields().stream() : Stream.of(field))
+                .flatMap(
+                        field ->
+                                field.type() == Field.FieldType.ONE_OF
+                                        ? ((OneOfField) field).fields().stream()
+                                        : Stream.of(field))
                 .sorted(Comparator.comparingInt(Field::fieldNumber))
-                .map(field -> generateFieldWriteLines(field, modelClassName, getValueBuilder.apply(field), skipDefault))
+                .map(
+                        field ->
+                                generateFieldWriteLines(
+                                        field,
+                                        modelClassName,
+                                        getValueBuilder.apply(field),
+                                        skipDefault))
                 .collect(Collectors.joining("\n"))
                 .indent(DEFAULT_INDENT);
     }
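
Aside (not part of the patch): buildFieldWriteLines above flattens oneOf wrappers into their sub-fields, sorts everything by field number, and maps each field to a generated line of write code. The sketch below mirrors that stream pipeline with a stand-in record type; the SimpleField record, the field names, and the field numbers are assumptions for illustration only.

// Illustrative sketch only -- not part of the patch. Mirrors the
// flatMap -> sorted -> map -> joining pipeline in buildFieldWriteLines.
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class WriteLinesSketch {
    // Stand-in for the compiler's Field/OneOfField types (assumption).
    record SimpleField(int number, String name, List<SimpleField> subFields) {
        boolean isOneOf() {
            return subFields != null;
        }
    }

    public static void main(String[] args) {
        final List<SimpleField> fields = List.of(
                new SimpleField(3, "memo", null),
                new SimpleField(0, "body", List.of( // oneOf wrapper
                        new SimpleField(1, "transfer", null),
                        new SimpleField(2, "create", null))));

        final String lines = fields.stream()
                // oneOf wrappers contribute their sub-fields, plain fields pass through
                .flatMap(f -> f.isOneOf() ? f.subFields().stream() : Stream.of(f))
                // generated write lines are emitted in field-number order
                .sorted(Comparator.comparingInt(SimpleField::number))
                .map(f -> "// [" + f.number() + "] - " + f.name())
                .collect(Collectors.joining("\n"));

        System.out.println(lines);
    }
}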
@@ -61,56 +69,75 @@ private static String buildFieldWriteLines(
      * Generate lines of code for writing field
      *
      * @param field The field to generate writing line of code for
-     * @param modelClassName The model class name for model class for message type we are generating writer for
+     * @param modelClassName The model class name for model class for message type we are generating
+     *     writer for
      * @param getValueCode java code to get the value of field
      * @param skipDefault skip writing the field if it has default value (for non-oneOf only)
      * @return java code to write field to output
      */
-    private static String generateFieldWriteLines(final Field field, final String modelClassName, String getValueCode, boolean skipDefault) {
+    private static String generateFieldWriteLines(
+            final Field field,
+            final String modelClassName,
+            String getValueCode,
+            boolean skipDefault) {
         final String fieldDef = Common.camelToUpperSnake(field.name());
-        String prefix = "// ["+field.fieldNumber()+"] - "+field.name();
+        String prefix = "// [" + field.fieldNumber() + "] - " + field.name();
         prefix += "\n";
 
         if (field.parent() != null) {
             final OneOfField oneOfField = field.parent();
-            final String oneOfType = modelClassName+"."+oneOfField.nameCamelFirstUpper()+"OneOfType";
-            getValueCode = "data."+oneOfField.nameCamelFirstLower()+"().as()";
-            prefix += "if (data."+oneOfField.nameCamelFirstLower()+"().kind() == "+ oneOfType +"."+
-                    Common.camelToUpperSnake(field.name())+")";
+            final String oneOfType =
+                    modelClassName + "." + oneOfField.nameCamelFirstUpper() + "OneOfType";
+            getValueCode = "data." + oneOfField.nameCamelFirstLower() + "().as()";
+            prefix +=
+                    "if (data."
+                            + oneOfField.nameCamelFirstLower()
+                            + "().kind() == "
+                            + oneOfType
+                            + "."
+                            + Common.camelToUpperSnake(field.name())
+                            + ")";
             prefix += "\n";
         }
 
         final String writeMethodName = field.methodNameType();
         if (field.optionalValueType()) {
-            return prefix + switch (field.messageType()) {
-                case "StringValue" -> "writeOptionalString(out, %s, %s);"
-                        .formatted(fieldDef,getValueCode);
-                case "BoolValue" -> "writeOptionalBoolean(out, %s, %s);"
-                        .formatted(fieldDef, getValueCode);
-                case "Int32Value","UInt32Value" -> "writeOptionalInteger(out, %s, %s);"
-                        .formatted(fieldDef, getValueCode);
-                case "Int64Value","UInt64Value" -> "writeOptionalLong(out, %s, %s);"
-                        .formatted(fieldDef, getValueCode);
-                case "FloatValue" -> "writeOptionalFloat(out, %s, %s);"
-                        .formatted(fieldDef, getValueCode);
-                case "DoubleValue" -> "writeOptionalDouble(out, %s, %s);"
-                        .formatted(fieldDef, getValueCode);
-                case "BytesValue" -> "writeOptionalBytes(out, %s, %s);"
-                        .formatted(fieldDef, getValueCode);
-                default -> throw new UnsupportedOperationException("Unhandled optional message type:"+field.messageType());
-            };
+            return prefix
+                    + switch (field.messageType()) {
+                        case "StringValue" -> "writeOptionalString(out, %s, %s);"
+                                .formatted(fieldDef, getValueCode);
+                        case "BoolValue" -> "writeOptionalBoolean(out, %s, %s);"
+                                .formatted(fieldDef, getValueCode);
+                        case "Int32Value", "UInt32Value" -> "writeOptionalInteger(out, %s, %s);"
+                                .formatted(fieldDef, getValueCode);
+                        case "Int64Value", "UInt64Value" -> "writeOptionalLong(out, %s, %s);"
+                                .formatted(fieldDef, getValueCode);
+                        case "FloatValue" -> "writeOptionalFloat(out, %s, %s);"
+                                .formatted(fieldDef, getValueCode);
+                        case "DoubleValue" -> "writeOptionalDouble(out, %s, %s);"
+                                .formatted(fieldDef, getValueCode);
+                        case "BytesValue" -> "writeOptionalBytes(out, %s, %s);"
+                                .formatted(fieldDef, getValueCode);
+                        default -> throw new UnsupportedOperationException(
+                                "Unhandled optional message type:" + field.messageType());
+                    };
         } else if (field.repeated()) {
-            return prefix + switch(field.type()) {
-                case ENUM -> "writeEnumList(out, %s, %s);"
-                        .formatted(fieldDef, getValueCode);
-                case MESSAGE -> "writeMessageList(out, $fieldDef, $valueCode, $codec::write, $codec::measureRecord);"
-                        .replace("$fieldDef", fieldDef)
-                        .replace("$valueCode", getValueCode)
-                        .replace("$codec", ((SingleField)field).messageTypeModelPackage() + "." +
-                                Common.capitalizeFirstLetter(field.messageType())+ ".PROTOBUF");
-                default -> "write%sList(out, %s, %s);"
-                        .formatted(writeMethodName, fieldDef, getValueCode);
-            };
+            return prefix
+                    + switch (field.type()) {
+                        case ENUM -> "writeEnumList(out, %s, %s);"
+                                .formatted(fieldDef, getValueCode);
+                        case MESSAGE -> "writeMessageList(out, $fieldDef, $valueCode, $codec::write, $codec::measureRecord);"
+                                .replace("$fieldDef", fieldDef)
+                                .replace("$valueCode", getValueCode)
+                                .replace(
+                                        "$codec",
+                                        ((SingleField) field).messageTypeModelPackage()
+                                                + "."
+                                                + Common.capitalizeFirstLetter(field.messageType())
+                                                + ".PROTOBUF");
+                        default -> "write%sList(out, %s, %s);"
+                                .formatted(writeMethodName, fieldDef, getValueCode);
+                    };
         } else if (field.type() == Field.FieldType.MAP) {
             // https://protobuf.dev/programming-guides/proto3/#maps
             // On the wire, a map is equivalent to:
@@ -124,19 +151,18 @@ private static String generateFieldWriteLines(final Field field, final String mo
             // NOTE: protoc serializes default values (e.g. "") in maps, so we should too.
             final MapField mapField = (MapField) field;
             final List<Field> mapEntryFields = List.of(mapField.keyField(), mapField.valueField());
-            final Function<Field, String> getValueBuilder = mapEntryField ->
-                    mapEntryField == mapField.keyField() ? "k" : (mapEntryField == mapField.valueField() ? "v" : null);
-            final String fieldWriteLines = buildFieldWriteLines(
-                    field.name(),
-                    mapEntryFields,
-                    getValueBuilder,
-                    false);
-            final String fieldSizeOfLines = CodecMeasureRecordMethodGenerator.buildFieldSizeOfLines(
-                    field.name(),
-                    mapEntryFields,
-                    getValueBuilder,
-                    false);
-            return prefix + """
+            final Function<Field, String> getValueBuilder =
+                    mapEntryField ->
+                            mapEntryField == mapField.keyField()
+                                    ? "k"
+                                    : (mapEntryField == mapField.valueField() ? "v" : null);
+            final String fieldWriteLines =
+                    buildFieldWriteLines(field.name(), mapEntryFields, getValueBuilder, false);
+            final String fieldSizeOfLines =
+                    CodecMeasureRecordMethodGenerator.buildFieldSizeOfLines(
+                            field.name(), mapEntryFields, getValueBuilder, false);
+            return prefix
+                    + """
                         if (!$map.isEmpty()) {
                             final Pbj$javaFieldType pbjMap = (Pbj$javaFieldType) $map;
                             final int mapSize = pbjMap.size();
@@ -151,33 +177,50 @@ private static String generateFieldWriteLines(final Field field, final String mo
                             }
                         }
                         """
-                    .replace("$fieldDef", fieldDef)
-                    .replace("$map", getValueCode)
-                    .replace("$javaFieldType", mapField.javaFieldType())
-                    .replace("$K", mapField.keyField().type().boxedType)
-                    .replace("$V", mapField.valueField().type() == Field.FieldType.MESSAGE ? ((SingleField)mapField.valueField()).messageType() : mapField.valueField().type().boxedType)
-                    .replace("$fieldWriteLines", fieldWriteLines.indent(DEFAULT_INDENT))
-                    .replace("$fieldSizeOfLines", fieldSizeOfLines.indent(DEFAULT_INDENT))
+                            .replace("$fieldDef", fieldDef)
+                            .replace("$map", getValueCode)
+                            .replace("$javaFieldType", mapField.javaFieldType())
+                            .replace("$K", mapField.keyField().type().boxedType)
+                            .replace(
+                                    "$V",
+                                    mapField.valueField().type() == Field.FieldType.MESSAGE
+                                            ? ((SingleField) mapField.valueField()).messageType()
+                                            : mapField.valueField().type().boxedType)
+                            .replace("$fieldWriteLines", fieldWriteLines.indent(DEFAULT_INDENT))
+                            .replace("$fieldSizeOfLines", fieldSizeOfLines.indent(DEFAULT_INDENT));
 
-                    ;
         } else {
-            return prefix + switch(field.type()) {
-                case ENUM -> "writeEnum(out, %s, %s);"
-                        .formatted(fieldDef, getValueCode);
-                case STRING -> "writeString(out, %s, %s, %s);"
-                        .formatted(fieldDef, getValueCode, skipDefault);
-                case MESSAGE -> "writeMessage(out, $fieldDef, $valueCode, $codec::write, $codec::measureRecord);"
-                        .replace("$fieldDef", fieldDef)
-                        .replace("$valueCode", getValueCode)
-                        .replace("$codec", ((SingleField)field).messageTypeModelPackage() + "." +
-                                Common.capitalizeFirstLetter(field.messageType())+ ".PROTOBUF");
-                case BOOL -> "writeBoolean(out, %s, %s, %s);"
-                        .formatted(fieldDef, getValueCode, skipDefault);
-                case INT32, UINT32, SINT32, FIXED32, SFIXED32, INT64, SINT64, UINT64, FIXED64, SFIXED64, BYTES -> "write%s(out, %s, %s, %s);"
-                        .formatted(writeMethodName, fieldDef, getValueCode, skipDefault);
-                default -> "write%s(out, %s, %s);"
-                        .formatted(writeMethodName, fieldDef, getValueCode);
-            };
+            return prefix
+                    + switch (field.type()) {
+                        case ENUM -> "writeEnum(out, %s, %s);".formatted(fieldDef, getValueCode);
+                        case STRING -> "writeString(out, %s, %s, %s);"
+                                .formatted(fieldDef, getValueCode, skipDefault);
+                        case MESSAGE -> "writeMessage(out, $fieldDef, $valueCode, $codec::write, $codec::measureRecord);"
+                                .replace("$fieldDef", fieldDef)
+                                .replace("$valueCode", getValueCode)
+                                .replace(
+                                        "$codec",
+                                        ((SingleField) field).messageTypeModelPackage()
+                                                + "."
+                                                + Common.capitalizeFirstLetter(field.messageType())
+                                                + ".PROTOBUF");
+                        case BOOL -> "writeBoolean(out, %s, %s, %s);"
+                                .formatted(fieldDef, getValueCode, skipDefault);
+                        case INT32,
+                                UINT32,
+                                SINT32,
+                                FIXED32,
+                                SFIXED32,
+                                INT64,
+                                SINT64,
+                                UINT64,
+                                FIXED64,
+                                SFIXED64,
+                                BYTES -> "write%s(out, %s, %s, %s);"
+                                .formatted(writeMethodName, fieldDef, getValueCode, skipDefault);
+                        default -> "write%s(out, %s, %s);"
+                                .formatted(writeMethodName, fieldDef, getValueCode);
+                    };
         }
     }
 }
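
Aside (not part of the patch): the MAP branch above relies on the proto3 rule linked in its comment -- a map field is wire-compatible with a repeated entry message whose key is field 1 and whose value is field 2. The sketch below hand-encodes one entry of a map<string, string> to show that shape; the outer field number (5) and the key/value strings are arbitrary assumptions, and the single-byte length writes only hold because every value here is shorter than 128 bytes.

// Illustrative sketch only -- not part of the patch. Encodes one proto3
// map<string, string> entry as the nested key/value message described above.
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class MapEntrySketch {
    public static void main(String[] args) throws IOException {
        final byte[] key = "k".getBytes(StandardCharsets.UTF_8);
        final byte[] value = "v".getBytes(StandardCharsets.UTF_8);

        // Inner "entry" message: field 1 = key, field 2 = value, both length-delimited.
        final ByteArrayOutputStream entry = new ByteArrayOutputStream();
        entry.write(0x0A);           // tag: field 1, wire type 2 (length-delimited)
        entry.write(key.length);     // single-byte varint (assumes length < 128)
        entry.write(key);
        entry.write(0x12);           // tag: field 2, wire type 2
        entry.write(value.length);
        entry.write(value);

        // Outer map field (assumed field number 5): each map entry is written as
        // one length-delimited submessage, repeated once per entry.
        final ByteArrayOutputStream out = new ByteArrayOutputStream();
        out.write((5 << 3) | 2);     // tag: field 5, wire type 2
        out.write(entry.size());
        entry.writeTo(out);

        for (byte b : out.toByteArray()) {
            System.out.printf("%02x ", b);
        }
    }
}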
diff --git a/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/CommonTest.java b/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/CommonTest.java
index 17788d1a..7ad01369 100644
--- a/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/CommonTest.java
+++ b/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/CommonTest.java
@@ -1,11 +1,11 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.compiler.impl;
 
+import static org.junit.jupiter.api.Assertions.*;
+
 import org.junit.jupiter.api.DisplayName;
 import org.junit.jupiter.api.Test;
 
-import static org.junit.jupiter.api.Assertions.*;
-
 final class CommonTest {
 
     // ================================================================================================================
@@ -21,159 +21,229 @@ void doubleAsterisk() {
     @Test
     @DisplayName("Test comment with params and return")
     void commentWithParamsAndReturn() {
-        String str = "/**\n*   Clean up a java doc style comment removing all the \"*\" etc.\n*\n* @param  fieldComment raw Java doc style comment\n* @return clean multi-line content of the comment\n*/\n";
+        String str =
+                "/**\n"
+                        + "*   Clean up a java doc style comment removing all the \"*\" etc.\n"
+                        + "*\n"
+                        + "* @param  fieldComment raw Java doc style comment\n"
+                        + "* @return clean multi-line content of the comment\n"
+                        + "*/\n";
         String result = Common.cleanJavaDocComment(str);
-        String expected = "Clean up a java doc style comment removing all the \"*\" etc.\n*\n* @param  fieldComment raw Java doc style comment\n* @return clean multi-line content of the comment";
+        String expected =
+                "Clean up a java doc style comment removing all the \"*\" etc.\n"
+                        + "*\n"
+                        + "* @param  fieldComment raw Java doc style comment\n"
+                        + "* @return clean multi-line content of the comment";
         assertEquals(expected, result);
     }
 
     @Test
     @DisplayName("Test one line comment on lultiple lines")
     void oneLineOnMultipleLines() {
-        String str = "/**\n" +
-                "     * The capacity of this sequence will be the difference between the <b>initial</b> position and the length of the delegate\n" +
-                "     */\n";
+        String str =
+                "/**\n"
+                        + "     * The capacity of this sequence will be the difference between the"
+                        + " <b>initial</b> position and the length of the delegate\n"
+                        + "     */\n";
         String result = Common.cleanJavaDocComment(str);
-        String expected = "The capacity of this sequence will be the difference between the <b>initial</b> position and the length of the delegate";
+        String expected =
+                "The capacity of this sequence will be the difference between the <b>initial</b>"
+                        + " position and the length of the delegate";
         assertEquals(expected, result);
     }
 
     @Test
     @DisplayName("Test params, throws and returns")
     void oneParamsThrowsAndReturns() {
-        String str = "/**\n" +
-                "     * Reads the signed byte at current {@link #position()}, and then increments the {@link #position()} by 1.\n" +
-                "     *\n" +
-                "     * @return The signed byte at the current {@link #position()}\n" +
-                "     * @throws BufferUnderflowException If there are no bytes remaining in this sequence\n" +
-                "     * @throws DataAccessException If an I/O error occurs\n" +
-                "     */";
+        String str =
+                "/**\n"
+                    + "     * Reads the signed byte at current {@link #position()}, and then"
+                    + " increments the {@link #position()} by 1.\n"
+                    + "     *\n"
+                    + "     * @return The signed byte at the current {@link #position()}\n"
+                    + "     * @throws BufferUnderflowException If there are no bytes remaining in"
+                    + " this sequence\n"
+                    + "     * @throws DataAccessException If an I/O error occurs\n"
+                    + "     */";
         String result = Common.cleanJavaDocComment(str);
-        String expected = "Reads the signed byte at current {@link #position()}, and then increments the {@link #position()} by 1.\n" +
-               "     *\n"  +
-                "@return The signed byte at the current {@link #position()}\n" +
-                "@throws BufferUnderflowException If there are no bytes remaining in this sequence\n" +
-                "@throws DataAccessException If an I/O error occurs";
+        String expected =
+                "Reads the signed byte at current {@link #position()}, and then increments the"
+                        + " {@link #position()} by 1.\n"
+                        + "     *\n"
+                        + "@return The signed byte at the current {@link #position()}\n"
+                        + "@throws BufferUnderflowException If there are no bytes remaining in this"
+                        + " sequence\n"
+                        + "@throws DataAccessException If an I/O error occurs";
         assertEquals(expected, result);
     }
 
     @Test
     @DisplayName("Test params, throws and returns")
     void oneParamsThrowsAndReturnsWithMore() {
-        String str = "    /**\n" +
-                "     * Read bytes starting at current {@link #position()} into the {@code dst} array, up to the size of the {@code dst}\n" +
-                "     * array. If {@code dst} is larger than the remaining bytes in the sequence, only the remaining bytes are read.\n" +
-                "     * The total number of bytes actually read are returned. The bytes will be placed starting at index 0 of the array.\n" +
-                "     * The {@link #position()} will be incremented by the number of bytes read. If no bytes are available in the\n" +
-                "     * sequence, then 0 is returned.\n" +
-                "     *\n" +
-                "     * <p>The {@code dst} array may be partially written to at the time that any of the declared exceptions are thrown.\n" +
-                "     *\n" +
-                "     * <p>Bytes are read from the sequence one at a time. If there are not {@code length} bytes remaining in this\n" +
-                "     * sequence, then a {@link BufferUnderflowException} will be thrown. The {@link #position()} will be\n" +
-                "     * incremented by the number of bytes read prior to the exception.\n" +
-                "     *\n" +
-                "     * @param dst The destination array. Cannot be null.\n" +
-                "     * @throws NullPointerException if {@code dst} is null\n" +
-                "     * @throws DataAccessException If an I/O error occurs\n" +
-                "     * @return The number of bytes read actually read and placed into {@code dst}\n" +
-                "     */";
+        String str =
+                "    /**\n"
+                    + "     * Read bytes starting at current {@link #position()} into the {@code"
+                    + " dst} array, up to the size of the {@code dst}\n"
+                    + "     * array. If {@code dst} is larger than the remaining bytes in the"
+                    + " sequence, only the remaining bytes are read.\n"
+                    + "     * The total number of bytes actually read are returned. The bytes will"
+                    + " be placed starting at index 0 of the array.\n"
+                    + "     * The {@link #position()} will be incremented by the number of bytes"
+                    + " read. If no bytes are available in the\n"
+                    + "     * sequence, then 0 is returned.\n"
+                    + "     *\n"
+                    + "     * <p>The {@code dst} array may be partially written to at the time that"
+                    + " any of the declared exceptions are thrown.\n"
+                    + "     *\n"
+                    + "     * <p>Bytes are read from the sequence one at a time. If there are not"
+                    + " {@code length} bytes remaining in this\n"
+                    + "     * sequence, then a {@link BufferUnderflowException} will be thrown. The"
+                    + " {@link #position()} will be\n"
+                    + "     * incremented by the number of bytes read prior to the exception.\n"
+                    + "     *\n"
+                    + "     * @param dst The destination array. Cannot be null.\n"
+                    + "     * @throws NullPointerException if {@code dst} is null\n"
+                    + "     * @throws DataAccessException If an I/O error occurs\n"
+                    + "     * @return The number of bytes read actually read and placed into {@code"
+                    + " dst}\n"
+                    + "     */";
         String result = Common.cleanJavaDocComment(str);
-        String expected = "Read bytes starting at current {@link #position()} into the {@code dst} array, up to the size of the {@code dst}\n" +
-                "array. If {@code dst} is larger than the remaining bytes in the sequence, only the remaining bytes are read.\n" +
-                "The total number of bytes actually read are returned. The bytes will be placed starting at index 0 of the array.\n" +
-                "The {@link #position()} will be incremented by the number of bytes read. If no bytes are available in the\n" +
-                "sequence, then 0 is returned.\n" +
-                "     *\n" +
-                "<p>The {@code dst} array may be partially written to at the time that any of the declared exceptions are thrown.\n" +
-                "     *\n" +
-                "<p>Bytes are read from the sequence one at a time. If there are not {@code length} bytes remaining in this\n" +
-                "sequence, then a {@link BufferUnderflowException} will be thrown. The {@link #position()} will be\n" +
-                "incremented by the number of bytes read prior to the exception.\n" +
-                "     *\n" +
-                "@param dst The destination array. Cannot be null.\n" +
-                "@throws NullPointerException if {@code dst} is null\n" +
-                "@throws DataAccessException If an I/O error occurs\n" +
-                "@return The number of bytes read actually read and placed into {@code dst}";
+        String expected =
+                "Read bytes starting at current {@link #position()} into the {@code dst} array, up"
+                    + " to the size of the {@code dst}\n"
+                    + "array. If {@code dst} is larger than the remaining bytes in the sequence,"
+                    + " only the remaining bytes are read.\n"
+                    + "The total number of bytes actually read are returned. The bytes will be"
+                    + " placed starting at index 0 of the array.\n"
+                    + "The {@link #position()} will be incremented by the number of bytes read. If"
+                    + " no bytes are available in the\n"
+                    + "sequence, then 0 is returned.\n"
+                    + "     *\n"
+                    + "<p>The {@code dst} array may be partially written to at the time that any of"
+                    + " the declared exceptions are thrown.\n"
+                    + "     *\n"
+                    + "<p>Bytes are read from the sequence one at a time. If there are not {@code"
+                    + " length} bytes remaining in this\n"
+                    + "sequence, then a {@link BufferUnderflowException} will be thrown. The {@link"
+                    + " #position()} will be\n"
+                    + "incremented by the number of bytes read prior to the exception.\n"
+                    + "     *\n"
+                    + "@param dst The destination array. Cannot be null.\n"
+                    + "@throws NullPointerException if {@code dst} is null\n"
+                    + "@throws DataAccessException If an I/O error occurs\n"
+                    + "@return The number of bytes read actually read and placed into {@code dst}";
         assertEquals(expected, result);
     }
 
     @Test
     @DisplayName("Test params, throws and returns more")
     void oneParamsThrowsAndReturnsWithMore2() {
-        String str = "\n" +
-                "    /**\n" +
-                "     * Read bytes starting at the current {@link #position()} into the {@code dst} array, up to {@code maxLength}\n" +
-                "     * number of bytes. If {@code maxLength} is larger than the remaining bytes in the sequence, only the remaining\n" +
-                "     * bytes are read. The total number of bytes actually read are returned. The bytes will be placed starting at index\n" +
-                "     * {@code offset} of the array. The {@link #position()} will be incremented by the number of bytes read. If no\n" +
-                "     * bytes are available in the sequence, then 0 is returned.\n" +
-                "     *\n" +
-                "     * <p>The {@code dst} array may be partially written to at the time that any of the declared exceptions are thrown.\n" +
-                "     *\n" +
-                "     * <p>Bytes are read from the sequence one at a time. If there are not {@code length} bytes remaining in this\n" +
-                "     * sequence, then a {@link BufferUnderflowException} will be thrown. The {@link #position()} will be\n" +
-                "     * incremented by the number of bytes read prior to the exception.\n" +
-                "     *\n" +
-                "     * @param dst The array into which bytes are to be written\n" +
-                "     * @param offset The offset within the {@code dst} array of the first byte to be written; must be non-negative and\n" +
-                "     *                no larger than {@code dst.length - maxLength}.\n" +
-                "     * @param maxLength The maximum number of bytes to be written to the given {@code dst} array; must be non-negative\n" +
-                "     *                and no larger than {@code dst.length - offset}\n" +
-                "     * @throws NullPointerException If {@code dst} is null\n" +
-                "     * @throws IndexOutOfBoundsException If {@code offset} is out of bounds of {@code dst} or if\n" +
-                "     *                                  {@code offset + maxLength} is not less than {@code dst.length}\n" +
-                "     * @throws IllegalArgumentException If {@code maxLength} is negative\n" +
-                "     * @throws DataAccessException If an I/O error occurs\n" +
-                "     * @return The number of bytes read actually read and placed into {@code dst}\n" +
-                "     */";
+        String str =
+                "\n"
+                    + "    /**\n"
+                    + "     * Read bytes starting at the current {@link #position()} into the"
+                    + " {@code dst} array, up to {@code maxLength}\n"
+                    + "     * number of bytes. If {@code maxLength} is larger than the remaining"
+                    + " bytes in the sequence, only the remaining\n"
+                    + "     * bytes are read. The total number of bytes actually read are returned."
+                    + " The bytes will be placed starting at index\n"
+                    + "     * {@code offset} of the array. The {@link #position()} will be"
+                    + " incremented by the number of bytes read. If no\n"
+                    + "     * bytes are available in the sequence, then 0 is returned.\n"
+                    + "     *\n"
+                    + "     * <p>The {@code dst} array may be partially written to at the time that"
+                    + " any of the declared exceptions are thrown.\n"
+                    + "     *\n"
+                    + "     * <p>Bytes are read from the sequence one at a time. If there are not"
+                    + " {@code length} bytes remaining in this\n"
+                    + "     * sequence, then a {@link BufferUnderflowException} will be thrown. The"
+                    + " {@link #position()} will be\n"
+                    + "     * incremented by the number of bytes read prior to the exception.\n"
+                    + "     *\n"
+                    + "     * @param dst The array into which bytes are to be written\n"
+                    + "     * @param offset The offset within the {@code dst} array of the first"
+                    + " byte to be written; must be non-negative and\n"
+                    + "     *                no larger than {@code dst.length - maxLength}.\n"
+                    + "     * @param maxLength The maximum number of bytes to be written to the"
+                    + " given {@code dst} array; must be non-negative\n"
+                    + "     *                and no larger than {@code dst.length - offset}\n"
+                    + "     * @throws NullPointerException If {@code dst} is null\n"
+                    + "     * @throws IndexOutOfBoundsException If {@code offset} is out of bounds"
+                    + " of {@code dst} or if\n"
+                    + "     *                                  {@code offset + maxLength} is not"
+                    + " less than {@code dst.length}\n"
+                    + "     * @throws IllegalArgumentException If {@code maxLength} is negative\n"
+                    + "     * @throws DataAccessException If an I/O error occurs\n"
+                    + "     * @return The number of bytes read actually read and placed into {@code"
+                    + " dst}\n"
+                    + "     */";
         String result = Common.cleanJavaDocComment(str);
-        String expected = "Read bytes starting at the current {@link #position()} into the {@code dst} array, up to {@code maxLength}\n" +
-                "number of bytes. If {@code maxLength} is larger than the remaining bytes in the sequence, only the remaining\n" +
-                "bytes are read. The total number of bytes actually read are returned. The bytes will be placed starting at index\n" +
-                "{@code offset} of the array. The {@link #position()} will be incremented by the number of bytes read. If no\n" +
-                "bytes are available in the sequence, then 0 is returned.\n" +
-                "     *\n" +
-                "<p>The {@code dst} array may be partially written to at the time that any of the declared exceptions are thrown.\n" +
-                "     *\n" +
-                "<p>Bytes are read from the sequence one at a time. If there are not {@code length} bytes remaining in this\n" +
-                "sequence, then a {@link BufferUnderflowException} will be thrown. The {@link #position()} will be\n" +
-                "incremented by the number of bytes read prior to the exception.\n" +
-                "     *\n" +
-                "@param dst The array into which bytes are to be written\n" +
-                "@param offset The offset within the {@code dst} array of the first byte to be written; must be non-negative and\n" +
-                "no larger than {@code dst.length - maxLength}.\n" +
-                "@param maxLength The maximum number of bytes to be written to the given {@code dst} array; must be non-negative\n" +
-                "and no larger than {@code dst.length - offset}\n" +
-                "@throws NullPointerException If {@code dst} is null\n" +
-                "@throws IndexOutOfBoundsException If {@code offset} is out of bounds of {@code dst} or if\n" +
-                "{@code offset + maxLength} is not less than {@code dst.length}\n" +
-                "@throws IllegalArgumentException If {@code maxLength} is negative\n" +
-                "@throws DataAccessException If an I/O error occurs\n" +
-                "@return The number of bytes read actually read and placed into {@code dst}";
+        String expected =
+                "Read bytes starting at the current {@link #position()} into the {@code dst} array,"
+                    + " up to {@code maxLength}\n"
+                    + "number of bytes. If {@code maxLength} is larger than the remaining bytes in"
+                    + " the sequence, only the remaining\n"
+                    + "bytes are read. The total number of bytes actually read are returned. The"
+                    + " bytes will be placed starting at index\n"
+                    + "{@code offset} of the array. The {@link #position()} will be incremented by"
+                    + " the number of bytes read. If no\n"
+                    + "bytes are available in the sequence, then 0 is returned.\n"
+                    + "     *\n"
+                    + "<p>The {@code dst} array may be partially written to at the time that any of"
+                    + " the declared exceptions are thrown.\n"
+                    + "     *\n"
+                    + "<p>Bytes are read from the sequence one at a time. If there are not {@code"
+                    + " length} bytes remaining in this\n"
+                    + "sequence, then a {@link BufferUnderflowException} will be thrown. The {@link"
+                    + " #position()} will be\n"
+                    + "incremented by the number of bytes read prior to the exception.\n"
+                    + "     *\n"
+                    + "@param dst The array into which bytes are to be written\n"
+                    + "@param offset The offset within the {@code dst} array of the first byte to"
+                    + " be written; must be non-negative and\n"
+                    + "no larger than {@code dst.length - maxLength}.\n"
+                    + "@param maxLength The maximum number of bytes to be written to the given"
+                    + " {@code dst} array; must be non-negative\n"
+                    + "and no larger than {@code dst.length - offset}\n"
+                    + "@throws NullPointerException If {@code dst} is null\n"
+                    + "@throws IndexOutOfBoundsException If {@code offset} is out of bounds of"
+                    + " {@code dst} or if\n"
+                    + "{@code offset + maxLength} is not less than {@code dst.length}\n"
+                    + "@throws IllegalArgumentException If {@code maxLength} is negative\n"
+                    + "@throws DataAccessException If an I/O error occurs\n"
+                    + "@return The number of bytes read actually read and placed into {@code dst}";
         assertEquals(expected, result);
     }
 
     @Test
     @DisplayName("Test params, throws and returns more 2")
     void oneParamsThrowsAndReturnsWithMore3() {
-        String str = " /**\n" +
-                "     * Reads the next four bytes at the current {@link #position()}, composing them into an int value according to\n" +
-                "     * specified byte order, and then increments the {@link #position()} by four.\n" +
-                "     *\n" +
-                "     * @param byteOrder the byte order, aka endian to use. Should never be null. If it is null, BIG_ENDIAN is used.\n" +
-                "     * @return The int value at the current {@link #position()}\n" +
-                "     * @throws BufferUnderflowException If there are fewer than four bytes remaining\n" +
-                "     * @throws DataAccessException if an I/O error occurs\n" +
-                "     */";
+        String str =
+                " /**\n"
+                    + "     * Reads the next four bytes at the current {@link #position()},"
+                    + " composing them into an int value according to\n"
+                    + "     * specified byte order, and then increments the {@link #position()} by"
+                    + " four.\n"
+                    + "     *\n"
+                    + "     * @param byteOrder the byte order, aka endian to use. Should never be"
+                    + " null. If it is null, BIG_ENDIAN is used.\n"
+                    + "     * @return The int value at the current {@link #position()}\n"
+                    + "     * @throws BufferUnderflowException If there are fewer than four bytes"
+                    + " remaining\n"
+                    + "     * @throws DataAccessException if an I/O error occurs\n"
+                    + "     */";
         String result = Common.cleanJavaDocComment(str);
-        String expected = "Reads the next four bytes at the current {@link #position()}, composing them into an int value according to\n" +
-                "specified byte order, and then increments the {@link #position()} by four.\n" +
-                "     *\n" +
-                "@param byteOrder the byte order, aka endian to use. Should never be null. If it is null, BIG_ENDIAN is used.\n" +
-                "@return The int value at the current {@link #position()}\n" +
-                "@throws BufferUnderflowException If there are fewer than four bytes remaining\n" +
-                "@throws DataAccessException if an I/O error occurs";
+        String expected =
+                "Reads the next four bytes at the current {@link #position()}, composing them into"
+                    + " an int value according to\n"
+                    + "specified byte order, and then increments the {@link #position()} by four.\n"
+                    + "     *\n"
+                    + "@param byteOrder the byte order, aka endian to use. Should never be null. If"
+                    + " it is null, BIG_ENDIAN is used.\n"
+                    + "@return The int value at the current {@link #position()}\n"
+                    + "@throws BufferUnderflowException If there are fewer than four bytes"
+                    + " remaining\n"
+                    + "@throws DataAccessException if an I/O error occurs";
         assertEquals(expected, result);
     }
 }
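The expected strings in the tests above pin down the shape of the transformation: the `/** ... */` wrapper and the leading ` * ` prefix on each line are removed, `@param`, `@throws`, and `@return` tag lines are kept verbatim, and some interior `     *` separator lines survive. A rough, hypothetical sketch of that style of cleanup, which is not the actual `Common.cleanJavaDocComment` implementation, looks like:

import java.util.stream.Collectors;

// Illustration only; the real Common.cleanJavaDocComment also preserves the
// "     *" separator lines visible in the expected strings above.
public final class JavaDocCleanSketch {
    static String stripJavaDocWrapper(String comment) {
        String body = comment.trim();
        if (body.startsWith("/**")) body = body.substring(3);
        if (body.endsWith("*/")) body = body.substring(0, body.length() - 2);
        return body.lines()
                .map(line -> line.replaceFirst("^\\s*\\*\\s?", "")) // drop leading " * "
                .map(String::strip)
                .filter(line -> !line.isEmpty())
                .collect(Collectors.joining("\n"));
    }

    public static void main(String[] args) {
        String raw = "/**\n * Reads the next four bytes.\n *\n * @return The int value\n */";
        System.out.println(stripJavaDocWrapper(raw));
        // prints:
        // Reads the next four bytes.
        // @return The int value
    }
}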
diff --git a/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/LookupHelperTest.java b/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/LookupHelperTest.java
index 9004d1f1..bf637908 100644
--- a/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/LookupHelperTest.java
+++ b/pbj-core/pbj-compiler/src/test/java/com/hedera/pbj/compiler/impl/LookupHelperTest.java
@@ -13,18 +13,16 @@
 import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser.MessageBodyContext;
 import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser.MessageDefContext;
 import com.hedera.pbj.compiler.impl.grammar.Protobuf3Parser.MessageElementContext;
+import java.util.List;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.mockito.Mock;
 import org.mockito.MockitoAnnotations;
 
-import java.util.List;
-
 class LookupHelperTest {
-    @Mock
-    MessageDefContext defContext;
-    @Mock
-    Protobuf3Parser.OptionCommentContext optionComment;
+    @Mock MessageDefContext defContext;
+    @Mock Protobuf3Parser.OptionCommentContext optionComment;
+
     @BeforeEach
     void setUp() {
         MockitoAnnotations.openMocks(this);
@@ -45,6 +43,7 @@ void testNormalizeFileName_alreadyNormalized() {
         String fileName = "common.proto";
         assertEquals(fileName, normalizeFileName(fileName));
     }
+
     private static void normalizeAndVerify(String fileName) {
         if (System.getProperty("os.name").toLowerCase().contains("windows")) {
             String expected = "state\\common.proto";
@@ -77,38 +76,40 @@ void testExtractComparableFields_malformedComment() {
 
     @Test
     void testExtractComparableFields_notApplicableComment() {
-        when(optionComment.getText()).thenReturn("// <<<pbj.java_package = \"com.hedera.pbj.test.proto.pbj\">>>");
+        when(optionComment.getText())
+                .thenReturn("// <<<pbj.java_package = \"com.hedera.pbj.test.proto.pbj\">>>");
         when(defContext.optionComment()).thenReturn(optionComment);
         assertTrue(extractComparableFields(defContext).isEmpty(), "Should return empty list");
     }
 
     @Test
     void testExtractComparableFields_commentWithUnkownField() {
-        when(optionComment.getText()).thenReturn("// <<<pbj.comparable = \"int32Number, int64Number, unknown, text\" >>>");
+        when(optionComment.getText())
+                .thenReturn(
+                        "// <<<pbj.comparable = \"int32Number, int64Number, unknown, text\" >>>");
         when(defContext.optionComment()).thenReturn(optionComment);
         final var messageBody = mock(MessageBodyContext.class);
         final var int32Number = createMessageElement("int32Number");
         final var int64Number = createMessageElement("int64Number");
         final var text = createMessageElement("text");
-        when(messageBody.messageElement()).thenReturn(asList(
-                int32Number, int64Number, text
-        ));
+        when(messageBody.messageElement()).thenReturn(asList(int32Number, int64Number, text));
         when(defContext.messageBody()).thenReturn(messageBody);
-        assertThrows(IllegalArgumentException.class, () -> extractComparableFields(defContext),
+        assertThrows(
+                IllegalArgumentException.class,
+                () -> extractComparableFields(defContext),
                 "Should throw IllegalArgumentException");
     }
 
     @Test
     void testExtractComparableFields_validComment() {
-        when(optionComment.getText()).thenReturn("// <<<pbj.comparable = \"int32Number, int64Number, text\" >>>");
+        when(optionComment.getText())
+                .thenReturn("// <<<pbj.comparable = \"int32Number, int64Number, text\" >>>");
         when(defContext.optionComment()).thenReturn(optionComment);
         final var messageBody = mock(MessageBodyContext.class);
         final var int32Number = createMessageElement("int32Number");
         final var int64Number = createMessageElement("int64Number");
         final var text = createMessageElement("text");
-        when(messageBody.messageElement()).thenReturn(asList(
-                int32Number, int64Number, text
-        ));
+        when(messageBody.messageElement()).thenReturn(asList(int32Number, int64Number, text));
         when(defContext.messageBody()).thenReturn(messageBody);
         List<String> comparableFields = extractComparableFields(defContext);
         assertEquals(3, comparableFields.size(), "Should return 3 fields");
@@ -127,6 +128,4 @@ private static MessageElementContext createMessageElement(final String fieldName
 
         return messageElement;
     }
-
-
 }
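For reference, the `pbj.comparable` option comment exercised by these tests carries a comma-separated field list between `<<<` and `>>>` markers. A hypothetical, standalone sketch of extracting that list from the comment text, which is not the compiler's actual LookupHelper logic, could be:

import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Illustration only; the real extraction (and the validation that every listed
// field exists in the message body) lives in the pbj compiler's LookupHelper.
final class ComparableCommentSketch {
    private static final Pattern COMPARABLE =
            Pattern.compile("<<<\\s*pbj\\.comparable\\s*=\\s*\"([^\"]*)\"\\s*>>>");

    static List<String> parseComparableFields(String commentText) {
        Matcher m = COMPARABLE.matcher(commentText);
        if (!m.find()) {
            return List.of(); // no pbj.comparable option in this comment
        }
        return List.of(m.group(1).trim().split("\\s*,\\s*"));
    }

    public static void main(String[] args) {
        System.out.println(parseComparableFields(
                "// <<<pbj.comparable = \"int32Number, int64Number, text\" >>>"));
        // prints: [int32Number, int64Number, text]
    }
}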
diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandler.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandler.java
index 61aed6e5..c22a0634 100644
--- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandler.java
+++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandler.java
@@ -194,8 +194,7 @@ public void init() {
             // See https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-HTTP2.md
             // In addition, "application/grpc" is interpreted as "application/grpc+proto".
             final var requestHeaders = headers.httpHeaders();
-            final var requestContentType =
-                    requestHeaders.contentType().orElse(null);
+            final var requestContentType = requestHeaders.contentType().orElse(null);
             final var ct = requestContentType == null ? "" : requestContentType.text();
             final var contentType =
                     switch (ct) {
@@ -223,9 +222,10 @@ public void init() {
             // FUTURE: Add support for the other compression schemes and let the response be in the
             // same scheme that was sent to us, or another scheme in "grpc-accept-encoding" that
             // we support, or identity.
-            final var encodings = requestHeaders.contains(GRPC_ENCODING)
-                    ? requestHeaders.get(GRPC_ENCODING).allValues(true)
-                    : List.of(IDENTITY);
+            final var encodings =
+                    requestHeaders.contains(GRPC_ENCODING)
+                            ? requestHeaders.get(GRPC_ENCODING).allValues(true)
+                            : List.of(IDENTITY);
             boolean identitySpecified = false;
             for (final var encoding : encodings) {
                 if (encoding.startsWith(IDENTITY)) {
@@ -236,7 +236,9 @@ public void init() {
             if (!identitySpecified) {
                 throw new GrpcException(
                         GrpcStatus.UNIMPLEMENTED,
-                        "Decompressor is not installed for grpc-encoding \"" + String.join(", ", encodings) + "\"");
+                        "Decompressor is not installed for grpc-encoding \""
+                                + String.join(", ", encodings)
+                                + "\"");
             }
 
             // The client may have sent a "grpc-accept-encoding" header. Note that
@@ -386,11 +388,16 @@ public void data(@NonNull final Http2FrameHeader header, @NonNull final BufferDa
                                             GrpcStatus.INVALID_ARGUMENT,
                                             "Message size exceeds maximum allowed size");
                                 }
-                                // Create a buffer to hold the message. We sadly cannot reuse this buffer
-                                // because once we have filled it and wrapped it in Bytes and sent it to the
-                                // handler, some user code may grab and hold that Bytes object for an arbitrary
-                                // amount of time, and if we were to scribble into the same byte array, we
-                                // would break the application. So we need a new buffer each time :-(
+                                // Create a buffer to hold the message. We sadly cannot reuse this
+                                // buffer
+                                // because once we have filled it and wrapped it in Bytes and sent
+                                // it to the
+                                // handler, some user code may grab and hold that Bytes object for
+                                // an arbitrary
+                                // amount of time, and if we were to scribble into the same byte
+                                // array, we
+                                // would break the application. So we need a new buffer each time
+                                // :-(
                                 entityBytes = new byte[(int) length];
                                 entityBytesIndex = 0;
                                 // done with length now, so move on to next state
@@ -400,10 +407,14 @@ public void data(@NonNull final Http2FrameHeader header, @NonNull final BufferDa
                         }
                     case READ_ENTITY_BYTES:
                         {
-                            // By the time we get here, entityBytes is no longer null. It may be empty, or it
-                            // may already have been partially populated from a previous iteration. It may be
-                            // that the number of bytes available to be read is larger than just this one
-                            // message. So we need to be careful to read, from what is available, only up to
+                            // By the time we get here, entityBytes is no longer null. It may be
+                            // empty, or it
+                            // may already have been partially populated from a previous iteration.
+                            // It may be
+                            // that the number of bytes available to be read is larger than just
+                            // this one
+                            // message. So we need to be careful to read, from what is available,
+                            // only up to
                             // the message length, and to leave the rest for the next iteration.
                             final int available = data.available();
                             final int numBytesToRead =
@@ -450,8 +461,10 @@ public void data(@NonNull final Http2FrameHeader header, @NonNull final BufferDa
      * <p>May be called by different threads concurrently.
      */
     private void error() {
-        // Canceling a future that has already completed has no effect. So by canceling here, we are saying:
-        // "If you have not yet executed, never execute. If you have already executed, then just ignore me".
+        // Canceling a future that has already completed has no effect. So by canceling here, we are
+        // saying:
+        // "If you have not yet executed, never execute. If you have already executed, then just
+        // ignore me".
         // The "isCancelled" flag is set if the future was canceled before it was executed.
 
         // cancel is threadsafe
@@ -534,7 +547,8 @@ private void sendResponseHeaders(
 
         // Some headers are http2 specific, the rest are used for the grpc protocol
         final var grpcHeaders = WritableHeaders.create();
-        // FUTURE: I think to support custom headers in the response, we would have to list them here.
+        // FUTURE: I think to support custom headers in the response, we would have to list them
+        // here.
         // Since this has to be sent before we have any data to send, we must know ahead of time
         // which custom headers are to be returned.
         grpcHeaders.set(HeaderNames.TRAILER, "grpc-status, grpc-message");
@@ -661,8 +675,8 @@ protected void send(
     }
 
     /**
-     * The implementation of {@link Pipeline} used to send messages to the client. It
-     * receives bytes from the handlers to send to the client.
+     * The implementation of {@link Pipeline} used to send messages to the client. It receives bytes
+     * from the handlers to send to the client.
      */
     private final class SendToClientSubscriber implements Pipeline<Bytes> {
         @Override
@@ -686,8 +700,10 @@ public void onNext(@NonNull final Bytes response) {
                                 Http2Flag.DataFlags.create(0),
                                 streamId);
 
-                // This method may throw an UncheckedIOException. If this happens, the connection with the client
-                // has been violently terminated, and we should raise the error, and we should throw an exception
+                // This method may throw an UncheckedIOException. If this happens, the connection
+                // with the client
+                // has been violently terminated, and we should raise the error, and we should throw
+                // an exception
                 // so the user knows the connection is toast.
                 streamWriter.writeData(new Http2FrameData(header, bufferData), flowControl);
             } catch (final Exception e) {
@@ -710,7 +726,8 @@ public void onError(@NonNull final Throwable throwable) {
                     new TrailerBuilder().grpcStatus(GrpcStatus.INTERNAL).send();
                 }
             } catch (Exception ignored) {
-                // If an exception is thrown trying to return headers, we're already in the error state, so
+                // If an exception is thrown trying to return headers, we're already in the error
+                // state, so
                 // just continue.
             }
             error();
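One of the rewrapped blocks earlier in this file's diff is the grpc-encoding check; the rule itself is unchanged: at least one value in the client's grpc-encoding list must start with "identity" (qualifiers such as ";q=0.5" are tolerated because only the prefix is checked), otherwise the call is rejected with UNIMPLEMENTED. A minimal standalone sketch of that check, with hypothetical names, is:

import java.util.List;

// Illustration of the identity check described above; the real handler reads the
// values from the grpc-encoding HTTP/2 header and throws a GrpcException with
// status UNIMPLEMENTED when no acceptable encoding is found.
final class GrpcEncodingCheckSketch {
    static boolean identityAccepted(List<String> grpcEncodingValues) {
        for (String encoding : grpcEncodingValues) {
            // "identity", "identity;q=0.5", "identity;nonsense" all pass; "gzip" alone does not.
            if (encoding.startsWith("identity")) {
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        System.out.println(identityAccepted(List.of("gzip", "identity;q=0.1"))); // true
        System.out.println(identityAccepted(List.of("gzip", "deflate")));        // false
    }
}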
diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolSelector.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolSelector.java
index 338b5f3b..c865c116 100644
--- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolSelector.java
+++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/PbjProtocolSelector.java
@@ -5,20 +5,12 @@
 
 import com.hedera.pbj.grpc.helidon.config.PbjConfig;
 import edu.umd.cs.findbugs.annotations.NonNull;
-import io.helidon.common.buffers.BufferData;
 import io.helidon.http.HttpPrologue;
 import io.helidon.http.Method;
-import io.helidon.http.Status;
-import io.helidon.http.WritableHeaders;
-import io.helidon.http.http2.FlowControl;
-import io.helidon.http.http2.Http2Flag;
-import io.helidon.http.http2.Http2FrameHeader;
 import io.helidon.http.http2.Http2Headers;
-import io.helidon.http.http2.Http2RstStream;
 import io.helidon.http.http2.Http2Settings;
 import io.helidon.http.http2.Http2StreamState;
 import io.helidon.http.http2.Http2StreamWriter;
-import io.helidon.http.http2.Http2WindowUpdate;
 import io.helidon.http.http2.StreamFlowControl;
 import io.helidon.metrics.api.Counter;
 import io.helidon.metrics.api.Metrics;
diff --git a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/RouteNotFoundHandler.java b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/RouteNotFoundHandler.java
index d3075e58..2b79aafa 100644
--- a/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/RouteNotFoundHandler.java
+++ b/pbj-core/pbj-grpc-helidon/src/main/java/com/hedera/pbj/grpc/helidon/RouteNotFoundHandler.java
@@ -14,20 +14,17 @@
 import io.helidon.http.http2.Http2StreamWriter;
 import io.helidon.http.http2.Http2WindowUpdate;
 import io.helidon.webserver.http2.spi.Http2SubProtocolSelector;
-
 import java.util.Objects;
 
-/**
- * A handler for the case where the path is not found.
- */
-final class RouteNotFoundHandler
-        implements Http2SubProtocolSelector.SubProtocolHandler {
+/** A handler for the case where the path is not found. */
+final class RouteNotFoundHandler implements Http2SubProtocolSelector.SubProtocolHandler {
     private final Http2StreamWriter streamWriter;
     private final int streamId;
     private Http2StreamState currentStreamState;
 
     /**
      * Constructor
+     *
      * @param streamWriter the stream writer
      * @param streamId the stream id
      * @param currentStreamState the current stream state
@@ -50,8 +47,7 @@ public void init() {
         streamWriter.writeHeaders(
                 http2Headers,
                 streamId,
-                Http2Flag.HeaderFlags.create(
-                        Http2Flag.END_OF_HEADERS | Http2Flag.END_OF_STREAM),
+                Http2Flag.HeaderFlags.create(Http2Flag.END_OF_HEADERS | Http2Flag.END_OF_STREAM),
                 FlowControl.Outbound.NOOP);
         currentStreamState = Http2StreamState.HALF_CLOSED_LOCAL;
     }
diff --git a/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/GreeterServiceImpl.java b/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/GreeterServiceImpl.java
index a1b653ee..2e913d75 100644
--- a/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/GreeterServiceImpl.java
+++ b/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/GreeterServiceImpl.java
@@ -60,8 +60,7 @@ public void onComplete() {
     }
 
     @Override
-    public void sayHelloStreamReply(
-            HelloRequest request, Pipeline<? super HelloReply> replies) {
+    public void sayHelloStreamReply(HelloRequest request, Pipeline<? super HelloReply> replies) {
         for (int i = 0; i < 10; i++) {
             replies.onNext(HelloReply.newBuilder().setMessage("Hello!").build());
         }
@@ -70,8 +69,7 @@ public void sayHelloStreamReply(
     }
 
     @Override
-    public Pipeline<? super HelloRequest> sayHelloStreamBidi(
-            Pipeline<? super HelloReply> replies) {
+    public Pipeline<? super HelloRequest> sayHelloStreamBidi(Pipeline<? super HelloReply> replies) {
         // Here we receive info from the client. In this case, it is a stream of requests with
         // names. We will respond with a stream of replies.
         return new Pipeline<>() {
diff --git a/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandlerTest.java b/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandlerTest.java
index 7f24e03a..bc59cfeb 100644
--- a/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandlerTest.java
+++ b/pbj-core/pbj-grpc-helidon/src/test/java/com/hedera/pbj/grpc/helidon/PbjProtocolHandlerTest.java
@@ -59,8 +59,10 @@ class PbjProtocolHandlerTest {
 
     @BeforeEach
     void setUp() {
-        headers = Http2Headers.create(WritableHeaders.create()
-                .add(HeaderNames.CONTENT_TYPE, "application/grpc+proto"));
+        headers =
+                Http2Headers.create(
+                        WritableHeaders.create()
+                                .add(HeaderNames.CONTENT_TYPE, "application/grpc+proto"));
         streamWriter = new StreamWriterStub();
         streamId = 1;
         flowControl = new OutboundFlowControlStub();
@@ -78,9 +80,9 @@ void setUp() {
     }
 
     /**
-     * If the content-type is missing, or does not start with "application/grpc", the server should respond with a 415
-     * Unsupported Media Type and the stream state should end up CLOSED. See
-     * <a href="https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-HTTP2.md"/>
+     * If the content-type is missing, or does not start with "application/grpc", the server should
+     * respond with a 415 Unsupported Media Type and the stream state should end up CLOSED. See <a
+     * href="https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-HTTP2.md"/>
      */
     @ValueSource(strings = {"", "text/plain", "application/json"})
     @ParameterizedTest
@@ -90,8 +92,16 @@ void unsupportedContentType(String contentType) {
         headers = Http2Headers.create(h);
 
         // Initializing the handler will throw an error because the content types are unsupported
-        final var handler = new PbjProtocolHandler(
-                headers, streamWriter, streamId, flowControl, currentStreamState, config, route, deadlineDetector);
+        final var handler =
+                new PbjProtocolHandler(
+                        headers,
+                        streamWriter,
+                        streamId,
+                        flowControl,
+                        currentStreamState,
+                        config,
+                        route,
+                        deadlineDetector);
         handler.init();
 
         // Even though the request failed, it was made, and should be counted
@@ -107,18 +117,26 @@ void unsupportedContentType(String contentType) {
         final var responseHeaderFrame = streamWriter.writtenHeaders.getFirst();
         assertThat(responseHeaderFrame.status()).isEqualTo(Status.UNSUPPORTED_MEDIA_TYPE_415);
 
-        // I verified with the go GRPC server its behavior in this scenario. The following headers should be
+        // I verified with the go GRPC server its behavior in this scenario. The following headers
+        // should be
         // available in the response
         // Content-Type: application/grpc
         // Grpc-Message: invalid gRPC request content-type ""
         // Grpc-Status: 3
-        final var responseHeaders = responseHeaderFrame.httpHeaders().stream()
-                .collect(Collectors.toMap(Header::name, Header::values));
-        assertThat(responseHeaders).contains(
-                entry("grpc-status", "" + GrpcStatus.INVALID_ARGUMENT.ordinal()),
-                entry("grpc-message", UriEncoding.encodeUri("invalid gRPC request content-type \"" + contentType + "\"")),
-                entry("Content-Type", "application/grpc"),
-                entry("grpc-accept-encoding", "identity"));
+        final var responseHeaders =
+                responseHeaderFrame.httpHeaders().stream()
+                        .collect(Collectors.toMap(Header::name, Header::values));
+        assertThat(responseHeaders)
+                .contains(
+                        entry("grpc-status", "" + GrpcStatus.INVALID_ARGUMENT.ordinal()),
+                        entry(
+                                "grpc-message",
+                                UriEncoding.encodeUri(
+                                        "invalid gRPC request content-type \""
+                                                + contentType
+                                                + "\"")),
+                        entry("Content-Type", "application/grpc"),
+                        entry("grpc-accept-encoding", "identity"));
 
         // The stream should be closed
         assertThat(handler.streamState()).isEqualTo(Http2StreamState.CLOSED);
@@ -134,8 +152,16 @@ void contentTypeIsNormalized() {
         headers = Http2Headers.create(h);
 
         // Initialize will succeed!
-        final var handler = new PbjProtocolHandler(
-                headers, streamWriter, streamId, flowControl, currentStreamState, config, route, deadlineDetector);
+        final var handler =
+                new PbjProtocolHandler(
+                        headers,
+                        streamWriter,
+                        streamId,
+                        flowControl,
+                        currentStreamState,
+                        config,
+                        route,
+                        deadlineDetector);
         handler.init();
         assertThat(route.requestCounter().count()).isEqualTo(1);
 
@@ -167,8 +193,16 @@ void unsupportedGrpcEncodings(String encoding) {
         headers = Http2Headers.create(h);
 
         // Initializing the handler will throw an error because the content types are unsupported
-        final var handler = new PbjProtocolHandler(
-                headers, streamWriter, streamId, flowControl, currentStreamState, config, route, deadlineDetector);
+        final var handler =
+                new PbjProtocolHandler(
+                        headers,
+                        streamWriter,
+                        streamId,
+                        flowControl,
+                        currentStreamState,
+                        config,
+                        route,
+                        deadlineDetector);
         handler.init();
 
         // Even though the request failed, it was made, and should be counted
@@ -184,18 +218,26 @@ void unsupportedGrpcEncodings(String encoding) {
         final var responseHeaderFrame = streamWriter.writtenHeaders.getFirst();
         assertThat(responseHeaderFrame.status()).isEqualTo(Status.OK_200);
 
-        // I verified with the go GRPC server its behavior in this scenario. The following headers should be
+        // I verified with the go GRPC server its behavior in this scenario. The following headers
+        // should be
         // available in the response
         // Content-Type: application/grpc
         // Grpc-Message: grpc: Decompressor is not installed for grpc-encoding "[bad encoding here]"
         // Grpc-Status: 12
-        final var responseHeaders = responseHeaderFrame.httpHeaders().stream()
-                .collect(Collectors.toMap(Header::name, Header::values));
-        assertThat(responseHeaders).contains(
-                entry("grpc-status", "" + GrpcStatus.UNIMPLEMENTED.ordinal()),
-                entry("grpc-message", UriEncoding.encodeUri("Decompressor is not installed for grpc-encoding \"" + encoding + "\"")),
-                entry("Content-Type", "application/grpc"),
-                entry("grpc-accept-encoding", "identity"));
+        final var responseHeaders =
+                responseHeaderFrame.httpHeaders().stream()
+                        .collect(Collectors.toMap(Header::name, Header::values));
+        assertThat(responseHeaders)
+                .contains(
+                        entry("grpc-status", "" + GrpcStatus.UNIMPLEMENTED.ordinal()),
+                        entry(
+                                "grpc-message",
+                                UriEncoding.encodeUri(
+                                        "Decompressor is not installed for grpc-encoding \""
+                                                + encoding
+                                                + "\"")),
+                        entry("Content-Type", "application/grpc"),
+                        entry("grpc-accept-encoding", "identity"));
 
         // The stream should be closed
         assertThat(handler.streamState()).isEqualTo(Http2StreamState.CLOSED);
@@ -204,21 +246,26 @@ void unsupportedGrpcEncodings(String encoding) {
     /**
      * These are encodings we support. They all contain "identity".
      *
-     * <p>See <a href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding"/> for information
-     * on the encoding header syntax.
+     * <p>See <a href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding"/>
+     * for information on the encoding header syntax.
      *
      * @param encoding
      */
-    @ValueSource(strings = {
-            // Simple identity strings with qualifiers
-            "identity", "identity;q=0.5", "identity;", "identity;nonsense",
-            // an identity with and without a qualifier in a list of encodings
-            "gzip, deflate;q=0.5, identity;q=0.1",
-            "gzip, deflate;q=0.5, identity",
-            "gzip, identity;q=0.1, deflate;q=0.5",
-            "gzip, identity, deflate;q=0.5",
-            "identity;q=.9, deflate;q=0.5, gzip;q=0.1, br;q=0.1",
-            "identity, deflate;q=0.5, gzip;q=0.1, br;q=0.1"})
+    @ValueSource(
+            strings = {
+                // Simple identity strings with qualifiers
+                "identity",
+                "identity;q=0.5",
+                "identity;",
+                "identity;nonsense",
+                // an identity with and without a qualifier in a list of encodings
+                "gzip, deflate;q=0.5, identity;q=0.1",
+                "gzip, deflate;q=0.5, identity",
+                "gzip, identity;q=0.1, deflate;q=0.5",
+                "gzip, identity, deflate;q=0.5",
+                "identity;q=.9, deflate;q=0.5, gzip;q=0.1, br;q=0.1",
+                "identity, deflate;q=0.5, gzip;q=0.1, br;q=0.1"
+            })
     @ParameterizedTest
     void supportedComplexEncodingsWithIdentity(String encoding) {
         final var h = WritableHeaders.create();
@@ -227,8 +274,16 @@ void supportedComplexEncodingsWithIdentity(String encoding) {
         headers = Http2Headers.create(h);
 
         // Initializing the handler will throw an error because the content types are unsupported
-        final var handler = new PbjProtocolHandler(
-                headers, streamWriter, streamId, flowControl, currentStreamState, config, route, deadlineDetector);
+        final var handler =
+                new PbjProtocolHandler(
+                        headers,
+                        streamWriter,
+                        streamId,
+                        flowControl,
+                        currentStreamState,
+                        config,
+                        route,
+                        deadlineDetector);
         handler.init();
 
         // Even though the request failed, it was made, and should be counted
@@ -247,11 +302,13 @@ void supportedComplexEncodingsWithIdentity(String encoding) {
         final var responseHeaderFrame = streamWriter.writtenHeaders.getFirst();
         assertThat(responseHeaderFrame.status()).isEqualTo(Status.OK_200);
 
-        final var responseHeaders = responseHeaderFrame.httpHeaders().stream()
-                .collect(Collectors.toMap(Header::name, Header::values));
-        assertThat(responseHeaders).contains(
-                entry("Content-Type", "application/grpc+proto"),
-                entry("grpc-accept-encoding", "identity"));
+        final var responseHeaders =
+                responseHeaderFrame.httpHeaders().stream()
+                        .collect(Collectors.toMap(Header::name, Header::values));
+        assertThat(responseHeaders)
+                .contains(
+                        entry("Content-Type", "application/grpc+proto"),
+                        entry("grpc-accept-encoding", "identity"));
 
         // The stream should be closed
         assertThat(handler.streamState()).isEqualTo(Http2StreamState.HALF_CLOSED_REMOTE);
@@ -259,29 +316,45 @@ void supportedComplexEncodingsWithIdentity(String encoding) {
 
     @Test
     void errorThrownForOnNextWhenStreamIsClosed() {
-        // Use a custom streamWriter that will throw an exception when "streamClosed" is set to true, and it is
-        // asked to write something. This can be used to simulate what happens when the network connection fails.
+        // Use a custom streamWriter that will throw an exception when "streamClosed" is set to
+        // true, and it is
+        // asked to write something. This can be used to simulate what happens when the network
+        // connection fails.
         final var streamClosed = new AtomicBoolean(false);
-        streamWriter = new StreamWriterStub() {
-            @Override
-            public void writeData(Http2FrameData frame, FlowControl.Outbound flowControl) {
-                if (streamClosed.get()) {
-                    throw new IllegalStateException("Stream is closed");
-                }
-            }
-        };
+        streamWriter =
+                new StreamWriterStub() {
+                    @Override
+                    public void writeData(Http2FrameData frame, FlowControl.Outbound flowControl) {
+                        if (streamClosed.get()) {
+                            throw new IllegalStateException("Stream is closed");
+                        }
+                    }
+                };
 
         // Within this test, the replyRef will be set once when the setup is complete, and then
         // will be available for the test code to use to call onNext, onError, etc. as required.
         final var replyRef = new AtomicReference<Pipeline<? super HelloReply>>();
-        route = new PbjMethodRoute(new GreeterServiceImpl() {
-            @Override
-            public void sayHelloStreamReply(HelloRequest request, Pipeline<? super HelloReply> replies) {
-                replyRef.set(replies);
-            }
-        }, GreeterService.GreeterMethod.sayHelloStreamReply);
-
-        final var handler = new PbjProtocolHandler(headers, streamWriter, streamId, flowControl, currentStreamState, config, route, deadlineDetector);
+        route =
+                new PbjMethodRoute(
+                        new GreeterServiceImpl() {
+                            @Override
+                            public void sayHelloStreamReply(
+                                    HelloRequest request, Pipeline<? super HelloReply> replies) {
+                                replyRef.set(replies);
+                            }
+                        },
+                        GreeterService.GreeterMethod.sayHelloStreamReply);
+
+        final var handler =
+                new PbjProtocolHandler(
+                        headers,
+                        streamWriter,
+                        streamId,
+                        flowControl,
+                        currentStreamState,
+                        config,
+                        route,
+                        deadlineDetector);
         handler.init();
         sendAllData(handler, createRequestData("Alice"));
 
@@ -292,8 +365,7 @@ public void sayHelloStreamReply(HelloRequest request, Pipeline<? super HelloRepl
         streamClosed.set(true);
 
         final var failingReply = HelloReply.newBuilder().setMessage("Bad").build();
-        assertThatThrownBy(() -> replies.onNext(failingReply))
-                .isInstanceOf(Exception.class);
+        assertThatThrownBy(() -> replies.onNext(failingReply)).isInstanceOf(Exception.class);
 
         assertThat(route.requestCounter().count()).isEqualTo(1);
         assertThat(route.failedGrpcRequestCounter().count()).isEqualTo(0);
@@ -330,7 +402,11 @@ private BufferData createDataFrameBytes(Bytes data) {
     }
 
     private Http2FrameHeader createDataFrameHeader(int length) {
-        return Http2FrameHeader.create(length + 5, Http2FrameTypes.DATA, Http2Flag.DataFlags.create(Http2Flags.END_STREAM), streamId);
+        return Http2FrameHeader.create(
+                length + 5,
+                Http2FrameTypes.DATA,
+                Http2Flag.DataFlags.create(Http2Flags.END_STREAM),
+                streamId);
     }
 
     private static final class OutboundFlowControlStub implements FlowControl.Outbound {
@@ -346,9 +422,7 @@ public Http2FrameData[] cut(Http2FrameData frame) {
         }
 
         @Override
-        public void blockTillUpdate() {
-
-        }
+        public void blockTillUpdate() {}
 
         @Override
         public int maxFrameSize() {
@@ -356,14 +430,10 @@ public int maxFrameSize() {
         }
 
         @Override
-        public void decrementWindowSize(int decrement) {
-
-        }
+        public void decrementWindowSize(int decrement) {}
 
         @Override
-        public void resetStreamWindowSize(int size) {
-
-        }
+        public void resetStreamWindowSize(int size) {}
 
         @Override
         public int getRemainingWindowSize() {
@@ -375,7 +445,6 @@ private static class StreamWriterStub implements Http2StreamWriter {
         private final List<Http2FrameData> writtenDataFrames = new ArrayList<>();
         private final List<Http2Headers> writtenHeaders = new ArrayList<>();
 
-
         @Override
         public void write(Http2FrameData frame) {
             writtenDataFrames.add(frame);
@@ -387,13 +456,22 @@ public void writeData(Http2FrameData frame, FlowControl.Outbound flowControl) {
         }
 
         @Override
-        public int writeHeaders(Http2Headers headers, int streamId, Http2Flag.HeaderFlags flags, FlowControl.Outbound flowControl) {
+        public int writeHeaders(
+                Http2Headers headers,
+                int streamId,
+                Http2Flag.HeaderFlags flags,
+                FlowControl.Outbound flowControl) {
             writtenHeaders.add(headers);
             return 0;
         }
 
         @Override
-        public int writeHeaders(Http2Headers headers, int streamId, Http2Flag.HeaderFlags flags, Http2FrameData dataFrame, FlowControl.Outbound flowControl) {
+        public int writeHeaders(
+                Http2Headers headers,
+                int streamId,
+                Http2Flag.HeaderFlags flags,
+                Http2FrameData dataFrame,
+                FlowControl.Outbound flowControl) {
             writtenHeaders.add(headers);
             writtenDataFrames.add(dataFrame);
             return 0;
@@ -416,7 +494,8 @@ public String name() {
     private static final class DeadlineDetectorStub implements DeadlineDetector {
         @NonNull
         @Override
-        public ScheduledFuture<?> scheduleDeadline(long deadlineNanos, @NonNull Runnable onDeadlineExceeded) {
+        public ScheduledFuture<?> scheduleDeadline(
+                long deadlineNanos, @NonNull Runnable onDeadlineExceeded) {
             return new ScheduledFuture<>() {
                 @Override
                 public long getDelay(@NonNull TimeUnit unit) {
@@ -488,7 +567,8 @@ public List<Method> methods() {
         public Pipeline<? super Bytes> open(
                 @NonNull Method method,
                 @NonNull RequestOptions opts,
-                @NonNull Pipeline<? super Bytes> responses) throws GrpcException {
+                @NonNull Pipeline<? super Bytes> responses)
+                throws GrpcException {
             this.calledMethod = method;
             this.opts = opts;
             return new Pipeline<>() {
diff --git a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BufferedDataGetBytes.java b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BufferedDataGetBytes.java
index 11a98271..cd6250f7 100644
--- a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BufferedDataGetBytes.java
+++ b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BufferedDataGetBytes.java
@@ -105,5 +105,4 @@ public void directToBytes(final Blackhole blackhole) {
             blackhole.consume(bytes);
         }
     }
-
 }
diff --git a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/ByteBufferGetByte.java b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/ByteBufferGetByte.java
index aa9b22b3..a5e315f7 100644
--- a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/ByteBufferGetByte.java
+++ b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/ByteBufferGetByte.java
@@ -79,5 +79,4 @@ public void directUnsafeGet(final Blackhole blackhole) {
             blackhole.consume(UnsafeUtils.getDirectBufferByte(directBuffer, i));
         }
     }
-
 }
diff --git a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BytesGetLong.java b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BytesGetLong.java
index 6c8ebe39..83d4aa16 100644
--- a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BytesGetLong.java
+++ b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/BytesGetLong.java
@@ -44,5 +44,4 @@ public void testUnsafeGetLong(final Blackhole blackhole) {
             blackhole.consume(UnsafeUtils.getLong(array, i));
         }
     }
-
 }
diff --git a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WritableStreamingDataBench.java b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WritableStreamingDataBench.java
index 8080952c..9731dda9 100644
--- a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WritableStreamingDataBench.java
+++ b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WritableStreamingDataBench.java
@@ -63,5 +63,4 @@ public void writeRandomAccessData() {
         assert out.position() == SIZE;
         assert bout.toByteArray().length == SIZE;
     }
-
 }
diff --git a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBufferedDataBench.java b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBufferedDataBench.java
index 55cc0a6d..4713e08c 100644
--- a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBufferedDataBench.java
+++ b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBufferedDataBench.java
@@ -8,6 +8,14 @@
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
 import com.hedera.pbj.runtime.io.stream.ReadableStreamingData;
 import com.hedera.pbj.runtime.io.stream.WritableStreamingData;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.UncheckedIOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Random;
+import java.util.concurrent.TimeUnit;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
@@ -22,15 +30,6 @@
 import org.openjdk.jmh.annotations.Warmup;
 import org.openjdk.jmh.infra.Blackhole;
 
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.UncheckedIOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.Random;
-import java.util.concurrent.TimeUnit;
-
 @SuppressWarnings("unused")
 @State(Scope.Benchmark)
 @Fork(1)
@@ -40,73 +39,75 @@
 @BenchmarkMode(Mode.Throughput)
 public class WriteBufferedDataBench {
 
-	public static final FieldDefinition BYTES_FIELD = new FieldDefinition("bytesField", FieldType.BYTES, false, false, false, 17);
-	final static BufferedData sampleData;
-	final static byte[] sampleWrittenData;
+    public static final FieldDefinition BYTES_FIELD =
+            new FieldDefinition("bytesField", FieldType.BYTES, false, false, false, 17);
+    static final BufferedData sampleData;
+    static final byte[] sampleWrittenData;
 
-	static {
-		final Random random = new Random(6262266);
-		byte[] data = new byte[1024*16];
-		random.nextBytes(data);
-		sampleData = BufferedData.wrap(data);
+    static {
+        final Random random = new Random(6262266);
+        byte[] data = new byte[1024 * 16];
+        random.nextBytes(data);
+        sampleData = BufferedData.wrap(data);
 
-		ByteArrayOutputStream bout = new ByteArrayOutputStream();
-		try (WritableStreamingData out = new WritableStreamingData(bout)) {
-			for (int i = 0; i < 100; i++) {
-				random.nextBytes(data);
-				ProtoWriterTools.writeBytes(out, BYTES_FIELD, sampleData);
-			}
-		} catch (IOException e) {
-			e.printStackTrace();
-		}
-		sampleWrittenData = bout.toByteArray();
-	}
+        ByteArrayOutputStream bout = new ByteArrayOutputStream();
+        try (WritableStreamingData out = new WritableStreamingData(bout)) {
+            for (int i = 0; i < 100; i++) {
+                random.nextBytes(data);
+                ProtoWriterTools.writeBytes(out, BYTES_FIELD, sampleData);
+            }
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+        sampleWrittenData = bout.toByteArray();
+    }
 
-	Path tempFileWriting;
-	Path tempFileReading;
-	OutputStream fout;
-	WritableStreamingData dataOut;
+    Path tempFileWriting;
+    Path tempFileReading;
+    OutputStream fout;
+    WritableStreamingData dataOut;
 
-	@Setup
-	public void prepare() {
-		try {
-			tempFileWriting = Files.createTempFile("WriteBytesBench", "dat");
-			tempFileWriting.toFile().deleteOnExit();
-			fout = Files.newOutputStream(tempFileWriting);
-			dataOut = new WritableStreamingData(fout);
-			tempFileReading = Files.createTempFile("WriteBytesBench", "dat");
-			tempFileReading.toFile().deleteOnExit();
-			Files.write(tempFileReading, sampleWrittenData);
-		} catch (IOException e) {
-			e.printStackTrace();
-			throw new UncheckedIOException(e);
-		}
-	}
+    @Setup
+    public void prepare() {
+        try {
+            tempFileWriting = Files.createTempFile("WriteBytesBench", "dat");
+            tempFileWriting.toFile().deleteOnExit();
+            fout = Files.newOutputStream(tempFileWriting);
+            dataOut = new WritableStreamingData(fout);
+            tempFileReading = Files.createTempFile("WriteBytesBench", "dat");
+            tempFileReading.toFile().deleteOnExit();
+            Files.write(tempFileReading, sampleWrittenData);
+        } catch (IOException e) {
+            e.printStackTrace();
+            throw new UncheckedIOException(e);
+        }
+    }
 
-	@TearDown
-	public void cleanUp() {
-		try {
-			dataOut.close();
-			fout.close();
-		} catch (IOException e){
-			e.printStackTrace();
-			throw new UncheckedIOException(e);
-		}
-	}
+    @TearDown
+    public void cleanUp() {
+        try {
+            dataOut.close();
+            fout.close();
+        } catch (IOException e) {
+            e.printStackTrace();
+            throw new UncheckedIOException(e);
+        }
+    }
 
-	@Benchmark
-	public void writeBytes(Blackhole blackhole) throws IOException {
-		ProtoWriterTools.writeBytes(dataOut, BYTES_FIELD, sampleData);
-	}
+    @Benchmark
+    public void writeBytes(Blackhole blackhole) throws IOException {
+        ProtoWriterTools.writeBytes(dataOut, BYTES_FIELD, sampleData);
+    }
 
-	@Benchmark
-	@OperationsPerInvocation(100)
-	public void readBytes(Blackhole blackhole) throws IOException {
-		try (ReadableStreamingData in = new ReadableStreamingData(Files.newInputStream(tempFileReading)) ) {
-			for (int i = 0; i < 100; i++) {
-				blackhole.consume(in.readVarInt(false));
-				blackhole.consume(ProtoParserTools.readBytes(in));
-			}
-		}
-	}
+    @Benchmark
+    @OperationsPerInvocation(100)
+    public void readBytes(Blackhole blackhole) throws IOException {
+        try (ReadableStreamingData in =
+                new ReadableStreamingData(Files.newInputStream(tempFileReading))) {
+            for (int i = 0; i < 100; i++) {
+                blackhole.consume(in.readVarInt(false));
+                blackhole.consume(ProtoParserTools.readBytes(in));
+            }
+        }
+    }
 }
diff --git a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBytesBench.java b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBytesBench.java
index c8779bb9..bab2e252 100644
--- a/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBytesBench.java
+++ b/pbj-core/pbj-runtime/src/jmh/java/com/hedera/pbj/runtime/io/WriteBytesBench.java
@@ -39,73 +39,75 @@
 @BenchmarkMode(Mode.Throughput)
 public class WriteBytesBench {
 
-	public static final FieldDefinition BYTES_FIELD = new FieldDefinition("bytesField", FieldType.BYTES, false, false, false, 17);
-	final static Bytes sampleData;
-	final static byte[] sampleWrittenData;
+    public static final FieldDefinition BYTES_FIELD =
+            new FieldDefinition("bytesField", FieldType.BYTES, false, false, false, 17);
+    static final Bytes sampleData;
+    static final byte[] sampleWrittenData;
 
-	static {
-		final Random random = new Random(6262266);
-		byte[] data = new byte[1024*16];
-		random.nextBytes(data);
-		sampleData = Bytes.wrap(data);
+    static {
+        final Random random = new Random(6262266);
+        byte[] data = new byte[1024 * 16];
+        random.nextBytes(data);
+        sampleData = Bytes.wrap(data);
 
-		ByteArrayOutputStream bout = new ByteArrayOutputStream();
-		try (WritableStreamingData out = new WritableStreamingData(bout)) {
-			for (int i = 0; i < 100; i++) {
-				random.nextBytes(data);
-				ProtoWriterTools.writeBytes(out, BYTES_FIELD, sampleData);
-			}
-		} catch (IOException e) {
-			e.printStackTrace();
-		}
-		sampleWrittenData = bout.toByteArray();
-	}
+        ByteArrayOutputStream bout = new ByteArrayOutputStream();
+        try (WritableStreamingData out = new WritableStreamingData(bout)) {
+            for (int i = 0; i < 100; i++) {
+                random.nextBytes(data);
+                ProtoWriterTools.writeBytes(out, BYTES_FIELD, sampleData);
+            }
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+        sampleWrittenData = bout.toByteArray();
+    }
 
-	Path tempFileWriting;
-	Path tempFileReading;
-	OutputStream fout;
-	WritableStreamingData dataOut;
+    Path tempFileWriting;
+    Path tempFileReading;
+    OutputStream fout;
+    WritableStreamingData dataOut;
 
-	@Setup
-	public void prepare() {
-		try {
-			tempFileWriting = Files.createTempFile("WriteBytesBench", "dat");
-			tempFileWriting.toFile().deleteOnExit();
-			fout = Files.newOutputStream(tempFileWriting);
-			dataOut = new WritableStreamingData(fout);
-			tempFileReading = Files.createTempFile("WriteBytesBench", "dat");
-			tempFileReading.toFile().deleteOnExit();
-			Files.write(tempFileReading, sampleWrittenData);
-		} catch (IOException e) {
-			e.printStackTrace();
-			throw new UncheckedIOException(e);
-		}
-	}
+    @Setup
+    public void prepare() {
+        try {
+            tempFileWriting = Files.createTempFile("WriteBytesBench", "dat");
+            tempFileWriting.toFile().deleteOnExit();
+            fout = Files.newOutputStream(tempFileWriting);
+            dataOut = new WritableStreamingData(fout);
+            tempFileReading = Files.createTempFile("WriteBytesBench", "dat");
+            tempFileReading.toFile().deleteOnExit();
+            Files.write(tempFileReading, sampleWrittenData);
+        } catch (IOException e) {
+            e.printStackTrace();
+            throw new UncheckedIOException(e);
+        }
+    }
 
-	@TearDown
-	public void cleanUp() {
-		try {
-			dataOut.close();
-			fout.close();
-		} catch (IOException e){
-			e.printStackTrace();
-			throw new UncheckedIOException(e);
-		}
-	}
+    @TearDown
+    public void cleanUp() {
+        try {
+            dataOut.close();
+            fout.close();
+        } catch (IOException e) {
+            e.printStackTrace();
+            throw new UncheckedIOException(e);
+        }
+    }
 
-	@Benchmark
-	public void writeBytes(Blackhole blackhole) throws IOException {
-		ProtoWriterTools.writeBytes(dataOut, BYTES_FIELD, sampleData);
-	}
+    @Benchmark
+    public void writeBytes(Blackhole blackhole) throws IOException {
+        ProtoWriterTools.writeBytes(dataOut, BYTES_FIELD, sampleData);
+    }
 
-	@Benchmark
-	@OperationsPerInvocation(100)
-	public void readBytes(Blackhole blackhole) throws IOException {
-		try (ReadableStreamingData in = new ReadableStreamingData(Files.newInputStream(tempFileReading)) ) {
-			for (int i = 0; i < 100; i++) {
-				blackhole.consume(in.readVarInt(false));
-				blackhole.consume(ProtoParserTools.readBytes(in));
-			}
-		}
-	}
+    @Benchmark
+    @OperationsPerInvocation(100)
+    public void readBytes(Blackhole blackhole) throws IOException {
+        try (ReadableStreamingData in =
+                new ReadableStreamingData(Files.newInputStream(tempFileReading))) {
+            for (int i = 0; i < 100; i++) {
+                blackhole.consume(in.readVarInt(false));
+                blackhole.consume(ProtoParserTools.readBytes(in));
+            }
+        }
+    }
 }
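Both benchmark classes above (WriteBufferedDataBench and WriteBytesBench) follow the usual JMH pattern of a benchmark-scoped state class with @Setup, @TearDown, and @Benchmark methods; they are normally launched through the Gradle JMH integration, but a hypothetical standalone runner would look like:

import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;

// Illustration only: launch the write/read benchmarks above from a plain main() method.
public class RunWriteBytesBenchSketch {
    public static void main(String[] args) throws RunnerException {
        Options options = new OptionsBuilder()
                .include(WriteBytesBench.class.getSimpleName()) // or WriteBufferedDataBench
                .forks(1)
                .build();
        new Runner(options).run();
    }
}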
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Codec.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Codec.java
index d19cd86d..a6480fa1 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Codec.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Codec.java
@@ -1,15 +1,14 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.UncheckedIOException;
-
 import com.hedera.pbj.runtime.io.ReadableSequentialData;
 import com.hedera.pbj.runtime.io.WritableSequentialData;
 import com.hedera.pbj.runtime.io.buffer.Bytes;
 import com.hedera.pbj.runtime.io.stream.WritableStreamingData;
 import edu.umd.cs.findbugs.annotations.NonNull;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.UncheckedIOException;
 
 /**
  * Encapsulates Serialization, Deserialization and other IO operations.
@@ -24,54 +23,65 @@ public interface Codec<T /*extends Record*/> {
 
     /**
      * Parses an object from the {@link ReadableSequentialData} and returns it.
-     * <p>
-     * If {@code strictMode} is {@code true}, then throws an exception if fields
-     * have been defined on the encoded object that are not supported by the parser. This
-     * breaks forwards compatibility (an older parser cannot parse a newer encoded object),
-     * which is sometimes requires to avoid parsing an object that is newer than the code
-     * parsing it is prepared to handle.
-     * <p>
-     * The {@code maxDepth} specifies the maximum allowed depth of nested messages. The parsing
+     *
+     * <p>If {@code strictMode} is {@code true}, then throws an exception if fields have been
+     * defined on the encoded object that are not supported by the parser. This breaks forwards
+     * compatibility (an older parser cannot parse a newer encoded object), which is sometimes
+     * required to avoid parsing an object that is newer than the code parsing it is prepared to
+     * handle.
+     *
+     * <p>The {@code maxDepth} specifies the maximum allowed depth of nested messages. The parsing
      * will fail with a ParseException if the maximum depth is reached.
      *
-     * @param input The {@link ReadableSequentialData} from which to read the data to construct an object
-     * @param strictMode when {@code true}, the parser errors out on unknown fields; otherwise they'll be simply skipped.
-     * @param maxDepth a ParseException will be thrown if the depth of nested messages exceeds the maxDepth value.
+     * @param input The {@link ReadableSequentialData} from which to read the data to construct an
+     *     object
+     * @param strictMode when {@code true}, the parser errors out on unknown fields; otherwise
+     *     they'll be simply skipped.
+     * @param maxDepth a ParseException will be thrown if the depth of nested messages exceeds the
+     *     maxDepth value.
      * @return The parsed object. It must not return null.
      * @throws ParseException If parsing fails
      */
-    @NonNull T parse(@NonNull ReadableSequentialData input, final boolean strictMode, final int maxDepth) throws ParseException;
+    @NonNull
+    T parse(@NonNull ReadableSequentialData input, final boolean strictMode, final int maxDepth)
+            throws ParseException;
 
     /**
      * Parses an object from the {@link Bytes} and returns it.
-     * <p>
-     * If {@code strictMode} is {@code true}, then throws an exception if fields
-     * have been defined on the encoded object that are not supported by the parser. This
-     * breaks forwards compatibility (an older parser cannot parse a newer encoded object),
-     * which is sometimes requires to avoid parsing an object that is newer than the code
-     * parsing it is prepared to handle.
-     * <p>
-     * The {@code maxDepth} specifies the maximum allowed depth of nested messages. The parsing
+     *
+     * <p>If {@code strictMode} is {@code true}, then throws an exception if fields have been
+     * defined on the encoded object that are not supported by the parser. This breaks forwards
+     * compatibility (an older parser cannot parse a newer encoded object), which is sometimes
+     * required to avoid parsing an object that is newer than the code parsing it is prepared to
+     * handle.
+     *
+     * <p>The {@code maxDepth} specifies the maximum allowed depth of nested messages. The parsing
      * will fail with a ParseException if the maximum depth is reached.
      *
      * @param bytes The {@link Bytes} from which to read the data to construct an object
-     * @param strictMode when {@code true}, the parser errors out on unknown fields; otherwise they'll be simply skipped.
-     * @param maxDepth a ParseException will be thrown if the depth of nested messages exceeds the maxDepth value.
+     * @param strictMode when {@code true}, the parser errors out on unknown fields; otherwise
+     *     they'll be simply skipped.
+     * @param maxDepth a ParseException will be thrown if the depth of nested messages exceeds the
+     *     maxDepth value.
      * @return The parsed object. It must not return null.
      * @throws ParseException If parsing fails
      */
-    @NonNull default T parse(@NonNull Bytes bytes, final boolean strictMode, final int maxDepth) throws ParseException {
+    @NonNull
+    default T parse(@NonNull Bytes bytes, final boolean strictMode, final int maxDepth)
+            throws ParseException {
         return parse(bytes.toReadableSequentialData(), strictMode, maxDepth);
     }
 
     /**
      * Parses an object from the {@link ReadableSequentialData} and returns it.
      *
-     * @param input The {@link ReadableSequentialData} from which to read the data to construct an object
+     * @param input The {@link ReadableSequentialData} from which to read the data to construct an
+     *     object
      * @return The parsed object. It must not return null.
      * @throws ParseException If parsing fails
      */
-    @NonNull default T parse(@NonNull ReadableSequentialData input) throws ParseException {
+    @NonNull
+    default T parse(@NonNull ReadableSequentialData input) throws ParseException {
         return parse(input, false, Integer.MAX_VALUE);
     }
 
@@ -82,37 +92,41 @@ public interface Codec<T /*extends Record*/> {
      * @return The parsed object. It must not return null.
      * @throws ParseException If parsing fails
      */
-    @NonNull default T parse(@NonNull Bytes bytes) throws ParseException {
+    @NonNull
+    default T parse(@NonNull Bytes bytes) throws ParseException {
         return parse(bytes.toReadableSequentialData());
     }
 
     /**
-     * Parses an object from the {@link ReadableSequentialData} and returns it. Throws an exception if fields
-     * have been defined on the encoded object that are not supported by the parser. This
-     * breaks forwards compatibility (an older parser cannot parse a newer encoded object),
-     * which is sometimes requires to avoid parsing an object that is newer than the code
-     * parsing it is prepared to handle.
+     * Parses an object from the {@link ReadableSequentialData} and returns it. Throws an exception
+     * if fields have been defined on the encoded object that are not supported by the parser. This
+     * breaks forwards compatibility (an older parser cannot parse a newer encoded object), which is
+     * sometimes required to avoid parsing an object that is newer than the code parsing it is
+     * prepared to handle.
      *
-     * @param input The {@link ReadableSequentialData} from which to read the data to construct an object
+     * @param input The {@link ReadableSequentialData} from which to read the data to construct an
+     *     object
      * @return The parsed object. It must not return null.
      * @throws ParseException If parsing fails
      */
-    @NonNull default T parseStrict(@NonNull ReadableSequentialData input) throws ParseException {
+    @NonNull
+    default T parseStrict(@NonNull ReadableSequentialData input) throws ParseException {
         return parse(input, true, Integer.MAX_VALUE);
     }
 
     /**
-     * Parses an object from the {@link Bytes} and returns it. Throws an exception if fields
-     * have been defined on the encoded object that are not supported by the parser. This
-     * breaks forwards compatibility (an older parser cannot parse a newer encoded object),
-     * which is sometimes requires to avoid parsing an object that is newer than the code
-     * parsing it is prepared to handle.
+     * Parses an object from the {@link Bytes} and returns it. Throws an exception if fields have
+     * been defined on the encoded object that are not supported by the parser. This breaks forwards
+     * compatibility (an older parser cannot parse a newer encoded object), which is sometimes
+     * required to avoid parsing an object that is newer than the code parsing it is prepared to
+     * handle.
      *
      * @param bytes The {@link Bytes} from which to read the data to construct an object
      * @return The parsed object. It must not return null.
      * @throws ParseException If parsing fails
      */
-    @NonNull default T parseStrict(@NonNull Bytes bytes) throws ParseException {
+    @NonNull
+    default T parseStrict(@NonNull Bytes bytes) throws ParseException {
         return parseStrict(bytes.toReadableSequentialData());
     }
 
@@ -156,20 +170,22 @@ public interface Codec<T /*extends Record*/> {
      * @return true if the bytes represent the item, false otherwise.
      * @throws ParseException If parsing fails
      */
-    boolean fastEquals(@NonNull T item, @NonNull ReadableSequentialData input) throws ParseException;
+    boolean fastEquals(@NonNull T item, @NonNull ReadableSequentialData input)
+            throws ParseException;
 
     /**
      * Converts a Record into a Bytes object
      *
      * @param item The input model data to convert into a Bytes object.
      * @return The new Bytes object.
-     * @throws RuntimeException wrapping an IOException If it is impossible
-     * to write to the {@link WritableStreamingData}
+     * @throws RuntimeException wrapping an IOException If it is impossible to write to the {@link
+     *     WritableStreamingData}
      */
     default Bytes toBytes(@NonNull T item) {
         byte[] bytes;
         try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
-             WritableStreamingData writableStreamingData = new WritableStreamingData(byteArrayOutputStream)) {
+                WritableStreamingData writableStreamingData =
+                        new WritableStreamingData(byteArrayOutputStream)) {
             write(item, writableStreamingData);
             bytes = byteArrayOutputStream.toByteArray();
         } catch (IOException e) {
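
For reference, a minimal usage sketch of the Codec methods documented above. It assumes a
PBJ-generated model class `Foo` that exposes a static `Foo.PROTOBUF` codec; both names are
assumptions for illustration, not something this patch defines.

    import com.hedera.pbj.runtime.Codec;
    import com.hedera.pbj.runtime.ParseException;
    import com.hedera.pbj.runtime.io.buffer.Bytes;

    static Foo roundTrip(final Foo original) throws ParseException {
        final Codec<Foo> codec = Foo.PROTOBUF;         // assumed generated codec constant
        final Bytes encoded = codec.toBytes(original); // serialize into an immutable Bytes
        final Foo lenient = codec.parse(encoded);      // unknown fields are skipped
        final Foo strict = codec.parseStrict(encoded); // unknown fields throw ParseException
        return codec.parse(encoded, false, 16);        // reject nesting deeper than 16 levels
    }
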
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ComparableOneOf.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ComparableOneOf.java
index 9c0326a5..b090adf8 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ComparableOneOf.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ComparableOneOf.java
@@ -4,26 +4,28 @@
 import java.util.Objects;
 
 /**
+ * This is a version of {@link OneOf} that implements the `Comparable` interface to allow sorting
+ * of lists of ComparableOneOf objects. It requires that the value implements the `Comparable`
+ * interface as well.
  *
- * This is a version of {@link OneOf} that implements `Comparable` interface to allow sorting of lists of ComparableOneOf objects.
- * It requires that the value implements `Comparable` interface as well.
- *
- * @param kind     An enum representing the kind of data being represented. Must not be null.
- * @param value    The actual value in the "oneof". May be null.
- * @param <E>      The enum type
+ * @param kind An enum representing the kind of data being represented. Must not be null.
+ * @param value The actual value in the "oneof". May be null.
+ * @param <E> The enum type
  */
-public record ComparableOneOf<E extends Enum<E>>(E kind, Comparable value) implements Comparable<ComparableOneOf<E>> {
+public record ComparableOneOf<E extends Enum<E>>(E kind, Comparable value)
+        implements Comparable<ComparableOneOf<E>> {
     /**
      * Construct a new ComparableOneOf
      *
-     * @param kind     An enum representing the kind of data being represented. Must not be null.
-     * @param value    The actual value in the "oneof". May be null.
+     * @param kind An enum representing the kind of data being represented. Must not be null.
+     * @param value The actual value in the "oneof". May be null.
      */
     public ComparableOneOf {
         if (kind == null) {
             throw new NullPointerException("An enum 'kind' must be supplied");
         }
-        assert kind instanceof EnumWithProtoMetadata : "OneOf 'kind' must implement EnumWithProtoMetadata";
+        assert kind instanceof EnumWithProtoMetadata
+                : "OneOf 'kind' must implement EnumWithProtoMetadata";
     }
 
     /**
@@ -46,7 +48,7 @@ public boolean equals(Object o) {
 
     @Override
     public int hashCode() {
-        return (31 + Integer.hashCode(((EnumWithProtoMetadata)kind).protoOrdinal())) * 31
+        return (31 + Integer.hashCode(((EnumWithProtoMetadata) kind).protoOrdinal())) * 31
                 + (value == null ? 0 : value.hashCode());
     }
 
@@ -63,4 +65,3 @@ public int compareTo(ComparableOneOf<E> thatObj) {
         return value.compareTo(thatObj.value);
     }
 }
-
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/EnumWithProtoMetadata.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/EnumWithProtoMetadata.java
index 927b0fa9..822cdccb 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/EnumWithProtoMetadata.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/EnumWithProtoMetadata.java
@@ -1,9 +1,7 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
-/**
- * Interface for enums that have a protobuf ordinal and name metdata
- */
+/** Interface for enums that have a protobuf ordinal and name metadata */
 public interface EnumWithProtoMetadata {
     /**
      * Get the Protobuf ordinal for this object
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldDefinition.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldDefinition.java
index 4683bfa7..5e557c2e 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldDefinition.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldDefinition.java
@@ -2,8 +2,8 @@
 package com.hedera.pbj.runtime;
 
 /**
- * Contains a definition of a field of a protobuf Message, as originally defined
- * in a protobuf schema.
+ * Contains a definition of a field of a protobuf Message, as originally defined in a protobuf
+ * schema.
  *
  * <p>For example, given the following message definition:
  *
@@ -14,26 +14,34 @@
  *     }
  * </pre>
  *
- * <p>The field definition for "bar" would be
- * 'new FieldDefinition("bar", FieldType.STRING, false, 1)'.
+ * <p>The field definition for "bar" would be 'new FieldDefinition("bar", FieldType.STRING, false,
+ * 1)'.
  *
- * @param name     The name of the field as contained in the schema. Cannot be null.
- * @param type     The type of the field as contained in the schema. Cannot be null.
+ * @param name The name of the field as contained in the schema. Cannot be null.
+ * @param type The type of the field as contained in the schema. Cannot be null.
  * @param repeated Whether this is a "repeated" field
- * @param optional Whether this is an "optional" field - which uses Protobuf built in value types to wrap raw value
- * @param oneOf    Whether this is a field is part of a oneOf
- * @param number   The field number. Must be &gt;= 0.
+ * @param optional Whether this is an "optional" field, which uses Protobuf built-in value types
+ *     to wrap the raw value
+ * @param oneOf Whether this field is part of a oneOf
+ * @param number The field number. Must be &gt;= 0.
  */
-public record FieldDefinition(String name, FieldType type, boolean repeated, boolean optional, boolean oneOf, int number) {
+public record FieldDefinition(
+        String name,
+        FieldType type,
+        boolean repeated,
+        boolean optional,
+        boolean oneOf,
+        int number) {
     /**
      * Construct new FieldDefinition, standard record all args constructor with extra checks
      *
-     * @param name     The name of the field as contained in the schema. Cannot be null.
-     * @param type     The type of the field as contained in the schema. Cannot be null.
+     * @param name The name of the field as contained in the schema. Cannot be null.
+     * @param type The type of the field as contained in the schema. Cannot be null.
      * @param repeated Whether this is a "repeated" field
-     * @param optional Whether this is a "optional" field - which uses Protobuf built in value types to wrap raw value
-     * @param oneOf    Whether this is a field is part of a oneOf
-     * @param number   The field number. Must be &gt;= 0.
+     * @param optional Whether this is an "optional" field, which uses Protobuf built-in value
+     *     types to wrap the raw value
+     * @param oneOf Whether this field is part of a oneOf
+     * @param number The field number. Must be &gt;= 0.
      */
     public FieldDefinition {
         if (name == null) {
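
For reference, a short sketch of constructing the record through the six-component canonical
constructor shown above, mirroring the "bar" example from the Javadoc.

    import com.hedera.pbj.runtime.FieldDefinition;
    import com.hedera.pbj.runtime.FieldType;

    // "string bar = 1;" -> not repeated, not optional, not part of a oneof, field number 1
    final FieldDefinition bar =
            new FieldDefinition("bar", FieldType.STRING, false, false, false, 1);
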
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldType.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldType.java
index 14b703a9..7a872584 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldType.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/FieldType.java
@@ -1,57 +1,54 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
-/**
- * Enumeration of possible types of fields.
- */
+/** Enumeration of possible types of fields. */
 public enum FieldType {
-	/** Protobuf 64bit Double Type */
-	DOUBLE,
-	/** Protobuf 32bit Float Type */
-	FLOAT,
-	/** Protobuf 32bit Signed Integer Type */
-	INT32,
-	/** Protobuf 64bit Signed Long Type */
-	INT64,
-	/** Protobuf 32bit Unsigned Long Type */
-	UINT32,
-	/** Protobuf 64bit Unsigned Long Type */
-	UINT64,
-	/** Protobuf 32bit Signed Integer Type, ZigZag encoded */
-	SINT32,
-	/** Protobuf 64bit Signed Long Type, ZigZag encoded */
-	SINT64,
-	/** Protobuf 32bit Unsigned Integer Type, not varint encoded, just little endian */
-	FIXED32,
-	/** Protobuf 64bit Unsigned Long Type, not varint encoded, just little endian */
-	FIXED64,
-	/** Protobuf 32bit Signed Integer Type, not varint encoded, just little endian */
-	SFIXED32,
-	/** Protobuf 64bit Signed Long Type, not varint encoded, just little endian */
-	SFIXED64,
-	/** Protobuf 1 byte boolean type */
-	BOOL,
-	/** Protobuf UTF8 String type */
-	STRING,
-	/** Protobuf bytes type */
-	BYTES,
-	/** Protobuf enum type */
-	ENUM,
-	/** Protobuf sub-message type */
-	MESSAGE,
-	/** Protobuf map type */
-	MAP;
+    /** Protobuf 64bit Double Type */
+    DOUBLE,
+    /** Protobuf 32bit Float Type */
+    FLOAT,
+    /** Protobuf 32bit Signed Integer Type */
+    INT32,
+    /** Protobuf 64bit Signed Long Type */
+    INT64,
+    /** Protobuf 32bit Unsigned Long Type */
+    UINT32,
+    /** Protobuf 64bit Unsigned Long Type */
+    UINT64,
+    /** Protobuf 32bit Signed Integer Type, ZigZag encoded */
+    SINT32,
+    /** Protobuf 64bit Signed Long Type, ZigZag encoded */
+    SINT64,
+    /** Protobuf 32bit Unsigned Integer Type, not varint encoded, just little endian */
+    FIXED32,
+    /** Protobuf 64bit Unsigned Long Type, not varint encoded, just little endian */
+    FIXED64,
+    /** Protobuf 32bit Signed Integer Type, not varint encoded, just little endian */
+    SFIXED32,
+    /** Protobuf 64bit Signed Long Type, not varint encoded, just little endian */
+    SFIXED64,
+    /** Protobuf 1 byte boolean type */
+    BOOL,
+    /** Protobuf UTF8 String type */
+    STRING,
+    /** Protobuf bytes type */
+    BYTES,
+    /** Protobuf enum type */
+    ENUM,
+    /** Protobuf sub-message type */
+    MESSAGE,
+    /** Protobuf map type */
+    MAP;
 
-	/**
-	 * Optional values have an inner field, with a standard definition for every FieldType. We create singleton
-	 * instances here for them to avoid them having to be created on every use. Placing them on the enum avoid a switch.
-	 */
-	final FieldDefinition optionalFieldDefinition;
+    /**
+     * Optional values have an inner field, with a standard definition for every FieldType. We
+     * create singleton instances here to avoid creating them on every use. Placing them on the
+     * enum avoids a switch.
+     */
+    final FieldDefinition optionalFieldDefinition;
 
-	/**
-	 * Constructor, creates optionalFieldDefinition automatically
-	 */
-	FieldType() {
-		optionalFieldDefinition = new FieldDefinition("value",this,false,1);
-	}
+    /** Constructor, creates optionalFieldDefinition automatically */
+    FieldType() {
+        optionalFieldDefinition = new FieldDefinition("value", this, false, 1);
+    }
 }
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonCodec.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonCodec.java
index 242ea055..02087449 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonCodec.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonCodec.java
@@ -7,7 +7,6 @@
 import com.hedera.pbj.runtime.jsonparser.JSONParser;
 import edu.umd.cs.findbugs.annotations.NonNull;
 import edu.umd.cs.findbugs.annotations.Nullable;
-
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.util.Objects;
@@ -22,9 +21,10 @@ public interface JsonCodec<T /*extends Record*/> extends Codec<T> {
     // then we should strongly enforce Codec works with Records. This will reduce bugs
     // where people try to use a mutable object.
 
-
     /** {@inheritDoc} */
-    default @NonNull T parse(@NonNull ReadableSequentialData input, final boolean strictMode, final int maxDepth) throws ParseException {
+    default @NonNull T parse(
+            @NonNull ReadableSequentialData input, final boolean strictMode, final int maxDepth)
+            throws ParseException {
         try {
             return parse(JsonTools.parseJson(input), strictMode, maxDepth);
         } catch (IOException ex) {
@@ -33,16 +33,19 @@ public interface JsonCodec<T /*extends Record*/> extends Codec<T> {
     }
 
     /**
-     * Parses a HashObject object from JSON parse tree for object JSONParser.ObjContext. Throws if in strict mode ONLY.
+     * Parses a HashObject object from JSON parse tree for object JSONParser.ObjContext. Throws if
+     * in strict mode ONLY.
      *
      * @param root The JSON parsed object tree to parse data from
      * @return Parsed HashObject model object or null if data input was null or empty
      * @throws ParseException If parsing fails
      */
-    @NonNull T parse(
+    @NonNull
+    T parse(
             @Nullable final JSONParser.ObjContext root,
             final boolean strictMode,
-            final int maxDepth) throws ParseException;
+            final int maxDepth)
+            throws ParseException;
 
     /**
      * Writes an item to the given {@link WritableSequentialData}.
@@ -58,7 +61,7 @@ default void write(@NonNull T item, @NonNull WritableSequentialData output) thro
     /**
      * Returns JSON string representing an item.
      *
-     * @param item      The item to convert. Must not be null.
+     * @param item The item to convert. Must not be null.
      */
     default String toJSON(@NonNull T item) {
         return toJSON(item, "", false);
@@ -67,10 +70,10 @@ default String toJSON(@NonNull T item) {
     /**
      * Returns JSON string representing an item.
      *
-     * @param item      The item to convert. Must not be null.
-     * @param indent    The indent to use for pretty printing
-     * @param inline    When true the output will start with indent end with a new line otherwise
-     *                        it will just be the object "{...}"
+     * @param item The item to convert. Must not be null.
+     * @param indent The indent to use for pretty printing
+     * @param inline When true the output will start with the indent and end with a new line;
+     *     otherwise it will just be the object "{...}"
      */
     String toJSON(@NonNull T item, String indent, boolean inline);
 
@@ -78,8 +81,9 @@ default String toJSON(@NonNull T item) {
      * Reads from this data input the length of the data within the input. The implementation may
      * read all the data, or just some special serialized data, as needed to find out the length of
      * the data.
-     * <p>
-     * This is not an efficient implementation, but it is not considered performance critical for JSON.
+     *
+     * <p>This is not an efficient implementation, but it is not considered performance critical for
+     * JSON.
      *
      * @param input The input to use
      * @return The length of the data item in the input
@@ -88,13 +92,14 @@ default String toJSON(@NonNull T item) {
     default int measure(@NonNull ReadableSequentialData input) throws ParseException {
         final long startPosition = input.position();
         parse(input);
-        return (int)(input.position() - startPosition);
+        return (int) (input.position() - startPosition);
     }
 
     /**
      * Compute number of bytes that would be written when calling {@code write()} method.
-     * <p>
-     * This is not an efficient implementation, but it is not considered performance critical for JSON.
+     *
+     * <p>This is not an efficient implementation, but it is not considered performance critical for
+     * JSON.
      *
      * @param item The input model data to measure write bytes for
      * @return The length in bytes that would be written
@@ -116,15 +121,17 @@ default int measureRecord(T item) {
      * item in memory with serialized bytes and don't want to incur the cost of deserializing the
      * entire object, when we could have determined the bytes do not represent the same object very
      * cheaply and quickly.
-     * <p>
-     * This is not an efficient implementation, but it is not considered performance critical for JSON.
+     *
+     * <p>This is not an efficient implementation, but it is not considered performance critical for
+     * JSON.
      *
      * @param item The item to compare. Cannot be null.
      * @param input The input with the bytes to compare
      * @return true if the bytes represent the item, false otherwise.
      * @throws ParseException If parsing fails
      */
-    default boolean fastEquals(@NonNull T item, @NonNull ReadableSequentialData input) throws ParseException {
+    default boolean fastEquals(@NonNull T item, @NonNull ReadableSequentialData input)
+            throws ParseException {
         return Objects.equals(item, parse(input));
     }
 }
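
For reference, a brief sketch of the JsonCodec defaults reformatted above. It assumes a generated
model `Foo` exposing a static `Foo.JSON` codec; that constant name is an assumption for
illustration.

    static String prettyJson(final Foo model) {
        final JsonCodec<Foo> codec = Foo.JSON;      // assumed generated JSON codec constant
        final String compact = codec.toJSON(model); // same as toJSON(model, "", false)
        // With inline=true the output starts with the indent and ends with a newline
        return codec.toJSON(model, "  ", true);
    }
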
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonTools.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonTools.java
index eefc6bb2..d97686ca 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonTools.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/JsonTools.java
@@ -7,13 +7,7 @@
 import com.hedera.pbj.runtime.jsonparser.JSONParser;
 import edu.umd.cs.findbugs.annotations.NonNull;
 import edu.umd.cs.findbugs.annotations.Nullable;
-import org.antlr.v4.runtime.CharStreams;
-import org.antlr.v4.runtime.CodePointBuffer;
-import org.antlr.v4.runtime.CodePointCharStream;
-import org.antlr.v4.runtime.CommonTokenStream;
-
 import java.io.IOException;
-import java.io.UncheckedIOException;
 import java.nio.CharBuffer;
 import java.util.Base64;
 import java.util.List;
@@ -21,10 +15,14 @@
 import java.util.function.BiFunction;
 import java.util.function.Function;
 import java.util.stream.Collectors;
+import org.antlr.v4.runtime.CharStreams;
+import org.antlr.v4.runtime.CodePointBuffer;
+import org.antlr.v4.runtime.CodePointCharStream;
+import org.antlr.v4.runtime.CommonTokenStream;
 
 /**
- * Class of static utility methods for working with JSON. All generated JSON is designed to be
- * 100% identical to that generated by Google Protobuf library.
+ * Class of static utility methods for working with JSON. All generated JSON is designed to be 100%
+ * identical to that generated by the Google Protobuf library.
  */
 public final class JsonTools {
     /** The indent spacing to use for pretty printing JSON */
@@ -34,8 +32,8 @@ public final class JsonTools {
     // Conversion Utility Methods
 
     /**
-     * Convert a protobuf field name to a protobuf spec json field name. This is based directly on the code
-     * from protobuf library so that it matches exactly.
+     * Convert a protobuf field name to a protobuf spec json field name. This is based directly on
+     * the code from the protobuf library so that it matches exactly.
      *
      * @param fieldName the protobuf field name to convert
      * @return the protobuf spec json field name
@@ -83,7 +81,7 @@ public static String unescape(@Nullable String string) {
      */
     public static String escape(@Nullable String string) {
         if (string == null) return null;
-        return string.replaceAll("\n","\\\\n").replaceAll("\r","\\\\r");
+        return string.replaceAll("\n", "\\\\n").replaceAll("\r", "\\\\r");
     }
 
     // ====================================================================================================
@@ -96,12 +94,13 @@ public static String escape(@Nullable String string) {
      * @return the Antlr JSON context object
      * @throws IOException if there was a problem parsing the JSON
      */
-    public static JSONParser.ObjContext parseJson(@NonNull final ReadableSequentialData input) throws IOException {
+    public static JSONParser.ObjContext parseJson(@NonNull final ReadableSequentialData input)
+            throws IOException {
         final JSONLexer lexer = new JSONLexer(CharStreams.fromStream(input.asInputStream()));
         final JSONParser parser = new JSONParser(new CommonTokenStream(lexer));
         final JSONParser.JsonContext jsonContext = parser.json();
         final JSONParser.ValueContext valueContext = jsonContext.value();
-        return  valueContext.obj();
+        return valueContext.obj();
     }
 
     /**
@@ -113,11 +112,14 @@ public static JSONParser.ObjContext parseJson(@NonNull final ReadableSequentialD
     public static JSONParser.ObjContext parseJson(@NonNull final CharBuffer input) {
         CodePointBuffer.Builder codePointBufferBuilder = CodePointBuffer.builder(input.remaining());
         codePointBufferBuilder.append(input);
-        final JSONLexer lexer = new JSONLexer(CodePointCharStream.fromBuffer(codePointBufferBuilder.build(), "CharBuffer"));
+        final JSONLexer lexer =
+                new JSONLexer(
+                        CodePointCharStream.fromBuffer(
+                                codePointBufferBuilder.build(), "CharBuffer"));
         final JSONParser parser = new JSONParser(new CommonTokenStream(lexer));
         final JSONParser.JsonContext jsonContext = parser.json();
         final JSONParser.ValueContext valueContext = jsonContext.value();
-        return  valueContext.obj();
+        return valueContext.obj();
     }
 
     /**
@@ -128,15 +130,18 @@ public static JSONParser.ObjContext parseJson(@NonNull final CharBuffer input) {
      * @return the list of parsed objects
      * @param <T> the type of the objects to parse
      */
-    public static <T> List<T> parseObjArray(JSONParser.ArrContext arrContext, JsonCodec<T> codec, final int maxDepth) {
+    public static <T> List<T> parseObjArray(
+            JSONParser.ArrContext arrContext, JsonCodec<T> codec, final int maxDepth) {
         return arrContext.value().stream()
-                .map(v -> {
-                    try {
-                        return codec.parse(v.obj(), false, maxDepth - 1);
-                    } catch (ParseException e) {
-                        throw new UncheckedParseException(e);
-                    }
-                }).toList();
+                .map(
+                        v -> {
+                            try {
+                                return codec.parse(v.obj(), false, maxDepth - 1);
+                            } catch (ParseException e) {
+                                throw new UncheckedParseException(e);
+                            }
+                        })
+                .toList();
     }
 
     /**
@@ -146,7 +151,10 @@ public static <T> List<T> parseObjArray(JSONParser.ArrContext arrContext, JsonCo
      * @return the parsed integer
      */
     public static int parseInteger(JSONParser.ValueContext valueContext) {
-        return Integer.parseInt(valueContext.STRING() != null ? valueContext.STRING().getText() : valueContext.NUMBER().getText());
+        return Integer.parseInt(
+                valueContext.STRING() != null
+                        ? valueContext.STRING().getText()
+                        : valueContext.NUMBER().getText());
     }
 
     /**
@@ -156,7 +164,10 @@ public static int parseInteger(JSONParser.ValueContext valueContext) {
      * @return the parsed long
      */
     public static long parseLong(JSONParser.ValueContext valueContext) {
-        return Long.parseLong(valueContext.STRING() != null ? valueContext.STRING().getText() : valueContext.NUMBER().getText());
+        return Long.parseLong(
+                valueContext.STRING() != null
+                        ? valueContext.STRING().getText()
+                        : valueContext.NUMBER().getText());
     }
 
     /**
@@ -166,7 +177,10 @@ public static long parseLong(JSONParser.ValueContext valueContext) {
      * @return the parsed float
      */
     public static float parseFloat(JSONParser.ValueContext valueContext) {
-        return Float.parseFloat(valueContext.STRING() != null ? valueContext.STRING().getText() : valueContext.NUMBER().getText());
+        return Float.parseFloat(
+                valueContext.STRING() != null
+                        ? valueContext.STRING().getText()
+                        : valueContext.NUMBER().getText());
     }
 
     /**
@@ -176,7 +190,10 @@ public static float parseFloat(JSONParser.ValueContext valueContext) {
      * @return the parsed double
      */
     public static double parseDouble(JSONParser.ValueContext valueContext) {
-        return Double.parseDouble(valueContext.STRING() != null ? valueContext.STRING().getText() : valueContext.NUMBER().getText());
+        return Double.parseDouble(
+                valueContext.STRING() != null
+                        ? valueContext.STRING().getText()
+                        : valueContext.NUMBER().getText());
     }
 
     /**
@@ -260,11 +277,14 @@ private static String rawFieldCode(String fieldName, String rawValue) {
      * @param value the value of the field
      * @return the JSON string
      */
-    public static <T> String field(String indent, String fieldName,
-                                 JsonCodec<T> codec, @Nullable final T value) {
+    public static <T> String field(
+            String indent, String fieldName, JsonCodec<T> codec, @Nullable final T value) {
         if (value != null) {
-            return '"' + toJsonFieldName(fieldName) + '"' + ": " +
-                    codec.toJSON(value, indent, true);
+            return '"'
+                    + toJsonFieldName(fieldName)
+                    + '"'
+                    + ": "
+                    + codec.toJSON(value, indent, true);
         } else {
             return '"' + toJsonFieldName(fieldName) + '"' + ": null";
         }
@@ -309,10 +329,15 @@ public static String field(String fieldName, byte[] value) {
      * @param fieldName the name of the field
      * @param value the value of the field
      * @param kEncoder an encoder of a key value to a string
-     * @param vComposer a composer of a "key":value strings - basically, a JsonTools::field method for the value type
+     * @param vComposer a composer of "key":value strings - basically, a JsonTools::field method
+     *     for the value type
      * @return the JSON string
      */
-    public static <K, V> String field(String fieldName, Map<K, V> value, Function<K, String> kEncoder, BiFunction<String, V, String> vComposer) {
+    public static <K, V> String field(
+            String fieldName,
+            Map<K, V> value,
+            Function<K, String> kEncoder,
+            BiFunction<String, V, String> vComposer) {
         assert !value.isEmpty();
         StringBuilder sb = new StringBuilder();
         PbjMap<K, V> pbjMap = (PbjMap<K, V>) value;
@@ -373,9 +398,9 @@ public static String field(String fieldName, float value) {
         if (Float.isNaN(value)) {
             return rawFieldCode(fieldName, "\"NaN\"");
         } else if (Float.isInfinite(value)) {
-            return rawFieldCode(fieldName, "\""+(value < 0 ? "-Infinity" : "Infinity")+"\"");
+            return rawFieldCode(fieldName, "\"" + (value < 0 ? "-Infinity" : "Infinity") + "\"");
         } else {
-            return rawFieldCode(fieldName,  Float.toString(value) );
+            return rawFieldCode(fieldName, Float.toString(value));
         }
     }
 
@@ -390,7 +415,7 @@ public static String field(String fieldName, double value) {
         if (Double.isNaN(value)) {
             return rawFieldCode(fieldName, "\"NaN\"");
         } else if (Double.isInfinite(value)) {
-            return rawFieldCode(fieldName, "\""+(value < 0 ? "-Infinity" : "Infinity")+"\"");
+            return rawFieldCode(fieldName, "\"" + (value < 0 ? "-Infinity" : "Infinity") + "\"");
         } else {
             return rawFieldCode(fieldName, Double.toString(value));
         }
@@ -481,33 +506,56 @@ public static String field(String fieldName, Double value) {
      * @return the JSON string
      * @param <T> the type of the items in the array
      */
-    public static <T> String arrayField(String fieldName,
-                                      FieldDefinition fieldDefinition, List<T> items) {
+    public static <T> String arrayField(
+            String fieldName, FieldDefinition fieldDefinition, List<T> items) {
         if (items != null) {
             if (items.isEmpty()) {
                 return rawFieldCode(fieldName, "[]");
             } else {
-                String values = items.stream()
-                        .map(item -> {
-                            if (fieldDefinition.optional() && item == null) {
-                                return "\"null\"";
-                            } else {
-                                return switch (fieldDefinition.type()) {
-                                    case STRING -> '"' + escape((String) item) + '"';
-                                    case BYTES -> '"' + ((Bytes) item).toBase64() + '"';
-                                    case INT32, SINT32, UINT32, FIXED32, SFIXED32 -> Integer.toString((Integer) item);
-                                    case INT64, SINT64, UINT64, FIXED64, SFIXED64 -> '"' + Long.toString((Long) item) + '"';
-                                    case FLOAT -> Float.toString((Float) item);
-                                    case DOUBLE -> Double.toString((Double) item);
-                                    case BOOL -> Boolean.toString((Boolean) item);
-                                    case ENUM -> '"' + ((EnumWithProtoMetadata)item).protoName() + '"';
-                                    case MESSAGE -> throw new UnsupportedOperationException("No expected here should have called other arrayField() method");
-                                    case MAP -> throw new UnsupportedOperationException("Arrays of maps not supported");
-                                };
-                            }
-                        })
-                        .collect(Collectors.joining(", "));
-                return rawFieldCode(fieldName, "["+values+"]");
+                String values =
+                        items.stream()
+                                .map(
+                                        item -> {
+                                            if (fieldDefinition.optional() && item == null) {
+                                                return "\"null\"";
+                                            } else {
+                                                return switch (fieldDefinition.type()) {
+                                                    case STRING -> '"'
+                                                            + escape((String) item)
+                                                            + '"';
+                                                    case BYTES -> '"'
+                                                            + ((Bytes) item).toBase64()
+                                                            + '"';
+                                                    case INT32,
+                                                            SINT32,
+                                                            UINT32,
+                                                            FIXED32,
+                                                            SFIXED32 -> Integer.toString(
+                                                            (Integer) item);
+                                                    case INT64,
+                                                            SINT64,
+                                                            UINT64,
+                                                            FIXED64,
+                                                            SFIXED64 -> '"'
+                                                            + Long.toString((Long) item)
+                                                            + '"';
+                                                    case FLOAT -> Float.toString((Float) item);
+                                                    case DOUBLE -> Double.toString((Double) item);
+                                                    case BOOL -> Boolean.toString((Boolean) item);
+                                                    case ENUM -> '"'
+                                                            + ((EnumWithProtoMetadata) item)
+                                                                    .protoName()
+                                                            + '"';
+                                                    case MESSAGE -> throw new UnsupportedOperationException(
+                                                            "Not expected here, should have called"
+                                                                    + " other arrayField() method");
+                                                    case MAP -> throw new UnsupportedOperationException(
+                                                            "Arrays of maps not supported");
+                                                };
+                                            }
+                                        })
+                                .collect(Collectors.joining(", "));
+                return rawFieldCode(fieldName, "[" + values + "]");
             }
         }
         return null;
@@ -523,15 +571,15 @@ public static <T> String arrayField(String fieldName,
      * @return the JSON string
      * @param <T> the type of the items in the array
      */
-    public static <T> String arrayField(String indent, String fieldName,
-                                      JsonCodec<T> codec, List<T> items) {
+    public static <T> String arrayField(
+            String indent, String fieldName, JsonCodec<T> codec, List<T> items) {
         if (items != null) {
             if (items.isEmpty()) {
                 return rawFieldCode(fieldName, "[]");
             } else {
                 StringBuilder code = new StringBuilder('"' + fieldName + '"' + ": [");
                 for (int i = 0; i < items.size(); i++) {
-                     var item = items.get(i);
+                    var item = items.get(i);
                     code.append(codec.toJSON(item, indent, true));
                     if (i < items.size() - 1) {
                         code.append(", ");
@@ -543,5 +591,4 @@ public static <T> String arrayField(String indent, String fieldName,
         }
         return null;
     }
-
 }
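
For reference, a small sketch of the JsonTools helpers touched above, assuming toJsonFieldName is
public as its Javadoc suggests; the rendered fragment shown in the comment is approximate.

    import com.hedera.pbj.runtime.JsonTools;

    static String jsonFieldSketch() {
        // Protobuf field names convert to the protobuf JSON spec's lowerCamelCase form
        final String jsonName = JsonTools.toJsonFieldName("first_name"); // expected "firstName"
        // field(...) renders a single "name": value fragment, here for a double field
        return JsonTools.field("duration_seconds", 42.0); // e.g. "durationSeconds": 42.0
    }
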
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/MalformedProtobufException.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/MalformedProtobufException.java
index 8f91de88..3595774b 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/MalformedProtobufException.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/MalformedProtobufException.java
@@ -3,21 +3,19 @@
 
 import java.io.IOException;
 
-/**
- * Thrown during the parsing of protobuf data when it is malformed.
- */
+/** Thrown during the parsing of protobuf data when it is malformed. */
 public class MalformedProtobufException extends IOException {
 
-	/**
-	 * Construct new MalformedProtobufException
-	 *
-	 * @param message error message
-	 */
-	public MalformedProtobufException(final String message) {
-		super(message);
-	}
+    /**
+     * Construct new MalformedProtobufException
+     *
+     * @param message error message
+     */
+    public MalformedProtobufException(final String message) {
+        super(message);
+    }
 
-	public MalformedProtobufException(final String message, final Throwable cause) {
-		super(message, cause);
-	}
+    public MalformedProtobufException(final String message, final Throwable cause) {
+        super(message, cause);
+    }
 }
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/OneOf.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/OneOf.java
index 1efdc720..af72ecca 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/OneOf.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/OneOf.java
@@ -4,29 +4,28 @@
 import java.util.Objects;
 
 /**
- * When a protobuf schema defines a field as "oneof", it is often useful
- * for parsers to represent the field as a {@link OneOf} because there is
- * often no useful supertype common to all fields within the "oneof". This
- * class takes the field num and an enum (defined by the parser) representing
- * the different possible types in this "oneof", and the actual value as
- * an object.
+ * When a protobuf schema defines a field as "oneof", it is often useful for parsers to represent
+ * the field as a {@link OneOf} because there is often no useful supertype common to all fields
+ * within the "oneof". This class takes the field num and an enum (defined by the parser)
+ * representing the different possible types in this "oneof", and the actual value as an object.
  *
- * @param kind     An enum representing the kind of data being represented. Must not be null.
- * @param value    The actual value in the "oneof". May be null.
- * @param <E>      The enum type
+ * @param kind An enum representing the kind of data being represented. Must not be null.
+ * @param value The actual value in the "oneof". May be null.
+ * @param <E> The enum type
  */
 public record OneOf<E extends Enum<E>>(E kind, Object value) {
     /**
      * Construct a new OneOf
      *
-     * @param kind     An enum representing the kind of data being represented. Must not be null.
-     * @param value    The actual value in the "oneof". May be null.
+     * @param kind An enum representing the kind of data being represented. Must not be null.
+     * @param value The actual value in the "oneof". May be null.
      */
     public OneOf {
         if (kind == null) {
             throw new NullPointerException("An enum 'kind' must be supplied");
         }
-        assert kind instanceof EnumWithProtoMetadata : "OneOf 'kind' must implement EnumWithProtoMetadata";
+        assert kind instanceof EnumWithProtoMetadata
+                : "OneOf 'kind' must implement EnumWithProtoMetadata";
     }
 
     /**
@@ -49,9 +48,7 @@ public boolean equals(Object o) {
 
     @Override
     public int hashCode() {
-        return (31 + Integer.hashCode(((EnumWithProtoMetadata)kind).protoOrdinal())) * 31
+        return (31 + Integer.hashCode(((EnumWithProtoMetadata) kind).protoOrdinal())) * 31
                 + (value == null ? 0 : value.hashCode());
     }
-
 }
-
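
For reference, a compact sketch of how a kind enum and OneOf fit together. Generated code normally
supplies the enum; the one below is illustrative and assumes EnumWithProtoMetadata declares only
the protoOrdinal() and protoName() accessors used elsewhere in this patch.

    enum PetKind implements EnumWithProtoMetadata {
        UNSET(0, "UNSET"),
        DOG(1, "dog"),
        CAT(2, "cat");

        private final int protoOrdinal;
        private final String protoName;

        PetKind(final int protoOrdinal, final String protoName) {
            this.protoOrdinal = protoOrdinal;
            this.protoName = protoName;
        }

        @Override
        public int protoOrdinal() {
            return protoOrdinal;
        }

        @Override
        public String protoName() {
            return protoName;
        }
    }

    // kind must not be null; the compact constructor asserts it implements EnumWithProtoMetadata
    static OneOf<PetKind> dogNamed(final String name) {
        return new OneOf<>(PetKind.DOG, name);
    }
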
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ParseException.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ParseException.java
index 0f9bea01..cae4c92d 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ParseException.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ParseException.java
@@ -4,9 +4,9 @@
 /**
  * A checked exception thrown by Codec.parse() methods when the parsing operation fails.
  *
- * The `cause` of this exception provides more details on the nature of the failure
- * which can be caused by I/O issues, malformed input data, or any other reason
- * that prevents the parse() method from completing the operation.
+ * <p>The `cause` of this exception provides more details on the nature of the failure which can be
+ * caused by I/O issues, malformed input data, or any other reason that prevents the parse() method
+ * from completing the operation.
  */
 public class ParseException extends Exception {
     public ParseException(Throwable cause) {
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/PbjMap.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/PbjMap.java
index 457f7b36..1ce0fa5c 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/PbjMap.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/PbjMap.java
@@ -10,9 +10,9 @@
 
 /**
  * Implements an immutable map that exposes a list of keys sorted in their natural order.
- * <p>
- * This Map implementation allows one to iterate the entries in a deterministic order
- * which is useful for serializing, hash computation, etc.
+ *
+ * <p>This Map implementation allows one to iterate the entries in a deterministic order which is
+ * useful for serializing, hash computation, etc.
  *
  * @param <K> key type
  * @param <V> value type
@@ -30,12 +30,12 @@ private PbjMap(final Map<K, V> map) {
     }
 
     /**
-     * A public factory method for PbjMap objects.
-     * It returns the PbjMap.EMPTY if the input map is empty.
-     * It returns the map itself if the input map is an instance of PbjMap (because it's immutable anyway.)
-     * Otherwise, it returns a new PbjMap instance delegating to the provided input map.
-     * NOTE: the caller code is expected to never modify the input map after this factory method is called,
-     * otherwise the behavior is undefined.
+     * A public factory method for PbjMap objects. It returns PbjMap.EMPTY if the input map is
+     * empty. It returns the map itself if the input map is an instance of PbjMap (because it is
+     * immutable anyway). Otherwise, it returns a new PbjMap instance delegating to the provided
+     * input map. NOTE: the caller code is expected to never modify the input map after this
+     * factory method is called, otherwise the behavior is undefined.
+     *
      * @param map an input map
      * @return a PbjMap instance corresponding to the input map
      * @param <K> key type
@@ -49,6 +49,7 @@ public static <K, V> PbjMap<K, V> of(final Map<K, V> map) {
 
     /**
      * Return a list of keys sorted in their natural order.
+     *
      * @return the sorted keys list
      */
     public List<K> getSortedKeys() {
@@ -127,7 +128,8 @@ public boolean equals(Object o) {
     public int hashCode() {
         // This is a convenience hashCode() implementation that delegates to Java hashCode,
         // and it's implemented here solely to support the above equals() method override.
-        // Generated protobuf models compute map fields' hash codes differently and deterministically.
+        // Generated protobuf models compute map fields' hash codes differently and
+        // deterministically.
         return 31 * map.hashCode() + sortedKeys.hashCode();
     }
 
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoConstants.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoConstants.java
index 45612948..29bc110d 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoConstants.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoConstants.java
@@ -1,9 +1,7 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
-/**
- * Common constants used by parsers, writers and tests.
- */
+/** Common constants used by parsers, writers and tests. */
 public enum ProtoConstants {
 
     /** On wire encoded type for varint */
@@ -22,9 +20,7 @@ public enum ProtoConstants {
     // values() seems to allocate a new array on each call, so let's cache it here
     private static final ProtoConstants[] values = values();
 
-    /**
-     * Mask used to extract the wire type from the "tag" byte
-     */
+    /** Mask used to extract the wire type from the "tag" byte */
     public static final int TAG_WIRE_TYPE_MASK = 0b0000_0111;
 
     public static ProtoConstants get(int ordinal) {
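
For reference, a worked decode of a protobuf tag using the mask above together with
ProtoParserTools.TAG_FIELD_OFFSET from further below in this patch.

    static int[] decodeTag(final int tag) {
        // Example: tag 0x0A -> wire type 0x0A & 0b0000_0111 = 2, field number 0x0A >>> 3 = 1
        final int wireType = tag & ProtoConstants.TAG_WIRE_TYPE_MASK;
        final int fieldNumber = tag >>> ProtoParserTools.TAG_FIELD_OFFSET;
        return new int[] {wireType, fieldNumber};
    }
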
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoParserTools.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoParserTools.java
index 76c2c3be..fac60016 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoParserTools.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoParserTools.java
@@ -1,9 +1,8 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
-import com.hedera.pbj.runtime.io.buffer.Bytes;
 import com.hedera.pbj.runtime.io.ReadableSequentialData;
-
+import com.hedera.pbj.runtime.io.buffer.Bytes;
 import java.io.IOException;
 import java.nio.BufferUnderflowException;
 import java.nio.ByteBuffer;
@@ -18,16 +17,16 @@
 import java.util.Map;
 
 /**
- * This class is full of parse helper methods, they depend on a DataInput as input with position and limit set
- * correctly.
- * <p>
- * Methods that IDE things are unused are used in generated code by PBJ compiler.
+ * This class is full of parse helper methods; they depend on a DataInput as input with position
+ * and limit set correctly.
+ *
+ * <p>Methods that the IDE thinks are unused are used in generated code by the PBJ compiler.
  */
 @SuppressWarnings({"DuplicatedCode", "unused"})
 public final class ProtoParserTools {
     /**
-     * The number of lower order bits from the "tag" byte that should be rotated out
-     * to reveal the field number
+     * The number of lower order bits from the "tag" byte that should be rotated out to reveal the
+     * field number
      */
     public static final int TAG_FIELD_OFFSET = 3;
 
@@ -35,8 +34,9 @@ public final class ProtoParserTools {
     private ProtoParserTools() {}
 
     /**
-     * Add an item to a list returning a new list with the item or the same list with the item added. If the list is
-     * Collections.EMPTY_LIST then a new list is created and returned with the item added.
+     * Add an item to a list, returning either a new list containing the item or the same list
+     * with the item added. If the list is Collections.EMPTY_LIST then a new list is created and
+     * returned with the item added.
      *
      * @param list The list to add item to or Collections.EMPTY_LIST
      * @param newItem The item to add
@@ -52,8 +52,9 @@ public static <T> List<T> addToList(List<T> list, T newItem) {
     }
 
     /**
-     * Add an entry to a map returning a new map with the entry or the same map with the entry added. If the map is
-     * Collections.EMPTY_MAP then a new map is created and returned with the entry added.
+     * Add an entry to a map, returning either a new map containing the entry or the same map
+     * with the entry added. If the map is Collections.EMPTY_MAP then a new map is created and
+     * returned with the entry added.
      *
      * @param map The map to add entry to or Collections.EMPTY_MAP
      * @param key The key
@@ -76,7 +77,7 @@ public static <K, V> Map<K, V> addToMap(Map<K, V> map, final K key, final V valu
      * @param input The input data to read from
      * @return the read int
      */
-    public static int readInt32(final ReadableSequentialData input)  {
+    public static int readInt32(final ReadableSequentialData input) {
         return input.readVarInt(false);
     }
 
@@ -255,7 +256,8 @@ public static String readString(final ReadableSequentialData input, final long m
 
         try {
             // Shouldn't use `new String()` because we want to error out on malformed UTF-8 bytes.
-            return StandardCharsets.UTF_8.newDecoder()
+            return StandardCharsets.UTF_8
+                    .newDecoder()
                     .onMalformedInput(CodingErrorAction.REPORT)
                     .onUnmappableCharacter(CodingErrorAction.REPORT)
                     .decode(bb)
@@ -269,8 +271,8 @@ public static String readString(final ReadableSequentialData input, final long m
      * Read a Bytes field from data input
      *
      * @param input the input to read from
-     * @return read Bytes object, this can be a copy or a direct reference to inputs data. So it has same life span
-     * of InputData
+     * @return the read Bytes object; this can be a copy or a direct reference to the input's
+     *     data, so it has the same life span as the input data
      */
     public static Bytes readBytes(final ReadableSequentialData input) {
         try {
@@ -281,16 +283,17 @@ public static Bytes readBytes(final ReadableSequentialData input) {
     }
 
     /**
-     * Read a Bytes field from data input, or throw ParseException if the Bytes in the input
-     * is longer than the maxSize.
+     * Read a Bytes field from data input, or throw ParseException if the Bytes in the input is
+     * longer than the maxSize.
      *
      * @param input the input to read from
      * @param maxSize the maximum allowed size
-     * @return read Bytes object, this can be a copy or a direct reference to inputs data. So it has same life span
-     * of InputData
+     * @return the read Bytes object; this can be a copy or a direct reference to the input's data,
+     *     so it has the same life span as the input data
      * @throws ParseException if the length is greater than maxSize
      */
-    public static Bytes readBytes(final ReadableSequentialData input, final long maxSize) throws ParseException {
+    public static Bytes readBytes(final ReadableSequentialData input, final long maxSize)
+            throws ParseException {
         final int length = input.readVarInt(false);
         if (length > maxSize) {
             throw new ParseException("size " + length + " is greater than max " + maxSize);
@@ -312,7 +315,8 @@ public static Bytes readBytes(final ReadableSequentialData input, final long max
      * @param wireType The wire type of field to skip
      * @throws IOException For unsupported wire types
      */
-    public static void skipField(final ReadableSequentialData input, final ProtoConstants wireType) throws IOException {
+    public static void skipField(final ReadableSequentialData input, final ProtoConstants wireType)
+            throws IOException {
         try {
             skipField(input, wireType, Long.MAX_VALUE);
         } catch (ParseException ex) {
@@ -327,16 +331,20 @@ public static void skipField(final ReadableSequentialData input, final ProtoCons
      * @param wireType The wire type of field to skip
      * @param maxSize the maximum allowed size for repeated/length-encoded fields
      * @throws IOException For unsupported wire types
-     * @throws ParseException if the length of a repeated/length-encoded field is greater than maxSize
+     * @throws ParseException if the length of a repeated/length-encoded field is greater than
+     *     maxSize
      */
-    public static void skipField(final ReadableSequentialData input, final ProtoConstants wireType, final long maxSize)
+    public static void skipField(
+            final ReadableSequentialData input, final ProtoConstants wireType, final long maxSize)
             throws IOException, ParseException {
         switch (wireType) {
             case WIRE_TYPE_FIXED_64_BIT -> input.skip(8);
             case WIRE_TYPE_FIXED_32_BIT -> input.skip(4);
-            // The value for "zigZag" when calling varint doesn't matter because we are just reading past
-            // the varint, we don't care how to interpret it (zigzag is only used for interpretation of
-            // the bytes, not how many of them there are)
+                // The value for "zigZag" when calling varint doesn't matter
+                // because we are just reading past the varint; we don't care
+                // how to interpret it (zigzag is only used for the
+                // interpretation of the bytes, not how many of them
+                // there are).
             case WIRE_TYPE_VARINT_OR_ZIGZAG -> input.readVarLong(false);
             case WIRE_TYPE_DELIMITED -> {
                 final int length = input.readVarInt(false);
@@ -348,9 +356,12 @@ public static void skipField(final ReadableSequentialData input, final ProtoCons
                 }
                 input.skip(length);
             }
-            case WIRE_TYPE_GROUP_START -> throw new IOException("Wire type 'Group Start' is unsupported");
-            case WIRE_TYPE_GROUP_END -> throw new IOException("Wire type 'Group End' is unsupported");
-            default -> throw new IOException("Unhandled wire type while trying to skip a field " + wireType);
+            case WIRE_TYPE_GROUP_START -> throw new IOException(
+                    "Wire type 'Group Start' is unsupported");
+            case WIRE_TYPE_GROUP_END -> throw new IOException(
+                    "Wire type 'Group End' is unsupported");
+            default -> throw new IOException(
+                    "Unhandled wire type while trying to skip a field " + wireType);
         }
     }
 
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoTestTools.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoTestTools.java
index 0dfb397f..db111785 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoTestTools.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoTestTools.java
@@ -3,31 +3,30 @@
 
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
 import com.hedera.pbj.runtime.io.buffer.Bytes;
-
 import java.nio.ByteBuffer;
 import java.nio.CharBuffer;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import java.util.Map;
 
 /**
  * Static tools and test cases used by generated test classes.
- * <p>
- * It was very slow in testing when new buffers were created each test, so there is a thread local cache of buffers
- * here. That are used in unit tests. This saves a huge amount of GC work and reduced test time from hours to minutes.
- * </p>
+ *
+ * <p>It was very slow in testing when new buffers were created for each test, so there is a
+ * thread-local cache of buffers here that is used in unit tests. This saves a huge amount of GC
+ * work and reduced test time from hours to minutes.
  */
 public final class ProtoTestTools {
 
     /** Size for reusable test buffers */
-    private static final int BUFFER_SIZE = 1024*1024;
+    private static final int BUFFER_SIZE = 1024 * 1024;
 
     /** Size for reusable test char buffers */
-    private static final int CHAR_BUFFER_SIZE = 2*1024*1024;
+    private static final int CHAR_BUFFER_SIZE = 2 * 1024 * 1024;
 
     /** Instance should never be created */
     private ProtoTestTools() {}
+
     /** Thread local set of reusable buffers */
     private static final ThreadLocal<BufferedData> THREAD_LOCAL_BUFFERS =
             ThreadLocal.withInitial(() -> BufferedData.allocate(BUFFER_SIZE));
@@ -118,23 +117,23 @@ public static <T> List<T> addNull(final List<T> list) {
     }
 
     /**
-     * Util method to create a list of lists of objects. Given a list of test cases it creates an empty list and then a
-     * splits the rest into sub-lists of length max 5.
-     * <p>
-     *     This was changed has it is way faster for tests to do many small chunks than a few huge objects.
-     * </p>
+     * Util method to create a list of lists of objects. Given a list of test cases it creates an
+     * empty list and then splits the rest into sub-lists of at most 5 elements.
+     *
+     * <p>This was changed as it is way faster for tests to process many small chunks than a few
+     * huge objects.
      *
      * @param list Input list
      * @return list of lists derived from input list
      * @param <T> the type for lists
      */
     public static <T> List<List<T>> generateListArguments(final List<T> list) {
-        ArrayList<List<T>> outputList = new ArrayList<>((list.size()/5)+1);
+        ArrayList<List<T>> outputList = new ArrayList<>((list.size() / 5) + 1);
         outputList.add(Collections.emptyList());
         int i = 0;
         while (i < list.size()) {
-            final int itemsToUse = Math.min(5, list.size()-i);
-            outputList.add(list.subList(i, i+itemsToUse));
+            final int itemsToUse = Math.min(5, list.size() - i);
+            outputList.add(list.subList(i, i + itemsToUse));
             i += itemsToUse;
         }
         return outputList;
@@ -144,32 +143,50 @@ public static <T> List<List<T>> generateListArguments(final List<T> list) {
     // Standard lists of values to test with
 
     /** integer type test cases */
-    public static final List<Integer> INTEGER_TESTS_LIST = List.of(Integer.MIN_VALUE, -42, -21, 0, 21, 42, Integer.MAX_VALUE);
+    public static final List<Integer> INTEGER_TESTS_LIST =
+            List.of(Integer.MIN_VALUE, -42, -21, 0, 21, 42, Integer.MAX_VALUE);
+
     /** unsigned integer type test cases */
-    public static final List<Integer> UNSIGNED_INTEGER_TESTS_LIST = List.of(0, 1, 2, Integer.MAX_VALUE);
+    public static final List<Integer> UNSIGNED_INTEGER_TESTS_LIST =
+            List.of(0, 1, 2, Integer.MAX_VALUE);
+
     /** long type test cases */
-    public static final List<Long> LONG_TESTS_LIST = List.of(Long.MIN_VALUE, -42L, -21L, 0L, 21L, 42L, Long.MAX_VALUE);
+    public static final List<Long> LONG_TESTS_LIST =
+            List.of(Long.MIN_VALUE, -42L, -21L, 0L, 21L, 42L, Long.MAX_VALUE);
+
     /** unsigned long type test cases */
     public static final List<Long> UNSIGNED_LONG_TESTS_LIST = List.of(0L, 21L, 42L, Long.MAX_VALUE);
+
     /** bytes float test cases */
-    public static final List<Float> FLOAT_TESTS_LIST = List.of(Float.MIN_NORMAL, -102.7f, -5f, 1.7f, 0f, 3f, 5.2f, 42.1f, Float.MAX_VALUE);
+    public static final List<Float> FLOAT_TESTS_LIST =
+            List.of(Float.MIN_NORMAL, -102.7f, -5f, 1.7f, 0f, 3f, 5.2f, 42.1f, Float.MAX_VALUE);
+
     /** double type test cases */
-    public static final List<Double> DOUBLE_TESTS_LIST = List.of(Double.MIN_NORMAL, -102.7d, -5d, 1.7d, 0d, 3d, 5.2d, 42.1d, Double.MAX_VALUE);
+    public static final List<Double> DOUBLE_TESTS_LIST =
+            List.of(Double.MIN_NORMAL, -102.7d, -5d, 1.7d, 0d, 3d, 5.2d, 42.1d, Double.MAX_VALUE);
+
     /** boolean type test cases */
     public static final List<Boolean> BOOLEAN_TESTS_LIST = List.of(true, false);
+
     /** bytes type test cases */
-    public static final List<Bytes> BYTES_TESTS_LIST = List.of(
-            Bytes.wrap(new byte[0]),
-            Bytes.wrap(new byte[]{0b001}),
-            Bytes.wrap(new byte[]{0b001, 0b010, 0b011, (byte)0xFF, Byte.MIN_VALUE, Byte.MAX_VALUE})
-    );
-
-    /** string type test cases, small as possible to make tests fast, there is a separate integration test with extra tests  */
-    public static final List<String> STRING_TESTS_LIST = List.of(
-            "",
-            """
+    public static final List<Bytes> BYTES_TESTS_LIST =
+            List.of(
+                    Bytes.wrap(new byte[0]),
+                    Bytes.wrap(new byte[] {0b001}),
+                    Bytes.wrap(
+                            new byte[] {
+                                0b001, 0b010, 0b011, (byte) 0xFF, Byte.MIN_VALUE, Byte.MAX_VALUE
+                            }));
+
+    /**
+     * string type test cases, kept as small as possible to make tests fast; there is a separate
+     * integration test with extra tests
+     */
+    public static final List<String> STRING_TESTS_LIST =
+            List.of(
+                    "",
+                    """
             This a small to speed tests
             Couple extended chars ©« あめ بِها
-            """
-    );
+            """);
 }
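
For reference, a minimal illustrative sketch of how the chunking in ProtoTestTools.generateListArguments behaves (the demo class and its main method below are assumptions for illustration only, not part of the runtime): a 12-element input yields the leading empty list plus sub-lists of 5, 5, and 2 elements.

    import com.hedera.pbj.runtime.ProtoTestTools;
    import java.util.List;
    import java.util.stream.IntStream;

    public final class GenerateListArgumentsDemo {
        public static void main(String[] args) {
            // 12 test values -> expect 4 lists: [], [0..4], [5..9], [10..11]
            final List<Integer> input = IntStream.range(0, 12).boxed().toList();
            final List<List<Integer>> chunks = ProtoTestTools.generateListArguments(input);
            System.out.println(chunks.size());           // 4
            System.out.println(chunks.get(0).isEmpty()); // true
            System.out.println(chunks.get(1).size());    // 5
            System.out.println(chunks.get(3).size());    // 2
        }
    }
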
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriter.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriter.java
index dd8db3dd..b1bc93b4 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriter.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriter.java
@@ -2,7 +2,6 @@
 package com.hedera.pbj.runtime;
 
 import com.hedera.pbj.runtime.io.WritableSequentialData;
-
 import java.io.IOException;
 
 /**
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriterTools.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriterTools.java
index 71b0894d..c6f8435e 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriterTools.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/ProtoWriterTools.java
@@ -11,7 +11,6 @@
 import com.hedera.pbj.runtime.io.buffer.RandomAccessData;
 import edu.umd.cs.findbugs.annotations.NonNull;
 import edu.umd.cs.findbugs.annotations.Nullable;
-
 import java.io.IOException;
 import java.nio.ByteOrder;
 import java.nio.charset.StandardCharsets;
@@ -19,13 +18,14 @@
 import java.util.function.Consumer;
 import java.util.function.ToIntFunction;
 
-/**
- * Static helper methods for Writers
- */
+/** Static helper methods for Writers */
 @SuppressWarnings({"DuplicatedCode", "ForLoopReplaceableByForEach"})
 public final class ProtoWriterTools {
 
-    /** The number of leading bits of the tag that are used to store field type, the rest is field number */
+    /**
+     * The number of leading bits of the tag that are used to store the field type; the rest is the
+     * field number
+     */
     static final int TAG_TYPE_BITS = 3;
 
     /** Instance should never be created */
@@ -70,16 +70,19 @@ public static void writeTag(final WritableSequentialData out, final FieldDefinit
      * @param field The field to include in tag
      * @param wireType The field wire type to include in tag
      */
-    public static void writeTag(final WritableSequentialData out, final FieldDefinition field, final ProtoConstants wireType) {
+    public static void writeTag(
+            final WritableSequentialData out,
+            final FieldDefinition field,
+            final ProtoConstants wireType) {
         out.writeVarInt((field.number() << TAG_TYPE_BITS) | wireType.ordinal(), false);
     }
 
     /** Create an unsupported field type exception */
     private static RuntimeException unsupported() {
-        return new RuntimeException("Unsupported field type. Bug in ProtoOutputStream, shouldn't happen.");
+        return new RuntimeException(
+                "Unsupported field type. Bug in ProtoOutputStream, shouldn't happen.");
     }
 
-
     // ================================================================================================================
     // STANDARD WRITE METHODS
 
@@ -102,11 +105,13 @@ public static void writeInteger(WritableSequentialData out, FieldDefinition fiel
      * @param value the int value to write
      * @param skipDefault default value results in no-op for non-oneOf
      */
-    public static void writeInteger(WritableSequentialData out, FieldDefinition field, int value, boolean skipDefault) {
-        assert switch(field.type()) {
-            case INT32, UINT32, SINT32, FIXED32, SFIXED32 -> true;
-            default -> false;
-        } : "Not an integer type " + field;
+    public static void writeInteger(
+            WritableSequentialData out, FieldDefinition field, int value, boolean skipDefault) {
+        assert switch (field.type()) {
+                    case INT32, UINT32, SINT32, FIXED32, SFIXED32 -> true;
+                    default -> false;
+                }
+                : "Not an integer type " + field;
         assert !field.repeated() : "Use writeIntegerList with repeated types";
 
         if (skipDefault && !field.oneOf() && value == 0) {
@@ -154,11 +159,13 @@ public static void writeLong(WritableSequentialData out, FieldDefinition field,
      * @param value the long value to write
      * @param skipDefault default value results in no-op for non-oneOf
      */
-    public static void writeLong(WritableSequentialData out, FieldDefinition field, long value, boolean skipDefault) {
-        assert switch(field.type()) {
-            case INT64, UINT64, SINT64, FIXED64, SFIXED64 -> true;
-            default -> false;
-        } : "Not a long type " + field;
+    public static void writeLong(
+            WritableSequentialData out, FieldDefinition field, long value, boolean skipDefault) {
+        assert switch (field.type()) {
+                    case INT64, UINT64, SINT64, FIXED64, SFIXED64 -> true;
+                    default -> false;
+                }
+                : "Not a long type " + field;
         assert !field.repeated() : "Use writeLongList with repeated types";
         if (skipDefault && !field.oneOf() && value == 0) {
             return;
@@ -207,7 +214,8 @@ public static void writeFloat(WritableSequentialData out, FieldDefinition field,
      * @param field the descriptor for the field we are writing
      * @param value the double value to write
      */
-    public static void writeDouble(WritableSequentialData out, FieldDefinition field, double value) {
+    public static void writeDouble(
+            WritableSequentialData out, FieldDefinition field, double value) {
         assert field.type() == FieldType.DOUBLE : "Not a double type " + field;
         assert !field.repeated() : "Use writeDoubleList with repeated types";
         // When not a oneOf don't write default value
@@ -225,7 +233,8 @@ public static void writeDouble(WritableSequentialData out, FieldDefinition field
      * @param field the descriptor for the field we are writing
      * @param value the boolean value to write
      */
-    public static void writeBoolean(WritableSequentialData out, FieldDefinition field, boolean value) {
+    public static void writeBoolean(
+            WritableSequentialData out, FieldDefinition field, boolean value) {
         writeBoolean(out, field, value, true);
     }
 
@@ -237,13 +246,14 @@ public static void writeBoolean(WritableSequentialData out, FieldDefinition fiel
      * @param value the boolean value to write
      * @param skipDefault default value results in no-op for non-oneOf
      */
-    public static void writeBoolean(WritableSequentialData out, FieldDefinition field, boolean value, boolean skipDefault) {
+    public static void writeBoolean(
+            WritableSequentialData out, FieldDefinition field, boolean value, boolean skipDefault) {
         assert field.type() == FieldType.BOOL : "Not a boolean type " + field;
         assert !field.repeated() : "Use writeBooleanList with repeated types";
         // In the case of oneOf we write the value even if it is default value of false
         if (value || field.oneOf() || !skipDefault) {
             writeTag(out, field, WIRE_TYPE_VARINT_OR_ZIGZAG);
-            out.writeByte(value ? (byte)1 : 0);
+            out.writeByte(value ? (byte) 1 : 0);
         }
     }
 
@@ -254,7 +264,8 @@ public static void writeBoolean(WritableSequentialData out, FieldDefinition fiel
      * @param field the descriptor for the field we are writing
      * @param enumValue the enum value to write
      */
-    public static void writeEnum(WritableSequentialData out, FieldDefinition field, EnumWithProtoMetadata enumValue) {
+    public static void writeEnum(
+            WritableSequentialData out, FieldDefinition field, EnumWithProtoMetadata enumValue) {
         assert field.type() == FieldType.ENUM : "Not an enum type " + field;
         assert !field.repeated() : "Use writeEnumList with repeated types";
         // When not a oneOf don't write default value
@@ -273,8 +284,9 @@ public static void writeEnum(WritableSequentialData out, FieldDefinition field,
      * @param value the string value to write
      * @throws IOException If a I/O error occurs
      */
-    public static void writeString(final WritableSequentialData out, final FieldDefinition field,
-                                   final String value) throws IOException {
+    public static void writeString(
+            final WritableSequentialData out, final FieldDefinition field, final String value)
+            throws IOException {
         writeString(out, field, value, true);
     }
 
@@ -287,25 +299,30 @@ public static void writeString(final WritableSequentialData out, final FieldDefi
      * @param skipDefault default value results in no-op for non-oneOf
      * @throws IOException If a I/O error occurs
      */
-    public static void writeString(final WritableSequentialData out, final FieldDefinition field,
-            final String value, boolean skipDefault) throws IOException {
+    public static void writeString(
+            final WritableSequentialData out,
+            final FieldDefinition field,
+            final String value,
+            boolean skipDefault)
+            throws IOException {
         assert field.type() == FieldType.STRING : "Not a string type " + field;
         assert !field.repeated() : "Use writeStringList with repeated types";
         writeStringNoChecks(out, field, value, skipDefault);
     }
 
     /**
-     * Write a string to data output, assuming the field is repeated. Usually this method is called multiple
-     * times, one for every repeated value. If all values are available immediately, {@link #writeStringList(
-     * WritableSequentialData, FieldDefinition, List)} should be used instead.
+     * Write a string to data output, assuming the field is repeated. Usually this method is called
+     * multiple times, once for every repeated value. If all values are available immediately, {@link
+     * #writeStringList(WritableSequentialData, FieldDefinition, List)} should be used instead.
      *
      * @param out The data output to write to
      * @param field the descriptor for the field we are writing, the field must be non-repeated
      * @param value the string value to write
      * @throws IOException If a I/O error occurs
      */
-    public static void writeOneRepeatedString(final WritableSequentialData out, final FieldDefinition field,
-            final String value) throws IOException {
+    public static void writeOneRepeatedString(
+            final WritableSequentialData out, final FieldDefinition field, final String value)
+            throws IOException {
         assert field.type() == FieldType.STRING : "Not a string type " + field;
         assert field.repeated() : "writeOneRepeatedString can only be used with repeated fields";
         writeStringNoChecks(out, field, value);
@@ -319,8 +336,9 @@ public static void writeOneRepeatedString(final WritableSequentialData out, fina
      * @param value the string value to write
      * @throws IOException If a I/O error occurs
      */
-    private static void writeStringNoChecks(final WritableSequentialData out, final FieldDefinition field,
-                                            final String value) throws IOException {
+    private static void writeStringNoChecks(
+            final WritableSequentialData out, final FieldDefinition field, final String value)
+            throws IOException {
         writeStringNoChecks(out, field, value, true);
     }
 
@@ -333,8 +351,12 @@ private static void writeStringNoChecks(final WritableSequentialData out, final
      * @param skipDefault default value results in no-op for non-oneOf
      * @throws IOException If a I/O error occurs
      */
-    private static void writeStringNoChecks(final WritableSequentialData out, final FieldDefinition field,
-            final String value, boolean skipDefault) throws IOException {
+    private static void writeStringNoChecks(
+            final WritableSequentialData out,
+            final FieldDefinition field,
+            final String value,
+            boolean skipDefault)
+            throws IOException {
         // When not a oneOf don't write default value
         if (skipDefault && !field.oneOf() && (value == null || value.isEmpty())) {
             return;
@@ -345,22 +367,25 @@ private static void writeStringNoChecks(final WritableSequentialData out, final
     }
 
     /**
-     * Write a bytes to data output, assuming the corresponding field is non-repeated, and field type
-     * is any delimited: bytes, string, or message.
+     * Write bytes to data output, assuming the corresponding field is non-repeated and the field
+     * type is any delimited type: bytes, string, or message.
      *
      * @param out The data output to write to
      * @param field the descriptor for the field we are writing, the field must not be repeated
      * @param value the bytes value to write
      * @throws IOException If a I/O error occurs
      */
-    public static void writeBytes(final WritableSequentialData out, final FieldDefinition field,
-                                  final RandomAccessData value) throws IOException {
+    public static void writeBytes(
+            final WritableSequentialData out,
+            final FieldDefinition field,
+            final RandomAccessData value)
+            throws IOException {
         writeBytes(out, field, value, true);
     }
 
     /**
-     * Write a bytes to data output, assuming the corresponding field is non-repeated, and field type
-     * is any delimited: bytes, string, or message.
+     * Write bytes to data output, assuming the corresponding field is non-repeated and the field
+     * type is any delimited type: bytes, string, or message.
      *
      * @param out The data output to write to
      * @param field the descriptor for the field we are writing, the field must not be repeated
@@ -368,16 +393,20 @@ public static void writeBytes(final WritableSequentialData out, final FieldDefin
      * @param skipDefault default value results in no-op for non-oneOf
      * @throws IOException If a I/O error occurs
      */
-    public static void writeBytes(final WritableSequentialData out, final FieldDefinition field,
-            final RandomAccessData value, boolean skipDefault) throws IOException {
+    public static void writeBytes(
+            final WritableSequentialData out,
+            final FieldDefinition field,
+            final RandomAccessData value,
+            boolean skipDefault)
+            throws IOException {
         assert field.type() == FieldType.BYTES : "Not a byte[] type " + field;
         assert !field.repeated() : "Use writeBytesList with repeated types";
         writeBytesNoChecks(out, field, value, skipDefault);
     }
 
     /**
-     * Write a bytes to data output, assuming the corresponding field is repeated, and field type
-     * is any delimited: bytes, string, or message. Usually this method is called multiple times, one
+     * Write bytes to data output, assuming the corresponding field is repeated and the field type is
+     * any delimited type: bytes, string, or message. Usually this method is called multiple times, once
      * for every repeated value. If all values are available immediately, {@link #writeBytesList(
      * WritableSequentialData, FieldDefinition, List)} should be used instead.
      *
@@ -386,8 +415,11 @@ public static void writeBytes(final WritableSequentialData out, final FieldDefin
      * @param value the bytes value to write
      * @throws IOException If a I/O error occurs
      */
-    public static void writeOneRepeatedBytes(final WritableSequentialData out, final FieldDefinition field,
-            final RandomAccessData value) throws IOException {
+    public static void writeOneRepeatedBytes(
+            final WritableSequentialData out,
+            final FieldDefinition field,
+            final RandomAccessData value)
+            throws IOException {
         assert field.type() == FieldType.BYTES : "Not a byte[] type " + field;
         assert field.repeated() : "writeOneRepeatedBytes can only be used with repeated fields";
         writeBytesNoChecks(out, field, value, true);
@@ -402,8 +434,12 @@ public static void writeOneRepeatedBytes(final WritableSequentialData out, final
      * @param skipZeroLength this is true for normal single bytes and false for repeated lists
      * @throws IOException If a I/O error occurs
      */
-    private static void writeBytesNoChecks(final WritableSequentialData out, final FieldDefinition field,
-            final RandomAccessData value, final boolean skipZeroLength) throws IOException {
+    private static void writeBytesNoChecks(
+            final WritableSequentialData out,
+            final FieldDefinition field,
+            final RandomAccessData value,
+            final boolean skipZeroLength)
+            throws IOException {
         // When not a oneOf don't write default value
         if (!field.oneOf() && (skipZeroLength && (value.length() == 0))) {
             return;
@@ -414,7 +450,12 @@ private static void writeBytesNoChecks(final WritableSequentialData out, final F
         out.writeBytes(value);
         final long bytesWritten = out.position() - posBefore;
         if (bytesWritten != value.length()) {
-            throw new IOException("Wrote less bytes [" + bytesWritten + "] than expected [" + value.length() + "]");
+            throw new IOException(
+                    "Wrote less bytes ["
+                            + bytesWritten
+                            + "] than expected ["
+                            + value.length()
+                            + "]");
         }
     }
 
@@ -429,18 +470,23 @@ private static void writeBytesNoChecks(final WritableSequentialData out, final F
      * @throws IOException If a I/O error occurs
      * @param <T> type of message
      */
-    public static <T> void writeMessage(final WritableSequentialData out, final FieldDefinition field,
-            final T message, final ProtoWriter<T> writer, final ToIntFunction<T> sizeOf) throws IOException {
+    public static <T> void writeMessage(
+            final WritableSequentialData out,
+            final FieldDefinition field,
+            final T message,
+            final ProtoWriter<T> writer,
+            final ToIntFunction<T> sizeOf)
+            throws IOException {
         assert field.type() == FieldType.MESSAGE : "Not a message type " + field;
         assert !field.repeated() : "Use writeMessageList with repeated types";
         writeMessageNoChecks(out, field, message, writer, sizeOf);
     }
 
     /**
-     * Write a message to data output, assuming the corresponding field is repeated. Usually this method is
-     * called multiple times, one for every repeated value. If all values are available immediately, {@link
-     * #writeMessageList(WritableSequentialData, FieldDefinition, List, ProtoWriter, ToIntFunction)}  should
-     * be used instead.
+     * Write a message to data output, assuming the corresponding field is repeated. Usually this
+     * method is called multiple times, once for every repeated value. If all values are available
+     * immediately, {@link #writeMessageList(WritableSequentialData, FieldDefinition, List,
+     * ProtoWriter, ToIntFunction)} should be used instead.
      *
      * @param out The data output to write to
      * @param field the descriptor for the field we are writing, the field must be repeated
@@ -450,8 +496,13 @@ public static <T> void writeMessage(final WritableSequentialData out, final Fiel
      * @throws IOException If a I/O error occurs
      * @param <T> type of message
      */
-    public static <T> void writeOneRepeatedMessage(final WritableSequentialData out, final FieldDefinition field,
-            final T message, final ProtoWriter<T> writer, final ToIntFunction<T> sizeOf) throws IOException {
+    public static <T> void writeOneRepeatedMessage(
+            final WritableSequentialData out,
+            final FieldDefinition field,
+            final T message,
+            final ProtoWriter<T> writer,
+            final ToIntFunction<T> sizeOf)
+            throws IOException {
         assert field.type() == FieldType.MESSAGE : "Not a message type " + field;
         assert field.repeated() : "writeOneRepeatedMessage can only be used with repeated fields";
         writeMessageNoChecks(out, field, message, writer, sizeOf);
@@ -468,8 +519,13 @@ public static <T> void writeOneRepeatedMessage(final WritableSequentialData out,
      * @throws IOException If a I/O error occurs
      * @param <T> type of message
      */
-    private static <T> void writeMessageNoChecks(final WritableSequentialData out, final FieldDefinition field,
-            final T message, final ProtoWriter<T> writer, final ToIntFunction<T> sizeOf) throws IOException {
+    private static <T> void writeMessageNoChecks(
+            final WritableSequentialData out,
+            final FieldDefinition field,
+            final T message,
+            final ProtoWriter<T> writer,
+            final ToIntFunction<T> sizeOf)
+            throws IOException {
         // When not a oneOf don't write default value
         if (field.oneOf() && message == null) {
             writeTag(out, field, WIRE_TYPE_DELIMITED);
@@ -491,8 +547,8 @@ public static <K, V> void writeMap(
             final ProtoWriter<K> kWriter,
             final ProtoWriter<V> vWriter,
             final ToIntFunction<K> sizeOfK,
-            final ToIntFunction<V> sizeOfV
-    ) throws IOException {
+            final ToIntFunction<V> sizeOfV)
+            throws IOException {
         // https://protobuf.dev/programming-guides/proto3/#maps
         // On the wire, a map is equivalent to:
         //    message MapFieldEntry {
@@ -526,7 +582,8 @@ public static <K, V> void writeMap(
      * @param field the descriptor for the field we are writing
      * @param value the optional integer value to write
      */
-    public static void writeOptionalInteger(WritableSequentialData out, FieldDefinition field, @Nullable Integer value) {
+    public static void writeOptionalInteger(
+            WritableSequentialData out, FieldDefinition field, @Nullable Integer value) {
         if (value != null) {
             writeTag(out, field, WIRE_TYPE_DELIMITED);
             final var newField = field.type().optionalFieldDefinition;
@@ -542,7 +599,8 @@ public static void writeOptionalInteger(WritableSequentialData out, FieldDefinit
      * @param field the descriptor for the field we are writing
      * @param value the optional long value to write
      */
-    public static void writeOptionalLong(WritableSequentialData out, FieldDefinition field, @Nullable Long value) {
+    public static void writeOptionalLong(
+            WritableSequentialData out, FieldDefinition field, @Nullable Long value) {
         if (value != null) {
             writeTag(out, field, WIRE_TYPE_DELIMITED);
             final var newField = field.type().optionalFieldDefinition;
@@ -558,12 +616,13 @@ public static void writeOptionalLong(WritableSequentialData out, FieldDefinition
      * @param field the descriptor for the field we are writing
      * @param value the optional float value to write
      */
-    public static void writeOptionalFloat(WritableSequentialData out, FieldDefinition field, @Nullable Float value) {
+    public static void writeOptionalFloat(
+            WritableSequentialData out, FieldDefinition field, @Nullable Float value) {
         if (value != null) {
             writeTag(out, field, WIRE_TYPE_DELIMITED);
             final var newField = field.type().optionalFieldDefinition;
             out.writeVarInt(sizeOfFloat(newField, value), false);
-            writeFloat(out,newField,value);
+            writeFloat(out, newField, value);
         }
     }
 
@@ -574,12 +633,13 @@ public static void writeOptionalFloat(WritableSequentialData out, FieldDefinitio
      * @param field the descriptor for the field we are writing
      * @param value the optional double value to write
      */
-    public static void writeOptionalDouble(WritableSequentialData out, FieldDefinition field, @Nullable Double value) {
+    public static void writeOptionalDouble(
+            WritableSequentialData out, FieldDefinition field, @Nullable Double value) {
         if (value != null) {
             writeTag(out, field, WIRE_TYPE_DELIMITED);
             final var newField = field.type().optionalFieldDefinition;
             out.writeVarInt(sizeOfDouble(newField, value), false);
-            writeDouble(out,newField,value);
+            writeDouble(out, newField, value);
         }
     }
 
@@ -590,7 +650,8 @@ public static void writeOptionalDouble(WritableSequentialData out, FieldDefiniti
      * @param field the descriptor for the field we are writing
      * @param value the optional boolean value to write
      */
-    public static void writeOptionalBoolean(WritableSequentialData out, FieldDefinition field, @Nullable Boolean value) {
+    public static void writeOptionalBoolean(
+            WritableSequentialData out, FieldDefinition field, @Nullable Boolean value) {
         if (value != null) {
             writeTag(out, field, WIRE_TYPE_DELIMITED);
             final var newField = field.type().optionalFieldDefinition;
@@ -607,7 +668,9 @@ public static void writeOptionalBoolean(WritableSequentialData out, FieldDefinit
      * @param value the optional string value to write
      * @throws IOException If a I/O error occurs
      */
-    public static void writeOptionalString(WritableSequentialData out, FieldDefinition field, @Nullable String value) throws IOException {
+    public static void writeOptionalString(
+            WritableSequentialData out, FieldDefinition field, @Nullable String value)
+            throws IOException {
         if (value != null) {
             writeTag(out, field, WIRE_TYPE_DELIMITED);
             final var newField = field.type().optionalFieldDefinition;
@@ -624,14 +687,16 @@ public static void writeOptionalString(WritableSequentialData out, FieldDefiniti
      * @param value the optional bytes value to write
      * @throws IOException If a I/O error occurs
      */
-    public static void writeOptionalBytes(WritableSequentialData out, FieldDefinition field, @Nullable Bytes value) throws IOException {
+    public static void writeOptionalBytes(
+            WritableSequentialData out, FieldDefinition field, @Nullable Bytes value)
+            throws IOException {
         if (value != null) {
             writeTag(out, field, WIRE_TYPE_DELIMITED);
             final var newField = field.type().optionalFieldDefinition;
             final int size = sizeOfBytes(newField, value);
             out.writeVarInt(size, false);
             if (size > 0) {
-                writeBytes(out,newField, value);
+                writeBytes(out, newField, value);
             }
         }
     }
@@ -639,7 +704,6 @@ public static void writeOptionalBytes(WritableSequentialData out, FieldDefinitio
     // ================================================================================================================
     // LIST VERSIONS OF WRITE METHODS
 
-
     /**
      * Write a list of integers to data output
      *
@@ -647,11 +711,13 @@ public static void writeOptionalBytes(WritableSequentialData out, FieldDefinitio
      * @param field the descriptor for the field we are writing
      * @param list the list of integers value to write
      */
-    public static void writeIntegerList(WritableSequentialData out, FieldDefinition field, List<Integer> list) {
-        assert switch(field.type()) {
-            case INT32, UINT32, SINT32, FIXED32, SFIXED32 -> true;
-            default -> false;
-        } : "Not an integer type " + field;
+    public static void writeIntegerList(
+            WritableSequentialData out, FieldDefinition field, List<Integer> list) {
+        assert switch (field.type()) {
+                    case INT32, UINT32, SINT32, FIXED32, SFIXED32 -> true;
+                    default -> false;
+                }
+                : "Not an integer type " + field;
         assert field.repeated() : "Use writeInteger with non-repeated types";
 
         // When not a oneOf don't write default value
@@ -691,7 +757,7 @@ assert switch(field.type()) {
                 int size = 0;
                 for (int i = 0; i < listSize; i++) {
                     final int val = list.get(i);
-                    size += sizeOfUnsignedVarInt64(((long)val << 1) ^ ((long)val >> 63));
+                    size += sizeOfUnsignedVarInt64(((long) val << 1) ^ ((long) val >> 63));
                 }
                 writeTag(out, field, WIRE_TYPE_DELIMITED);
                 out.writeVarInt(size, false);
@@ -704,7 +770,7 @@ assert switch(field.type()) {
                 // The bytes in protobuf are in little-endian order -- backwards for Java.
                 // Smallest byte first.
                 writeTag(out, field, WIRE_TYPE_DELIMITED);
-                out.writeVarLong((long)list.size() * FIXED32_SIZE, false);
+                out.writeVarLong((long) list.size() * FIXED32_SIZE, false);
                 for (int i = 0; i < listSize; i++) {
                     final int val = list.get(i);
                     out.writeInt(val, ByteOrder.LITTLE_ENDIAN);
@@ -721,11 +787,13 @@ assert switch(field.type()) {
      * @param field the descriptor for the field we are writing
      * @param list the list of longs value to write
      */
-    public static void writeLongList(WritableSequentialData out, FieldDefinition field, List<Long> list) {
-        assert switch(field.type()) {
-            case INT64, UINT64, SINT64, FIXED64, SFIXED64 -> true;
-            default -> false;
-        } : "Not a long type " + field;
+    public static void writeLongList(
+            WritableSequentialData out, FieldDefinition field, List<Long> list) {
+        assert switch (field.type()) {
+                    case INT64, UINT64, SINT64, FIXED64, SFIXED64 -> true;
+                    default -> false;
+                }
+                : "Not a long type " + field;
         assert field.repeated() : "Use writeLong with non-repeated types";
 
         // When not a oneOf don't write default value
@@ -765,7 +833,7 @@ assert switch(field.type()) {
                 // The bytes in protobuf are in little-endian order -- backwards for Java.
                 // Smallest byte first.
                 writeTag(out, field, WIRE_TYPE_DELIMITED);
-                out.writeVarLong((long)list.size() * FIXED64_SIZE, false);
+                out.writeVarLong((long) list.size() * FIXED64_SIZE, false);
                 for (int i = 0; i < listSize; i++) {
                     final long val = list.get(i);
                     out.writeLong(val, ByteOrder.LITTLE_ENDIAN);
@@ -782,7 +850,8 @@ assert switch(field.type()) {
      * @param field the descriptor for the field we are writing
      * @param list the list of floats value to write
      */
-    public static void writeFloatList(WritableSequentialData out, FieldDefinition field, List<Float> list) {
+    public static void writeFloatList(
+            WritableSequentialData out, FieldDefinition field, List<Float> list) {
         assert field.type() == FieldType.FLOAT : "Not a float type " + field;
         assert field.repeated() : "Use writeFloat with non-repeated types";
         // When not a oneOf don't write default value
@@ -805,7 +874,8 @@ public static void writeFloatList(WritableSequentialData out, FieldDefinition fi
      * @param field the descriptor for the field we are writing
      * @param list the list of doubles value to write
      */
-    public static void writeDoubleList(WritableSequentialData out, FieldDefinition field, List<Double> list) {
+    public static void writeDoubleList(
+            WritableSequentialData out, FieldDefinition field, List<Double> list) {
         assert field.type() == FieldType.DOUBLE : "Not a double type " + field;
         assert field.repeated() : "Use writeDouble with non-repeated types";
         // When not a oneOf don't write default value
@@ -828,7 +898,8 @@ public static void writeDoubleList(WritableSequentialData out, FieldDefinition f
      * @param field the descriptor for the field we are writing
      * @param list the list of booleans value to write
      */
-    public static void writeBooleanList(WritableSequentialData out, FieldDefinition field, List<Boolean> list) {
+    public static void writeBooleanList(
+            WritableSequentialData out, FieldDefinition field, List<Boolean> list) {
         assert field.type() == FieldType.BOOL : "Not a boolean type " + field;
         assert field.repeated() : "Use writeBoolean with non-repeated types";
         // When not a oneOf don't write default value
@@ -852,7 +923,10 @@ public static void writeBooleanList(WritableSequentialData out, FieldDefinition
      * @param field the descriptor for the field we are writing
      * @param list the list of enums value to write
      */
-    public static void writeEnumList(WritableSequentialData out, FieldDefinition field, List<? extends EnumWithProtoMetadata> list) {
+    public static void writeEnumList(
+            WritableSequentialData out,
+            FieldDefinition field,
+            List<? extends EnumWithProtoMetadata> list) {
         assert field.type() == FieldType.ENUM : "Not an enum type " + field;
         assert field.repeated() : "Use writeEnum with non-repeated types";
         // When not a oneOf don't write default value
@@ -879,7 +953,9 @@ public static void writeEnumList(WritableSequentialData out, FieldDefinition fie
      * @param list the list of strings value to write
      * @throws IOException If a I/O error occurs
      */
-    public static void writeStringList(WritableSequentialData out, FieldDefinition field, List<String> list) throws IOException {
+    public static void writeStringList(
+            WritableSequentialData out, FieldDefinition field, List<String> list)
+            throws IOException {
         assert field.type() == FieldType.STRING : "Not a string type " + field;
         assert field.repeated() : "Use writeString with non-repeated types";
         // When not a oneOf don't write default value
@@ -891,7 +967,7 @@ public static void writeStringList(WritableSequentialData out, FieldDefinition f
             final String value = list.get(i);
             writeTag(out, field, WIRE_TYPE_DELIMITED);
             out.writeVarInt(sizeOfStringNoTag(value), false);
-            Utf8Tools.encodeUtf8(value,out);
+            Utf8Tools.encodeUtf8(value, out);
         }
     }
 
@@ -906,7 +982,13 @@ public static void writeStringList(WritableSequentialData out, FieldDefinition f
      * @throws IOException If a I/O error occurs
      * @param <T> type of message
      */
-    public static <T> void writeMessageList(WritableSequentialData out, FieldDefinition field, List<T> list, ProtoWriter<T> writer, ToIntFunction<T> sizeOf) throws IOException {
+    public static <T> void writeMessageList(
+            WritableSequentialData out,
+            FieldDefinition field,
+            List<T> list,
+            ProtoWriter<T> writer,
+            ToIntFunction<T> sizeOf)
+            throws IOException {
         assert field.type() == FieldType.MESSAGE : "Not a message type " + field;
         assert field.repeated() : "Use writeMessage with non-repeated types";
         // When not a oneOf don't write default value
@@ -927,7 +1009,11 @@ public static <T> void writeMessageList(WritableSequentialData out, FieldDefinit
      * @param list the list of bytes objects value to write
      * @throws IOException If a I/O error occurs
      */
-    public static void writeBytesList(WritableSequentialData out, FieldDefinition field, List<? extends RandomAccessData> list) throws IOException {
+    public static void writeBytesList(
+            WritableSequentialData out,
+            FieldDefinition field,
+            List<? extends RandomAccessData> list)
+            throws IOException {
         assert field.type() == FieldType.BYTES : "Not a message type " + field;
         assert field.repeated() : "Use writeBytes with non-repeated types";
         // When not a oneOf don't write default value
@@ -941,7 +1027,8 @@ public static void writeBytesList(WritableSequentialData out, FieldDefinition fi
     }
 
     /**
-     * Write a generic delimited field by delegating to a supplied `writer` to write the actual elements.
+     * Write a generic delimited field by delegating to a supplied `writer` to write the actual
+     * elements.
      *
      * @param out The data output to write to
      * @param field the descriptor for the field we are writing
@@ -950,10 +1037,7 @@ public static void writeBytesList(WritableSequentialData out, FieldDefinition fi
      * @param <T> the type of the data output that extends WritableSequentialData
      */
     public static <T extends WritableSequentialData> void writeDelimited(
-            final T out,
-            final FieldDefinition field,
-            final int size,
-            final Consumer<T> writer) {
+            final T out, final FieldDefinition field, final int size, final Consumer<T> writer) {
         writeTag(out, field);
         out.writeVarInt(size, false);
         writer.accept(out);
@@ -1042,8 +1126,8 @@ private static int sizeOfUnsignedVarInt64(long value) {
     }
 
     /**
-     * Get number of bytes that would be needed to encode a field tag. Field wire type is
-     * calculated based on field type using {@link #wireType(FieldDefinition)} method.
+     * Get number of bytes that would be needed to encode a field tag. Field wire type is calculated
+     * based on field type using {@link #wireType(FieldDefinition)} method.
      *
      * @param field The field part of tag
      * @return the number of bytes for encoded value
@@ -1089,7 +1173,7 @@ public static int sizeOfOptionalInteger(FieldDefinition field, @Nullable Integer
     public static int sizeOfOptionalLong(FieldDefinition field, @Nullable Long value) {
         if (value != null) {
             final long longValue = value;
-            final int size =  sizeOfLong(field.type().optionalFieldDefinition, longValue);
+            final int size = sizeOfLong(field.type().optionalFieldDefinition, longValue);
             return sizeOfTag(field, WIRE_TYPE_DELIMITED) + sizeOfUnsignedVarInt32(size) + size;
         }
         return 0;
@@ -1193,8 +1277,10 @@ public static int sizeOfInteger(FieldDefinition field, int value, boolean skipDe
         if (skipDefault && !field.oneOf() && value == 0) return 0;
         return switch (field.type()) {
             case INT32 -> sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG) + sizeOfVarInt32(value);
-            case UINT32 -> sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG) + sizeOfUnsignedVarInt32(value);
-            case SINT32 -> sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG) + sizeOfUnsignedVarInt64(((long)value << 1) ^ ((long)value >> 63));
+            case UINT32 -> sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG)
+                    + sizeOfUnsignedVarInt32(value);
+            case SINT32 -> sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG)
+                    + sizeOfUnsignedVarInt64(((long) value << 1) ^ ((long) value >> 63));
             case SFIXED32, FIXED32 -> sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG) + FIXED32_SIZE;
             default -> throw unsupported();
         };
@@ -1222,8 +1308,10 @@ public static int sizeOfLong(FieldDefinition field, long value) {
     public static int sizeOfLong(FieldDefinition field, long value, boolean skipDefault) {
         if (skipDefault && !field.oneOf() && value == 0) return 0;
         return switch (field.type()) {
-            case INT64, UINT64 -> sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG) + sizeOfUnsignedVarInt64(value);
-            case SINT64 -> sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG) + sizeOfUnsignedVarInt64((value << 1) ^ (value >> 63));
+            case INT64, UINT64 -> sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG)
+                    + sizeOfUnsignedVarInt64(value);
+            case SINT64 -> sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG)
+                    + sizeOfUnsignedVarInt64((value << 1) ^ (value >> 63));
             case SFIXED64, FIXED64 -> sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG) + FIXED64_SIZE;
             default -> throw unsupported();
         };
@@ -1273,10 +1361,11 @@ public static int sizeOfBoolean(FieldDefinition field, boolean value) {
      * @return the number of bytes for encoded value
      */
     public static int sizeOfBoolean(FieldDefinition field, boolean value, boolean skipDefault) {
-        return (value || field.oneOf() || !skipDefault) ? sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG) + 1 : 0;
+        return (value || field.oneOf() || !skipDefault)
+                ? sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG) + 1
+                : 0;
     }
 
-
     /**
      * Get number of bytes that would be needed to encode an enum field
      *
@@ -1288,7 +1377,8 @@ public static int sizeOfEnum(FieldDefinition field, EnumWithProtoMetadata enumVa
         if (!field.oneOf() && (enumValue == null || enumValue.protoOrdinal() == 0)) {
             return 0;
         }
-        return sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG) + sizeOfVarInt32(enumValue.protoOrdinal());
+        return sizeOfTag(field, WIRE_TYPE_VARINT_OR_ZIGZAG)
+                + sizeOfVarInt32(enumValue.protoOrdinal());
     }
 
     /**
@@ -1355,7 +1445,8 @@ public static int sizeOfBytes(FieldDefinition field, RandomAccessData value) {
      * @param skipDefault default value results in zero size
      * @return the number of bytes for encoded value
      */
-    public static int sizeOfBytes(FieldDefinition field, RandomAccessData value, boolean skipDefault) {
+    public static int sizeOfBytes(
+            FieldDefinition field, RandomAccessData value, boolean skipDefault) {
         // When not a oneOf don't write default value
         if (skipDefault && !field.oneOf() && (value.length() == 0)) {
             return 0;
@@ -1410,7 +1501,7 @@ public static int sizeOfIntegerList(FieldDefinition field, List<Integer> list) {
             }
             case SINT32 -> {
                 for (final int i : list) {
-                    size += sizeOfUnsignedVarInt64(((long)i << 1) ^ ((long)i >> 63));
+                    size += sizeOfUnsignedVarInt64(((long) i << 1) ^ ((long) i >> 63));
                 }
             }
             case SFIXED32, FIXED32 -> size += FIXED32_SIZE * list.size();
@@ -1504,7 +1595,8 @@ public static int sizeOfBooleanList(FieldDefinition field, List<Boolean> list) {
      * @param list enum list value to get encoded size for
      * @return the number of bytes for encoded value
      */
-    public static int sizeOfEnumList(FieldDefinition field, List<? extends EnumWithProtoMetadata> list) {
+    public static int sizeOfEnumList(
+            FieldDefinition field, List<? extends EnumWithProtoMetadata> list) {
         // When not a oneOf don't write default value
         if (!field.oneOf() && list.isEmpty()) {
             return 0;
@@ -1540,7 +1632,8 @@ public static int sizeOfStringList(FieldDefinition field, List<String> list) {
      * @return the number of bytes for encoded value
      * @param <T> type for message
      */
-    public static <T> int sizeOfMessageList(FieldDefinition field, List<T> list, ToIntFunction<T> sizeOf) {
+    public static <T> int sizeOfMessageList(
+            FieldDefinition field, List<T> list, ToIntFunction<T> sizeOf) {
         int size = 0;
         for (final T value : list) {
             size += sizeOfMessage(field, value, sizeOf);
@@ -1555,15 +1648,21 @@ public static <T> int sizeOfMessageList(FieldDefinition field, List<T> list, ToI
      * @param list bytes list value to get encoded size for
      * @return the number of bytes for encoded value
      */
-    public static int sizeOfBytesList(FieldDefinition field, List<? extends RandomAccessData> list) {
+    public static int sizeOfBytesList(
+            FieldDefinition field, List<? extends RandomAccessData> list) {
         int size = 0;
         for (final RandomAccessData value : list) {
-            size += Math.toIntExact(sizeOfTag(field, WIRE_TYPE_DELIMITED) + sizeOfVarInt32(Math.toIntExact(value.length())) + value.length());
+            size +=
+                    Math.toIntExact(
+                            sizeOfTag(field, WIRE_TYPE_DELIMITED)
+                                    + sizeOfVarInt32(Math.toIntExact(value.length()))
+                                    + value.length());
         }
         return size;
     }
 
     public static int sizeOfDelimited(final FieldDefinition field, final int length) {
-        return Math.toIntExact(sizeOfTag(field, WIRE_TYPE_DELIMITED) + sizeOfVarInt32(length) + length);
+        return Math.toIntExact(
+                sizeOfTag(field, WIRE_TYPE_DELIMITED) + sizeOfVarInt32(length) + length);
     }
 }
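
As a worked illustration of the tag and zigzag expressions reflowed above (a standalone sketch; the mirrored constants and the wire-type value are assumptions, namely that ProtoConstants ordinals match the protobuf wire-type numbers, with length-delimited = 2): the tag packs the field number above TAG_TYPE_BITS bits of wire type, and the SINT zigzag transform maps small negative values to small unsigned varints.

    public final class TagAndZigZagDemo {
        private static final int TAG_TYPE_BITS = 3; // mirrors ProtoWriterTools.TAG_TYPE_BITS

        public static void main(String[] args) {
            // writeTag: (field.number() << TAG_TYPE_BITS) | wireType.ordinal()
            final int fieldNumber = 5;
            final int wireTypeDelimited = 2; // assumed protobuf wire type for length-delimited fields
            final int tag = (fieldNumber << TAG_TYPE_BITS) | wireTypeDelimited;
            System.out.println(tag); // (5 << 3) | 2 = 42

            // sizeOfInteger SINT32 case: ((long) value << 1) ^ ((long) value >> 63)
            final int value = -1;
            final long zigZag = ((long) value << 1) ^ ((long) value >> 63);
            System.out.println(zigZag); // 1, which encodes as a single varint byte
        }
    }
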
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcMethodDefinition.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcMethodDefinition.java
index 2a44ab2d..6988cec4 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcMethodDefinition.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcMethodDefinition.java
@@ -11,9 +11,7 @@
  * @param <R> The type of the response message
  */
 public record RpcMethodDefinition<T extends Record, R extends Record>(
-        String path,
-        Class<T> requestType,
-        Class<R> responseType) {
+        String path, Class<T> requestType, Class<R> responseType) {
 
     /**
      * Create a new builder for a {@link RpcMethodDefinition}.
@@ -50,6 +48,7 @@ public Builder<T, R> path(String path) {
 
         /**
          * Set the request type.
+         *
          * @param requestType The request type
          * @return This builder
          */
@@ -60,6 +59,7 @@ public Builder<T, R> requestType(Class<T> requestType) {
 
         /**
          * Set the response type.
+         *
          * @param responseType The response type
          * @return This builder
          */
@@ -70,6 +70,7 @@ public Builder<T, R> responseType(Class<R> responseType) {
 
         /**
          * Build the {@link RpcMethodDefinition}.
+         *
          * @return The {@link RpcMethodDefinition}
          */
        public RpcMethodDefinition<T, R> build() {
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcServiceDefinition.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcServiceDefinition.java
index e47b2cc5..2816a64e 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcServiceDefinition.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/RpcServiceDefinition.java
@@ -5,17 +5,19 @@
 import java.util.Set;
 
 /**
- * Defines a single RPC Service. The protobuf schema can define messages, or services. A Service is a collection of
- * grpc endpoints, or methods. This class simply contains the definition of the service endpoint.
+ * Defines a single RPC Service. The protobuf schema can define messages, or services. A Service is
+ * a collection of grpc endpoints, or methods. This class simply contains the definition of the
+ * service endpoint.
  */
 public interface RpcServiceDefinition {
     /**
-     * The base path of the service. This is the path that will be used to register the service with the grpc server.
-     * For example, "proto.ConsensusService".
+     * The base path of the service. This is the path that will be used to register the service with
+     * the grpc server. For example, "proto.ConsensusService".
      *
      * @return The base path of the service
      */
-    @NonNull String basePath();
+    @NonNull
+    String basePath();
 
     /**
      * The set of methods that are defined for this service.
@@ -23,5 +25,6 @@ public interface RpcServiceDefinition {
      * @return The set of methods
      */
     @SuppressWarnings("java:S1452")
-    @NonNull Set<RpcMethodDefinition<? extends Record, ? extends Record>> methods();
+    @NonNull
+    Set<RpcMethodDefinition<? extends Record, ? extends Record>> methods();
 }
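
A hedged sketch of how a service might satisfy this interface, combining it with the RpcMethodDefinition record from the previous file; GreeterServiceDefinition, HelloRequest, and HelloReply are made-up names used only for illustration.

import com.hedera.pbj.runtime.RpcMethodDefinition;
import com.hedera.pbj.runtime.RpcServiceDefinition;
import edu.umd.cs.findbugs.annotations.NonNull;
import java.util.Set;

final class GreeterServiceDefinition implements RpcServiceDefinition {
    @NonNull
    @Override
    public String basePath() {
        // The path used to register the service with the gRPC server.
        return "proto.GreeterService";
    }

    @NonNull
    @Override
    public Set<RpcMethodDefinition<? extends Record, ? extends Record>> methods() {
        // Each method maps a path to its request/response record types.
        return Set.of(new RpcMethodDefinition<>("sayHello", HelloRequest.class, HelloReply.class));
    }

    // Placeholder request/response records for the sketch.
    record HelloRequest(String name) {}

    record HelloReply(String reply) {}
}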
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Schema.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Schema.java
index 357e96f8..e27967ef 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Schema.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Schema.java
@@ -2,13 +2,14 @@
 package com.hedera.pbj.runtime;
 
 /**
- * Interface for Schemas, schemas are a programmatic model of protobuf schema. Used in parsing, writing protobuf,
- * to/from record objects. It is just a marker interface as all methods are static. Implementing classes are expected to
- * provide static methods with the following signatures:
- *<ul>
- *     <li><code>public static boolean valid(FieldDefinition f) {...}</code></li>
- *     <li><code>public static FieldDefinition getField(final int fieldNumber) {...}</code></li>
- *</ul>
+ * Interface for Schemas. Schemas are a programmatic model of a protobuf schema. Used in parsing,
+ * writing protobuf, to/from record objects. It is just a marker interface as all methods are
+ * static. Implementing classes are expected to provide static methods with the following
+ * signatures:
+ *
+ * <ul>
+ *   <li><code>public static boolean valid(FieldDefinition f) {...}</code>
+ *   <li><code>public static FieldDefinition getField(final int fieldNumber) {...}</code>
+ * </ul>
  */
-public interface Schema {
-}
+public interface Schema {}
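
A minimal sketch of an implementing class with the two static methods the Javadoc above asks for; the bodies are placeholders, since a generated schema would consult its real field definitions, and the import path for FieldDefinition is assumed to sit alongside Schema.

import com.hedera.pbj.runtime.FieldDefinition;
import com.hedera.pbj.runtime.Schema;

public final class ExampleSchema implements Schema {
    // A real schema reports whether the given field is one of its known definitions.
    public static boolean valid(FieldDefinition f) {
        return f != null;
    }

    // A real schema returns the FieldDefinition for the given field number, or null if unknown.
    public static FieldDefinition getField(final int fieldNumber) {
        return null;
    }
}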
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/UncheckedParseException.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/UncheckedParseException.java
index 8ecdcbaa..8f178a46 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/UncheckedParseException.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/UncheckedParseException.java
@@ -2,8 +2,8 @@
 package com.hedera.pbj.runtime;
 
 /**
- * An unchecked wrapper for a ParseException object used in rare cases
- * where existing code shouldn't throw checked exceptions.
+ * An unchecked wrapper for a ParseException object used in rare cases where existing code shouldn't
+ * throw checked exceptions.
  */
 public class UncheckedParseException extends RuntimeException {
     public UncheckedParseException(ParseException cause) {
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/UnknownFieldException.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/UnknownFieldException.java
index 5aeb1733..f9b21189 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/UnknownFieldException.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/UnknownFieldException.java
@@ -4,9 +4,7 @@
 import edu.umd.cs.findbugs.annotations.NonNull;
 import java.io.IOException;
 
-/**
- * An exception thrown when an unknown field is encountered while parsing a message.
- */
+/** An exception thrown when an unknown field is encountered while parsing a message. */
 public class UnknownFieldException extends IOException {
     /**
      * Constructs a new {@link UnknownFieldException} with the given field number.
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Utf8Tools.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Utf8Tools.java
index 1ba66f94..712a9c7d 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Utf8Tools.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/Utf8Tools.java
@@ -6,9 +6,7 @@
 import com.hedera.pbj.runtime.io.WritableSequentialData;
 import java.io.IOException;
 
-/**
- * UTF8 tools based on protobuf standard library, so we are byte for byte identical
- */
+/** UTF8 tools based on protobuf standard library, so we are byte for byte identical */
 public final class Utf8Tools {
 
     /**
@@ -52,7 +50,8 @@ static int encodedLength(final CharSequence sequence) throws IOException {
         return utf8Length;
     }
 
-    private static int encodedLengthGeneral(final CharSequence sequence, final int start) throws IOException {
+    private static int encodedLengthGeneral(final CharSequence sequence, final int start)
+            throws IOException {
         int utf16Length = sequence.length();
         int utf8Length = 0;
         for (int i = start; i < utf16Length; i++) {
@@ -66,7 +65,8 @@ private static int encodedLengthGeneral(final CharSequence sequence, final int s
                     // Check that we have a well-formed surrogate pair.
                     int cp = Character.codePointAt(sequence, i);
                     if (cp < MIN_SUPPLEMENTARY_CODE_POINT) {
-                        throw new MalformedProtobufException("Unpaired surrogate at index " + i + " of " + utf16Length);
+                        throw new MalformedProtobufException(
+                                "Unpaired surrogate at index " + i + " of " + utf16Length);
                     }
                     i++;
                 }
@@ -76,10 +76,11 @@ private static int encodedLengthGeneral(final CharSequence sequence, final int s
     }
 
     /**
-     * Encodes the input character sequence to a {@link WritableSequentialData} using the same algorithm as protoc, so we are
-     * byte for byte the same.
+     * Encodes the input character sequence to a {@link WritableSequentialData} using the same
+     * algorithm as protoc, so we are byte for byte the same.
      */
-    static void encodeUtf8(final CharSequence in, final WritableSequentialData out) throws IOException {
+    static void encodeUtf8(final CharSequence in, final WritableSequentialData out)
+            throws IOException {
         final int inLength = in.length();
         for (int inIx = 0; inIx < inLength; ++inIx) {
             final char c = in.charAt(inIx);
@@ -102,10 +103,12 @@ static void encodeUtf8(final CharSequence in, final WritableSequentialData out)
                         (byte) (0x80 | (0x3F & c)));
             } else {
                 // Four bytes (1111 xxxx 10xx xxxx 10xx xxxx 10xx xxxx)
-                // Minimum code point represented by a surrogate pair is 0x10000, 17 bits, four UTF-8 bytes
+                // Minimum code point represented by a surrogate pair is 0x10000, 17 bits, four
+                // UTF-8 bytes
                 final char low;
                 if (inIx + 1 == inLength || !isSurrogatePair(c, (low = in.charAt(++inIx)))) {
-                    throw new MalformedProtobufException("Unpaired surrogate at index " + inIx + " of " + inLength);
+                    throw new MalformedProtobufException(
+                            "Unpaired surrogate at index " + inIx + " of " + inLength);
                 }
                 int codePoint = toCodePoint(c, low);
                 out.writeByte4(
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/GrpcException.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/GrpcException.java
index afabaddb..59e08d8f 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/GrpcException.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/GrpcException.java
@@ -7,9 +7,10 @@
 import edu.umd.cs.findbugs.annotations.Nullable;
 
 /**
- * Thrown by an application when handling a gRPC request if the request fails. The status will be one of the canonical
- * gRPC statuses, and must be specified. This is returned back to the gRPC client. The message is optional and will be
- * returned to the client if specified. The cause is not returned to the client, but is used for debugging purposes.
+ * Thrown by an application when handling a gRPC request if the request fails. The status will be
+ * one of the canonical gRPC statuses, and must be specified. This is returned back to the gRPC
+ * client. The message is optional and will be returned to the client if specified. The cause is not
+ * returned to the client, but is used for debugging purposes.
  */
 public class GrpcException extends RuntimeException {
     /** The GRPC Status to return to the client */
@@ -17,6 +18,7 @@ public class GrpcException extends RuntimeException {
 
     /**
      * Create a new exception with the given status.
+     *
      * @param status the status of the exception
      */
     public GrpcException(@NonNull final GrpcStatus status) {
@@ -25,6 +27,7 @@ public GrpcException(@NonNull final GrpcStatus status) {
 
     /**
      * Create a new exception with the given status and message.
+     *
      * @param status the status of the exception
      * @param message the message of the exception
      */
@@ -34,6 +37,7 @@ public GrpcException(@NonNull final GrpcStatus status, @Nullable final String me
 
     /**
      * Create a new exception with the given status and cause.
+     *
      * @param status the status of the exception
      * @param cause the cause of the exception
      */
@@ -43,12 +47,15 @@ public GrpcException(@NonNull final GrpcStatus status, @Nullable final Throwable
 
     /**
      * Create a new gRPC Exception.
+     *
      * @param status the status of the exception
      * @param message the message of the exception
      * @param cause the cause of the exception
      */
     public GrpcException(
-            @NonNull final GrpcStatus status, @Nullable final String message, @Nullable final Throwable cause) {
+            @NonNull final GrpcStatus status,
+            @Nullable final String message,
+            @Nullable final Throwable cause) {
         super(message, cause);
         this.status = requireNonNull(status);
         if (status == GrpcStatus.OK) {
@@ -58,6 +65,7 @@ public GrpcException(
 
     /**
      * Get the status of the exception.
+     *
      * @return the status of the exception
      */
     @NonNull
@@ -67,6 +75,7 @@ public final GrpcStatus status() {
 
     /**
      * Get the message of the exception.
+     *
      * @return the message of the exception
      */
     @Nullable
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/GrpcStatus.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/GrpcStatus.java
index f587ee48..6ad8dc2d 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/GrpcStatus.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/GrpcStatus.java
@@ -2,115 +2,108 @@
 package com.hedera.pbj.runtime.grpc;
 
 /**
- * Status codes for gRPC. These are added to a {@link GrpcException} when an error occurs. The ordinal of the enum
- * <strong>exactly matches</strong> the status code in the gRPC protocol. The order in which these enums are defined
- * is critical.
+ * Status codes for gRPC. These are added to a {@link GrpcException} when an error occurs. The
+ * ordinal of the enum <strong>exactly matches</strong> the status code in the gRPC protocol. The
+ * order in which these enums are defined is critical.
  *
  * @see <a href="https://grpc.github.io/grpc/core/md_doc_statuscodes.html">GRPC Status codes</a>
  */
 public enum GrpcStatus {
-    /**
-     * The operation completed successfully.
-     */
+    /** The operation completed successfully. */
     OK, // 0
-    /**
-     * The operation was cancelled (typically by the caller).
-     */
+    /** The operation was cancelled (typically by the caller). */
     CANCELLED, // 1
     /**
-     * Unknown error. An example of where this error may be returned is if a Status value received from another
-     * address space belongs to an error-space that is not known in this address space. Also, errors raised by APIs
-     * that do not return enough error information may be converted to this error.
+     * Unknown error. An example of where this error may be returned is if a Status value received
+     * from another address space belongs to an error-space that is not known in this address space.
+     * Also, errors raised by APIs that do not return enough error information may be converted to
+     * this error.
      */
     UNKNOWN, // 2
     /**
-     * Client specified an invalid argument. Note that this differs from FAILED_PRECONDITION. INVALID_ARGUMENT
-     * indicates arguments that are problematic regardless of the state of the system (e.g., a malformed file name).
+     * Client specified an invalid argument. Note that this differs from FAILED_PRECONDITION.
+     * INVALID_ARGUMENT indicates arguments that are problematic regardless of the state of the
+     * system (e.g., a malformed file name).
      */
     INVALID_ARGUMENT, // 3
     /**
-     * Deadline expired before operation could complete. For operations that change the state of the system, this
-     * error may be returned even if the operation has completed successfully. For example, a successful response
-     * from a server could have been delayed long enough for the deadline to expire.
+     * Deadline expired before operation could complete. For operations that change the state of the
+     * system, this error may be returned even if the operation has completed successfully. For
+     * example, a successful response from a server could have been delayed long enough for the
+     * deadline to expire.
      */
     DEADLINE_EXCEEDED, // 4
-    /**
-     * Some requested entity (e.g., file or directory) was not found.
-     */
+    /** Some requested entity (e.g., file or directory) was not found. */
     NOT_FOUND, // 5
-    /**
-     * Some entity that we attempted to create (e.g., file or directory) already exists.
-     */
+    /** Some entity that we attempted to create (e.g., file or directory) already exists. */
     ALREADY_EXISTS, // 6
     /**
-     * The caller does not have permission to execute the specified operation. PERMISSION_DENIED must not be used for
-     * rejections caused by exhausting some resource (use RESOURCE_EXHAUSTED instead for those errors).
-     * PERMISSION_DENIED must not be used if the caller cannot be identified (use UNAUTHENTICATED instead for those
-     * errors).
+     * The caller does not have permission to execute the specified operation. PERMISSION_DENIED
+     * must not be used for rejections caused by exhausting some resource (use RESOURCE_EXHAUSTED
+     * instead for those errors). PERMISSION_DENIED must not be used if the caller cannot be
+     * identified (use UNAUTHENTICATED instead for those errors).
      */
     PERMISSION_DENIED, // 7
     /**
-     * Some resource has been exhausted, perhaps a per-user quota, or perhaps the entire file system is out of space.
+     * Some resource has been exhausted, perhaps a per-user quota, or perhaps the entire file system
+     * is out of space.
      */
     RESOURCE_EXHAUSTED, // 8
     /**
-     * Operation was rejected because the system is not in a state required for the operation's execution. For example,
-     * directory to be deleted may be non-empty, an `rmdir` operation is applied to a non-directory, etc.
+     * Operation was rejected because the system is not in a state required for the operation's
+     * execution. For example, the directory to be deleted may be non-empty, an `rmdir` operation
+     * is applied to a non-directory, etc.
      *
-     * <p>A litmus test that may help a service implementor in deciding between FAILED_PRECONDITION, ABORTED, and
-     * UNAVAILABLE:<br/>
-     * (a) Use UNAVAILABLE if the client can retry just the failing call.<br/>
-     * (b) Use ABORTED if the client should retry at a higher-level<br/>
-     * (e.g., restarting a read-modify-write sequence).<br/>
-     * (c) Use FAILED_PRECONDITION if the client should not retry until<br/>
-     * the system state has been explicitly fixed.  E.g., if an `rmdir`<br/>
-     * fails because the directory is non-empty, FAILED_PRECONDITION<br/>
-     * should be returned since the client should not retry unless<br/>
-     * they have first fixed up the directory by deleting files from it.<br/>
+     * <p>A litmus test that may help a service implementor in deciding between FAILED_PRECONDITION,
+     * ABORTED, and UNAVAILABLE:<br>
+     * (a) Use UNAVAILABLE if the client can retry just the failing call.<br>
+     * (b) Use ABORTED if the client should retry at a higher-level<br>
+     * (e.g., restarting a read-modify-write sequence).<br>
+     * (c) Use FAILED_PRECONDITION if the client should not retry until<br>
+     * the system state has been explicitly fixed. E.g., if an `rmdir`<br>
+     * fails because the directory is non-empty, FAILED_PRECONDITION<br>
+     * should be returned since the client should not retry unless<br>
+     * they have first fixed up the directory by deleting files from it.<br>
      */
     FAILED_PRECONDITION, // 9
     /**
-     * The operation was aborted, typically due to a concurrency issue like sequencer check failures, transaction
-     * aborts, etc.
+     * The operation was aborted, typically due to a concurrency issue like sequencer check
+     * failures, transaction aborts, etc.
      *
      * <p>See litmus test above for deciding between FAILED_PRECONDITION, ABORTED, and UNAVAILABLE.
      */
     ABORTED, // 10
     /**
-     * Operation was attempted past the valid range.  E.g., seeking or reading past end of file.
+     * Operation was attempted past the valid range. E.g., seeking or reading past end of file.
      *
-     * <p>Unlike INVALID_ARGUMENT, this error indicates a problem that may be fixed if the system state changes.
-     * For example, a 32-bit file system will generate INVALID_ARGUMENT if asked to read at an offset that is not in
-     * the range [0,2^32-1], but it will generate OUT_OF_RANGE if asked to read from an offset past the current
-     * file size.
+     * <p>Unlike INVALID_ARGUMENT, this error indicates a problem that may be fixed if the system
+     * state changes. For example, a 32-bit file system will generate INVALID_ARGUMENT if asked to
+     * read at an offset that is not in the range [0,2^32-1], but it will generate OUT_OF_RANGE if
+     * asked to read from an offset past the current file size.
      *
-     * <p>There is a fair bit of overlap between FAILED_PRECONDITION and OUT_OF_RANGE. We recommend using OUT_OF_RANGE
-     * (the more specific error) when it applies so that callers who are iterating through a space can easily look for
-     * an OUT_OF_RANGE error to detect when they are done.
+     * <p>There is a fair bit of overlap between FAILED_PRECONDITION and OUT_OF_RANGE. We recommend
+     * using OUT_OF_RANGE (the more specific error) when it applies so that callers who are
+     * iterating through a space can easily look for an OUT_OF_RANGE error to detect when they are
+     * done.
      */
     OUT_OF_RANGE, // 11
-    /**
-     * Operation is not implemented or not supported/enabled in this service.
-     */
+    /** Operation is not implemented or not supported/enabled in this service. */
     UNIMPLEMENTED, // 12
     /**
-     * Internal errors.  Means some invariants expected by underlying system has been broken.  If you see one of these
-     * errors, something is very broken.
+     * Internal errors. Means some invariant expected by the underlying system has been broken. If
+     * you see one of these errors, something is very broken.
      */
     INTERNAL, // 13
     /**
-     * The service is currently unavailable.  This is a most likely a transient condition and may be corrected by
-     * retrying with a backoff. Note that it is not always safe to retry non-idempotent operations.
+     * The service is currently unavailable. This is most likely a transient condition and may be
+     * corrected by retrying with a backoff. Note that it is not always safe to retry non-idempotent
+     * operations.
      *
      * <p>See litmus test above for deciding between FAILED_PRECONDITION, ABORTED, and UNAVAILABLE.
      */
     UNAVAILABLE, // 14
-    /**
-     * Unrecoverable data loss or corruption.
-     */
+    /** Unrecoverable data loss or corruption. */
     DATA_LOSS, // 15
-    /**
-     * The request does not have valid authentication credentials for the operation.
-     */
+    /** The request does not have valid authentication credentials for the operation. */
     UNAUTHENTICATED // 16
 }
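
A small hedged example of how these statuses pair with GrpcException from the previous file; the handler context is assumed. Because the enum ordinal is the wire status code, GrpcStatus.NOT_FOUND.ordinal() is 5.

import com.hedera.pbj.runtime.grpc.GrpcException;
import com.hedera.pbj.runtime.grpc.GrpcStatus;

final class StatusExample {
    static void requireFound(boolean found) {
        if (!found) {
            // The message is optional and is returned to the client when present.
            throw new GrpcException(GrpcStatus.NOT_FOUND, "no such entity");
        }
    }

    public static void main(String[] args) {
        System.out.println(GrpcStatus.NOT_FOUND.ordinal()); // prints 5, the gRPC status code
    }
}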
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipeline.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipeline.java
index 7a01d63d..d613cc06 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipeline.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipeline.java
@@ -9,13 +9,12 @@
  * @param <T> The subscribed item type
  */
 public interface Pipeline<T> extends Flow.Subscriber<T> {
-    /**
-     * Called when an END_STREAM frame is received from the client.
-     */
-    default void clientEndStreamReceived() { }
+    /** Called when an END_STREAM frame is received from the client. */
+    default void clientEndStreamReceived() {}
 
     /**
      * {@inheritDoc}
+     *
      * @throws RuntimeException if an error occurs while trying to write data to the pipeline
      */
     @Override
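
A minimal Pipeline implementation for illustration; it only logs, and it leaves clientEndStreamReceived() as the default no-op shown above.

import com.hedera.pbj.runtime.grpc.Pipeline;
import java.util.concurrent.Flow;

final class LoggingPipeline<T> implements Pipeline<T> {
    @Override
    public void onSubscribe(Flow.Subscription subscription) {
        subscription.request(Long.MAX_VALUE); // no flow control in this sketch
    }

    @Override
    public void onNext(T item) {
        System.out.println("received: " + item);
    }

    @Override
    public void onError(Throwable throwable) {
        throwable.printStackTrace();
    }

    @Override
    public void onComplete() {
        System.out.println("done");
    }
}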
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipelines.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipelines.java
index e3564686..2a01f8e2 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipelines.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/Pipelines.java
@@ -8,8 +8,9 @@
 import java.util.concurrent.Flow;
 
 /**
- * Utility class for generating a "pipeline" of processing steps for gRPC services. This is not intended to be used
- * directly by application code, but rather by the PBJ compiler when generating service interfaces.
+ * Utility class for generating a "pipeline" of processing steps for gRPC services. This is not
+ * intended to be used directly by application code, but rather by the PBJ compiler when generating
+ * service interfaces.
  */
 public final class Pipelines {
 
@@ -18,8 +19,8 @@ private Pipelines() {
     }
 
     /**
-     * Returns a {@link Flow.Subscriber} that does nothing. This can be used in cases where a subscriber is required
-     * but no proper implementation is available.
+     * Returns a {@link Flow.Subscriber} that does nothing. This can be used in cases where a
+     * subscriber is required but no proper implementation is available.
      *
      * @return A No-op subscriber.
      */
@@ -57,7 +58,8 @@ public void onComplete() {
     }
 
     /**
-     * Create a new pipeline for a unary gRPC service method. A unary method is a simple request/response method.
+     * Create a new pipeline for a unary gRPC service method. A unary method is a simple
+     * request/response method.
      *
      * @return A new builder for constructing the pipeline.
      * @param <T> The type of the request message.
@@ -68,8 +70,9 @@ public static <T, R> UnaryBuilder<T, R> unary() {
     }
 
     /**
-     * Create a new pipeline for a bidirectional streaming gRPC service method. A bidirectional streaming method
-     * allows for a stream of requests and a stream of responses to operate concurrently.
+     * Create a new pipeline for a bidirectional streaming gRPC service method. A bidirectional
+     * streaming method allows for a stream of requests and a stream of responses to operate
+     * concurrently.
      *
      * @return A new builder for constructing the pipeline.
      * @param <T> The type of the request message.
@@ -80,8 +83,9 @@ public static <T, R> BidiStreamingBuilder<T, R> bidiStreaming() {
     }
 
     /**
-     * Create a new pipeline for a client streaming gRPC service method. A client streaming method allows for a
-     * stream of requests to be sent to the server, with a single response returned at the very end.
+     * Create a new pipeline for a client streaming gRPC service method. A client streaming method
+     * allows for a stream of requests to be sent to the server, with a single response returned at
+     * the very end.
      *
      * @return A new builder for constructing the pipeline.
      * @param <T> The type of the request message.
@@ -92,8 +96,8 @@ public static <T, R> ClientStreamingBuilder<T, R> clientStreaming() {
     }
 
     /**
-     * Create a new pipeline for a server streaming gRPC service method. A server streaming method allows for a
-     * single request to be sent to the server, with a stream of responses returned.
+     * Create a new pipeline for a server streaming gRPC service method. A server streaming method
+     * allows for a single request to be sent to the server, with a stream of responses returned.
      *
      * @return A new builder for constructing the pipeline.
      * @param <T> The type of the request message.
@@ -111,7 +115,8 @@ public static <T, R> ServerStreamingBuilder<T, R> serverStreaming() {
      */
     public interface UnaryBuilder<T, R> {
         /**
-         * Configures a lambda for mapping from {@link Bytes} to the request message type. This must be specified.
+         * Configures a lambda for mapping from {@link Bytes} to the request message type. This must
+         * be specified.
          *
          * @param mapper The mapping function.
          * @return This builder.
@@ -120,8 +125,8 @@ public interface UnaryBuilder<T, R> {
         UnaryBuilder<T, R> mapRequest(@NonNull ExceptionalFunction<Bytes, T> mapper);
 
         /**
-         * Configures the unary method to be called when a request is received. This method handles the request and
-         * returns a response. This must be specified.
+         * Configures the unary method to be called when a request is received. This method handles
+         * the request and returns a response. This must be specified.
          *
          * @param method The method to call.
          * @return This builder.
@@ -130,7 +135,8 @@ public interface UnaryBuilder<T, R> {
         UnaryBuilder<T, R> method(@NonNull ExceptionalFunction<T, R> method);
 
         /**
-         * Configures a lambda for mapping from the response message type to {@link Bytes}. This must be specified.
+         * Configures a lambda for mapping from the response message type to {@link Bytes}. This
+         * must be specified.
          *
          * @param mapper The mapping function.
          * @return This builder.
@@ -139,8 +145,9 @@ public interface UnaryBuilder<T, R> {
         UnaryBuilder<T, R> mapResponse(@NonNull ExceptionalFunction<R, Bytes> mapper);
 
         /**
-         * Configures a subscriber to receive the response messages. This must be specified. This subscriber is
-         * provided by the web server and is responsible for sending the responses back to the client.
+         * Configures a subscriber to receive the response messages. This must be specified. This
+         * subscriber is provided by the web server and is responsible for sending the responses
+         * back to the client.
          *
          * @param replies The subscriber to receive the responses.
          * @return This builder.
@@ -149,8 +156,8 @@ public interface UnaryBuilder<T, R> {
         UnaryBuilder<T, R> respondTo(@NonNull Pipeline<? super Bytes> replies);
 
         /**
-         * Builds the pipeline and returns it. The returned pipeline receives the incoming messages, and contains
-         * the replies that are sent back to the client.
+         * Builds the pipeline and returns it. The returned pipeline receives the incoming messages,
+         * and contains the replies that are sent back to the client.
          *
          * @return the communication pipeline
          */
@@ -166,8 +173,9 @@ public interface UnaryBuilder<T, R> {
      */
     public interface BidiStreamingBuilder<T, R> {
         /**
-         * Configures a lambda for mapping from {@link Bytes} to the request message type. This must be specified.
-         * This function will be called once for each message arriving from the client.
+         * Configures a lambda for mapping from {@link Bytes} to the request message type. This must
+         * be specified. This function will be called once for each message arriving from the
+         * client.
          *
          * @param mapper The mapping function.
          * @return This builder.
@@ -176,9 +184,9 @@ public interface BidiStreamingBuilder<T, R> {
         BidiStreamingBuilder<T, R> mapRequest(@NonNull ExceptionalFunction<Bytes, T> mapper);
 
         /**
-         * Configures the bidirectional streaming method to be called when a request is received. This method is given
-         * a subscriber that it can push responses to, and it returns a subscriber that the system can push requests to.
-         * This must be specified.
+         * Configures the bidirectional streaming method to be called when a request is received.
+         * This method is given a subscriber that it can push responses to, and it returns a
+         * subscriber that the system can push requests to. This must be specified.
          *
          * @param method The method to call.
          * @return This builder.
@@ -187,8 +195,9 @@ public interface BidiStreamingBuilder<T, R> {
         BidiStreamingBuilder<T, R> method(@NonNull BidiStreamingMethod<T, R> method);
 
         /**
-         * Configures a lambda for mapping from the response message type to {@link Bytes}. This must be specified.
-         * This function will be called once for each message that the method sends back to the client.
+         * Configures a lambda for mapping from the response message type to {@link Bytes}. This
+         * must be specified. This function will be called once for each message that the method
+         * sends back to the client.
          *
          * @param mapper The mapping function.
          * @return This builder.
@@ -197,8 +206,9 @@ public interface BidiStreamingBuilder<T, R> {
         BidiStreamingBuilder<T, R> mapResponse(@NonNull ExceptionalFunction<R, Bytes> mapper);
 
         /**
-         * Configures a subscriber to receive the response messages. This must be specified. This subscriber is
-         * provided by the web server and is responsible for sending the responses back to the client.
+         * Configures a subscriber to receive the response messages. This must be specified. This
+         * subscriber is provided by the web server and is responsible for sending the responses
+         * back to the client.
          *
          * @param replies The subscriber to receive the responses.
          * @return This builder.
@@ -207,8 +217,8 @@ public interface BidiStreamingBuilder<T, R> {
         BidiStreamingBuilder<T, R> respondTo(@NonNull Pipeline<? super Bytes> replies);
 
         /**
-         * Builds the pipeline and returns it. The returned pipeline receives the incoming messages, and contains
-         * the replies that are sent back to the client.
+         * Builds the pipeline and returns it. The returned pipeline receives the incoming messages,
+         * and contains the replies that are sent back to the client.
          *
          * @return the communication pipeline
          */
@@ -224,8 +234,9 @@ public interface BidiStreamingBuilder<T, R> {
      */
     public interface ClientStreamingBuilder<T, R> {
         /**
-         * Configures a lambda for mapping from {@link Bytes} to the request message type. This must be specified.
-         * This function will be called once for each message arriving from the client.
+         * Configures a lambda for mapping from {@link Bytes} to the request message type. This must
+         * be specified. This function will be called once for each message arriving from the
+         * client.
          *
          * @param mapper The mapping function.
          * @return This builder.
@@ -234,10 +245,10 @@ public interface ClientStreamingBuilder<T, R> {
         ClientStreamingBuilder<T, R> mapRequest(@NonNull ExceptionalFunction<Bytes, T> mapper);
 
         /**
-         * Configures the client streaming method to be called when a request is received. This method is given
-         * a subscriber that it can push responses to, and it returns a subscriber that the system can push requests to.
-         * Only a single message is returned through the subscriber.
-         * This must be specified.
+         * Configures the client streaming method to be called when a request is received. This
+         * method is given a subscriber that it can push responses to, and it returns a subscriber
+         * that the system can push requests to. Only a single message is returned through the
+         * subscriber. This must be specified.
          *
          * @param method The method to call.
          * @return This builder.
@@ -246,8 +257,9 @@ public interface ClientStreamingBuilder<T, R> {
         ClientStreamingBuilder<T, R> method(@NonNull ClientStreamingMethod<T, R> method);
 
         /**
-         * Configures a lambda for mapping from the response message type to {@link Bytes}. This must be specified.
-         * This function will be called once for each message that the method sends back to the client.
+         * Configures a lambda for mapping from the response message type to {@link Bytes}. This
+         * must be specified. This function will be called once for each message that the method
+         * sends back to the client.
          *
          * @param mapper The mapping function.
          * @return This builder.
@@ -256,8 +268,9 @@ public interface ClientStreamingBuilder<T, R> {
         ClientStreamingBuilder<T, R> mapResponse(@NonNull ExceptionalFunction<R, Bytes> mapper);
 
         /**
-         * Configures a subscriber to receive the response messages. This must be specified. This subscriber is
-         * provided by the web server and is responsible for sending the responses back to the client.
+         * Configures a subscriber to receive the response messages. This must be specified. This
+         * subscriber is provided by the web server and is responsible for sending the responses
+         * back to the client.
          *
          * @param replies The subscriber to receive the responses.
          * @return This builder.
@@ -266,8 +279,8 @@ public interface ClientStreamingBuilder<T, R> {
         ClientStreamingBuilder<T, R> respondTo(@NonNull Pipeline<? super Bytes> replies);
 
         /**
-         * Builds the pipeline and returns it. The returned pipeline receives the incoming messages, and contains
-         * the replies that are sent back to the client.
+         * Builds the pipeline and returns it. The returned pipeline receives the incoming messages,
+         * and contains the replies that are sent back to the client.
          *
          * @return the communication pipeline
          */
@@ -283,7 +296,8 @@ public interface ClientStreamingBuilder<T, R> {
      */
     public interface ServerStreamingBuilder<T, R> {
         /**
-         * Configures a lambda for mapping from {@link Bytes} to the request message type. This must be specified.
+         * Configures a lambda for mapping from {@link Bytes} to the request message type. This must
+         * be specified.
          *
          * @param mapper The mapping function.
          * @return This builder.
@@ -292,8 +306,8 @@ public interface ServerStreamingBuilder<T, R> {
         ServerStreamingBuilder<T, R> mapRequest(@NonNull ExceptionalFunction<Bytes, T> mapper);
 
         /**
-         * Configures the server streaming method to be called when a request is received. This method is given
-         * a subscriber that it can push responses to. This must be specified.
+         * Configures the server streaming method to be called when a request is received. This
+         * method is given a subscriber that it can push responses to. This must be specified.
          *
          * @param method The method to call.
          * @return This builder.
@@ -302,8 +316,9 @@ public interface ServerStreamingBuilder<T, R> {
         ServerStreamingBuilder<T, R> method(@NonNull ServerStreamingMethod<T, R> method);
 
         /**
-         * Configures a lambda for mapping from the response message type to {@link Bytes}. This must be specified.
-         * This function will be called once for each message that the method sends back to the client.
+         * Configures a lambda for mapping from the response message type to {@link Bytes}. This
+         * must be specified. This function will be called once for each message that the method
+         * sends back to the client.
          *
          * @param mapper The mapping function.
          * @return This builder.
@@ -312,8 +327,9 @@ public interface ServerStreamingBuilder<T, R> {
         ServerStreamingBuilder<T, R> mapResponse(@NonNull ExceptionalFunction<R, Bytes> mapper);
 
         /**
-         * Configures a subscriber to receive the response messages. This must be specified. This subscriber is
-         * provided by the web server and is responsible for sending the responses back to the client.
+         * Configures a subscriber to receive the response messages. This must be specified. This
+         * subscriber is provided by the web server and is responsible for sending the responses
+         * back to the client.
          *
          * @param replies The subscriber to receive the responses.
          * @return This builder.
@@ -322,8 +338,8 @@ public interface ServerStreamingBuilder<T, R> {
         ServerStreamingBuilder<T, R> respondTo(@NonNull Pipeline<? super Bytes> replies);
 
         /**
-         * Builds the pipeline and returns it. The returned pipeline receives the incoming messages, and contains
-         * the replies that are sent back to the client.
+         * Builds the pipeline and returns it. The returned pipeline receives the incoming messages,
+         * and contains the replies that are sent back to the client.
          *
          * @return the communication pipeline
          */
@@ -351,8 +367,8 @@ public interface ExceptionalFunction<T, R> {
     }
 
     /**
-     * A function that handles a client streaming gRPC service method. Many messages are received from the client,
-     * but only a single response is sent back to the client when completed.
+     * A function that handles a client streaming gRPC service method. Many messages are received
+     * from the client, but only a single response is sent back to the client when completed.
      *
      * @param <T> The type of the request message.
      * @param <R> The type of the response message.
@@ -362,8 +378,8 @@ public interface ClientStreamingMethod<T, R>
             extends ExceptionalFunction<Pipeline<? super R>, Pipeline<? super T>> {}
 
     /**
-     * A function that handles a server streaming gRPC service method. A single request is received from the client,
-     * and many responses are sent back to the client.
+     * A function that handles a server streaming gRPC service method. A single request is received
+     * from the client, and many responses are sent back to the client.
      *
      * @param <T> The type of the request message.
      * @param <R> The type of the response message.
@@ -380,8 +396,8 @@ public interface ServerStreamingMethod<T, R> {
     }
 
     /**
-     * A function that handles a bidirectional streaming gRPC service method. Many messages are received from the
-     * client, and many responses are sent back to the client.
+     * A function that handles a bidirectional streaming gRPC service method. Many messages are
+     * received from the client, and many responses are sent back to the client.
      *
      * @param <T> The type of the request message.
      * @param <R> The type of the response message.
@@ -390,19 +406,20 @@ public interface BidiStreamingMethod<T, R>
             extends ExceptionalFunction<Pipeline<? super R>, Pipeline<? super T>> {}
 
     /**
-     * A convenient base class for the different builders. All builders have to hold state for request and
-     * response mapping functions, as well as the subscriber to send responses to, so we have a base class.
-     * This class also implements the {@link Pipeline} and {@link Flow.Subscription} interfaces, to
-     * reduce the overall number of instances created.
+     * A convenient base class for the different builders. All builders have to hold state for
+     * request and response mapping functions, as well as the subscriber to send responses to, so we
+     * have a base class. This class also implements the {@link Pipeline} and {@link
+     * Flow.Subscription} interfaces, to reduce the overall number of instances created.
      *
-     * <p>A {@link Flow.Subscription} is provided to each subscriber at the time they subscribe. Technically
-     * this can be a many-to-one relationship, but in our case, there is only going to be one subscriber for
-     * this {@link Flow.Subscription}, so we can simplify things a bit.
+     * <p>A {@link Flow.Subscription} is provided to each subscriber at the time they subscribe.
+     * Technically this can be a many-to-one relationship, but in our case, there is only going to
+     * be one subscriber for this {@link Flow.Subscription}, so we can simplify things a bit.
      *
      * @param <T> The type of the request message.
      * @param <R> The type of the response message.
      */
-    private abstract static class PipelineBuilderImpl<T, R> implements Pipeline<Bytes>, Flow.Subscription {
+    private abstract static class PipelineBuilderImpl<T, R>
+            implements Pipeline<Bytes>, Flow.Subscription {
         protected ExceptionalFunction<Bytes, T> requestMapper;
         protected ExceptionalFunction<R, Bytes> responseMapper;
         protected Pipeline<? super Bytes> replies;
@@ -411,7 +428,8 @@ private abstract static class PipelineBuilderImpl<T, R> implements Pipeline<Byte
 
         @Override
         public void request(long n) {
-            // If we supported flow control, we'd pay attention to the number being presented. And we should, ideally,
+            // If we supported flow control, we'd pay attention to the number being presented. And
+            // we should, ideally,
             // implement flow control. For now, we don't, so for now this is ignored.
         }
 
@@ -463,7 +481,8 @@ protected void validateParams() {
      * @param <T> The type of the request message.
      * @param <R> The type of the response message.
      */
-    private static final class UnaryBuilderImpl<T, R> extends PipelineBuilderImpl<T, R> implements UnaryBuilder<T, R> {
+    private static final class UnaryBuilderImpl<T, R> extends PipelineBuilderImpl<T, R>
+            implements UnaryBuilder<T, R> {
         private ExceptionalFunction<T, R> method;
 
         @Override
@@ -508,9 +527,12 @@ public Pipeline<? super Bytes> build() {
 
         @Override
         public void onNext(@NonNull final Bytes message) {
-            // A unary method call is pretty simple. We take the incoming bytes, convert them into the request
-            // message type, call the method, and then convert the response message back into bytes. If there
-            // are any exceptions, we forward that along. Otherwise, we just do the work and complete.
+            // A unary method call is pretty simple. We take the incoming bytes, convert them into
+            // the request message type, call the method, and then convert the response message
+            // back into bytes. If there are any exceptions, we forward that along. Otherwise, we
+            // just do the work and complete.
 
             if (completed) {
                 replies.onError(new IllegalStateException("Unary method already called."));
@@ -523,7 +545,7 @@ public void onNext(@NonNull final Bytes message) {
                 final var replyBytes = responseMapper.apply(reply);
                 replies.onNext(replyBytes);
                 onComplete();
-            } catch (RuntimeException e)  {
+            } catch (RuntimeException e) {
                 replies.onError(e);
                 throw e;
             } catch (Exception e) {
@@ -591,10 +613,14 @@ public Pipeline<? super Bytes> build() {
 
             replies.onSubscribe(this);
 
-            // This subscriber maps from the response type to bytes and sends them back to the client. Whenever
-            // the "onNext" method produces a new response, it will pass through this subscriber before being
-            // forwarded to the "replies" subscriber, where the webserver will return it to the client.
-            final var responseConverter = new MapSubscriber<R, Bytes>(replies, item -> responseMapper.apply(item));
+            // This subscriber maps from the response type to bytes and sends them back to the
+            // client. Whenever the "onNext" method produces a new response, it will pass through
+            // this subscriber before being forwarded to the "replies" subscriber, where the
+            // webserver will return it to the client.
+            final var responseConverter =
+                    new MapSubscriber<R, Bytes>(replies, item -> responseMapper.apply(item));
 
             try {
                 incoming = method.apply(responseConverter);
@@ -614,7 +640,7 @@ public void onNext(@NonNull final Bytes message) {
             try {
                 final var request = requestMapper.apply(message);
                 incoming.onNext(request);
-            } catch (RuntimeException e)  {
+            } catch (RuntimeException e) {
                 replies.onError(e);
                 throw e;
             } catch (Exception e) {
@@ -701,14 +727,15 @@ public Pipeline<? super Bytes> build() {
         @Override
         public void onNext(@NonNull final Bytes message) {
             if (completed) {
-                replies.onError(new IllegalStateException("ClientStreaming method already called."));
+                replies.onError(
+                        new IllegalStateException("ClientStreaming method already called."));
                 return;
             }
 
             try {
                 final var request = requestMapper.apply(message);
                 incoming.onNext(request);
-            } catch (RuntimeException e)  {
+            } catch (RuntimeException e) {
                 replies.onError(e);
                 throw e;
             } catch (Exception e) {
@@ -782,21 +809,23 @@ public Pipeline<? super Bytes> build() {
 
             responseConverter = new MapSubscriber<>(replies, item -> responseMapper.apply(item));
             responseConverter.onSubscribe(
-                    this); // Theoretically this should be done. But now I'm subscribing to this AND replies!
+                    this); // Theoretically this should be done. But now I'm subscribing to this AND
+            // replies!
             return this;
         }
 
         @Override
         public void onNext(@NonNull final Bytes message) {
             if (completed) {
-                replies.onError(new IllegalStateException("ServerStreaming method already called."));
+                replies.onError(
+                        new IllegalStateException("ServerStreaming method already called."));
                 return;
             }
 
             try {
                 final var request = requestMapper.apply(message);
                 method.apply(request, responseConverter);
-            } catch (RuntimeException e)  {
+            } catch (RuntimeException e) {
                 replies.onError(e);
                 throw e;
             } catch (Exception e) {
@@ -808,13 +837,14 @@ public void onNext(@NonNull final Bytes message) {
         @Override
         public void clientEndStreamReceived() {
             // nothing to do
-            // the server will continue streaming, since the message coming from the client is a subscription request
+            // the server will continue streaming, since the message coming from the client is a
+            // subscription request
         }
     }
 
     /**
-     * A subscriber that maps from one type to another. It is like a Java "map" operation on a stream, but as a
-     * subscriber.
+     * A subscriber that maps from one type to another. It is like a Java "map" operation on a
+     * stream, but as a subscriber.
      *
      * @param next The subscriber to send the mapped values to.
      * @param mapper The function to map from one type to another.
@@ -848,7 +878,7 @@ public void onNext(T item) {
             try {
                 final var r = mapper.apply(item);
                 next.onNext(r);
-            } catch (RuntimeException e)  {
+            } catch (RuntimeException e) {
                 next.onError(e);
                 throw e;
             } catch (Throwable t) {
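
A hedged sketch of wiring a unary pipeline with the builder methods above; the String request/response types and the Bytes.wrap/asUtf8String conversions stand in for the codecs that generated code would normally supply, and the replies subscriber is assumed to come from the web server.

import com.hedera.pbj.runtime.grpc.Pipeline;
import com.hedera.pbj.runtime.grpc.Pipelines;
import com.hedera.pbj.runtime.io.buffer.Bytes;

final class UnaryWiringSketch {
    static Pipeline<? super Bytes> wire(Pipeline<? super Bytes> replies) {
        return Pipelines.<String, String>unary()
                .mapRequest(bytes -> bytes.asUtf8String()) // Bytes -> request (assumed codec)
                .method(name -> "Hello, " + name)          // the unary handler itself
                .mapResponse(reply -> Bytes.wrap(reply))   // response -> Bytes (assumed codec)
                .respondTo(replies)                        // web-server supplied subscriber
                .build();
    }
}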
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/ServiceInterface.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/ServiceInterface.java
index 6d9cc0df..11aed577 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/ServiceInterface.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/grpc/ServiceInterface.java
@@ -7,13 +7,13 @@
 import java.util.Optional;
 
 /**
- * Defines a common interface for all implementations of a gRPC {@code service}. PBJ will generate a sub-interface
- * for each {@code service} in the protobuf schema definition files, with default implementations of each of the
- * given methods in this interface.
+ * Defines a common interface for all implementations of a gRPC {@code service}. PBJ will generate a
+ * sub-interface for each {@code service} in the protobuf schema definition files, with default
+ * implementations of each of the given methods in this interface.
  *
  * <p>For example, suppose I have the following protobuf file:
- * <pre>
- * {@code
+ *
+ * <pre>{@code
  * package example;
  *
  * service HelloService {
@@ -27,12 +27,12 @@
  * message HelloResponse {
  *   string reply = 1;
  * }
- * }
- * </pre>
+ * }</pre>
+ *
+ * <p>From this file, PBJ will generate a {@code HelloService} interface that extends {@code
+ * ServiceInterface}:
  *
- * <p>From this file, PBJ will generate a {@code HelloService} interface that extends {@code ServiceInterface}:
- * <pre>
- * {@code
+ * <pre>{@code
  * public interface HelloService extends ServiceInterface {
  *    // ...
  *
@@ -44,11 +44,10 @@
  *
  *    // ...
  * }
- * }
- * </pre>
+ * }</pre>
  *
- * In the application code, you will simply create a new class implementing the {@code HelloService} interface, and
- * register it with your webserver in whatever way is appropriate for your webserver.
+ * In the application code, you will simply create a new class implementing the {@code HelloService}
+ * interface, and register it with your webserver in whatever way is appropriate for your webserver.
  */
 public interface ServiceInterface {
     /** Represents the metadata of a method in a gRPC service. */
@@ -60,15 +59,18 @@ interface Method {
     interface RequestOptions {
         /** A constant for the gRPC content type "application/grpc". */
         String APPLICATION_GRPC = "application/grpc";
+
         /** A constant for the gRPC content type "application/grpc+proto". */
         String APPLICATION_GRPC_PROTO = "application/grpc+proto";
+
         /** A constant for the gRPC content type "application/grpc+json". */
         String APPLICATION_GRPC_JSON = "application/grpc+json";
 
         /**
-         * The authority of the client that is connecting to the service. This is the value of the ":authority" header
-         * in the HTTP/2 request. This value is used by the service to determine the client's identity. It may be that
-         * no authority is provided, in which case this method will return an empty optional.
+         * The authority of the client that is connecting to the service. This is the value of the
+         * ":authority" header in the HTTP/2 request. This value is used by the service to determine
+         * the client's identity. It may be that no authority is provided, in which case this method
+         * will return an empty optional.
          *
          * @return the authority of the client
          */
@@ -76,22 +78,23 @@ interface RequestOptions {
         Optional<String> authority();
 
         /**
-         * Gets whether the content type describes a protobuf message. This will be true if the {@link #contentType()}
-         * is equal to {@link #APPLICATION_GRPC_PROTO} or {@link #APPLICATION_GRPC}.
+         * Gets whether the content type describes a protobuf message. This will be true if the
+         * {@link #contentType()} is equal to {@link #APPLICATION_GRPC_PROTO} or {@link
+         * #APPLICATION_GRPC}.
          */
         boolean isProtobuf();
 
         /**
-         * Gets whether the content type describes a JSON message. This will be true if the {@link #contentType()}
-         * is equal to {@link #APPLICATION_GRPC_JSON}.
+         * Gets whether the content type describes a JSON message. This will be true if the {@link
+         * #contentType()} is equal to {@link #APPLICATION_GRPC_JSON}.
          */
         boolean isJson();
 
         /**
-         * Gets the content type of the request. This is the value of the "content-type" header in the HTTP/2 request.
-         * This value is used by the service to determine how to parse the request. Since gRPC supports custom content
-         * types, it is possible that the content type will be something other than the constants defined in this
-         * interface.
+         * Gets the content type of the request. This is the value of the "content-type" header in
+         * the HTTP/2 request. This value is used by the service to determine how to parse the
+         * request. Since gRPC supports custom content types, it is possible that the content type
+         * will be something other than the constants defined in this interface.
          *
          * @return the content type of the request
          */
@@ -102,18 +105,23 @@ interface RequestOptions {
     /** Gets the simple name of the service. For example, "HelloService". */
     @NonNull
     String serviceName();
+
     /** Gets the full name of the service. For example, "example.HelloService". */
     @NonNull
     String fullName();
-    /** Gets a list of each method in the service. This list may be empty but should never be null. */
+
+    /**
+     * Gets a list of each method in the service. This list may be empty but should never be null.
+     */
     @NonNull
     List<Method> methods();
 
     /**
-     * Called by the webserver to open a new connection between the client and the service. This method may be called
-     * many times concurrently, once per connection. The implementation must therefore be thread-safe. A default
-     * implementation is provided by the generated PBJ code, which will handle the dispatching of messages to the
-     * appropriate methods in the correct way (unary, server-side streaming, etc.).
+     * Called by the webserver to open a new connection between the client and the service. This
+     * method may be called many times concurrently, once per connection. The implementation must
+     * therefore be thread-safe. A default implementation is provided by the generated PBJ code,
+     * which will handle the dispatching of messages to the appropriate methods in the correct way
+     * (unary, server-side streaming, etc.).
      *
      * @param method The method that was called by the client.
      * @param opts Any options from the request, such as the content type.
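For illustration, a minimal sketch of how the content-type contract documented above might be used. It relies only on the RequestOptions members shown in this hunk; the enclosing type, the describeRequest helper, and the assumption that contentType() returns the raw header value as a String are hypothetical, not part of the generated PBJ code.

    static String describeRequest(final ServiceInterface.RequestOptions opts) {
        final String who = opts.authority().orElse("<no authority>");
        if (opts.isProtobuf()) {
            return "protobuf request from " + who;   // "application/grpc" or "application/grpc+proto"
        } else if (opts.isJson()) {
            return "JSON request from " + who;       // "application/grpc+json"
        } else {
            // gRPC allows custom content types, so fall back to the raw header value.
            return "custom content type " + opts.contentType() + " from " + who;
        }
    }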
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/DataEncodingException.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/DataEncodingException.java
index ad06c932..d11d40d9 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/DataEncodingException.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/DataEncodingException.java
@@ -2,9 +2,10 @@
 package com.hedera.pbj.runtime.io;
 
 /**
- * A {@link RuntimeException} thrown when attempting to decode data from a {@link ReadableSequentialData}
- * but it cannot be decoded. See specifically {@link ReadableSequentialData#readVarInt(boolean)} and
- * {@link ReadableSequentialData#readVarLong(boolean)}
+ * A {@link RuntimeException} thrown when attempting to decode data from a {@link
+ * ReadableSequentialData} but it cannot be decoded. See specifically {@link
+ * ReadableSequentialData#readVarInt(boolean)} and {@link
+ * ReadableSequentialData#readVarLong(boolean)}
  */
 public class DataEncodingException extends RuntimeException {
     /**
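For illustration, a hedged sketch of a caller that tolerates malformed varints. The readVarLongOrDefault helper is hypothetical; the only API it assumes beyond this hunk is ReadableSequentialData#readVarLong(boolean), which the class javadoc above already references.

    static long readVarLongOrDefault(final ReadableSequentialData in, final long fallback) {
        try {
            return in.readVarLong(false);   // may also throw BufferUnderflowException at end of data
        } catch (final DataEncodingException e) {
            // Thrown when the bytes cannot be decoded as a varint, per the class javadoc above.
            return fallback;
        }
    }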
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/ReadableSequentialData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/ReadableSequentialData.java
index 751274b1..f17065c4 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/ReadableSequentialData.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/ReadableSequentialData.java
@@ -6,7 +6,6 @@
 import com.hedera.pbj.runtime.io.buffer.RandomAccessData;
 import com.hedera.pbj.runtime.io.stream.EOFException;
 import edu.umd.cs.findbugs.annotations.NonNull;
-
 import java.io.InputStream;
 import java.io.UncheckedIOException;
 import java.nio.BufferUnderflowException;
@@ -14,19 +13,22 @@
 import java.nio.ByteOrder;
 
 /**
- * A {@link SequentialData} which may be read. This interface is suitable for reading data from a stream or buffer.
- * Once read, data cannot be re-read. The {@link #position()}, once incremented, cannot be reset or decremented.
+ * A {@link SequentialData} which may be read. This interface is suitable for reading data from a
+ * stream or buffer. Once read, data cannot be re-read. The {@link #position()}, once incremented,
+ * cannot be reset or decremented.
  *
- * <p>As data is read from the stream or buffer, the {@link #position()} is incremented. The limit is the maximum
- * {@link #position()} within the sequence from which data can be read. Implementations of this class must provide an
- * implementation of {@link #readByte()} that reads a byte from the current {@link #position()} and increments the
- * {@link #position()} by 1. All other read methods have a default implementation based on {@link #readByte()}.
- * Implementations of this interface may choose to reimplement those methods to be more efficient as needed.
+ * <p>As data is read from the stream or buffer, the {@link #position()} is incremented. The limit
+ * is the maximum {@link #position()} within the sequence from which data can be read.
+ * Implementations of this class must provide an implementation of {@link #readByte()} that reads a
+ * byte from the current {@link #position()} and increments the {@link #position()} by 1. All other
+ * read methods have a default implementation based on {@link #readByte()}. Implementations of this
+ * interface may choose to reimplement those methods to be more efficient as needed.
  */
 public interface ReadableSequentialData extends SequentialData {
 
     /**
-     * Reads the signed byte at current {@link #position()}, and then increments the {@link #position()} by 1.
+     * Reads the signed byte at current {@link #position()}, and then increments the {@link
+     * #position()} by 1.
      *
      * @return The signed byte at the current {@link #position()}
      * @throws BufferUnderflowException If there are no bytes remaining in this sequence
@@ -35,8 +37,9 @@ public interface ReadableSequentialData extends SequentialData {
     byte readByte() throws BufferUnderflowException, UncheckedIOException;
 
     /**
-     * Reads the unsigned byte at the current {@link #position()}, and then increments the {@link #position()} by 1.
-     * That is, it reads a single byte, but returns an integer in the range 0 to 255.
+     * Reads the unsigned byte at the current {@link #position()}, and then increments the {@link
+     * #position()} by 1. That is, it reads a single byte, but returns an integer in the range 0 to
+     * 255.
      *
      * @return The unsigned byte at current {@link #position()}
      * @throws BufferUnderflowException If there are no bytes remaining in this sequence
@@ -47,17 +50,19 @@ default int readUnsignedByte() throws BufferUnderflowException, UncheckedIOExcep
     }
 
     /**
-     * Read bytes starting at current {@link #position()} into the {@code dst} array, up to the size of the {@code dst}
-     * array. If {@code dst} is larger than the remaining bytes in the sequence, only the remaining bytes are read.
-     * The total number of bytes actually read are returned. The bytes will be placed starting at index 0 of the array.
-     * The {@link #position()} will be incremented by the number of bytes read. If no bytes are available in the
-     * sequence, then 0 is returned.
+     * Read bytes starting at current {@link #position()} into the {@code dst} array, up to the size
+     * of the {@code dst} array. If {@code dst} is larger than the remaining bytes in the sequence,
+     * only the remaining bytes are read. The total number of bytes actually read is returned. The
+     * bytes will be placed starting at index 0 of the array. The {@link #position()} will be
+     * incremented by the number of bytes read. If no bytes are available in the sequence, then 0 is
+     * returned.
      *
-     * <p>The {@code dst} array may be partially written to at the time that any of the declared exceptions are thrown.
+     * <p>The {@code dst} array may be partially written to at the time that any of the declared
+     * exceptions are thrown.
      *
-     * <p>Bytes are read from the sequence one at a time. If there are not {@code length} bytes remaining in this
-     * sequence, then a {@link BufferUnderflowException} will be thrown. The {@link #position()} will be
-     * incremented by the number of bytes read prior to the exception.
+     * <p>Bytes are read from the sequence one at a time. If there are not {@code length} bytes
+     * remaining in this sequence, then a {@link BufferUnderflowException} will be thrown. The
+     * {@link #position()} will be incremented by the number of bytes read prior to the exception.
      *
      * @param dst The destination array. Cannot be null.
      * @throws NullPointerException if {@code dst} is null
@@ -69,26 +74,28 @@ default long readBytes(@NonNull final byte[] dst) throws UncheckedIOException {
     }
 
     /**
-     * Read bytes starting at the current {@link #position()} into the {@code dst} array, up to {@code maxLength}
-     * number of bytes. If {@code maxLength} is larger than the remaining bytes in the sequence, only the remaining
-     * bytes are read. The total number of bytes actually read are returned. The bytes will be placed starting at index
-     * {@code offset} of the array. The {@link #position()} will be incremented by the number of bytes read. If no
-     * bytes are available in the sequence, then 0 is returned.
+     * Read bytes starting at the current {@link #position()} into the {@code dst} array, up to
+     * {@code maxLength} number of bytes. If {@code maxLength} is larger than the remaining bytes in
+     * the sequence, only the remaining bytes are read. The total number of bytes actually read is
+     * returned. The bytes will be placed starting at index {@code offset} of the array. The {@link
+     * #position()} will be incremented by the number of bytes read. If no bytes are available in
+     * the sequence, then 0 is returned.
      *
-     * <p>The {@code dst} array may be partially written to at the time that any of the declared exceptions are thrown.
+     * <p>The {@code dst} array may be partially written to at the time that any of the declared
+     * exceptions are thrown.
      *
-     * <p>Bytes are read from the sequence one at a time. If there are not {@code length} bytes remaining in this
-     * sequence, then a {@link BufferUnderflowException} will be thrown. The {@link #position()} will be
-     * incremented by the number of bytes read prior to the exception.
+     * <p>Bytes are read from the sequence one at a time. If there are not {@code length} bytes
+     * remaining in this sequence, then a {@link BufferUnderflowException} will be thrown. The
+     * {@link #position()} will be incremented by the number of bytes read prior to the exception.
      *
      * @param dst The array into which bytes are to be written
-     * @param offset The offset within the {@code dst} array of the first byte to be written; must be non-negative and
-     *                no larger than {@code dst.length - maxLength}.
-     * @param maxLength The maximum number of bytes to be written to the given {@code dst} array; must be non-negative
-     *                and no larger than {@code dst.length - offset}
+     * @param offset The offset within the {@code dst} array of the first byte to be written; must
+     *     be non-negative and no larger than {@code dst.length - maxLength}.
+     * @param maxLength The maximum number of bytes to be written to the given {@code dst} array;
+     *     must be non-negative and no larger than {@code dst.length - offset}
      * @throws NullPointerException If {@code dst} is null
      * @throws IndexOutOfBoundsException If {@code offset} is out of bounds of {@code dst} or if
-     *                                  {@code offset + maxLength} is not less than {@code dst.length}
+     *     {@code offset + maxLength} is not less than {@code dst.length}
      * @throws IllegalArgumentException If {@code maxLength} is negative
      * @throws UncheckedIOException If an I/O error occurs
      * @return The number of bytes read actually read and placed into {@code dst}
@@ -99,8 +106,10 @@ default long readBytes(@NonNull final byte[] dst, final int offset, final int ma
             throw new IllegalArgumentException("Negative maxLength not allowed");
         }
 
-        // Read up to maxLength bytes into the dst array. Note the check for `hasRemaining()` is done in the loop
-        // because, for streams, we cannot determine ahead of time the total number of available bytes, so we must
+        // Read up to maxLength bytes into the dst array. Note the check for `hasRemaining()` is
+        // done in the loop
+        // because, for streams, we cannot determine ahead of time the total number of available
+        // bytes, so we must
         // continue to check as we process each byte. This is not efficient for buffers.
         final var length = Math.min(maxLength, remaining());
         final var maxIndex = offset + length;
@@ -118,18 +127,19 @@ default long readBytes(@NonNull final byte[] dst, final int offset, final int ma
     }
 
     /**
-     * Read bytes starting at current {@link #position()} into the destination {@link ByteBuffer}, up to
-     * {@link ByteBuffer#remaining()} number of bytes. If {@link ByteBuffer#remaining()} is larger than the remaining
-     * bytes in the sequence, only the remaining bytes are read. The total number of bytes actually read are returned.
-     * The bytes will be placed starting at index {@link ByteBuffer#position()} of the buffer and the
-     * {@link #position()} will be incremented by the number of bytes read. If no bytes are available in the sequence,
-     * then 0 is returned.
+     * Read bytes starting at current {@link #position()} into the destination {@link ByteBuffer},
+     * up to {@link ByteBuffer#remaining()} number of bytes. If {@link ByteBuffer#remaining()} is
+     * larger than the remaining bytes in the sequence, only the remaining bytes are read. The total
+     * number of bytes actually read is returned. The bytes will be placed starting at index {@link
+     * ByteBuffer#position()} of the buffer and the {@link #position()} will be incremented by the
+     * number of bytes read. If no bytes are available in the sequence, then 0 is returned.
      *
-     * <p>The {@code dst} buffer may be partially written to at the time that any of the declared exceptions are thrown.
+     * <p>The {@code dst} buffer may be partially written to at the time that any of the declared
+     * exceptions are thrown.
      *
-     * <p>Bytes are read from the sequence one at a time. If there are not {@code length} bytes remaining in this
-     * sequence, then a {@link BufferUnderflowException} will be thrown. The {@link #position()} will be
-     * incremented by the number of bytes read prior to the exception.
+     * <p>Bytes are read from the sequence one at a time. If there are not {@code length} bytes
+     * remaining in this sequence, then a {@link BufferUnderflowException} will be thrown. The
+     * {@link #position()} will be incremented by the number of bytes read prior to the exception.
      *
      * @param dst The destination {@link ByteBuffer}
      * @throws NullPointerException If {@code dst} is null
@@ -137,8 +147,10 @@ default long readBytes(@NonNull final byte[] dst, final int offset, final int ma
      * @return The number of bytes read actually read and placed into {@code dst}
      */
     default long readBytes(@NonNull final ByteBuffer dst) throws UncheckedIOException {
-        // Read up to maxLength bytes into the dst array. Note the check for `hasRemaining()` is done in the loop
-        // because, for streams, we cannot determine ahead of time the total number of available bytes, so we must
+        // Read up to maxLength bytes into the dst array. Note the check for `hasRemaining()` is
+        // done in the loop
+        // because, for streams, we cannot determine ahead of time the total number of available
+        // bytes, so we must
         // continue to check as we process each byte. This is not efficient for buffers.
         final var len = dst.remaining();
         long bytesRead = 0;
@@ -155,26 +167,29 @@ default long readBytes(@NonNull final ByteBuffer dst) throws UncheckedIOExceptio
     }
 
     /**
-     * Read bytes starting at current {@link #position()} into the destination {@link BufferedData}, up to
-     * {@link BufferedData#remaining()} number of bytes. If {@link BufferedData#remaining()} is larger than the
-     * remaining bytes in the sequence, only the remaining bytes are read. The total number of bytes actually read are
-     * returned. The bytes will be placed starting at index {@link BufferedData#position()} of the buffer. The
-     * {@link #position()} will be incremented by the number of bytes read. If no bytes are available in the sequence,
-     * then 0 is returned.
+     * Read bytes starting at current {@link #position()} into the destination {@link BufferedData},
+     * up to {@link BufferedData#remaining()} number of bytes. If {@link BufferedData#remaining()}
+     * is larger than the remaining bytes in the sequence, only the remaining bytes are read. The
+     * total number of bytes actually read is returned. The bytes will be placed starting at index
+     * {@link BufferedData#position()} of the buffer. The {@link #position()} will be incremented by
+     * the number of bytes read. If no bytes are available in the sequence, then 0 is returned.
      *
-     * <p>The {@code dst} buffer may be partially written to at the time that any of the declared exceptions are thrown.
+     * <p>The {@code dst} buffer may be partially written to at the time that any of the declared
+     * exceptions are thrown.
      *
-     * <p>Bytes are read from the sequence one at a time. If there are not {@code length} bytes remaining in this
-     * sequence, then a {@link BufferUnderflowException} will be thrown. The {@link #position()} will be
-     * incremented by the number of bytes read prior to the exception.
+     * <p>Bytes are read from the sequence one at a time. If there are not {@code length} bytes
+     * remaining in this sequence, then a {@link BufferUnderflowException} will be thrown. The
+     * {@link #position()} will be incremented by the number of bytes read prior to the exception.
      *
      * @param dst The destination {@link BufferedData}
      * @throws UncheckedIOException If an I/O error occurs
      * @return The number of bytes read actually read and placed into {@code dst}
      */
     default long readBytes(@NonNull final BufferedData dst) throws UncheckedIOException {
-        // Read up to maxLength bytes into the dst array. Note the check for `hasRemaining()` is done in the loop
-        // because, for streams, we cannot determine ahead of time the total number of available bytes, so we must
+        // Read up to maxLength bytes into the dst array. Note the check for `hasRemaining()` is
+        // done in the loop
+        // because, for streams, we cannot determine ahead of time the total number of available
+        // bytes, so we must
         // continue to check as we process each byte. This is not efficient for buffers.
         final var len = dst.remaining();
         long bytesRead = 0;
@@ -191,21 +206,23 @@ default long readBytes(@NonNull final BufferedData dst) throws UncheckedIOExcept
     }
 
     /**
-     * Read {@code length} bytes from this sequence, returning them as a {@link Bytes} buffer of
-     * the read data. The returned bytes will be immutable. The {@link #position()} of this sequence will be
-     * incremented by {@code length} bytes.
+     * Read {@code length} bytes from this sequence, returning them as a {@link Bytes} buffer of the
+     * read data. The returned bytes will be immutable. The {@link #position()} of this sequence
+     * will be incremented by {@code length} bytes.
      *
-     * <p>Bytes are read from the sequence one at a time. If there are not {@code length} bytes remaining in this
-     * sequence, then a {@link BufferUnderflowException} will be thrown. The {@link #position()} will be
-     * incremented by the number of bytes read prior to the exception.
+     * <p>Bytes are read from the sequence one at a time. If there are not {@code length} bytes
+     * remaining in this sequence, then a {@link BufferUnderflowException} will be thrown. The
+     * {@link #position()} will be incremented by the number of bytes read prior to the exception.
      *
      * @param length The non-negative length in bytes to read
      * @return new {@link Bytes} containing the read data
      * @throws IllegalArgumentException If {@code length} is negative
-     * @throws BufferUnderflowException If there are not {@code length} bytes remaining in this sequence
+     * @throws BufferUnderflowException If there are not {@code length} bytes remaining in this
+     *     sequence
      * @throws UncheckedIOException If an I/O error occurs
      */
-    default @NonNull Bytes readBytes(final int length) throws BufferUnderflowException, UncheckedIOException {
+    default @NonNull Bytes readBytes(final int length)
+            throws BufferUnderflowException, UncheckedIOException {
         if (length < 0) {
             throw new IllegalArgumentException("Negative length not allowed");
         }
@@ -223,23 +240,25 @@ default long readBytes(@NonNull final BufferedData dst) throws UncheckedIOExcept
     }
 
     /**
-     * Return a "view" on the underlying sequence of bytes, starting at the current {@link #position()} and extending
-     * {@code length} bytes. The returned bytes may change over time if the underlying data is updated! The
-     * {@link #position()} of this sequence will be incremented by {@code length} bytes. The {@link #position()}
-     * of the returned sequence will be 0 and its {@link #limit()} and {@link #capacity()} will be {@code length}.
+     * Return a "view" on the underlying sequence of bytes, starting at the current {@link
+     * #position()} and extending {@code length} bytes. The returned bytes may change over time if
+     * the underlying data is updated! The {@link #position()} of this sequence will be incremented
+     * by {@code length} bytes. The {@link #position()} of the returned sequence will be 0 and its
+     * {@link #limit()} and {@link #capacity()} will be {@code length}.
      *
-     * <p>If the sequence is a stream, then the returned sequence will be a buffer of the bytes captured by
-     * the stream, and will be effectively immutable. If the sequence is a buffer, then the returned sequence
-     * will be a dynamic view of the underlying buffer.
+     * <p>If the sequence is a stream, then the returned sequence will be a buffer of the bytes
+     * captured by the stream, and will be effectively immutable. If the sequence is a buffer, then
+     * the returned sequence will be a dynamic view of the underlying buffer.
      *
-     * <p>Bytes are read from the sequence one at a time. If there are not {@code length} bytes remaining in this
-     * sequence, then a {@link BufferUnderflowException} will be thrown. The {@link #position()} will be
-     * incremented by the number of bytes read prior to the exception.
+     * <p>Bytes are read from the sequence one at a time. If there are not {@code length} bytes
+     * remaining in this sequence, then a {@link BufferUnderflowException} will be thrown. The
+     * {@link #position()} will be incremented by the number of bytes read prior to the exception.
      *
      * @param length The non-negative length in bytes to read
      * @return new {@link RandomAccessData} containing a view on the read data
      * @throws IllegalArgumentException If length is less than 0
-     * @throws BufferUnderflowException If there are no bytes remaining in this sequence and a byte is read
+     * @throws BufferUnderflowException If there are no bytes remaining in this sequence and a byte
+     *     is read
      * @throws UncheckedIOException If an I/O error occurs
      */
     default @NonNull ReadableSequentialData view(final int length)
@@ -258,15 +277,17 @@ default long readBytes(@NonNull final BufferedData dst) throws UncheckedIOExcept
     }
 
     /**
-     * Reads the next four bytes at the current {@link #position()}, composing them into an int value according to the
-     * Java standard big-endian byte order, and then increments the {@link #position()} by four.
+     * Reads the next four bytes at the current {@link #position()}, composing them into an int
+     * value according to the Java standard big-endian byte order, and then increments the {@link
+     * #position()} by four.
      *
      * @return The int value at the current {@link #position()}
      * @throws BufferUnderflowException If there are fewer than four bytes remaining
      * @throws UncheckedIOException if an I/O error occurs
      */
     default int readInt() throws BufferUnderflowException, UncheckedIOException {
-        // False positive: bytes in "duplicated" fragments are read in opposite order for big vs. little endian
+        // False positive: bytes in "duplicated" fragments are read in opposite order for big vs.
+        // little endian
         //noinspection DuplicatedCode
         if (remaining() < Integer.BYTES) {
             throw new BufferUnderflowException();
@@ -279,17 +300,20 @@ default int readInt() throws BufferUnderflowException, UncheckedIOException {
     }
 
     /**
-     * Reads the next four bytes at the current {@link #position()}, composing them into an int value according to
-     * specified byte order, and then increments the {@link #position()} by four.
+     * Reads the next four bytes at the current {@link #position()}, composing them into an int
+     * value according to specified byte order, and then increments the {@link #position()} by four.
      *
-     * @param byteOrder the byte order, aka endian to use. Should never be null. If it is null, BIG_ENDIAN is used.
+     * @param byteOrder the byte order (endianness) to use. Should never be null; if it is null,
+     *     BIG_ENDIAN is used.
      * @return The int value at the current {@link #position()}
      * @throws BufferUnderflowException If there are fewer than four bytes remaining
      * @throws UncheckedIOException if an I/O error occurs
      */
-    default int readInt(@NonNull final ByteOrder byteOrder) throws BufferUnderflowException, UncheckedIOException {
+    default int readInt(@NonNull final ByteOrder byteOrder)
+            throws BufferUnderflowException, UncheckedIOException {
         if (byteOrder == ByteOrder.LITTLE_ENDIAN) {
-            // False positive: bytes in "duplicated" fragments are read in opposite order for big vs. little endian
+            // False positive: bytes in "duplicated" fragments are read in opposite order for big
+            // vs. little endian
             //noinspection DuplicatedCode
             if (remaining() < Integer.BYTES) {
                 throw new BufferUnderflowException();
@@ -305,8 +329,9 @@ default int readInt(@NonNull final ByteOrder byteOrder) throws BufferUnderflowEx
     }
 
     /**
-     * Reads the next four bytes at the current {@link #position()}, composing them into an unsigned int value according
-     * to the Java standard big-endian byte order, and then increments the {@link #position()} by four.
+     * Reads the next four bytes at the current {@link #position()}, composing them into an unsigned
+     * int value according to the Java standard big-endian byte order, and then increments the
+     * {@link #position()} by four.
      *
      * @return The int value at the current {@link #position()}
      * @throws BufferUnderflowException If there are fewer than four bytes remaining
@@ -317,10 +342,12 @@ default long readUnsignedInt() throws BufferUnderflowException, UncheckedIOExcep
     }
 
     /**
-     * Reads the next four bytes at the current {@link #position()}, composing them into an unsigned int value according
-     * to specified byte order, and then increments the {@link #position()} by four.
+     * Reads the next four bytes at the current {@link #position()}, composing them into an unsigned
+     * int value according to specified byte order, and then increments the {@link #position()} by
+     * four.
      *
-     * @param byteOrder the byte order, aka endian to use. Should never be null. If it is null, BIG_ENDIAN is used.
+     * @param byteOrder the byte order (endianness) to use. Should never be null; if it is null,
+     *     BIG_ENDIAN is used.
      * @return The int value at the current {@link #position()}
      * @throws BufferUnderflowException If there are fewer than four bytes remaining
      * @throws UncheckedIOException if an I/O error occurs
@@ -331,15 +358,17 @@ default long readUnsignedInt(@NonNull final ByteOrder byteOrder)
     }
 
     /**
-     * Reads the next eight bytes at the current {@link #position()}, composing them into a long value according to the
-     * Java standard big-endian byte order, and then increments the {@link #position()} by eight.
+     * Reads the next eight bytes at the current {@link #position()}, composing them into a long
+     * value according to the Java standard big-endian byte order, and then increments the {@link
+     * #position()} by eight.
      *
      * @return The long value at the current {@link #position()}
      * @throws BufferUnderflowException If there are fewer than eight bytes remaining
      * @throws UncheckedIOException if an I/O error occurs
      */
     default long readLong() throws BufferUnderflowException, UncheckedIOException {
-        // False positive: bytes in "duplicated" fragments are read in opposite order for big vs. little endian
+        // False positive: bytes in "duplicated" fragments are read in opposite order for big vs.
+        // little endian
         //noinspection DuplicatedCode
         if (remaining() < Long.BYTES) {
             throw new BufferUnderflowException();
@@ -352,28 +381,32 @@ default long readLong() throws BufferUnderflowException, UncheckedIOException {
         final byte b6 = readByte();
         final byte b7 = readByte();
         final byte b8 = readByte();
-        return (((long)b1 << 56) +
-                ((long)(b2 & 255) << 48) +
-                ((long)(b3 & 255) << 40) +
-                ((long)(b4 & 255) << 32) +
-                ((long)(b5 & 255) << 24) +
-                ((b6 & 255) << 16) +
-                ((b7 & 255) <<  8) +
-                (b8 & 255));
+        return (((long) b1 << 56)
+                + ((long) (b2 & 255) << 48)
+                + ((long) (b3 & 255) << 40)
+                + ((long) (b4 & 255) << 32)
+                + ((long) (b5 & 255) << 24)
+                + ((b6 & 255) << 16)
+                + ((b7 & 255) << 8)
+                + (b8 & 255));
     }
 
     /**
-     * Reads the next eight bytes at the current {@link #position()}, composing them into a long value according to
-     * specified byte order, and then increments the {@link #position()} by eight.
+     * Reads the next eight bytes at the current {@link #position()}, composing them into a long
+     * value according to specified byte order, and then increments the {@link #position()} by
+     * eight.
      *
-     * @param byteOrder the byte order, aka endian to use. Should never be null. If it is null, BIG_ENDIAN is used.
+     * @param byteOrder the byte order (endianness) to use. Should never be null; if it is null,
+     *     BIG_ENDIAN is used.
      * @return The long value at the current {@link #position()}
      * @throws BufferUnderflowException If there are fewer than eight bytes remaining
      * @throws UncheckedIOException if an I/O error occurs
      */
-    default long readLong(@NonNull final ByteOrder byteOrder) throws BufferUnderflowException, UncheckedIOException {
+    default long readLong(@NonNull final ByteOrder byteOrder)
+            throws BufferUnderflowException, UncheckedIOException {
         if (byteOrder == ByteOrder.LITTLE_ENDIAN) {
-            // False positive: bytes in "duplicated" fragments are read in opposite order for big vs. little endian
+            // False positive: bytes in "duplicated" fragments are read in opposite order for big
+            // vs. little endian
             //noinspection DuplicatedCode
             if (remaining() < Long.BYTES) {
                 throw new BufferUnderflowException();
@@ -386,22 +419,23 @@ default long readLong(@NonNull final ByteOrder byteOrder) throws BufferUnderflow
             final byte b3 = readByte();
             final byte b2 = readByte();
             final byte b1 = readByte();
-            return (((long) b1 << 56) +
-                    ((long) (b2 & 255) << 48) +
-                    ((long) (b3 & 255) << 40) +
-                    ((long) (b4 & 255) << 32) +
-                    ((long) (b5 & 255) << 24) +
-                    ((b6 & 255) << 16) +
-                    ((b7 & 255) << 8) +
-                    (b8 & 255));
+            return (((long) b1 << 56)
+                    + ((long) (b2 & 255) << 48)
+                    + ((long) (b3 & 255) << 40)
+                    + ((long) (b4 & 255) << 32)
+                    + ((long) (b5 & 255) << 24)
+                    + ((b6 & 255) << 16)
+                    + ((b7 & 255) << 8)
+                    + (b8 & 255));
         } else {
             return readLong();
         }
     }
 
     /**
-     * Reads the next four bytes at the current {@link #position()}, composing them into a float value according to the
-     * Java standard big-endian byte order, and then increments the {@link #position()} by four.
+     * Reads the next four bytes at the current {@link #position()}, composing them into a float
+     * value according to the Java standard big-endian byte order, and then increments the {@link
+     * #position()} by four.
      *
      * @return The float value at the current {@link #position()}
      * @throws BufferUnderflowException If there are fewer than four bytes remaining
@@ -412,21 +446,24 @@ default float readFloat() throws BufferUnderflowException, UncheckedIOException
     }
 
     /**
-     * Reads the next four bytes at the current {@link #position()}, composing them into a float value according to
-     * specified byte order, and then increments the {@link #position()} by four.
+     * Reads the next four bytes at the current {@link #position()}, composing them into a float
+     * value according to specified byte order, and then increments the {@link #position()} by four.
      *
-     * @param byteOrder the byte order, aka endian to use. Should never be null. If it is null, BIG_ENDIAN is used.
+     * @param byteOrder the byte order (endianness) to use. Should never be null; if it is null,
+     *     BIG_ENDIAN is used.
      * @return The float value at the current {@link #position()}
      * @throws BufferUnderflowException If there are fewer than four bytes remaining
      * @throws UncheckedIOException if an I/O error occurs
      */
-    default float readFloat(@NonNull final ByteOrder byteOrder) throws BufferUnderflowException, UncheckedIOException {
+    default float readFloat(@NonNull final ByteOrder byteOrder)
+            throws BufferUnderflowException, UncheckedIOException {
         return Float.intBitsToFloat(readInt(byteOrder));
     }
 
     /**
-     * Reads the next eight bytes at the current {@link #position()}, composing them into a double value according to
-     * the Java standard big-endian byte order, and then increments the {@link #position()} by eight.
+     * Reads the next eight bytes at the current {@link #position()}, composing them into a double
+     * value according to the Java standard big-endian byte order, and then increments the {@link
+     * #position()} by eight.
      *
      * @return The double value at the current {@link #position()}
      * @throws BufferUnderflowException If there are fewer than eight bytes remaining
@@ -437,10 +474,12 @@ default double readDouble() throws BufferUnderflowException, UncheckedIOExceptio
     }
 
     /**
-     * Reads the next eight bytes at the current {@link #position()}, composing them into a double value according to
-     * specified byte order, and then increments the {@link #position()} by eight.
+     * Reads the next eight bytes at the current {@link #position()}, composing them into a double
+     * value according to specified byte order, and then increments the {@link #position()} by
+     * eight.
      *
-     * @param byteOrder the byte order, aka endian to use. Should never be null. If it is null, BIG_ENDIAN is used.
+     * @param byteOrder the byte order (endianness) to use. Should never be null; if it is null,
+     *     BIG_ENDIAN is used.
      * @return The double value at the current {@link #position()}
      * @throws BufferUnderflowException If there are fewer than eight bytes remaining
      * @throws UncheckedIOException if an I/O error occurs
@@ -455,11 +494,12 @@ default double readDouble(@NonNull final ByteOrder byteOrder)
      *
      * @return integer read in var int format
      * @param zigZag use protobuf zigZag varint encoding, optimized for negative numbers
-     * @throws BufferUnderflowException If the end of the sequence is reached before the final variable byte fragment
-     *                                  is read
+     * @throws BufferUnderflowException If the end of the sequence is reached before the final
+     *     variable byte fragment is read
      * @throws UncheckedIOException if an I/O error occurs
      */
-    default int readVarInt(final boolean zigZag) throws BufferUnderflowException, UncheckedIOException {
+    default int readVarInt(final boolean zigZag)
+            throws BufferUnderflowException, UncheckedIOException {
         return (int) readVarLong(zigZag);
     }
 
@@ -468,12 +508,13 @@ default int readVarInt(final boolean zigZag) throws BufferUnderflowException, Un
      *
      * @return long read in var int format
      * @param zigZag use protobuf zigZag varint encoding, optimized for negative numbers
-     * @throws BufferUnderflowException If the end of the sequence is reached before the final variable byte fragment
-     *                                  is read
+     * @throws BufferUnderflowException If the end of the sequence is reached before the final
+     *     variable byte fragment is read
      * @throws UncheckedIOException if an I/O error occurs
      * @throws DataEncodingException if the variable long cannot be decoded
      */
-    default long readVarLong(final boolean zigZag) throws BufferUnderflowException, UncheckedIOException {
+    default long readVarLong(final boolean zigZag)
+            throws BufferUnderflowException, UncheckedIOException {
         long value = 0;
 
         for (int i = 0; i < 10; i++) {
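The hunk above is truncated inside readVarLong, so for orientation here is a sketch of the standard protobuf varint plus zig-zag decoding that the surrounding javadoc describes. It is not the PBJ implementation, and the message-taking DataEncodingException constructor is an assumption.

    static long decodeVarLong(final ReadableSequentialData in, final boolean zigZag) {
        long value = 0;
        for (int i = 0; i < 10; i++) {                  // a 64-bit varint spans at most 10 bytes
            final byte b = in.readByte();               // may throw BufferUnderflowException
            value |= (long) (b & 0x7F) << (i * 7);      // the low 7 bits carry payload
            if ((b & 0x80) == 0) {                      // high bit clear marks the final fragment
                return zigZag ? (value >>> 1) ^ -(value & 1) : value;
            }
        }
        throw new DataEncodingException("varint is longer than 10 bytes");
    }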
@@ -495,13 +536,12 @@ default InputStream asInputStream() {
         return new ReadableSequentialDataInputStream(this);
     }
 
-    /**
-     * InputStream that reads from a ReadableSequentialData
-     */
+    /** InputStream that reads from a ReadableSequentialData */
     class ReadableSequentialDataInputStream extends InputStream {
         final ReadableSequentialData sequentialData;
 
-        public ReadableSequentialDataInputStream(@NonNull final ReadableSequentialData sequentialData) {
+        public ReadableSequentialDataInputStream(
+                @NonNull final ReadableSequentialData sequentialData) {
             this.sequentialData = sequentialData;
         }
 
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/SequentialData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/SequentialData.java
index 9b05f31e..972e0b90 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/SequentialData.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/SequentialData.java
@@ -4,23 +4,24 @@
 import java.io.UncheckedIOException;
 
 /**
- * Represents sequential data which may either be buffered or streamed. Conceptually, streamed data is
- * a sequential stream of bytes, while a buffer is a sequential array of bytes. A stream <b>necessarily</b>
- * has the concept of "position" within the stream where data is being read or written to. A buffer that implements
- * {@link SequentialData} also contains a "position" within the buffer from which data is being read or written.
- * This makes it convenient to parse objects from a buffer without having to keep track of the read position
- * manually or to write data into a buffer.
+ * Represents sequential data which may either be buffered or streamed. Conceptually, streamed data
+ * is a sequential stream of bytes, while a buffer is a sequential array of bytes. A stream
+ * <b>necessarily</b> has the concept of "position" within the stream where data is being read or
+ * written to. A buffer that implements {@link SequentialData} also contains a "position" within the
+ * buffer from which data is being read or written. This makes it convenient to parse objects from a
+ * buffer without having to keep track of the read position manually or to write data into a buffer.
  *
- * <p>This interface does not itself define any methods by which data may be read or written, because a stream,
- * for example, may be readable or writable but not both. See sub-interfaces {@link ReadableSequentialData} and
- * {@link WritableSequentialData} for API for reading or writing.
+ * <p>This interface does not itself define any methods by which data may be read or written,
+ * because a stream, for example, may be readable or writable but not both. See sub-interfaces
+ * {@link ReadableSequentialData} and {@link WritableSequentialData} for API for reading or writing.
  */
 public interface SequentialData {
     /**
-     * Get the maximum number of bytes that may be in this {@link SequentialData}. For a buffer, this is the
-     * size of the buffer. For a stream, this is {@code Long.MAX_VALUE}, since the maximum potential capacity
-     * of the stream is unbounded (unless, by some other mechanism, you know ahead of time how many possible bytes
-     * there are, such as with an HTTP request with a known Content-Length header).
+     * Get the maximum number of bytes that may be in this {@link SequentialData}. For a buffer,
+     * this is the size of the buffer. For a stream, this is {@code Long.MAX_VALUE}, since the
+     * maximum potential capacity of the stream is unbounded (unless, by some other mechanism, you
+     * know ahead of time how many possible bytes there are, such as with an HTTP request with a
+     * known Content-Length header).
      *
      * <p>The capacity will never change.
      *
@@ -29,35 +30,37 @@ public interface SequentialData {
     long capacity();
 
     /**
-     * Current read (or if applicable, write) position relative to origin, which is position 0. The position will
-     * never be greater than {@link #capacity()}. It will always be non-negative.
+     * Current read (or if applicable, write) position relative to origin, which is position 0. The
+     * position will never be greater than {@link #capacity()}. It will always be non-negative.
      *
      * @return The current read position.
      */
     long position();
 
     /**
-     * The byte position that can be read up to, relative to the origin. The limit will always be greater than or equal
-     * to the {@link #position()}, and less than or equal to the {@link #capacity()}. It will therefore always be
-     * non-negative. If the limit is equal to the {@link #position()}, then there are no bytes left to
-     * ready, or no room left to write. Any attempt to read or write at the limit will throw an exception.
+     * The byte position that can be read up to, relative to the origin. The limit will always be
+     * greater than or equal to the {@link #position()}, and less than or equal to the {@link
+     * #capacity()}. It will therefore always be non-negative. If the limit is equal to the {@link
+     * #position()}, then there are no bytes left to read, or no room left to write. Any attempt to
+     * read or write at the limit will throw an exception.
      *
      * @return maximum position that can be read from origin
      */
     long limit();
 
     /**
-     * Set the limit that can be read up to, relative to origin. If less than {@link #position()} then clamp to
-     * {@link #position()}, meaning there are no bytes left to read. If greater than {@link #limit()} then clamp to
-     * the {@link #capacity()}, meaning the end of the sequence.
+     * Set the limit that can be read up to, relative to origin. If less than {@link #position()}
+     * then clamp to {@link #position()}, meaning there are no bytes left to read. If greater than
+     * {@link #capacity()} then clamp to the {@link #capacity()}, meaning the end of the sequence.
      *
      * @param limit The new limit relative to origin.
      */
     void limit(long limit);
 
     /**
-     * Returns true if there are bytes remaining between the current {@link #position()} and {@link #limit()}. If this
-     * method returns true, then there will be at least one byte available to read or write.
+     * Returns true if there are bytes remaining between the current {@link #position()} and {@link
+     * #limit()}. If this method returns true, then there will be at least one byte available to
+     * read or write.
      *
      * @return true if ({@link #limit()} - {@link #position()}) > 0
      */
@@ -66,8 +69,8 @@ default boolean hasRemaining() {
     }
 
     /**
-     * Gets the number of bytes remaining between the current {@link #position()} and {@link #limit()}. This
-     * value will always be non-negative.
+     * Gets the number of bytes remaining between the current {@link #position()} and {@link
+     * #limit()}. This value will always be non-negative.
      *
      * @return number of bytes remaining to be read
      */
@@ -76,8 +79,8 @@ default long remaining() {
     }
 
     /**
-     * Move {@link #position()} forward by {@code count} bytes. If the {@code count} would move the position past the
-     * {@link #limit()}, then a buffer overflow or underflow exception is thrown.
+     * Move {@link #position()} forward by {@code count} bytes. If the {@code count} would move the
+     * position past the {@link #limit()}, then a buffer overflow or underflow exception is thrown.
      *
      * @param count number of bytes to skip. If 0 or negative, then no bytes are skipped.
      * @throws UncheckedIOException if an I/O error occurs
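For illustration, the invariants spelled out in the javadoc above, collected into one hypothetical check (assertions enabled); this is a sketch of the documented contract, not library code.

    static void checkInvariants(final SequentialData data) {
        assert 0 <= data.position() && data.position() <= data.limit();
        assert data.limit() <= data.capacity();
        assert data.remaining() == data.limit() - data.position();
        assert data.hasRemaining() == (data.remaining() > 0);
    }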
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/UnsafeUtils.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/UnsafeUtils.java
index 48f554a5..8e39af48 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/UnsafeUtils.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/UnsafeUtils.java
@@ -8,27 +8,23 @@
 import java.nio.ByteOrder;
 import sun.misc.Unsafe;
 
-/**
- * A set of utility methods on top of sun.misc.Unsafe
- */
+/** A set of utility methods on top of sun.misc.Unsafe */
 public class UnsafeUtils {
 
     private static final Unsafe UNSAFE;
 
     /**
-     * Java and PBJ use BIG_ENDIAN, while native byte order used by Unsafe may or may not
-     * be BIG_ENDIAN. This flag indicates that if they don't match
+     * Java and PBJ use BIG_ENDIAN, while the native byte order used by Unsafe may or may not be
+     * BIG_ENDIAN. This flag is set when the two do not match.
      */
     private static final boolean NEED_CHANGE_BYTE_ORDER;
 
-    /**
-     * Field offset of the byte[] class
-     */
+    /** Field offset of the byte[] class */
     private static final int BYTE_ARRAY_BASE_OFFSET;
 
     /**
-     * Direct byte buffer "address" field offset. This is not the address of the buffer,
-     * but the offset of the field, which contains the address of the buffer
+     * Direct byte buffer "address" field offset. This is not the address of the buffer, but the
+     * offset of the field, which contains the address of the buffer
      */
     private static final long DIRECT_BYTEBUFFER_ADDRESS_OFFSET;
 
@@ -41,17 +37,17 @@ public class UnsafeUtils {
             BYTE_ARRAY_BASE_OFFSET = UNSAFE.arrayBaseOffset(byte[].class);
             final Field addressField = Buffer.class.getDeclaredField("address");
             DIRECT_BYTEBUFFER_ADDRESS_OFFSET = UNSAFE.objectFieldOffset(addressField);
-        } catch (NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException e) {
+        } catch (NoSuchFieldException
+                | SecurityException
+                | IllegalArgumentException
+                | IllegalAccessException e) {
             throw new InternalError(e);
         }
     }
 
-    private UnsafeUtils() {
-    }
+    private UnsafeUtils() {}
 
-    /**
-     * Get byte array element at a given offset. Identical to arr[offset].
-     */
+    /** Get byte array element at a given offset. Identical to arr[offset]. */
     public static byte getArrayByte(final byte[] arr, final int offset) {
         if (arr.length <= offset) {
             throw new IndexOutOfBoundsException();
@@ -60,8 +56,8 @@ public static byte getArrayByte(final byte[] arr, final int offset) {
     }
 
     /**
-     * Get byte array element at a given offset. Identical to arr[offset], but faster,
-     * because no array bounds checks are performed.
+     * Get byte array element at a given offset. Identical to arr[offset], but faster, because no
+     * array bounds checks are performed.
      *
      * <p><b>Use with caution!</b>
      */
@@ -70,8 +66,8 @@ public static byte getArrayByteNoChecks(final byte[] arr, final int offset) {
     }
 
     /**
-     * Get heap byte buffer element at a given offset. Identical to buf.get(offset). May only
-     * be called for Java heap byte buffers.
+     * Get heap byte buffer element at a given offset. Identical to buf.get(offset). May only be
+     * called for Java heap byte buffers.
      */
     public static byte getHeapBufferByte(final ByteBuffer buf, final int offset) {
         if (buf.limit() < offset + 1) {
@@ -91,8 +87,8 @@ public static byte getHeapBufferByteNoChecks(final ByteBuffer buf, final int off
     }
 
     /**
-     * Get direct byte buffer element at a given offset. Identical to buf.get(offset). May only
-     * be called for direct byte buffers
+     * Get direct byte buffer element at a given offset. Identical to buf.get(offset). May only be
+     * called for direct byte buffers
      */
     public static byte getDirectBufferByte(final ByteBuffer buf, final int offset) {
         if (buf.limit() < offset + 1) {
@@ -128,8 +124,8 @@ public static int getInt(final byte[] arr, final int offset) {
     }
 
     /**
-     * Reads a long from the given array starting at the given offset. Array bytes are
-     * interpreted in BIG_ENDIAN order.
+     * Reads a long from the given array starting at the given offset. Array bytes are interpreted
+     * in BIG_ENDIAN order.
      *
      * @param arr The byte array
      * @param offset The offset to read a long at
@@ -145,42 +141,58 @@ public static long getLong(final byte[] arr, final int offset) {
     }
 
     /**
-     * Copies heap byte buffer bytes to a given byte array. May only be called for heap
-     * byte buffers
+     * Copies heap byte buffer bytes to a given byte array. May only be called for heap byte buffers
      */
     public static void getHeapBufferToArray(
-            final ByteBuffer buffer, final long offset, final byte[] dst, final int dstOffset, final int length) {
-        UNSAFE.copyMemory(buffer.array(), BYTE_ARRAY_BASE_OFFSET + offset,
-                dst, BYTE_ARRAY_BASE_OFFSET + dstOffset, length);
+            final ByteBuffer buffer,
+            final long offset,
+            final byte[] dst,
+            final int dstOffset,
+            final int length) {
+        UNSAFE.copyMemory(
+                buffer.array(),
+                BYTE_ARRAY_BASE_OFFSET + offset,
+                dst,
+                BYTE_ARRAY_BASE_OFFSET + dstOffset,
+                length);
     }
 
     /**
-     * Copies direct byte buffer bytes to a given byte array. May only be called for direct
-     * byte buffers
+     * Copies direct byte buffer bytes to a given byte array. May only be called for direct byte
+     * buffers
      */
     public static void getDirectBufferToArray(
-            final ByteBuffer buffer, final long offset, final byte[] dst, final int dstOffset, final int length) {
+            final ByteBuffer buffer,
+            final long offset,
+            final byte[] dst,
+            final int dstOffset,
+            final int length) {
         final long address = UNSAFE.getLong(buffer, DIRECT_BYTEBUFFER_ADDRESS_OFFSET);
-        UNSAFE.copyMemory(null, address + offset,
-                dst, BYTE_ARRAY_BASE_OFFSET + dstOffset, length);
+        UNSAFE.copyMemory(null, address + offset, dst, BYTE_ARRAY_BASE_OFFSET + dstOffset, length);
     }
 
     /**
-     * Copies direct byte buffer bytes to another direct byte buffer. May only be called for
-     * direct byte buffers
+     * Copies direct byte buffer bytes to another direct byte buffer. May only be called for direct
+     * byte buffers
      */
     public static void getDirectBufferToDirectBuffer(
-            final ByteBuffer buffer, final long offset, final ByteBuffer dst, final int dstOffset, final int length) {
+            final ByteBuffer buffer,
+            final long offset,
+            final ByteBuffer dst,
+            final int dstOffset,
+            final int length) {
         final long address = UNSAFE.getLong(buffer, DIRECT_BYTEBUFFER_ADDRESS_OFFSET);
         final long dstAddress = UNSAFE.getLong(dst, DIRECT_BYTEBUFFER_ADDRESS_OFFSET);
         UNSAFE.copyMemory(null, address + offset, null, dstAddress, length);
     }
 
-    /**
-     * Copies a byte array to a direct byte buffer. May only be called for direct byte buffers
-     */
+    /** Copies a byte array to a direct byte buffer. May only be called for direct byte buffers */
     public static void putByteArrayToDirectBuffer(
-            final ByteBuffer buffer, final long offset, final byte[] src, final int srcOffset, final int length) {
+            final ByteBuffer buffer,
+            final long offset,
+            final byte[] src,
+            final int srcOffset,
+            final int length) {
         final long address = UNSAFE.getLong(buffer, DIRECT_BYTEBUFFER_ADDRESS_OFFSET);
         UNSAFE.copyMemory(src, BYTE_ARRAY_BASE_OFFSET + srcOffset, null, address + offset, length);
     }
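For illustration, a hedged sketch of a caller choosing between the heap and direct copy helpers, using only the signatures shown in this hunk. The copyOut helper is hypothetical and requires java.nio.ByteBuffer.

    static byte[] copyOut(final ByteBuffer buffer, final int length) {
        final byte[] dst = new byte[length];
        if (buffer.isDirect()) {
            UnsafeUtils.getDirectBufferToArray(buffer, 0, dst, 0, length);
        } else {
            UnsafeUtils.getHeapBufferToArray(buffer, 0, dst, 0, length);
        }
        return dst;
    }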
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/WritableSequentialData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/WritableSequentialData.java
index 311b73b9..48aaa6b3 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/WritableSequentialData.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/WritableSequentialData.java
@@ -14,17 +14,18 @@
 import java.util.Objects;
 
 /**
- * A writable {@link SequentialData}. As with {@link SequentialData}, this may be backed by a stream,
- * array, buffer, or other form of sequential data.
+ * A writable {@link SequentialData}. As with {@link SequentialData}, this may be backed by a
+ * stream, array, buffer, or other form of sequential data.
  */
 public interface WritableSequentialData extends SequentialData {
 
     /**
-     * Writes the given byte at the current {@link #position()}, and then increments the {@link #position()}.
+     * Writes the given byte at the current {@link #position()}, and then increments the {@link
+     * #position()}.
      *
      * @param b The byte to be written
-     * @throws BufferOverflowException If this buffer's current {@link #position()} is not smaller than its
-     *      {@link #limit()}
+     * @throws BufferOverflowException If this buffer's current {@link #position()} is not smaller
+     *     than its {@link #limit()}
      * @throws UncheckedIOException if an I/O error occurs
      */
     void writeByte(byte b) throws BufferOverflowException, UncheckedIOException;
@@ -60,41 +61,46 @@ default void writeByte4(byte b1, byte b2, byte b3, byte b4) {
     }
 
     /**
-     * Writes the given unsigned byte at the current {@link #position()}, and then increments the {@link #position()}.
+     * Writes the given unsigned byte at the current {@link #position()}, and then increments the
+     * {@link #position()}.
      *
-     * @param b The unsigned byte as an integer to be written Only the low 8 bits of the integer are used.
-     * @throws BufferOverflowException If this buffer's current {@link #position()} is not smaller than its
-     *      {@link #limit()}
+     * @param b The unsigned byte as an integer to be written. Only the low 8 bits of the integer are
+     *     used.
+     * @throws BufferOverflowException If this buffer's current {@link #position()} is not smaller
+     *     than its {@link #limit()}
      * @throws UncheckedIOException if an I/O error occurs
      */
-    default void writeUnsignedByte(final int b) throws BufferOverflowException, UncheckedIOException {
-        writeByte((byte)b);
+    default void writeUnsignedByte(final int b)
+            throws BufferOverflowException, UncheckedIOException {
+        writeByte((byte) b);
     }
 
     /**
-     * Writes the entire content of the given source into the sequence. The {@link #position()} is then incremented by
-     * {@code src.length}.
+     * Writes the entire content of the given source into the sequence. The {@link #position()} is
+     * then incremented by {@code src.length}.
      *
      * @param src The source array to write
      * @throws BufferOverflowException If there is insufficient space before {@link #limit()}
      * @throws UncheckedIOException if an I/O error occurs
      */
-    default void writeBytes(@NonNull final byte[] src) throws BufferOverflowException, UncheckedIOException {
+    default void writeBytes(@NonNull final byte[] src)
+            throws BufferOverflowException, UncheckedIOException {
         writeBytes(src, 0, src.length);
     }
 
     /**
-     * Write {@code length} bytes from the given array, starting at the given offset in the array and at the current
-     * {@link #position()} of this sequence. The {@link #position()} is then incremented by {@code length}.
+     * Write {@code length} bytes from the given array, starting at the given offset in the array
+     * and at the current {@link #position()} of this sequence. The {@link #position()} is then
+     * incremented by {@code length}.
      *
      * @param src The array from which bytes are to be read
-     * @param offset The offset within the array of the first byte to be read; must be non-negative and no larger
-     *                than {@code src.length}
-     * @param length The number of bytes to be read from the given array; must be non-negative and no larger
-     *                than {@code src.length - offset}
+     * @param offset The offset within the array of the first byte to be read; must be non-negative
+     *     and no larger than {@code src.length}
+     * @param length The number of bytes to be read from the given array; must be non-negative and
+     *     no larger than {@code src.length - offset}
      * @throws BufferOverflowException If there is insufficient space before {@link #limit()}
-     * @throws IndexOutOfBoundsException If the preconditions on the {@code offset} and {@code length} parameters do
-     *          not hold
+     * @throws IndexOutOfBoundsException If the preconditions on the {@code offset} and {@code
+     *     length} parameters do not hold
      * @throws UncheckedIOException if an I/O error occurs
      */
     default void writeBytes(@NonNull final byte[] src, final int offset, final int length)
@@ -109,16 +115,17 @@ default void writeBytes(@NonNull final byte[] src, final int offset, final int l
     }
 
     /**
-     * This method writes the entire content of the given {@link ByteBuffer}, all bytes between its current
-     * {@link #position()} and {@link #limit()}. The {@link #position()} of this sequence is then incremented by number
-     * of written bytes.
+     * This method writes the entire content of the given {@link ByteBuffer}, all bytes between its
+     * current {@link #position()} and {@link #limit()}. The {@link #position()} of this sequence is
+     * then incremented by number of written bytes.
      *
-     * @param src The source {@link ByteBuffer} to write, its {@link #position()} and {@link #limit()} is expected to
-     *            be set correctly
+     * @param src The source {@link ByteBuffer} to write, its {@link #position()} and {@link
+     *     #limit()} is expected to be set correctly
      * @throws BufferOverflowException If there is insufficient space before {@link #limit()}
      * @throws UncheckedIOException if an I/O error occurs
      */
-    default void writeBytes(@NonNull final ByteBuffer src) throws BufferOverflowException, UncheckedIOException {
+    default void writeBytes(@NonNull final ByteBuffer src)
+            throws BufferOverflowException, UncheckedIOException {
         if (remaining() < src.remaining()) {
             throw new BufferOverflowException();
         }
@@ -130,14 +137,15 @@ default void writeBytes(@NonNull final ByteBuffer src) throws BufferOverflowExce
 
     /**
      * Writes the entire content of the given {@link BufferedData}, all bytes between its current
-     * {@link #position()} and {@link #limit()}. The {@link #position()} of this sequence is then incremented by
-     * the number of written bytes.
+     * {@link #position()} and {@link #limit()}. The {@link #position()} of this sequence is then
+     * incremented by the number of written bytes.
      *
      * @param src The source {@link BufferedData} to write
      * @throws BufferOverflowException If there is insufficient space before {@link #limit()}
      * @throws UncheckedIOException if an I/O error occurs
      */
-    default void writeBytes(@NonNull final BufferedData src) throws BufferOverflowException, UncheckedIOException {
+    default void writeBytes(@NonNull final BufferedData src)
+            throws BufferOverflowException, UncheckedIOException {
         if (remaining() < src.remaining()) {
             throw new BufferOverflowException();
         }
@@ -148,14 +156,15 @@ default void writeBytes(@NonNull final BufferedData src) throws BufferOverflowEx
     }
 
     /**
-     * This method writes the entire content of the given {@link RandomAccessData}. The
-     * {@link #position()} is then incremented by {@code src.length()}.
+     * This method writes the entire content of the given {@link RandomAccessData}. The {@link
+     * #position()} is then incremented by {@code src.length()}.
      *
      * @param src The source {@link RandomAccessData} with bytes to be written to this sequence
      * @throws BufferOverflowException If there is insufficient space before {@link #limit()}
      * @throws UncheckedIOException if an I/O error occurs
      */
-    default void writeBytes(@NonNull final RandomAccessData src) throws BufferOverflowException, UncheckedIOException {
+    default void writeBytes(@NonNull final RandomAccessData src)
+            throws BufferOverflowException, UncheckedIOException {
         if (remaining() < src.length()) {
             throw new BufferOverflowException();
         }
@@ -166,21 +175,24 @@ default void writeBytes(@NonNull final RandomAccessData src) throws BufferOverfl
     }
 
     /**
-     * Writes the bytes from the given {@link java.io.InputStream} into this {@link WritableSequentialData}.
-     * The {@link #position()} is then incremented by the number of bytes written, which is also returned.
-     * If the end-of-stream was reached without reading data, then no change is made to the {@link #position()}
-     * and 0 is returned. There is no guarantee that we will read from the stream completely, once we get to the
-     * {@link #limit()}, we will read no more from the stream.
+     * Writes the bytes from the given {@link java.io.InputStream} into this {@link
+     * WritableSequentialData}. The {@link #position()} is then incremented by the number of bytes
+     * written, which is also returned. If the end-of-stream was reached without reading data, then
+     * no change is made to the {@link #position()} and 0 is returned. There is no guarantee that we
+     * will read from the stream completely; once we get to the {@link #limit()}, we will read no
+     * more from the stream.
      *
      * @param src The source {@link java.io.InputStream} to read bytes from
-     * @param maxLength The maximum number of bytes to read from the {@link java.io.InputStream}. If the
-     *            stream does not have this many bytes, then only those bytes available, if any,
-     *            are read. If maxLength is 0 or less, then nothing is read and 0 is returned.
-     * @return The number of bytes read from the stream, or 0 if the end of stream was reached without reading bytes.
+     * @param maxLength The maximum number of bytes to read from the {@link java.io.InputStream}. If
+     *     the stream does not have this many bytes, then only those bytes available, if any, are
+     *     read. If maxLength is 0 or less, then nothing is read and 0 is returned.
+     * @return The number of bytes read from the stream, or 0 if the end of stream was reached
+     *     without reading bytes.
      * @throws IllegalArgumentException if {@code len} is negative
      * @throws UncheckedIOException if an I/O error occurs
      */
-    default int writeBytes(@NonNull final InputStream src, final int maxLength) throws UncheckedIOException {
+    default int writeBytes(@NonNull final InputStream src, final int maxLength)
+            throws UncheckedIOException {
         // Check for a bad length or a null src
         Objects.requireNonNull(src);
         if (maxLength < 0) {
@@ -204,7 +216,8 @@ default int writeBytes(@NonNull final InputStream src, final int maxLength) thro
             final var buf = new byte[8192];
             int totalBytesRead = 0;
             while (totalBytesRead < numBytesToRead) {
-                final var maxBytesToRead = Math.toIntExact(Math.min(numBytesToRead - totalBytesRead, buf.length));
+                final var maxBytesToRead =
+                        Math.toIntExact(Math.min(numBytesToRead - totalBytesRead, buf.length));
                 final var numBytesRead = src.read(buf, 0, maxBytesToRead);
                 if (numBytesRead == -1) {
                     return totalBytesRead;
@@ -229,8 +242,8 @@ default void writeUTF8(@NonNull final String value) {
     }
 
     /**
-     * Writes four bytes containing the given int value, in the standard Java big-endian byte order, at the current
-     * {@link #position()}, and then increments the {@link #position()} by four.
+     * Writes four bytes containing the given int value, in the standard Java big-endian byte order,
+     * at the current {@link #position()}, and then increments the {@link #position()} by four.
      *
      * @param value The int value to be written
      * @throws BufferOverflowException If there are fewer than four bytes remaining
@@ -240,12 +253,13 @@ default void writeInt(final int value) throws BufferOverflowException, Unchecked
         if (remaining() < Integer.BYTES) {
             throw new BufferOverflowException();
         }
-        writeByte4((byte) (value >>> 24), (byte) (value >>> 16), (byte) (value >>> 8), (byte) (value));
+        writeByte4(
+                (byte) (value >>> 24), (byte) (value >>> 16), (byte) (value >>> 8), (byte) (value));
     }
 
     /**
-     * Writes four bytes containing the given int value, in the standard Java big-endian byte order, at the current
-     * {@link #position()}, and then increments the {@link #position()} by four.
+     * Writes four bytes containing the given int value, in the specified byte order, at the
+     * current {@link #position()}, and then increments the {@link #position()} by four.
      *
      * @param value The int value to be written
      * @param byteOrder the byte order, aka endian to use
@@ -260,28 +274,34 @@ default void writeInt(final int value, @NonNull final ByteOrder byteOrder)
             if (remaining() < Integer.BYTES) {
                 throw new BufferOverflowException();
             }
-            writeByte4((byte) (value), (byte) (value >>> 8), (byte) (value >>> 16), (byte) (value >>> 24));
+            writeByte4(
+                    (byte) (value),
+                    (byte) (value >>> 8),
+                    (byte) (value >>> 16),
+                    (byte) (value >>> 24));
         }
     }
 
     /**
-     * Writes four bytes containing the given int value, in the standard Java big-endian byte order, at the current
-     * {@link #position()}, and then increments the {@link #position()} by four.
+     * Writes four bytes containing the given unsigned int value, in the standard Java big-endian
+     * byte order, at the current {@link #position()}, and then increments the {@link #position()}
+     * by four.
      *
      * @param value The int value to be written
      * @throws BufferOverflowException If there are fewer than four bytes remaining
      * @throws UncheckedIOException if an I/O error occurs
      */
-    default void writeUnsignedInt(final long value) throws BufferOverflowException, UncheckedIOException {
+    default void writeUnsignedInt(final long value)
+            throws BufferOverflowException, UncheckedIOException {
         if (remaining() < Integer.BYTES) {
             throw new BufferOverflowException();
         }
-        writeByte4((byte) (value >>> 24), (byte) (value >>> 16), (byte) (value >>> 8), (byte) (value));
+        writeByte4(
+                (byte) (value >>> 24), (byte) (value >>> 16), (byte) (value >>> 8), (byte) (value));
     }
 
     /**
-     * Writes four bytes containing the given int value, in the standard Java big-endian byte order, at the current
-     * {@link #position()}, and then increments the {@link #position()} by four.
+     * Writes four bytes containing the given unsigned int value, in the specified byte order, at
+     * the current {@link #position()}, and then increments the {@link #position()} by four.
      *
      * @param value The int value to be written
      * @param byteOrder the byte order, aka endian to use
@@ -296,33 +316,45 @@ default void writeUnsignedInt(final long value, @NonNull final ByteOrder byteOrd
             if (remaining() < Integer.BYTES) {
                 throw new BufferOverflowException();
             }
-            writeByte4((byte) (value), (byte) (value >>> 8), (byte) (value >>> 16), (byte) (value >>> 24));
+            writeByte4(
+                    (byte) (value),
+                    (byte) (value >>> 8),
+                    (byte) (value >>> 16),
+                    (byte) (value >>> 24));
         }
     }
 
     /**
-     * Writes eight bytes containing the given long value, in the standard Java big-endian  byte order at the current
-     * {@link #position()}, and then increments the {@link #position()} by eight.
+     * Writes eight bytes containing the given long value, in the standard Java big-endian byte
+     * order at the current {@link #position()}, and then increments the {@link #position()} by
+     * eight.
      *
      * @param value The long value to be written
-     * @throws BufferOverflowException If there are fewer than eight bytes remaining before {@link #limit()}
+     * @throws BufferOverflowException If there are fewer than eight bytes remaining before {@link
+     *     #limit()}
      * @throws UncheckedIOException if an I/O error occurs
      */
     default void writeLong(final long value) throws BufferOverflowException, UncheckedIOException {
         if (remaining() < Long.BYTES) {
             throw new BufferOverflowException();
         }
-        writeByte4((byte) (value >>> 56), (byte) (value >>> 48), (byte) (value >>> 40), (byte) (value >>> 32));
-        writeByte4((byte) (value >>> 24), (byte) (value >>> 16), (byte) (value >>> 8), (byte) (value));
+        writeByte4(
+                (byte) (value >>> 56),
+                (byte) (value >>> 48),
+                (byte) (value >>> 40),
+                (byte) (value >>> 32));
+        writeByte4(
+                (byte) (value >>> 24), (byte) (value >>> 16), (byte) (value >>> 8), (byte) (value));
     }
 
     /**
-     * Writes eight bytes containing the given long value, in the specified byte order at the current  {@link #position()}, and
-     * then increments the {@link #position()} by eight.
+     * Writes eight bytes containing the given long value, in the specified byte order at the
+     * current {@link #position()}, and then increments the {@link #position()} by eight.
      *
      * @param value The long value to be written
      * @param byteOrder the byte order, aka endian to use
-     * @throws BufferOverflowException If there are fewer than eight bytes remaining before {@link #limit()}
+     * @throws BufferOverflowException If there are fewer than eight bytes remaining before {@link
+     *     #limit()}
      * @throws UncheckedIOException if an I/O error occurs
      */
     default void writeLong(final long value, @NonNull final ByteOrder byteOrder)
@@ -333,30 +365,42 @@ default void writeLong(final long value, @NonNull final ByteOrder byteOrder)
             if (remaining() < Long.BYTES) {
                 throw new BufferOverflowException();
             }
-            writeByte4((byte) (value), (byte) (value >>> 8), (byte) (value >>> 16), (byte) (value >>> 24));
-            writeByte4((byte) (value >>> 32), (byte) (value >>> 40), (byte) (value >>> 48), (byte) (value >>> 56));
+            writeByte4(
+                    (byte) (value),
+                    (byte) (value >>> 8),
+                    (byte) (value >>> 16),
+                    (byte) (value >>> 24));
+            writeByte4(
+                    (byte) (value >>> 32),
+                    (byte) (value >>> 40),
+                    (byte) (value >>> 48),
+                    (byte) (value >>> 56));
         }
     }
 
     /**
-     * Writes four bytes containing the given float value, in the standard Java big-endian byte order at the current
-     * {@link #position()}, and then increments the {@link #position()} by four.
+     * Writes four bytes containing the given float value, in the standard Java big-endian byte
+     * order at the current {@link #position()}, and then increments the {@link #position()} by
+     * four.
      *
      * @param value The float value to be written
-     * @throws BufferOverflowException If there are fewer than four bytes remaining before {@link #limit()}
+     * @throws BufferOverflowException If there are fewer than four bytes remaining before {@link
+     *     #limit()}
      * @throws UncheckedIOException if an I/O error occurs
      */
-    default void writeFloat(final float value) throws BufferOverflowException, UncheckedIOException {
+    default void writeFloat(final float value)
+            throws BufferOverflowException, UncheckedIOException {
         writeInt(Float.floatToIntBits(value));
     }
 
     /**
-     * Writes four bytes containing the given float value, in the specified byte order at the current {@link #position()}, and then
-     * increments the {@link #position()} by four.
+     * Writes four bytes containing the given float value, in the specified byte order at the
+     * current {@link #position()}, and then increments the {@link #position()} by four.
      *
      * @param value The float value to be written
      * @param byteOrder the byte order, aka endian to use
-     * @throws BufferOverflowException If there are fewer than four bytes remaining before {@link #limit()}
+     * @throws BufferOverflowException If there are fewer than four bytes remaining before {@link
+     *     #limit()}
      * @throws UncheckedIOException if an I/O error occurs
      */
     default void writeFloat(final float value, @NonNull final ByteOrder byteOrder)
@@ -365,24 +409,28 @@ default void writeFloat(final float value, @NonNull final ByteOrder byteOrder)
     }
 
     /**
-     * Writes eight bytes containing the given double value, in the standard Java big-endian byte order at the current
-     * {@link #position()}, and then increments the {@link #position()} by eight.
+     * Writes eight bytes containing the given double value, in the standard Java big-endian byte
+     * order at the current {@link #position()}, and then increments the {@link #position()} by
+     * eight.
      *
      * @param value The double value to be written
-     * @throws BufferOverflowException If there are fewer than eight bytes remaining before {@link #limit()}
+     * @throws BufferOverflowException If there are fewer than eight bytes remaining before {@link
+     *     #limit()}
      * @throws UncheckedIOException if an I/O error occurs
      */
-    default void writeDouble(final double value) throws BufferOverflowException, UncheckedIOException {
+    default void writeDouble(final double value)
+            throws BufferOverflowException, UncheckedIOException {
         writeLong(Double.doubleToLongBits(value));
     }
 
     /**
-     * Writes eight bytes containing the given double value, in the specified byte order at the current {@link #position()}, and
-     * then increments the {@link #position()} by eight.
+     * Writes eight bytes containing the given double value, in the specified byte order at the
+     * current {@link #position()}, and then increments the {@link #position()} by eight.
      *
      * @param value The double value to be written
      * @param byteOrder the byte order, aka endian to use
-     * @throws BufferOverflowException If there are fewer than eight bytes remaining before {@link #limit()}
+     * @throws BufferOverflowException If there are fewer than eight bytes remaining before {@link
+     *     #limit()}
      * @throws UncheckedIOException if an I/O error occurs
      */
     default void writeDouble(final double value, @NonNull final ByteOrder byteOrder)
@@ -398,7 +446,8 @@ default void writeDouble(final double value, @NonNull final ByteOrder byteOrder)
      *
      * @param value integer to write in var int format
      * @param zigZag use protobuf zigZag varint encoding, optimized for negative numbers
-     * @throws BufferOverflowException If there are fewer than ten bytes remaining before {@link #limit()}
+     * @throws BufferOverflowException If there are fewer than ten bytes remaining before {@link
+     *     #limit()}
      * @throws UncheckedIOException if an I/O error occurs
      */
     default void writeVarInt(final int value, final boolean zigZag)
@@ -407,11 +456,13 @@ default void writeVarInt(final int value, final boolean zigZag)
     }
 
     /**
-     * Write a 64bit protobuf varint at current {@link #position()}. A long var int can be 1 to 10 bytes.
+     * Write a 64-bit protobuf varint at current {@link #position()}. A long varint can be 1 to 10
+     * bytes.
      *
      * @param value long to write in var int format
      * @param zigZag use protobuf zigZag varint encoding, optimized for negative numbers
-     * @throws BufferOverflowException If there are fewer than eight bytes remaining before {@link #limit()}
+     * @throws BufferOverflowException If there are fewer than eight bytes remaining before {@link
+     *     #limit()}
      * @throws UncheckedIOException if an I/O error occurs
      */
     default void writeVarLong(long value, final boolean zigZag)
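
The hunks above are formatting-only, but they cover the whole primitive-writing surface of WritableSequentialData. A minimal usage sketch (illustrative, not part of this patch), assuming the BufferedData.allocate factory shown further down:

    import com.hedera.pbj.runtime.io.buffer.BufferedData;

    public class WriterSketch {
        public static void main(String[] args) {
            final BufferedData out = BufferedData.allocate(32);
            out.writeByte((byte) 0x01);     // 1 byte
            out.writeInt(42);               // 4 bytes, big-endian by default
            out.writeVarInt(300, false);    // 2-byte protobuf varint, no zigzag
            out.flip();                     // limit = position, position = 0
            System.out.println(out.readByte());        // 1
            System.out.println(out.readInt());         // 42
            System.out.println(out.readVarInt(false)); // 300
        }
    }
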
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/BufferedData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/BufferedData.java
index af9c8fb3..0872a4b3 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/BufferedData.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/BufferedData.java
@@ -8,8 +8,8 @@
 import edu.umd.cs.findbugs.annotations.NonNull;
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.UncheckedIOException;
 import java.io.OutputStream;
+import java.io.UncheckedIOException;
 import java.nio.BufferOverflowException;
 import java.nio.BufferUnderflowException;
 import java.nio.ByteBuffer;
@@ -18,15 +18,18 @@
 import java.nio.channels.WritableByteChannel;
 
 /**
- * A buffer backed by a {@link ByteBuffer} that is a {@link BufferedSequentialData} (and therefore contains
- * a "position" cursor into the data), a {@link ReadableSequentialData} (and therefore can be read from),
- * a {@link WritableSequentialData} (and therefore can be written to), and a {@link RandomAccessData} (and therefore can
- * be accessed at any position).
+ * A buffer backed by a {@link ByteBuffer} that is a {@link BufferedSequentialData} (and therefore
+ * contains a "position" cursor into the data), a {@link ReadableSequentialData} (and therefore can
+ * be read from), a {@link WritableSequentialData} (and therefore can be written to), and a {@link
+ * RandomAccessData} (and therefore can be accessed at any position).
  *
  * <p>This class is the most commonly used for buffered read/write data.
  */
 public sealed class BufferedData
-        implements BufferedSequentialData, ReadableSequentialData, WritableSequentialData, RandomAccessData
+        implements BufferedSequentialData,
+                ReadableSequentialData,
+                WritableSequentialData,
+                RandomAccessData
         permits ByteArrayBufferedData, DirectBufferedData {
 
     /** Single instance of an empty buffer we can use anywhere we need an empty read only buffer */
@@ -36,9 +39,10 @@ public sealed class BufferedData
     /**
      * {@link ByteBuffer} used as backing buffer for this instance.
      *
-     * <p>The buffer may be direct, or may be on the heap. It may also be a "view" of another buffer. The ByteBuffer has
-     * an inner array, which can be accessed directly. If it is, you MUST BE VERY CAREFUL to take the array offset into
-     * account, otherwise you will read out of bounds of the view.
+     * <p>The buffer may be direct, or may be on the heap. It may also be a "view" of another
+     * buffer. The ByteBuffer has an inner array, which can be accessed directly. If it is, you MUST
+     * BE VERY CAREFUL to take the array offset into account, otherwise you will read out of bounds
+     * of the view.
      */
     protected final ByteBuffer buffer;
 
@@ -49,8 +53,10 @@ public sealed class BufferedData
      */
     protected BufferedData(@NonNull final ByteBuffer buffer) {
         this.buffer = buffer;
-        // We switch the buffer to BIG_ENDIAN so that all our normal "get/read" methods can assume they are in
-        // BIG_ENDIAN mode, reducing the boilerplate around those methods. This necessarily means the LITTLE_ENDIAN
+        // We switch the buffer to BIG_ENDIAN so that all our normal "get/read" methods can assume
+        // they are in BIG_ENDIAN mode, reducing the boilerplate around those methods.
+        // This necessarily means the LITTLE_ENDIAN
         // methods will be slower. We're assuming BIG_ENDIAN is what we want to optimize for.
         this.buffer.order(BIG_ENDIAN);
     }
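
The constructor comment above explains why the backing buffer is forced to BIG_ENDIAN. A small sketch of the observable behavior (illustrative only), using the writeInt overloads documented earlier in this patch:

    import com.hedera.pbj.runtime.io.buffer.BufferedData;
    import java.nio.ByteOrder;

    public class ByteOrderSketch {
        public static void main(String[] args) {
            final BufferedData buf = BufferedData.allocate(8);
            buf.writeInt(1);                          // default path: big-endian
            buf.writeInt(1, ByteOrder.LITTLE_ENDIAN); // explicit little-endian
            System.out.println(buf.getByte(3)); // 1: low byte last in the big-endian word
            System.out.println(buf.getByte(4)); // 1: low byte first in the little-endian word
        }
    }
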
@@ -59,8 +65,8 @@ protected BufferedData(@NonNull final ByteBuffer buffer) {
     // Static Builder Methods
 
     /**
-     * Wrap an existing allocated {@link ByteBuffer}. No copy is made. DO NOT modify this buffer after having wrapped
-     * it.
+     * Wrap an existing allocated {@link ByteBuffer}. No copy is made. DO NOT modify this buffer
+     * after having wrapped it.
      *
      * @param buffer the {@link ByteBuffer} to wrap
      * @return new instance using {@code buffer} as its data buffer
@@ -78,10 +84,11 @@ public static BufferedData wrap(@NonNull final ByteBuffer buffer) {
     }
 
     /**
-     * Wrap an existing allocated byte[]. No copy is made. DO NOT modify this array after having wrapped it.
+     * Wrap an existing allocated byte[]. No copy is made. DO NOT modify this array after having
+     * wrapped it.
      *
-     * <p>The current position of the created {@link BufferedData} will be 0, the length and capacity will
-     * be the length of the wrapped byte array.
+     * <p>The current position of the created {@link BufferedData} will be 0, the length and
+     * capacity will be the length of the wrapped byte array.
      *
      * @param array the byte[] to wrap
      * @return new BufferedData using {@code array} as its data buffer
@@ -92,13 +99,16 @@ public static BufferedData wrap(@NonNull final byte[] array) {
     }
 
     /**
-     * Wrap an existing allocated byte[]. No copy is made. DO NOT modify this array after having wrapped it.
+     * Wrap an existing allocated byte[]. No copy is made. DO NOT modify this array after having
+     * wrapped it.
      *
-     * <p>The current position of the created {@link BufferedData} will be {@code offset}, the length will be
-     * set to {@code offset} + {@code len}, and capacity will be the length of the wrapped byte array.
+     * <p>The current position of the created {@link BufferedData} will be {@code offset}, the
+     * length will be set to {@code offset} + {@code len}, and capacity will be the length of the
+     * wrapped byte array.
      *
      * @param array the byte[] to wrap
-     * @param offset the offset into the byte array which will form the origin of this {@link BufferedData}.
+     * @param offset the offset into the byte array which will form the origin of this {@link
+     *     BufferedData}.
      * @param len the length of the {@link BufferedData} in bytes.
      * @return new BufferedData using {@code array} as its data buffer
      */
@@ -119,11 +129,11 @@ public static BufferedData allocate(final int size) {
     }
 
     /**
-     * Allocate a new buffered data object with new memory, off the Java heap. Off heap has higher cost of allocation
-     * and garbage collection but is much faster to read and write to. It should be used for long-lived buffers where
-     * performance is critical. On heap is slower for read and writes but cheaper to allocate and garbage collect.
-     * Off-heap comes from different memory allocation that needs to be manually managed so make sure we have space
-     * for it before using.
+     * Allocate a new buffered data object with new memory, off the Java heap. Off-heap memory has
+     * a higher cost of allocation and garbage collection but is much faster to read and write. It
+     * should be used for long-lived buffers where performance is critical. On-heap memory is
+     * slower for reads and writes but cheaper to allocate and garbage collect. Off-heap memory
+     * comes from a separate allocation pool that must be managed manually, so make sure there is
+     * space for it before using it.
      *
      * @param size size of new buffer in bytes
      * @return a new allocated BufferedData
@@ -140,7 +150,8 @@ public static BufferedData allocateOffHeap(final int size) {
      * Exposes this {@link BufferedData} as an {@link InputStream}. This is a zero-copy operation.
      * The {@link #position()} and {@link #limit()} are **IGNORED**.
      *
-     * @return An {@link InputStream} that streams over the full set of data in this {@link BufferedData}.
+     * @return An {@link InputStream} that streams over the full set of data in this {@link
+     *     BufferedData}.
      */
     @NonNull
     public InputStream toInputStream() {
@@ -162,14 +173,16 @@ public int read() throws IOException {
             }
 
             @Override
-            public int read(@NonNull final byte[] b, final int off, final int len) throws IOException {
+            public int read(@NonNull final byte[] b, final int off, final int len)
+                    throws IOException {
                 final var remaining = length - pos;
                 if (remaining <= 0) {
                     return -1;
                 }
 
                 try {
-                    // We know for certain int is big enough because the min of an int and long will be an int
+                    // We know for certain int is big enough because the min of an int and long will
+                    // be an int
                     final int toRead = (int) Math.min(len, remaining);
                     getBytes(pos, b, off, toRead);
                     pos += toRead;
@@ -252,50 +265,38 @@ public long capacity() {
         return buffer.capacity();
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public long position() {
         return buffer.position();
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public long limit() {
         return buffer.limit();
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void limit(final long limit) {
         final var lim = Math.min(capacity(), Math.max(limit, position()));
         buffer.limit((int) lim);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public boolean hasRemaining() {
         return buffer.hasRemaining();
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public long remaining() {
         return buffer.remaining();
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void skip(final long count) {
         if (count > Integer.MAX_VALUE || (int) count > buffer.remaining()) {
@@ -311,17 +312,15 @@ public void skip(final long count) {
     // ================================================================================================================
     // BufferedSequentialData Methods
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void position(final long position) {
         buffer.position(Math.toIntExact(position));
     }
 
     /**
-     * Set the limit to current position and position to origin. This is useful when you have just finished writing
-     * into a buffer and want to flip it ready to read back from, or vice versa.
+     * Set the limit to current position and position to origin. This is useful when you have just
+     * finished writing into a buffer and want to flip it ready to read back from, or vice versa.
      */
     @Override
     public void flip() {
@@ -329,7 +328,8 @@ public void flip() {
     }
 
     /**
-     * Reset position to origin and limit to capacity, allowing this buffer to be read or written again
+     * Reset position to origin and limit to capacity, allowing this buffer to be read or written
+     * again
      */
     @Override
     public void reset() {
@@ -337,7 +337,8 @@ public void reset() {
     }
 
     /**
-     * Reset position to origin and leave limit alone, allowing this buffer to be read again with existing limit
+     * Reset position to origin and leave limit alone, allowing this buffer to be read again with
+     * existing limit
      */
     @Override
     public void resetPosition() {
@@ -362,7 +363,11 @@ public byte getByte(final long offset) {
 
     /** {@inheritDoc} */
     @Override
-    public long getBytes(final long offset, @NonNull final byte[] dst, final int dstOffset, final int maxLength) {
+    public long getBytes(
+            final long offset,
+            @NonNull final byte[] dst,
+            final int dstOffset,
+            final int maxLength) {
         if (maxLength < 0) {
             throw new IllegalArgumentException("Negative maxLength not allowed");
         }
@@ -385,7 +390,8 @@ public long getBytes(final long offset, @NonNull final ByteBuffer dst) {
     @Override
     public long getBytes(final long offset, @NonNull final BufferedData dst) {
         final var len = Math.min(dst.remaining(), length() - offset);
-        dst.buffer.put(dst.buffer.position(), buffer, Math.toIntExact(offset), Math.toIntExact(len));
+        dst.buffer.put(
+                dst.buffer.position(), buffer, Math.toIntExact(offset), Math.toIntExact(len));
         return len;
     }
 
@@ -400,7 +406,8 @@ public Bytes getBytes(final long offset, final long length) {
         if (length() - offset < length) {
             throw new BufferUnderflowException();
         }
-        // It is vital that we always copy here, we can never assume ownership of the underlying buffer
+        // It is vital that we always copy here; we can never assume ownership of the underlying
+        // buffer
         final var copy = new byte[len];
         buffer.get(Math.toIntExact(offset), copy, 0, len);
         return Bytes.wrap(copy);
@@ -597,9 +604,7 @@ public int readInt() {
         return buffer.getInt();
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public int readInt(@NonNull final ByteOrder byteOrder) {
         final var order = buffer.order();
@@ -611,25 +616,19 @@ public int readInt(@NonNull final ByteOrder byteOrder) {
         }
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public long readUnsignedInt() {
         return Integer.toUnsignedLong(buffer.getInt());
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public long readLong() {
         return buffer.getLong();
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public long readLong(@NonNull final ByteOrder byteOrder) {
         final var order = buffer.order();
@@ -641,17 +640,13 @@ public long readLong(@NonNull final ByteOrder byteOrder) {
         }
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public float readFloat() {
         return buffer.getFloat();
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public float readFloat(@NonNull final ByteOrder byteOrder) {
         final var order = buffer.order();
@@ -663,17 +658,13 @@ public float readFloat(@NonNull final ByteOrder byteOrder) {
         }
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public double readDouble() {
         return buffer.getDouble();
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public double readDouble(@NonNull final ByteOrder byteOrder) {
         final var order = buffer.order();
@@ -688,32 +679,24 @@ public double readDouble(@NonNull final ByteOrder byteOrder) {
     // ================================================================================================================
     // DataOutput Write Methods
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeByte(final byte b) {
         buffer.put(b);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     public void writeUnsignedByte(final int b) {
         buffer.put((byte) b);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeBytes(@NonNull final byte[] src) {
         buffer.put(src);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeBytes(@NonNull final byte[] src, final int offset, final int length) {
         if (length < 0) {
@@ -722,9 +705,7 @@ public void writeBytes(@NonNull final byte[] src, final int offset, final int le
         buffer.put(src, offset, length);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeBytes(@NonNull final ByteBuffer src) {
         if ((limit() - position()) < src.remaining()) {
@@ -733,9 +714,7 @@ public void writeBytes(@NonNull final ByteBuffer src) {
         buffer.put(src);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeBytes(@NonNull final BufferedData src) {
         if ((limit() - position()) < src.remaining()) {
@@ -744,9 +723,7 @@ public void writeBytes(@NonNull final BufferedData src) {
         buffer.put(src.buffer);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeBytes(@NonNull final RandomAccessData src) {
         if (src instanceof Bytes buf) {
@@ -759,99 +736,77 @@ public void writeBytes(@NonNull final RandomAccessData src) {
         }
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeInt(final int value) {
         buffer.order(BIG_ENDIAN);
         buffer.putInt(value);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeInt(final int value, @NonNull final ByteOrder byteOrder) {
         buffer.order(byteOrder);
         buffer.putInt(value);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeUnsignedInt(final long value) {
         buffer.order(BIG_ENDIAN);
         buffer.putInt((int) value);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeUnsignedInt(final long value, @NonNull final ByteOrder byteOrder) {
         buffer.order(byteOrder);
         buffer.putInt((int) value);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeLong(final long value) {
         buffer.order(BIG_ENDIAN);
         buffer.putLong(value);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeLong(final long value, @NonNull final ByteOrder byteOrder) {
         buffer.order(byteOrder);
         buffer.putLong(value);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeFloat(final float value) {
         buffer.order(BIG_ENDIAN);
         buffer.putFloat(value);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeFloat(final float value, @NonNull final ByteOrder byteOrder) {
         buffer.order(byteOrder);
         buffer.putFloat(value);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeDouble(final double value) {
         buffer.order(BIG_ENDIAN);
         buffer.putDouble(value);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeDouble(final double value, @NonNull final ByteOrder byteOrder) {
         buffer.order(byteOrder);
         buffer.putDouble(value);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeVarLong(long value, final boolean zigZag) {
         if (zigZag) {
@@ -868,9 +823,7 @@ public void writeVarLong(long value, final boolean zigZag) {
         }
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeTo(@NonNull OutputStream outStream) {
         try {
@@ -881,9 +834,7 @@ public void writeTo(@NonNull OutputStream outStream) {
         }
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeTo(@NonNull OutputStream outStream, int offset, int length) {
         validateCanRead(offset, length);
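
BufferedData's toInputStream, documented above as a zero-copy view that ignores position() and limit(), can be exercised with a short sketch (illustrative, not part of this patch):

    import com.hedera.pbj.runtime.io.buffer.BufferedData;
    import java.io.IOException;
    import java.io.InputStream;

    public class InputStreamSketch {
        public static void main(String[] args) throws IOException {
            // wrap() makes no copy; the array backs the BufferedData directly.
            final BufferedData data = BufferedData.wrap(new byte[] {10, 20, 30});
            try (InputStream in = data.toInputStream()) {
                int b;
                while ((b = in.read()) != -1) {
                    System.out.println(b); // prints 10, 20, 30
                }
            }
        }
    }
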
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/BufferedSequentialData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/BufferedSequentialData.java
index 2e61736e..f4e616a5 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/BufferedSequentialData.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/BufferedSequentialData.java
@@ -4,15 +4,16 @@
 import com.hedera.pbj.runtime.io.SequentialData;
 
 /**
- * Represents buffered {@link SequentialData}. The buffer may be on-heap or off-heap, backed by a byte array or a
- * {@link java.nio.ByteBuffer} or memory-mapped file, or any other form of buffered data. {@link BufferedSequentialData}
- * has a maximum {@link #capacity()} (or size) supported by the buffer.
+ * Represents buffered {@link SequentialData}. The buffer may be on-heap or off-heap, backed by a
+ * byte array or a {@link java.nio.ByteBuffer} or memory-mapped file, or any other form of buffered
+ * data. {@link BufferedSequentialData} has a maximum {@link #capacity()} (or size) supported by the
+ * buffer.
  */
 public interface BufferedSequentialData extends SequentialData, RandomAccessData {
     /**
-     * Sets the {@link #position()} to the given value and leaves the {@link #limit()} alone. The position must be
-     * non-negative and no larger than the {@link #limit()}. If set to {@link #limit()}, then there will be no
-     * remaining room for reading or writing from the buffer.
+     * Sets the {@link #position()} to the given value and leaves the {@link #limit()} alone. The
+     * position must be non-negative and no larger than the {@link #limit()}. If set to {@link
+     * #limit()}, then there will be no remaining room for reading or writing from the buffer.
      *
      * @param position the new position
      * @throws IllegalArgumentException if the position is negative or greater than the limit
@@ -20,21 +21,22 @@ public interface BufferedSequentialData extends SequentialData, RandomAccessData
     void position(long position);
 
     /**
-     * Set the {@link #limit()} to the current {@link #position()} and the {@link #position()} to the origin. This is
-     * useful when you have just finished writing into a buffer and want to flip it to be ready to read back from, or
-     * vice versa.
+     * Set the {@link #limit()} to the current {@link #position()} and the {@link #position()} to
+     * the origin. This is useful when you have just finished writing into a buffer and want to flip
+     * it to be ready to read back from, or vice versa.
      */
     void flip();
 
     /**
-     * Reset the {@link #position()} to the origin and the {@link #limit()} to the {@link #capacity()}, allowing this
-     * buffer to be read or written again, such that the entire buffer can be used.
+     * Reset the {@link #position()} to the origin and the {@link #limit()} to the {@link
+     * #capacity()}, allowing this buffer to be read or written again, such that the entire buffer
+     * can be used.
      */
     void reset();
 
     /**
-     * Reset the {@link #position()} to the origin and leave the {@link #limit()} alone, allowing this buffer to be
-     * read again with the existing {@link #limit()}.
+     * Reset the {@link #position()} to the origin and leave the {@link #limit()} alone, allowing
+     * this buffer to be read again with the existing {@link #limit()}.
      */
     void resetPosition();
 }
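
The flip/reset/resetPosition contract spelled out above is easiest to see side by side. A minimal sketch (illustrative only) using the BufferedData implementation from this patch:

    import com.hedera.pbj.runtime.io.buffer.BufferedData;

    public class CursorSketch {
        public static void main(String[] args) {
            final BufferedData buf = BufferedData.allocate(16);
            buf.writeInt(7);                      // position = 4, limit = 16
            buf.flip();                           // position = 0, limit = 4
            System.out.println(buf.readInt());    // 7, position back to 4
            buf.resetPosition();                  // position = 0, limit stays 4
            System.out.println(buf.readInt());    // 7 again
            buf.reset();                          // position = 0, limit = capacity
            System.out.println(buf.remaining());  // 16
        }
    }
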
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/ByteArrayBufferedData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/ByteArrayBufferedData.java
index ab2b6b28..f72bc219 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/ByteArrayBufferedData.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/ByteArrayBufferedData.java
@@ -14,8 +14,8 @@
 
 /**
  * BufferedData subclass for instances backed by a byte array. Provides slightly more optimized
- * versions of several methods to get / read / write bytes using {@link System#arraycopy} and
- * direct array reads / writes.
+ * versions of several methods to get / read / write bytes using {@link System#arraycopy} and direct
+ * array reads / writes.
  */
 final class ByteArrayBufferedData extends BufferedData {
 
@@ -28,7 +28,8 @@ final class ByteArrayBufferedData extends BufferedData {
     ByteArrayBufferedData(final ByteBuffer buffer) {
         super(buffer);
         if (!buffer.hasArray()) {
-            throw new IllegalArgumentException("Cannot create a ByteArrayBufferedData over a buffer with no array");
+            throw new IllegalArgumentException(
+                    "Cannot create a ByteArrayBufferedData over a buffer with no array");
         }
         this.array = buffer.array();
         this.arrayOffset = buffer.arrayOffset();
@@ -50,9 +51,7 @@ public String toString() {
         return sb.toString();
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public boolean contains(final long offset, @NonNull final byte[] bytes) {
         checkOffset(offset, length());
@@ -67,20 +66,20 @@ public boolean contains(final long offset, @NonNull final byte[] bytes) {
         return Arrays.equals(array, fromThisIndex, fromToIndex, bytes, 0, len);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public byte getByte(final long offset) {
         checkOffset(offset, length());
         return array[Math.toIntExact(arrayOffset + offset)];
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
-    public long getBytes(final long offset, @NonNull final byte[] dst, final int dstOffset, final int maxLength) {
+    public long getBytes(
+            final long offset,
+            @NonNull final byte[] dst,
+            final int dstOffset,
+            final int maxLength) {
         validateLen(maxLength);
         checkOffset(offset);
         final long len = Math.min(maxLength, length() - offset);
@@ -88,13 +87,12 @@ public long getBytes(final long offset, @NonNull final byte[] dst, final int dst
         if (len == 0) {
             return 0;
         }
-        System.arraycopy(array, Math.toIntExact(arrayOffset + offset), dst, dstOffset, Math.toIntExact(len));
+        System.arraycopy(
+                array, Math.toIntExact(arrayOffset + offset), dst, dstOffset, Math.toIntExact(len));
         return len;
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public long getBytes(final long offset, @NonNull final ByteBuffer dst) {
         if (!dst.hasArray()) {
@@ -106,13 +104,15 @@ public long getBytes(final long offset, @NonNull final ByteBuffer dst) {
         final int dstPos = dst.position();
         final int dstArrOffset = dst.arrayOffset();
         System.arraycopy(
-                array, Math.toIntExact(arrayOffset + offset), dstArr, dstArrOffset + dstPos, Math.toIntExact(len));
+                array,
+                Math.toIntExact(arrayOffset + offset),
+                dstArr,
+                dstArrOffset + dstPos,
+                Math.toIntExact(len));
         return len;
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @NonNull
     @Override
     public Bytes getBytes(final long offset, final long len) {
@@ -126,17 +126,13 @@ public Bytes getBytes(final long offset, final long len) {
         return Bytes.wrap(res);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public int getVarInt(final long offset, final boolean zigZag) {
         return (int) getVar(Math.toIntExact(offset), zigZag);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public long getVarLong(final long offset, final boolean zigZag) {
         return getVar(Math.toIntExact(offset), zigZag);
@@ -162,12 +158,12 @@ private long getVar(final int offset, final boolean zigZag) {
                 return zigZag ? (value >>> 1) ^ -(value & 1) : value;
             }
         }
-        throw (i == 10) ? new DataEncodingException("Malformed var int") : new BufferUnderflowException();
+        throw (i == 10)
+                ? new DataEncodingException("Malformed var int")
+                : new BufferUnderflowException();
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public byte readByte() {
         if (remaining() == 0) {
@@ -179,9 +175,7 @@ public byte readByte() {
         return res;
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public long readBytes(@NonNull byte[] dst, int offset, int maxLength) {
         validateLen(maxLength);
@@ -195,9 +189,7 @@ public long readBytes(@NonNull byte[] dst, int offset, int maxLength) {
         return len;
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public long readBytes(@NonNull final ByteBuffer dst) {
         if (!dst.hasArray()) {
@@ -208,15 +200,14 @@ public long readBytes(@NonNull final ByteBuffer dst) {
         final byte[] dstArr = dst.array();
         final int dstPos = dst.position();
         final int dstArrOffset = dst.arrayOffset();
-        System.arraycopy(array, arrayOffset + pos, dstArr, dstArrOffset + dstPos, Math.toIntExact(len));
+        System.arraycopy(
+                array, arrayOffset + pos, dstArr, dstArrOffset + dstPos, Math.toIntExact(len));
         buffer.position(Math.toIntExact(pos + len));
         dst.position(Math.toIntExact(dstPos + len));
         return len;
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @NonNull
     @Override
     public Bytes readBytes(final int len) {
@@ -232,17 +223,13 @@ public Bytes readBytes(final int len) {
         return Bytes.wrap(res);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public int readVarInt(final boolean zigZag) {
         return (int) readVar(zigZag);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public long readVarLong(final boolean zigZag) {
         return readVar(zigZag);
@@ -268,12 +255,12 @@ private long readVar(final boolean zigZag) {
                 return zigZag ? (value >>> 1) ^ -(value & 1) : value;
             }
         }
-        throw (i == 10) ? new DataEncodingException("Malformed var int") : new BufferUnderflowException();
+        throw (i == 10)
+                ? new DataEncodingException("Malformed var int")
+                : new BufferUnderflowException();
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeByte(final byte b) {
         validateCanWrite(1);
@@ -322,9 +309,7 @@ public void writeBytes(@NonNull final byte[] src, final int offset, final int le
         buffer.position(pos + len);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeBytes(@NonNull final ByteBuffer src) {
         if (!src.hasArray()) {
@@ -337,14 +322,13 @@ public void writeBytes(@NonNull final ByteBuffer src) {
         final byte[] srcArr = src.array();
         final int srcArrOffset = src.arrayOffset();
         final int srcPos = src.position();
-        System.arraycopy(srcArr, srcArrOffset + srcPos, array, arrayOffset + pos, Math.toIntExact(len));
+        System.arraycopy(
+                srcArr, srcArrOffset + srcPos, array, arrayOffset + pos, Math.toIntExact(len));
         src.position(Math.toIntExact(srcPos + len));
         buffer.position(Math.toIntExact(pos + len));
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public int writeBytes(@NonNull final InputStream src, final int maxLength) {
         // Check for a bad length or a null src
@@ -367,7 +351,8 @@ public int writeBytes(@NonNull final InputStream src, final int maxLength) {
             int pos = buffer.position();
             int totalBytesRead = 0;
             while (totalBytesRead < numBytesToRead) {
-                int bytesRead = src.read(array, pos + arrayOffset, (int) numBytesToRead - totalBytesRead);
+                int bytesRead =
+                        src.read(array, pos + arrayOffset, (int) numBytesToRead - totalBytesRead);
                 if (bytesRead == -1) {
                     buffer.position(pos);
                     return totalBytesRead;
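
The getVar/readVar helpers above decode protobuf varints and, when requested, undo zigzag encoding with (value >>> 1) ^ -(value & 1). Below is a standalone sketch of that transform; the encoder shown is the standard protobuf zigzag and is an assumption for illustration, not code from this patch.

    public class ZigZagSketch {
        // Standard protobuf zigzag: maps small negative longs to small unsigned values.
        static long encode(final long n) {
            return (n << 1) ^ (n >> 63);
        }

        // Mirrors the decode expression used in readVar/getVar above.
        static long decode(final long z) {
            return (z >>> 1) ^ -(z & 1);
        }

        public static void main(String[] args) {
            for (long n : new long[] {0, -1, 1, -2, 2}) {
                System.out.println(n + " -> " + encode(n) + " -> " + decode(encode(n)));
            }
        }
    }
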
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/Bytes.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/Bytes.java
index 8a5ccbeb..9f1879e0 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/Bytes.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/Bytes.java
@@ -1,6 +1,8 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.buffer;
 
+import static java.util.Objects.requireNonNull;
+
 import com.hedera.pbj.runtime.io.DataEncodingException;
 import com.hedera.pbj.runtime.io.ReadableSequentialData;
 import com.hedera.pbj.runtime.io.UnsafeUtils;
@@ -15,19 +17,17 @@
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
 import java.nio.charset.StandardCharsets;
+import java.security.MessageDigest;
 import java.security.Signature;
 import java.security.SignatureException;
 import java.util.Arrays;
 import java.util.Base64;
-import java.util.HexFormat;
-
-import java.security.MessageDigest;
 import java.util.Comparator;
-
-import static java.util.Objects.requireNonNull;
+import java.util.HexFormat;
 
 /**
- * An immutable representation of a byte array. This class is designed to be efficient and usable across threads.
+ * An immutable representation of a byte array. This class is designed to be efficient and usable
+ * across threads.
  */
 @SuppressWarnings("unused")
 public final class Bytes implements RandomAccessData, Comparable<Bytes> {
@@ -36,37 +36,41 @@ public final class Bytes implements RandomAccessData, Comparable<Bytes> {
     public static final Bytes EMPTY = new Bytes(new byte[0]);
 
     /** Sorts {@link Bytes} according to their length, shorter first. */
-    public static final Comparator<Bytes> SORT_BY_LENGTH = (Bytes o1, Bytes o2) ->
-            Comparator.comparingLong(Bytes::length).compare(o1, o2);
+    public static final Comparator<Bytes> SORT_BY_LENGTH =
+            (Bytes o1, Bytes o2) -> Comparator.comparingLong(Bytes::length).compare(o1, o2);
 
-    /** Sorts {@link Bytes} according to their byte values, lower valued bytes first.
-      * Bytes are compared on a signed basis.
-      */
+    /**
+     * Sorts {@link Bytes} according to their byte values, lower valued bytes first. Bytes are
+     * compared on a signed basis.
+     */
     public static final Comparator<Bytes> SORT_BY_SIGNED_VALUE = valueSorter(Byte::compare);
 
-    /** Sorts {@link Bytes} according to their byte values, lower valued bytes first.
-      * Bytes are compared on an unsigned basis
-      */
-    public static final Comparator<Bytes> SORT_BY_UNSIGNED_VALUE = valueSorter(Byte::compareUnsigned);
+    /**
+     * Sorts {@link Bytes} according to their byte values, lower valued bytes first. Bytes are
+     * compared on an unsigned basis.
+     */
+    public static final Comparator<Bytes> SORT_BY_UNSIGNED_VALUE =
+            valueSorter(Byte::compareUnsigned);
 
     /** byte[] used as backing buffer */
     private final byte[] buffer;
 
     /**
-     * The offset within the backing buffer where this {@link Bytes} starts. To prevent array copies, we sometimes
-     * want to have a "view" or "slice" of another buffer, where we begin at some offset and have a length.
+     * The offset within the backing buffer where this {@link Bytes} starts. To prevent array
+     * copies, we sometimes want to have a "view" or "slice" of another buffer, where we begin at
+     * some offset and have a length.
      */
     private final int start;
 
     /**
-     * The number of bytes in this {@link Bytes}. To prevent array copies, we sometimes want to have a "view" or
-     * "slice" of another buffer, where we begin at some offset and have a length.
+     * The number of bytes in this {@link Bytes}. To prevent array copies, we sometimes want to have
+     * a "view" or "slice" of another buffer, where we begin at some offset and have a length.
      */
     private final int length;
 
     /**
-     * Create a new ByteOverByteBuffer over given byte array. This does not copy data it just wraps so
-     * any changes to arrays contents will be effected here.
+     * Create a new Bytes instance over the given byte array. This does not copy data, it just
+     * wraps the array, so any changes to the array's contents will be reflected here.
      *
      * @param data The data t
      */
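
As a usage note for the comparators above, here is a small sketch showing how SORT_BY_LENGTH and SORT_BY_UNSIGNED_VALUE might be applied to a list of Bytes values; the sample data is made up and the orderings simply follow the javadoc above:

    import com.hedera.pbj.runtime.io.buffer.Bytes;
    import java.util.ArrayList;
    import java.util.List;

    public class BytesSortSketch {
        public static void main(String[] args) {
            final List<Bytes> values = new ArrayList<>(List.of(
                    Bytes.wrap(new byte[] {0x7F, 0x00}),
                    Bytes.wrap(new byte[] {(byte) 0x80}),
                    Bytes.wrap(new byte[] {0x01})));

            // Shorter values first, regardless of content.
            values.sort(Bytes.SORT_BY_LENGTH);

            // Byte-by-byte, treating each byte as unsigned, so 0x80 sorts after 0x7F.
            values.sort(Bytes.SORT_BY_UNSIGNED_VALUE);
        }
    }
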
@@ -75,8 +79,8 @@ private Bytes(@NonNull final byte[] data) {
     }
 
     /**
-     * Create a new ByteOverByteBuffer over given byte array. This does not copy data it just wraps so
-     * any changes to arrays contents will be effected here.
+     * Create a new Bytes instance over the given byte array. This does not copy data, it just
+     * wraps the array, so any changes to the array's contents will be reflected here.
      *
      * @param data The data t
      * @param offset The offset within that buffer to start
@@ -88,8 +92,8 @@ private Bytes(@NonNull final byte[] data, final int offset, final int length) {
         this.length = length;
 
         if (offset < 0 || offset > data.length) {
-            throw new IndexOutOfBoundsException("Offset " + offset + " is out of bounds for buffer of length "
-                    + data.length);
+            throw new IndexOutOfBoundsException(
+                    "Offset " + offset + " is out of bounds for buffer of length " + data.length);
         }
 
         if (length < 0) {
@@ -97,8 +101,13 @@ private Bytes(@NonNull final byte[] data, final int offset, final int length) {
         }
 
         if (offset + length > data.length) {
-            throw new IllegalArgumentException("Length " + length + " is too large buffer of length "
-                    + data.length + " starting at offset " + offset);
+            throw new IllegalArgumentException(
+                    "Length "
+                            + length
+                            + " is too large for a buffer of length "
+                            + data.length
+                            + " starting at offset "
+                            + offset);
         }
     }
 
@@ -106,8 +115,8 @@ private Bytes(@NonNull final byte[] data, final int offset, final int length) {
     // Static Methods
 
     /**
-     * Create a new {@link Bytes} over the contents of the given byte array. This does not copy data it just
-     * wraps so any changes to array's contents will be visible in the returned result.
+     * Create a new {@link Bytes} over the contents of the given byte array. This does not copy
+     * data, it just wraps the array, so any changes to the array's contents will be visible in the
+     * returned result.
      *
      * @param byteArray The byte array to wrap
      * @return new {@link Bytes} with same contents as byte array
@@ -119,12 +128,14 @@ public static Bytes wrap(@NonNull final byte[] byteArray) {
     }
 
     /**
-     * Create a new {@link Bytes} over the contents of the given byte array. This does not copy data it just
-     * wraps so any changes to arrays contents will be visible in the returned result.
+     * Create a new {@link Bytes} over the contents of the given byte array. This does not copy
+     * data, it just wraps the array, so any changes to the array's contents will be visible in the
+     * returned result.
      *
      * @param byteArray The byte array to wrap
-     * @param offset The offset within that buffer to start. Must be &gt;= 0 and &lt; byteArray.length
-     * @param length The length of bytes staring at offset to wrap. Must be &gt;= 0 and &lt; byteArray.length - offset
+     * @param offset The offset within that buffer to start. Must be &gt;= 0 and &lt;
+     *     byteArray.length
+     * @param length The length of bytes starting at offset to wrap. Must be &gt;= 0 and &lt;
+     *     byteArray.length - offset
      * @return new {@link Bytes} with same contents as byte array
      * @throws NullPointerException if byteArray is null
      * @throws IndexOutOfBoundsException if offset or length are out of bounds
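
A short sketch of the two wrap variants documented above; it assumes only the behavior stated in the javadoc (no copy is made, so changes to the wrapped array remain visible):

    import com.hedera.pbj.runtime.io.buffer.Bytes;

    public class WrapSketch {
        public static void main(String[] args) {
            final byte[] arr = new byte[] {1, 2, 3, 4, 5};

            // Wrap the whole array: no copy is made, later writes to arr stay visible.
            final Bytes all = Bytes.wrap(arr);

            // Wrap a three-byte view starting at offset 1 (bytes 2, 3, 4).
            final Bytes view = Bytes.wrap(arr, 1, 3);

            arr[1] = 9; // visible through both wrapped instances
            System.out.println(view.getByte(0)); // prints 9
        }
    }
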
@@ -181,7 +192,9 @@ public int getInt(final long offset) {
 
     @Override
     public int getInt(final long offset, @NonNull final ByteOrder byteOrder) {
-        return byteOrder == ByteOrder.BIG_ENDIAN ? getInt(offset) : Integer.reverseBytes(getInt(offset));
+        return byteOrder == ByteOrder.BIG_ENDIAN
+                ? getInt(offset)
+                : Integer.reverseBytes(getInt(offset));
     }
 
     @Override
@@ -191,12 +204,15 @@ public long getLong(final long offset) {
 
     @Override
     public long getLong(final long offset, @NonNull final ByteOrder byteOrder) {
-        return byteOrder == ByteOrder.BIG_ENDIAN ? getLong(offset) : Long.reverseBytes(getLong(offset));
+        return byteOrder == ByteOrder.BIG_ENDIAN
+                ? getLong(offset)
+                : Long.reverseBytes(getLong(offset));
     }
 
     /**
-     * Duplicate this {@link Bytes} by making a copy of the underlying byte array and returning a new {@link Bytes}
-     * over the copied data. Use this method when you need to wrap a copy of a byte array:
+     * Duplicate this {@link Bytes} by making a copy of the underlying byte array and returning a
+     * new {@link Bytes} over the copied data. Use this method when you need to wrap a copy of a
+     * byte array:
      *
      * <pre>
      *     final var arr = new byte[] { 1, 2, 3 };
@@ -204,8 +220,9 @@ public long getLong(final long offset, @NonNull final ByteOrder byteOrder) {
      *     arr[0] = 4; // this modification will NOT be visible in the "bytes" instance
      * </pre>
      *
-     * <p>Implementation note: since we will be making an array copy, if the source array had an offset and length,
-     * the newly copied array will only contain the bytes between the offset and length of the original array.
+     * <p>Implementation note: since we will be making an array copy, if the source array had an
+     * offset and length, the newly copied array will only contain the bytes between the offset and
+     * length of the original array.
      *
      * @return A new {@link Bytes} instance with a copy of the underlying byte array data.
      */
@@ -217,8 +234,8 @@ public Bytes replicate() {
     }
 
     /**
-     * A helper method for efficient copy of our data into another ByteBuffer.
-     * The destination buffers position is updated.
+     * A helper method for efficient copy of our data into another ByteBuffer. The destination
+     * buffer's position is updated.
      *
      * @param dstBuffer the buffer to copy into
      */
@@ -227,8 +244,8 @@ public void writeTo(@NonNull final ByteBuffer dstBuffer) {
     }
 
     /**
-     * A helper method for efficient copy of our data into another ByteBuffer.
-     * The destination buffers position is updated.
+     * A helper method for efficient copy of our data into another ByteBuffer. The destination
+     * buffer's position is updated.
      *
      * @param dstBuffer the buffer to copy into
      * @param offset The offset from the start of this {@link Bytes} object to get the bytes from.
@@ -238,9 +255,7 @@ public void writeTo(@NonNull final ByteBuffer dstBuffer, final int offset, final
         dstBuffer.put(buffer, Math.toIntExact(start + offset), length);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeTo(@NonNull final OutputStream outStream) {
         try {
@@ -250,9 +265,7 @@ public void writeTo(@NonNull final OutputStream outStream) {
         }
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeTo(@NonNull final OutputStream outStream, final int offset, final int length) {
         try {
@@ -263,8 +276,9 @@ public void writeTo(@NonNull final OutputStream outStream, final int offset, fin
     }
 
     /**
-     * A helper method for efficient copy of our data into an WritableSequentialData without creating a defensive copy
-     * of the data. The implementation relies on a well-behaved WritableSequentialData that doesn't modify the buffer data.
+     * A helper method for efficient copy of our data into a WritableSequentialData without
+     * creating a defensive copy of the data. The implementation relies on a well-behaved
+     * WritableSequentialData that doesn't modify the buffer data.
      *
      * @param wsd the OutputStream to copy into
      */
@@ -273,20 +287,23 @@ public void writeTo(@NonNull final WritableSequentialData wsd) {
     }
 
     /**
-     * A helper method for efficient copy of our data into an WritableSequentialData without creating a defensive copy
-     * of the data. The implementation relies on a well-behaved WritableSequentialData that doesn't modify the buffer data.
+     * A helper method for efficient copy of our data into a WritableSequentialData without
+     * creating a defensive copy of the data. The implementation relies on a well-behaved
+     * WritableSequentialData that doesn't modify the buffer data.
      *
      * @param wsd The OutputStream to copy into.
      * @param offset The offset from the start of this {@link Bytes} object to get the bytes from.
      * @param length The number of bytes to extract.
      */
-    public void writeTo(@NonNull final WritableSequentialData wsd, final int offset, final int length) {
+    public void writeTo(
+            @NonNull final WritableSequentialData wsd, final int offset, final int length) {
         wsd.writeBytes(buffer, Math.toIntExact(start + offset), length);
     }
 
     /**
-     * A helper method for efficient copy of our data into an MessageDigest without creating a defensive copy
-     * of the data. The implementation relies on a well-behaved MessageDigest that doesn't modify the buffer data.
+     * A helper method for efficient copy of our data into a MessageDigest without creating a
+     * defensive copy of the data. The implementation relies on a well-behaved MessageDigest that
+     * doesn't modify the buffer data.
      *
      * @param digest the MessageDigest to copy into
      */
@@ -295,8 +312,9 @@ public void writeTo(@NonNull final MessageDigest digest) {
     }
 
     /**
-     * A helper method for efficient copy of our data into an MessageDigest without creating a defensive copy
-     * of the data. The implementation relies on a well-behaved MessageDigest that doesn't modify the buffer data.
+     * A helper method for efficient copy of our data into a MessageDigest without creating a
+     * defensive copy of the data. The implementation relies on a well-behaved MessageDigest that
+     * doesn't modify the buffer data.
      *
      * @param digest the MessageDigest to copy into
      * @param offset The offset from the start of this {@link Bytes} object to get the bytes from.
@@ -307,51 +325,55 @@ public void writeTo(@NonNull final MessageDigest digest, final int offset, final
     }
 
     /**
-     * Same as {@link #updateSignature(Signature, int, int)} with offset 0 and length equal to the length of this
-     * {@link Bytes} object.
+     * Same as {@link #updateSignature(Signature, int, int)} with offset 0 and length equal to the
+     * length of this {@link Bytes} object.
      */
     public void updateSignature(@NonNull final Signature signature) throws SignatureException {
         signature.update(buffer, start, length);
     }
 
     /**
-     * A helper method for efficient copy of our data into a Signature without creating a defensive copy of the data.
-     * The implementation relies on a well-behaved Signature that doesn't modify the buffer data. Calls the
-     * {@link Signature#update(byte[], int, int)} method with all the data in this {@link Bytes} object. This method
-     * should be used when the data in the buffer should be validated or signed.
+     * A helper method for efficient copy of our data into a Signature without creating a defensive
+     * copy of the data. The implementation relies on a well-behaved Signature that doesn't modify
+     * the buffer data. Calls the {@link Signature#update(byte[], int, int)} method with all the
+     * data in this {@link Bytes} object. This method should be used when the data in the buffer
+     * should be validated or signed.
      *
      * @param signature The Signature to update
-     * @param offset    The offset from the start of this {@link Bytes} object to get the bytes from
-     * @param length    The number of bytes to extract
+     * @param offset The offset from the start of this {@link Bytes} object to get the bytes from
+     * @param length The number of bytes to extract
      * @throws SignatureException If the Signature instance throws this exception
      */
-    public void updateSignature(@NonNull final Signature signature, final int offset, final int length)
+    public void updateSignature(
+            @NonNull final Signature signature, final int offset, final int length)
             throws SignatureException {
         validateOffsetLength(offset, length);
         signature.update(buffer, calculateOffset(offset), length);
     }
 
     /**
-     * Same as {@link #verifySignature(Signature, int, int)} with offset 0 and length equal to the length of this
-     * {@link Bytes} object.
+     * Same as {@link #verifySignature(Signature, int, int)} with offset 0 and length equal to the
+     * length of this {@link Bytes} object.
      */
     public boolean verifySignature(@NonNull final Signature signature) throws SignatureException {
         return signature.verify(buffer, start, length);
     }
 
     /**
-     * A helper method for efficient copy of our data into a Signature without creating a defensive copy of the data.
-     * The implementation relies on a well-behaved Signature that doesn't modify the buffer data. Calls the
-     * {@link Signature#verify(byte[], int, int)} method with all the data in this {@link Bytes} object. This method
-     * should be used when the data in the buffer is a signature that should be verified.
+     * A helper method for efficient copy of our data into a Signature without creating a defensive
+     * copy of the data. The implementation relies on a well-behaved Signature that doesn't modify
+     * the buffer data. Calls the {@link Signature#verify(byte[], int, int)} method with all the
+     * data in this {@link Bytes} object. This method should be used when the data in the buffer is
+     * a signature that should be verified.
      *
      * @param signature the Signature to use to verify
-     * @param offset    The offset from the start of this {@link Bytes} object to get the bytes from
-     * @param length    The number of bytes to extract
+     * @param offset The offset from the start of this {@link Bytes} object to get the bytes from
+     * @param length The number of bytes to extract
      * @return true if the signature is valid, false otherwise
      * @throws SignatureException If the Signature instance throws this exception
      */
-    public boolean verifySignature(@NonNull final Signature signature, final int offset, final int length)
+    public boolean verifySignature(
+            @NonNull final Signature signature, final int offset, final int length)
             throws SignatureException {
         validateOffsetLength(offset, length);
         return signature.verify(buffer, calculateOffset(offset), length);
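
A sketch of how updateSignature and verifySignature might be combined, following the javadoc above: the message Bytes is fed to the Signature, and the Bytes holding the signature is then verified. The publicKey parameter and the SHA256withRSA algorithm are placeholder assumptions, not something prescribed by this API:

    import com.hedera.pbj.runtime.io.buffer.Bytes;
    import java.security.GeneralSecurityException;
    import java.security.PublicKey;
    import java.security.Signature;

    public class VerifySketch {
        // publicKey, message and signatureBytes are hypothetical inputs supplied by the caller.
        static boolean verify(final PublicKey publicKey, final Bytes message, final Bytes signatureBytes)
                throws GeneralSecurityException {
            final Signature sig = Signature.getInstance("SHA256withRSA");
            sig.initVerify(publicKey);
            // Feed the signed data to the Signature without copying it out of the Bytes.
            message.updateSignature(sig);
            // The Bytes holding the signature itself is checked with verifySignature.
            return signatureBytes.verifySignature(sig);
        }
    }
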
@@ -376,6 +398,7 @@ public ReadableSequentialData toReadableSequentialData() {
     public InputStream toInputStream() {
         return new InputStream() {
             private long pos = 0;
+
             @Override
             public int read() throws IOException {
                 if (length - pos <= 0) {
@@ -392,9 +415,12 @@ public int read() throws IOException {
     }
 
     /**
-     * Compare this {@link Bytes} object to another {@link Bytes} object. The comparison is done on a byte-by-byte
+     * Compare this {@link Bytes} object to another {@link Bytes} object. The comparison is done on
+     * a byte-by-byte basis.
+     *
      * @param otherData the object to be compared.
-     * @return a negative integer, zero, or a positive integer as this object is less than, equal to, or greater than
+     * @return a negative integer, zero, or a positive integer as this object is less than, equal
+     *     to, or greater than the other object
      */
     @Override
     public int compareTo(Bytes otherData) {
@@ -431,7 +457,7 @@ public String toBase64() {
             return Base64.getEncoder().encodeToString(buffer);
         } else {
             byte[] bytes = new byte[length];
-            getBytes(0,bytes);
+            getBytes(0, bytes);
             return Base64.getEncoder().encodeToString(bytes);
         }
     }
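
A tiny usage sketch for toBase64 (and the toHex method documented just below); the expected output strings assume standard Base64 and lowercase hex encodings:

    import com.hedera.pbj.runtime.io.buffer.Bytes;

    public class EncodeSketch {
        public static void main(String[] args) {
            final Bytes bytes =
                    Bytes.wrap(new byte[] {(byte) 0xDE, (byte) 0xAD, (byte) 0xBE, (byte) 0xEF});
            System.out.println(bytes.toHex()); // "deadbeef"
            System.out.println(bytes.toBase64()); // "3q2+7w=="
        }
    }
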
@@ -442,12 +468,12 @@ public String toBase64() {
      * @return Hex encoded string of the bytes in this object.
      */
     public String toHex() {
-        return HexFormat.of().formatHex(buffer,start,start+length);
+        return HexFormat.of().formatHex(buffer, start, start + length);
     }
 
     /**
-     * Equals, important that it works for all subclasses of Bytes as well. As any 2 Bytes classes with same contents of
-     * bytes are equal
+     * Equals, important that it works for all subclasses of Bytes as well, as any two Bytes
+     * instances with the same byte contents are equal.
      *
      * @param o the other Bytes object to compare to for equality
      * @return true if o instance of Bytes and contents match
@@ -456,7 +482,8 @@ public String toHex() {
     public boolean equals(@Nullable final Object o) {
         if (this == o) return true;
         if (!(o instanceof Bytes that)) return false;
-        return Arrays.equals(buffer, start, start + length, that.buffer, that.start, that.start + that.length);
+        return Arrays.equals(
+                buffer, start, start + length, that.buffer, that.start, that.start + that.length);
     }
 
     /**
@@ -495,7 +522,11 @@ public byte getByte(final long offset) {
 
     /** {@inheritDoc} */
     @Override
-    public long getBytes(final long offset, @NonNull final byte[] dst, final int dstOffset, final int maxLength) {
+    public long getBytes(
+            final long offset,
+            @NonNull final byte[] dst,
+            final int dstOffset,
+            final int maxLength) {
         if (maxLength < 0) {
             throw new IllegalArgumentException("Negative maxLength not allowed");
         }
@@ -505,9 +536,11 @@ public long getBytes(final long offset, @NonNull final byte[] dst, final int dst
             return 0;
         }
         validateOffset(offset);
-        // This is a faster implementation than the default, since it has access to the entire byte array
+        // This is a faster implementation than the default, since it has access to the entire byte
+        // array
         // and can do a system array copy instead of a loop.
-        System.arraycopy(buffer, Math.toIntExact(start + offset), dst, dstOffset, Math.toIntExact(len));
+        System.arraycopy(
+                buffer, Math.toIntExact(start + offset), dst, dstOffset, Math.toIntExact(len));
         return len;
     }
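
A sketch of the partial-read behavior of getBytes(offset, dst, dstOffset, maxLength): when fewer than maxLength bytes remain after offset, only the remaining bytes are copied and the smaller count is returned. The sample values are made up:

    import com.hedera.pbj.runtime.io.buffer.Bytes;

    public class GetBytesSketch {
        public static void main(String[] args) {
            final Bytes bytes = Bytes.wrap(new byte[] {10, 20, 30, 40, 50});

            // Ask for up to 4 bytes starting at offset 3; only 2 bytes remain, so only
            // 2 are copied into dst and the returned count is 2.
            final byte[] dst = new byte[4];
            final long copied = bytes.getBytes(3, dst, 0, dst.length);
            System.out.println(copied); // prints 2
        }
    }
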
 
@@ -520,7 +553,8 @@ public long getBytes(final long offset, @NonNull final ByteBuffer dst) {
             return 0;
         }
         validateOffset(offset);
-        // This is a faster implementation than the default, since it has access to the entire byte array
+        // This is a faster implementation than the default, since it has access to the entire byte
+        // array
         // and can do a system array copy instead of a loop.
         dst.put(buffer, Math.toIntExact(start + offset), Math.toIntExact(len));
         return len;
@@ -535,7 +569,8 @@ public long getBytes(final long offset, @NonNull final BufferedData dst) {
             return 0;
         }
         validateOffset(offset);
-        // This is a faster implementation than the default, since it has access to the entire byte array
+        // This is a faster implementation than the default, since it has access to the entire byte
+        // array
         // and can do a system array copy instead of a loop.
         dst.writeBytes(buffer, Math.toIntExact(start + offset), Math.toIntExact(len));
         return len;
@@ -556,7 +591,8 @@ public Bytes getBytes(final long offset, final long len) {
             return Bytes.EMPTY;
         }
         validateOffset(offset);
-        // Our buffer is assumed to be immutable, so we can just return a new Bytes object that wraps the same buffer
+        // Our buffer is assumed to be immutable, so we can just return a new Bytes object that
+        // wraps the same buffer
         return new Bytes(buffer, Math.toIntExact(start + offset), Math.toIntExact(len));
     }
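
A sketch illustrating that getBytes(offset, len) (and slice, which delegates to it) returns a view over the same immutable backing buffer rather than copying, per the comment above:

    import com.hedera.pbj.runtime.io.buffer.Bytes;

    public class SliceSketch {
        public static void main(String[] args) {
            final Bytes bytes = Bytes.wrap(new byte[] {1, 2, 3, 4, 5});

            // Returns a Bytes over the same immutable backing buffer: no array copy is made.
            final Bytes middle = bytes.getBytes(1, 3); // bytes 2, 3, 4
            System.out.println(middle.length()); // prints 3

            // slice(offset, length) delegates to getBytes(offset, length), so this is equivalent.
            final Bytes same = bytes.slice(1, 3);
            System.out.println(same.equals(middle)); // prints true
        }
    }
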
 
@@ -585,7 +621,13 @@ public boolean contains(final long offset, @NonNull final byte[] bytes) {
             return false;
         }
         validateOffset(offset);
-        return Arrays.equals(buffer, Math.toIntExact(start + offset), Math.toIntExact(start + offset + len), bytes, 0, len);
+        return Arrays.equals(
+                buffer,
+                Math.toIntExact(start + offset),
+                Math.toIntExact(start + offset + len),
+                bytes,
+                0,
+                len);
     }
 
     /** {@inheritDoc} */
@@ -595,7 +637,8 @@ public Bytes slice(final long offset, final long length) {
         return getBytes(offset, length);
     }
 
-    /** * Gets a byte[] of the bytes of this {@link Bytes} object..
+    /**
+     * Gets a byte[] of the bytes of this {@link Bytes} object.
      *
      * @return a clone of the bytes of this {@link Bytes} object or null.
      */
@@ -604,7 +647,8 @@ public byte[] toByteArray() {
         return toByteArray(0, length);
     }
 
-    /** * Gets a byte[] of the bytes of this {@link Bytes} object.
+    /**
+     * Gets a byte[] of the bytes of this {@link Bytes} object.
      *
      * @param offset The start offset to get the bytes from.
      * @param len The number of bytes to get.
@@ -627,7 +671,8 @@ private void validateOffset(final long offset) {
     }
 
     /**
-     * Validates whether the offset and length supplied to a method are within the bounds of the Bytes object.
+     * Validates whether the offset and length supplied to a method are within the bounds of the
+     * Bytes object.
      *
      * @param suppliedOffset the offset supplied
      * @param suppliedLength the length supplied
@@ -639,8 +684,7 @@ private void validateOffsetLength(final long suppliedOffset, final long supplied
         if (suppliedOffset + suppliedLength > length) {
             throw new IndexOutOfBoundsException(
                     "The offset(%d) and length(%d) provided are out of bounds for this Bytes object, which has a length of %d"
-                            .formatted(suppliedOffset, suppliedLength, length)
-            );
+                            .formatted(suppliedOffset, suppliedLength, length));
         }
     }
 
@@ -654,9 +698,10 @@ private int calculateOffset(final long suppliedOffset) {
         return Math.toIntExact(start + suppliedOffset);
     }
 
-    /** Sorts {@link Bytes} according to their byte values, lower valued bytes first.
-      * Bytes are compared using the passed in Byte Comparator
-      */
+    /**
+     * Sorts {@link Bytes} according to their byte values, lower valued bytes first. Bytes are
+     * compared using the passed-in Byte Comparator.
+     */
     private static Comparator<Bytes> valueSorter(@NonNull final Comparator<Byte> byteComparator) {
         return (Bytes o1, Bytes o2) -> {
             final var val = Math.min(o1.length(), o2.length());
@@ -667,8 +712,10 @@ private static Comparator<Bytes> valueSorter(@NonNull final Comparator<Byte> byt
                 }
             }
 
-            // In case one of the buffers is longer than the other and the first n bytes (where n in the length of the
-            // shorter buffer) are equal, the buffer with the shorter length is first in the sort order.
+            // In case one of the buffers is longer than the other and the first n bytes (where n
+            // is the length of the shorter buffer) are equal, the buffer with the shorter length
+            // is first in the sort order.
             long len = o1.length() - o2.length();
             if (len == 0) {
                 return 0;
@@ -678,52 +725,54 @@ private static Comparator<Bytes> valueSorter(@NonNull final Comparator<Byte> byt
     }
 
     /**
-     * Appends a {@link Bytes} object to this {@link Bytes} object, producing a new immutable {link Bytes} object.
+     * Appends a {@link Bytes} object to this {@link Bytes} object, producing a new immutable
+     * {@link Bytes} object.
+     *
      * @param bytes The {@link Bytes} object to append.
      * @return A new {link Bytes} object containing the concatenated bytes and b.
      * @throws BufferUnderflowException if the buffer is empty
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than Bytes.length()
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     Bytes.length()
      */
     @NonNull
     public Bytes append(@NonNull final Bytes bytes) {
         // The length field of Bytes is int. The length() returns always an int,
         // so safe to cast.
         long length = this.length();
-        byte[] newBytes = new byte[(int)(length + (int)bytes.length())];
+        byte[] newBytes = new byte[(int) (length + (int) bytes.length())];
         this.getBytes(0, newBytes, 0, (int) length);
-        bytes.getBytes(0, newBytes, (int) length, (int)bytes.length());
+        bytes.getBytes(0, newBytes, (int) length, (int) bytes.length());
         return Bytes.wrap(newBytes);
     }
 
     /**
-     * Appends a {@link RandomAccessData} object to this {@link Bytes} object, producing a new immutable {link Bytes} object.
+     * Appends a {@link RandomAccessData} object to this {@link Bytes} object, producing a new
+     * immutable {@link Bytes} object.
+     *
      * @param data The {@link RandomAccessData} object to append.
      * @return A new {link Bytes} object containing the concatenated bytes and b.
      * @throws BufferUnderflowException if the buffer is empty
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than Bytes.length()
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     Bytes.length()
      */
     @NonNull
     public Bytes append(@NonNull final RandomAccessData data) {
         // The length field of Bytes is int. The length(0 returns always an int,
         // so safe to cast.
-        byte[] newBytes = new byte[(int)(this.length() + (int)data.length())];
+        byte[] newBytes = new byte[(int) (this.length() + (int) data.length())];
         int length1 = (int) this.length();
         this.getBytes(0, newBytes, 0, length1);
-        data.getBytes(0, newBytes, length1, (int)data.length());
+        data.getBytes(0, newBytes, length1, (int) data.length());
         return Bytes.wrap(newBytes);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public int getVarInt(final long offset, final boolean zigZag) {
         return (int) getVar(Math.toIntExact(offset), zigZag);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public long getVarLong(final long offset, final boolean zigZag) {
         return getVar(Math.toIntExact(offset), zigZag);
@@ -751,5 +800,4 @@ private long getVar(int offset, final boolean zigZag) {
         }
         throw new DataEncodingException("Malformed var int");
     }
-
 }
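
A small sketch of getVarInt, assuming the standard protobuf-style varint and zig-zag encodings implemented by getVar above; the byte values are the usual textbook examples:

    import com.hedera.pbj.runtime.io.buffer.Bytes;

    public class VarIntSketch {
        public static void main(String[] args) {
            // 0x96 0x01 is the protobuf-style varint encoding of 150.
            final Bytes plain = Bytes.wrap(new byte[] {(byte) 0x96, 0x01});
            System.out.println(plain.getVarInt(0, false)); // prints 150

            // With zig-zag decoding enabled, the single byte 0x01 decodes to -1.
            final Bytes zigZag = Bytes.wrap(new byte[] {0x01});
            System.out.println(zigZag.getVarInt(0, true)); // prints -1
        }
    }
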
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/DirectBufferedData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/DirectBufferedData.java
index 6359f566..59ab2684 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/DirectBufferedData.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/DirectBufferedData.java
@@ -9,23 +9,27 @@
 import java.util.Objects;
 
 /**
- * BufferedData subclass for instances backed by direct byte buffers. Provides slightly more optimized
- * versions of several methods to get / read / write bytes using {@link UnsafeUtils} methods.
+ * BufferedData subclass for instances backed by direct byte buffers. Provides slightly more
+ * optimized versions of several methods to get / read / write bytes using {@link UnsafeUtils}
+ * methods.
  */
 final class DirectBufferedData extends BufferedData {
 
     DirectBufferedData(final ByteBuffer buffer) {
         super(buffer);
         if (!buffer.isDirect()) {
-            throw new IllegalArgumentException("Cannot create a DirectBufferedData over a heap byte buffer");
+            throw new IllegalArgumentException(
+                    "Cannot create a DirectBufferedData over a heap byte buffer");
         }
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
-    public long getBytes(final long offset, @NonNull final byte[] dst, final int dstOffset, final int maxLength) {
+    public long getBytes(
+            final long offset,
+            @NonNull final byte[] dst,
+            final int dstOffset,
+            final int maxLength) {
         validateLen(maxLength);
         final long len = Math.min(maxLength, length() - offset);
         checkOffsetToRead(offset, length(), len);
@@ -37,9 +41,7 @@ public long getBytes(final long offset, @NonNull final byte[] dst, final int dst
         return len;
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public long getBytes(final long offset, @NonNull final ByteBuffer dst) {
         if (!dst.hasArray()) {
@@ -50,13 +52,12 @@ public long getBytes(final long offset, @NonNull final ByteBuffer dst) {
         final byte[] dstArr = dst.array();
         final int dstPos = dst.position();
         final int dstArrOffset = dst.arrayOffset();
-        UnsafeUtils.getDirectBufferToArray(buffer, offset, dstArr, dstArrOffset + dstPos, Math.toIntExact(len));
+        UnsafeUtils.getDirectBufferToArray(
+                buffer, offset, dstArr, dstArrOffset + dstPos, Math.toIntExact(len));
         return len;
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @NonNull
     @Override
     public Bytes getBytes(final long offset, final long len) {
@@ -70,17 +71,13 @@ public Bytes getBytes(final long offset, final long len) {
         return Bytes.wrap(res);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public int getVarInt(final long offset, final boolean zigZag) {
         return (int) getVar(Math.toIntExact(offset), zigZag);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public long getVarLong(final long offset, final boolean zigZag) {
         return getVar(Math.toIntExact(offset), zigZag);
@@ -106,12 +103,12 @@ private long getVar(final int offset, final boolean zigZag) {
                 return zigZag ? (value >>> 1) ^ -(value & 1) : value;
             }
         }
-        throw (i == 10) ? new DataEncodingException("Malformed var int") : new BufferUnderflowException();
+        throw (i == 10)
+                ? new DataEncodingException("Malformed var int")
+                : new BufferUnderflowException();
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public long readBytes(@NonNull final byte[] dst, final int dstOffset, final int maxLength) {
         validateLen(maxLength);
@@ -137,12 +134,14 @@ public long readBytes(@NonNull final ByteBuffer dst) {
         if (dst.hasArray()) {
             final byte[] dstArr = dst.array();
             final int dstArrOffset = dst.arrayOffset();
-            UnsafeUtils.getDirectBufferToArray(buffer, pos, dstArr, dstArrOffset + dstPos, Math.toIntExact(len));
+            UnsafeUtils.getDirectBufferToArray(
+                    buffer, pos, dstArr, dstArrOffset + dstPos, Math.toIntExact(len));
             buffer.position(Math.toIntExact(pos + len));
             dst.position(Math.toIntExact(dstPos + len));
             return len;
         } else if (dst.isDirect()) {
-            UnsafeUtils.getDirectBufferToDirectBuffer(buffer, pos, dst, dstPos, Math.toIntExact(len));
+            UnsafeUtils.getDirectBufferToDirectBuffer(
+                    buffer, pos, dst, dstPos, Math.toIntExact(len));
             buffer.position(Math.toIntExact(pos + len));
             dst.position(Math.toIntExact(dstPos + len));
             return len;
@@ -151,9 +150,7 @@ public long readBytes(@NonNull final ByteBuffer dst) {
         }
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @NonNull
     @Override
     public Bytes readBytes(final int len) {
@@ -166,17 +163,13 @@ public Bytes readBytes(final int len) {
         return Bytes.wrap(res);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public int readVarInt(final boolean zigZag) {
         return (int) readVar(zigZag);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public long readVarLong(final boolean zigZag) {
         return readVar(zigZag);
@@ -204,9 +197,7 @@ private long readVar(final boolean zigZag) {
         throw (i == 10) ? new DataEncodingException("") : new BufferUnderflowException();
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeBytes(@NonNull final byte[] src, final int offset, final int len) {
         Objects.requireNonNull(src);
@@ -218,9 +209,7 @@ public void writeBytes(@NonNull final byte[] src, final int offset, final int le
         buffer.position(pos + len);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeBytes(@NonNull final ByteBuffer src) {
         if (!src.hasArray()) {
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessData.java
index ae95948f..c4541e2c 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessData.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessData.java
@@ -4,19 +4,19 @@
 import com.hedera.pbj.runtime.io.DataEncodingException;
 import com.hedera.pbj.runtime.io.SequentialData;
 import edu.umd.cs.findbugs.annotations.NonNull;
-
-import java.nio.BufferOverflowException;
 import java.io.OutputStream;
+import java.nio.BufferOverflowException;
 import java.nio.BufferUnderflowException;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
 import java.nio.charset.StandardCharsets;
 
 /**
- * Represents data which may be accessed out of order in some random manner. Unlike {@link SequentialData},
- * this interface is only backed by a buffer of some kind: an array, a {@link ByteBuffer}, a memory-mapped file, etc.
- * Unlike {@link BufferedSequentialData}, it does not define any kind of "position" cursor, just a "length" representing
- * the valid range of indexes and methods for reading data at any of those indexes.
+ * Represents data which may be accessed out of order in some random manner. Unlike {@link
+ * SequentialData}, this interface is only backed by a buffer of some kind: an array, a {@link
+ * ByteBuffer}, a memory-mapped file, etc. Unlike {@link BufferedSequentialData}, it does not define
+ * any kind of "position" cursor, just a "length" representing the valid range of indexes and
+ * methods for reading data at any of those indexes.
  */
 @SuppressWarnings("unused")
 public interface RandomAccessData {
@@ -33,7 +33,8 @@ public interface RandomAccessData {
      *
      * @param offset The offset into data to get a byte from.
      * @return The signed byte at given {@code offset}
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than {@link #length()}
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     {@link #length()}
      */
     byte getByte(final long offset);
 
@@ -42,23 +43,26 @@ public interface RandomAccessData {
      *
      * @param offset The offset into data to get an unsigned byte from.
      * @return The unsigned byte at given {@code offset}
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than {@link #length()}
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     {@link #length()}
      */
     default int getUnsignedByte(final long offset) {
         return Byte.toUnsignedInt(getByte(offset));
     }
 
     /**
-     * Get bytes starting at the given {@code offset} and write them into the {@code dst} array, up to the size of
-     * the {@code dst} array. If {@code dst} is larger than the number of bytes between {@code offset} and
-     * {@link #length()}, only the maximum available bytes are read. The total number of bytes actually read are
-     * returned. The bytes will be placed starting at index 0 of the {@code dst} array. If the number of bytes
-     * between {@code offset} and {@link #length()} is 0, then 0 is returned.
+     * Get bytes starting at the given {@code offset} and write them into the {@code dst} array, up
+     * to the size of the {@code dst} array. If {@code dst} is larger than the number of bytes
+     * between {@code offset} and {@link #length()}, only the maximum available bytes are read. The
+     * total number of bytes actually read is returned. The bytes will be placed starting at index
+     * 0 of the {@code dst} array. If the number of bytes between {@code offset} and {@link
+     * #length()} is 0, then 0 is returned.
      *
      * @param offset The offset into data to begin reading bytes
      * @param dst The array into which bytes are to be written
      * @throws NullPointerException if {@code dst} is null
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than {@link #length()}
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     {@link #length()}
      * @return The number of bytes read actually read and placed into {@code dst}
      */
     default long getBytes(final long offset, @NonNull final byte[] dst) {
@@ -66,26 +70,31 @@ default long getBytes(final long offset, @NonNull final byte[] dst) {
     }
 
     /**
-     * Get bytes starting at the given {@code offset} into the {@code dst} array, up to {@code maxLength} number of
-     * bytes. If {@code maxLength} is larger than the number of bytes between {@code offset} and {@link #length()},
-     * only the maximum available bytes are read. The total number of bytes actually read are returned. The bytes will
-     * be placed starting at index {@code offset} of the {@code dst} array. If the number of bytes between
-     * {@code offset} and {@link #length()} is 0, then 0 is returned.
+     * Get bytes starting at the given {@code offset} into the {@code dst} array, up to {@code
+     * maxLength} number of bytes. If {@code maxLength} is larger than the number of bytes between
+     * {@code offset} and {@link #length()}, only the maximum available bytes are read. The total
+     * number of bytes actually read is returned. The bytes will be placed starting at index
+     * {@code dstOffset} of the {@code dst} array. If the number of bytes between {@code offset}
+     * and {@link #length()} is 0, then 0 is returned.
      *
      * @param offset The offset into data to begin reading bytes
      * @param dst The array into which bytes are to be written
-     * @param dstOffset The offset within the {@code dst} array of the first byte to be written; must be non-negative
-     *                and no larger than {@code dst.length - maxLength}.
-     * @param maxLength The maximum number of bytes to be written to the given {@code dst} array; must be non-negative
-     *                and no larger than {@code dst.length - offset}
+     * @param dstOffset The offset within the {@code dst} array of the first byte to be written;
+     *     must be non-negative and no larger than {@code dst.length - maxLength}.
+     * @param maxLength The maximum number of bytes to be written to the given {@code dst} array;
+     *     must be non-negative and no larger than {@code dst.length - offset}
      * @throws NullPointerException if {@code dst} is null
-     * @throws IndexOutOfBoundsException If {@code dstOffset} is out of bounds of {@code dst},
-     *             or if {@code dstOffset + maxLength} is greater than {@code dst.length},
-     *             or if {@code offset} is out of bounds of this RandomAccessData.
+     * @throws IndexOutOfBoundsException If {@code dstOffset} is out of bounds of {@code dst}, or if
+     *     {@code dstOffset + maxLength} is greater than {@code dst.length}, or if {@code offset} is
+     *     out of bounds of this RandomAccessData.
      * @throws IllegalArgumentException If {@code maxLength} is negative
      * @return The number of bytes read actually read and placed into {@code dst}
      */
-    default long getBytes(final long offset, @NonNull final byte[] dst, final int dstOffset, final int maxLength) {
+    default long getBytes(
+            final long offset,
+            @NonNull final byte[] dst,
+            final int dstOffset,
+            final int maxLength) {
         if (maxLength < 0) {
             throw new IllegalArgumentException("Negative maxLength not allowed");
         }
@@ -99,16 +108,17 @@ default long getBytes(final long offset, @NonNull final byte[] dst, final int ds
 
     /**
      * Get bytes starting at the given {@code offset} into the destination {@link ByteBuffer}, up to
-     * {@link ByteBuffer#remaining()} number of bytes. If {@link ByteBuffer#remaining()} is larger than the number
-     * of bytes between {@code offset} and {@link #length()}, only the maximum available bytes are read. The total
-     * number of bytes actually read are returned. The bytes will be placed starting at index
-     * {@link ByteBuffer#position()} of the destination buffer. If the number of bytes between {@code offset} and
-     * {@link #length()} is 0, then 0 is returned.
+     * {@link ByteBuffer#remaining()} number of bytes. If {@link ByteBuffer#remaining()} is larger
+     * than the number of bytes between {@code offset} and {@link #length()}, only the maximum
+     * available bytes are read. The total number of bytes actually read is returned. The bytes
+     * will be placed starting at index {@link ByteBuffer#position()} of the destination buffer. If
+     * the number of bytes between {@code offset} and {@link #length()} is 0, then 0 is returned.
      *
      * @param offset The offset into data to begin reading bytes
      * @param dst The destination {@link ByteBuffer}
      * @throws NullPointerException if {@code dst} is null
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than {@link #length()}
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     {@link #length()}
      * @return The number of bytes read actually read and placed into {@code dst}
      */
     default long getBytes(final long offset, @NonNull final ByteBuffer dst) {
@@ -124,16 +134,17 @@ default long getBytes(final long offset, @NonNull final ByteBuffer dst) {
 
     /**
      * Get bytes starting at given {@code offset} into the destination {@link BufferedData}, up to
-     * {@link BufferedData#remaining()} number of bytes. If {@link BufferedData#remaining()} is larger than the
-     * number of bytes between {@code offset} and {@link #length()}, only the remaining bytes are read. The total
-     * number of bytes actually read are returned. The bytes will be placed starting at index
-     * {@link BufferedData#position()} of the buffer. If the number of bytes between {@code offset} and
-     * {@link #length()} is 0, then 0 is returned.
+     * {@link BufferedData#remaining()} number of bytes. If {@link BufferedData#remaining()} is
+     * larger than the number of bytes between {@code offset} and {@link #length()}, only the
+     * remaining bytes are read. The total number of bytes actually read is returned. The bytes
+     * will be placed starting at index {@link BufferedData#position()} of the buffer. If the number
+     * of bytes between {@code offset} and {@link #length()} is 0, then 0 is returned.
      *
      * @param offset The offset into data to begin reading bytes
      * @param dst The destination {@link BufferedData}
      * @throws NullPointerException if {@code dst} is null
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than {@link #length()}
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     {@link #length()}
      * @return The number of bytes read actually read and placed into {@code dst}
      */
     default long getBytes(final long offset, @NonNull final BufferedData dst) {
@@ -148,16 +159,17 @@ default long getBytes(final long offset, @NonNull final BufferedData dst) {
     }
 
     /**
-     * Get {@code length} bytes starting at the given {@code offset} from this buffer. The returned bytes will
-     * be immutable. The returned {@link Bytes} will have exactly {@code length} bytes.
+     * Get {@code length} bytes starting at the given {@code offset} from this buffer. The returned
+     * bytes will be immutable. The returned {@link Bytes} will have exactly {@code length} bytes.
      *
      * @param offset The offset into data to begin reading bytes
      * @param length The non-negative length in bytes to read
      * @return new {@link Bytes} containing the read data
      * @throws IllegalArgumentException If {@code length} is negative
-     * @throws BufferUnderflowException If there are not {@code length} bytes between {@code offset} and
-     *                                  {@link #length()}
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than {@link #length()}
+     * @throws BufferUnderflowException If there are not {@code length} bytes between {@code offset}
+     *     and {@link #length()}
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     {@link #length()}
      */
     @NonNull
     default Bytes getBytes(final long offset, final long length) {
@@ -185,16 +197,18 @@ default Bytes getBytes(final long offset, final long length) {
     }
 
     /**
-     * Get {@code length} bytes starting at the given {@code offset} from this buffer. The returned bytes will
-     * be immutable. The returned {@link RandomAccessData} will have exactly {@code length} bytes.
+     * Get {@code length} bytes starting at the given {@code offset} from this buffer. The returned
+     * bytes will be immutable. The returned {@link RandomAccessData} will have exactly {@code
+     * length} bytes.
      *
      * @param offset The offset into data to begin reading bytes
      * @param length The non-negative length in bytes to read
      * @return new {@link RandomAccessData} containing the read data
      * @throws IllegalArgumentException If {@code length} is negative
-     * @throws BufferUnderflowException If there are not {@code length} bytes between {@code offset} and
-     *                                  {@link #length()}
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than {@link #length()}
+     * @throws BufferUnderflowException If there are not {@code length} bytes between {@code offset}
+     *     and {@link #length()}
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     {@link #length()}
      */
     @NonNull
     default RandomAccessData slice(final long offset, final long length) {
@@ -202,20 +216,23 @@ default RandomAccessData slice(final long offset, final long length) {
     }
 
     /**
-     * Gets four bytes at the given {@code offset}, composing them into an int value according to the Java
-     * standard big-endian byte order.
+     * Gets four bytes at the given {@code offset}, composing them into an int value according to
+     * the Java standard big-endian byte order.
      *
      * @param offset The offset into data to get an integer from.
      * @return The int value at the given {@code offset}
-     * @throws BufferUnderflowException If there are fewer than four bytes from {@code offset} to the end of the buffer
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than {@link #length()}
+     * @throws BufferUnderflowException If there are fewer than four bytes from {@code offset} to
+     *     the end of the buffer
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     {@link #length()}
      */
     default int getInt(final long offset) {
         checkOffset(offset, length());
         if ((length() - offset) < Integer.BYTES) {
             throw new BufferUnderflowException();
         }
-        // False positive: bytes in "duplicated" fragments are read in opposite order for big vs. little endian
+        // False positive: bytes in "duplicated" fragments are read in opposite order for big vs.
+        // little endian
         //noinspection DuplicatedCode
         final byte b1 = getByte(offset);
         final byte b2 = getByte(offset + 1);
@@ -225,14 +242,17 @@ default int getInt(final long offset) {
     }
 
     /**
-     * Gets four bytes at the given {@code offset}, composing them into an int value according to specified byte
-     * order.
+     * Gets four bytes at the given {@code offset}, composing them into an int value according to
+     * specified byte order.
      *
      * @param offset The offset into data to get an integer from.
-     * @param byteOrder the byte order, aka endian to use. Should never be null. If it is null, BIG_ENDIAN is used.
+     * @param byteOrder the byte order (endianness) to use. Should never be null; if it is null,
+     *     BIG_ENDIAN is used.
      * @return The int value at the given {@code offset}
-     * @throws BufferUnderflowException If there are fewer than four bytes from {@code offset} to the end of the buffer
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than {@link #length()}
+     * @throws BufferUnderflowException If there are fewer than four bytes from {@code offset} to
+     *     the end of the buffer
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     {@link #length()}
      */
     default int getInt(final long offset, @NonNull final ByteOrder byteOrder) {
         if ((length() - offset) < Integer.BYTES) {
@@ -240,7 +260,8 @@ default int getInt(final long offset, @NonNull final ByteOrder byteOrder) {
         }
         if (byteOrder == ByteOrder.LITTLE_ENDIAN) {
             checkOffset(offset, length());
-            // False positive: bytes in "duplicated" fragments are read in opposite order for big vs. little endian
+            // False positive: bytes in "duplicated" fragments are read in opposite order for big
+            // vs. little endian
             //noinspection DuplicatedCode
             final byte b4 = getByte(offset);
             final byte b3 = getByte(offset + 1);
@@ -253,47 +274,55 @@ default int getInt(final long offset, @NonNull final ByteOrder byteOrder) {
     }
 
     /**
-     * Gets four bytes at the given {@code offset}, composing them into an unsigned int value according to the
-     * Java standard big-endian byte order.
+     * Gets four bytes at the given {@code offset}, composing them into an unsigned int value
+     * according to the Java standard big-endian byte order.
      *
      * @param offset The offset into data to get an unsigned integer from.
      * @return The int value at the given {@code offset}
-     * @throws BufferUnderflowException If there are fewer than four bytes from {@code offset} to the end of the buffer
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than {@link #length()}
+     * @throws BufferUnderflowException If there are fewer than four bytes from {@code offset} to
+     *     the end of the buffer
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     {@link #length()}
      */
     default long getUnsignedInt(final long offset) {
         return (getInt(offset)) & 0xFFFFFFFFL;
     }
 
     /**
-     * Gets four bytes at the given {@code offset}, composing them into an unsigned int value according to
-     * specified byte order.
+     * Gets four bytes at the given {@code offset}, composing them into an unsigned int value
+     * according to specified byte order.
      *
      * @param offset The offset into data to get an unsigned integer from.
-     * @param byteOrder the byte order, aka endian to use. Should never be null. If it is null, BIG_ENDIAN is used.
+     * @param byteOrder the byte order (endianness) to use. Should never be null; if it is null,
+     *     BIG_ENDIAN is used.
      * @return The int value at the given {@code offset}
-     * @throws BufferUnderflowException If there are fewer than four bytes from {@code offset} to the end of the buffer
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than {@link #length()}
+     * @throws BufferUnderflowException If there are fewer than four bytes from {@code offset} to
+     *     the end of the buffer
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     {@link #length()}
      */
     default long getUnsignedInt(final long offset, @NonNull final ByteOrder byteOrder) {
         return (getInt(offset, byteOrder)) & 0xFFFFFFFFL;
     }
 
     /**
-     * Gets eight bytes at the given {@code offset}, composing them into a long value according to the Java
-     * standard big-endian byte order.
+     * Gets eight bytes at the given {@code offset}, composing them into a long value according to
+     * the Java standard big-endian byte order.
      *
      * @param offset The offset into data to get a signed long from.
      * @return The long value at the given {@code offset}
-     * @throws BufferUnderflowException If there are fewer than eight bytes from {@code offset} to the end of the buffer
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than {@link #length()}
+     * @throws BufferUnderflowException If there are fewer than eight bytes from {@code offset} to
+     *     the end of the buffer
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     {@link #length()}
      */
     default long getLong(final long offset) {
         checkOffset(offset, length());
         if ((length() - offset) < Long.BYTES) {
             throw new BufferUnderflowException();
         }
-        // False positive: bytes in "duplicated" fragments are read in opposite order for big vs. little endian
+        // False positive: bytes in "duplicated" fragments are read in opposite order for big vs.
+        // little endian
         //noinspection DuplicatedCode
         final byte b1 = getByte(offset);
         final byte b2 = getByte(offset + 1);
@@ -303,25 +332,28 @@ default long getLong(final long offset) {
         final byte b6 = getByte(offset + 5);
         final byte b7 = getByte(offset + 6);
         final byte b8 = getByte(offset + 7);
-        return (((long)b1 << 56) +
-                ((long)(b2 & 255) << 48) +
-                ((long)(b3 & 255) << 40) +
-                ((long)(b4 & 255) << 32) +
-                ((long)(b5 & 255) << 24) +
-                ((b6 & 255) << 16) +
-                ((b7 & 255) <<  8) +
-                (b8 & 255));
+        return (((long) b1 << 56)
+                + ((long) (b2 & 255) << 48)
+                + ((long) (b3 & 255) << 40)
+                + ((long) (b4 & 255) << 32)
+                + ((long) (b5 & 255) << 24)
+                + ((b6 & 255) << 16)
+                + ((b7 & 255) << 8)
+                + (b8 & 255));
     }
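
The big-endian composition above can be cross-checked against ByteBuffer; a minimal sketch with made-up example bytes:

    import java.nio.ByteBuffer;

    public class LongCompositionDemo {
        public static void main(String[] args) {
            byte[] bytes = {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08};
            // Same shift-and-mask composition as getLong(offset): the first byte is most significant.
            long composed = ((long) bytes[0] << 56)
                    + ((long) (bytes[1] & 255) << 48)
                    + ((long) (bytes[2] & 255) << 40)
                    + ((long) (bytes[3] & 255) << 32)
                    + ((long) (bytes[4] & 255) << 24)
                    + ((bytes[5] & 255) << 16)
                    + ((bytes[6] & 255) << 8)
                    + (bytes[7] & 255);
            // ByteBuffer defaults to big-endian, so the two values agree: 0x0102030405060708.
            System.out.println(composed == ByteBuffer.wrap(bytes).getLong()); // true
        }
    }
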
 
     /**
-     * Gets eight bytes at the given {@code offset}, composing them into a long value according to specified byte
-     * order.
+     * Gets eight bytes at the given {@code offset}, composing them into a long value according to
+     * the specified byte order.
      *
      * @param offset The offset into data to get a signed long from.
-     * @param byteOrder the byte order, aka endian to use. Should never be null. If it is null, BIG_ENDIAN is used.
+     * @param byteOrder the byte order (endianness) to use. Should never be null; if null,
+     *     BIG_ENDIAN is used.
      * @return The long value at the given {@code offset}
-     * @throws BufferUnderflowException If there are fewer than eight bytes from {@code offset} to the end of the buffer
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than {@link #length()}
+     * @throws BufferUnderflowException If there are fewer than eight bytes from {@code offset} to
+     *     the end of the buffer
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     {@link #length()}
      */
     default long getLong(final long offset, @NonNull final ByteOrder byteOrder) {
         if ((length() - offset) < Long.BYTES) {
@@ -329,7 +361,8 @@ default long getLong(final long offset, @NonNull final ByteOrder byteOrder) {
         }
         if (byteOrder == ByteOrder.LITTLE_ENDIAN) {
             checkOffset(offset, length());
-            // False positive: bytes in "duplicated" fragments are read in opposite order for big vs. little endian
+            // False positive: bytes in "duplicated" fragments are read in opposite order for big
+            // vs. little endian
             //noinspection DuplicatedCode
             final byte b8 = getByte(offset);
             final byte b7 = getByte(offset + 1);
@@ -339,68 +372,78 @@ default long getLong(final long offset, @NonNull final ByteOrder byteOrder) {
             final byte b3 = getByte(offset + 5);
             final byte b2 = getByte(offset + 6);
             final byte b1 = getByte(offset + 7);
-            return (((long) b1 << 56) +
-                    ((long) (b2 & 255) << 48) +
-                    ((long) (b3 & 255) << 40) +
-                    ((long) (b4 & 255) << 32) +
-                    ((long) (b5 & 255) << 24) +
-                    ((b6 & 255) << 16) +
-                    ((b7 & 255) << 8) +
-                    (b8 & 255));
+            return (((long) b1 << 56)
+                    + ((long) (b2 & 255) << 48)
+                    + ((long) (b3 & 255) << 40)
+                    + ((long) (b4 & 255) << 32)
+                    + ((long) (b5 & 255) << 24)
+                    + ((b6 & 255) << 16)
+                    + ((b7 & 255) << 8)
+                    + (b8 & 255));
         } else {
             return getLong(offset);
         }
     }
 
     /**
-     * Gets four bytes at the given {@code offset}, composing them into a float value according to the Java
-     * standard big-endian byte order.
+     * Gets four bytes at the given {@code offset}, composing them into a float value according to
+     * the Java standard big-endian byte order.
      *
      * @param offset The offset into data to get a float from.
      * @return The float value at the given {@code offset}
-     * @throws BufferUnderflowException If there are fewer than four bytes from {@code offset} to the end of the buffer
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than {@link #length()}
+     * @throws BufferUnderflowException If there are fewer than four bytes from {@code offset} to
+     *     the end of the buffer
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     {@link #length()}
      */
     default float getFloat(final long offset) {
         return Float.intBitsToFloat(getInt(offset));
     }
 
     /**
-     * Gets four bytes at the given {@code offset}, composing them into a float value according to specified byte
-     * order.
+     * Gets four bytes at the given {@code offset}, composing them into a float value according to
+     * the specified byte order.
      *
      * @param offset The offset into data to get a float from.
-     * @param byteOrder the byte order, aka endian to use. Should never be null. If it is null, BIG_ENDIAN is used.
+     * @param byteOrder the byte order (endianness) to use. Should never be null; if null,
+     *     BIG_ENDIAN is used.
      * @return The float value at the given {@code offset}
-     * @throws BufferUnderflowException If there are fewer than four bytes from {@code offset} to the end of the buffer
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than {@link #length()}
+     * @throws BufferUnderflowException If there are fewer than four bytes from {@code offset} to
+     *     the end of the buffer
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     {@link #length()}
      */
     default float getFloat(final long offset, @NonNull final ByteOrder byteOrder) {
         return Float.intBitsToFloat(getInt(offset, byteOrder));
     }
 
     /**
-     * Gets eight bytes at the given {@code offset}, composing them into a double value according to the Java
-     * standard big-endian byte order.
+     * Gets eight bytes at the given {@code offset}, composing them into a double value according to
+     * the Java standard big-endian byte order.
      *
      * @param offset The offset into data to get a double from.
      * @return The double value at the given {@code offset}
-     * @throws BufferUnderflowException If there are fewer than eight bytes from {@code offset} to the end of the buffer
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than {@link #length()}
+     * @throws BufferUnderflowException If there are fewer than eight bytes from {@code offset} to
+     *     the end of the buffer
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     {@link #length()}
      */
     default double getDouble(final long offset) {
         return Double.longBitsToDouble(getLong(offset));
     }
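
For reference, the bit-pattern round trip behind getFloat/getDouble, shown with plain JDK calls (illustrative only):

    public class BitsRoundTripDemo {
        public static void main(String[] args) {
            // getDouble(offset) is getLong(offset) followed by longBitsToDouble; the reverse
            // direction uses doubleToLongBits, so the value survives the round trip exactly.
            double d = 3.141592653589793;
            System.out.println(Double.longBitsToDouble(Double.doubleToLongBits(d)) == d); // true

            float f = 1.5f;
            System.out.println(Float.intBitsToFloat(Float.floatToIntBits(f)) == f); // true
        }
    }
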
 
     /**
-     * Gets eight bytes at the given {@code offset}, composing them into a double value according to specified byte
-     * order.
+     * Gets eight bytes at the given {@code offset}, composing them into a double value according to
+     * the specified byte order.
      *
      * @param offset The offset into data to get a double from.
-     * @param byteOrder the byte order, aka endian to use. Should never be null. If it is null, BIG_ENDIAN is used.
+     * @param byteOrder the byte order (endianness) to use. Should never be null; if null,
+     *     BIG_ENDIAN is used.
      * @return The double value at the given {@code offset}
-     * @throws BufferUnderflowException If there are fewer than eight bytes from {@code offset} to the end of the buffer
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than {@link #length()}
+     * @throws BufferUnderflowException If there are fewer than eight bytes from {@code offset} to
+     *     the end of the buffer
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     {@link #length()}
      */
     default double getDouble(final long offset, @NonNull final ByteOrder byteOrder) {
         return Double.longBitsToDouble(getLong(offset, byteOrder));
@@ -412,8 +455,10 @@ default double getDouble(final long offset, @NonNull final ByteOrder byteOrder)
      * @param offset The offset into data to get a varint from.
      * @return the integer read in varint format
      * @param zigZag use protobuf zigZag varint encoding, optimized for negative numbers
-     * @throws BufferUnderflowException If the end of the buffer is encountered before the last segment of the varint
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than {@link #length()}
+     * @throws BufferUnderflowException If the end of the buffer is encountered before the last
+     *     segment of the varint
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     {@link #length()}
      */
     default int getVarInt(final long offset, final boolean zigZag) {
         return (int) getVarLong(Math.toIntExact(offset), zigZag);
@@ -425,8 +470,9 @@ default int getVarInt(final long offset, final boolean zigZag) {
      * @param offset The offset into data to get a varlong from.
      * @return the long read in varlong format
      * @param zigZag use protobuf zigZag varint encoding, optimized for negative numbers
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than {@link #length()},
-     *         or the end of the buffer is encountered before the last segment of the varlong
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     {@link #length()}, or the end of the buffer is encountered before the last segment of the
+     *     varlong
      * @throws DataEncodingException if the var long is malformed
      */
     default long getVarLong(final long offset, final boolean zigZag) {
@@ -442,7 +488,8 @@ default long getVarLong(final long offset, final boolean zigZag) {
     }
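
The zigZag flag above refers to the standard protobuf ZigZag mapping; a minimal decode sketch under that assumption (not the method's own implementation, whose body is elided from this hunk):

    public class ZigZagDemo {
        /** Standard protobuf ZigZag decode: 0 -> 0, 1 -> -1, 2 -> 1, 3 -> -2, ... */
        static long zigZagDecode(long raw) {
            return (raw >>> 1) ^ -(raw & 1);
        }

        public static void main(String[] args) {
            // A varint carries 7 payload bits per byte; the high bit flags a continuation.
            byte[] varint = {(byte) 0xAC, 0x02}; // 0x2C | (0x02 << 7) == 300
            long raw = (varint[0] & 0x7FL) | ((varint[1] & 0x7FL) << 7);
            System.out.println(raw);             // 300
            System.out.println(zigZagDecode(3)); // -2
        }
    }
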
 
     /**
-     * Get the contents of this entire buffer as a string, assuming bytes contained are a UTF8 encoded string.
+     * Get the contents of this entire buffer as a string, assuming the contained bytes are a
+     * UTF8 encoded string.
      *
      * @return data converted to string
      */
@@ -457,8 +504,10 @@ default String asUtf8String() {
      * @param offset the offset into the buffer to start reading bytes from
      * @param len the number of bytes to read
      * @return data converted to string
-     * @throws BufferUnderflowException if {@code len} is greater than {@link #length()} - {@code offset}
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than {@link #length()}
+     * @throws BufferUnderflowException if {@code len} is greater than {@link #length()} - {@code
+     *     offset}
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     {@link #length()}
      */
     @NonNull
     default String asUtf8String(final long offset, final long len) {
@@ -477,7 +526,8 @@ default String asUtf8String(final long offset, final long len) {
     }
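
As a reminder of the contract above, decoding a byte range as UTF-8 with the JDK looks like this (a sketch; the interface's implementation may differ):

    import java.nio.charset.StandardCharsets;

    public class Utf8SliceDemo {
        public static void main(String[] args) {
            byte[] data = "Hello, PBJ!".getBytes(StandardCharsets.UTF_8);
            // Decode len bytes starting at offset, mirroring asUtf8String(offset, len).
            String slice = new String(data, 7, 3, StandardCharsets.UTF_8);
            System.out.println(slice); // PBJ
        }
    }
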
 
     /**
-     * Check if the beginning of this buffer matches the given prefix bytes. An empty buffer matches an empty prefix.
+     * Check if the beginning of this buffer matches the given prefix bytes. An empty buffer matches
+     * an empty prefix.
      *
      * @param prefix the prefix bytes to compare with
      * @return true if prefix bytes match the beginning of the bytes from the buffer
@@ -488,24 +538,28 @@ default boolean matchesPrefix(@NonNull final byte[] prefix) {
     }
 
     /**
-     * Check if the bytes of this buffer beginning at the given {@code offset} contain the given bytes.
+     * Check if the bytes of this buffer beginning at the given {@code offset} contain the given
+     * bytes.
      *
      * @param offset the offset into this buffer to start comparing bytes at
      * @param bytes the bytes to compare with
      * @return true if bytes match the beginning of our bytes
      * @throws NullPointerException if prefix is null
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than {@link #length()}
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     {@link #length()}
      */
     default boolean contains(final long offset, @NonNull final byte[] bytes) {
         checkOffset(offset, length());
 
-        // If the number of bytes between offset and length is shorter than the bytes we're matching, then there
+        // If the number of bytes between offset and length is shorter than the bytes we're
+        // matching, then there
         // is NO WAY we could have a match, so we need to return false.
         if (length() - offset < bytes.length) {
             return false;
         }
 
-        // Check each byte one at a time until we find a mismatch or, we get to the end, and all bytes match.
+        // Check each byte one at a time until we find a mismatch or we get to the end and all
+        // bytes match.
         for (int i = 0; i < bytes.length; i++) {
             if (bytes[i] != getByte(offset + i)) {
                 return false;
@@ -526,13 +580,15 @@ default boolean matchesPrefix(@NonNull final RandomAccessData prefix) {
     }
 
     /**
-     * Check if the bytes of this buffer beginning at the given {@code offset} contain the given data.
+     * Check if the bytes of this buffer beginning at the given {@code offset} contain the given
+     * data.
      *
      * @param offset the offset into this buffer to start comparing bytes at
      * @param data the bytes to compare with
      * @return true if prefix bytes match the beginning of our bytes
      * @throws NullPointerException if data is null
-     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than {@link #length()}
+     * @throws IndexOutOfBoundsException If the given {@code offset} is negative or not less than
+     *     {@link #length()}
      */
     default boolean contains(final long offset, @NonNull final RandomAccessData data) {
         // If this data is EMPTY, return true if only the incoming data is EMPTY too.
@@ -555,16 +611,18 @@ default boolean contains(final long offset, @NonNull final RandomAccessData data
     }
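
A small usage sketch of the prefix/containment contract above, using the Bytes implementation already exercised by the tests in this patch:

    import com.hedera.pbj.runtime.io.buffer.Bytes;

    public class ContainsDemo {
        public static void main(String[] args) {
            Bytes buffer = Bytes.wrap(new byte[] {1, 2, 3, 4, 5});
            // matchesPrefix compares from offset 0; contains starts at the given offset.
            System.out.println(buffer.matchesPrefix(new byte[] {1, 2}));  // true
            System.out.println(buffer.contains(2, new byte[] {3, 4, 5})); // true
            // Too few bytes remain after offset 3, so this is false rather than an exception.
            System.out.println(buffer.contains(3, new byte[] {4, 5, 6})); // false
        }
    }
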
 
     /**
-     * A helper method for efficient copy of our data into an OutputStream without creating a defensive copy
-     * of the data. The implementation relies on a well-behaved OutputStream that doesn't modify the buffer data.
+     * A helper method for efficient copy of our data into an OutputStream without creating a
+     * defensive copy of the data. The implementation relies on a well-behaved OutputStream that
+     * doesn't modify the buffer data.
      *
      * @param outStream the OutputStream to copy into
      */
     void writeTo(@NonNull final OutputStream outStream);
 
     /**
-     * A helper method for efficient copy of our data into an OutputStream without creating a defensive copy
-     * of the data. The implementation relies on a well-behaved OutputStream that doesn't modify the buffer data.
+     * A helper method for efficient copy of our data into an OutputStream without creating a
+     * defensive copy of the data. The implementation relies on a well-behaved OutputStream that
+     * doesn't modify the buffer data.
      *
      * @param outStream The OutputStream to copy into.
      * @param offset The offset from the start of this {@link Bytes} object to get the bytes from.
@@ -574,11 +632,11 @@ default boolean contains(final long offset, @NonNull final RandomAccessData data
 
     /**
      * Throws {@code IndexOutOfBoundsException} if the given {@code offset} is negative.
-     * <p>
-     * This is used to check if the numeric value is valid. Note that the offset may still be
-     * larger than the largest valid offset, and it is assumed that the subsequent computations
-     * by the caller of this method will clamp it properly or run a full check including the length
-     * at a later time.
+     *
+     * <p>This is used to check if the numeric value is valid. Note that the offset may still be
+     * larger than the largest valid offset, and it is assumed that the subsequent computations by
+     * the caller of this method will clamp it properly or run a full check including the length at
+     * a later time.
      *
      * @param offset an offset in this RandomAccessData that the caller wants to access
      */
@@ -589,38 +647,40 @@ default void checkOffset(final long offset) {
     }
 
     /**
-     * Throws {@code IndexOutOfBoundsException} if the given {@code offset} is negative
-     * or greater than/equal to the given {@code length}.
+     * Throws {@code IndexOutOfBoundsException} if the given {@code offset} is negative or greater
+     * than/equal to the given {@code length}.
      *
      * @param offset an offset in this RandomAccessData that the caller wants to access
-     * @param length the maximum offset plus one (1) that is allowed to be accessed.
-     *               It may be equal to the total length of the underlying buffer,
-     *               or be less than the total length when a limit value is used.
+     * @param length the maximum offset plus one (1) that is allowed to be accessed. It may be equal
+     *     to the total length of the underlying buffer, or be less than the total length when a
+     *     limit value is used.
      */
     default void checkOffset(final long offset, final long length) {
         if (offset < 0 || offset >= length) {
-            throw new IndexOutOfBoundsException("offset " + offset + " is out of bounds for length " + length);
+            throw new IndexOutOfBoundsException(
+                    "offset " + offset + " is out of bounds for length " + length);
         }
     }
 
     /**
-     * Throws {@code IndexOutOfBoundsException} if the given {@code offset} is negative
-     * or greater than/equal to the given {@code length}, and throws {@code BufferUnderflowException}
-     * if the {@code offset + dataLength - 1} exceeds the given {@code length}.
-     * <p>
-     * checkOffsetToRead(offset, length, 1) is equivalent to checkOffset(offset, length)
-     * because the dataLength condition is always satisfied for dataLength == 1 byte
-     * as long as the initial offset is within the bounds.
+     * Throws {@code IndexOutOfBoundsException} if the given {@code offset} is negative or greater
+     * than/equal to the given {@code length}, and throws {@code BufferUnderflowException} if the
+     * {@code offset + dataLength - 1} exceeds the given {@code length}.
+     *
+     * <p>checkOffsetToRead(offset, length, 1) is equivalent to checkOffset(offset, length) because
+     * the dataLength condition is always satisfied for dataLength == 1 byte as long as the initial
+     * offset is within the bounds.
      *
      * @param offset an offset in this RandomAccessData that the caller wants to access
-     * @param length the maximum offset plus one (1) that is allowed to be accessed.
-     *               It may be equal to the total length of the underlying buffer,
-     *               or be less than the total length when a limit value is used.
+     * @param length the maximum offset plus one (1) that is allowed to be accessed. It may be equal
+     *     to the total length of the underlying buffer, or be less than the total length when a
+     *     limit value is used.
      * @param dataLength the length of the data to read
      */
     default void checkOffsetToRead(final long offset, final long length, final long dataLength) {
         if (offset < 0 || offset > length || (offset == length && dataLength != 0)) {
-            throw new IndexOutOfBoundsException("offset " + offset + " is out of bounds for length " + length);
+            throw new IndexOutOfBoundsException(
+                    "offset " + offset + " is out of bounds for length " + length);
         }
         if (offset > length - dataLength) {
             throw new BufferUnderflowException();
@@ -628,23 +688,24 @@ default void checkOffsetToRead(final long offset, final long length, final long
     }
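
To make the bounds wording above concrete, a standalone restatement of the checkOffsetToRead conditions with a few sample inputs (a sketch, not the interface itself):

    import java.nio.BufferUnderflowException;

    public class CheckOffsetDemo {
        // Same two checks as checkOffsetToRead: first the offset itself, then the read length.
        static void checkOffsetToRead(long offset, long length, long dataLength) {
            if (offset < 0 || offset > length || (offset == length && dataLength != 0)) {
                throw new IndexOutOfBoundsException("offset " + offset + " out of bounds for " + length);
            }
            if (offset > length - dataLength) {
                throw new BufferUnderflowException();
            }
        }

        public static void main(String[] args) {
            checkOffsetToRead(0, 10, 10); // fine: the read exactly fits
            checkOffsetToRead(9, 10, 1);  // fine: the dataLength == 1 case matches checkOffset(9, 10)
            try {
                checkOffsetToRead(5, 10, 6); // six bytes do not fit after offset 5
            } catch (BufferUnderflowException e) {
                System.out.println("underflow as expected");
            }
        }
    }
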
 
     /**
-     * Throws {@code IndexOutOfBoundsException} if the given {@code offset} is negative
-     * or greater than/equal to the given {@code length}, and throws {@code BufferOverflowException}
-     * if the {@code offset + dataLength - 1} exceeds the given {@code length}.
-     * <p>
-     * checkOffsetToWrite(offset, length, 1) is equivalent to checkOffset(offset, length)
-     * because the dataLength condition is always satisfied for dataLength == 1 byte
-     * as long as the initial offset is within the bounds.
+     * Throws {@code IndexOutOfBoundsException} if the given {@code offset} is negative or greater
+     * than/equal to the given {@code length}, and throws {@code BufferOverflowException} if the
+     * {@code offset + dataLength - 1} exceeds the given {@code length}.
+     *
+     * <p>checkOffsetToWrite(offset, length, 1) is equivalent to checkOffset(offset, length) because
+     * the dataLength condition is always satisfied for dataLength == 1 byte as long as the initial
+     * offset is within the bounds.
      *
      * @param offset an offset in this RandomAccessData that the caller wants to access
-     * @param length the maximum offset plus one (1) that is allowed to be accessed.
-     *               It may be equal to the total length of the underlying buffer,
-     *               or be less than the total length when a limit value is used.
+     * @param length the maximum offset plus one (1) that is allowed to be accessed. It may be equal
+     *     to the total length of the underlying buffer, or be less than the total length when a
+     *     limit value is used.
      * @param dataLength the length of the data to write
      */
     default void checkOffsetToWrite(final long offset, final long length, final long dataLength) {
         if (offset < 0 || offset > length || (offset == length && dataLength != 0)) {
-            throw new IndexOutOfBoundsException("offset " + offset + " is out of bounds for length " + length);
+            throw new IndexOutOfBoundsException(
+                    "offset " + offset + " is out of bounds for length " + length);
         }
         if (offset > length - dataLength) {
             throw new BufferOverflowException();
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessSequenceAdapter.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessSequenceAdapter.java
index 179690c3..3eaa9779 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessSequenceAdapter.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/buffer/RandomAccessSequenceAdapter.java
@@ -8,29 +8,33 @@
 import java.nio.ByteOrder;
 
 /**
- * A helper class used by {@link Bytes} (and some tests) to provide a {@link ReadableSequentialData} view of a
- * {@link RandomAccessData} instance. Since {@link RandomAccessData} has no position or limit, this class adds those,
- * and otherwise delegates to the underlying {@link RandomAccessData} instance.
+ * A helper class used by {@link Bytes} (and some tests) to provide a {@link ReadableSequentialData}
+ * view of a {@link RandomAccessData} instance. Since {@link RandomAccessData} has no position or
+ * limit, this class adds those, and otherwise delegates to the underlying {@link RandomAccessData}
+ * instance.
  */
 final class RandomAccessSequenceAdapter implements ReadableSequentialData {
     /** The delegate {@link RandomAccessData} instance */
     private final RandomAccessData delegate;
 
     /**
-     * The capacity of this sequence will be the difference between the <b>initial</b> position and the
-     * length of the delegate
+     * The capacity of this sequence will be the difference between the <b>initial</b> position and
+     * the length of the delegate
      */
     private final long capacity;
+
     /** The starting index into the delegate */
     private final long start;
+
     /** The position. Will be a value between 0 and the {@link #capacity} */
     private long position;
+
     /** The limit. Will be a value between {@link #position} and the {@link #capacity} */
     private long limit;
 
     /**
-     * Create a new instance where the position begins at 0 (the start of the random data buffer). The capacity of
-     * this instance will be the length of the delegate.
+     * Create a new instance where the position begins at 0 (the start of the random data buffer).
+     * The capacity of this instance will be the length of the delegate.
      */
     RandomAccessSequenceAdapter(@NonNull final RandomAccessData delegate) {
         this.delegate = delegate;
@@ -41,8 +45,8 @@ final class RandomAccessSequenceAdapter implements ReadableSequentialData {
 
     /**
      * Create a new instance where the start begins at the given start, which must be less than the
-     * length of the delegate. The capacity of this instance will be difference between the given {@code position}
-     * and the length of the delegate.
+     * length of the delegate. The capacity of this instance will be the difference between the
+     * given {@code start} and the length of the delegate.
      */
     RandomAccessSequenceAdapter(@NonNull final RandomAccessData delegate, final long start) {
         this.delegate = delegate;
@@ -51,7 +55,8 @@ final class RandomAccessSequenceAdapter implements ReadableSequentialData {
         this.limit = this.capacity;
 
         if (this.start > delegate.length()) {
-            throw new IllegalArgumentException("Start " + start + " is greater than the delegate length " + delegate.length());
+            throw new IllegalArgumentException(
+                    "Start " + start + " is greater than the delegate length " + delegate.length());
         }
     }
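
For orientation, a minimal same-package sketch of how this adapter turns random access into sequential reads; real callers normally reach it through Bytes rather than constructing it directly:

    package com.hedera.pbj.runtime.io.buffer;

    import com.hedera.pbj.runtime.io.ReadableSequentialData;

    // A same-package sketch, since RandomAccessSequenceAdapter is package-private.
    class AdapterSketch {
        public static void main(String[] args) {
            // Bytes implements RandomAccessData, so the adapter gives it a cursor-style view.
            Bytes data = Bytes.wrap(new byte[] {10, 20, 30, 40});
            ReadableSequentialData seq = new RandomAccessSequenceAdapter(data);
            while (seq.hasRemaining()) {
                System.out.print(seq.readByte() + " "); // 10 20 30 40
            }
        }
    }
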
 
@@ -120,7 +125,8 @@ public int readUnsignedByte() {
     @Override
     public long readBytes(@NonNull final byte[] dst, final int offset, final int maxLength) {
         if (offset < 0 || offset > dst.length) {
-            throw new IndexOutOfBoundsException("Offset cannot be negative or larger than last index");
+            throw new IndexOutOfBoundsException(
+                    "Offset cannot be negative or larger than last index");
         }
 
         final var length = Math.min(maxLength, remaining());
@@ -132,7 +138,8 @@ public long readBytes(@NonNull final byte[] dst, final int offset, final int max
     /** {@inheritDoc} */
     @Override
     public long readBytes(@NonNull final ByteBuffer dst) {
-        // False positive: duplicate code, yes, but two totally different data types that cannot reuse same code
+        // False positive: duplicate code, yes, but two totally different data types that cannot
+        // reuse same code
         //noinspection DuplicatedCode
         final var dstPos = dst.position();
         final var length = Math.min(dst.remaining(), remaining());
@@ -149,7 +156,8 @@ public long readBytes(@NonNull final ByteBuffer dst) {
     /** {@inheritDoc} */
     @Override
     public long readBytes(@NonNull final BufferedData dst) {
-        // False positive: duplicate code, yes, but two totally different data types that cannot reuse same code
+        // False positive: duplicate code, yes, but two totally different data types that cannot
+        // reuse same code
         //noinspection DuplicatedCode
         final var dstPos = dst.position();
         final var length = Math.min(dst.remaining(), remaining());
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/EOFException.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/EOFException.java
index 84bb308b..a395c9d2 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/EOFException.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/EOFException.java
@@ -3,7 +3,7 @@
 
 import java.nio.BufferUnderflowException;
 
-/**  This class is used as an exception to signal that the end of stream is reached when reading.  */
-public class EOFException  extends BufferUnderflowException {
+/** This class is used as an exception to signal that the end of stream is reached when reading. */
+public class EOFException extends BufferUnderflowException {
     private static final long serialVersionUID = 1799983599892333203L;
 }
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingData.java
index 30c37070..e63a7517 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingData.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingData.java
@@ -1,6 +1,8 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.stream;
 
+import static java.util.Objects.requireNonNull;
+
 import com.hedera.pbj.runtime.io.DataEncodingException;
 import com.hedera.pbj.runtime.io.ReadableSequentialData;
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
@@ -16,11 +18,9 @@
 import java.nio.file.Path;
 import java.nio.file.StandardOpenOption;
 
-import static java.util.Objects.requireNonNull;
-
 /**
- * <p>A {@code ReadableSequentialData} backed by an input stream. If the instance is closed,
- * the underlying {@link InputStream} is closed too.
+ * A {@code ReadableSequentialData} backed by an input stream. If the instance is closed, the
+ * underlying {@link InputStream} is closed too.
  */
 public class ReadableStreamingData implements ReadableSequentialData, Closeable {
 
@@ -112,7 +112,8 @@ public long limit() {
     /** {@inheritDoc} */
     @Override
     public void limit(long limit) {
-        // Any attempt to set the limit must be clamped between position on the low end and capacity on the high end.
+        // Any attempt to set the limit must be clamped between position on the low end and capacity
+        // on the high end.
         this.limit = Math.min(capacity(), Math.max(position, limit));
     }
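
The clamp above is a simple min/max sandwich; a tiny arithmetic sketch:

    public class LimitClampDemo {
        // Same expression as limit(...): never below position, never above capacity.
        static long clamp(long capacity, long position, long requested) {
            return Math.min(capacity, Math.max(position, requested));
        }

        public static void main(String[] args) {
            System.out.println(clamp(100, 10, 50));  // 50  (in range, kept)
            System.out.println(clamp(100, 10, 5));   // 10  (below position, raised)
            System.out.println(clamp(100, 10, 500)); // 100 (above capacity, lowered)
        }
    }
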
 
@@ -155,7 +156,8 @@ public byte readByte() {
     /**
      * {@inheritDoc}
      *
-     * @throws BufferUnderflowException if {@code count} would move the position past the {@link #limit()}.
+     * @throws BufferUnderflowException if {@code count} would move the position past the {@link
+     *     #limit()}.
      */
     @Override
     public void skip(final long n) {
@@ -239,7 +241,6 @@ public long readBytes(@NonNull final BufferedData dst) {
         return bytesRead;
     }
 
-
     @Override
     public long readVarLong(final boolean zigZag) {
         if (!hasRemaining()) {
@@ -268,5 +269,4 @@ public long readVarLong(final boolean zigZag) {
             throw new UncheckedIOException(e);
         }
     }
-
 }
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/WritableStreamingData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/WritableStreamingData.java
index 3f4824df..017fca88 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/WritableStreamingData.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/io/stream/WritableStreamingData.java
@@ -11,27 +11,30 @@
 import java.io.OutputStream;
 import java.io.UncheckedIOException;
 import java.nio.BufferOverflowException;
-import java.nio.BufferUnderflowException;
 import java.nio.ByteBuffer;
 import java.util.Objects;
 
 /**
- * <p>A {@code WritableSequentialData} backed by an output stream. If the instance is closed,
- * the underlying {@link OutputStream} is closed too.
+ * A {@code WritableSequentialData} backed by an output stream. If the instance is closed, the
+ * underlying {@link OutputStream} is closed too.
  */
 public class WritableStreamingData implements WritableSequentialData, Closeable, Flushable {
 
     /** The underlying output stream */
     private final OutputStream out;
+
     /** The current position, aka the number of bytes written */
     private long position = 0;
+
     /** The current limit for writing, defaults to Long.MAX_VALUE, which is basically unlimited */
     private long limit = Long.MAX_VALUE;
-    /** The maximum capacity. Normally this is unbounded ({@link Long#MAX_VALUE})*/
+
+    /** The maximum capacity. Normally this is unbounded ({@link Long#MAX_VALUE}) */
     private final long capacity;
 
     /**
-     * Creates a {@code WritableStreamingData} built on top of the specified underlying output stream.
+     * Creates a {@code WritableStreamingData} built on top of the specified underlying output
+     * stream.
      *
      * @param out the underlying output stream to be written to, can not be null
      */
@@ -41,7 +44,8 @@ public WritableStreamingData(@NonNull final OutputStream out) {
     }
 
     /**
-     * Creates a {@code WritableStreamingData} built on top of the specified underlying output stream.
+     * Creates a {@code WritableStreamingData} built on top of the specified underlying output
+     * stream.
      *
      * @param out the underlying output stream to be written to, can not be null
      * @param capacity the maximum capacity of the stream
@@ -89,7 +93,8 @@ public long limit() {
     /** {@inheritDoc} */
     @Override
     public void limit(final long limit) {
-        // Any attempt to set the limit must be clamped between position on the low end and capacity on the high end.
+        // Any attempt to set the limit must be clamped between position on the low end and capacity
+        // on the high end.
         this.limit = Math.min(capacity(), Math.max(position, limit));
     }
 
@@ -103,14 +108,16 @@ public boolean hasRemaining() {
      * Move position forward by {@code count} bytes by writing zeros to the output stream.
      *
      * @param count number of bytes to skip. If 0 or negative, then no bytes are skipped.
-     * @throws BufferOverflowException if {@code count} would move the position past the {@link #limit()}.
+     * @throws BufferOverflowException if {@code count} would move the position past the {@link
+     *     #limit()}.
      * @throws UncheckedIOException if an I/O error occurs
      */
     @Override
     public void skip(final long count) {
         try {
             // We can only skip UP TO count.
-            // And if the maximum bytes we can end up skipping is not positive, then we can't skip any bytes.
+            // And if the maximum bytes we can end up skipping is not positive, then we can't skip
+            // any bytes.
             if (count > remaining()) {
                 throw new BufferOverflowException();
             }
@@ -118,10 +125,12 @@ public void skip(final long count) {
                 return;
             }
 
-            // Each byte skipped is a "zero" byte written to the output stream. To make this faster, we will support
-            // writing in chunks instead of a single byte at a time. We will keep writing chunks until we're done.
+            // Each byte skipped is a "zero" byte written to the output stream. To make this
+            // faster, we will support writing in chunks instead of a single byte at a time. We
+            // will keep writing chunks until we're done.
             final byte[] zeros = new byte[1024];
-            for (int i = 0; i < count;) {
+            for (int i = 0; i < count; ) {
                 final var toWrite = (int) Math.min(zeros.length, count - i);
                 out.write(zeros, 0, toWrite);
                 i += toWrite;
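
Because of the zero-fill above, skipping on a WritableStreamingData is observable on the underlying stream; a small sketch using the constructor and methods shown in this file:

    import com.hedera.pbj.runtime.io.stream.WritableStreamingData;
    import java.io.ByteArrayOutputStream;

    public class SkipWritesZerosDemo {
        public static void main(String[] args) {
            ByteArrayOutputStream sink = new ByteArrayOutputStream();
            WritableStreamingData out = new WritableStreamingData(sink);
            // Skipping is not free on a stream: it advances the position by writing zero bytes.
            out.skip(5);
            out.writeByte((byte) 7);
            System.out.println(sink.size()); // 6 -> five zero bytes followed by the value 7
        }
    }
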
@@ -137,9 +146,7 @@ public void skip(final long count) {
     // ================================================================================================================
     // WritableSequentialData Methods
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeByte(final byte b) {
         if (position >= limit) {
@@ -154,9 +161,7 @@ public void writeByte(final byte b) {
         }
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeBytes(@NonNull final byte[] src, final int offset, final int length) {
         if (length < 0) {
@@ -179,9 +184,7 @@ public void writeBytes(@NonNull final byte[] src, final int offset, final int le
         }
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeBytes(@NonNull final byte[] src) {
         if (src.length > remaining()) {
@@ -196,9 +199,7 @@ public void writeBytes(@NonNull final byte[] src) {
         }
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeBytes(@NonNull final ByteBuffer src) {
         if (!src.hasArray()) {
@@ -227,11 +228,10 @@ public void writeBytes(@NonNull final ByteBuffer src) {
         }
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
-    public void writeBytes(@NonNull BufferedData src) throws BufferOverflowException, UncheckedIOException {
+    public void writeBytes(@NonNull BufferedData src)
+            throws BufferOverflowException, UncheckedIOException {
         if (remaining() < src.remaining()) {
             throw new BufferOverflowException();
         }
@@ -241,9 +241,7 @@ public void writeBytes(@NonNull BufferedData src) throws BufferOverflowException
         position += len;
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void writeBytes(@NonNull final RandomAccessData src) {
         final long len = src.length();
@@ -257,9 +255,7 @@ public void writeBytes(@NonNull final RandomAccessData src) {
     // ================================================================================================================
     // Flushable Methods
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public void flush() throws IOException {
         out.flush();
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/CharBufferToWritableSequentialData.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/CharBufferToWritableSequentialData.java
index b1749d24..1e22c2e9 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/CharBufferToWritableSequentialData.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/CharBufferToWritableSequentialData.java
@@ -4,15 +4,16 @@
 import com.hedera.pbj.runtime.io.ReadableSequentialData;
 import com.hedera.pbj.runtime.io.WritableSequentialData;
 import edu.umd.cs.findbugs.annotations.NonNull;
-
 import java.io.UncheckedIOException;
 import java.nio.BufferUnderflowException;
 import java.nio.CharBuffer;
 
 /**
- * <p>A {@code WritableSequentialData} backed by a {@link CharBuffer}. It only supports writing UTF8 strings.
+ * A {@code WritableSequentialData} backed by a {@link CharBuffer}. It only supports writing UTF8
+ * strings.
  */
-public class CharBufferToWritableSequentialData implements WritableSequentialData, ReadableSequentialData {
+public class CharBufferToWritableSequentialData
+        implements WritableSequentialData, ReadableSequentialData {
     private final CharBuffer charBuffer;
 
     public CharBufferToWritableSequentialData(CharBuffer charBuffer) {
@@ -70,6 +71,4 @@ public void writeUTF8(@NonNull String value) {
     public byte readByte() {
         throw new UnsupportedOperationException();
     }
-
-
 }
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/NoToStringWrapper.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/NoToStringWrapper.java
index ea378578..acb64ce8 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/NoToStringWrapper.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/NoToStringWrapper.java
@@ -3,9 +3,7 @@
 
 import java.util.Objects;
 
-/**
- * Wrapper for arguments to avoid expensive toString() calls during junit tests
- */
+/** Wrapper for arguments to avoid expensive toString() calls during junit tests */
 public final class NoToStringWrapper<T> {
     private final T value;
     private final String toString;
@@ -39,9 +37,7 @@ public String toString() {
         return toString;
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public boolean equals(Object o) {
         if (this == o) return true;
@@ -50,9 +46,7 @@ public boolean equals(Object o) {
         return value.equals(that.value);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    /** {@inheritDoc} */
     @Override
     public int hashCode() {
         return Objects.hash(value);
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/Sneaky.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/Sneaky.java
index f423c000..e0fd3ef2 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/Sneaky.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/Sneaky.java
@@ -1,24 +1,22 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.test;
 
-/**
- * A utility class that implements sneakyThrow().
- */
+/** A utility class that implements sneakyThrow(). */
 public final class Sneaky {
     /**
-     * Throw a checked exception pretending that it's unchecked,
-     * and also pretend to return a value for convenience.
+     * Throw a checked exception pretending that it's unchecked, and also pretend to return a value
+     * for convenience.
      *
-     * A non-void method could perform `return sneakyThrow(ex);` to avoid
-     * adding an extra line of code with a no-op return statement.
-     * A void method could just call this method and not worry about the return value.
+     * <p>A non-void method could perform `return sneakyThrow(ex);` to avoid adding an extra line of
+     * code with a no-op return statement. A void method could just call this method and not worry
+     * about the return value.
      *
      * @param throwable any exception, even a checked exception
      * @return this method never really returns a value, but javac thinks it could
      * @param <E> an exception type that javac assumes is an unchecked exception
      * @param <R> a fake return type for convenience of calling this from non-void methods
-     * @throws E this method always throws its argument throwable regardless of its type,
-     *           but javac thinks it's of type E, which it assumes to be an unchecked exception.
+     * @throws E this method always throws its argument throwable regardless of its type, but javac
+     *     thinks it's of type E, which it assumes to be an unchecked exception.
      */
     public static <E extends Throwable, R> R sneakyThrow(final Throwable throwable) throws E {
         throw (E) throwable;
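
A short usage sketch of the pattern described in the javadoc above (illustrative caller code, not part of the patch):

    import com.hedera.pbj.runtime.test.Sneaky;
    import java.io.IOException;

    public class SneakyDemo {
        // `return Sneaky.sneakyThrow(e)` keeps javac happy in a non-void method without a dummy
        // return statement, even though the checked IOException is what actually escapes.
        static String readValue() {
            try {
                throw new IOException("boom");
            } catch (IOException e) {
                return Sneaky.sneakyThrow(e);
            }
        }

        public static void main(String[] args) {
            try {
                readValue();
            } catch (Exception e) {
                System.out.println(e); // java.io.IOException: boom
            }
        }
    }
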
diff --git a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/UncheckedThrowingFunction.java b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/UncheckedThrowingFunction.java
index 1c5365c8..dc71d3ac 100644
--- a/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/UncheckedThrowingFunction.java
+++ b/pbj-core/pbj-runtime/src/main/java/com/hedera/pbj/runtime/test/UncheckedThrowingFunction.java
@@ -10,9 +10,8 @@
  * @param <T> function argument type
  * @param <R> function return type
  */
-public final record UncheckedThrowingFunction<T, R>(
-        ThrowingFunction<T, R> function
-) implements Function<T, R>  {
+public final record UncheckedThrowingFunction<T, R>(ThrowingFunction<T, R> function)
+        implements Function<T, R> {
 
     /** A function that can throw checked exceptions. */
     public static interface ThrowingFunction<T, R> {
diff --git a/pbj-core/pbj-runtime/src/main/java/module-info.java b/pbj-core/pbj-runtime/src/main/java/module-info.java
index 6c85816f..ff038863 100644
--- a/pbj-core/pbj-runtime/src/main/java/module-info.java
+++ b/pbj-core/pbj-runtime/src/main/java/module-info.java
@@ -1,10 +1,8 @@
 // SPDX-License-Identifier: Apache-2.0
 /** Runtime module of code needed by PBJ generated code at runtime. */
 module com.hedera.pbj.runtime {
-
     requires jdk.unsupported;
     requires transitive org.antlr.antlr4.runtime;
-
     requires static com.github.spotbugs.annotations;
 
     exports com.hedera.pbj.runtime;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoParserToolsTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoParserToolsTest.java
index 02470ac9..747773e2 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoParserToolsTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoParserToolsTest.java
@@ -19,7 +19,6 @@
 import static com.hedera.pbj.runtime.ProtoWriterTools.writeString;
 import static com.hedera.pbj.runtime.ProtoWriterToolsTest.createFieldDefinition;
 import static com.hedera.pbj.runtime.ProtoWriterToolsTest.randomVarSizeString;
-import static java.lang.Integer.MAX_VALUE;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertThrows;
 
@@ -27,13 +26,6 @@
 import com.hedera.pbj.runtime.io.buffer.Bytes;
 import com.hedera.pbj.runtime.io.stream.ReadableStreamingData;
 import com.hedera.pbj.runtime.test.UncheckedThrowingFunction;
-import net.bytebuddy.utility.RandomString;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.params.ParameterizedTest;
-import org.junit.jupiter.params.provider.EnumSource;
-import org.junit.jupiter.params.provider.ValueSource;
-import test.proto.Apple;
-
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.nio.BufferUnderflowException;
@@ -42,6 +34,12 @@
 import java.util.function.Function;
 import java.util.function.Supplier;
 import java.util.random.RandomGenerator;
+import net.bytebuddy.utility.RandomString;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.EnumSource;
+import org.junit.jupiter.params.provider.ValueSource;
+import test.proto.Apple;
 
 class ProtoParserToolsTest {
 
@@ -49,7 +47,8 @@ class ProtoParserToolsTest {
 
     @Test
     void testReadInt32() {
-        testRead(rng::nextInt,
+        testRead(
+                rng::nextInt,
                 (d, v) -> d.writeVarInt(v, false),
                 ProtoParserTools::readInt32,
                 // in this case the size may be up to 10 bytes in case of negative numbers,
@@ -59,7 +58,8 @@ void testReadInt32() {
 
     @Test
     void testReadInt64() {
-        testRead(rng::nextLong,
+        testRead(
+                rng::nextLong,
                 (d, v) -> d.writeVarLong(v, false),
                 ProtoParserTools::readInt64,
                 // in this case the size may be 10 bytes, because we don't use zigzag encoding
@@ -68,8 +68,8 @@ void testReadInt64() {
 
     @Test
     void testReadUint32() {
-        testRead(() ->
-                rng.nextInt(0, Integer.MAX_VALUE),
+        testRead(
+                () -> rng.nextInt(0, Integer.MAX_VALUE),
                 (d, v) -> d.writeVarInt(v, false),
                 ProtoParserTools::readUint32,
                 // the size may vary from 1 to 5 bytes
@@ -78,7 +78,8 @@ void testReadUint32() {
 
     @Test
     void testReadUint64() {
-        testRead(rng::nextLong,
+        testRead(
+                rng::nextLong,
                 (d, v) -> d.writeVarLong(v, false),
                 ProtoParserTools::readUint64,
                 // the size may vary from 1 to 10 bytes
@@ -88,13 +89,17 @@ void testReadUint64() {
     @ParameterizedTest
     @ValueSource(ints = {0, 1})
     void testReadBool(final int value) {
-        testRead(() -> value != 0, (d, v) -> d.writeVarInt(value, false), input -> {
-            try {
-                return ProtoParserTools.readBool(input);
-            } catch (IOException e) {
-                throw new RuntimeException(e);
-            }
-        }, 1);
+        testRead(
+                () -> value != 0,
+                (d, v) -> d.writeVarInt(value, false),
+                input -> {
+                    try {
+                        return ProtoParserTools.readBool(input);
+                    } catch (IOException e) {
+                        throw new RuntimeException(e);
+                    }
+                },
+                1);
     }
 
     @ParameterizedTest
@@ -103,10 +108,10 @@ void testReadEnum(int value) {
         testRead(() -> value, (d, v) -> d.writeVarInt(value, false), ProtoParserTools::readEnum, 1);
     }
 
-
     @Test
     void testReadSignedInt32() {
-        testRead(rng::nextInt,
+        testRead(
+                rng::nextInt,
                 (d, v) -> d.writeVarInt(v, true),
                 ProtoParserTools::readSignedInt32,
                 Integer.BYTES + 1);
@@ -114,7 +119,8 @@ void testReadSignedInt32() {
 
     @Test
     void testReadSignedInt64() {
-        testRead(rng::nextLong,
+        testRead(
+                rng::nextLong,
                 (d, v) -> d.writeVarLong(v, true),
                 ProtoParserTools::readSignedInt64,
                 Long.BYTES + 2);
@@ -122,7 +128,8 @@ void testReadSignedInt64() {
 
     @Test
     void testReadSignedFixedInt32() {
-        testRead(rng::nextInt,
+        testRead(
+                rng::nextInt,
                 (d, v) -> d.writeInt(v, ByteOrder.LITTLE_ENDIAN),
                 ProtoParserTools::readSignedFixed32,
                 Integer.BYTES);
@@ -130,7 +137,8 @@ void testReadSignedFixedInt32() {
 
     @Test
     void testReadFixedInt32() {
-        testRead(rng::nextInt,
+        testRead(
+                rng::nextInt,
                 (d, v) -> d.writeInt(v, ByteOrder.LITTLE_ENDIAN),
                 ProtoParserTools::readFixed32,
                 Integer.BYTES);
@@ -138,7 +146,8 @@ void testReadFixedInt32() {
 
     @Test
     void testReadSginedFixed64() {
-        testRead(rng::nextLong,
+        testRead(
+                rng::nextLong,
                 (d, v) -> d.writeLong(v, ByteOrder.LITTLE_ENDIAN),
                 ProtoParserTools::readSignedFixed64,
                 Long.BYTES);
@@ -146,7 +155,8 @@ void testReadSginedFixed64() {
 
     @Test
     void testReadFixed64() {
-        testRead(rng::nextLong,
+        testRead(
+                rng::nextLong,
                 (d, v) -> d.writeLong(v, ByteOrder.LITTLE_ENDIAN),
                 ProtoParserTools::readFixed64,
                 Long.BYTES);
@@ -154,7 +164,8 @@ void testReadFixed64() {
 
     @Test
     void testReadFloat() {
-        testRead(rng::nextFloat,
+        testRead(
+                rng::nextFloat,
                 (d, v) -> d.writeFloat(v, ByteOrder.LITTLE_ENDIAN),
                 ProtoParserTools::readFloat,
                 Long.BYTES);
@@ -162,7 +173,8 @@ void testReadFloat() {
 
     @Test
     void testReadDouble() {
-        testRead(rng::nextDouble,
+        testRead(
+                rng::nextDouble,
                 (d, v) -> d.writeDouble(v, ByteOrder.LITTLE_ENDIAN),
                 ProtoParserTools::readDouble,
                 Long.BYTES);
@@ -173,7 +185,8 @@ void testReadString() {
         final int length = rng.nextInt(0, 100);
         final RandomString randomString = new RandomString(length);
 
-        testRead(randomString::nextString,
+        testRead(
+                randomString::nextString,
                 (d, v) -> {
                     d.writeVarInt(length, false); // write the size first
                     d.writeUTF8(v);
@@ -192,7 +205,8 @@ void testReadString_maxSize() throws IOException {
         data.writeVarInt(maxSize + 1, false); // write the size first
         data.writeBytes(byteArray);
         final ReadableStreamingData streamingData = new ReadableStreamingData(data.toInputStream());
-        assertThrows(ParseException.class, () -> ProtoParserTools.readString(streamingData, maxSize));
+        assertThrows(
+                ParseException.class, () -> ProtoParserTools.readString(streamingData, maxSize));
     }
 
     @Test
@@ -206,9 +220,10 @@ void testReadString_incomplete() throws IOException {
         final byte[] bytes = data.toInputStream().readAllBytes();
         final byte[] incompleteCopy = new byte[bytes.length - 1];
         System.arraycopy(bytes, 0, incompleteCopy, 0, bytes.length - 1);
-        final ReadableStreamingData streamingData = new ReadableStreamingData(new ByteArrayInputStream(incompleteCopy));
-        assertThrows(BufferUnderflowException.class, () -> ProtoParserTools.readString(streamingData));
-
+        final ReadableStreamingData streamingData =
+                new ReadableStreamingData(new ByteArrayInputStream(incompleteCopy));
+        assertThrows(
+                BufferUnderflowException.class, () -> ProtoParserTools.readString(streamingData));
     }
 
     @Test
@@ -218,7 +233,8 @@ void testReadBytes() {
         rng.nextBytes(byteArray);
         final Bytes bytes = Bytes.wrap(byteArray);
 
-        testRead(() -> bytes,
+        testRead(
+                () -> bytes,
                 (d, v) -> {
                     d.writeVarInt(length, false); // write the size first
                     d.writeBytes(v);
@@ -237,7 +253,8 @@ void testReadBytes_maxSize() throws IOException {
         data.writeVarInt(maxSize + 1, false); // write the size first
         data.writeBytes(byteArray);
         final ReadableStreamingData streamingData = new ReadableStreamingData(data.toInputStream());
-        assertThrows(ParseException.class, () -> ProtoParserTools.readBytes(streamingData, maxSize));
+        assertThrows(
+                ParseException.class, () -> ProtoParserTools.readBytes(streamingData, maxSize));
     }
 
     @Test
@@ -251,13 +268,20 @@ void testReadBytes_incomplete() throws IOException {
         final byte[] bytes = data.toInputStream().readAllBytes();
         final byte[] incompleteCopy = new byte[bytes.length - 1];
         System.arraycopy(bytes, 0, incompleteCopy, 0, bytes.length - 1);
-        assertThrows(BufferUnderflowException.class,
-                () -> ProtoParserTools.readString(new ReadableStreamingData(new ByteArrayInputStream(incompleteCopy))));
-        assertThrows(BufferUnderflowException.class,
-                () -> ProtoParserTools.readBytes(new ReadableStreamingData(new ByteArrayInputStream(incompleteCopy))));
+        assertThrows(
+                BufferUnderflowException.class,
+                () ->
+                        ProtoParserTools.readString(
+                                new ReadableStreamingData(
+                                        new ByteArrayInputStream(incompleteCopy))));
+        assertThrows(
+                BufferUnderflowException.class,
+                () ->
+                        ProtoParserTools.readBytes(
+                                new ReadableStreamingData(
+                                        new ByteArrayInputStream(incompleteCopy))));
     }
 
-
     @Test
     void testReadNextFieldNumber() throws IOException {
         BufferedData bufferedData = BufferedData.allocate(100);
@@ -265,13 +289,17 @@ void testReadNextFieldNumber() throws IOException {
         final String appleStr = randomVarSizeString();
         final Apple apple = Apple.newBuilder().setVariety(appleStr).build();
 
-        writeMessage(bufferedData, definition, apple, (data, out) -> out.writeBytes(data.toByteArray()), Apple::getSerializedSize);
+        writeMessage(
+                bufferedData,
+                definition,
+                apple,
+                (data, out) -> out.writeBytes(data.toByteArray()),
+                Apple::getSerializedSize);
         bufferedData.flip();
 
         assertEquals(definition.number(), readNextFieldNumber(bufferedData));
     }
 
-
     @Test
     void testSkipField() throws IOException {
         final String valToRead = randomVarSizeString();
@@ -307,7 +335,9 @@ void testSkipField_maxSize() throws IOException {
         data.writeVarInt(maxSize + 1, false); // write the size first
         data.writeBytes(byteArray);
         final ReadableStreamingData streamingData = new ReadableStreamingData(data.toInputStream());
-        assertThrows(ParseException.class, () -> ProtoParserTools.skipField(streamingData, WIRE_TYPE_DELIMITED, maxSize));
+        assertThrows(
+                ParseException.class,
+                () -> ProtoParserTools.skipField(streamingData, WIRE_TYPE_DELIMITED, maxSize));
     }
 
     @ParameterizedTest
@@ -321,15 +351,15 @@ private static void skipTag(BufferedData data) {
         data.readVarInt(false);
     }
 
-    private static <T> void testRead(final Supplier<? extends T> valueSupplier,
-                                     final BiConsumer<BufferedData, ? super T> valueWriter,
-                                     final Function<? super BufferedData, T> reader,
-                                     final int size) {
+    private static <T> void testRead(
+            final Supplier<? extends T> valueSupplier,
+            final BiConsumer<BufferedData, ? super T> valueWriter,
+            final Function<? super BufferedData, T> reader,
+            final int size) {
         final T value = valueSupplier.get();
         final BufferedData data = BufferedData.allocate(size);
         valueWriter.accept(data, value);
         data.flip();
         assertEquals(value, reader.apply(data));
     }
-
 }
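
The ProtoParserToolsTest hunks above keep reformatting the same round trip: write a varint size, write the payload, flip the buffer, and read it back, with the maxSize overloads rejecting oversized declarations via ParseException and truncated input surfacing as BufferUnderflowException. For orientation, here is a minimal standalone sketch of that length-prefixed layout, using only BufferedData calls that appear in this patch; the class name is illustrative, not part of the codebase.

    import com.hedera.pbj.runtime.io.buffer.BufferedData;
    import java.nio.charset.StandardCharsets;

    public class LengthPrefixSketch {
        public static void main(String[] args) {
            final String value = "hello"; // ASCII, so char count == UTF-8 byte count
            final BufferedData data = BufferedData.allocate(64);
            data.writeVarInt(value.length(), false); // write the size first, as in the tests
            data.writeUTF8(value);                   // then the raw UTF-8 payload
            data.flip();

            // Reading reverses the two steps: size varint, then exactly that many bytes.
            final int length = data.readVarInt(false);
            final String decoded =
                    new String(data.readBytes(length).toByteArray(), StandardCharsets.UTF_8);
            System.out.println(decoded); // prints "hello"
        }
    }
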
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoWriterToolsTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoWriterToolsTest.java
index 5456bdae..7b8486fc 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoWriterToolsTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/ProtoWriterToolsTest.java
@@ -69,15 +69,6 @@
 import com.hedera.pbj.runtime.io.buffer.Bytes;
 import com.hedera.pbj.runtime.io.buffer.RandomAccessData;
 import com.hedera.pbj.runtime.test.Sneaky;
-import net.bytebuddy.utility.RandomString;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.params.ParameterizedTest;
-import org.junit.jupiter.params.provider.Arguments;
-import org.junit.jupiter.params.provider.EnumSource;
-import org.junit.jupiter.params.provider.MethodSource;
-import test.proto.Apple;
-
 import java.io.IOException;
 import java.nio.BufferOverflowException;
 import java.nio.ByteOrder;
@@ -88,6 +79,14 @@
 import java.util.random.RandomGenerator;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
+import net.bytebuddy.utility.RandomString;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.EnumSource;
+import org.junit.jupiter.params.provider.MethodSource;
+import test.proto.Apple;
 
 class ProtoWriterToolsTest {
 
@@ -100,7 +99,9 @@ class ProtoWriterToolsTest {
 
     static {
         // to test logic branches unreachable otherwise
-        ProtoWriterTools.class.getClassLoader().setClassAssertionStatus(ProtoWriterTools.class.getName(), false);
+        ProtoWriterTools.class
+                .getClassLoader()
+                .setClassAssertionStatus(ProtoWriterTools.class.getName(), false);
     }
 
     private static final RandomString RANDOM_STRING = new RandomString(10);
@@ -123,7 +124,6 @@ void testWireType() {
         assertEquals(WIRE_TYPE_VARINT_OR_ZIGZAG, wireType(createFieldDefinition(UINT32)));
         assertEquals(WIRE_TYPE_VARINT_OR_ZIGZAG, wireType(createFieldDefinition(UINT64)));
 
-
         assertEquals(WIRE_TYPE_FIXED_32_BIT, wireType(createFieldDefinition(FIXED32)));
         assertEquals(WIRE_TYPE_FIXED_32_BIT, wireType(createFieldDefinition(SFIXED32)));
         assertEquals(WIRE_TYPE_FIXED_64_BIT, wireType(createFieldDefinition(FIXED64)));
@@ -150,7 +150,9 @@ void testWriteTagSpecialWireType() {
         FieldDefinition definition = createFieldDefinition(DOUBLE);
         writeTag(bufferedData, definition, WIRE_TYPE_FIXED_64_BIT);
         bufferedData.flip();
-        assertEquals((definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_FIXED_64_BIT.ordinal(), bufferedData.readVarInt(false));
+        assertEquals(
+                (definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_FIXED_64_BIT.ordinal(),
+                bufferedData.readVarInt(false));
     }
 
     @Test
@@ -165,7 +167,9 @@ void testWriteInteger_zero() {
 
     private static int nextNonZeroRandomInt() {
         int ret;
-        do { ret = RNG.nextInt(); } while (ret == 0);
+        do {
+            ret = RNG.nextInt();
+        } while (ret == 0);
         return ret;
     }
 
@@ -200,7 +204,9 @@ void testWriteInteger_sint32() {
     }
 
     @ParameterizedTest
-    @EnumSource(value = FieldType.class, names = {"SFIXED32", "FIXED32"})
+    @EnumSource(
+            value = FieldType.class,
+            names = {"SFIXED32", "FIXED32"})
     void testWriteInteger_fixed32(FieldType type) {
         FieldDefinition definition = createFieldDefinition(type);
         final int valToWrite = nextNonZeroRandomInt();
@@ -211,13 +217,27 @@ void testWriteInteger_fixed32(FieldType type) {
     }
 
     @ParameterizedTest
-    @EnumSource(value = FieldType.class, names = {
-            "DOUBLE", "FLOAT", "INT64", "UINT64", "SINT64",
-            "FIXED64", "SFIXED64", "BOOL",
-            "STRING", "BYTES", "ENUM", "MESSAGE"})
+    @EnumSource(
+            value = FieldType.class,
+            names = {
+                "DOUBLE",
+                "FLOAT",
+                "INT64",
+                "UINT64",
+                "SINT64",
+                "FIXED64",
+                "SFIXED64",
+                "BOOL",
+                "STRING",
+                "BYTES",
+                "ENUM",
+                "MESSAGE"
+            })
     void testWriteInteger_unsupported(FieldType type) {
         FieldDefinition definition = createFieldDefinition(type);
-        assertThrows(RuntimeException.class, () -> writeInteger(bufferedData, definition, RNG.nextInt()));
+        assertThrows(
+                RuntimeException.class,
+                () -> writeInteger(bufferedData, definition, RNG.nextInt()));
     }
 
     @Test
@@ -232,7 +252,9 @@ void testWriteLong_zero() {
 
     private static long nextNonZeroRandomLong() {
         long ret;
-        do { ret = RNG.nextLong(); } while (ret == 0L);
+        do {
+            ret = RNG.nextLong();
+        } while (ret == 0L);
         return ret;
     }
 
@@ -267,29 +289,48 @@ void testWriteLong_sint64() {
     }
 
     @ParameterizedTest
-    @EnumSource(value = FieldType.class, names = {"SFIXED64", "FIXED64"})
+    @EnumSource(
+            value = FieldType.class,
+            names = {"SFIXED64", "FIXED64"})
     void testWriteLong_fixed64(FieldType type) {
         FieldDefinition definition = createFieldDefinition(type);
         final long valToWrite = nextNonZeroRandomLong();
         writeLong(bufferedData, definition, valToWrite);
         bufferedData.flip();
-        assertEquals((definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_FIXED_64_BIT.ordinal(), bufferedData.readVarInt(false));
+        assertEquals(
+                (definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_FIXED_64_BIT.ordinal(),
+                bufferedData.readVarInt(false));
         assertEquals(valToWrite, bufferedData.readLong(ByteOrder.LITTLE_ENDIAN));
     }
 
     @ParameterizedTest
-    @EnumSource(value = FieldType.class, names = {
-            "DOUBLE", "FLOAT", "INT32", "UINT32", "SINT32",
-            "FIXED32", "SFIXED32", "BOOL",
-            "STRING", "BYTES", "ENUM", "MESSAGE"})
+    @EnumSource(
+            value = FieldType.class,
+            names = {
+                "DOUBLE",
+                "FLOAT",
+                "INT32",
+                "UINT32",
+                "SINT32",
+                "FIXED32",
+                "SFIXED32",
+                "BOOL",
+                "STRING",
+                "BYTES",
+                "ENUM",
+                "MESSAGE"
+            })
     void testWriteLong_unsupported(FieldType type) {
         FieldDefinition definition = createFieldDefinition(type);
-        assertThrows(RuntimeException.class, () -> writeLong(bufferedData, definition, RNG.nextInt()));
+        assertThrows(
+                RuntimeException.class, () -> writeLong(bufferedData, definition, RNG.nextInt()));
     }
 
     private static float nextNonZeroRandomFloat() {
         float ret;
-        do { ret = RNG.nextFloat(); } while (ret == 0);
+        do {
+            ret = RNG.nextFloat();
+        } while (ret == 0);
         return ret;
     }
 
@@ -305,7 +346,9 @@ void testWriteFloat() {
 
     private static double nextNonZeroRandomDouble() {
         double ret;
-        do { ret = RNG.nextDouble(); } while (ret == 0);
+        do {
+            ret = RNG.nextDouble();
+        } while (ret == 0);
         return ret;
     }
 
@@ -315,7 +358,9 @@ void testWriteDouble() {
         final double valToWrite = nextNonZeroRandomDouble();
         ProtoWriterTools.writeDouble(bufferedData, definition, valToWrite);
         bufferedData.flip();
-        assertEquals((definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_FIXED_64_BIT.ordinal(), bufferedData.readVarInt(false));
+        assertEquals(
+                (definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_FIXED_64_BIT.ordinal(),
+                bufferedData.readVarInt(false));
         assertEquals(valToWrite, bufferedData.readDouble(ByteOrder.LITTLE_ENDIAN));
     }
 
@@ -380,7 +425,9 @@ void testWriteString() throws IOException {
         String valToWrite = RANDOM_STRING.nextString();
         writeString(bufferedData, definition, valToWrite);
         bufferedData.flip();
-        assertEquals((definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_DELIMITED.ordinal(), bufferedData.readVarInt(false));
+        assertEquals(
+                (definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_DELIMITED.ordinal(),
+                bufferedData.readVarInt(false));
         int length = bufferedData.readVarInt(false);
         assertEquals(valToWrite, new String(bufferedData.readBytes(length).toByteArray()));
     }
@@ -424,7 +471,9 @@ void testWriteBytes() throws IOException {
         Bytes valToWrite = Bytes.wrap(RANDOM_STRING.nextString());
         writeBytes(bufferedData, definition, valToWrite);
         bufferedData.flip();
-        assertEquals((definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_DELIMITED.ordinal(), bufferedData.readVarInt(false));
+        assertEquals(
+                (definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_DELIMITED.ordinal(),
+                bufferedData.readVarInt(false));
         int length = bufferedData.readVarInt(false);
         assertEquals(valToWrite, bufferedData.readBytes(length));
     }
@@ -449,11 +498,20 @@ void testWriteMessage() throws IOException {
         FieldDefinition definition = createFieldDefinition(MESSAGE);
         String appleStr = RANDOM_STRING.nextString();
         Apple apple = Apple.newBuilder().setVariety(appleStr).build();
-        writeMessage(bufferedData, definition, apple, (data, out) -> out.writeBytes(data.toByteArray()), Apple::getSerializedSize);
+        writeMessage(
+                bufferedData,
+                definition,
+                apple,
+                (data, out) -> out.writeBytes(data.toByteArray()),
+                Apple::getSerializedSize);
         bufferedData.flip();
-        assertEquals((definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_DELIMITED.ordinal(), bufferedData.readVarInt(false));
+        assertEquals(
+                (definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_DELIMITED.ordinal(),
+                bufferedData.readVarInt(false));
         int length = bufferedData.readVarInt(false);
-        assertEquals(appleStr, Apple.parseFrom(bufferedData.readBytes(length).toByteArray()).getVariety());
+        assertEquals(
+                appleStr,
+                Apple.parseFrom(bufferedData.readBytes(length).toByteArray()).getVariety());
     }
 
     @Test
@@ -465,11 +523,14 @@ void testWriteOneRepeatedMessage() throws IOException {
         final Apple apple2 = Apple.newBuilder().setVariety(appleStr2).build();
         final BufferedData buf1 = BufferedData.allocate(256);
         final ProtoWriter<Apple> writer = (data, out) -> out.writeBytes(data.toByteArray());
-        ProtoWriterTools.writeMessageList(buf1, definition, List.of(apple1, apple2), writer, Apple::getSerializedSize);
+        ProtoWriterTools.writeMessageList(
+                buf1, definition, List.of(apple1, apple2), writer, Apple::getSerializedSize);
         final Bytes writtenBytes1 = buf1.getBytes(0, buf1.position());
         final BufferedData buf2 = BufferedData.allocate(256);
-        ProtoWriterTools.writeOneRepeatedMessage(buf2, definition, apple1, writer, Apple::getSerializedSize);
-        ProtoWriterTools.writeOneRepeatedMessage(buf2, definition, apple2, writer, Apple::getSerializedSize);
+        ProtoWriterTools.writeOneRepeatedMessage(
+                buf2, definition, apple1, writer, Apple::getSerializedSize);
+        ProtoWriterTools.writeOneRepeatedMessage(
+                buf2, definition, apple2, writer, Apple::getSerializedSize);
         final Bytes writtenBytes2 = buf2.getBytes(0, buf2.position());
         assertEquals(writtenBytes1, writtenBytes2);
     }
@@ -477,9 +538,16 @@ void testWriteOneRepeatedMessage() throws IOException {
     @Test
     void testWriteOneOfMessage() throws IOException {
         FieldDefinition definition = createOneOfFieldDefinition(MESSAGE);
-        writeMessage(bufferedData, definition, null, (data, out) -> out.writeBytes(data.toByteArray()), Apple::getSerializedSize);
+        writeMessage(
+                bufferedData,
+                definition,
+                null,
+                (data, out) -> out.writeBytes(data.toByteArray()),
+                Apple::getSerializedSize);
         bufferedData.flip();
-        assertEquals((definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_DELIMITED.ordinal(), bufferedData.readVarInt(false));
+        assertEquals(
+                (definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_DELIMITED.ordinal(),
+                bufferedData.readVarInt(false));
         int length = bufferedData.readVarInt(false);
         assertEquals(0, length);
     }
@@ -618,10 +686,13 @@ void testWriteOptionalString() throws IOException {
         writeOptionalString(bufferedData, definition, valToWrite);
         bufferedData.flip();
         assertTypeDelimitedTag(definition);
-        assertEquals(valToWrite.length() + TAG_SIZE + MIN_LENGTH_VAR_SIZE, bufferedData.readVarInt(false));
+        assertEquals(
+                valToWrite.length() + TAG_SIZE + MIN_LENGTH_VAR_SIZE,
+                bufferedData.readVarInt(false));
         assertTypeDelimitedTag(definition.type().optionalFieldDefinition);
         assertEquals(valToWrite.length(), bufferedData.readVarInt(false));
-        assertEquals(valToWrite, new String(bufferedData.readBytes(valToWrite.length()).toByteArray()));
+        assertEquals(
+                valToWrite, new String(bufferedData.readBytes(valToWrite.length()).toByteArray()));
     }
 
     @Test
@@ -640,7 +711,8 @@ void testWriteOptionalBytes() throws IOException {
         writeOptionalBytes(bufferedData, definition, Bytes.wrap(valToWrite));
         bufferedData.flip();
         assertTypeDelimitedTag(definition);
-        assertEquals(valToWrite.length + TAG_SIZE + MIN_LENGTH_VAR_SIZE, bufferedData.readVarInt(false));
+        assertEquals(
+                valToWrite.length + TAG_SIZE + MIN_LENGTH_VAR_SIZE, bufferedData.readVarInt(false));
         assertTypeDelimitedTag(definition.type().optionalFieldDefinition);
         assertEquals(valToWrite.length, bufferedData.readVarInt(false));
         assertArrayEquals(valToWrite, bufferedData.readBytes(valToWrite.length).toByteArray());
@@ -683,35 +755,48 @@ void testSizeOfVarInt32() {
     @Test
     void testSizeOfLong_int32() {
         FieldDefinition definition = createFieldDefinition(INT32);
-        assertEquals(TAG_SIZE + MAX_VAR_INT_SIZE,
-                sizeOfInteger(definition, randomLargeInt()));
+        assertEquals(TAG_SIZE + MAX_VAR_INT_SIZE, sizeOfInteger(definition, randomLargeInt()));
     }
 
     @Test
     void testSizeOfLong_uint32() {
         FieldDefinition definition = createFieldDefinition(UINT32);
-        assertEquals(TAG_SIZE + MAX_VAR_INT_SIZE,
-                sizeOfInteger(definition, randomLargeInt()));
+        assertEquals(TAG_SIZE + MAX_VAR_INT_SIZE, sizeOfInteger(definition, randomLargeInt()));
     }
 
     @Test
     void testSizeOfLong_sint32() {
         FieldDefinition definition = createFieldDefinition(SINT32);
-        assertEquals(TAG_SIZE + MAX_VAR_INT_SIZE,
-                sizeOfInteger(definition, randomLargeNegativeInt()));
+        assertEquals(
+                TAG_SIZE + MAX_VAR_INT_SIZE, sizeOfInteger(definition, randomLargeNegativeInt()));
     }
 
     @ParameterizedTest
-    @EnumSource(value = FieldType.class, names = {"SFIXED32", "FIXED32"})
+    @EnumSource(
+            value = FieldType.class,
+            names = {"SFIXED32", "FIXED32"})
     void testSizeOfLong_fixed32(FieldType type) {
         FieldDefinition definition = createFieldDefinition(type);
-        assertEquals(TAG_SIZE + Integer.BYTES,
-                sizeOfInteger(definition, randomLargeNegativeInt()));
+        assertEquals(TAG_SIZE + Integer.BYTES, sizeOfInteger(definition, randomLargeNegativeInt()));
     }
 
     @ParameterizedTest
-    @EnumSource(value = FieldType.class, names = {"DOUBLE", "FLOAT", "INT64", "UINT64", "SINT64",
-            "FIXED64", "SFIXED64", "BOOL", "STRING", "BYTES", "ENUM", "MESSAGE"})
+    @EnumSource(
+            value = FieldType.class,
+            names = {
+                "DOUBLE",
+                "FLOAT",
+                "INT64",
+                "UINT64",
+                "SINT64",
+                "FIXED64",
+                "SFIXED64",
+                "BOOL",
+                "STRING",
+                "BYTES",
+                "ENUM",
+                "MESSAGE"
+            })
     void testSizeOfInteger_notSupported(FieldType type) {
         FieldDefinition definition = createFieldDefinition(type);
         assertThrows(RuntimeException.class, () -> sizeOfInteger(definition, RNG.nextInt()));
@@ -720,36 +805,50 @@ void testSizeOfInteger_notSupported(FieldType type) {
     @Test
     void testSizeOfLong_int64() {
         FieldDefinition definition = createFieldDefinition(INT64);
-        assertEquals(TAG_SIZE + MAX_VAR_LONG_SIZE,
-                sizeOfLong(definition, randomLargeLong()));
+        assertEquals(TAG_SIZE + MAX_VAR_LONG_SIZE, sizeOfLong(definition, randomLargeLong()));
     }
 
     @Test
     void testSizeOfLong_uint64() {
         FieldDefinition definition = createFieldDefinition(UINT64);
-        assertEquals(TAG_SIZE + MAX_VAR_LONG_SIZE,
-                sizeOfLong(definition, randomLargeLong()));
+        assertEquals(TAG_SIZE + MAX_VAR_LONG_SIZE, sizeOfLong(definition, randomLargeLong()));
     }
 
     @Test
     void testSizeOfLong_sint64() {
         FieldDefinition definition = createFieldDefinition(SINT64);
         long value = randomLargeNegativeLong();
-        assertEquals(TAG_SIZE + MAX_VAR_LONG_SIZE + 1 /* zigzag encoding */,
+        assertEquals(
+                TAG_SIZE + MAX_VAR_LONG_SIZE + 1 /* zigzag encoding */,
                 sizeOfLong(definition, value));
     }
 
     @ParameterizedTest
-    @EnumSource(value = FieldType.class, names = {"SFIXED64", "FIXED64"})
+    @EnumSource(
+            value = FieldType.class,
+            names = {"SFIXED64", "FIXED64"})
     void testSizeOfLong_fixed64(FieldType type) {
         FieldDefinition definition = createFieldDefinition(type);
-        assertEquals(TAG_SIZE + Long.BYTES,
-                sizeOfLong(definition, randomLargeNegativeInt()));
+        assertEquals(TAG_SIZE + Long.BYTES, sizeOfLong(definition, randomLargeNegativeInt()));
     }
 
     @ParameterizedTest
-    @EnumSource(value = FieldType.class, names = {"DOUBLE", "FLOAT", "INT32", "UINT32", "SINT32",
-            "FIXED32", "SFIXED32", "BOOL", "STRING", "BYTES", "ENUM", "MESSAGE"})
+    @EnumSource(
+            value = FieldType.class,
+            names = {
+                "DOUBLE",
+                "FLOAT",
+                "INT32",
+                "UINT32",
+                "SINT32",
+                "FIXED32",
+                "SFIXED32",
+                "BOOL",
+                "STRING",
+                "BYTES",
+                "ENUM",
+                "MESSAGE"
+            })
     void testSizeOfLong_notSupported(FieldType type) {
         FieldDefinition definition = createFieldDefinition(type);
         assertThrows(RuntimeException.class, () -> sizeOfLong(definition, RNG.nextLong()));
@@ -798,35 +897,66 @@ void testSizeOfVarInt64() {
         assertEquals(10, sizeOfVarInt64(Long.MIN_VALUE));
     }
 
-
     @ParameterizedTest
-    @EnumSource(value = FieldType.class, names = {"INT32", "UINT32"})
+    @EnumSource(
+            value = FieldType.class,
+            names = {"INT32", "UINT32"})
     void testSizeOfIntegerList_int32(FieldType type) {
         FieldDefinition definition = createFieldDefinition(type);
-        assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE + 1 * 2 /* size of two unsigned var longs in the range [0, 128) */,
+        assertEquals(
+                TAG_SIZE
+                        + MIN_LENGTH_VAR_SIZE
+                        + 1 * 2 /* size of two unsigned var longs in the range [0, 128) */,
                 sizeOfIntegerList(definition, asList(RNG.nextInt(0, 127), RNG.nextInt(0, 128))));
     }
 
     @Test
     void testSizeOfIntegerList_sint32() {
         FieldDefinition definition = createFieldDefinition(SINT32);
-        assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE + 1 * 2 /* size of two unsigned var longs in the range (-64, 64) */,
+        assertEquals(
+                TAG_SIZE
+                        + MIN_LENGTH_VAR_SIZE
+                        + 1 * 2 /* size of two unsigned var longs in the range (-64, 64) */,
                 sizeOfIntegerList(definition, asList(RNG.nextInt(-63, 0), RNG.nextInt(0, 64))));
     }
 
     @ParameterizedTest
-    @EnumSource(value = FieldType.class, names = {"SFIXED32", "FIXED32"})
+    @EnumSource(
+            value = FieldType.class,
+            names = {"SFIXED32", "FIXED32"})
     void testSizeOfIntegerList_fixed(FieldType type) {
         FieldDefinition definition = createFieldDefinition(type);
-        assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE + Integer.BYTES * 2 /* size of two unsigned var longs in the range [0, 128) */,
+        assertEquals(
+                TAG_SIZE
+                        + MIN_LENGTH_VAR_SIZE
+                        + Integer.BYTES
+                                * 2 /* size of two unsigned var longs in the range [0, 128) */,
                 sizeOfIntegerList(definition, asList(RNG.nextInt(), RNG.nextInt())));
     }
 
     @ParameterizedTest
-    @EnumSource(value = FieldType.class, names = {"DOUBLE", "FLOAT", "INT64", "UINT64", "SINT64",
-            "FIXED64", "SFIXED64", "BOOL", "STRING", "BYTES", "ENUM", "MESSAGE"})
+    @EnumSource(
+            value = FieldType.class,
+            names = {
+                "DOUBLE",
+                "FLOAT",
+                "INT64",
+                "UINT64",
+                "SINT64",
+                "FIXED64",
+                "SFIXED64",
+                "BOOL",
+                "STRING",
+                "BYTES",
+                "ENUM",
+                "MESSAGE"
+            })
     void testSizeOfIntegerList_notSupported(FieldType type) {
-        assertThrows(RuntimeException.class, () -> sizeOfIntegerList(createFieldDefinition(type), asList(RNG.nextInt(), RNG.nextInt())));
+        assertThrows(
+                RuntimeException.class,
+                () ->
+                        sizeOfIntegerList(
+                                createFieldDefinition(type), asList(RNG.nextInt(), RNG.nextInt())));
     }
 
     @Test
@@ -836,38 +966,71 @@ void testSizeOfIntegerList_empty() {
 
     @Test
     void testSizeOfOneOfIntegerList_empty() {
-        assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE,
+        assertEquals(
+                TAG_SIZE + MIN_LENGTH_VAR_SIZE,
                 sizeOfLongList(createOneOfFieldDefinition(INT64), emptyList()));
     }
 
     @ParameterizedTest
-    @EnumSource(value = FieldType.class, names = {"INT64", "UINT64"})
+    @EnumSource(
+            value = FieldType.class,
+            names = {"INT64", "UINT64"})
     void testSizeOfLongList_int64(FieldType type) {
         FieldDefinition definition = createFieldDefinition(type);
-        assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE + 1 * 2 /* size of two unsigned var longs in the range [0, 128) */,
+        assertEquals(
+                TAG_SIZE
+                        + MIN_LENGTH_VAR_SIZE
+                        + 1 * 2 /* size of two unsigned var longs in the range [0, 128) */,
                 sizeOfLongList(definition, asList(RNG.nextLong(0, 127), RNG.nextLong(0, 128))));
     }
 
     @Test
     void testSizeOfLongList_sint64() {
         FieldDefinition definition = createFieldDefinition(SINT64);
-        assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE + 1 * 2 /* size of two unsigned var longs in the range (-64, 64) */,
+        assertEquals(
+                TAG_SIZE
+                        + MIN_LENGTH_VAR_SIZE
+                        + 1 * 2 /* size of two unsigned var longs in the range (-64, 64) */,
                 sizeOfLongList(definition, asList(RNG.nextLong(-63, 0), RNG.nextLong(0, 64))));
     }
 
     @ParameterizedTest
-    @EnumSource(value = FieldType.class, names = {"SFIXED64", "FIXED64"})
+    @EnumSource(
+            value = FieldType.class,
+            names = {"SFIXED64", "FIXED64"})
     void testSizeOfLongList_fixed(FieldType type) {
         FieldDefinition definition = createFieldDefinition(type);
-        assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE + Long.BYTES * 2 /* size of two unsigned var longs in the range [0, 128) */,
+        assertEquals(
+                TAG_SIZE
+                        + MIN_LENGTH_VAR_SIZE
+                        + Long.BYTES * 2 /* size of two unsigned var longs in the range [0, 128) */,
                 sizeOfLongList(definition, asList(RNG.nextLong(), RNG.nextLong())));
     }
 
     @ParameterizedTest
-    @EnumSource(value = FieldType.class, names = {"DOUBLE", "FLOAT", "INT32", "UINT32", "SINT32",
-            "FIXED32", "SFIXED32", "BOOL", "STRING", "BYTES", "ENUM", "MESSAGE"})
+    @EnumSource(
+            value = FieldType.class,
+            names = {
+                "DOUBLE",
+                "FLOAT",
+                "INT32",
+                "UINT32",
+                "SINT32",
+                "FIXED32",
+                "SFIXED32",
+                "BOOL",
+                "STRING",
+                "BYTES",
+                "ENUM",
+                "MESSAGE"
+            })
     void testSizeOfLongList_notSupported(FieldType type) {
-        assertThrows(RuntimeException.class, () -> sizeOfLongList(createFieldDefinition(type), asList(RNG.nextLong(), RNG.nextLong())));
+        assertThrows(
+                RuntimeException.class,
+                () ->
+                        sizeOfLongList(
+                                createFieldDefinition(type),
+                                asList(RNG.nextLong(), RNG.nextLong())));
     }
 
     @Test
@@ -877,14 +1040,16 @@ void testSizeOfLongList_empty() {
 
     @Test
     void testSizeOfOneOfLongList_empty() {
-        assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE,
+        assertEquals(
+                TAG_SIZE + MIN_LENGTH_VAR_SIZE,
                 sizeOfLongList(createOneOfFieldDefinition(INT64), emptyList()));
     }
 
     @Test
     void testSizeOfFloatList() {
         FieldDefinition definition = createFieldDefinition(FLOAT);
-        assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE + 2 * Float.BYTES,
+        assertEquals(
+                TAG_SIZE + MIN_LENGTH_VAR_SIZE + 2 * Float.BYTES,
                 sizeOfFloatList(definition, asList(RNG.nextFloat(), RNG.nextFloat())));
     }
 
@@ -895,14 +1060,16 @@ void testSizeOfFloatList_empty() {
 
     @Test
     void testSizeOfOneOfFloatList_empty() {
-        assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE,
+        assertEquals(
+                TAG_SIZE + MIN_LENGTH_VAR_SIZE,
                 sizeOfFloatList(createOneOfFieldDefinition(FLOAT), emptyList()));
     }
 
     @Test
     void testSizeOfDoubleList() {
         FieldDefinition definition = createFieldDefinition(DOUBLE);
-        assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE + 2 * Double.BYTES,
+        assertEquals(
+                TAG_SIZE + MIN_LENGTH_VAR_SIZE + 2 * Double.BYTES,
                 sizeOfDoubleList(definition, asList(RNG.nextDouble(), RNG.nextDouble())));
     }
 
@@ -913,19 +1080,21 @@ void testSizeOfDoubleList_empty() {
 
     @Test
     void testSizeOfOneOfDoubleList_empty() {
-        assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE,
+        assertEquals(
+                TAG_SIZE + MIN_LENGTH_VAR_SIZE,
                 sizeOfDoubleList(createOneOfFieldDefinition(DOUBLE), emptyList()));
     }
 
-
     @Test
-    void testSizeOfBooleanList(){
+    void testSizeOfBooleanList() {
         FieldDefinition definition = createFieldDefinition(BOOL);
-        assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE + 2, sizeOfBooleanList(definition, Arrays.asList(true, false)));
+        assertEquals(
+                TAG_SIZE + MIN_LENGTH_VAR_SIZE + 2,
+                sizeOfBooleanList(definition, Arrays.asList(true, false)));
     }
 
     @Test
-    void testSizeOfBooleanList_empty(){
+    void testSizeOfBooleanList_empty() {
         assertEquals(0, sizeOfBooleanList(createFieldDefinition(BOOL), Collections.emptyList()));
     }
 
@@ -945,7 +1114,8 @@ void testSizeOfEnumList() {
         when(enum1.protoOrdinal()).thenReturn(RNG.nextInt(1, 16));
         when(enum2.protoName()).thenReturn(RANDOM_STRING.nextString());
         List<EnumWithProtoMetadata> enums = asList(enum1, enum2);
-        assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE + enums.size(), sizeOfEnumList(definition, enums));
+        assertEquals(
+                TAG_SIZE + MIN_LENGTH_VAR_SIZE + enums.size(), sizeOfEnumList(definition, enums));
     }
 
     @Test
@@ -955,7 +1125,9 @@ void testSizeOfEnumList_empty() {
 
     @Test
     void testSizeOfOneOfEnumList_empty() {
-        assertEquals(TAG_SIZE + MIN_LENGTH_VAR_SIZE, sizeOfEnumList(createOneOfFieldDefinition(ENUM), emptyList()));
+        assertEquals(
+                TAG_SIZE + MIN_LENGTH_VAR_SIZE,
+                sizeOfEnumList(createOneOfFieldDefinition(ENUM), emptyList()));
     }
 
     @Test
@@ -964,7 +1136,8 @@ void testSizeOfStringList() {
         String str1 = randomVarSizeString();
         String str2 = randomVarSizeString();
 
-        assertEquals(MIN_LENGTH_VAR_SIZE * 2 + TAG_SIZE * 2 + str1.length() + str2.length(),
+        assertEquals(
+                MIN_LENGTH_VAR_SIZE * 2 + TAG_SIZE * 2 + str1.length() + str2.length(),
                 sizeOfStringList(definition, asList(str1, str2)));
     }
 
@@ -972,14 +1145,14 @@ void testSizeOfStringList() {
     void testSizeOfStringList_nullAndEmpty() {
         FieldDefinition definition = createFieldDefinition(STRING);
 
-        assertEquals(MIN_LENGTH_VAR_SIZE * 2 + TAG_SIZE * 2,
+        assertEquals(
+                MIN_LENGTH_VAR_SIZE * 2 + TAG_SIZE * 2,
                 sizeOfStringList(definition, asList(null, "")));
     }
 
     @Test
     void testSizeOfStringList_empty() {
-        assertEquals(0,
-                sizeOfStringList(createOneOfFieldDefinition(STRING), emptyList()));
+        assertEquals(0, sizeOfStringList(createOneOfFieldDefinition(STRING), emptyList()));
     }
 
     @Test
@@ -992,7 +1165,8 @@ void testSizeOfMessageList() {
 
         assertEquals(
                 MIN_LENGTH_VAR_SIZE * 2 + TAG_SIZE * 2 + appleStr1.length() + appleStr2.length(),
-                sizeOfMessageList(definition, Arrays.asList(apple1, apple2), v -> v.getVariety().length()));
+                sizeOfMessageList(
+                        definition, Arrays.asList(apple1, apple2), v -> v.getVariety().length()));
     }
 
     @Test
@@ -1009,8 +1183,7 @@ void testSizeOfBytesList() {
         Bytes bytes2 = Bytes.wrap(randomVarSizeString());
 
         assertEquals(
-                MIN_LENGTH_VAR_SIZE * 2 + TAG_SIZE * 2
-                        + bytes1.length() + bytes2.length(),
+                MIN_LENGTH_VAR_SIZE * 2 + TAG_SIZE * 2 + bytes1.length() + bytes2.length(),
                 sizeOfBytesList(definition, asList(bytes1, bytes2)));
     }
 
@@ -1074,7 +1247,7 @@ void testSizeOfString_oneOf() {
     }
 
     @Test
-    void testSizeOfString_oneOf_null(){
+    void testSizeOfString_oneOf_null() {
         final FieldDefinition definition = createOneOfFieldDefinition(STRING);
         assertEquals(MIN_LENGTH_VAR_SIZE + TAG_SIZE, sizeOfString(definition, null));
     }
@@ -1084,7 +1257,8 @@ void testSizeOfBytes() {
         final FieldDefinition definition = createFieldDefinition(BYTES);
         final Bytes bytes = Bytes.wrap(randomVarSizeString());
 
-        assertEquals(MIN_LENGTH_VAR_SIZE + TAG_SIZE + bytes.length(), sizeOfBytes(definition, bytes));
+        assertEquals(
+                MIN_LENGTH_VAR_SIZE + TAG_SIZE + bytes.length(), sizeOfBytes(definition, bytes));
     }
 
     @Test
@@ -1096,26 +1270,31 @@ void testSizeOfBytes_empty() {
     }
 
     @Test
-    void testSizeOfBytes_oneOf(){
+    void testSizeOfBytes_oneOf() {
         final FieldDefinition definition = createOneOfFieldDefinition(BYTES);
         final Bytes bytes = Bytes.wrap(randomVarSizeString());
 
-        assertEquals(MIN_LENGTH_VAR_SIZE + TAG_SIZE + bytes.length(), sizeOfBytes(definition, bytes));
+        assertEquals(
+                MIN_LENGTH_VAR_SIZE + TAG_SIZE + bytes.length(), sizeOfBytes(definition, bytes));
     }
 
     @Test
-    void testSizeOfMessage(){
+    void testSizeOfMessage() {
         final FieldDefinition definition = createFieldDefinition(MESSAGE);
         final String appleStr = randomVarSizeString();
         final Apple apple = Apple.newBuilder().setVariety(appleStr).build();
 
-        assertEquals(MIN_LENGTH_VAR_SIZE + TAG_SIZE + appleStr.length(), sizeOfMessage(definition, apple, v -> v.getVariety().length()));
+        assertEquals(
+                MIN_LENGTH_VAR_SIZE + TAG_SIZE + appleStr.length(),
+                sizeOfMessage(definition, apple, v -> v.getVariety().length()));
     }
 
     @Test
     void testSizeOfMessage_oneOf_null() {
         final FieldDefinition definition = createOneOfFieldDefinition(MESSAGE);
-        assertEquals(MIN_LENGTH_VAR_SIZE + TAG_SIZE, sizeOfMessage(definition, null, v -> RNG.nextInt()));
+        assertEquals(
+                MIN_LENGTH_VAR_SIZE + TAG_SIZE,
+                sizeOfMessage(definition, null, v -> RNG.nextInt()));
     }
 
     @Test
@@ -1138,8 +1317,7 @@ private static Stream<Arguments> provideWriteIntegerListArguments() {
                 Arguments.of(UINT32, 24, false),
                 Arguments.of(SINT32, 21, true),
                 Arguments.of(FIXED32, 32, false),
-                Arguments.of(SFIXED32, 32, false)
-        );
+                Arguments.of(SFIXED32, 32, false));
     }
 
     @ParameterizedTest
@@ -1148,16 +1326,10 @@ void testWriteIntegerList(final FieldType type, final int expectedSize, final bo
         final FieldDefinition definition = createRepeatedFieldDefinition(type);
 
         final long start = bufferedData.position();
-        ProtoWriterTools.writeIntegerList(bufferedData, definition, List.of(
-                0x0f,
-                0xff,
-                0xfff,
-                0xffff,
-                0xfffff,
-                0xffffff,
-                0xfffffff,
-                0xffffffff
-        ));
+        ProtoWriterTools.writeIntegerList(
+                bufferedData,
+                definition,
+                List.of(0x0f, 0xff, 0xfff, 0xffff, 0xfffff, 0xffffff, 0xfffffff, 0xffffffff));
         final long finish = bufferedData.position();
 
         int tag = bufferedData.getVarInt(start, false);
@@ -1180,8 +1352,7 @@ private static Stream<Arguments> provideWriteLongListArguments() {
                 Arguments.of(UINT64, 85, false),
                 Arguments.of(SINT64, 78, true),
                 Arguments.of(FIXED64, 128, false),
-                Arguments.of(SFIXED64, 128, false)
-        );
+                Arguments.of(SFIXED64, 128, false));
     }
 
     @ParameterizedTest
@@ -1190,24 +1361,26 @@ void testWriteLongList(final FieldType type, final int expectedSize, final boole
         final FieldDefinition definition = createRepeatedFieldDefinition(type);
 
         final long start = bufferedData.position();
-        ProtoWriterTools.writeLongList(bufferedData, definition, List.of(
-                0x0fL,
-                0xffL,
-                0xfffL,
-                0xffffL,
-                0xfffffL,
-                0xffffffL,
-                0xfffffffL,
-                0xffffffffL,
-                0xfffffffffL,
-                0xffffffffffL,
-                0xfffffffffffL,
-                0xffffffffffffL,
-                0xfffffffffffffL,
-                0xffffffffffffffL,
-                0xfffffffffffffffL,
-                0xffffffffffffffffL
-        ));
+        ProtoWriterTools.writeLongList(
+                bufferedData,
+                definition,
+                List.of(
+                        0x0fL,
+                        0xffL,
+                        0xfffL,
+                        0xffffL,
+                        0xfffffL,
+                        0xffffffL,
+                        0xfffffffL,
+                        0xffffffffL,
+                        0xfffffffffL,
+                        0xffffffffffL,
+                        0xfffffffffffL,
+                        0xffffffffffffL,
+                        0xfffffffffffffL,
+                        0xffffffffffffffL,
+                        0xfffffffffffffffL,
+                        0xffffffffffffffffL));
         final long finish = bufferedData.position();
 
         int tag = bufferedData.getVarInt(start, false);
@@ -1232,11 +1405,8 @@ private static interface ReaderMethod<T> {
         T read(BufferedData bd, long pos);
     }
 
-    private static final List<EnumWithProtoMetadata> testEnumList = List.of(
-            mockEnum(0),
-            mockEnum(2),
-            mockEnum(1)
-    );
+    private static final List<EnumWithProtoMetadata> testEnumList =
+            List.of(mockEnum(0), mockEnum(2), mockEnum(1));
 
     // https://clement-jean.github.io/packed_vs_unpacked_repeated_fields/
     private static Stream<Arguments> provideWritePackedListArguments() {
@@ -1246,41 +1416,49 @@ private static Stream<Arguments> provideWritePackedListArguments() {
                         (WriterMethod<Float>) ProtoWriterTools::writeFloatList,
                         List.of(.1f, .5f, 100.f),
                         12,
-                        (ReaderMethod<Float>) (BufferedData bd, long pos) -> bd.getFloat(pos)
-                        ),
+                        (ReaderMethod<Float>) (BufferedData bd, long pos) -> bd.getFloat(pos)),
                 Arguments.of(
                         DOUBLE,
                         (WriterMethod<Double>) ProtoWriterTools::writeDoubleList,
                         List.of(.1, .5, 100., 1.7653472635472654e240),
                         32,
-                        (ReaderMethod<Double>) (BufferedData bd, long pos) -> bd.getDouble(pos)
-                ),
+                        (ReaderMethod<Double>) (BufferedData bd, long pos) -> bd.getDouble(pos)),
                 Arguments.of(
                         BOOL,
                         (WriterMethod<Boolean>) ProtoWriterTools::writeBooleanList,
                         List.of(true, false, false, true, true, true),
                         6,
-                        (ReaderMethod<Boolean>) (BufferedData bd, long pos) -> (bd.getInt(pos) != 0 ? true : false)
-                ),
+                        (ReaderMethod<Boolean>)
+                                (BufferedData bd, long pos) ->
+                                        (bd.getInt(pos) != 0 ? true : false)),
                 Arguments.of(
                         ENUM,
-                        (WriterMethod<? extends EnumWithProtoMetadata>) ProtoWriterTools::writeEnumList,
+                        (WriterMethod<? extends EnumWithProtoMetadata>)
+                                ProtoWriterTools::writeEnumList,
                         testEnumList,
                         3,
-                        (ReaderMethod<? extends EnumWithProtoMetadata>) (BufferedData bd, long pos) -> {
-                            final int ordinal = bd.getVarInt(pos, false);
-                            for (EnumWithProtoMetadata e : testEnumList) {
-                                if (e.protoOrdinal() == ordinal) return e;
-                            }
-                            throw new RuntimeException("Unexpected ordinal " + ordinal
-                                    + " for test enum list "
-                                    + testEnumList.stream()
-                                    .map(e -> "" + e.protoOrdinal() + ": " + e.protoName())
-                                    .collect(Collectors.joining(",", "{", "}"))
-                            );
-                        }
-                )
-        );
+                        (ReaderMethod<? extends EnumWithProtoMetadata>)
+                                (BufferedData bd, long pos) -> {
+                                    final int ordinal = bd.getVarInt(pos, false);
+                                    for (EnumWithProtoMetadata e : testEnumList) {
+                                        if (e.protoOrdinal() == ordinal) return e;
+                                    }
+                                    throw new RuntimeException(
+                                            "Unexpected ordinal "
+                                                    + ordinal
+                                                    + " for test enum list "
+                                                    + testEnumList.stream()
+                                                            .map(
+                                                                    e ->
+                                                                            ""
+                                                                                    + e
+                                                                                            .protoOrdinal()
+                                                                                    + ": "
+                                                                                    + e.protoName())
+                                                            .collect(
+                                                                    Collectors.joining(
+                                                                            ",", "{", "}")));
+                                }));
     }
 
     @ParameterizedTest
@@ -1307,64 +1485,58 @@ <T> void testWritePackedList(
 
         assertEquals(finish - start - sizeOfTag - sizeOfSize, size);
 
-        T value = readerMethod.read(bufferedData,start + sizeOfTag + sizeOfSize);
+        T value = readerMethod.read(bufferedData, start + sizeOfTag + sizeOfSize);
         assertEquals(list.get(0), value);
     }
 
-    private static record UnpackedField<T>(
-            T value,
-            int size
-    ) {}
+    private static record UnpackedField<T>(T value, int size) {}
 
     // https://clement-jean.github.io/packed_vs_unpacked_repeated_fields/
     private static Stream<Arguments> provideWriteUnpackedListArguments() {
         return Stream.of(
                 Arguments.of(
                         STRING,
-                        (WriterMethod<String>) (out, field, list) -> {
-                            try {
-                                ProtoWriterTools.writeStringList(out, field, list);
-                            } catch (IOException e) {
-                                Sneaky.sneakyThrow(e);
-                            }
-                        },
+                        (WriterMethod<String>)
+                                (out, field, list) -> {
+                                    try {
+                                        ProtoWriterTools.writeStringList(out, field, list);
+                                    } catch (IOException e) {
+                                        Sneaky.sneakyThrow(e);
+                                    }
+                                },
                         List.of("string 1", "testing here", "testing there"),
-                        (ReaderMethod<UnpackedField<String>>) (BufferedData bd, long pos) -> {
-                            int size = bd.getVarInt(pos, false);
-                            int sizeOfSize = ProtoWriterTools.sizeOfVarInt32(size);
-                            return new UnpackedField<>(
-                                    new String(
-                                            bd.getBytes(pos + sizeOfSize, size).toByteArray(),
-                                            StandardCharsets.UTF_8
-                                    ),
-                                    sizeOfSize + size
-                            );
-                        }
-                ),
+                        (ReaderMethod<UnpackedField<String>>)
+                                (BufferedData bd, long pos) -> {
+                                    int size = bd.getVarInt(pos, false);
+                                    int sizeOfSize = ProtoWriterTools.sizeOfVarInt32(size);
+                                    return new UnpackedField<>(
+                                            new String(
+                                                    bd.getBytes(pos + sizeOfSize, size)
+                                                            .toByteArray(),
+                                                    StandardCharsets.UTF_8),
+                                            sizeOfSize + size);
+                                }),
                 Arguments.of(
                         BYTES,
-                        (WriterMethod<? extends RandomAccessData>) (out, field, list) -> {
-                            try {
-                                ProtoWriterTools.writeBytesList(out, field, list);
-                            } catch (IOException e) {
-                                Sneaky.sneakyThrow(e);
-                            }
-                        },
+                        (WriterMethod<? extends RandomAccessData>)
+                                (out, field, list) -> {
+                                    try {
+                                        ProtoWriterTools.writeBytesList(out, field, list);
+                                    } catch (IOException e) {
+                                        Sneaky.sneakyThrow(e);
+                                    }
+                                },
                         List.of(
                                 Bytes.wrap(new byte[] {1, 2, 3}),
-                                Bytes.wrap(new byte[] {(byte)255, 127, 15}),
-                                Bytes.wrap(new byte[] {66, (byte) 218, 7, 55, 11, (byte) 255})
-                        ),
-                        (ReaderMethod<UnpackedField<Bytes>>) (BufferedData bd, long pos) -> {
-                            int size = bd.getVarInt(pos, false);
-                            int sizeOfSize = ProtoWriterTools.sizeOfVarInt32(size);
-                            return new UnpackedField<>(
-                                    bd.getBytes(pos + sizeOfSize, size),
-                                    sizeOfSize + size
-                            );
-                        }
-                )
-        );
+                                Bytes.wrap(new byte[] {(byte) 255, 127, 15}),
+                                Bytes.wrap(new byte[] {66, (byte) 218, 7, 55, 11, (byte) 255})),
+                        (ReaderMethod<UnpackedField<Bytes>>)
+                                (BufferedData bd, long pos) -> {
+                                    int size = bd.getVarInt(pos, false);
+                                    int sizeOfSize = ProtoWriterTools.sizeOfVarInt32(size);
+                                    return new UnpackedField<>(
+                                            bd.getBytes(pos + sizeOfSize, size), sizeOfSize + size);
+                                }));
     }
 
     @ParameterizedTest
@@ -1396,19 +1568,27 @@ <T> void testWriteUnpackedList(
     }
 
     private void assertVarIntTag(FieldDefinition definition) {
-        assertEquals((definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_VARINT_OR_ZIGZAG.ordinal(), bufferedData.readVarInt(false));
+        assertEquals(
+                (definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_VARINT_OR_ZIGZAG.ordinal(),
+                bufferedData.readVarInt(false));
     }
 
     private void assertFixed32Tag(FieldDefinition definition) {
-        assertEquals((definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_FIXED_32_BIT.ordinal(), bufferedData.readVarInt(false));
+        assertEquals(
+                (definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_FIXED_32_BIT.ordinal(),
+                bufferedData.readVarInt(false));
     }
 
     private void assertFixed64Tag(FieldDefinition definition) {
-        assertEquals((definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_FIXED_64_BIT.ordinal(), bufferedData.readVarInt(false));
+        assertEquals(
+                (definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_FIXED_64_BIT.ordinal(),
+                bufferedData.readVarInt(false));
     }
 
     private void assertTypeDelimitedTag(FieldDefinition definition) {
-        assertEquals((definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_DELIMITED.ordinal(), bufferedData.readVarInt(false));
+        assertEquals(
+                (definition.number() << TAG_TYPE_BITS) | WIRE_TYPE_DELIMITED.ordinal(),
+                bufferedData.readVarInt(false));
     }
 
     static String randomVarSizeString() {
@@ -1435,19 +1615,21 @@ static long randomLargeLong() {
     }
 
     static FieldDefinition createFieldDefinition(FieldType fieldType) {
-        return new FieldDefinition(RANDOM_STRING.nextString(), fieldType, false, RNG.nextInt(1, 16));
+        return new FieldDefinition(
+                RANDOM_STRING.nextString(), fieldType, false, RNG.nextInt(1, 16));
     }
 
     static FieldDefinition createOptionalFieldDefinition(FieldType fieldType) {
-        return new FieldDefinition(RANDOM_STRING.nextString(), fieldType, false, true, false, RNG.nextInt(1, 16));
+        return new FieldDefinition(
+                RANDOM_STRING.nextString(), fieldType, false, true, false, RNG.nextInt(1, 16));
     }
 
     static FieldDefinition createOneOfFieldDefinition(FieldType fieldType) {
-        return new FieldDefinition(RANDOM_STRING.nextString(), fieldType, false, false, true, RNG.nextInt(1, 16));
+        return new FieldDefinition(
+                RANDOM_STRING.nextString(), fieldType, false, false, true, RNG.nextInt(1, 16));
     }
 
     static FieldDefinition createRepeatedFieldDefinition(FieldType fieldType) {
         return new FieldDefinition(RANDOM_STRING.nextString(), fieldType, true, RNG.nextInt(1, 16));
     }
-
 }
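
Nearly every assertion in the ProtoWriterToolsTest hunks above decodes the leading varint as a tag built from (definition.number() << TAG_TYPE_BITS) | wireType.ordinal(). Here is a small self-contained sketch of that arithmetic, assuming TAG_TYPE_BITS is 3 and that the wire-type ordinals follow the protobuf wire format (0 varint, 1 fixed64, 2 length-delimited, 5 fixed32); the zigzag line illustrates the mapping referenced by the "/* zigzag encoding */" comment in testSizeOfLong_sint64.

    public class TagAndZigZagSketch {
        public static void main(String[] args) {
            final int tagTypeBits = 3;       // assumption: value of TAG_TYPE_BITS
            final int wireTypeDelimited = 2; // assumption: WIRE_TYPE_DELIMITED.ordinal()

            // Tag for field number 5 with the length-delimited wire type: (5 << 3) | 2 == 42.
            final int tag = (5 << tagTypeBits) | wireTypeDelimited;
            System.out.println("tag = " + tag); // prints 42
            System.out.println("field = " + (tag >>> tagTypeBits)
                    + ", wire type = " + (tag & 0b111)); // prints field = 5, wire type = 2

            // ZigZag maps signed values onto unsigned varints: small negatives stay small.
            final long n = -123456789L;
            final long zigzag = (n << 1) ^ (n >> 63);
            System.out.println("zigzag(" + n + ") = " + zigzag); // prints 246913577
        }
    }
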
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/Utf8ToolsTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/Utf8ToolsTest.java
index 4ee92f2e..0c1d7486 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/Utf8ToolsTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/Utf8ToolsTest.java
@@ -1,22 +1,22 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime;
 
-import com.hedera.pbj.runtime.io.buffer.BufferedData;
-import org.junit.jupiter.params.ParameterizedTest;
-import org.junit.jupiter.params.provider.Arguments;
-import org.junit.jupiter.params.provider.MethodSource;
+import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
+import com.hedera.pbj.runtime.io.buffer.BufferedData;
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 import java.nio.charset.StandardCharsets;
 import java.util.HexFormat;
 import java.util.stream.Stream;
-
-import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
-import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
 
 public class Utf8ToolsTest {
-    private static Stream<Arguments> provideStringsAndLengths() throws UnsupportedEncodingException {
+    private static Stream<Arguments> provideStringsAndLengths()
+            throws UnsupportedEncodingException {
         return Stream.of(
                 Arguments.of("", 0),
                 Arguments.of(" ", 1),
@@ -25,16 +25,25 @@ private static Stream<Arguments> provideStringsAndLengths() throws UnsupportedEn
                 Arguments.of("not blank", 9),
                 Arguments.of("\u076c test", 7),
                 Arguments.of("\u076c \uea84 test", 11),
-                Arguments.of(new String(new byte[] {
-                        (byte) 0b11110001, (byte) 0b10000011, (byte) 0b10000111, (byte) 0b10001111
-                }, "UTF-8"), 4)
-        );
+                Arguments.of(
+                        new String(
+                                new byte[] {
+                                    (byte) 0b11110001,
+                                    (byte) 0b10000011,
+                                    (byte) 0b10000111,
+                                    (byte) 0b10001111
+                                },
+                                "UTF-8"),
+                        4));
     }
+
     @ParameterizedTest
     @MethodSource("provideStringsAndLengths")
     void encodedLength(String testStr, int expectedLength) {
         assertEquals(expectedLength, assertDoesNotThrow(() -> Utf8Tools.encodedLength(testStr)));
-        assertEquals(testStr.getBytes(StandardCharsets.UTF_8).length, assertDoesNotThrow(() -> Utf8Tools.encodedLength(testStr)));
+        assertEquals(
+                testStr.getBytes(StandardCharsets.UTF_8).length,
+                assertDoesNotThrow(() -> Utf8Tools.encodedLength(testStr)));
     }
 
     @ParameterizedTest
@@ -47,9 +56,11 @@ void encodeUtf8(String testStr, int expectedLength) {
             throw new RuntimeException(e);
         }
         bufferedData.flip();
-        byte[] bytes = new byte[(int)bufferedData.length()];
+        byte[] bytes = new byte[(int) bufferedData.length()];
         bufferedData.getBytes(0, bytes);
-        assertEquals(HexFormat.of().formatHex(testStr.getBytes(StandardCharsets.UTF_8)), HexFormat.of().formatHex(bytes));
+        assertEquals(
+                HexFormat.of().formatHex(testStr.getBytes(StandardCharsets.UTF_8)),
+                HexFormat.of().formatHex(bytes));
         assertEquals(expectedLength, bytes.length);
     }
 }
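
Editor's note on the Utf8ToolsTest hunk above: the reformatted parameterized cases pin Utf8Tools.encodedLength to the byte count produced by standard UTF-8 encoding. The following is a minimal standalone sketch of that expectation only, not the library implementation; the helper name expectedUtf8Length is hypothetical.

import java.nio.charset.StandardCharsets;

final class Utf8LengthSketch {
    // Hypothetical helper: counts UTF-8 bytes per code point, which is the value
    // the test compares against String.getBytes(StandardCharsets.UTF_8).length.
    static int expectedUtf8Length(String s) {
        int len = 0;
        for (int i = 0; i < s.length(); ) {
            int cp = s.codePointAt(i);
            i += Character.charCount(cp);
            if (cp < 0x80) len += 1;
            else if (cp < 0x800) len += 2;
            else if (cp < 0x10000) len += 3;
            else len += 4;
        }
        return len;
    }

    public static void main(String[] args) {
        String sample = "\u076c \uea84 test"; // one of the parameterized cases above
        int expected = sample.getBytes(StandardCharsets.UTF_8).length;
        System.out.println(expectedUtf8Length(sample) + " == " + expected); // prints "11 == 11"
    }
}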
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/GrpcExceptionTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/GrpcExceptionTest.java
index a4f4d68e..ebeb0b62 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/GrpcExceptionTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/GrpcExceptionTest.java
@@ -15,7 +15,8 @@ class GrpcExceptionTest {
     void testStatus(final GrpcStatus expected) {
         // If it is OK, then it will actually fail the test, so do not run the test in that case.
         Assumptions.assumeThat(expected).isNotEqualTo(GrpcStatus.OK);
-        // A GrpcException that is given any status should return that status from the status() method.
+        // A GrpcException that is given any status should return that status from the status()
+        // method.
         GrpcException grpcException = new GrpcException(expected);
         assertThat(grpcException.status()).isEqualTo(expected);
     }
@@ -25,8 +26,8 @@ void testOkStatusThrows() {
         // If the status is OK, then the constructor should throw an IllegalArgumentException.
         //noinspection ThrowableNotThrown
         assertThatThrownBy(() -> new GrpcException(GrpcStatus.OK))
-            .isInstanceOf(IllegalArgumentException.class)
-            .hasMessage("status cannot be OK");
+                .isInstanceOf(IllegalArgumentException.class)
+                .hasMessage("status cannot be OK");
     }
 
     @Test
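
Editor's note on the GrpcExceptionTest hunk above: the reformatted assertions pin two behaviors, that status() echoes the constructor argument and that OK is rejected. A hedged usage sketch follows; the constructor, status(), and the "status cannot be OK" message come from the diff, while GrpcStatus.INVALID_ARGUMENT is assumed from the gRPC status specification the enum mirrors.

import com.hedera.pbj.runtime.grpc.GrpcException;
import com.hedera.pbj.runtime.grpc.GrpcStatus;

public final class GrpcExceptionUsageSketch {
    public static void main(String[] args) {
        // Any non-OK status is returned unchanged from status().
        GrpcException ex = new GrpcException(GrpcStatus.INVALID_ARGUMENT); // constant assumed from the gRPC spec
        System.out.println(ex.status());

        // OK is rejected at construction time.
        try {
            new GrpcException(GrpcStatus.OK);
        } catch (IllegalArgumentException expected) {
            System.out.println(expected.getMessage()); // "status cannot be OK"
        }
    }
}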
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/GrpcStatusTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/GrpcStatusTest.java
index 2a7a1c98..870f8fe5 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/GrpcStatusTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/GrpcStatusTest.java
@@ -7,9 +7,10 @@
 
 class GrpcStatusTest {
     /**
-     * The specific ordinal values must match exactly the expectations as set forth in the specification. This test
-     * "fixes" them in place. Any changes to the order in which fields are placed in GrpcStatus will break this test,
-     * as the ordinal values will change. The test MUST NOT BE adapted to match.
+     * The specific ordinal values must match exactly the expectations as set forth in the
+     * specification. This test "fixes" them in place. Any changes to the order in which fields are
+     * placed in GrpcStatus will break this test, as the ordinal values will change. The test MUST
+     * NOT BE adapted to match.
      */
     @Test
     void statusCodesAreSpecific() {
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/PipelinesTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/PipelinesTest.java
index 00fd6b96..dce05d0b 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/PipelinesTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/grpc/PipelinesTest.java
@@ -76,10 +76,11 @@ class UnaryTest {
 
         @Test
         void requestMapperIsRequired() {
-            final var builder = Pipelines.<String, String>unary()
-                    .method(String::toUpperCase)
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies);
+            final var builder =
+                    Pipelines.<String, String>unary()
+                            .method(String::toUpperCase)
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies);
 
             assertThatThrownBy(builder::build)
                     .hasMessage("The request mapper must be specified.")
@@ -88,10 +89,11 @@ void requestMapperIsRequired() {
 
         @Test
         void methodIsRequired() {
-            final var builder = Pipelines.<String, String>unary()
-                    .mapRequest(Bytes::asUtf8String)
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies);
+            final var builder =
+                    Pipelines.<String, String>unary()
+                            .mapRequest(Bytes::asUtf8String)
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies);
 
             assertThatThrownBy(builder::build)
                     .hasMessage("The method must be specified.")
@@ -100,10 +102,11 @@ void methodIsRequired() {
 
         @Test
         void responseMapperIsRequired() {
-            final var builder = Pipelines.<String, String>unary()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method(String::toUpperCase)
-                    .respondTo(replies);
+            final var builder =
+                    Pipelines.<String, String>unary()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method(String::toUpperCase)
+                            .respondTo(replies);
 
             assertThatThrownBy(builder::build)
                     .hasMessage("The response mapper must be specified.")
@@ -112,10 +115,11 @@ void responseMapperIsRequired() {
 
         @Test
         void respondToIsRequired() {
-            final var builder = Pipelines.<String, String>unary()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method(String::toUpperCase)
-                    .mapResponse(Bytes::wrap);
+            final var builder =
+                    Pipelines.<String, String>unary()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method(String::toUpperCase)
+                            .mapResponse(Bytes::wrap);
 
             assertThatThrownBy(builder::build)
                     .hasMessage("The replies subscriber must be specified.")
@@ -124,12 +128,13 @@ void respondToIsRequired() {
 
         @Test
         void nullSubscriptionThrowsNPE() {
-            final var pipeline = Pipelines.<String, String>unary()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method(String::toUpperCase)
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies)
-                    .build();
+            final var pipeline =
+                    Pipelines.<String, String>unary()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method(String::toUpperCase)
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies)
+                            .build();
 
             assertThatThrownBy(() -> pipeline.onSubscribe(null))
                     .isInstanceOf(NullPointerException.class);
@@ -137,12 +142,13 @@ void nullSubscriptionThrowsNPE() {
 
         @Test
         void onNextTwiceThrowsISE() {
-            final var pipeline = Pipelines.<String, String>unary()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method(String::toUpperCase)
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies)
-                    .build();
+            final var pipeline =
+                    Pipelines.<String, String>unary()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method(String::toUpperCase)
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies)
+                            .build();
 
             pipeline.onSubscribe(mock(Flow.Subscription.class));
             pipeline.onNext(Bytes.wrap("hello"));
@@ -154,12 +160,13 @@ void onNextTwiceThrowsISE() {
         void exceptionDuring_onNext_IsHandled() {
             final var ex = new RuntimeException("Some exception");
             doThrow(ex).when(replies).onNext(any());
-            final var pipeline = Pipelines.<String, String>unary()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method(String::toUpperCase)
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies)
-                    .build();
+            final var pipeline =
+                    Pipelines.<String, String>unary()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method(String::toUpperCase)
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies)
+                            .build();
 
             pipeline.onSubscribe(mock(Flow.Subscription.class));
             final var data = Bytes.wrap("hello");
@@ -169,12 +176,13 @@ void exceptionDuring_onNext_IsHandled() {
 
         @Test
         void positive() {
-            final var pipeline = Pipelines.<String, String>unary()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method(String::toUpperCase)
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies)
-                    .build();
+            final var pipeline =
+                    Pipelines.<String, String>unary()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method(String::toUpperCase)
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies)
+                            .build();
 
             pipeline.onSubscribe(subscription);
             pipeline.onNext(Bytes.wrap("hello"));
@@ -191,10 +199,11 @@ class BidiTest {
 
         @Test
         void requestMapperIsRequired() {
-            final var builder = Pipelines.<String, String>bidiStreaming()
-                    .method(sink -> client)
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies);
+            final var builder =
+                    Pipelines.<String, String>bidiStreaming()
+                            .method(sink -> client)
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies);
 
             assertThatThrownBy(builder::build)
                     .hasMessage("The request mapper must be specified.")
@@ -203,10 +212,11 @@ void requestMapperIsRequired() {
 
         @Test
         void methodIsRequired() {
-            final var builder = Pipelines.<String, String>bidiStreaming()
-                    .mapRequest(Bytes::asUtf8String)
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies);
+            final var builder =
+                    Pipelines.<String, String>bidiStreaming()
+                            .mapRequest(Bytes::asUtf8String)
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies);
 
             assertThatThrownBy(builder::build)
                     .hasMessage("The method must be specified.")
@@ -215,10 +225,11 @@ void methodIsRequired() {
 
         @Test
         void responseMapperIsRequired() {
-            final var builder = Pipelines.<String, String>bidiStreaming()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method(sink -> client)
-                    .respondTo(replies);
+            final var builder =
+                    Pipelines.<String, String>bidiStreaming()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method(sink -> client)
+                            .respondTo(replies);
 
             assertThatThrownBy(builder::build)
                     .hasMessage("The response mapper must be specified.")
@@ -227,10 +238,11 @@ void responseMapperIsRequired() {
 
         @Test
         void respondToIsRequired() {
-            final var builder = Pipelines.<String, String>bidiStreaming()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method(sink -> client)
-                    .mapResponse(Bytes::wrap);
+            final var builder =
+                    Pipelines.<String, String>bidiStreaming()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method(sink -> client)
+                            .mapResponse(Bytes::wrap);
 
             assertThatThrownBy(builder::build)
                     .hasMessage("The replies subscriber must be specified.")
@@ -239,12 +251,13 @@ void respondToIsRequired() {
 
         @Test
         void nullSubscriptionThrowsNPE() {
-            final var pipeline = Pipelines.<String, String>bidiStreaming()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method(sink -> client)
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies)
-                    .build();
+            final var pipeline =
+                    Pipelines.<String, String>bidiStreaming()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method(sink -> client)
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies)
+                            .build();
 
             assertThatThrownBy(() -> pipeline.onSubscribe(null))
                     .isInstanceOf(NullPointerException.class);
@@ -252,19 +265,27 @@ void nullSubscriptionThrowsNPE() {
 
         @Test
         void onCompleteNextThrowsISE() {
-            final var pipeline = Pipelines.<String, String>bidiStreaming()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method(sink -> {
-                        lenient().doAnswer(invocation -> {
-                            final var msg = invocation.getArgument(0, String.class);
-                            sink.onNext(msg.toUpperCase());
-                            return null;
-                        }).when(client).onNext(any());
-                        return client;
-                    })
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies)
-                    .build();
+            final var pipeline =
+                    Pipelines.<String, String>bidiStreaming()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method(
+                                    sink -> {
+                                        lenient()
+                                                .doAnswer(
+                                                        invocation -> {
+                                                            final var msg =
+                                                                    invocation.getArgument(
+                                                                            0, String.class);
+                                                            sink.onNext(msg.toUpperCase());
+                                                            return null;
+                                                        })
+                                                .when(client)
+                                                .onNext(any());
+                                        return client;
+                                    })
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies)
+                            .build();
 
             pipeline.onSubscribe(mock(Flow.Subscription.class));
             pipeline.onNext(Bytes.wrap("hello"));
@@ -287,12 +308,13 @@ void onCompleteNextThrowsISE() {
         void exceptionDuring_onNext_IsHandled() {
             final var ex = new RuntimeException("Some exception");
             doThrow(ex).when(client).onNext(any());
-            final var pipeline = Pipelines.<String, String>bidiStreaming()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method(sink -> client)
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies)
-                    .build();
+            final var pipeline =
+                    Pipelines.<String, String>bidiStreaming()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method(sink -> client)
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies)
+                            .build();
 
             pipeline.onSubscribe(mock(Flow.Subscription.class));
             final var data = Bytes.wrap("hello");
@@ -305,7 +327,10 @@ void exceptionDuring_responseConverter_IsHandled() {
             final var ex = new RuntimeException("Some exception");
             Pipelines.<String, String>bidiStreaming()
                     .mapRequest(Bytes::asUtf8String)
-                    .method(sink -> { throw ex; })
+                    .method(
+                            sink -> {
+                                throw ex;
+                            })
                     .mapResponse(Bytes::wrap)
                     .respondTo(replies)
                     .build();
@@ -315,28 +340,34 @@ void exceptionDuring_responseConverter_IsHandled() {
 
         @Test
         void positive() {
-            final var pipeline = Pipelines.<String, String>bidiStreaming()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method(sink -> {
-                        doAnswer(invocation -> {
-                            final var msg = invocation.getArgument(0, String.class);
-                            sink.onNext(msg.toUpperCase());
-                            return null;
-                        }).when(client).onNext(any());
-                        return client;
-                    })
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies)
-                    .build();
+            final var pipeline =
+                    Pipelines.<String, String>bidiStreaming()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method(
+                                    sink -> {
+                                        doAnswer(
+                                                        invocation -> {
+                                                            final var msg =
+                                                                    invocation.getArgument(
+                                                                            0, String.class);
+                                                            sink.onNext(msg.toUpperCase());
+                                                            return null;
+                                                        })
+                                                .when(client)
+                                                .onNext(any());
+                                        return client;
+                                    })
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies)
+                            .build();
 
             final var argCaptor = ArgumentCaptor.forClass(Bytes.class);
             pipeline.onSubscribe(subscription);
             pipeline.onNext(Bytes.wrap("hello"));
             pipeline.onNext(Bytes.wrap("world"));
             verify(replies, times(2)).onNext(argCaptor.capture());
-            assertThat(argCaptor.getAllValues()).containsExactly(
-                    Bytes.wrap("HELLO"),
-                    Bytes.wrap("WORLD"));
+            assertThat(argCaptor.getAllValues())
+                    .containsExactly(Bytes.wrap("HELLO"), Bytes.wrap("WORLD"));
         }
     }
 
@@ -348,10 +379,11 @@ class ServerStreamingTest {
 
         @Test
         void requestMapperIsRequired() {
-            final var builder = Pipelines.<String, String>serverStreaming()
-                    .method((msg, sink) -> sink.onNext(msg.toUpperCase()))
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies);
+            final var builder =
+                    Pipelines.<String, String>serverStreaming()
+                            .method((msg, sink) -> sink.onNext(msg.toUpperCase()))
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies);
 
             assertThatThrownBy(builder::build)
                     .hasMessage("The request mapper must be specified.")
@@ -360,10 +392,11 @@ void requestMapperIsRequired() {
 
         @Test
         void methodIsRequired() {
-            final var builder = Pipelines.<String, String>serverStreaming()
-                    .mapRequest(Bytes::asUtf8String)
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies);
+            final var builder =
+                    Pipelines.<String, String>serverStreaming()
+                            .mapRequest(Bytes::asUtf8String)
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies);
 
             assertThatThrownBy(builder::build)
                     .hasMessage("The method must be specified.")
@@ -372,10 +405,11 @@ void methodIsRequired() {
 
         @Test
         void responseMapperIsRequired() {
-            final var builder = Pipelines.<String, String>serverStreaming()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method((msg, sink) -> sink.onNext(msg.toUpperCase()))
-                    .respondTo(replies);
+            final var builder =
+                    Pipelines.<String, String>serverStreaming()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method((msg, sink) -> sink.onNext(msg.toUpperCase()))
+                            .respondTo(replies);
 
             assertThatThrownBy(builder::build)
                     .hasMessage("The response mapper must be specified.")
@@ -384,10 +418,11 @@ void responseMapperIsRequired() {
 
         @Test
         void respondToIsRequired() {
-            final var builder = Pipelines.<String, String>serverStreaming()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method((msg, sink) -> sink.onNext(msg.toUpperCase()))
-                    .mapResponse(Bytes::wrap);
+            final var builder =
+                    Pipelines.<String, String>serverStreaming()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method((msg, sink) -> sink.onNext(msg.toUpperCase()))
+                            .mapResponse(Bytes::wrap);
 
             assertThatThrownBy(builder::build)
                     .hasMessage("The replies subscriber must be specified.")
@@ -396,12 +431,13 @@ void respondToIsRequired() {
 
         @Test
         void nullSubscriptionThrowsNPE() {
-            final var pipeline = Pipelines.<String, String>serverStreaming()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method((msg, sink) -> sink.onNext(msg.toUpperCase()))
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies)
-                    .build();
+            final var pipeline =
+                    Pipelines.<String, String>serverStreaming()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method((msg, sink) -> sink.onNext(msg.toUpperCase()))
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies)
+                            .build();
 
             assertThatThrownBy(() -> pipeline.onSubscribe(null))
                     .isInstanceOf(NullPointerException.class);
@@ -409,12 +445,13 @@ void nullSubscriptionThrowsNPE() {
 
         @Test
         void onCompleteNextThrowsISE() {
-            final var pipeline = Pipelines.<String, String>serverStreaming()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method((msg, sink) -> sink.onNext(msg.toUpperCase()))
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies)
-                    .build();
+            final var pipeline =
+                    Pipelines.<String, String>serverStreaming()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method((msg, sink) -> sink.onNext(msg.toUpperCase()))
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies)
+                            .build();
 
             pipeline.onSubscribe(mock(Flow.Subscription.class));
             pipeline.onNext(Bytes.wrap("hello"));
@@ -427,12 +464,16 @@ void onCompleteNextThrowsISE() {
         @Test
         void badRequestMapperCallsOnError() {
             final var ex = new RuntimeException("Bad bad bad");
-            final var pipeline = Pipelines.<String, String>serverStreaming()
-                    .mapRequest(bytes -> { throw ex; })
-                    .method((msg, sink) -> sink.onNext(msg.toUpperCase()))
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies)
-                    .build();
+            final var pipeline =
+                    Pipelines.<String, String>serverStreaming()
+                            .mapRequest(
+                                    bytes -> {
+                                        throw ex;
+                                    })
+                            .method((msg, sink) -> sink.onNext(msg.toUpperCase()))
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies)
+                            .build();
 
             pipeline.onSubscribe(mock(Flow.Subscription.class));
             final var data = Bytes.wrap("hello");
@@ -443,12 +484,16 @@ void badRequestMapperCallsOnError() {
         @Test
         void badMethodCallsOnError() {
             final var ex = new RuntimeException("Bad bad bad");
-            final var pipeline = Pipelines.<String, String>serverStreaming()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method((msg, sink) -> { throw ex; })
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies)
-                    .build();
+            final var pipeline =
+                    Pipelines.<String, String>serverStreaming()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method(
+                                    (msg, sink) -> {
+                                        throw ex;
+                                    })
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies)
+                            .build();
 
             pipeline.onSubscribe(mock(Flow.Subscription.class));
             final var data = Bytes.wrap("hello");
@@ -458,12 +503,13 @@ void badMethodCallsOnError() {
 
         @Test
         void positive() {
-            final var pipeline = Pipelines.<String, String>serverStreaming()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method((msg, sink) -> sink.onNext(msg.toUpperCase()))
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies)
-                    .build();
+            final var pipeline =
+                    Pipelines.<String, String>serverStreaming()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method((msg, sink) -> sink.onNext(msg.toUpperCase()))
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies)
+                            .build();
 
             pipeline.onSubscribe(subscription);
             pipeline.onNext(Bytes.wrap("hello"));
@@ -479,10 +525,11 @@ class ClientStreamingTest {
 
         @Test
         void requestMapperIsRequired() {
-            final var builder = Pipelines.<String, String>clientStreaming()
-                    .method(ConcatenatingHandler::new)
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies);
+            final var builder =
+                    Pipelines.<String, String>clientStreaming()
+                            .method(ConcatenatingHandler::new)
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies);
 
             assertThatThrownBy(builder::build)
                     .hasMessage("The request mapper must be specified.")
@@ -491,10 +538,11 @@ void requestMapperIsRequired() {
 
         @Test
         void methodIsRequired() {
-            final var builder = Pipelines.<String, String>clientStreaming()
-                    .mapRequest(Bytes::asUtf8String)
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies);
+            final var builder =
+                    Pipelines.<String, String>clientStreaming()
+                            .mapRequest(Bytes::asUtf8String)
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies);
 
             assertThatThrownBy(builder::build)
                     .hasMessage("The method must be specified.")
@@ -503,10 +551,11 @@ void methodIsRequired() {
 
         @Test
         void responseMapperIsRequired() {
-            final var builder = Pipelines.<String, String>clientStreaming()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method(ConcatenatingHandler::new)
-                    .respondTo(replies);
+            final var builder =
+                    Pipelines.<String, String>clientStreaming()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method(ConcatenatingHandler::new)
+                            .respondTo(replies);
 
             assertThatThrownBy(builder::build)
                     .hasMessage("The response mapper must be specified.")
@@ -515,10 +564,11 @@ void responseMapperIsRequired() {
 
         @Test
         void respondToIsRequired() {
-            final var builder = Pipelines.<String, String>clientStreaming()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method(ConcatenatingHandler::new)
-                    .mapResponse(Bytes::wrap);
+            final var builder =
+                    Pipelines.<String, String>clientStreaming()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method(ConcatenatingHandler::new)
+                            .mapResponse(Bytes::wrap);
 
             assertThatThrownBy(builder::build)
                     .hasMessage("The replies subscriber must be specified.")
@@ -527,12 +577,13 @@ void respondToIsRequired() {
 
         @Test
         void nullSubscriptionThrowsNPE() {
-            final var pipeline = Pipelines.<String, String>clientStreaming()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method(ConcatenatingHandler::new)
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies)
-                    .build();
+            final var pipeline =
+                    Pipelines.<String, String>clientStreaming()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method(ConcatenatingHandler::new)
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies)
+                            .build();
 
             assertThatThrownBy(() -> pipeline.onSubscribe(null))
                     .isInstanceOf(NullPointerException.class);
@@ -540,12 +591,13 @@ void nullSubscriptionThrowsNPE() {
 
         @Test
         void onCompleteNextThrowsISE() {
-            final var pipeline = Pipelines.<String, String>clientStreaming()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method(ConcatenatingHandler::new)
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies)
-                    .build();
+            final var pipeline =
+                    Pipelines.<String, String>clientStreaming()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method(ConcatenatingHandler::new)
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies)
+                            .build();
 
             pipeline.onSubscribe(mock(Flow.Subscription.class));
             pipeline.onNext(Bytes.wrap("hello"));
@@ -558,12 +610,16 @@ void onCompleteNextThrowsISE() {
         @Test
         void badRequestMapperCallsOnError() {
             final var ex = new RuntimeException("Bad bad bad");
-            final var pipeline = Pipelines.<String, String>clientStreaming()
-                    .mapRequest(bytes -> { throw ex; })
-                    .method(ConcatenatingHandler::new)
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies)
-                    .build();
+            final var pipeline =
+                    Pipelines.<String, String>clientStreaming()
+                            .mapRequest(
+                                    bytes -> {
+                                        throw ex;
+                                    })
+                            .method(ConcatenatingHandler::new)
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies)
+                            .build();
 
             pipeline.onSubscribe(mock(Flow.Subscription.class));
             final var data = Bytes.wrap("hello");
@@ -576,7 +632,10 @@ void badMethodCallsOnError() {
             final var ex = new RuntimeException("Bad bad bad");
             Pipelines.<String, String>clientStreaming()
                     .mapRequest(Bytes::asUtf8String)
-                    .method(sink -> { throw ex; })
+                    .method(
+                            sink -> {
+                                throw ex;
+                            })
                     .mapResponse(Bytes::wrap)
                     .respondTo(replies)
                     .build();
@@ -586,12 +645,13 @@ void badMethodCallsOnError() {
 
         @Test
         void positive() {
-            final var pipeline = Pipelines.<String, String>clientStreaming()
-                    .mapRequest(Bytes::asUtf8String)
-                    .method(ConcatenatingHandler::new)
-                    .mapResponse(Bytes::wrap)
-                    .respondTo(replies)
-                    .build();
+            final var pipeline =
+                    Pipelines.<String, String>clientStreaming()
+                            .mapRequest(Bytes::asUtf8String)
+                            .method(ConcatenatingHandler::new)
+                            .mapResponse(Bytes::wrap)
+                            .respondTo(replies)
+                            .build();
 
             pipeline.onSubscribe(subscription);
             pipeline.onNext(Bytes.wrap("hello"));
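
Editor's note on the PipelinesTest hunks above: the changes are formatting only, but the chain they reformat is the Pipelines builder API exercised throughout this file. A hedged usage fragment of the unary flavor follows, using only calls that appear in the diff (Pipelines.unary, mapRequest, method, mapResponse, respondTo, build, onSubscribe, onNext); it assumes a Bytes subscriber named replies and a Flow.Subscription named subscription are available, as they are via Mockito mocks in the tests.

// Sketch, not the library's documented example: build a unary pipeline that
// uppercases the request and forwards the result to the replies subscriber.
final var pipeline =
        Pipelines.<String, String>unary()
                .mapRequest(Bytes::asUtf8String) // Bytes -> request type
                .method(String::toUpperCase)     // request -> response
                .mapResponse(Bytes::wrap)        // response type -> Bytes
                .respondTo(replies)              // downstream subscriber (assumed in scope)
                .build();

pipeline.onSubscribe(subscription);              // subscription assumed in scope
pipeline.onNext(Bytes.wrap("hello"));            // replies should receive Bytes.wrap("HELLO")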
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/DataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/DataTest.java
index 80d3dba6..945dcaa2 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/DataTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/DataTest.java
@@ -1,204 +1,207 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
 import com.google.protobuf.CodedInputStream;
 import com.google.protobuf.CodedOutputStream;
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
 import com.hedera.pbj.runtime.io.buffer.Bytes;
 import com.hedera.pbj.runtime.io.stream.ReadableStreamingData;
 import com.hedera.pbj.runtime.io.stream.WritableStreamingData;
-import org.junit.jupiter.params.ParameterizedTest;
-import org.junit.jupiter.params.provider.MethodSource;
-
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.nio.ByteOrder;
 import java.util.Arrays;
 import java.util.stream.Stream;
-
-import static org.junit.jupiter.api.Assertions.assertArrayEquals;
-import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
 
 final class DataTest {
 
     static Stream<Byte> bytesTestCases() {
         return Stream.of(
-                Byte.MIN_VALUE,
-                Byte.MIN_VALUE + 1,
-                -100,
-                -66,
-                -7,
-                -1,
-                0,
-                1,
-                9,
-                51,
-                101,
-                Byte.MAX_VALUE - 1,
-                Byte.MAX_VALUE).map(Number::byteValue);
+                        Byte.MIN_VALUE,
+                        Byte.MIN_VALUE + 1,
+                        -100,
+                        -66,
+                        -7,
+                        -1,
+                        0,
+                        1,
+                        9,
+                        51,
+                        101,
+                        Byte.MAX_VALUE - 1,
+                        Byte.MAX_VALUE)
+                .map(Number::byteValue);
     }
 
     @ParameterizedTest
     @MethodSource("bytesTestCases")
     void byteTest(Byte value) throws IOException {
-        doTest(value,
+        doTest(
+                value,
                 WritableStreamingData::writeByte,
-                (dout, v) -> dout.writeByte((int)v),
+                (dout, v) -> dout.writeByte((int) v),
                 BufferedData::writeByte,
                 ReadableStreamingData::readByte,
                 java.io.DataInputStream::readByte,
-                BufferedData::readByte
-        );
+                BufferedData::readByte);
     }
 
     static Stream<Integer> unsignedBytesTestCases() {
-        return Stream.of(0,1,9,51,101,127,128,255).map(Number::intValue);
+        return Stream.of(0, 1, 9, 51, 101, 127, 128, 255).map(Number::intValue);
     }
 
     @ParameterizedTest
     @MethodSource("unsignedBytesTestCases")
     void unsignedByteTest(Integer value) throws IOException {
-        doTest(value,
+        doTest(
+                value,
                 WritableStreamingData::writeUnsignedByte,
                 java.io.DataOutputStream::writeByte,
                 BufferedData::writeUnsignedByte,
                 ReadableStreamingData::readUnsignedByte,
                 java.io.DataInputStream::readUnsignedByte,
-                ReadableSequentialData::readUnsignedByte
-        );
+                ReadableSequentialData::readUnsignedByte);
     }
 
     static Stream<Integer> intsTestCases() {
         return Stream.of(
-                Integer.MIN_VALUE,
-                Integer.MIN_VALUE + 1,
-                -536870912,
-                -4194304,
-                -32768,
-                -100,
-                -66,
-                -7,
-                -1,
-                0,
-                1,
-                9,
-                51,
-                101,
-                32768,
-                4194304,
-                536870912,
-                Integer.MAX_VALUE - 1,
-                Integer.MAX_VALUE).map(Number::intValue);
+                        Integer.MIN_VALUE,
+                        Integer.MIN_VALUE + 1,
+                        -536870912,
+                        -4194304,
+                        -32768,
+                        -100,
+                        -66,
+                        -7,
+                        -1,
+                        0,
+                        1,
+                        9,
+                        51,
+                        101,
+                        32768,
+                        4194304,
+                        536870912,
+                        Integer.MAX_VALUE - 1,
+                        Integer.MAX_VALUE)
+                .map(Number::intValue);
     }
 
     @ParameterizedTest
     @MethodSource("intsTestCases")
     void intTest(Integer value) throws IOException {
-        doTest(value,
+        doTest(
+                value,
                 WritableStreamingData::writeInt,
                 java.io.DataOutputStream::writeInt,
                 BufferedData::writeInt,
                 ReadableStreamingData::readInt,
                 java.io.DataInputStream::readInt,
-                BufferedData::readInt
-        );
-        doTest(value,
+                BufferedData::readInt);
+        doTest(
+                value,
                 (d, v) -> d.writeInt(v, ByteOrder.LITTLE_ENDIAN),
                 (d, v) -> d.writeInt(Integer.reverseBytes(v)),
                 (d, v) -> d.writeInt(v, ByteOrder.LITTLE_ENDIAN),
                 d -> d.readInt(ByteOrder.LITTLE_ENDIAN),
                 d -> Integer.reverseBytes(d.readInt()),
-                d -> d.readInt(ByteOrder.LITTLE_ENDIAN)
-        );
+                d -> d.readInt(ByteOrder.LITTLE_ENDIAN));
     }
 
     static Stream<Long> unsignedIntsTestCases() {
-        return Stream.of(0,1,9,51,127,Integer.MAX_VALUE*2L).map(Number::longValue);
+        return Stream.of(0, 1, 9, 51, 127, Integer.MAX_VALUE * 2L).map(Number::longValue);
     }
 
     @ParameterizedTest
     @MethodSource("unsignedIntsTestCases")
     void unsignedIntTest(Long value) throws IOException {
-        doTest(value,
+        doTest(
+                value,
                 WritableStreamingData::writeUnsignedInt,
                 (dout, v) -> dout.writeInt(v.intValue()),
                 BufferedData::writeUnsignedInt,
                 ReadableStreamingData::readUnsignedInt,
                 (dout) -> Integer.toUnsignedLong(dout.readInt()),
-                BufferedData::readUnsignedInt
-        );
-        doTest(value,
+                BufferedData::readUnsignedInt);
+        doTest(
+                value,
                 (d, v) -> d.writeUnsignedInt(v, ByteOrder.LITTLE_ENDIAN),
                 (d, v) -> d.writeInt(Integer.reverseBytes(v.intValue())),
                 (d, v) -> d.writeUnsignedInt(v, ByteOrder.LITTLE_ENDIAN),
                 d -> d.readUnsignedInt(ByteOrder.LITTLE_ENDIAN),
                 d -> Integer.toUnsignedLong(Integer.reverseBytes(d.readInt())),
-                d -> d.readUnsignedInt(ByteOrder.LITTLE_ENDIAN)
-        );
+                d -> d.readUnsignedInt(ByteOrder.LITTLE_ENDIAN));
     }
 
     static Stream<Long> longsTestCases() {
         return Stream.of(
-                Long.MIN_VALUE,
-                Long.MIN_VALUE + 1,
-                Integer.MIN_VALUE - 1L,
-                Integer.MIN_VALUE,
-                Integer.MIN_VALUE + 1,
-                -9007199254740992L,
-                -35184372088832L,
-                -137438953472L,
-                -536870912,
-                -4194304,
-                -65536,
-                -65535,
-                -65534,
-                -32768,
-                -100,
-                -66,
-                -7,
-                -1,
-                0,
-                1,
-                9,
-                51,
-                101,
-                1023,
-                1024,
-                1025,
-                32768,
-                4194304,
-                536870912,
-                137438953472L,
-                35184372088832L,
-                9007199254740992L,
-                Integer.MAX_VALUE - 1L,
-                Integer.MAX_VALUE,
-                Integer.MAX_VALUE + 1L,
-                Long.MAX_VALUE - 1L,
-                Long.MAX_VALUE).map(Number::longValue);
-}
+                        Long.MIN_VALUE,
+                        Long.MIN_VALUE + 1,
+                        Integer.MIN_VALUE - 1L,
+                        Integer.MIN_VALUE,
+                        Integer.MIN_VALUE + 1,
+                        -9007199254740992L,
+                        -35184372088832L,
+                        -137438953472L,
+                        -536870912,
+                        -4194304,
+                        -65536,
+                        -65535,
+                        -65534,
+                        -32768,
+                        -100,
+                        -66,
+                        -7,
+                        -1,
+                        0,
+                        1,
+                        9,
+                        51,
+                        101,
+                        1023,
+                        1024,
+                        1025,
+                        32768,
+                        4194304,
+                        536870912,
+                        137438953472L,
+                        35184372088832L,
+                        9007199254740992L,
+                        Integer.MAX_VALUE - 1L,
+                        Integer.MAX_VALUE,
+                        Integer.MAX_VALUE + 1L,
+                        Long.MAX_VALUE - 1L,
+                        Long.MAX_VALUE)
+                .map(Number::longValue);
+    }
 
     @ParameterizedTest
     @MethodSource("longsTestCases")
     void longTest(Long value) throws IOException {
-        doTest(value,
+        doTest(
+                value,
                 WritableStreamingData::writeLong,
                 java.io.DataOutputStream::writeLong,
                 BufferedData::writeLong,
                 ReadableStreamingData::readLong,
                 java.io.DataInputStream::readLong,
-                BufferedData::readLong
-        );
-        doTest(value,
+                BufferedData::readLong);
+        doTest(
+                value,
                 (d, v) -> d.writeLong(v, ByteOrder.LITTLE_ENDIAN),
                 (d, v) -> d.writeLong(Long.reverseBytes(v)),
                 (d, v) -> d.writeLong(v, ByteOrder.LITTLE_ENDIAN),
                 d -> d.readLong(ByteOrder.LITTLE_ENDIAN),
                 d -> Long.reverseBytes(d.readLong()),
-                d -> d.readLong(ByteOrder.LITTLE_ENDIAN)
-        );
+                d -> d.readLong(ByteOrder.LITTLE_ENDIAN));
     }
+
     @ParameterizedTest
     @MethodSource("intsTestCases")
     void bytesVarIntTest(int value) throws IOException {
@@ -246,50 +249,81 @@ void bytesVarLongTest(long value) throws IOException {
     }
 
     static Stream<Float> floatsTestCases() {
-        return Stream.of(Float.MIN_VALUE, Integer.MIN_VALUE-1L,-100,-66,-7,-1,0,1,9,51,101,Integer.MAX_VALUE+1L,Float.MAX_VALUE).map(Number::floatValue);
+        return Stream.of(
+                        Float.MIN_VALUE,
+                        Integer.MIN_VALUE - 1L,
+                        -100,
+                        -66,
+                        -7,
+                        -1,
+                        0,
+                        1,
+                        9,
+                        51,
+                        101,
+                        Integer.MAX_VALUE + 1L,
+                        Float.MAX_VALUE)
+                .map(Number::floatValue);
     }
+
     @ParameterizedTest
     @MethodSource("floatsTestCases")
     void floatTest(Float value) throws IOException {
-        doTest(value,
+        doTest(
+                value,
                 WritableStreamingData::writeFloat,
                 java.io.DataOutputStream::writeFloat,
                 BufferedData::writeFloat,
                 ReadableStreamingData::readFloat,
                 java.io.DataInputStream::readFloat,
-                BufferedData::readFloat
-        );
-        doTest(value,
+                BufferedData::readFloat);
+        doTest(
+                value,
                 (d, v) -> d.writeFloat(v, ByteOrder.LITTLE_ENDIAN),
-                (d, v) -> d.writeInt( Integer.reverseBytes(Float.floatToIntBits(v))),
+                (d, v) -> d.writeInt(Integer.reverseBytes(Float.floatToIntBits(v))),
                 (d, v) -> d.writeFloat(v, ByteOrder.LITTLE_ENDIAN),
                 d -> d.readFloat(ByteOrder.LITTLE_ENDIAN),
                 d -> Float.intBitsToFloat(Integer.reverseBytes(d.readInt())),
-                d -> d.readFloat(ByteOrder.LITTLE_ENDIAN)
-        );
+                d -> d.readFloat(ByteOrder.LITTLE_ENDIAN));
     }
+
     static Stream<Double> doublesTestCases() {
-        return Stream.of(Double.MIN_VALUE, Integer.MIN_VALUE-1L,-100,-66,-7,-1,0,1,9,51,101,Integer.MAX_VALUE+1L,Double.MAX_VALUE).map(Number::doubleValue);
+        return Stream.of(
+                        Double.MIN_VALUE,
+                        Integer.MIN_VALUE - 1L,
+                        -100,
+                        -66,
+                        -7,
+                        -1,
+                        0,
+                        1,
+                        9,
+                        51,
+                        101,
+                        Integer.MAX_VALUE + 1L,
+                        Double.MAX_VALUE)
+                .map(Number::doubleValue);
     }
+
     @ParameterizedTest
     @MethodSource("doublesTestCases")
     void doubleTest(Double value) throws IOException {
-        doTest(value,
+        doTest(
+                value,
                 WritableStreamingData::writeDouble,
                 java.io.DataOutputStream::writeDouble,
                 BufferedData::writeDouble,
                 ReadableStreamingData::readDouble,
                 java.io.DataInputStream::readDouble,
-                BufferedData::readDouble
-        );
-        doTest(value,
+                BufferedData::readDouble);
+        doTest(
+                value,
                 (d, v) -> d.writeDouble(v, ByteOrder.LITTLE_ENDIAN),
-                (d, v) -> d.writeLong( Long.reverseBytes(Double.doubleToLongBits(v))),
+                (d, v) -> d.writeLong(Long.reverseBytes(Double.doubleToLongBits(v))),
                 (d, v) -> d.writeDouble(v, ByteOrder.LITTLE_ENDIAN),
                 d -> d.readDouble(ByteOrder.LITTLE_ENDIAN),
                 d -> Double.longBitsToDouble(Long.reverseBytes(d.readLong())),
-                d -> d.readDouble(ByteOrder.LITTLE_ENDIAN)
-        );
+                d -> d.readDouble(ByteOrder.LITTLE_ENDIAN));
     }
 
     @ParameterizedTest
@@ -434,14 +468,15 @@ void compatInt32Int64(final long num) {
     // ==============================================================================================================
     // Generic test case used by all tests :-)
 
-    static <T> void doTest(T value,
-                           IoWrite<WritableStreamingData,T> dataOutputWriteMethod,
-                           IoWrite<java.io.DataOutputStream,T> javaDataOutputWriteMethod,
-                           IoWrite<BufferedData,T> dataBufferWriteMethod,
-                           IoRead<ReadableStreamingData,T> dataInputReadMethod,
-                           IoRead<java.io.DataInputStream,T> javaDataInputReadMethod,
-                           IoRead<BufferedData,T> dataBufferReadMethod
-    ) throws IOException {
+    static <T> void doTest(
+            T value,
+            IoWrite<WritableStreamingData, T> dataOutputWriteMethod,
+            IoWrite<java.io.DataOutputStream, T> javaDataOutputWriteMethod,
+            IoWrite<BufferedData, T> dataBufferWriteMethod,
+            IoRead<ReadableStreamingData, T> dataInputReadMethod,
+            IoRead<java.io.DataInputStream, T> javaDataInputReadMethod,
+            IoRead<BufferedData, T> dataBufferReadMethod)
+            throws IOException {
         try {
             // write to byte array with DataIO DataOutputStream
             ByteArrayOutputStream bout = new ByteArrayOutputStream();
@@ -503,6 +538,7 @@ static <T> void doTest(T value,
     public interface IoWrite<T, U> {
         void write(T t, U u) throws IOException;
     }
+
     public interface IoRead<T, U> {
         U read(T t) throws IOException;
     }
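
Editor's note on the DataTest hunks above: the generic doTest() reshaped here encodes a round-trip check, writing the same value through several writer implementations and requiring each reader to return it unchanged while the raw bytes agree. The sketch below shows only the JDK-only slice of that pattern under that reading of the test; it is an illustration, not the project's helper.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

final class RoundTripSketch {
    // Write a long with DataOutputStream, read it back with DataInputStream.
    static long roundTripLong(long value) throws IOException {
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bout)) {
            out.writeLong(value);
        }
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(bout.toByteArray()))) {
            return in.readLong();
        }
    }

    public static void main(String[] args) throws IOException {
        long probe = 9007199254740992L; // one of the longsTestCases values above
        if (roundTripLong(probe) != probe) {
            throw new AssertionError("round trip changed the value");
        }
    }
}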
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialDataTest.java
index 2ec6eee6..ed709bc1 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialDataTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialDataTest.java
@@ -1,17 +1,16 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
+import static org.assertj.core.api.Assertions.assertThat;
+
 import com.hedera.pbj.runtime.io.stream.EOFException;
 import edu.umd.cs.findbugs.annotations.NonNull;
-import org.junit.jupiter.api.DisplayName;
-import org.junit.jupiter.api.Test;
-
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.BufferUnderflowException;
 import java.util.function.Supplier;
-
-import static org.assertj.core.api.Assertions.assertThat;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
 
 final class ReadableSequentialDataTest extends ReadableSequentialTestBase {
 
@@ -23,7 +22,7 @@ protected ReadableSequentialData emptySequence() {
 
     @NonNull
     private ReadableSequentialData throwingSequence() {
-        return new StubbedSequence(new byte[] { 1 }, () -> new EOFException());
+        return new StubbedSequence(new byte[] {1}, () -> new EOFException());
     }
 
     @Test
@@ -42,7 +41,7 @@ void throwingSequenceTest() {
     @Test
     @DisplayName("Verify asInputStream()")
     void testAsInputStream() throws IOException {
-        ReadableSequentialData sequence = sequence(new byte[]{1, 2, 3, (byte) 254, (byte) 255});
+        ReadableSequentialData sequence = sequence(new byte[] {1, 2, 3, (byte) 254, (byte) 255});
         InputStream inputStream = sequence.asInputStream();
 
         assertThat(inputStream.read()).isEqualTo(1);
@@ -61,7 +60,7 @@ void testAsInputStream() throws IOException {
     @NonNull
     @Override
     protected ReadableSequentialData fullyUsedSequence() {
-        final var seq = new StubbedSequence(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
+        final var seq = new StubbedSequence(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10});
         seq.skip(10);
         return seq;
     }
@@ -81,7 +80,8 @@ private static final class StubbedSequence implements ReadableSequentialData {
 
         private StubbedSequence(
                 @NonNull final byte[] bytes,
-                @NonNull final Supplier<? extends RuntimeException> unconditionalExceptionSupplier) {
+                @NonNull
+                        final Supplier<? extends RuntimeException> unconditionalExceptionSupplier) {
             this.bytes = bytes;
             this.limit = this.bytes.length;
             this.unconditionalExceptionSupplier = unconditionalExceptionSupplier;
@@ -91,7 +91,6 @@ private StubbedSequence(@NonNull final byte[] bytes) {
             this(bytes, null);
         }
 
-
         @Override
         public long capacity() {
             return bytes.length;
@@ -134,6 +133,5 @@ public byte readByte() {
 
             return bytes[(int) position++];
         }
-
     }
 }
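
Editor's note on the ReadableSequentialDataTest hunk above: testAsInputStream relies on the standard InputStream contract that read() returns bytes as unsigned ints in 0..255 and -1 at end of stream, which is why (byte) 254 and (byte) 255 appear in the test data. A small JDK-only illustration of that contract, independent of the PBJ adapter:

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

final class UnsignedReadSketch {
    public static void main(String[] args) throws IOException {
        // Same byte values as the test data; negative bytes come back as 254 and 255.
        InputStream in = new ByteArrayInputStream(new byte[] {1, 2, 3, (byte) 254, (byte) 255});
        int b;
        while ((b = in.read()) != -1) {
            System.out.println(b); // prints 1, 2, 3, 254, 255
        }
    }
}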
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialTestBase.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialTestBase.java
index 2ddba1d2..be718288 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialTestBase.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableSequentialTestBase.java
@@ -7,7 +7,6 @@
 
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
 import edu.umd.cs.findbugs.annotations.NonNull;
-
 import java.nio.BufferUnderflowException;
 import java.nio.charset.StandardCharsets;
 import org.junit.jupiter.api.DisplayName;
@@ -25,7 +24,7 @@ public abstract class ReadableSequentialTestBase extends ReadableTestBase {
 
     @Override
     @NonNull
-    protected abstract ReadableSequentialData sequence(@NonNull byte [] arr);
+    protected abstract ReadableSequentialData sequence(@NonNull byte[] arr);
 
     @Test
     @DisplayName("Stream with no data")
@@ -160,5 +159,4 @@ void skipMoreThanAvailable() {
         final var stream = sequence("0123456789".getBytes(StandardCharsets.UTF_8));
         assertThrows(BufferUnderflowException.class, () -> stream.skip(20));
     }
-
 }
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableTestBase.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableTestBase.java
index 84e3a5f4..da6a29c4 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableTestBase.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/ReadableTestBase.java
@@ -1,11 +1,21 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
+import static java.nio.ByteOrder.BIG_ENDIAN;
+import static java.nio.ByteOrder.LITTLE_ENDIAN;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
 import com.hedera.pbj.runtime.io.buffer.Bytes;
 import com.hedera.pbj.runtime.io.buffer.RandomAccessData;
 import com.hedera.pbj.runtime.io.stream.ReadableStreamingData;
 import edu.umd.cs.findbugs.annotations.NonNull;
+import java.nio.BufferOverflowException;
+import java.nio.BufferUnderflowException;
+import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
 import org.junit.jupiter.api.Disabled;
 import org.junit.jupiter.api.DisplayName;
 import org.junit.jupiter.api.Nested;
@@ -14,28 +24,20 @@
 import org.junit.jupiter.params.provider.CsvSource;
 import org.junit.jupiter.params.provider.ValueSource;
 
-import java.nio.BufferOverflowException;
-import java.nio.BufferUnderflowException;
-import java.nio.ByteBuffer;
-import java.nio.charset.StandardCharsets;
-import java.util.Arrays;
-import static java.nio.ByteOrder.BIG_ENDIAN;
-import static java.nio.ByteOrder.LITTLE_ENDIAN;
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.assertj.core.api.Assertions.assertThatThrownBy;
-
 /**
- * Base test class for testing {@link ReadableSequentialData} and {@link RandomAccessData}. Both classes
- * have basically the same methods, where one has an implicit cursor position and the other has an explicit
- * offset for each method. There are several concrete base classes that should receive all the same testing.
+ * Base test class for testing {@link ReadableSequentialData} and {@link RandomAccessData}. Both
+ * classes have basically the same methods, where one has an implicit cursor position and the other
+ * has an explicit offset for each method. There are several concrete base classes that should
+ * receive all the same testing.
  *
- * <p> I will implement this test in terms of a {@link ReadableSequentialData}, which will apply to
- * {@link ReadableStreamingData} and {@link BufferedData}, and by wrapping the {@link RandomAccessData}
- * with a {@link ReadableSequentialData} adapter.
+ * <p>I will implement this test in terms of a {@link ReadableSequentialData}, which will apply to
+ * {@link ReadableStreamingData} and {@link BufferedData}, and by wrapping the {@link
+ * RandomAccessData} with a {@link ReadableSequentialData} adapter.
  */
 public abstract class ReadableTestBase extends SequentialTestBase {
 
-    protected static final byte[] TEST_BYTES = "ABCDEFGHIJKLMNOPQRSTUVWXYZ".getBytes(StandardCharsets.UTF_8);
+    protected static final byte[] TEST_BYTES =
+            "ABCDEFGHIJKLMNOPQRSTUVWXYZ".getBytes(StandardCharsets.UTF_8);
 
     @NonNull
     @Override
@@ -112,7 +114,8 @@ void read() {
     @DisplayName("readUnsignedByte()")
     final class ReadUnsignedByteTest {
         @Test
-        @DisplayName("Reading an unsigned byte from an empty sequence throws BufferUnderflowException")
+        @DisplayName(
+                "Reading an unsigned byte from an empty sequence throws BufferUnderflowException")
         void readFromEmptyDataThrows() {
             // Given an empty sequence
             final var seq = emptySequence();
@@ -121,7 +124,9 @@ void readFromEmptyDataThrows() {
         }
 
         @Test
-        @DisplayName("Reading an unsigned byte from a full read sequence throws BufferUnderflowException")
+        @DisplayName(
+                "Reading an unsigned byte from a full read sequence throws"
+                        + " BufferUnderflowException")
         void readFromFullyReadDataThrows() {
             // Given a fully read sequence
             final var seq = fullyUsedSequence();
@@ -143,9 +148,11 @@ void readPastLimit() {
         @Test
         @DisplayName("Reading an unsigned byte")
         void read() {
-            // Given a sequence of bytes (with a single byte that could be interpreted as negative if signed)
-            final var seq = sequence(new byte[] { (byte) 0b1110_0011 });
-            // When we read the byte, then we get the expected byte and move the position forward by a single byte
+            // Given a sequence of bytes (with a single byte that could be interpreted as negative
+            // if signed)
+            final var seq = sequence(new byte[] {(byte) 0b1110_0011});
+            // When we read the byte, then we get the expected byte and move the position forward by
+            // a single byte
             final var pos = seq.position();
             assertThat(seq.readUnsignedByte()).isEqualTo(0b1110_0011);
             assertThat(seq.position()).isEqualTo(pos + 1);
@@ -163,17 +170,22 @@ void readNullDstThrows() {
 
             // When we try to read bytes using a null byte array, then we get a NullPointerException
             //noinspection DataFlowIssue
-            assertThatThrownBy(() -> seq.readBytes((byte[]) null)).isInstanceOf(NullPointerException.class);
+            assertThatThrownBy(() -> seq.readBytes((byte[]) null))
+                    .isInstanceOf(NullPointerException.class);
             //noinspection DataFlowIssue
-            assertThatThrownBy(() -> seq.readBytes(null, 0, 10)).isInstanceOf(NullPointerException.class);
+            assertThatThrownBy(() -> seq.readBytes(null, 0, 10))
+                    .isInstanceOf(NullPointerException.class);
 
             // When we try to read bytes using a null ByteBuffer, then we get a NullPointerException
             //noinspection DataFlowIssue
-            assertThatThrownBy(() -> seq.readBytes((ByteBuffer) null)).isInstanceOf(NullPointerException.class);
+            assertThatThrownBy(() -> seq.readBytes((ByteBuffer) null))
+                    .isInstanceOf(NullPointerException.class);
 
-            // When we try to read bytes using a null BufferedData, then we get a NullPointerException
+            // When we try to read bytes using a null BufferedData, then we get a
+            // NullPointerException
             //noinspection DataFlowIssue
-            assertThatThrownBy(() -> seq.readBytes((BufferedData) null)).isInstanceOf(NullPointerException.class);
+            assertThatThrownBy(() -> seq.readBytes((BufferedData) null))
+                    .isInstanceOf(NullPointerException.class);
         }
 
         @Test
@@ -181,12 +193,15 @@ void readNullDstThrows() {
         void negativeOffsetThrows() {
             // Given a sequence of bytes
             final var seq = sequence(TEST_BYTES);
-            // When we try to read bytes using a byte array with a negative offset, then we get an IndexOutOfBoundsException
-            assertThatThrownBy(() -> seq.readBytes(new byte[10], -1, 10)).isInstanceOf(IndexOutOfBoundsException.class);
+            // When we try to read bytes using a byte array with a negative offset, then we get an
+            // IndexOutOfBoundsException
+            assertThatThrownBy(() -> seq.readBytes(new byte[10], -1, 10))
+                    .isInstanceOf(IndexOutOfBoundsException.class);
         }
 
         @Test
-        @DisplayName("Reading bytes with an offset that is too large throws IndexOutOfBoundsException")
+        @DisplayName(
+                "Reading bytes with an offset that is too large throws IndexOutOfBoundsException")
         void tooLargeOffsetThrows() {
             // Given a sequence of bytes
             final var seq = sequence(TEST_BYTES);
@@ -194,10 +209,12 @@ void tooLargeOffsetThrows() {
             // then we get an IndexOutOfBoundsException
             assertThatThrownBy(() -> seq.readBytes(new byte[10], 11, 10))
                     .isInstanceOf(IndexOutOfBoundsException.class);
-            // When we try to read bytes using a byte array with an offset + maxLength that is too large,
+            // When we try to read bytes using a byte array with an offset + maxLength that is too
+            // large,
             // then we get an IndexOutOfBoundsException
             assertThatThrownBy(() -> seq.readBytes(new byte[10], 9, 2))
-                    .isInstanceOfAny(IndexOutOfBoundsException.class, BufferOverflowException.class);
+                    .isInstanceOfAny(
+                            IndexOutOfBoundsException.class, BufferOverflowException.class);
         }
 
         @Test
@@ -205,9 +222,12 @@ void tooLargeOffsetThrows() {
         void negativeLengthThrows() {
             // Given a sequence of bytes
             final var seq = sequence(TEST_BYTES);
-            // When we try to read bytes using a byte array with a negative length, then we get an IllegalArgumentException
-            assertThatThrownBy(() -> seq.readBytes(new byte[10], 0, -1)).isInstanceOf(IllegalArgumentException.class);
-            assertThatThrownBy(() -> seq.readBytes(-1)).isInstanceOf(IllegalArgumentException.class);
+            // When we try to read bytes using a byte array with a negative length, then we get an
+            // IllegalArgumentException
+            assertThatThrownBy(() -> seq.readBytes(new byte[10], 0, -1))
+                    .isInstanceOf(IllegalArgumentException.class);
+            assertThatThrownBy(() -> seq.readBytes(-1))
+                    .isInstanceOf(IllegalArgumentException.class);
         }
 
         @Test
@@ -257,7 +277,9 @@ void readFromFullyReadDataIsNoOp() {
         }
 
         @Test
-        @DisplayName("Reading bytes where there is nothing remaining because we are at the limit is a no-op")
+        @DisplayName(
+                "Reading bytes where there is nothing remaining because we are at the limit is a"
+                        + " no-op")
         void readPastLimit() {
             // Given a sequence of bytes with a limit where position == limit
             final var seq = sequence(TEST_BYTES);
@@ -291,7 +313,8 @@ void readingBytesWithTooLargeLength() {
             // When we try to read Bytes, we throw a BufferUnderflowException
             assertThatThrownBy(() -> seq.readBytes(1)).isInstanceOf(BufferUnderflowException.class);
             assertThatThrownBy(() -> seq.readBytes(5)).isInstanceOf(BufferUnderflowException.class);
-            assertThatThrownBy(() -> seq.readBytes(10)).isInstanceOf(BufferUnderflowException.class);
+            assertThatThrownBy(() -> seq.readBytes(10))
+                    .isInstanceOf(BufferUnderflowException.class);
         }
 
         @Test
@@ -309,15 +332,18 @@ void readZeroDstByteArray() {
         }
 
         @Test
-        @DisplayName("Reading bytes into a dst byte array with offset and length where the dst has length of 0")
+        @DisplayName(
+                "Reading bytes into a dst byte array with offset and length where the dst has"
+                        + " length of 0")
         void readZeroDstByteArrayWithOffset() {
             // Given a sequence of bytes and a destination byte array
             final var seq = sequence(TEST_BYTES);
             final var dst = new byte[10];
             final var pos = seq.position();
-            // When we try to read bytes into the dst but with a 0 length, then the position does not change,
+            // When we try to read bytes into the dst but with a 0 length, then the position does
+            // not change,
             // and the destination array is empty
-            assertThat(seq.readBytes(dst, 5,0)).isZero();
+            assertThat(seq.readBytes(dst, 5, 0)).isZero();
             assertThat(seq.position()).isEqualTo(pos);
             assertThat(dst).containsExactly(0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
         }
@@ -351,24 +377,29 @@ void readZeroDstBufferedData() {
         }
 
         @Test
-        @DisplayName("Reading bytes into a dst byte array where the dst is smaller than the sequence")
+        @DisplayName(
+                "Reading bytes into a dst byte array where the dst is smaller than the sequence")
         void readSmallerDstByteArray() {
             // Given a sequence of bytes and a destination byte array
             final var seq = sequence(TEST_BYTES);
-            // When we try reading into the dst (twice, once from the beginning and once in the middle)
+            // When we try reading into the dst (twice, once from the beginning and once in the
+            // middle)
             for (int i = 0; i < 2; i++) {
                 final var dst = new byte[5];
                 final var pos = seq.position();
                 final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5);
                 assertThat(seq.readBytes(dst)).isEqualTo(5);
-                // Then the dst is filled with the bytes from the sequence, and the position is updated
+                // Then the dst is filled with the bytes from the sequence, and the position is
+                // updated
                 assertThat(dst).isEqualTo(subset);
                 assertThat(seq.position()).isEqualTo(pos + 5);
             }
         }
 
         @Test
-        @DisplayName("Reading bytes into a dst byte array with offset where the dst is smaller than the sequence")
+        @DisplayName(
+                "Reading bytes into a dst byte array with offset where the dst is smaller than the"
+                        + " sequence")
         void readSmallerDstByteArrayWithOffset() {
             final var seq = sequence(TEST_BYTES);
             // Do twice, so we read once from sequence at the beginning and once in the middle
@@ -383,7 +414,8 @@ void readSmallerDstByteArrayWithOffset() {
         }
 
         @Test
-        @DisplayName("Reading bytes into a dst ByteBuffer where the dst is smaller than the sequence")
+        @DisplayName(
+                "Reading bytes into a dst ByteBuffer where the dst is smaller than the sequence")
         void readSmallerDstByteBuffer() {
             final var seq = sequence(TEST_BYTES);
             for (int i = 0; i < 2; i++) {
@@ -397,7 +429,9 @@ void readSmallerDstByteBuffer() {
         }
 
         @Test
-        @DisplayName("Reading bytes into a dst ByteBuffer with offset where the dst is smaller than the sequence")
+        @DisplayName(
+                "Reading bytes into a dst ByteBuffer with offset where the dst is smaller than the"
+                        + " sequence")
         void readSmallerDstByteBufferWithOffset() {
             final var seq = sequence(TEST_BYTES);
             for (int i = 0; i < 2; i++) {
@@ -412,7 +446,8 @@ void readSmallerDstByteBufferWithOffset() {
         }
 
         @Test
-        @DisplayName("Reading bytes into a dst BufferedData where the dst is smaller than the sequence")
+        @DisplayName(
+                "Reading bytes into a dst BufferedData where the dst is smaller than the sequence")
         void readSmallerDstBufferedData() {
             final var seq = sequence(TEST_BYTES);
             for (int i = 0; i < 2; i++) {
@@ -426,7 +461,9 @@ void readSmallerDstBufferedData() {
         }
 
         @Test
-        @DisplayName("Reading bytes into a dst BufferedData with offset where the dst is smaller than the sequence")
+        @DisplayName(
+                "Reading bytes into a dst BufferedData with offset where the dst is smaller than"
+                        + " the sequence")
         void readSmallerDstBufferedDataWithOffset() {
             final var seq = sequence(TEST_BYTES);
             for (int i = 0; i < 2; i++) {
@@ -441,7 +478,9 @@ void readSmallerDstBufferedDataWithOffset() {
         }
 
         @Test
-        @DisplayName("Reading bytes into a dst byte array where the dst is the same length as the sequence")
+        @DisplayName(
+                "Reading bytes into a dst byte array where the dst is the same length as the"
+                        + " sequence")
         void readDstByteArray() {
             final var seq = sequence(TEST_BYTES);
             final var dst = new byte[TEST_BYTES.length];
@@ -452,7 +491,9 @@ void readDstByteArray() {
         }
 
         @Test
-        @DisplayName("Reading bytes into a dst byte array with offset where the dst is the same length as the sequence")
+        @DisplayName(
+                "Reading bytes into a dst byte array with offset where the dst is the same length"
+                        + " as the sequence")
         void readDstByteArrayWithOffset() {
             final var seq = sequence(TEST_BYTES);
             final var dst = new byte[TEST_BYTES.length + 10];
@@ -463,7 +504,9 @@ void readDstByteArrayWithOffset() {
         }
 
         @Test
-        @DisplayName("Reading bytes into a dst ByteBuffer where the dst is the same length as the sequence")
+        @DisplayName(
+                "Reading bytes into a dst ByteBuffer where the dst is the same length as the"
+                        + " sequence")
         void readDstByteBuffer() {
             final var seq = sequence(TEST_BYTES);
             final var dst = ByteBuffer.allocate(TEST_BYTES.length);
@@ -474,7 +517,9 @@ void readDstByteBuffer() {
         }
 
         @Test
-        @DisplayName("Reading bytes into a dst ByteBuffer with offset where the dst is the same length as the sequence")
+        @DisplayName(
+                "Reading bytes into a dst ByteBuffer with offset where the dst is the same length"
+                        + " as the sequence")
         void readDstByteBufferWithOffset() {
             final var seq = sequence(TEST_BYTES);
             final var dst = ByteBuffer.allocate(TEST_BYTES.length + 10);
@@ -487,7 +532,9 @@ void readDstByteBufferWithOffset() {
         }
 
         @Test
-        @DisplayName("Reading bytes into a dst BufferedData where the dst is the same length as the sequence")
+        @DisplayName(
+                "Reading bytes into a dst BufferedData where the dst is the same length as the"
+                        + " sequence")
         void readDstBufferedData() {
             final var seq = sequence(TEST_BYTES);
             final var dst = BufferedData.allocate(TEST_BYTES.length);
@@ -498,7 +545,9 @@ void readDstBufferedData() {
         }
 
         @Test
-        @DisplayName("Reading bytes into a dst BufferedData with offset where the dst is the same length as the sequence")
+        @DisplayName(
+                "Reading bytes into a dst BufferedData with offset where the dst is the same length"
+                        + " as the sequence")
         void readDstBufferedDataWithOffset() {
             final var seq = sequence(TEST_BYTES);
             final var dst = BufferedData.allocate(TEST_BYTES.length + 10);
@@ -511,8 +560,9 @@ void readDstBufferedDataWithOffset() {
         }
 
         @ParameterizedTest
-        @ValueSource(ints = { 1, 5, 26 })
-        @DisplayName("Reading a number of bytes into Bytes where the length is > 0 and <= remaining")
+        @ValueSource(ints = {1, 5, 26})
+        @DisplayName(
+                "Reading a number of bytes into Bytes where the length is > 0 and <= remaining")
         void readBytes(final int length) {
             final var seq = sequence(TEST_BYTES);
             final var pos = seq.position();
@@ -522,7 +572,8 @@ void readBytes(final int length) {
         }
 
         @Test
-        @DisplayName("Reading bytes into a dst byte array where the dst is larger than the sequence")
+        @DisplayName(
+                "Reading bytes into a dst byte array where the dst is larger than the sequence")
         void readLargerDstByteArray() {
             // Given a sequence of bytes
             final var seq = sequence(TEST_BYTES);
@@ -536,7 +587,9 @@ void readLargerDstByteArray() {
         }
 
         @Test
-        @DisplayName("Reading bytes into a dst byte array with offset where the dst is larger than the sequence")
+        @DisplayName(
+                "Reading bytes into a dst byte array with offset where the dst is larger than the"
+                        + " sequence")
         void readLargerDstByteArrayWithOffset() {
             // Given a sequence of bytes
             final var seq = sequence(TEST_BYTES);
@@ -546,11 +599,13 @@ void readLargerDstByteArrayWithOffset() {
             // Then the sequence is exhausted and the array is filled starting at index 5
             assertThat(seq.remaining()).isZero();
             assertThat(seq.hasRemaining()).isFalse();
-            assertThat(Arrays.copyOfRange(arr, 5, TEST_BYTES.length + 5 )).containsExactly(TEST_BYTES);
+            assertThat(Arrays.copyOfRange(arr, 5, TEST_BYTES.length + 5))
+                    .containsExactly(TEST_BYTES);
         }
 
         @Test
-        @DisplayName("Reading bytes into a dst ByteBuffer where the dst is larger than the sequence")
+        @DisplayName(
+                "Reading bytes into a dst ByteBuffer where the dst is larger than the sequence")
         void readLargerDstByteBuffer() {
             // Given a sequence of bytes
             final var seq = sequence(TEST_BYTES);
@@ -564,7 +619,9 @@ void readLargerDstByteBuffer() {
         }
 
         @Test
-        @DisplayName("Reading bytes into a dst ByteBuffer with offset where the dst is larger than the sequence")
+        @DisplayName(
+                "Reading bytes into a dst ByteBuffer with offset where the dst is larger than the"
+                        + " sequence")
         void readLargerDstByteBufferWithOffset() {
             // Given a sequence of bytes
             final var seq = sequence(TEST_BYTES);
@@ -576,11 +633,13 @@ void readLargerDstByteBufferWithOffset() {
             // Then the sequence is exhausted and the buffer is filled starting at index 5
             assertThat(seq.remaining()).isZero();
             assertThat(seq.hasRemaining()).isFalse();
-            assertThat(Arrays.copyOfRange(buffer.array(), 5, TEST_BYTES.length + 5 )).containsExactly(TEST_BYTES);
+            assertThat(Arrays.copyOfRange(buffer.array(), 5, TEST_BYTES.length + 5))
+                    .containsExactly(TEST_BYTES);
         }
 
         @Test
-        @DisplayName("Reading bytes into a dst BufferedData where the dst is larger than the sequence")
+        @DisplayName(
+                "Reading bytes into a dst BufferedData where the dst is larger than the sequence")
         void readLargerDstBufferedData() {
             // Given a sequence of bytes
             final var seq = sequence(TEST_BYTES);
@@ -594,7 +653,9 @@ void readLargerDstBufferedData() {
         }
 
         @Test
-        @DisplayName("Reading bytes into a dst BufferedData with offset where the dst is larger than the sequence")
+        @DisplayName(
+                "Reading bytes into a dst BufferedData with offset where the dst is larger than the"
+                        + " sequence")
         void readLargerDstBufferedDataWithOffset() {
             // Given a sequence of bytes
             final var seq = sequence(TEST_BYTES);
@@ -611,15 +672,16 @@ void readLargerDstBufferedDataWithOffset() {
 
         @ParameterizedTest(name = "offset={0}, length={1}")
         @CsvSource({
-                "-1, 1", // Negative offset
-                "100, 10", // Offset larger than the dst array size
-                "5, 10", // Offset+Length larger than the dst array size
+            "-1, 1", // Negative offset
+            "100, 10", // Offset larger than the dst array size
+            "5, 10", // Offset+Length larger than the dst array size
         })
         @DisplayName("Reading bytes where the dst offset and length are bad")
         void badOffsetLength(int offset, int length) {
             final var seq = sequence(TEST_BYTES);
             assertThatThrownBy(() -> seq.readBytes(new byte[10], offset, length))
-                    .isInstanceOfAny(IndexOutOfBoundsException.class, BufferOverflowException.class);
+                    .isInstanceOfAny(
+                            IndexOutOfBoundsException.class, BufferOverflowException.class);
         }
     }
 
@@ -635,13 +697,17 @@ void negativeLength() {
 
         @Test
         @DisplayName("Length that is greater than remaining throws BufferUnderflowException")
-        @Disabled("This has to be tested on the buffer level only, because for a Stream, the limit is too big")
+        @Disabled(
+                "This has to be tested on the buffer level only, because for a Stream, the limit is"
+                        + " too big")
         void lengthGreaterThanRemaining() {
             // TODO Move to buffer tests
             final var seq = sequence(TEST_BYTES);
             seq.skip(1);
-            assertThatThrownBy(() -> seq.view(TEST_BYTES.length)).isInstanceOf(BufferUnderflowException.class);
-            assertThatThrownBy(() -> seq.view(Integer.MAX_VALUE)).isInstanceOf(BufferUnderflowException.class);
+            assertThatThrownBy(() -> seq.view(TEST_BYTES.length))
+                    .isInstanceOf(BufferUnderflowException.class);
+            assertThatThrownBy(() -> seq.view(Integer.MAX_VALUE))
+                    .isInstanceOf(BufferUnderflowException.class);
         }
 
         @Test
@@ -651,7 +717,8 @@ void readPastLimit() {
             final var seq = sequence(TEST_BYTES);
             seq.limit(5);
             seq.skip(5);
-            // When we try to create a view with a length past the limit, then we get a BufferUnderflowException
+            // When we try to create a view with a length past the limit, then we get a
+            // BufferUnderflowException
             assertThatThrownBy(() -> seq.view(6)).isInstanceOf(BufferUnderflowException.class);
         }
 
@@ -683,7 +750,8 @@ void lengthPlusPositionIsTheLimit() {
             assertThat(seq.position()).isEqualTo(16);
             // When we create a view with a length of 10 bytes
             final var view = seq.view(10);
-            // Then we get the last 10 bytes of the sequence, AND it advances the position by that many bytes.
+            // Then we get the last 10 bytes of the sequence, AND it advances the position by that
+            // many bytes.
             assertThat(seq.position()).isEqualTo(26);
             // The view, when read, will have all 10 of its bytes
             assertThat(view.remaining()).isEqualTo(10);
@@ -730,11 +798,14 @@ void readPastLimit() {
             // When we try to read an int, then we get a BufferUnderflowException
             seq.skip(4); // Only 1 byte left, not enough
             assertThatThrownBy(seq::readInt).isInstanceOf(BufferUnderflowException.class);
-            assertThatThrownBy(() -> seq.readInt(LITTLE_ENDIAN)).isInstanceOf(BufferUnderflowException.class);
+            assertThatThrownBy(() -> seq.readInt(LITTLE_ENDIAN))
+                    .isInstanceOf(BufferUnderflowException.class);
         }
 
         @Test
-        @DisplayName("Reading an int when less than 4 bytes are available throws BufferUnderflowException")
+        @DisplayName(
+                "Reading an int when less than 4 bytes are available throws"
+                        + " BufferUnderflowException")
         void readInsufficientDataThrows() {
             for (int i = 0; i < Integer.BYTES - 1; i++) {
                 final var seq = sequence(new byte[i]);
@@ -749,7 +820,8 @@ void read(int value) {
             // Given a sequence with exactly 1 integer of data
             final var seq = sequence(asBytes(c -> c.putInt(value)));
             final var pos = seq.position();
-            // When we read an int, then it is the same as the one we wrote, and the position has moved forward
+            // When we read an int, then it is the same as the one we wrote, and the position has
+            // moved forward
             // by 4 bytes
             assertThat(seq.readInt()).isEqualTo(value);
             assertThat(seq.position()).isEqualTo(pos + 4);
@@ -778,16 +850,19 @@ void readBigEndian(int value) {
         @Test
         @DisplayName("Read a mixture of big and little endian data")
         void readMixedEndian() {
-            final var seq = sequence(asBytes(c -> {
-                c.order(BIG_ENDIAN);
-                c.putInt(0x01020304);
-                c.order(LITTLE_ENDIAN);
-                c.putInt(0x05060708);
-                c.order(BIG_ENDIAN);
-                c.putInt(0x090A0B0C);
-                c.order(LITTLE_ENDIAN);
-                c.putInt(0x0D0E0F10);
-            }));
+            final var seq =
+                    sequence(
+                            asBytes(
+                                    c -> {
+                                        c.order(BIG_ENDIAN);
+                                        c.putInt(0x01020304);
+                                        c.order(LITTLE_ENDIAN);
+                                        c.putInt(0x05060708);
+                                        c.order(BIG_ENDIAN);
+                                        c.putInt(0x090A0B0C);
+                                        c.order(LITTLE_ENDIAN);
+                                        c.putInt(0x0D0E0F10);
+                                    }));
             assertThat(seq.readInt()).isEqualTo(0x01020304);
             assertThat(seq.readInt(LITTLE_ENDIAN)).isEqualTo(0x05060708);
             assertThat(seq.readInt()).isEqualTo(0x090A0B0C);
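
The readMixedEndian() case writes each value in one byte order and reads it back with the same order; the wire bytes themselves carry no order, only the reader's declared order decides the resulting value. An illustrative sketch of that point, independent of the PBJ API:

    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;

    // Illustrative only: the same four bytes interpreted under different byte orders.
    final class ByteOrderSketch {
        public static void main(String[] args) {
            byte[] bytes = {0x01, 0x02, 0x03, 0x04};
            int big = ByteBuffer.wrap(bytes).order(ByteOrder.BIG_ENDIAN).getInt();       // 0x01020304
            int little = ByteBuffer.wrap(bytes).order(ByteOrder.LITTLE_ENDIAN).getInt(); // 0x04030201
            System.out.printf("%08X %08X%n", big, little);
        }
    }
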
@@ -799,14 +874,16 @@ void readMixedEndian() {
     @DisplayName("readUnsignedInt()")
     final class ReadUnsignedIntTest {
         @Test
-        @DisplayName("Reading an unsigned int from an empty sequence throws BufferUnderflowException")
+        @DisplayName(
+                "Reading an unsigned int from an empty sequence throws BufferUnderflowException")
         void readFromEmptyDataThrows() {
             final var seq = emptySequence();
             assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class);
         }
 
         @Test
-        @DisplayName("Reading an unsigned int from a full read sequence throws BufferUnderflowException")
+        @DisplayName(
+                "Reading an unsigned int from a full read sequence throws BufferUnderflowException")
         void readFromFullyReadDataThrows() {
             final var seq = fullyUsedSequence();
             assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class);
@@ -821,15 +898,19 @@ void readPastLimit() {
             // When we try to read an unsigned int, then we get a BufferUnderflowException
             seq.skip(4); // Only 1 byte left, not enough
             assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class);
-            assertThatThrownBy(() -> seq.readUnsignedInt(LITTLE_ENDIAN)).isInstanceOf(BufferUnderflowException.class);
+            assertThatThrownBy(() -> seq.readUnsignedInt(LITTLE_ENDIAN))
+                    .isInstanceOf(BufferUnderflowException.class);
         }
 
         @Test
-        @DisplayName("Reading an unsigned int when less than 4 bytes are available throws BufferUnderflowException")
+        @DisplayName(
+                "Reading an unsigned int when less than 4 bytes are available throws"
+                        + " BufferUnderflowException")
         void readInsufficientDataThrows() {
             for (int i = 0; i < Integer.BYTES - 1; i++) {
                 final var seq = sequence(new byte[i]);
-                assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class);
+                assertThatThrownBy(seq::readUnsignedInt)
+                        .isInstanceOf(BufferUnderflowException.class);
             }
         }
 
@@ -866,16 +947,19 @@ void readBigEndian(long value) {
         @Test
         @DisplayName("Read a mixture of big and little endian data")
         void readMixedEndian() {
-            final var seq = sequence(asBytes(c -> {
-                c.order(BIG_ENDIAN);
-                c.putInt(0x91020304);
-                c.order(LITTLE_ENDIAN);
-                c.putInt(0x95060708);
-                c.order(BIG_ENDIAN);
-                c.putInt(0x990A0B0C);
-                c.order(LITTLE_ENDIAN);
-                c.putInt(0x9D0E0F10);
-            }));
+            final var seq =
+                    sequence(
+                            asBytes(
+                                    c -> {
+                                        c.order(BIG_ENDIAN);
+                                        c.putInt(0x91020304);
+                                        c.order(LITTLE_ENDIAN);
+                                        c.putInt(0x95060708);
+                                        c.order(BIG_ENDIAN);
+                                        c.putInt(0x990A0B0C);
+                                        c.order(LITTLE_ENDIAN);
+                                        c.putInt(0x9D0E0F10);
+                                    }));
             assertThat(seq.readUnsignedInt()).isEqualTo(0x91020304L);
             assertThat(seq.readUnsignedInt(LITTLE_ENDIAN)).isEqualTo(0x95060708L);
             assertThat(seq.readUnsignedInt()).isEqualTo(0x990A0B0CL);
@@ -909,11 +993,14 @@ void readPastLimit() {
             // When we try to read a long, then we get a BufferUnderflowException
             seq.skip(4); // Only 1 byte left, not enough
             assertThatThrownBy(seq::readLong).isInstanceOf(BufferUnderflowException.class);
-            assertThatThrownBy(() -> seq.readLong(LITTLE_ENDIAN)).isInstanceOf(BufferUnderflowException.class);
+            assertThatThrownBy(() -> seq.readLong(LITTLE_ENDIAN))
+                    .isInstanceOf(BufferUnderflowException.class);
         }
 
         @Test
-        @DisplayName("Reading a long when less than 8 bytes are available throws BufferUnderflowException")
+        @DisplayName(
+                "Reading a long when less than 8 bytes are available throws"
+                        + " BufferUnderflowException")
         void readInsufficientDataThrows() {
             for (int i = 0; i < Long.BYTES - 1; i++) {
                 final var seq = sequence(new byte[i]);
@@ -954,16 +1041,19 @@ void readBigEndian(long value) {
         @Test
         @DisplayName("Read a mixture of big and little endian data")
         void readMixedEndian() {
-            final var seq = sequence(asBytes(c -> {
-                c.order(BIG_ENDIAN);
-                c.putLong(0x0102030405060708L);
-                c.order(LITTLE_ENDIAN);
-                c.putLong(0x05060708090A0B0CL);
-                c.order(BIG_ENDIAN);
-                c.putLong(0x990A0B0C0D0E0F10L);
-                c.order(LITTLE_ENDIAN);
-                c.putLong(0x9D0E0F1011121314L);
-            }));
+            final var seq =
+                    sequence(
+                            asBytes(
+                                    c -> {
+                                        c.order(BIG_ENDIAN);
+                                        c.putLong(0x0102030405060708L);
+                                        c.order(LITTLE_ENDIAN);
+                                        c.putLong(0x05060708090A0B0CL);
+                                        c.order(BIG_ENDIAN);
+                                        c.putLong(0x990A0B0C0D0E0F10L);
+                                        c.order(LITTLE_ENDIAN);
+                                        c.putLong(0x9D0E0F1011121314L);
+                                    }));
             assertThat(seq.readLong()).isEqualTo(0x0102030405060708L);
             assertThat(seq.readLong(LITTLE_ENDIAN)).isEqualTo(0x05060708090A0B0CL);
             assertThat(seq.readLong()).isEqualTo(0x990A0B0C0D0E0F10L);
@@ -1002,7 +1092,9 @@ void readPastLimit() {
         }
 
         @Test
-        @DisplayName("Reading a float when less than 4 bytes are available throws BufferUnderflowException")
+        @DisplayName(
+                "Reading a float when less than 4 bytes are available throws"
+                        + " BufferUnderflowException")
         void readInsufficientDataThrows() {
             for (int i = 0; i < Float.BYTES - 1; i++) {
                 final var seq = sequence(new byte[i]);
@@ -1011,7 +1103,19 @@ void readInsufficientDataThrows() {
         }
 
         @ParameterizedTest(name = "value={0}")
-        @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Float.MAX_VALUE, Float.POSITIVE_INFINITY})
+        @ValueSource(
+                floats = {
+                    Float.NaN,
+                    Float.NEGATIVE_INFINITY,
+                    Float.MIN_VALUE,
+                    -8.2f,
+                    -1.3f,
+                    0,
+                    1.4f,
+                    8.5f,
+                    Float.MAX_VALUE,
+                    Float.POSITIVE_INFINITY
+                })
         @DisplayName("Reading a float")
         void read(float value) {
             final var seq = sequence(asBytes(c -> c.putFloat(value)));
@@ -1026,7 +1130,19 @@ void read(float value) {
         }
 
         @ParameterizedTest(name = "value={0}")
-        @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Float.MAX_VALUE, Float.POSITIVE_INFINITY})
+        @ValueSource(
+                floats = {
+                    Float.NaN,
+                    Float.NEGATIVE_INFINITY,
+                    Float.MIN_VALUE,
+                    -8.2f,
+                    -1.3f,
+                    0,
+                    1.4f,
+                    8.5f,
+                    Float.MAX_VALUE,
+                    Float.POSITIVE_INFINITY
+                })
         @DisplayName("Reading a float in Little Endian")
         void readLittleEndian(float value) {
             final var seq = sequence(asBytes(c -> c.putFloat(value), LITTLE_ENDIAN));
@@ -1041,7 +1157,19 @@ void readLittleEndian(float value) {
         }
 
         @ParameterizedTest(name = "value={0}")
-        @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Float.MAX_VALUE, Float.POSITIVE_INFINITY})
+        @ValueSource(
+                floats = {
+                    Float.NaN,
+                    Float.NEGATIVE_INFINITY,
+                    Float.MIN_VALUE,
+                    -8.2f,
+                    -1.3f,
+                    0,
+                    1.4f,
+                    8.5f,
+                    Float.MAX_VALUE,
+                    Float.POSITIVE_INFINITY
+                })
         @DisplayName("Reading a float in Big Endian")
         void readBigEndian(float value) {
             final var seq = sequence(asBytes(c -> c.putFloat(value), BIG_ENDIAN));
@@ -1058,15 +1186,18 @@ void readBigEndian(float value) {
         @Test
         @DisplayName("Read a mixture of big and little endian data")
         void readMixedEndian() {
-            final var seq = sequence(asBytes(c -> {
-                c.putFloat(0x01020304);
-                c.order(LITTLE_ENDIAN);
-                c.putFloat(0x05060708);
-                c.order(BIG_ENDIAN);
-                c.putFloat(0x990A0B0C);
-                c.order(LITTLE_ENDIAN);
-                c.putFloat(0x9D0E0F10);
-            }));
+            final var seq =
+                    sequence(
+                            asBytes(
+                                    c -> {
+                                        c.putFloat(0x01020304);
+                                        c.order(LITTLE_ENDIAN);
+                                        c.putFloat(0x05060708);
+                                        c.order(BIG_ENDIAN);
+                                        c.putFloat(0x990A0B0C);
+                                        c.order(LITTLE_ENDIAN);
+                                        c.putFloat(0x9D0E0F10);
+                                    }));
             assertThat(seq.readFloat()).isEqualTo(0x01020304);
             assertThat(seq.readFloat(LITTLE_ENDIAN)).isEqualTo(0x05060708);
             assertThat(seq.readFloat()).isEqualTo(0x990A0B0C);
@@ -1105,7 +1236,9 @@ void readPastLimit() {
         }
 
         @Test
-        @DisplayName("Reading a double when less than 8 bytes are available throws BufferUnderflowException")
+        @DisplayName(
+                "Reading a double when less than 8 bytes are available throws"
+                        + " BufferUnderflowException")
         void readInsufficientDataThrows() {
             for (int i = 0; i < Long.BYTES - 1; i++) {
                 final var seq = sequence(new byte[i]);
@@ -1114,7 +1247,19 @@ void readInsufficientDataThrows() {
         }
 
         @ParameterizedTest(name = "value={0}")
-        @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY})
+        @ValueSource(
+                doubles = {
+                    Double.NaN,
+                    Double.NEGATIVE_INFINITY,
+                    Double.MIN_VALUE,
+                    -8.2f,
+                    -1.3f,
+                    0,
+                    1.4f,
+                    8.5f,
+                    Double.MAX_VALUE,
+                    Double.POSITIVE_INFINITY
+                })
         @DisplayName("Reading a double")
         void read(double value) {
             final var seq = sequence(asBytes(c -> c.putDouble(value)));
@@ -1129,7 +1274,19 @@ void read(double value) {
         }
 
         @ParameterizedTest(name = "value={0}")
-        @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY})
+        @ValueSource(
+                doubles = {
+                    Double.NaN,
+                    Double.NEGATIVE_INFINITY,
+                    Double.MIN_VALUE,
+                    -8.2f,
+                    -1.3f,
+                    0,
+                    1.4f,
+                    8.5f,
+                    Double.MAX_VALUE,
+                    Double.POSITIVE_INFINITY
+                })
         @DisplayName("Reading a double in Little Endian")
         void readLittleEndian(double value) {
             final var seq = sequence(asBytes(c -> c.putDouble(value), LITTLE_ENDIAN));
@@ -1144,7 +1301,19 @@ void readLittleEndian(double value) {
         }
 
         @ParameterizedTest(name = "value={0}")
-        @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY})
+        @ValueSource(
+                doubles = {
+                    Double.NaN,
+                    Double.NEGATIVE_INFINITY,
+                    Double.MIN_VALUE,
+                    -8.2f,
+                    -1.3f,
+                    0,
+                    1.4f,
+                    8.5f,
+                    Double.MAX_VALUE,
+                    Double.POSITIVE_INFINITY
+                })
         @DisplayName("Reading a double in Big Endian")
         void readBigEndian(double value) {
             final var seq = sequence(asBytes(c -> c.putDouble(value), BIG_ENDIAN));
@@ -1161,15 +1330,18 @@ void readBigEndian(double value) {
         @Test
         @DisplayName("Read a mixture of big and little endian data")
         void readMixedEndian() {
-            final var seq = sequence(asBytes(c -> {
-                c.putDouble(0x9102030405060708L);
-                c.order(LITTLE_ENDIAN);
-                c.putDouble(0x990A0B0C0D0E0F10L);
-                c.order(BIG_ENDIAN);
-                c.putDouble(0x1112131415161718L);
-                c.order(LITTLE_ENDIAN);
-                c.putDouble(0x191A1B1C1D1E1F20L);
-            }));
+            final var seq =
+                    sequence(
+                            asBytes(
+                                    c -> {
+                                        c.putDouble(0x9102030405060708L);
+                                        c.order(LITTLE_ENDIAN);
+                                        c.putDouble(0x990A0B0C0D0E0F10L);
+                                        c.order(BIG_ENDIAN);
+                                        c.putDouble(0x1112131415161718L);
+                                        c.order(LITTLE_ENDIAN);
+                                        c.putDouble(0x191A1B1C1D1E1F20L);
+                                    }));
             assertThat(seq.readDouble()).isEqualTo(0x9102030405060708L);
             assertThat(seq.readDouble(LITTLE_ENDIAN)).isEqualTo(0x990A0B0C0D0E0F10L);
             assertThat(seq.readDouble()).isEqualTo(0x1112131415161718L);
@@ -1185,7 +1357,8 @@ final class ReadVarIntTest {
         @DisplayName("Reading a varint from an empty sequence throws BufferUnderflowException")
         void readFromEmptyDataThrows(final boolean zigZag) {
             final var seq = emptySequence();
-            assertThatThrownBy(() -> seq.readVarInt(zigZag)).isInstanceOf(BufferUnderflowException.class);
+            assertThatThrownBy(() -> seq.readVarInt(zigZag))
+                    .isInstanceOf(BufferUnderflowException.class);
         }
 
         @ParameterizedTest
@@ -1193,7 +1366,8 @@ void readFromEmptyDataThrows(final boolean zigZag) {
         @DisplayName("Reading a varint from a full read sequence throws BufferUnderflowException")
         void readFromFullyReadDataThrows(final boolean zigZag) {
             final var seq = fullyUsedSequence();
-            assertThatThrownBy(() -> seq.readVarInt(zigZag)).isInstanceOf(BufferUnderflowException.class);
+            assertThatThrownBy(() -> seq.readVarInt(zigZag))
+                    .isInstanceOf(BufferUnderflowException.class);
         }
 
         @Test
@@ -1204,22 +1378,27 @@ void readPastLimit() {
             seq.limit(5);
             seq.skip(5);
             // When we try to read a varint, then we get a BufferUnderflowException
-            assertThatThrownBy(() -> seq.readVarInt(false)).isInstanceOf(BufferUnderflowException.class);
-            assertThatThrownBy(() -> seq.readVarInt(true)).isInstanceOf(BufferUnderflowException.class);
+            assertThatThrownBy(() -> seq.readVarInt(false))
+                    .isInstanceOf(BufferUnderflowException.class);
+            assertThatThrownBy(() -> seq.readVarInt(true))
+                    .isInstanceOf(BufferUnderflowException.class);
         }
 
         @ParameterizedTest
         @ValueSource(booleans = {false, true})
-        @DisplayName("Reading a varint when less than 4 bytes are available throws BufferUnderflowException")
+        @DisplayName(
+                "Reading a varint when less than 4 bytes are available throws"
+                        + " BufferUnderflowException")
         void readInsufficientDataThrows(final boolean zigZag) {
-            final var seq = sequence(new byte[] { (byte) 0b10101100 });
-            assertThatThrownBy(() -> seq.readVarInt(zigZag)).isInstanceOf(BufferUnderflowException.class);
+            final var seq = sequence(new byte[] {(byte) 0b10101100});
+            assertThatThrownBy(() -> seq.readVarInt(zigZag))
+                    .isInstanceOf(BufferUnderflowException.class);
         }
 
         @Test
         @DisplayName("Read a varint")
         void read() {
-            final var seq = sequence(new byte[] { (byte) 0b10101100, 0b00000010 });
+            final var seq = sequence(new byte[] {(byte) 0b10101100, 0b00000010});
             final var pos = seq.position();
             final var value = seq.readVarInt(false);
             assertThat(value).isEqualTo(300);
@@ -1229,7 +1408,7 @@ void read() {
         @Test
         @DisplayName("Read a 3 bytes varint")
         void read3Bytes() {
-            final var seq = sequence(new byte[] { (byte) 0b10101100, (byte) 0b10101100, 0b00000010 });
+            final var seq = sequence(new byte[] {(byte) 0b10101100, (byte) 0b10101100, 0b00000010});
             final var pos = seq.position();
             final var value = seq.readVarInt(false);
             assertThat(value).isEqualTo(38444);
@@ -1239,12 +1418,11 @@ void read3Bytes() {
         @Test
         @DisplayName("Read a 4 bytes varint")
         void read4Bytes() {
-            final var seq = sequence(new byte[] {
-                    (byte) 0b10101100,
-                    (byte) 0b10101100,
-                    (byte) 0b10101100,
-                    0b00000010
-            });
+            final var seq =
+                    sequence(
+                            new byte[] {
+                                (byte) 0b10101100, (byte) 0b10101100, (byte) 0b10101100, 0b00000010
+                            });
             final var pos = seq.position();
             final var value = seq.readVarInt(false);
             assertThat(value).isEqualTo(4920876);
@@ -1254,13 +1432,15 @@ void read4Bytes() {
         @Test
         @DisplayName("Read a 5 bytes varint")
         void read5Bytes() {
-            final var seq = sequence(new byte[] {
-                    (byte) 0b10101100,
-                    (byte) 0b10101100,
-                    (byte) 0b10101100,
-                    (byte) 0b10101100,
-                    0b00000010
-            });
+            final var seq =
+                    sequence(
+                            new byte[] {
+                                (byte) 0b10101100,
+                                (byte) 0b10101100,
+                                (byte) 0b10101100,
+                                (byte) 0b10101100,
+                                0b00000010
+                            });
             final var pos = seq.position();
             final var value = seq.readVarInt(false);
             assertThat(value).isEqualTo(629872172);
@@ -1270,7 +1450,7 @@ void read5Bytes() {
         @Test
         @DisplayName("Read a varint with zig zag encoding")
         void readZigZag() {
-            final var seq = sequence(new byte[] { (byte) 0b10101101, 0b00000010 });
+            final var seq = sequence(new byte[] {(byte) 0b10101101, 0b00000010});
             final var pos = seq.position();
             final var value = seq.readVarInt(true);
             assertThat(value).isEqualTo(-151);
@@ -1286,7 +1466,8 @@ final class ReadVarLongTest {
         @DisplayName("Reading a varlong from an empty sequence throws BufferUnderflowException")
         void readFromEmptyDataThrows(final boolean zigZag) {
             final var seq = emptySequence();
-            assertThatThrownBy(() -> seq.readVarLong(zigZag)).isInstanceOf(BufferUnderflowException.class);
+            assertThatThrownBy(() -> seq.readVarLong(zigZag))
+                    .isInstanceOf(BufferUnderflowException.class);
         }
 
         @ParameterizedTest
@@ -1294,7 +1475,8 @@ void readFromEmptyDataThrows(final boolean zigZag) {
         @DisplayName("Reading a varlong from a full read sequence throws BufferUnderflowException")
         void readFromFullyReadDataThrows(final boolean zigZag) {
             final var seq = fullyUsedSequence();
-            assertThatThrownBy(() -> seq.readVarLong(zigZag)).isInstanceOf(BufferUnderflowException.class);
+            assertThatThrownBy(() -> seq.readVarLong(zigZag))
+                    .isInstanceOf(BufferUnderflowException.class);
         }
 
         @Test
@@ -1305,22 +1487,27 @@ void readPastLimit() {
             seq.limit(5);
             seq.skip(5);
             // When we try to read a varlong, then we get a BufferUnderflowException
-            assertThatThrownBy(() -> seq.readVarLong(false)).isInstanceOf(BufferUnderflowException.class);
-            assertThatThrownBy(() -> seq.readVarLong(true)).isInstanceOf(BufferUnderflowException.class);
+            assertThatThrownBy(() -> seq.readVarLong(false))
+                    .isInstanceOf(BufferUnderflowException.class);
+            assertThatThrownBy(() -> seq.readVarLong(true))
+                    .isInstanceOf(BufferUnderflowException.class);
         }
 
         @ParameterizedTest
         @ValueSource(booleans = {false, true})
-        @DisplayName("Reading a varlong when less than 4 bytes are available throws BufferUnderflowException")
+        @DisplayName(
+                "Reading a varlong when less than 4 bytes are available throws"
+                        + " BufferUnderflowException")
         void readInsufficientDataThrows(final boolean zigZag) {
-            final var seq = sequence(new byte[] { (byte) 0b10101100 });
-            assertThatThrownBy(() -> seq.readVarLong(zigZag)).isInstanceOf(BufferUnderflowException.class);
+            final var seq = sequence(new byte[] {(byte) 0b10101100});
+            assertThatThrownBy(() -> seq.readVarLong(zigZag))
+                    .isInstanceOf(BufferUnderflowException.class);
         }
 
         @Test
         @DisplayName("Read a varlong")
         void read() {
-            final var seq = sequence(new byte[] { (byte) 0b10101100, 0b00000010 });
+            final var seq = sequence(new byte[] {(byte) 0b10101100, 0b00000010});
             final var pos = seq.position();
             final var value = seq.readVarLong(false);
             assertThat(value).isEqualTo(300);
@@ -1330,7 +1517,7 @@ void read() {
         @Test
         @DisplayName("Read a varlong with zig zag encoding")
         void readZigZag() {
-            final var seq = sequence(new byte[] { (byte) 0b10101101, 0b00000010 });
+            final var seq = sequence(new byte[] {(byte) 0b10101101, 0b00000010});
             final var pos = seq.position();
             final var value = seq.readVarLong(true);
             assertThat(value).isEqualTo(-151);
@@ -1341,40 +1528,48 @@ void readZigZag() {
         @DisplayName("Reading a varint that is not properly encoded throws DataEncodingException")
         void readInvalidVarInt() {
             // Given a very long sequence of bytes all with the "continuation" bit set
-            final var seq = sequence(new byte[] {
-                    (byte) 0b10101101,
-                    (byte) 0b10000010,
-                    (byte) 0b10000010,
-                    (byte) 0b10000010,
-                    (byte) 0b10000010,
-                    (byte) 0b10000010,
-                    (byte) 0b10000010,
-                    (byte) 0b10000010,
-                    (byte) 0b10000010,
-                    (byte) 0b10000010
-            });
-            // When we try to decode an int, the lack of a "terminator" bit causes a DataEncodingException
-            assertThatThrownBy(() -> seq.readVarInt(false)).isInstanceOf(DataEncodingException.class);
+            final var seq =
+                    sequence(
+                            new byte[] {
+                                (byte) 0b10101101,
+                                (byte) 0b10000010,
+                                (byte) 0b10000010,
+                                (byte) 0b10000010,
+                                (byte) 0b10000010,
+                                (byte) 0b10000010,
+                                (byte) 0b10000010,
+                                (byte) 0b10000010,
+                                (byte) 0b10000010,
+                                (byte) 0b10000010
+                            });
+            // When we try to decode an int, the lack of a "terminator" bit causes a
+            // DataEncodingException
+            assertThatThrownBy(() -> seq.readVarInt(false))
+                    .isInstanceOf(DataEncodingException.class);
         }
 
         @Test
         @DisplayName("Reading a varlong that is not properly encoded throws DataEncodingException")
         void readInvalidVarLong() {
             // Given a very long sequence of bytes all with the "continuation" bit set
-            final var seq = sequence(new byte[] {
-                    (byte) 0b10101101,
-                    (byte) 0b10000010,
-                    (byte) 0b10000010,
-                    (byte) 0b10000010,
-                    (byte) 0b10000010,
-                    (byte) 0b10000010,
-                    (byte) 0b10000010,
-                    (byte) 0b10000010,
-                    (byte) 0b10000010,
-                    (byte) 0b10000010
-            });
-            // When we try to decode a long, the lack of a "terminator" bit causes a DataEncodingException
-            assertThatThrownBy(() -> seq.readVarLong(false)).isInstanceOf(DataEncodingException.class);
+            final var seq =
+                    sequence(
+                            new byte[] {
+                                (byte) 0b10101101,
+                                (byte) 0b10000010,
+                                (byte) 0b10000010,
+                                (byte) 0b10000010,
+                                (byte) 0b10000010,
+                                (byte) 0b10000010,
+                                (byte) 0b10000010,
+                                (byte) 0b10000010,
+                                (byte) 0b10000010,
+                                (byte) 0b10000010
+                            });
+            // When we try to decode a long, the lack of a "terminator" bit causes a
+            // DataEncodingException
+            assertThatThrownBy(() -> seq.readVarLong(false))
+                    .isInstanceOf(DataEncodingException.class);
         }
     }
 }
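Both tests above lean on the varint wire format: each byte carries 7 payload bits and the high bit (0x80) means "more bytes follow", so a 64-bit value fits in at most 10 bytes. Ten bytes that all keep the continuation bit set can never terminate, which PBJ reports as a DataEncodingException. A hedged sketch of that check, not PBJ's actual decoder:

    import java.nio.ByteBuffer;

    // Sketch only: decode a protobuf-style varlong and reject input whose
    // continuation bit never clears within 10 bytes.
    final class VarLongSketch {
        static long readVarLong(final ByteBuffer buf) {
            long result = 0;
            for (int i = 0; i < 10; i++) {              // 10 * 7 bits covers 64 bits
                final byte b = buf.get();
                result |= (long) (b & 0x7F) << (7 * i);
                if ((b & 0x80) == 0) {                  // terminator: high bit clear
                    return result;
                }
            }
            // PBJ surfaces this condition as a DataEncodingException.
            throw new IllegalArgumentException("varint not terminated within 10 bytes");
        }
    }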
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialDataTest.java
index d4eb7b91..486082b7 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialDataTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialDataTest.java
@@ -1,15 +1,14 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
+import static org.assertj.core.api.Assertions.assertThat;
+
+import java.util.stream.Stream;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.Arguments;
 import org.junit.jupiter.params.provider.MethodSource;
-import java.util.stream.Stream;
-import static org.assertj.core.api.Assertions.assertThat;
 
-/**
- * Test for default methods on {@link SequentialData}.
- */
+/** Test for default methods on {@link SequentialData}. */
 final class SequentialDataTest {
 
     private static Stream<Arguments> provideArgumentsForRemaining() {
@@ -20,7 +19,7 @@ private static Stream<Arguments> provideArgumentsForRemaining() {
                 Arguments.of(1, 2, 1), // One byte remaining
                 Arguments.of(1, 3, 2), // Two bytes remaining
                 Arguments.of(-1, -1, 0) // Negatives? (error that we handle)
-        );
+                );
     }
 
     @ParameterizedTest(name = "position={0}, limit={1}")
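The argument table above pins down the default remaining() contract being tested: the distance from position to limit, with degenerate inputs clamped to zero. A minimal sketch under that reading (the real SequentialData interface has more members than shown here):

    // Hedged sketch of the default methods exercised by SequentialDataTest.
    interface SequentialSketch {
        long position();
        long limit();

        default long remaining() {
            final long diff = limit() - position();
            return diff > 0 ? diff : 0;   // e.g. (1, 3) -> 2, and (-1, -1) -> 0
        }

        default boolean hasRemaining() {
            return remaining() > 0;
        }
    }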
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialTestBase.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialTestBase.java
index 913f3288..3a666333 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialTestBase.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/SequentialTestBase.java
@@ -1,28 +1,24 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
-import com.hedera.pbj.runtime.io.buffer.BufferedData;
-import com.hedera.pbj.runtime.io.stream.WritableStreamingData;
-import edu.umd.cs.findbugs.annotations.NonNull;
-import org.junit.jupiter.api.DisplayName;
-import org.junit.jupiter.api.Nested;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.params.ParameterizedTest;
-import org.junit.jupiter.params.provider.CsvSource;
+import static java.nio.ByteOrder.BIG_ENDIAN;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
 
+import edu.umd.cs.findbugs.annotations.NonNull;
 import java.nio.BufferOverflowException;
 import java.nio.BufferUnderflowException;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
 import java.util.function.Consumer;
-import static java.nio.ByteOrder.BIG_ENDIAN;
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.assertj.core.api.Assertions.assertThatThrownBy;
-import static org.junit.jupiter.api.Assumptions.assumeTrue;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
 
-/**
- * Base test class for testing all types of {@link SequentialData} classes.
- */
+/** Base test class for testing all types of {@link SequentialData} classes. */
 public abstract class SequentialTestBase {
 
     @NonNull
@@ -32,7 +28,8 @@ public abstract class SequentialTestBase {
     protected abstract SequentialData eofSequence();
 
     @NonNull
-    protected byte[] asBytes(@NonNull final Consumer<ByteBuffer> c, @NonNull final ByteOrder order) {
+    protected byte[] asBytes(
+            @NonNull final Consumer<ByteBuffer> c, @NonNull final ByteOrder order) {
         final var buf = ByteBuffer.allocate(1000).order(order);
         c.accept(buf);
         buf.flip();
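The asBytes(...) helper reformatted above is how these tests build raw input: the consumer writes into a scratch ByteBuffer with the requested byte order and the buffer is then flipped; the tail of the method falls outside this hunk, so the copy-out step below is an assumption. A usage-style sketch:

    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;
    import java.util.function.Consumer;

    // Stand-in for the test helper; assumes the flipped buffer's remaining
    // bytes are copied into a fresh array, which this hunk does not show.
    final class AsBytesSketch {
        static byte[] asBytes(final Consumer<ByteBuffer> c, final ByteOrder order) {
            final ByteBuffer buf = ByteBuffer.allocate(1000).order(order);
            c.accept(buf);
            buf.flip();
            final byte[] bytes = new byte[buf.remaining()];
            buf.get(bytes);
            return bytes;
        }

        public static void main(String[] args) {
            // Big-endian int 0x01020304 becomes the bytes 01 02 03 04.
            final byte[] bytes = asBytes(b -> b.putInt(0x01020304), ByteOrder.BIG_ENDIAN);
            System.out.println(bytes.length); // prints 4
        }
    }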
@@ -88,7 +85,8 @@ void limit() {
             seq.limit(limit);
             // Then the limit is set
             assertThat(seq.limit()).isEqualTo(limit);
-            // And there are still bytes remaining, and equal the difference between the limit and the position
+            // And there are still bytes remaining, and equal the difference between the limit and
+            // the position
             assertThat(seq.hasRemaining()).isTrue();
             assertThat(seq.remaining()).isEqualTo(limit - seq.position());
         }
@@ -98,12 +96,14 @@ void limit() {
         void clampToCapacity() {
             // Given a sequence (assuming capacity is less than Long.MAX_VALUE)
             final var seq = sequence();
-            assumeTrue(seq.capacity() < Long.MAX_VALUE, "This test does not make sense for streams");
+            assumeTrue(
+                    seq.capacity() < Long.MAX_VALUE, "This test does not make sense for streams");
             // When we set the limit to be larger than the capacity
             seq.limit(seq.capacity() + 1);
             // Then the limit is clamped to the capacity
             assertThat(seq.limit()).isEqualTo(seq.capacity());
-            // And there are still bytes remaining, and equal the difference between the capacity and the position
+            // And there are still bytes remaining, and equal the difference between the capacity
+            // and the position
             assertThat(seq.hasRemaining()).isTrue();
             assertThat(seq.remaining()).isEqualTo(seq.capacity() - seq.position());
         }
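The clamp asserted above is small enough to state directly: a requested limit beyond the capacity settles at the capacity. A sketch of just that rule (requests below the current position are not covered by this hunk, so they are left out):

    final class LimitClampSketch {
        // Hedged sketch of the clamp asserted by clampToCapacity().
        static long clampLimitToCapacity(final long requestedLimit, final long capacity) {
            return Math.min(requestedLimit, capacity);
        }

        public static void main(String[] args) {
            final long capacity = 16;
            System.out.println(clampLimitToCapacity(capacity + 1, capacity)); // prints 16
        }
    }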
@@ -114,15 +114,17 @@ void clampToCapacity() {
     final class SkipTest {
         @ParameterizedTest
         @CsvSource({
-                "-1, 0", // skip -1 bytes, limit is 5, so clamp to 0
-                "0, 0", // skip 0 bytes, limit is 5, so clamp to 0
-                "3, 3", // skip 3 bytes, limit is 5, so clamp to 3
-                "5, 5"}) // skip 5 bytes, limit is 5, so clamp to 5
+            "-1, 0", // skip -1 bytes, limit is 5, so clamp to 0
+            "0, 0", // skip 0 bytes, limit is 5, so clamp to 0
+            "3, 3", // skip 3 bytes, limit is 5, so clamp to 3
+            "5, 5"
+        }) // skip 5 bytes, limit is 5, so clamp to 5
         @DisplayName("Skipping relative to the limit will clamp at limit")
         void skipping(long skip, long expected) {
             // Given a sequence, and some number of bytes to skip
             final var seq = sequence();
-            // When we set the limit to be between the position and capacity, and we skip those bytes
+            // When we set the limit to be between the position and capacity, and we skip those
+            // bytes
             seq.limit(5);
             seq.skip(skip);
             // Then the position matches the number of bytes actually skipped, taking into account
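Together with the over-limit case in the next hunk, the skip() semantics exercised here reduce to: negative requests act like zero, requests within the limit advance the position by that many bytes, and requests past the limit throw. A hedged sketch, not the buffer or stream implementation:

    import java.nio.BufferUnderflowException;

    // Sketch of skip() as exercised by SkipTest. The tests accept either an
    // underflow or an overflow exception depending on the sequence type; the
    // underflow variant is used here for illustration.
    final class SkipSketch {
        static long skip(final long requested, final long position, final long limit) {
            final long toSkip = Math.max(0, requested);       // skip(-1) behaves like skip(0)
            if (position + toSkip > limit) {
                throw new BufferUnderflowException();         // e.g. skip(7) with limit 5
            }
            return position + toSkip;                         // the new position
        }
    }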
@@ -133,1223 +135,1328 @@ void skipping(long skip, long expected) {
         }
 
         @ParameterizedTest
-        @CsvSource({
-                "7"}) // skip 7 bytes, limit is 5, so throw on skip()
+        @CsvSource({"7"}) // skip 7 bytes, limit is 5, so throw on skip()
         @DisplayName("Skipping beyond the limit will throw")
         void skippingAndThrowing(long skip) {
             // Given a sequence, and some number of bytes to skip
             final var seq = sequence();
-            // When we set the limit to be between the position and capacity, and we skip those bytes
+            // When we set the limit to be between the position and capacity, and we skip those
+            // bytes
             seq.limit(5);
-            assertThatThrownBy(() -> seq.skip(skip)).isInstanceOfAny(
-                    BufferUnderflowException.class,
-                    BufferOverflowException.class
-            );
+            assertThatThrownBy(() -> seq.skip(skip))
+                    .isInstanceOfAny(BufferUnderflowException.class, BufferOverflowException.class);
         }
     }
 
     @Nested
     @DisplayName("writeByte()")
     final class WriteByteTest {
-//        @Test
-//        @DisplayName("Writing a byte to an empty sequence throws BufferOverflowException")
-//        void writeToEmptyDataThrows() {
-//            // Given an empty sequence
-//            final var seq = emptySequence();
-//            // When we try to read a byte, then we get a BufferOverflowException
-//            assertThatThrownBy(() -> seq.writeByte((byte) 1)).isInstanceOf(BufferOverflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading a byte from a full read sequence throws BufferUnderflowException")
-//        void readFromFullyReadDataThrows() {
-//            // Given a fully read sequence
-//            final var seq = fullyUsedSequence();
-//            // When we try to read a byte, then we get a BufferUnderflowException
-//            assertThatThrownBy(seq::readByte).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading a byte past the limit throws BufferUnderflowException")
-//        void readPastLimit() {
-//            // Given a sequence of bytes with a limit where position == limit
-//            final var seq = sequence(TEST_BYTES);
-//            seq.limit(5);
-//            seq.skip(5);
-//            // When we try to read a byte, then we get a BufferUnderflowException
-//            assertThatThrownBy(seq::readByte).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes from beginning to end")
-//        void read() {
-//            // Given a sequence of bytes
-//            final var seq = sequence(TEST_BYTES);
-//            // When we read each byte, then we get the expected byte
-//            for (byte testByte : TEST_BYTES) {
-//                final var pos = seq.position();
-//                assertThat(seq.hasRemaining()).isTrue();
-//                assertThat(seq.readByte()).isEqualTo(testByte);
-//                assertThat(seq.position()).isEqualTo(pos + 1);
-//            }
-//            // And when we get to the end, there is no longer anything to be read
-//            assertThat(seq.hasRemaining()).isFalse();
-//            assertThat(seq.remaining()).isZero();
-//        }
+        //        @Test
+        //        @DisplayName("Writing a byte to an empty sequence throws BufferOverflowException")
+        //        void writeToEmptyDataThrows() {
+        //            // Given an empty sequence
+        //            final var seq = emptySequence();
+        //            // When we try to write a byte, then we get a BufferOverflowException
+        //            assertThatThrownBy(() -> seq.writeByte((byte)
+        // 1)).isInstanceOf(BufferOverflowException.class);
+        //        }
+        //
+        //        @Test
+        //        @DisplayName("Reading a byte from a full read sequence throws
+        // BufferUnderflowException")
+        //        void readFromFullyReadDataThrows() {
+        //            // Given a fully read sequence
+        //            final var seq = fullyUsedSequence();
+        //            // When we try to read a byte, then we get a BufferUnderflowException
+        //
+        // assertThatThrownBy(seq::readByte).isInstanceOf(BufferUnderflowException.class);
+        //        }
+        //
+        //        @Test
+        //        @DisplayName("Reading a byte past the limit throws BufferUnderflowException")
+        //        void readPastLimit() {
+        //            // Given a sequence of bytes with a limit where position == limit
+        //            final var seq = sequence(TEST_BYTES);
+        //            seq.limit(5);
+        //            seq.skip(5);
+        //            // When we try to read a byte, then we get a BufferUnderflowException
+        //
+        // assertThatThrownBy(seq::readByte).isInstanceOf(BufferUnderflowException.class);
+        //        }
+        //
+        //        @Test
+        //        @DisplayName("Reading bytes from beginning to end")
+        //        void read() {
+        //            // Given a sequence of bytes
+        //            final var seq = sequence(TEST_BYTES);
+        //            // When we read each byte, then we get the expected byte
+        //            for (byte testByte : TEST_BYTES) {
+        //                final var pos = seq.position();
+        //                assertThat(seq.hasRemaining()).isTrue();
+        //                assertThat(seq.readByte()).isEqualTo(testByte);
+        //                assertThat(seq.position()).isEqualTo(pos + 1);
+        //            }
+        //            // And when we get to the end, there is no longer anything to be read
+        //            assertThat(seq.hasRemaining()).isFalse();
+        //            assertThat(seq.remaining()).isZero();
+        //        }
     }
 
-//    @Nested
-//    @DisplayName("readUnsignedByte()")
-//    final class ReadUnsignedByteTest {
-//        @Test
-//        @DisplayName("Reading an unsigned byte from an empty sequence throws BufferUnderflowException")
-//        void readFromEmptyDataThrows() {
-//            // Given an empty sequence
-//            final var seq = emptySequence();
-//            // When we try to read an unsigned byte, then we get a BufferUnderflowException
-//            assertThatThrownBy(seq::readUnsignedByte).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading an unsigned byte from a full read sequence throws BufferUnderflowException")
-//        void readFromFullyReadDataThrows() {
-//            // Given a fully read sequence
-//            final var seq = fullyUsedSequence();
-//            // When we try to read an unsigned byte, then we get a BufferUnderflowException
-//            assertThatThrownBy(seq::readUnsignedByte).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading an unsigned byte past the limit throws BufferUnderflowException")
-//        void readPastLimit() {
-//            // Given a sequence of bytes with a limit where position == limit
-//            final var seq = sequence(TEST_BYTES);
-//            seq.limit(5);
-//            seq.skip(5);
-//            // When we try to read an unsigned byte, then we get a BufferUnderflowException
-//            assertThatThrownBy(seq::readUnsignedByte).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading an unsigned byte")
-//        void read() {
-//            // Given a sequence of bytes (with a single byte that could be interpreted as negative if signed)
-//            final var seq = sequence(new byte[] { (byte) 0b1110_0011 });
-//            // When we read the byte, then we get the expected byte and move the position forward by a single byte
-//            final var pos = seq.position();
-//            assertThat(seq.readUnsignedByte()).isEqualTo(0b1110_0011);
-//            assertThat(seq.position()).isEqualTo(pos + 1);
-//        }
-//    }
-//
-//    @Nested
-//    @DisplayName("readBytes()")
-//    final class ReadBytesTest {
-//        @Test
-//        @DisplayName("Reading bytes with a null dst throws NullPointerException")
-//        void readNullDstThrows() {
-//            // Given a sequence of bytes
-//            final var seq = sequence(TEST_BYTES);
-//
-//            // When we try to read bytes using a null byte array, then we get a NullPointerException
-//            //noinspection DataFlowIssue
-//            assertThatThrownBy(() -> seq.readBytes((byte[]) null)).isInstanceOf(NullPointerException.class);
-//            //noinspection DataFlowIssue
-//            assertThatThrownBy(() -> seq.readBytes(null, 0, 10)).isInstanceOf(NullPointerException.class);
-//
-//            // When we try to read bytes using a null ByteBuffer, then we get a NullPointerException
-//            //noinspection DataFlowIssue
-//            assertThatThrownBy(() -> seq.readBytes((ByteBuffer) null)).isInstanceOf(NullPointerException.class);
-//
-//            // When we try to read bytes using a null BufferedData, then we get a NullPointerException
-//            //noinspection DataFlowIssue
-//            assertThatThrownBy(() -> seq.readBytes((BufferedData) null)).isInstanceOf(NullPointerException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes with a negative offset throws IndexOutOfBoundsException")
-//        void negativeOffsetThrows() {
-//            // Given a sequence of bytes
-//            final var seq = sequence(TEST_BYTES);
-//            // When we try to read bytes using a byte array with a negative offset, then we get an IndexOutOfBoundsException
-//            assertThatThrownBy(() -> seq.readBytes(new byte[10], -1, 10)).isInstanceOf(IndexOutOfBoundsException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes with an offset that is too large throws IndexOutOfBoundsException")
-//        void tooLargeOffsetThrows() {
-//            // Given a sequence of bytes
-//            final var seq = sequence(TEST_BYTES);
-//            // When we try to read bytes using a byte array with an offset that is too large,
-//            // then we get an IndexOutOfBoundsException
-//            assertThatThrownBy(() -> seq.readBytes(new byte[10], 11, 10))
-//                    .isInstanceOf(IndexOutOfBoundsException.class);
-//            // When we try to read bytes using a byte array with an offset + maxLength that is too large,
-//            // then we get an IndexOutOfBoundsException
-//            assertThatThrownBy(() -> seq.readBytes(new byte[10], 9, 2))
-//                    .isInstanceOf(IndexOutOfBoundsException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes with a negative length throws IllegalArgumentException")
-//        void negativeLengthThrows() {
-//            // Given a sequence of bytes
-//            final var seq = sequence(TEST_BYTES);
-//            // When we try to read bytes using a byte array with a negative length, then we get an IllegalArgumentException
-//            assertThatThrownBy(() -> seq.readBytes(new byte[10], 0, -1)).isInstanceOf(IllegalArgumentException.class);
-//            assertThatThrownBy(() -> seq.readBytes(-1)).isInstanceOf(IllegalArgumentException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes from an empty sequence is a no-op")
-//        void readFromEmptyDataIsNoOp() {
-//            // Given an empty sequence
-//            final var seq = emptySequence();
-//
-//            // When we try to read bytes using a byte array, then we get nothing read
-//            assertThat(seq.readBytes(new byte[10])).isZero();
-//            assertThat(seq.readBytes(new byte[10], 0, 2)).isZero();
-//
-//            // When we try to read bytes using a ByteBuffer, then we get nothing read
-//            final var byteBuffer = ByteBuffer.allocate(10);
-//            assertThat(seq.readBytes(byteBuffer)).isZero();
-//
-//            // When we try to read bytes using a BufferedData, then we get nothing read
-//            final var bufferedData = BufferedData.allocate(10);
-//            assertThat(seq.readBytes(bufferedData)).isZero();
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes from a fully read sequence is a no-op")
-//        void readFromFullyReadDataIsNoOp() {
-//            // Given a fully read sequence
-//            final var seq = fullyUsedSequence();
-//
-//            // When we try to read bytes using a byte array, then we get nothing read
-//            assertThat(seq.readBytes(new byte[10])).isZero();
-//            assertThat(seq.readBytes(new byte[10], 0, 2)).isZero();
-//
-//            // When we try to read bytes using a ByteBuffer, then we get nothing read
-//            final var byteBuffer = ByteBuffer.allocate(10);
-//            assertThat(seq.readBytes(byteBuffer)).isZero();
-//
-//            // When we try to read bytes using a BufferedData, then we get nothing read
-//            final var bufferedData = BufferedData.allocate(10);
-//            assertThat(seq.readBytes(bufferedData)).isZero();
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes where there is nothing remaining because we are at the limit is a no-op")
-//        void readPastLimit() {
-//            // Given a sequence of bytes with a limit where position == limit
-//            final var seq = sequence(TEST_BYTES);
-//            seq.limit(5);
-//            seq.skip(5);
-//
-//            // When we try to read bytes using a byte array, then we get nothing read
-//            assertThat(seq.readBytes(new byte[10])).isZero();
-//            assertThat(seq.readBytes(new byte[10], 0, 2)).isZero();
-//
-//            // When we try to read bytes using a ByteBuffer, then we get nothing read
-//            final var byteBuffer = ByteBuffer.allocate(10);
-//            assertThat(seq.readBytes(byteBuffer)).isZero();
-//
-//            // When we try to read bytes using a BufferedData, then we get nothing read
-//            final var bufferedData = BufferedData.allocate(10);
-//            assertThat(seq.readBytes(bufferedData)).isZero();
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes into a dst byte array where the dst has length of 0")
-//        void readZeroDstByteArray() {
-//            // Given a sequence of bytes and an empty destination byte array
-//            final var seq = sequence(TEST_BYTES);
-//            final var dst = new byte[0];
-//            final var pos = seq.position();
-//            // When we try to read bytes into the dst, then the position does not change,
-//            // and the destination array is empty
-//            assertThat(seq.readBytes(dst)).isZero();
-//            assertThat(seq.position()).isEqualTo(pos);
-//            assertThat(dst).isEmpty();
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes into a dst byte array with offset and length where the dst has length of 0")
-//        void readZeroDstByteArrayWithOffset() {
-//            // Given a sequence of bytes and a destination byte array
-//            final var seq = sequence(TEST_BYTES);
-//            final var dst = new byte[10];
-//            final var pos = seq.position();
-//            // When we try to read bytes into the dst but with a 0 length, then the position does not change,
-//            // and the destination array is empty
-//            assertThat(seq.readBytes(dst, 5,0)).isZero();
-//            assertThat(seq.position()).isEqualTo(pos);
-//            assertThat(dst).containsExactly(0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes into a dst ByteBuffer where the dst has length of 0")
-//        void readZeroDstByteBuffer() {
-//            // Given a sequence of bytes and an empty destination ByteBuffer
-//            final var seq = sequence(TEST_BYTES);
-//            final var dst = ByteBuffer.allocate(0);
-//            final var pos = seq.position();
-//            // When we try to read bytes into the dst, then the position does not change,
-//            // and the destination buffer is empty
-//            assertThat(seq.readBytes(dst)).isZero();
-//            assertThat(seq.position()).isEqualTo(pos);
-//            assertThat(dst.position()).isZero();
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes into a dst BufferedData where the dst has length of 0")
-//        void readZeroDstBufferedData() {
-//            // Given a sequence of bytes and an empty destination BufferedData
-//            final var seq = sequence(TEST_BYTES);
-//            final var dst = BufferedData.allocate(0);
-//            final var pos = seq.position();
-//            // When we try to read bytes into the dst, then the position does not change,
-//            // and the destination buffer is empty
-//            assertThat(seq.readBytes(dst)).isZero();
-//            assertThat(seq.position()).isEqualTo(pos);
-//            assertThat(dst.position()).isZero();
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes into a dst byte array where the dst is smaller than the sequence")
-//        void readSmallerDstByteArray() {
-//            // Given a sequence of bytes and a destination byte array
-//            final var seq = sequence(TEST_BYTES);
-//            // When we try reading into the dst (twice, once from the beginning and once in the middle)
-//            for (int i = 0; i < 2; i++) {
-//                final var dst = new byte[5];
-//                final var pos = seq.position();
-//                final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5);
-//                assertThat(seq.readBytes(dst)).isEqualTo(5);
-//                // Then the dst is filled with the bytes from the sequence, and the position is updated
-//                assertThat(dst).isEqualTo(subset);
-//                assertThat(seq.position()).isEqualTo(pos + 5);
-//            }
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes into a dst byte array with offset where the dst is smaller than the sequence")
-//        void readSmallerDstByteArrayWithOffset() {
-//            final var seq = sequence(TEST_BYTES);
-//            // Do twice, so we read once from sequence at the beginning and once in the middle
-//            for (int i = 0; i < 2; i++) {
-//                final var dst = new byte[10];
-//                final var pos = seq.position();
-//                final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5);
-//                assertThat(seq.readBytes(dst, 3, 5)).isEqualTo(5);
-//                assertThat(Arrays.copyOfRange(dst, 3, 8)).isEqualTo(subset);
-//                assertThat(seq.position()).isEqualTo(pos + 5);
-//            }
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes into a dst ByteBuffer where the dst is smaller than the sequence")
-//        void readSmallerDstByteBuffer() {
-//            final var seq = sequence(TEST_BYTES);
-//            for (int i = 0; i < 2; i++) {
-//                final var dst = ByteBuffer.allocate(5);
-//                final var pos = seq.position();
-//                final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5);
-//                assertThat(seq.readBytes(dst)).isEqualTo(5);
-//                assertThat(dst.array()).isEqualTo(subset);
-//                assertThat(seq.position()).isEqualTo(pos + 5);
-//            }
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes into a dst ByteBuffer with offset where the dst is smaller than the sequence")
-//        void readSmallerDstByteBufferWithOffset() {
-//            final var seq = sequence(TEST_BYTES);
-//            for (int i = 0; i < 2; i++) {
-//                final var dst = ByteBuffer.allocate(10);
-//                dst.position(5);
-//                final var pos = seq.position();
-//                final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5);
-//                assertThat(seq.readBytes(dst)).isEqualTo(5);
-//                assertThat(dst.slice(5, 5)).isEqualTo(ByteBuffer.wrap(subset));
-//                assertThat(seq.position()).isEqualTo(pos + 5);
-//            }
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes into a dst BufferedData where the dst is smaller than the sequence")
-//        void readSmallerDstBufferedData() {
-//            final var seq = sequence(TEST_BYTES);
-//            for (int i = 0; i < 2; i++) {
-//                final var dst = BufferedData.allocate(5);
-//                final var pos = seq.position();
-//                final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5);
-//                assertThat(seq.readBytes(dst)).isEqualTo(5);
-//                assertThat(dst).isEqualTo(BufferedData.wrap(subset));
-//                assertThat(seq.position()).isEqualTo(pos + 5);
-//            }
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes into a dst BufferedData with offset where the dst is smaller than the sequence")
-//        void readSmallerDstBufferedDataWithOffset() {
-//            final var seq = sequence(TEST_BYTES);
-//            for (int i = 0; i < 2; i++) {
-//                final var dst = BufferedData.allocate(10);
-//                dst.position(5);
-//                final var pos = seq.position();
-//                final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5);
-//                assertThat(seq.readBytes(dst)).isEqualTo(5);
-//                assertThat(dst.slice(5, 5)).isEqualTo(BufferedData.wrap(subset));
-//                assertThat(seq.position()).isEqualTo(pos + 5);
-//            }
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes into a dst byte array where the dst is the same length as the sequence")
-//        void readDstByteArray() {
-//            final var seq = sequence(TEST_BYTES);
-//            final var dst = new byte[TEST_BYTES.length];
-//            final var pos = seq.position();
-//            assertThat(seq.readBytes(dst)).isEqualTo(TEST_BYTES.length);
-//            assertThat(dst).isEqualTo(TEST_BYTES);
-//            assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes into a dst byte array with offset where the dst is the same length as the sequence")
-//        void readDstByteArrayWithOffset() {
-//            final var seq = sequence(TEST_BYTES);
-//            final var dst = new byte[TEST_BYTES.length + 10];
-//            final var pos = seq.position();
-//            assertThat(seq.readBytes(dst, 5, TEST_BYTES.length)).isEqualTo(TEST_BYTES.length);
-//            assertThat(Arrays.copyOfRange(dst, 5, 5 + TEST_BYTES.length)).isEqualTo(TEST_BYTES);
-//            assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes into a dst ByteBuffer where the dst is the same length as the sequence")
-//        void readDstByteBuffer() {
-//            final var seq = sequence(TEST_BYTES);
-//            final var dst = ByteBuffer.allocate(TEST_BYTES.length);
-//            final var pos = seq.position();
-//            assertThat(seq.readBytes(dst)).isEqualTo(TEST_BYTES.length);
-//            assertThat(dst.array()).isEqualTo(TEST_BYTES);
-//            assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes into a dst ByteBuffer with offset where the dst is the same length as the sequence")
-//        void readDstByteBufferWithOffset() {
-//            final var seq = sequence(TEST_BYTES);
-//            final var dst = ByteBuffer.allocate(TEST_BYTES.length + 10);
-//            final var pos = seq.position();
-//            dst.position(5);
-//            dst.limit(TEST_BYTES.length + 5);
-//            assertThat(seq.readBytes(dst)).isEqualTo(TEST_BYTES.length);
-//            assertThat(dst.slice(5, TEST_BYTES.length)).isEqualTo(ByteBuffer.wrap(TEST_BYTES));
-//            assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes into a dst BufferedData where the dst is the same length as the sequence")
-//        void readDstBufferedData() {
-//            final var seq = sequence(TEST_BYTES);
-//            final var dst = BufferedData.allocate(TEST_BYTES.length);
-//            final var pos = seq.position();
-//            assertThat(seq.readBytes(dst)).isEqualTo(TEST_BYTES.length);
-//            assertThat(dst).isEqualTo(BufferedData.wrap(TEST_BYTES));
-//            assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes into a dst BufferedData with offset where the dst is the same length as the sequence")
-//        void readDstBufferedDataWithOffset() {
-//            final var seq = sequence(TEST_BYTES);
-//            final var dst = BufferedData.allocate(TEST_BYTES.length + 10);
-//            final var pos = seq.position();
-//            dst.position(5);
-//            dst.limit(TEST_BYTES.length + 5);
-//            assertThat(seq.readBytes(dst)).isEqualTo(TEST_BYTES.length);
-//            assertThat(dst.slice(5, TEST_BYTES.length)).isEqualTo(BufferedData.wrap(TEST_BYTES));
-//            assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes into a dst byte array where the dst is larger than the sequence")
-//        void readLargerDstByteArray() {
-//            // Given a sequence of bytes
-//            final var seq = sequence(TEST_BYTES);
-//            // When we read the bytes into a larger byte array
-//            final var arr = new byte[TEST_BYTES.length + 1];
-//            assertThat(seq.readBytes(arr)).isEqualTo(TEST_BYTES.length);
-//            // Then the sequence is exhausted and the array is filled starting at index 0
-//            assertThat(seq.remaining()).isZero();
-//            assertThat(seq.hasRemaining()).isFalse();
-//            assertThat(arr).startsWith(TEST_BYTES);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes into a dst byte array with offset where the dst is larger than the sequence")
-//        void readLargerDstByteArrayWithOffset() {
-//            // Given a sequence of bytes
-//            final var seq = sequence(TEST_BYTES);
-//            // When we read the bytes into a larger byte array with an offset
-//            final var arr = new byte[TEST_BYTES.length + 10];
-//            assertThat(seq.readBytes(arr, 5, TEST_BYTES.length + 1)).isEqualTo(TEST_BYTES.length);
-//            // Then the sequence is exhausted and the array is filled starting at index 5
-//            assertThat(seq.remaining()).isZero();
-//            assertThat(seq.hasRemaining()).isFalse();
-//            assertThat(Arrays.copyOfRange(arr, 5, TEST_BYTES.length + 5 )).containsExactly(TEST_BYTES);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes into a dst ByteBuffer where the dst is larger than the sequence")
-//        void readLargerDstByteBuffer() {
-//            // Given a sequence of bytes
-//            final var seq = sequence(TEST_BYTES);
-//            // When we read the bytes into a larger buffer
-//            final var buffer = ByteBuffer.allocate(TEST_BYTES.length + 1);
-//            assertThat(seq.readBytes(buffer)).isEqualTo(TEST_BYTES.length);
-//            // Then the sequence is exhausted and the buffer is filled starting at index 0
-//            assertThat(seq.remaining()).isZero();
-//            assertThat(seq.hasRemaining()).isFalse();
-//            assertThat(buffer.array()).startsWith(TEST_BYTES);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes into a dst ByteBuffer with offset where the dst is larger than the sequence")
-//        void readLargerDstByteBufferWithOffset() {
-//            // Given a sequence of bytes
-//            final var seq = sequence(TEST_BYTES);
-//            // When we read the bytes into a larger buffer with an offset
-//            final var buffer = ByteBuffer.allocate(TEST_BYTES.length + 10);
-//            buffer.position(5);
-//            buffer.limit(5 + TEST_BYTES.length + 1);
-//            assertThat(seq.readBytes(buffer)).isEqualTo(TEST_BYTES.length);
-//            // Then the sequence is exhausted and the buffer is filled starting at index 5
-//            assertThat(seq.remaining()).isZero();
-//            assertThat(seq.hasRemaining()).isFalse();
-//            assertThat(Arrays.copyOfRange(buffer.array(), 5, TEST_BYTES.length + 5 )).containsExactly(TEST_BYTES);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes into a dst BufferedData where the dst is larger than the sequence")
-//        void readLargerDstBufferedData() {
-//            // Given a sequence of bytes
-//            final var seq = sequence(TEST_BYTES);
-//            // When we read the bytes into a larger buffer
-//            final var buffer = BufferedData.allocate(TEST_BYTES.length + 1);
-//            assertThat(seq.readBytes(buffer)).isEqualTo(TEST_BYTES.length);
-//            // Then the sequence is exhausted and the buffer is filled starting at index 0
-//            assertThat(seq.remaining()).isZero();
-//            assertThat(seq.hasRemaining()).isFalse();
-//            assertThat(buffer.slice(0, TEST_BYTES.length)).isEqualTo(BufferedData.wrap(TEST_BYTES));
-//        }
-//
-//        @Test
-//        @DisplayName("Reading bytes into a dst BufferedData with offset where the dst is larger than the sequence")
-//        void readLargerDstBufferedDataWithOffset() {
-//            // Given a sequence of bytes
-//            final var seq = sequence(TEST_BYTES);
-//            // When we read the bytes into a larger buffer with an offset
-//            final var buffer = BufferedData.allocate(TEST_BYTES.length + 10);
-//            buffer.position(5);
-//            buffer.limit(5 + TEST_BYTES.length + 1);
-//            assertThat(seq.readBytes(buffer)).isEqualTo(TEST_BYTES.length);
-//            // Then the sequence is exhausted and the buffer is filled starting at index 5
-//            assertThat(seq.remaining()).isZero();
-//            assertThat(seq.hasRemaining()).isFalse();
-//            assertThat(buffer.slice(5, TEST_BYTES.length)).isEqualTo(BufferedData.wrap(TEST_BYTES));
-//        }
-//
-//        @ParameterizedTest(name = "offset={0}, length={1}")
-//        @CsvSource({
-//                "-1, 1", // Negative offset
-//                "100, 10", // Offset larger than the dst array size
-//                "5, 10", // Offset+Length larger than the dst array size
-//        })
-//        @DisplayName("Reading bytes where the dst offset and length are bad")
-//        void badOffsetLength(int offset, int length) {
-//            final var seq = sequence(TEST_BYTES);
-//            assertThatThrownBy(() -> seq.readBytes(new byte[10], offset, length))
-//                    .isInstanceOf(IndexOutOfBoundsException.class);
-//        }
-//    }
-//
-//    @Nested
-//    @DisplayName("view()")
-//    final class ViewTest {
-//        @Test
-//        @DisplayName("Negative length throws IllegalArgumentException")
-//        void negativeLength() {
-//            final var seq = sequence(TEST_BYTES);
-//            assertThatThrownBy(() -> seq.view(-1)).isInstanceOf(IllegalArgumentException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Length that is greater than remaining throws BufferUnderflowException")
-//        @Disabled("This has to be tested on the buffer level only, because for a Stream, the limit is too big")
-//        void lengthGreaterThanRemaining() {
-//            // TODO Move to buffer tests
-//            final var seq = sequence(TEST_BYTES);
-//            seq.skip(1);
-//            assertThatThrownBy(() -> seq.view(TEST_BYTES.length)).isInstanceOf(BufferUnderflowException.class);
-//            assertThatThrownBy(() -> seq.view(Integer.MAX_VALUE)).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Creating a view past the limit throws BufferUnderflowException")
-//        void readPastLimit() {
-//            // Given a sequence of bytes with a limit where position == limit
-//            final var seq = sequence(TEST_BYTES);
-//            seq.limit(5);
-//            seq.skip(5);
-//            // When we try to create a view with a length past the limit, then we get a BufferUnderflowException
-//            assertThatThrownBy(() -> seq.view(6)).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Length is zero (OK, empty sequence)")
-//        void lengthIsZero() {
-//            final var seq = sequence(TEST_BYTES);
-//            assertThat(seq.view(0).remaining()).isZero();
-//        }
-//
-//        @Test
-//        @DisplayName("Length + Position is less than limit (OK)")
-//        void lengthPlusPositionIsLessThanLimit() {
-//            final var seq = sequence(TEST_BYTES);
-//            seq.skip(5);
-//            final var view = seq.view(10);
-//
-//            assertThat(view.remaining()).isEqualTo(10);
-//            assertThat(view.readBytes(10)).isEqualTo(Bytes.wrap(TEST_BYTES).slice(5, 10));
-//        }
-//
-//        @Test
-//        @DisplayName("Length + Position is the limit (OK)")
-//        void lengthPlusPositionIsTheLimit() {
-//            // Given a sequence of bytes where the position is 10 bytes from the end
-//            final var seq = sequence(TEST_BYTES);
-//            final var startIndex = TEST_BYTES.length - 10;
-//            assertThat(seq.skip(startIndex)).isEqualTo(16);
-//            assertThat(seq.position()).isEqualTo(16);
-//            // When we create a view with a length of 10 bytes
-//            final var view = seq.view(10);
-//            // Then we get the last 10 bytes of the sequence, AND it advances the position by that many bytes.
-//            assertThat(seq.position()).isEqualTo(26);
-//            // The view, when read, will have all 10 of its bytes
-//            assertThat(view.remaining()).isEqualTo(10);
-//            final var bytes = view.readBytes(10);
-//            assertThat(view.position()).isEqualTo(10);
-//            // And those bytes will be the last 10 bytes of the sequence
-//            assertThat(bytes).isEqualTo(Bytes.wrap(TEST_BYTES).slice(startIndex, 10));
-//        }
-//
-//        @Test
-//        @DisplayName("Get sub-sequence of a sub-sequence")
-//        void subSequenceOfSubSequence() {
-//            final var seq = sequence(TEST_BYTES);
-//            final var subSeq = seq.view(10);
-//            final var subSubSeq = subSeq.view(5);
-//            assertThat(subSubSeq.remaining()).isEqualTo(5);
-//            assertThat(subSubSeq.readBytes(5)).isEqualTo(Bytes.wrap(TEST_BYTES).slice(0, 5));
-//        }
-//    }
-//
-//    @Nested
-//    @DisplayName("readInt()")
-//    final class ReadIntTest {
-//        @Test
-//        @DisplayName("Reading an int from an empty sequence throws BufferUnderflowException")
-//        void readFromEmptyDataThrows() {
-//            final var seq = emptySequence();
-//            assertThatThrownBy(seq::readInt).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading an int from a full read sequence throws BufferUnderflowException")
-//        void readFromFullyReadDataThrows() {
-//            final var seq = fullyUsedSequence();
-//            assertThatThrownBy(seq::readInt).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading an int past the limit throws BufferUnderflowException")
-//        void readPastLimit() {
-//            // Given a sequence of bytes with a limit where position == limit
-//            final var seq = sequence(TEST_BYTES);
-//            seq.limit(5);
-//            // When we try to read an int, then we get a BufferUnderflowException
-//            seq.skip(4); // Only 1 byte left, not enough
-//            assertThatThrownBy(seq::readInt).isInstanceOf(BufferUnderflowException.class);
-//            seq.skip(1); // No bytes left, not enough
-//            assertThatThrownBy(seq::readInt).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading an int when less than 4 bytes are available throws BufferUnderflowException")
-//        void readInsufficientDataThrows() {
-//            for (int i = 0; i < 3; i++) {
-//                final var seq = sequence(new byte[i]);
-//                assertThatThrownBy(seq::readInt).isInstanceOf(BufferUnderflowException.class);
-//            }
-//        }
-//
-//        @ParameterizedTest(name = "value={0}")
-//        @ValueSource(ints = {Integer.MIN_VALUE, -8, -1, 0, 1, 8, Integer.MAX_VALUE})
-//        @DisplayName("Reading an int")
-//        void read(int value) {
-//            // Given a sequence with exactly 1 integer of data
-//            final var seq = sequence(asBytes(c -> c.putInt(value)));
-//            final var pos = seq.position();
-//            // When we read an int, then it is the same as the one we wrote, and the position has moved forward
-//            // by 4 bytes
-//            assertThat(seq.readInt()).isEqualTo(value);
-//            assertThat(seq.position()).isEqualTo(pos + 4);
-//        }
-//
-//        @ParameterizedTest(name = "value={0}")
-//        @ValueSource(ints = {Integer.MIN_VALUE, -8, -1, 0, 1, 8, Integer.MAX_VALUE})
-//        @DisplayName("Reading an int in Little Endian")
-//        void readLittleEndian(int value) {
-//            final var seq = sequence(asBytes(c -> c.putInt(value), LITTLE_ENDIAN));
-//            final var pos = seq.position();
-//            assertThat(seq.readInt(LITTLE_ENDIAN)).isEqualTo(value);
-//            assertThat(seq.position()).isEqualTo(pos + 4);
-//        }
-//
-//        @ParameterizedTest(name = "value={0}")
-//        @ValueSource(ints = {Integer.MIN_VALUE, -8, -1, 0, 1, 8, Integer.MAX_VALUE})
-//        @DisplayName("Reading an int in Big Endian")
-//        void readBigEndian(int value) {
-//            final var seq = sequence(asBytes(c -> c.putInt(value), BIG_ENDIAN));
-//            final var pos = seq.position();
-//            assertThat(seq.readInt(BIG_ENDIAN)).isEqualTo(value);
-//            assertThat(seq.position()).isEqualTo(pos + 4);
-//        }
-//
-//        @Test
-//        @DisplayName("Read a mixture of big and little endian data")
-//        void readMixedEndian() {
-//            final var seq = sequence(asBytes(c -> {
-//                c.order(BIG_ENDIAN);
-//                c.putInt(0x01020304);
-//                c.order(LITTLE_ENDIAN);
-//                c.putInt(0x05060708);
-//                c.order(BIG_ENDIAN);
-//                c.putInt(0x090A0B0C);
-//                c.order(LITTLE_ENDIAN);
-//                c.putInt(0x0D0E0F10);
-//            }));
-//            assertThat(seq.readInt()).isEqualTo(0x01020304);
-//            assertThat(seq.readInt(LITTLE_ENDIAN)).isEqualTo(0x05060708);
-//            assertThat(seq.readInt()).isEqualTo(0x090A0B0C);
-//            assertThat(seq.readInt(LITTLE_ENDIAN)).isEqualTo(0x0D0E0F10);
-//        }
-//    }
-//
-//    @Nested
-//    @DisplayName("readUnsignedInt()")
-//    final class ReadUnsignedIntTest {
-//        @Test
-//        @DisplayName("Reading an unsigned int from an empty sequence throws BufferUnderflowException")
-//        void readFromEmptyDataThrows() {
-//            final var seq = emptySequence();
-//            assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading an unsigned int from a full read sequence throws BufferUnderflowException")
-//        void readFromFullyReadDataThrows() {
-//            final var seq = fullyUsedSequence();
-//            assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading an unsigned int past the limit throws BufferUnderflowException")
-//        void readPastLimit() {
-//            // Given a sequence of bytes with a limit where position == limit
-//            final var seq = sequence(TEST_BYTES);
-//            seq.limit(5);
-//            // When we try to read an unsigned int, then we get a BufferUnderflowException
-//            seq.skip(4); // Only 1 byte left, not enough
-//            assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class);
-//            seq.skip(1); // No bytes left, not enough
-//            assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading an unsigned int when less than 4 bytes are available throws BufferUnderflowException")
-//        void readInsufficientDataThrows() {
-//            for (int i = 0; i < 3; i++) {
-//                final var seq = sequence(new byte[i]);
-//                assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class);
-//            }
-//        }
-//
-//        @ParameterizedTest(name = "value={0}")
-//        @ValueSource(longs = {0x00FFFFFFFFL, 0, 1, 8, 0x007FFFFFFFL})
-//        @DisplayName("Reading an unsigned int")
-//        void read(long value) {
-//            final var seq = sequence(asBytes(c -> c.putInt((int) value)));
-//            final var pos = seq.position();
-//            assertThat(seq.readUnsignedInt()).isEqualTo(value);
-//            assertThat(seq.position()).isEqualTo(pos + 4);
-//        }
-//
-//        @ParameterizedTest(name = "value={0}")
-//        @ValueSource(longs = {0x00FFFFFFFFL, 0, 1, 8, 0x007FFFFFFFL})
-//        @DisplayName("Reading an unsigned int in Little Endian")
-//        void readLittleEndian(long value) {
-//            final var seq = sequence(asBytes(c -> c.putInt((int) value), LITTLE_ENDIAN));
-//            final var pos = seq.position();
-//            assertThat(seq.readUnsignedInt(LITTLE_ENDIAN)).isEqualTo(value);
-//            assertThat(seq.position()).isEqualTo(pos + 4);
-//        }
-//
-//        @ParameterizedTest(name = "value={0}")
-//        @ValueSource(longs = {0x00FFFFFFFFL, 0, 1, 8, 0x007FFFFFFFL})
-//        @DisplayName("Reading an unsigned int in Big Endian")
-//        void readBigEndian(long value) {
-//            final var seq = sequence(asBytes(c -> c.putInt((int) value), BIG_ENDIAN));
-//            final var pos = seq.position();
-//            assertThat(seq.readUnsignedInt(BIG_ENDIAN)).isEqualTo(value);
-//            assertThat(seq.position()).isEqualTo(pos + 4);
-//        }
-//
-//        @Test
-//        @DisplayName("Read a mixture of big and little endian data")
-//        void readMixedEndian() {
-//            final var seq = sequence(asBytes(c -> {
-//                c.order(BIG_ENDIAN);
-//                c.putInt(0x91020304);
-//                c.order(LITTLE_ENDIAN);
-//                c.putInt(0x95060708);
-//                c.order(BIG_ENDIAN);
-//                c.putInt(0x990A0B0C);
-//                c.order(LITTLE_ENDIAN);
-//                c.putInt(0x9D0E0F10);
-//            }));
-//            assertThat(seq.readUnsignedInt()).isEqualTo(0x91020304L);
-//            assertThat(seq.readUnsignedInt(LITTLE_ENDIAN)).isEqualTo(0x95060708L);
-//            assertThat(seq.readUnsignedInt()).isEqualTo(0x990A0B0CL);
-//            assertThat(seq.readUnsignedInt(LITTLE_ENDIAN)).isEqualTo(0x9D0E0F10L);
-//        }
-//    }
-//
-//    @Nested
-//    @DisplayName("readLong()")
-//    final class ReadLongTest {
-//        @Test
-//        @DisplayName("Reading a long from an empty sequence throws BufferUnderflowException")
-//        void readFromEmptyDataThrows() {
-//            final var seq = emptySequence();
-//            assertThatThrownBy(seq::readLong).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading a long from a full read sequence throws BufferUnderflowException")
-//        void readFromFullyReadDataThrows() {
-//            final var seq = fullyUsedSequence();
-//            assertThatThrownBy(seq::readLong).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading a long past the limit throws BufferUnderflowException")
-//        void readPastLimit() {
-//            // Given a sequence of bytes with a limit where position == limit
-//            final var seq = sequence(TEST_BYTES);
-//            seq.limit(5);
-//            // When we try to read a long, then we get a BufferUnderflowException
-//            seq.skip(4); // Only 1 byte left, not enough
-//            assertThatThrownBy(seq::readLong).isInstanceOf(BufferUnderflowException.class);
-//            seq.skip(1); // No bytes left, not enough
-//            assertThatThrownBy(seq::readLong).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading a long when less than 4 bytes are available throws BufferUnderflowException")
-//        void readInsufficientDataThrows() {
-//            for (int i = 0; i < 7; i++) {
-//                final var seq = sequence(new byte[i]);
-//                assertThatThrownBy(seq::readLong).isInstanceOf(BufferUnderflowException.class);
-//            }
-//        }
-//
-//        @ParameterizedTest(name = "value={0}")
-//        @ValueSource(longs = {Long.MIN_VALUE, -8, -1, 0, 1, 8, Long.MAX_VALUE})
-//        @DisplayName("Reading a long")
-//        void read(long value) {
-//            final var seq = sequence(asBytes(c -> c.putLong(value)));
-//            final var pos = seq.position();
-//            assertThat(seq.readLong()).isEqualTo(value);
-//            assertThat(seq.position()).isEqualTo(pos + 8);
-//        }
-//
-//        @ParameterizedTest(name = "value={0}")
-//        @ValueSource(longs = {Long.MIN_VALUE, -8, -1, 0, 1, 8, Long.MAX_VALUE})
-//        @DisplayName("Reading a long in Little Endian")
-//        void readLittleEndian(long value) {
-//            final var seq = sequence(asBytes(c -> c.putLong(value), LITTLE_ENDIAN));
-//            final var pos = seq.position();
-//            assertThat(seq.readLong(LITTLE_ENDIAN)).isEqualTo(value);
-//            assertThat(seq.position()).isEqualTo(pos + 8);
-//        }
-//
-//        @ParameterizedTest(name = "value={0}")
-//        @ValueSource(longs = {Long.MIN_VALUE, -8, -1, 0, 1, 8, Long.MAX_VALUE})
-//        @DisplayName("Reading a long in Big Endian")
-//        void readBigEndian(long value) {
-//            final var seq = sequence(asBytes(c -> c.putLong(value), BIG_ENDIAN));
-//            final var pos = seq.position();
-//            assertThat(seq.readLong(BIG_ENDIAN)).isEqualTo(value);
-//            assertThat(seq.position()).isEqualTo(pos + 8);
-//        }
-//
-//        @Test
-//        @DisplayName("Read a mixture of big and little endian data")
-//        void readMixedEndian() {
-//            final var seq = sequence(asBytes(c -> {
-//                c.order(BIG_ENDIAN);
-//                c.putLong(0x0102030405060708L);
-//                c.order(LITTLE_ENDIAN);
-//                c.putLong(0x05060708090A0B0CL);
-//                c.order(BIG_ENDIAN);
-//                c.putLong(0x990A0B0C0D0E0F10L);
-//                c.order(LITTLE_ENDIAN);
-//                c.putLong(0x9D0E0F1011121314L);
-//            }));
-//            assertThat(seq.readLong()).isEqualTo(0x0102030405060708L);
-//            assertThat(seq.readLong(LITTLE_ENDIAN)).isEqualTo(0x05060708090A0B0CL);
-//            assertThat(seq.readLong()).isEqualTo(0x990A0B0C0D0E0F10L);
-//            assertThat(seq.readLong(LITTLE_ENDIAN)).isEqualTo(0x9D0E0F1011121314L);
-//        }
-//    }
-//
-//    @Nested
-//    @DisplayName("readFloat()")
-//    final class ReadFloatTest {
-//        @Test
-//        @DisplayName("Reading a float from an empty sequence throws BufferUnderflowException")
-//        void readFromEmptyDataThrows() {
-//            final var seq = emptySequence();
-//            assertThatThrownBy(seq::readFloat).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading a float from a full read sequence throws BufferUnderflowException")
-//        void readFromFullyReadDataThrows() {
-//            final var seq = fullyUsedSequence();
-//            assertThatThrownBy(seq::readFloat).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading a float past the limit throws BufferUnderflowException")
-//        void readPastLimit() {
-//            // Given a sequence of bytes with a limit where position == limit
-//            final var seq = sequence(TEST_BYTES);
-//            seq.limit(5);
-//            // When we try to read a float, then we get a BufferUnderflowException
-//            seq.skip(4); // Only 1 byte left, not enough
-//            assertThatThrownBy(seq::readFloat).isInstanceOf(BufferUnderflowException.class);
-//            seq.skip(1); // No bytes left, not enough
-//            assertThatThrownBy(seq::readFloat).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading a float when less than 4 bytes are available throws BufferUnderflowException")
-//        void readInsufficientDataThrows() {
-//            for (int i = 0; i < 3; i++) {
-//                final var seq = sequence(new byte[i]);
-//                assertThatThrownBy(seq::readFloat).isInstanceOf(BufferUnderflowException.class);
-//            }
-//        }
-//
-//        @ParameterizedTest(name = "value={0}")
-//        @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Float.MAX_VALUE, Float.POSITIVE_INFINITY})
-//        @DisplayName("Reading a float")
-//        void read(float value) {
-//            final var seq = sequence(asBytes(c -> c.putFloat(value)));
-//            final var pos = seq.position();
-//            final var readFloat = seq.readFloat();
-//            if (Float.isNaN(value)) {
-//                assertThat(readFloat).isNaN();
-//            } else {
-//                assertThat(readFloat).isEqualTo(value);
-//            }
-//            assertThat(seq.position()).isEqualTo(pos + 4);
-//        }
-//
-//        @ParameterizedTest(name = "value={0}")
-//        @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Float.MAX_VALUE, Float.POSITIVE_INFINITY})
-//        @DisplayName("Reading a float in Little Endian")
-//        void readLittleEndian(float value) {
-//            final var seq = sequence(asBytes(c -> c.putFloat(value), LITTLE_ENDIAN));
-//            final var pos = seq.position();
-//            final var readFloat = seq.readFloat(LITTLE_ENDIAN);
-//            if (Float.isNaN(value)) {
-//                assertThat(readFloat).isNaN();
-//            } else {
-//                assertThat(readFloat).isEqualTo(value);
-//            }
-//            assertThat(seq.position()).isEqualTo(pos + 4);
-//        }
-//
-//        @ParameterizedTest(name = "value={0}")
-//        @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Float.MAX_VALUE, Float.POSITIVE_INFINITY})
-//        @DisplayName("Reading a float in Big Endian")
-//        void readBigEndian(float value) {
-//            final var seq = sequence(asBytes(c -> c.putFloat(value), BIG_ENDIAN));
-//            final var pos = seq.position();
-//            final var readFloat = seq.readFloat(BIG_ENDIAN);
-//            if (Float.isNaN(value)) {
-//                assertThat(readFloat).isNaN();
-//            } else {
-//                assertThat(readFloat).isEqualTo(value);
-//            }
-//            assertThat(seq.position()).isEqualTo(pos + 4);
-//        }
-//
-//        @Test
-//        @DisplayName("Read a mixture of big and little endian data")
-//        void readMixedEndian() {
-//            final var seq = sequence(asBytes(c -> {
-//                c.putFloat(0x01020304);
-//                c.order(LITTLE_ENDIAN);
-//                c.putFloat(0x05060708);
-//                c.order(BIG_ENDIAN);
-//                c.putFloat(0x990A0B0C);
-//                c.order(LITTLE_ENDIAN);
-//                c.putFloat(0x9D0E0F10);
-//            }));
-//            assertThat(seq.readFloat()).isEqualTo(0x01020304);
-//            assertThat(seq.readFloat(LITTLE_ENDIAN)).isEqualTo(0x05060708);
-//            assertThat(seq.readFloat()).isEqualTo(0x990A0B0C);
-//            assertThat(seq.readFloat(LITTLE_ENDIAN)).isEqualTo(0x9D0E0F10);
-//        }
-//    }
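The float tests above branch on Float.isNaN before asserting equality. That guard reflects
IEEE-754 semantics in Java rather than anything PBJ-specific: NaN is never equal to itself under
==, so a round-trip check has to use isNaN() or compare raw bit patterns. A minimal standalone
illustration:

    public class NaNComparisonDemo {
        public static void main(String[] args) {
            final float f = Float.NaN;
            System.out.println(f == Float.NaN);        // false: NaN is not == to anything
            System.out.println(Float.isNaN(f));        // true: the reliable check
            System.out.println(Float.floatToIntBits(f)
                    == Float.floatToIntBits(Float.NaN)); // true: bits collapse to the canonical NaN
        }
    }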
-//
-//    @Nested
-//    @DisplayName("readDouble()")
-//    final class ReadDoubleTest {
-//        @Test
-//        @DisplayName("Reading a double from an empty sequence throws BufferUnderflowException")
-//        void readFromEmptyDataThrows() {
-//            final var seq = emptySequence();
-//            assertThatThrownBy(seq::readDouble).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading a double from a full read sequence throws BufferUnderflowException")
-//        void readFromFullyReadDataThrows() {
-//            final var seq = fullyUsedSequence();
-//            assertThatThrownBy(seq::readDouble).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading a double past the limit throws BufferUnderflowException")
-//        void readPastLimit() {
-//            // Given a sequence of bytes with a limit where position == limit
-//            final var seq = sequence(TEST_BYTES);
-//            seq.limit(5);
-//            // When we try to read a double, then we get a BufferUnderflowException
-//            seq.skip(4); // Only 1 byte left, not enough
-//            assertThatThrownBy(seq::readDouble).isInstanceOf(BufferUnderflowException.class);
-//            seq.skip(1); // No bytes left, not enough
-//            assertThatThrownBy(seq::readDouble).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading a double when less than 4 bytes are available throws BufferUnderflowException")
-//        void readInsufficientDataThrows() {
-//            for (int i = 0; i < 7; i++) {
-//                final var seq = sequence(new byte[i]);
-//                assertThatThrownBy(seq::readDouble).isInstanceOf(BufferUnderflowException.class);
-//            }
-//        }
-//
-//        @ParameterizedTest(name = "value={0}")
-//        @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY})
-//        @DisplayName("Reading a double")
-//        void read(double value) {
-//            final var seq = sequence(asBytes(c -> c.putDouble(value)));
-//            final var pos = seq.position();
-//            final var readDouble = seq.readDouble();
-//            if (Double.isNaN(value)) {
-//                assertThat(readDouble).isNaN();
-//            } else {
-//                assertThat(readDouble).isEqualTo(value);
-//            }
-//            assertThat(seq.position()).isEqualTo(pos + 8);
-//        }
-//
-//        @ParameterizedTest(name = "value={0}")
-//        @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY})
-//        @DisplayName("Reading a double in Little Endian")
-//        void readLittleEndian(double value) {
-//            final var seq = sequence(asBytes(c -> c.putDouble(value), LITTLE_ENDIAN));
-//            final var pos = seq.position();
-//            final var readDouble = seq.readDouble(LITTLE_ENDIAN);
-//            if (Double.isNaN(value)) {
-//                assertThat(readDouble).isNaN();
-//            } else {
-//                assertThat(readDouble).isEqualTo(value);
-//            }
-//            assertThat(seq.position()).isEqualTo(pos + 8);
-//        }
-//
-//        @ParameterizedTest(name = "value={0}")
-//        @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY})
-//        @DisplayName("Reading a double in Big Endian")
-//        void readBigEndian(double value) {
-//            final var seq = sequence(asBytes(c -> c.putDouble(value), BIG_ENDIAN));
-//            final var pos = seq.position();
-//            final var readDouble = seq.readDouble(BIG_ENDIAN);
-//            if (Double.isNaN(value)) {
-//                assertThat(readDouble).isNaN();
-//            } else {
-//                assertThat(readDouble).isEqualTo(value);
-//            }
-//            assertThat(seq.position()).isEqualTo(pos + 8);
-//        }
-//
-//        @Test
-//        @DisplayName("Read a mixture of big and little endian data")
-//        void readMixedEndian() {
-//            final var seq = sequence(asBytes(c -> {
-//                c.putDouble(0x9102030405060708L);
-//                c.order(LITTLE_ENDIAN);
-//                c.putDouble(0x990A0B0C0D0E0F10L);
-//                c.order(BIG_ENDIAN);
-//                c.putDouble(0x1112131415161718L);
-//                c.order(LITTLE_ENDIAN);
-//                c.putDouble(0x191A1B1C1D1E1F20L);
-//            }));
-//            assertThat(seq.readDouble()).isEqualTo(0x9102030405060708L);
-//            assertThat(seq.readDouble(LITTLE_ENDIAN)).isEqualTo(0x990A0B0C0D0E0F10L);
-//            assertThat(seq.readDouble()).isEqualTo(0x1112131415161718L);
-//            assertThat(seq.readDouble(LITTLE_ENDIAN)).isEqualTo(0x191A1B1C1D1E1F20L);
-//        }
-//    }
-//    @Nested
-//    @DisplayName("readVarInt()")
-//    final class ReadVarIntTest {
-//        @ParameterizedTest
-//        @ValueSource(booleans = {false, true})
-//        @DisplayName("Reading a varint from an empty sequence throws BufferUnderflowException")
-//        void readFromEmptyDataThrows(final boolean zigZag) {
-//            final var seq = emptySequence();
-//            assertThatThrownBy(() -> seq.readVarInt(zigZag)).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @ParameterizedTest
-//        @ValueSource(booleans = {false, true})
-//        @DisplayName("Reading a varint from a full read sequence throws BufferUnderflowException")
-//        void readFromFullyReadDataThrows(final boolean zigZag) {
-//            final var seq = fullyUsedSequence();
-//            assertThatThrownBy(() -> seq.readVarInt(zigZag)).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading a varint past the limit throws BufferUnderflowException")
-//        void readPastLimit() {
-//            // Given a sequence of bytes with a limit where position == limit
-//            final var seq = sequence(TEST_BYTES);
-//            seq.limit(5);
-//            seq.skip(5);
-//            // When we try to read a varint, then we get a BufferUnderflowException
-//            assertThatThrownBy(() -> seq.readVarInt(false)).isInstanceOf(BufferUnderflowException.class);
-//            assertThatThrownBy(() -> seq.readVarInt(true)).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @ParameterizedTest
-//        @ValueSource(booleans = {false, true})
-//        @DisplayName("Reading a varint when less than 4 bytes are available throws BufferUnderflowException")
-//        void readInsufficientDataThrows(final boolean zigZag) {
-//            final var seq = sequence(new byte[] { (byte) 0b10101100 });
-//            assertThatThrownBy(() -> seq.readVarInt(zigZag)).isInstanceOf(BufferUnderflowException.class);

-//        }
-//
-//        @Test
-//        @DisplayName("Read a varint")
-//        void read() {
-//            final var seq = sequence(new byte[] { (byte) 0b10101100, 0b00000010 });
-//            final var pos = seq.position();
-//            final var value = seq.readVarInt(false);
-//            assertThat(value).isEqualTo(300);
-//            assertThat(seq.position()).isEqualTo(pos + 2);
-//        }
-//
-//        @Test
-//        @DisplayName("Read a varint with zig zag encoding")
-//        void readZigZag() {
-//            final var seq = sequence(new byte[] { (byte) 0b10101101, 0b00000010 });
-//            final var pos = seq.position();
-//            final var value = seq.readVarInt(true);
-//            assertThat(value).isEqualTo(-151);
-//            assertThat(seq.position()).isEqualTo(pos + 2);
-//        }
-//    }
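The two-byte inputs in these varint tests follow the standard protobuf layout: each byte carries
seven payload bits, the high bit marks continuation, and zig-zag maps signed values onto the
unsigned wire form. A small standalone decoder sketch (assumed layout, not the PBJ
implementation) reproduces the expected values:

    public class VarIntDecodeDemo {
        static int decodeVarInt(byte[] bytes) {
            int result = 0;
            int shift = 0;
            for (byte b : bytes) {
                result |= (b & 0x7F) << shift; // low seven bits carry payload
                if ((b & 0x80) == 0) {
                    break;                     // clear high bit marks the final byte
                }
                shift += 7;
            }
            return result;
        }

        public static void main(String[] args) {
            final int plain = decodeVarInt(new byte[] {(byte) 0b10101100, 0b00000010});
            final int raw = decodeVarInt(new byte[] {(byte) 0b10101101, 0b00000010});
            final int zigZag = (raw >>> 1) ^ -(raw & 1); // zig-zag decode: 301 -> -151
            System.out.println(plain);  // 300
            System.out.println(zigZag); // -151
        }
    }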
-//
-//    @Nested
-//    @DisplayName("readVarLong()")
-//    final class ReadVarLongTest {
-//        @ParameterizedTest
-//        @ValueSource(booleans = {false, true})
-//        @DisplayName("Reading a varlong from an empty sequence throws BufferUnderflowException")
-//        void readFromEmptyDataThrows(final boolean zigZag) {
-//            final var seq = emptySequence();
-//            assertThatThrownBy(() -> seq.readVarLong(zigZag)).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @ParameterizedTest
-//        @ValueSource(booleans = {false, true})
-//        @DisplayName("Reading a varlong from a full read sequence throws BufferUnderflowException")
-//        void readFromFullyReadDataThrows(final boolean zigZag) {
-//            final var seq = fullyUsedSequence();
-//            assertThatThrownBy(() -> seq.readVarLong(zigZag)).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Reading a varlong past the limit throws BufferUnderflowException")
-//        void readPastLimit() {
-//            // Given a sequence of bytes with a limit where position == limit
-//            final var seq = sequence(TEST_BYTES);
-//            seq.limit(5);
-//            seq.skip(5);
-//            // When we try to read a varlong, then we get a BufferUnderflowException
-//            assertThatThrownBy(() -> seq.readVarLong(false)).isInstanceOf(BufferUnderflowException.class);
-//            assertThatThrownBy(() -> seq.readVarLong(true)).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @ParameterizedTest
-//        @ValueSource(booleans = {false, true})
-//        @DisplayName("Reading a varlong when less than 4 bytes are available throws BufferUnderflowException")
-//        void readInsufficientDataThrows(final boolean zigZag) {
-//            final var seq = sequence(new byte[] { (byte) 0b10101100 });
-//            assertThatThrownBy(() -> seq.readVarLong(zigZag)).isInstanceOf(BufferUnderflowException.class);
-//        }
-//
-//        @Test
-//        @DisplayName("Read a varlong")
-//        void read() {
-//            final var seq = sequence(new byte[] { (byte) 0b10101100, 0b00000010 });
-//            final var pos = seq.position();
-//            final var value = seq.readVarLong(false);
-//            assertThat(value).isEqualTo(300);
-//            assertThat(seq.position()).isEqualTo(pos + 2);
-//        }
-//
-//        @Test
-//        @DisplayName("Read a varlong with zig zag encoding")
-//        void readZigZag() {
-//            final var seq = sequence(new byte[] { (byte) 0b10101101, 0b00000010 });
-//            final var pos = seq.position();
-//            final var value = seq.readVarLong(true);
-//            assertThat(value).isEqualTo(-151);
-//            assertThat(seq.position()).isEqualTo(pos + 2);
-//        }
-//    }
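The varlong tests mirror the varint ones, with zig-zag again mapping signed values onto small
unsigned wire values. For completeness, here is the 64-bit zig-zag mapping in both directions, a
standalone sketch of the standard transform rather than PBJ code:

    public class ZigZagLongDemo {
        static long encode(long n) { return (n << 1) ^ (n >> 63); } // arithmetic shift spreads the sign
        static long decode(long n) { return (n >>> 1) ^ -(n & 1); }

        public static void main(String[] args) {
            System.out.println(encode(-151L)); // 301, the wire value used in readZigZag()
            System.out.println(decode(301L));  // -151
            System.out.println(encode(-1L));   // 1
            System.out.println(encode(1L));    // 2
        }
    }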
+    //    @Nested
+    //    @DisplayName("readUnsignedByte()")
+    //    final class ReadUnsignedByteTest {
+    //        @Test
+    //        @DisplayName("Reading an unsigned byte from an empty sequence throws
+    // BufferUnderflowException")
+    //        void readFromEmptyDataThrows() {
+    //            // Given an empty sequence
+    //            final var seq = emptySequence();
+    //            // When we try to read an unsigned byte, then we get a BufferUnderflowException
+    //
+    // assertThatThrownBy(seq::readUnsignedByte).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading an unsigned byte from a full read sequence throws
+    // BufferUnderflowException")
+    //        void readFromFullyReadDataThrows() {
+    //            // Given a fully read sequence
+    //            final var seq = fullyUsedSequence();
+    //            // When we try to read an unsigned byte, then we get a BufferUnderflowException
+    //
+    // assertThatThrownBy(seq::readUnsignedByte).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading an unsigned byte past the limit throws
+    // BufferUnderflowException")
+    //        void readPastLimit() {
+    //            // Given a sequence of bytes with a limit where position == limit
+    //            final var seq = sequence(TEST_BYTES);
+    //            seq.limit(5);
+    //            seq.skip(5);
+    //            // When we try to read an unsigned byte, then we get a BufferUnderflowException
+    //
+    // assertThatThrownBy(seq::readUnsignedByte).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading an unsigned byte")
+    //        void read() {
+    //            // Given a sequence of bytes (with a single byte that could be interpreted as
+    // negative if signed)
+    //            final var seq = sequence(new byte[] { (byte) 0b1110_0011 });
+    //            // When we read the byte, then we get the expected byte and move the position
+    // forward by a single byte
+    //            final var pos = seq.position();
+    //            assertThat(seq.readUnsignedByte()).isEqualTo(0b1110_0011);
+    //            assertThat(seq.position()).isEqualTo(pos + 1);
+    //        }
+    //    }
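The unsigned-byte test above deliberately uses a value whose high bit is set. In Java a byte is
signed, so 0b1110_0011 stored in a byte prints as -29; widening with a 0xFF mask recovers the
unsigned value 227 that readUnsignedByte() is expected to return. A standalone illustration:

    public class UnsignedByteDemo {
        public static void main(String[] args) {
            final byte raw = (byte) 0b1110_0011;
            System.out.println(raw);                     // -29 (signed view)
            System.out.println(raw & 0xFF);              // 227 (unsigned view)
            System.out.println(Byte.toUnsignedInt(raw)); // 227, the same mask via the JDK helper
        }
    }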
+    //
+    //    @Nested
+    //    @DisplayName("readBytes()")
+    //    final class ReadBytesTest {
+    //        @Test
+    //        @DisplayName("Reading bytes with a null dst throws NullPointerException")
+    //        void readNullDstThrows() {
+    //            // Given a sequence of bytes
+    //            final var seq = sequence(TEST_BYTES);
+    //
+    //            // When we try to read bytes using a null byte array, then we get a
+    // NullPointerException
+    //            //noinspection DataFlowIssue
+    //            assertThatThrownBy(() -> seq.readBytes((byte[])
+    // null)).isInstanceOf(NullPointerException.class);
+    //            //noinspection DataFlowIssue
+    //            assertThatThrownBy(() -> seq.readBytes(null, 0,
+    // 10)).isInstanceOf(NullPointerException.class);
+    //
+    //            // When we try to read bytes using a null ByteBuffer, then we get a
+    // NullPointerException
+    //            //noinspection DataFlowIssue
+    //            assertThatThrownBy(() -> seq.readBytes((ByteBuffer)
+    // null)).isInstanceOf(NullPointerException.class);
+    //
+    //            // When we try to read bytes using a null BufferedData, then we get a
+    // NullPointerException
+    //            //noinspection DataFlowIssue
+    //            assertThatThrownBy(() -> seq.readBytes((BufferedData)
+    // null)).isInstanceOf(NullPointerException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes with a negative offset throws IndexOutOfBoundsException")
+    //        void negativeOffsetThrows() {
+    //            // Given a sequence of bytes
+    //            final var seq = sequence(TEST_BYTES);
+    //            // When we try to read bytes using a byte array with a negative offset, then we
+    // get an IndexOutOfBoundsException
+    //            assertThatThrownBy(() -> seq.readBytes(new byte[10], -1,
+    // 10)).isInstanceOf(IndexOutOfBoundsException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes with an offset that is too large throws
+    // IndexOutOfBoundsException")
+    //        void tooLargeOffsetThrows() {
+    //            // Given a sequence of bytes
+    //            final var seq = sequence(TEST_BYTES);
+    //            // When we try to read bytes using a byte array with an offset that is too large,
+    //            // then we get an IndexOutOfBoundsException
+    //            assertThatThrownBy(() -> seq.readBytes(new byte[10], 11, 10))
+    //                    .isInstanceOf(IndexOutOfBoundsException.class);
+    //            // When we try to read bytes using a byte array with an offset + maxLength that is
+    // too large,
+    //            // then we get an IndexOutOfBoundsException
+    //            assertThatThrownBy(() -> seq.readBytes(new byte[10], 9, 2))
+    //                    .isInstanceOf(IndexOutOfBoundsException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes with a negative length throws IllegalArgumentException")
+    //        void negativeLengthThrows() {
+    //            // Given a sequence of bytes
+    //            final var seq = sequence(TEST_BYTES);
+    //            // When we try to read bytes using a byte array with a negative length, then we
+    // get an IllegalArgumentException
+    //            assertThatThrownBy(() -> seq.readBytes(new byte[10], 0,
+    // -1)).isInstanceOf(IllegalArgumentException.class);
+    //            assertThatThrownBy(() ->
+    // seq.readBytes(-1)).isInstanceOf(IllegalArgumentException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes from an empty sequence is a no-op")
+    //        void readFromEmptyDataIsNoOp() {
+    //            // Given an empty sequence
+    //            final var seq = emptySequence();
+    //
+    //            // When we try to read bytes using a byte array, then we get nothing read
+    //            assertThat(seq.readBytes(new byte[10])).isZero();
+    //            assertThat(seq.readBytes(new byte[10], 0, 2)).isZero();
+    //
+    //            // When we try to read bytes using a ByteBuffer, then we get nothing read
+    //            final var byteBuffer = ByteBuffer.allocate(10);
+    //            assertThat(seq.readBytes(byteBuffer)).isZero();
+    //
+    //            // When we try to read bytes using a BufferedData, then we get nothing read
+    //            final var bufferedData = BufferedData.allocate(10);
+    //            assertThat(seq.readBytes(bufferedData)).isZero();
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes from a fully read sequence is a no-op")
+    //        void readFromFullyReadDataIsNoOp() {
+    //            // Given a fully read sequence
+    //            final var seq = fullyUsedSequence();
+    //
+    //            // When we try to read bytes using a byte array, then we get nothing read
+    //            assertThat(seq.readBytes(new byte[10])).isZero();
+    //            assertThat(seq.readBytes(new byte[10], 0, 2)).isZero();
+    //
+    //            // When we try to read bytes using a ByteBuffer, then we get nothing read
+    //            final var byteBuffer = ByteBuffer.allocate(10);
+    //            assertThat(seq.readBytes(byteBuffer)).isZero();
+    //
+    //            // When we try to read bytes using a BufferedData, then we get nothing read
+    //            final var bufferedData = BufferedData.allocate(10);
+    //            assertThat(seq.readBytes(bufferedData)).isZero();
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes where there is nothing remaining because we are at the
+    // limit is a no-op")
+    //        void readPastLimit() {
+    //            // Given a sequence of bytes with a limit where position == limit
+    //            final var seq = sequence(TEST_BYTES);
+    //            seq.limit(5);
+    //            seq.skip(5);
+    //
+    //            // When we try to read bytes using a byte array, then we get nothing read
+    //            assertThat(seq.readBytes(new byte[10])).isZero();
+    //            assertThat(seq.readBytes(new byte[10], 0, 2)).isZero();
+    //
+    //            // When we try to read bytes using a ByteBuffer, then we get nothing read
+    //            final var byteBuffer = ByteBuffer.allocate(10);
+    //            assertThat(seq.readBytes(byteBuffer)).isZero();
+    //
+    //            // When we try to read bytes using a BufferedData, then we get nothing read
+    //            final var bufferedData = BufferedData.allocate(10);
+    //            assertThat(seq.readBytes(bufferedData)).isZero();
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes into a dst byte array where the dst has length of 0")
+    //        void readZeroDstByteArray() {
+    //            // Given a sequence of bytes and an empty destination byte array
+    //            final var seq = sequence(TEST_BYTES);
+    //            final var dst = new byte[0];
+    //            final var pos = seq.position();
+    //            // When we try to read bytes into the dst, then the position does not change,
+    //            // and the destination array is empty
+    //            assertThat(seq.readBytes(dst)).isZero();
+    //            assertThat(seq.position()).isEqualTo(pos);
+    //            assertThat(dst).isEmpty();
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes into a dst byte array with offset and length where the dst
+    // has length of 0")
+    //        void readZeroDstByteArrayWithOffset() {
+    //            // Given a sequence of bytes and a destination byte array
+    //            final var seq = sequence(TEST_BYTES);
+    //            final var dst = new byte[10];
+    //            final var pos = seq.position();
+    //            // When we try to read bytes into the dst but with a 0 length, then the position
+    // does not change,
+    //            // and the destination array is empty
+    //            assertThat(seq.readBytes(dst, 5, 0)).isZero();
+    //            assertThat(seq.position()).isEqualTo(pos);
+    //            assertThat(dst).containsExactly(0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes into a dst ByteBuffer where the dst has length of 0")
+    //        void readZeroDstByteBuffer() {
+    //            // Given a sequence of bytes and an empty destination ByteBuffer
+    //            final var seq = sequence(TEST_BYTES);
+    //            final var dst = ByteBuffer.allocate(0);
+    //            final var pos = seq.position();
+    //            // When we try to read bytes into the dst, then the position does not change,
+    //            // and the destination buffer is empty
+    //            assertThat(seq.readBytes(dst)).isZero();
+    //            assertThat(seq.position()).isEqualTo(pos);
+    //            assertThat(dst.position()).isZero();
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes into a dst BufferedData where the dst has length of 0")
+    //        void readZeroDstBufferedData() {
+    //            // Given a sequence of bytes and an empty destination BufferedData
+    //            final var seq = sequence(TEST_BYTES);
+    //            final var dst = BufferedData.allocate(0);
+    //            final var pos = seq.position();
+    //            // When we try to read bytes into the dst, then the position does not change,
+    //            // and the destination buffer is empty
+    //            assertThat(seq.readBytes(dst)).isZero();
+    //            assertThat(seq.position()).isEqualTo(pos);
+    //            assertThat(dst.position()).isZero();
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes into a dst byte array where the dst is smaller than the
+    // sequence")
+    //        void readSmallerDstByteArray() {
+    //            // Given a sequence of bytes and a destination byte array
+    //            final var seq = sequence(TEST_BYTES);
+    //            // When we try reading into the dst (twice, once from the beginning and once in
+    // the middle)
+    //            for (int i = 0; i < 2; i++) {
+    //                final var dst = new byte[5];
+    //                final var pos = seq.position();
+    //                final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5);
+    //                assertThat(seq.readBytes(dst)).isEqualTo(5);
+    //                // Then the dst is filled with the bytes from the sequence, and the position
+    // is updated
+    //                assertThat(dst).isEqualTo(subset);
+    //                assertThat(seq.position()).isEqualTo(pos + 5);
+    //            }
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes into a dst byte array with offset where the dst is smaller
+    // than the sequence")
+    //        void readSmallerDstByteArrayWithOffset() {
+    //            final var seq = sequence(TEST_BYTES);
+    //            // Do twice, so we read once from sequence at the beginning and once in the middle
+    //            for (int i = 0; i < 2; i++) {
+    //                final var dst = new byte[10];
+    //                final var pos = seq.position();
+    //                final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5);
+    //                assertThat(seq.readBytes(dst, 3, 5)).isEqualTo(5);
+    //                assertThat(Arrays.copyOfRange(dst, 3, 8)).isEqualTo(subset);
+    //                assertThat(seq.position()).isEqualTo(pos + 5);
+    //            }
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes into a dst ByteBuffer where the dst is smaller than the
+    // sequence")
+    //        void readSmallerDstByteBuffer() {
+    //            final var seq = sequence(TEST_BYTES);
+    //            for (int i = 0; i < 2; i++) {
+    //                final var dst = ByteBuffer.allocate(5);
+    //                final var pos = seq.position();
+    //                final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5);
+    //                assertThat(seq.readBytes(dst)).isEqualTo(5);
+    //                assertThat(dst.array()).isEqualTo(subset);
+    //                assertThat(seq.position()).isEqualTo(pos + 5);
+    //            }
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes into a dst ByteBuffer with offset where the dst is smaller
+    // than the sequence")
+    //        void readSmallerDstByteBufferWithOffset() {
+    //            final var seq = sequence(TEST_BYTES);
+    //            for (int i = 0; i < 2; i++) {
+    //                final var dst = ByteBuffer.allocate(10);
+    //                dst.position(5);
+    //                final var pos = seq.position();
+    //                final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5);
+    //                assertThat(seq.readBytes(dst)).isEqualTo(5);
+    //                assertThat(dst.slice(5, 5)).isEqualTo(ByteBuffer.wrap(subset));
+    //                assertThat(seq.position()).isEqualTo(pos + 5);
+    //            }
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes into a dst BufferedData where the dst is smaller than the
+    // sequence")
+    //        void readSmallerDstBufferedData() {
+    //            final var seq = sequence(TEST_BYTES);
+    //            for (int i = 0; i < 2; i++) {
+    //                final var dst = BufferedData.allocate(5);
+    //                final var pos = seq.position();
+    //                final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5);
+    //                assertThat(seq.readBytes(dst)).isEqualTo(5);
+    //                assertThat(dst).isEqualTo(BufferedData.wrap(subset));
+    //                assertThat(seq.position()).isEqualTo(pos + 5);
+    //            }
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes into a dst BufferedData with offset where the dst is
+    // smaller than the sequence")
+    //        void readSmallerDstBufferedDataWithOffset() {
+    //            final var seq = sequence(TEST_BYTES);
+    //            for (int i = 0; i < 2; i++) {
+    //                final var dst = BufferedData.allocate(10);
+    //                dst.position(5);
+    //                final var pos = seq.position();
+    //                final var subset = Arrays.copyOfRange(TEST_BYTES, (int) pos, (int) pos + 5);
+    //                assertThat(seq.readBytes(dst)).isEqualTo(5);
+    //                assertThat(dst.slice(5, 5)).isEqualTo(BufferedData.wrap(subset));
+    //                assertThat(seq.position()).isEqualTo(pos + 5);
+    //            }
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes into a dst byte array where the dst is the same length as
+    // the sequence")
+    //        void readDstByteArray() {
+    //            final var seq = sequence(TEST_BYTES);
+    //            final var dst = new byte[TEST_BYTES.length];
+    //            final var pos = seq.position();
+    //            assertThat(seq.readBytes(dst)).isEqualTo(TEST_BYTES.length);
+    //            assertThat(dst).isEqualTo(TEST_BYTES);
+    //            assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes into a dst byte array with offset where the dst is the
+    // same length as the sequence")
+    //        void readDstByteArrayWithOffset() {
+    //            final var seq = sequence(TEST_BYTES);
+    //            final var dst = new byte[TEST_BYTES.length + 10];
+    //            final var pos = seq.position();
+    //            assertThat(seq.readBytes(dst, 5, TEST_BYTES.length)).isEqualTo(TEST_BYTES.length);
+    //            assertThat(Arrays.copyOfRange(dst, 5, 5 +
+    // TEST_BYTES.length)).isEqualTo(TEST_BYTES);
+    //            assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes into a dst ByteBuffer where the dst is the same length as
+    // the sequence")
+    //        void readDstByteBuffer() {
+    //            final var seq = sequence(TEST_BYTES);
+    //            final var dst = ByteBuffer.allocate(TEST_BYTES.length);
+    //            final var pos = seq.position();
+    //            assertThat(seq.readBytes(dst)).isEqualTo(TEST_BYTES.length);
+    //            assertThat(dst.array()).isEqualTo(TEST_BYTES);
+    //            assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes into a dst ByteBuffer with offset where the dst is the
+    // same length as the sequence")
+    //        void readDstByteBufferWithOffset() {
+    //            final var seq = sequence(TEST_BYTES);
+    //            final var dst = ByteBuffer.allocate(TEST_BYTES.length + 10);
+    //            final var pos = seq.position();
+    //            dst.position(5);
+    //            dst.limit(TEST_BYTES.length + 5);
+    //            assertThat(seq.readBytes(dst)).isEqualTo(TEST_BYTES.length);
+    //            assertThat(dst.slice(5,
+    // TEST_BYTES.length)).isEqualTo(ByteBuffer.wrap(TEST_BYTES));
+    //            assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes into a dst BufferedData where the dst is the same length
+    // as the sequence")
+    //        void readDstBufferedData() {
+    //            final var seq = sequence(TEST_BYTES);
+    //            final var dst = BufferedData.allocate(TEST_BYTES.length);
+    //            final var pos = seq.position();
+    //            assertThat(seq.readBytes(dst)).isEqualTo(TEST_BYTES.length);
+    //            assertThat(dst).isEqualTo(BufferedData.wrap(TEST_BYTES));
+    //            assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes into a dst BufferedData with offset where the dst is the
+    // same length as the sequence")
+    //        void readDstBufferedDataWithOffset() {
+    //            final var seq = sequence(TEST_BYTES);
+    //            final var dst = BufferedData.allocate(TEST_BYTES.length + 10);
+    //            final var pos = seq.position();
+    //            dst.position(5);
+    //            dst.limit(TEST_BYTES.length + 5);
+    //            assertThat(seq.readBytes(dst)).isEqualTo(TEST_BYTES.length);
+    //            assertThat(dst.slice(5,
+    // TEST_BYTES.length)).isEqualTo(BufferedData.wrap(TEST_BYTES));
+    //            assertThat(seq.position()).isEqualTo(pos + TEST_BYTES.length);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes into a dst byte array where the dst is larger than the
+    // sequence")
+    //        void readLargerDstByteArray() {
+    //            // Given a sequence of bytes
+    //            final var seq = sequence(TEST_BYTES);
+    //            // When we read the bytes into a larger byte array
+    //            final var arr = new byte[TEST_BYTES.length + 1];
+    //            assertThat(seq.readBytes(arr)).isEqualTo(TEST_BYTES.length);
+    //            // Then the sequence is exhausted and the array is filled starting at index 0
+    //            assertThat(seq.remaining()).isZero();
+    //            assertThat(seq.hasRemaining()).isFalse();
+    //            assertThat(arr).startsWith(TEST_BYTES);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes into a dst byte array with offset where the dst is larger
+    // than the sequence")
+    //        void readLargerDstByteArrayWithOffset() {
+    //            // Given a sequence of bytes
+    //            final var seq = sequence(TEST_BYTES);
+    //            // When we read the bytes into a larger byte array with an offset
+    //            final var arr = new byte[TEST_BYTES.length + 10];
+    //            assertThat(seq.readBytes(arr, 5, TEST_BYTES.length +
+    // 1)).isEqualTo(TEST_BYTES.length);
+    //            // Then the sequence is exhausted and the array is filled starting at index 5
+    //            assertThat(seq.remaining()).isZero();
+    //            assertThat(seq.hasRemaining()).isFalse();
+    //            assertThat(Arrays.copyOfRange(arr, 5, TEST_BYTES.length + 5
+    // )).containsExactly(TEST_BYTES);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes into a dst ByteBuffer where the dst is larger than the
+    // sequence")
+    //        void readLargerDstByteBuffer() {
+    //            // Given a sequence of bytes
+    //            final var seq = sequence(TEST_BYTES);
+    //            // When we read the bytes into a larger buffer
+    //            final var buffer = ByteBuffer.allocate(TEST_BYTES.length + 1);
+    //            assertThat(seq.readBytes(buffer)).isEqualTo(TEST_BYTES.length);
+    //            // Then the sequence is exhausted and the buffer is filled starting at index 0
+    //            assertThat(seq.remaining()).isZero();
+    //            assertThat(seq.hasRemaining()).isFalse();
+    //            assertThat(buffer.array()).startsWith(TEST_BYTES);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes into a dst ByteBuffer with offset where the dst is larger
+    // than the sequence")
+    //        void readLargerDstByteBufferWithOffset() {
+    //            // Given a sequence of bytes
+    //            final var seq = sequence(TEST_BYTES);
+    //            // When we read the bytes into a larger buffer with an offset
+    //            final var buffer = ByteBuffer.allocate(TEST_BYTES.length + 10);
+    //            buffer.position(5);
+    //            buffer.limit(5 + TEST_BYTES.length + 1);
+    //            assertThat(seq.readBytes(buffer)).isEqualTo(TEST_BYTES.length);
+    //            // Then the sequence is exhausted and the buffer is filled starting at index 5
+    //            assertThat(seq.remaining()).isZero();
+    //            assertThat(seq.hasRemaining()).isFalse();
+    //            assertThat(Arrays.copyOfRange(buffer.array(), 5, TEST_BYTES.length + 5
+    // )).containsExactly(TEST_BYTES);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes into a dst BufferedData where the dst is larger than the
+    // sequence")
+    //        void readLargerDstBufferedData() {
+    //            // Given a sequence of bytes
+    //            final var seq = sequence(TEST_BYTES);
+    //            // When we read the bytes into a larger buffer
+    //            final var buffer = BufferedData.allocate(TEST_BYTES.length + 1);
+    //            assertThat(seq.readBytes(buffer)).isEqualTo(TEST_BYTES.length);
+    //            // Then the sequence is exhausted and the buffer is filled starting at index 0
+    //            assertThat(seq.remaining()).isZero();
+    //            assertThat(seq.hasRemaining()).isFalse();
+    //            assertThat(buffer.slice(0,
+    // TEST_BYTES.length)).isEqualTo(BufferedData.wrap(TEST_BYTES));
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading bytes into a dst BufferedData with offset where the dst is
+    // larger than the sequence")
+    //        void readLargerDstBufferedDataWithOffset() {
+    //            // Given a sequence of bytes
+    //            final var seq = sequence(TEST_BYTES);
+    //            // When we read the bytes into a larger buffer with an offset
+    //            final var buffer = BufferedData.allocate(TEST_BYTES.length + 10);
+    //            buffer.position(5);
+    //            buffer.limit(5 + TEST_BYTES.length + 1);
+    //            assertThat(seq.readBytes(buffer)).isEqualTo(TEST_BYTES.length);
+    //            // Then the sequence is exhausted and the buffer is filled starting at index 5
+    //            assertThat(seq.remaining()).isZero();
+    //            assertThat(seq.hasRemaining()).isFalse();
+    //            assertThat(buffer.slice(5,
+    // TEST_BYTES.length)).isEqualTo(BufferedData.wrap(TEST_BYTES));
+    //        }
+    //
+    //        @ParameterizedTest(name = "offset={0}, length={1}")
+    //        @CsvSource({
+    //                "-1, 1", // Negative offset
+    //                "100, 10", // Offset larger than the dst array size
+    //                "5, 10", // Offset+Length larger than the dst array size
+    //        })
+    //        @DisplayName("Reading bytes where the dst offset and length are bad")
+    //        void badOffsetLength(int offset, int length) {
+    //            final var seq = sequence(TEST_BYTES);
+    //            assertThatThrownBy(() -> seq.readBytes(new byte[10], offset, length))
+    //                    .isInstanceOf(IndexOutOfBoundsException.class);
+    //        }
+    //    }
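Taken together, the readBytes() tests pin down a bulk-read contract: validate the destination
window first, copy at most min(remaining, requested length) bytes, advance the position by the
count actually copied, and return that count, with zero (rather than an exception) when nothing
remains. A hypothetical array-backed sketch of that contract, not the PBJ implementation:

    import java.util.Objects;

    final class ReadBytesContractSketch {
        private final byte[] data;
        private int position;

        ReadBytesContractSketch(byte[] data) {
            this.data = data;
        }

        int readBytes(byte[] dst, int offset, int maxLength) {
            if (maxLength < 0) {
                throw new IllegalArgumentException("maxLength must not be negative");
            }
            // Rejects negative offsets and windows that overrun dst with IndexOutOfBoundsException.
            Objects.checkFromIndexSize(offset, maxLength, dst.length);
            final int toRead = Math.min(maxLength, data.length - position);
            System.arraycopy(data, position, dst, offset, toRead);
            position += toRead;
            return toRead; // zero when the sequence is already exhausted
        }
    }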
+    //
+    //    @Nested
+    //    @DisplayName("view()")
+    //    final class ViewTest {
+    //        @Test
+    //        @DisplayName("Negative length throws IllegalArgumentException")
+    //        void negativeLength() {
+    //            final var seq = sequence(TEST_BYTES);
+    //            assertThatThrownBy(() ->
+    // seq.view(-1)).isInstanceOf(IllegalArgumentException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Length that is greater than remaining throws BufferUnderflowException")
+    //        @Disabled("This has to be tested on the buffer level only, because for a Stream, the
+    // limit is too big")
+    //        void lengthGreaterThanRemaining() {
+    //            // TODO Move to buffer tests
+    //            final var seq = sequence(TEST_BYTES);
+    //            seq.skip(1);
+    //            assertThatThrownBy(() ->
+    // seq.view(TEST_BYTES.length)).isInstanceOf(BufferUnderflowException.class);
+    //            assertThatThrownBy(() ->
+    // seq.view(Integer.MAX_VALUE)).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Creating a view past the limit throws BufferUnderflowException")
+    //        void readPastLimit() {
+    //            // Given a sequence of bytes with a limit where position == limit
+    //            final var seq = sequence(TEST_BYTES);
+    //            seq.limit(5);
+    //            seq.skip(5);
+    //            // When we try to create a view with a length past the limit, then we get a
+    // BufferUnderflowException
+    //            assertThatThrownBy(() ->
+    // seq.view(6)).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Length is zero (OK, empty sequence)")
+    //        void lengthIsZero() {
+    //            final var seq = sequence(TEST_BYTES);
+    //            assertThat(seq.view(0).remaining()).isZero();
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Length + Position is less than limit (OK)")
+    //        void lengthPlusPositionIsLessThanLimit() {
+    //            final var seq = sequence(TEST_BYTES);
+    //            seq.skip(5);
+    //            final var view = seq.view(10);
+    //
+    //            assertThat(view.remaining()).isEqualTo(10);
+    //            assertThat(view.readBytes(10)).isEqualTo(Bytes.wrap(TEST_BYTES).slice(5, 10));
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Length + Position is the limit (OK)")
+    //        void lengthPlusPositionIsTheLimit() {
+    //            // Given a sequence of bytes where the position is 10 bytes from the end
+    //            final var seq = sequence(TEST_BYTES);
+    //            final var startIndex = TEST_BYTES.length - 10;
+    //            assertThat(seq.skip(startIndex)).isEqualTo(16);
+    //            assertThat(seq.position()).isEqualTo(16);
+    //            // When we create a view with a length of 10 bytes
+    //            final var view = seq.view(10);
+    //            // Then we get the last 10 bytes of the sequence, AND it advances the position by
+    // that many bytes.
+    //            assertThat(seq.position()).isEqualTo(26);
+    //            // The view, when read, will have all 10 of its bytes
+    //            assertThat(view.remaining()).isEqualTo(10);
+    //            final var bytes = view.readBytes(10);
+    //            assertThat(view.position()).isEqualTo(10);
+    //            // And those bytes will be the last 10 bytes of the sequence
+    //            assertThat(bytes).isEqualTo(Bytes.wrap(TEST_BYTES).slice(startIndex, 10));
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Get sub-sequence of a sub-sequence")
+    //        void subSequenceOfSubSequence() {
+    //            final var seq = sequence(TEST_BYTES);
+    //            final var subSeq = seq.view(10);
+    //            final var subSubSeq = subSeq.view(5);
+    //            assertThat(subSubSeq.remaining()).isEqualTo(5);
+    //            assertThat(subSubSeq.readBytes(5)).isEqualTo(Bytes.wrap(TEST_BYTES).slice(0, 5));
+    //        }
+    //    }
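The view() tests describe a fixed-length window over the underlying data that leaves the parent
positioned just past it. A rough java.nio analogue of that contract (illustration only, using
ByteBuffer.slice(int, int) from Java 13+, not how PBJ implements it):

    import java.nio.ByteBuffer;

    public class ViewAnalogyDemo {
        public static void main(String[] args) {
            final ByteBuffer parent = ByteBuffer.wrap(new byte[26]);
            parent.position(16);                                         // 10 bytes remaining
            final ByteBuffer view = parent.slice(parent.position(), 10); // independent 10-byte window
            parent.position(parent.position() + 10);                     // parent advances past the view
            System.out.println(view.remaining());                        // 10
            System.out.println(parent.remaining());                      // 0
        }
    }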
+    //
+    //    @Nested
+    //    @DisplayName("readInt()")
+    //    final class ReadIntTest {
+    //        @Test
+    //        @DisplayName("Reading an int from an empty sequence throws BufferUnderflowException")
+    //        void readFromEmptyDataThrows() {
+    //            final var seq = emptySequence();
+    //            assertThatThrownBy(seq::readInt).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading an int from a full read sequence throws
+    // BufferUnderflowException")
+    //        void readFromFullyReadDataThrows() {
+    //            final var seq = fullyUsedSequence();
+    //            assertThatThrownBy(seq::readInt).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading an int past the limit throws BufferUnderflowException")
+    //        void readPastLimit() {
+    //            // Given a sequence of bytes with a limit where position == limit
+    //            final var seq = sequence(TEST_BYTES);
+    //            seq.limit(5);
+    //            // When we try to read an int, then we get a BufferUnderflowException
+    //            seq.skip(4); // Only 1 byte left, not enough
+    //            assertThatThrownBy(seq::readInt).isInstanceOf(BufferUnderflowException.class);
+    //            seq.skip(1); // No bytes left, not enough
+    //            assertThatThrownBy(seq::readInt).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading an int when less than 4 bytes are available throws
+    // BufferUnderflowException")
+    //        void readInsufficientDataThrows() {
+    //            for (int i = 0; i < 3; i++) {
+    //                final var seq = sequence(new byte[i]);
+    //                assertThatThrownBy(seq::readInt).isInstanceOf(BufferUnderflowException.class);
+    //            }
+    //        }
+    //
+    //        @ParameterizedTest(name = "value={0}")
+    //        @ValueSource(ints = {Integer.MIN_VALUE, -8, -1, 0, 1, 8, Integer.MAX_VALUE})
+    //        @DisplayName("Reading an int")
+    //        void read(int value) {
+    //            // Given a sequence with exactly 1 integer of data
+    //            final var seq = sequence(asBytes(c -> c.putInt(value)));
+    //            final var pos = seq.position();
+    //            // When we read an int, then it is the same as the one we wrote, and the position
+    // has moved forward
+    //            // by 4 bytes
+    //            assertThat(seq.readInt()).isEqualTo(value);
+    //            assertThat(seq.position()).isEqualTo(pos + 4);
+    //        }
+    //
+    //        @ParameterizedTest(name = "value={0}")
+    //        @ValueSource(ints = {Integer.MIN_VALUE, -8, -1, 0, 1, 8, Integer.MAX_VALUE})
+    //        @DisplayName("Reading an int in Little Endian")
+    //        void readLittleEndian(int value) {
+    //            final var seq = sequence(asBytes(c -> c.putInt(value), LITTLE_ENDIAN));
+    //            final var pos = seq.position();
+    //            assertThat(seq.readInt(LITTLE_ENDIAN)).isEqualTo(value);
+    //            assertThat(seq.position()).isEqualTo(pos + 4);
+    //        }
+    //
+    //        @ParameterizedTest(name = "value={0}")
+    //        @ValueSource(ints = {Integer.MIN_VALUE, -8, -1, 0, 1, 8, Integer.MAX_VALUE})
+    //        @DisplayName("Reading an int in Big Endian")
+    //        void readBigEndian(int value) {
+    //            final var seq = sequence(asBytes(c -> c.putInt(value), BIG_ENDIAN));
+    //            final var pos = seq.position();
+    //            assertThat(seq.readInt(BIG_ENDIAN)).isEqualTo(value);
+    //            assertThat(seq.position()).isEqualTo(pos + 4);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Read a mixture of big and little endian data")
+    //        void readMixedEndian() {
+    //            final var seq = sequence(asBytes(c -> {
+    //                c.order(BIG_ENDIAN);
+    //                c.putInt(0x01020304);
+    //                c.order(LITTLE_ENDIAN);
+    //                c.putInt(0x05060708);
+    //                c.order(BIG_ENDIAN);
+    //                c.putInt(0x090A0B0C);
+    //                c.order(LITTLE_ENDIAN);
+    //                c.putInt(0x0D0E0F10);
+    //            }));
+    //            assertThat(seq.readInt()).isEqualTo(0x01020304);
+    //            assertThat(seq.readInt(LITTLE_ENDIAN)).isEqualTo(0x05060708);
+    //            assertThat(seq.readInt()).isEqualTo(0x090A0B0C);
+    //            assertThat(seq.readInt(LITTLE_ENDIAN)).isEqualTo(0x0D0E0F10);
+    //        }
+    //    }
+    //
+    //    @Nested
+    //    @DisplayName("readUnsignedInt()")
+    //    final class ReadUnsignedIntTest {
+    //        @Test
+    //        @DisplayName("Reading an unsigned int from an empty sequence throws
+    // BufferUnderflowException")
+    //        void readFromEmptyDataThrows() {
+    //            final var seq = emptySequence();
+    //
+    // assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading an unsigned int from a full read sequence throws
+    // BufferUnderflowException")
+    //        void readFromFullyReadDataThrows() {
+    //            final var seq = fullyUsedSequence();
+    //
+    // assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading an unsigned int past the limit throws BufferUnderflowException")
+    //        void readPastLimit() {
+    //            // Given a sequence of bytes with a limit where position == limit
+    //            final var seq = sequence(TEST_BYTES);
+    //            seq.limit(5);
+    //            // When we try to read an unsigned int, then we get a BufferUnderflowException
+    //            seq.skip(4); // Only 1 byte left, not enough
+    //
+    // assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class);
+    //            seq.skip(1); // No bytes left, not enough
+    //
+    // assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading an unsigned int when less than 4 bytes are available throws
+    // BufferUnderflowException")
+    //        void readInsufficientDataThrows() {
+    //            for (int i = 0; i < 3; i++) {
+    //                final var seq = sequence(new byte[i]);
+    //
+    // assertThatThrownBy(seq::readUnsignedInt).isInstanceOf(BufferUnderflowException.class);
+    //            }
+    //        }
+    //
+    //        @ParameterizedTest(name = "value={0}")
+    //        @ValueSource(longs = {0x00FFFFFFFFL, 0, 1, 8, 0x007FFFFFFFL})
+    //        @DisplayName("Reading an unsigned int")
+    //        void read(long value) {
+    //            final var seq = sequence(asBytes(c -> c.putInt((int) value)));
+    //            final var pos = seq.position();
+    //            assertThat(seq.readUnsignedInt()).isEqualTo(value);
+    //            assertThat(seq.position()).isEqualTo(pos + 4);
+    //        }
+    //
+    //        @ParameterizedTest(name = "value={0}")
+    //        @ValueSource(longs = {0x00FFFFFFFFL, 0, 1, 8, 0x007FFFFFFFL})
+    //        @DisplayName("Reading an unsigned int in Little Endian")
+    //        void readLittleEndian(long value) {
+    //            final var seq = sequence(asBytes(c -> c.putInt((int) value), LITTLE_ENDIAN));
+    //            final var pos = seq.position();
+    //            assertThat(seq.readUnsignedInt(LITTLE_ENDIAN)).isEqualTo(value);
+    //            assertThat(seq.position()).isEqualTo(pos + 4);
+    //        }
+    //
+    //        @ParameterizedTest(name = "value={0}")
+    //        @ValueSource(longs = {0x00FFFFFFFFL, 0, 1, 8, 0x007FFFFFFFL})
+    //        @DisplayName("Reading an unsigned int in Big Endian")
+    //        void readBigEndian(long value) {
+    //            final var seq = sequence(asBytes(c -> c.putInt((int) value), BIG_ENDIAN));
+    //            final var pos = seq.position();
+    //            assertThat(seq.readUnsignedInt(BIG_ENDIAN)).isEqualTo(value);
+    //            assertThat(seq.position()).isEqualTo(pos + 4);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Read a mixture of big and little endian data")
+    //        void readMixedEndian() {
+    //            final var seq = sequence(asBytes(c -> {
+    //                c.order(BIG_ENDIAN);
+    //                c.putInt(0x91020304);
+    //                c.order(LITTLE_ENDIAN);
+    //                c.putInt(0x95060708);
+    //                c.order(BIG_ENDIAN);
+    //                c.putInt(0x990A0B0C);
+    //                c.order(LITTLE_ENDIAN);
+    //                c.putInt(0x9D0E0F10);
+    //            }));
+    //            assertThat(seq.readUnsignedInt()).isEqualTo(0x91020304L);
+    //            assertThat(seq.readUnsignedInt(LITTLE_ENDIAN)).isEqualTo(0x95060708L);
+    //            assertThat(seq.readUnsignedInt()).isEqualTo(0x990A0B0CL);
+    //            assertThat(seq.readUnsignedInt(LITTLE_ENDIAN)).isEqualTo(0x9D0E0F10L);
+    //        }
+    //    }
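As with unsigned bytes, the unsigned-int tests use values such as 0x990A0B0C whose high bit is
set: read as a signed int they are negative, and masking with 0xFFFFFFFFL widens them to the
non-negative long that readUnsignedInt() is expected to return. A standalone illustration:

    public class UnsignedIntDemo {
        public static void main(String[] args) {
            final int signed = 0x990A0B0C;                      // negative as a signed int
            final long unsigned = signed & 0xFFFFFFFFL;         // widen and mask
            System.out.println(signed);                         // -1727395060
            System.out.println(unsigned);                       // 2567572236
            System.out.println(Integer.toUnsignedLong(signed)); // 2567572236, via the JDK helper
        }
    }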
+    //
+    //    @Nested
+    //    @DisplayName("readLong()")
+    //    final class ReadLongTest {
+    //        @Test
+    //        @DisplayName("Reading a long from an empty sequence throws BufferUnderflowException")
+    //        void readFromEmptyDataThrows() {
+    //            final var seq = emptySequence();
+    //            assertThatThrownBy(seq::readLong).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading a long from a full read sequence throws
+    // BufferUnderflowException")
+    //        void readFromFullyReadDataThrows() {
+    //            final var seq = fullyUsedSequence();
+    //            assertThatThrownBy(seq::readLong).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading a long past the limit throws BufferUnderflowException")
+    //        void readPastLimit() {
+    //            // Given a sequence of bytes with a limit where position == limit
+    //            final var seq = sequence(TEST_BYTES);
+    //            seq.limit(5);
+    //            // When we try to read a long, then we get a BufferUnderflowException
+    //            seq.skip(4); // Only 1 byte left, not enough
+    //            assertThatThrownBy(seq::readLong).isInstanceOf(BufferUnderflowException.class);
+    //            seq.skip(1); // No bytes left, not enough
+    //            assertThatThrownBy(seq::readLong).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading a long when less than 4 bytes are available throws
+    // BufferUnderflowException")
+    //        void readInsufficientDataThrows() {
+    //            for (int i = 0; i < 7; i++) {
+    //                final var seq = sequence(new byte[i]);
+    //                assertThatThrownBy(seq::readLong).isInstanceOf(BufferUnderflowException.class);
+    //            }
+    //        }
+    //
+    //        @ParameterizedTest(name = "value={0}")
+    //        @ValueSource(longs = {Long.MIN_VALUE, -8, -1, 0, 1, 8, Long.MAX_VALUE})
+    //        @DisplayName("Reading a long")
+    //        void read(long value) {
+    //            final var seq = sequence(asBytes(c -> c.putLong(value)));
+    //            final var pos = seq.position();
+    //            assertThat(seq.readLong()).isEqualTo(value);
+    //            assertThat(seq.position()).isEqualTo(pos + 8);
+    //        }
+    //
+    //        @ParameterizedTest(name = "value={0}")
+    //        @ValueSource(longs = {Long.MIN_VALUE, -8, -1, 0, 1, 8, Long.MAX_VALUE})
+    //        @DisplayName("Reading a long in Little Endian")
+    //        void readLittleEndian(long value) {
+    //            final var seq = sequence(asBytes(c -> c.putLong(value), LITTLE_ENDIAN));
+    //            final var pos = seq.position();
+    //            assertThat(seq.readLong(LITTLE_ENDIAN)).isEqualTo(value);
+    //            assertThat(seq.position()).isEqualTo(pos + 8);
+    //        }
+    //
+    //        @ParameterizedTest(name = "value={0}")
+    //        @ValueSource(longs = {Long.MIN_VALUE, -8, -1, 0, 1, 8, Long.MAX_VALUE})
+    //        @DisplayName("Reading a long in Big Endian")
+    //        void readBigEndian(long value) {
+    //            final var seq = sequence(asBytes(c -> c.putLong(value), BIG_ENDIAN));
+    //            final var pos = seq.position();
+    //            assertThat(seq.readLong(BIG_ENDIAN)).isEqualTo(value);
+    //            assertThat(seq.position()).isEqualTo(pos + 8);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Read a mixture of big and little endian data")
+    //        void readMixedEndian() {
+    //            final var seq = sequence(asBytes(c -> {
+    //                c.order(BIG_ENDIAN);
+    //                c.putLong(0x0102030405060708L);
+    //                c.order(LITTLE_ENDIAN);
+    //                c.putLong(0x05060708090A0B0CL);
+    //                c.order(BIG_ENDIAN);
+    //                c.putLong(0x990A0B0C0D0E0F10L);
+    //                c.order(LITTLE_ENDIAN);
+    //                c.putLong(0x9D0E0F1011121314L);
+    //            }));
+    //            assertThat(seq.readLong()).isEqualTo(0x0102030405060708L);
+    //            assertThat(seq.readLong(LITTLE_ENDIAN)).isEqualTo(0x05060708090A0B0CL);
+    //            assertThat(seq.readLong()).isEqualTo(0x990A0B0C0D0E0F10L);
+    //            assertThat(seq.readLong(LITTLE_ENDIAN)).isEqualTo(0x9D0E0F1011121314L);
+    //        }
+    //    }
+    //
+    //    @Nested
+    //    @DisplayName("readFloat()")
+    //    final class ReadFloatTest {
+    //        @Test
+    //        @DisplayName("Reading a float from an empty sequence throws BufferUnderflowException")
+    //        void readFromEmptyDataThrows() {
+    //            final var seq = emptySequence();
+    //            assertThatThrownBy(seq::readFloat).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading a float from a full read sequence throws
+    // BufferUnderflowException")
+    //        void readFromFullyReadDataThrows() {
+    //            final var seq = fullyUsedSequence();
+    //            assertThatThrownBy(seq::readFloat).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading a float past the limit throws BufferUnderflowException")
+    //        void readPastLimit() {
+    //            // Given a sequence of bytes with a limit where position == limit
+    //            final var seq = sequence(TEST_BYTES);
+    //            seq.limit(5);
+    //            // When we try to read a float, then we get a BufferUnderflowException
+    //            seq.skip(4); // Only 1 byte left, not enough
+    //            assertThatThrownBy(seq::readFloat).isInstanceOf(BufferUnderflowException.class);
+    //            seq.skip(1); // No bytes left, not enough
+    //            assertThatThrownBy(seq::readFloat).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading a float when less than 4 bytes are available throws
+    // BufferUnderflowException")
+    //        void readInsufficientDataThrows() {
+    //            for (int i = 0; i < 3; i++) {
+    //                final var seq = sequence(new byte[i]);
+    //                assertThatThrownBy(seq::readFloat).isInstanceOf(BufferUnderflowException.class);
+    //            }
+    //        }
+    //
+    //        @ParameterizedTest(name = "value={0}")
+    //        @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Float.MAX_VALUE, Float.POSITIVE_INFINITY})
+    //        @DisplayName("Reading a float")
+    //        void read(float value) {
+    //            final var seq = sequence(asBytes(c -> c.putFloat(value)));
+    //            final var pos = seq.position();
+    //            final var readFloat = seq.readFloat();
+    //            if (Float.isNaN(value)) {
+    //                assertThat(readFloat).isNaN();
+    //            } else {
+    //                assertThat(readFloat).isEqualTo(value);
+    //            }
+    //            assertThat(seq.position()).isEqualTo(pos + 4);
+    //        }
+    //
+    //        @ParameterizedTest(name = "value={0}")
+    //        @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Float.MAX_VALUE, Float.POSITIVE_INFINITY})
+    //        @DisplayName("Reading a float in Little Endian")
+    //        void readLittleEndian(float value) {
+    //            final var seq = sequence(asBytes(c -> c.putFloat(value), LITTLE_ENDIAN));
+    //            final var pos = seq.position();
+    //            final var readFloat = seq.readFloat(LITTLE_ENDIAN);
+    //            if (Float.isNaN(value)) {
+    //                assertThat(readFloat).isNaN();
+    //            } else {
+    //                assertThat(readFloat).isEqualTo(value);
+    //            }
+    //            assertThat(seq.position()).isEqualTo(pos + 4);
+    //        }
+    //
+    //        @ParameterizedTest(name = "value={0}")
+    //        @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Float.MAX_VALUE, Float.POSITIVE_INFINITY})
+    //        @DisplayName("Reading a float in Big Endian")
+    //        void readBigEndian(float value) {
+    //            final var seq = sequence(asBytes(c -> c.putFloat(value), BIG_ENDIAN));
+    //            final var pos = seq.position();
+    //            final var readFloat = seq.readFloat(BIG_ENDIAN);
+    //            if (Float.isNaN(value)) {
+    //                assertThat(readFloat).isNaN();
+    //            } else {
+    //                assertThat(readFloat).isEqualTo(value);
+    //            }
+    //            assertThat(seq.position()).isEqualTo(pos + 4);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Read a mixture of big and little endian data")
+    //        void readMixedEndian() {
+    //            final var seq = sequence(asBytes(c -> {
+    //                c.putFloat(0x01020304);
+    //                c.order(LITTLE_ENDIAN);
+    //                c.putFloat(0x05060708);
+    //                c.order(BIG_ENDIAN);
+    //                c.putFloat(0x990A0B0C);
+    //                c.order(LITTLE_ENDIAN);
+    //                c.putFloat(0x9D0E0F10);
+    //            }));
+    //            assertThat(seq.readFloat()).isEqualTo(0x01020304);
+    //            assertThat(seq.readFloat(LITTLE_ENDIAN)).isEqualTo(0x05060708);
+    //            assertThat(seq.readFloat()).isEqualTo(0x990A0B0C);
+    //            assertThat(seq.readFloat(LITTLE_ENDIAN)).isEqualTo(0x9D0E0F10);
+    //        }
+    //    }
+    //
+    //    @Nested
+    //    @DisplayName("readDouble()")
+    //    final class ReadDoubleTest {
+    //        @Test
+    //        @DisplayName("Reading a double from an empty sequence throws
+    // BufferUnderflowException")
+    //        void readFromEmptyDataThrows() {
+    //            final var seq = emptySequence();
+    //            assertThatThrownBy(seq::readDouble).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading a double from a full read sequence throws
+    // BufferUnderflowException")
+    //        void readFromFullyReadDataThrows() {
+    //            final var seq = fullyUsedSequence();
+    //            assertThatThrownBy(seq::readDouble).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading a double past the limit throws BufferUnderflowException")
+    //        void readPastLimit() {
+    //            // Given a sequence of bytes with a limit where position == limit
+    //            final var seq = sequence(TEST_BYTES);
+    //            seq.limit(5);
+    //            // When we try to read a double, then we get a BufferUnderflowException
+    //            seq.skip(4); // Only 1 byte left, not enough
+    //            assertThatThrownBy(seq::readDouble).isInstanceOf(BufferUnderflowException.class);
+    //            seq.skip(1); // No bytes left, not enough
+    //            assertThatThrownBy(seq::readDouble).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading a double when less than 4 bytes are available throws
+    // BufferUnderflowException")
+    //        void readInsufficientDataThrows() {
+    //            for (int i = 0; i < 7; i++) {
+    //                final var seq = sequence(new byte[i]);
+    //                assertThatThrownBy(seq::readDouble).isInstanceOf(BufferUnderflowException.class);
+    //            }
+    //        }
+    //
+    //        @ParameterizedTest(name = "value={0}")
+    //        @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY})
+    //        @DisplayName("Reading a double")
+    //        void read(double value) {
+    //            final var seq = sequence(asBytes(c -> c.putDouble(value)));
+    //            final var pos = seq.position();
+    //            final var readDouble = seq.readDouble();
+    //            if (Double.isNaN(value)) {
+    //                assertThat(readDouble).isNaN();
+    //            } else {
+    //                assertThat(readDouble).isEqualTo(value);
+    //            }
+    //            assertThat(seq.position()).isEqualTo(pos + 8);
+    //        }
+    //
+    //        @ParameterizedTest(name = "value={0}")
+    //        @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY})
+    //        @DisplayName("Reading a double in Little Endian")
+    //        void readLittleEndian(double value) {
+    //            final var seq = sequence(asBytes(c -> c.putDouble(value), LITTLE_ENDIAN));
+    //            final var pos = seq.position();
+    //            final var readDouble = seq.readDouble(LITTLE_ENDIAN);
+    //            if (Double.isNaN(value)) {
+    //                assertThat(readDouble).isNaN();
+    //            } else {
+    //                assertThat(readDouble).isEqualTo(value);
+    //            }
+    //            assertThat(seq.position()).isEqualTo(pos + 8);
+    //        }
+    //
+    //        @ParameterizedTest(name = "value={0}")
+    //        @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY})
+    //        @DisplayName("Reading a double in Big Endian")
+    //        void readBigEndian(double value) {
+    //            final var seq = sequence(asBytes(c -> c.putDouble(value), BIG_ENDIAN));
+    //            final var pos = seq.position();
+    //            final var readDouble = seq.readDouble(BIG_ENDIAN);
+    //            if (Double.isNaN(value)) {
+    //                assertThat(readDouble).isNaN();
+    //            } else {
+    //                assertThat(readDouble).isEqualTo(value);
+    //            }
+    //            assertThat(seq.position()).isEqualTo(pos + 8);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Read a mixture of big and little endian data")
+    //        void readMixedEndian() {
+    //            final var seq = sequence(asBytes(c -> {
+    //                c.putDouble(0x9102030405060708L);
+    //                c.order(LITTLE_ENDIAN);
+    //                c.putDouble(0x990A0B0C0D0E0F10L);
+    //                c.order(BIG_ENDIAN);
+    //                c.putDouble(0x1112131415161718L);
+    //                c.order(LITTLE_ENDIAN);
+    //                c.putDouble(0x191A1B1C1D1E1F20L);
+    //            }));
+    //            assertThat(seq.readDouble()).isEqualTo(0x9102030405060708L);
+    //            assertThat(seq.readDouble(LITTLE_ENDIAN)).isEqualTo(0x990A0B0C0D0E0F10L);
+    //            assertThat(seq.readDouble()).isEqualTo(0x1112131415161718L);
+    //            assertThat(seq.readDouble(LITTLE_ENDIAN)).isEqualTo(0x191A1B1C1D1E1F20L);
+    //        }
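+    //
+    //        // Note: the hex long literals above are converted to double values by putDouble, and each
+    //        // assertion widens the same literal to double again, so the comparisons hold even though
+    //        // the literals read like raw bit patterns.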
+    //    }
+    //
+    //    @Nested
+    //    @DisplayName("readVarInt()")
+    //    final class ReadVarIntTest {
+    //        @ParameterizedTest
+    //        @ValueSource(booleans = {false, true})
+    //        @DisplayName("Reading a varint from an empty sequence throws
+    // BufferUnderflowException")
+    //        void readFromEmptyDataThrows(final boolean zigZag) {
+    //            final var seq = emptySequence();
+    //            assertThatThrownBy(() -> seq.readVarInt(zigZag)).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @ParameterizedTest
+    //        @ValueSource(booleans = {false, true})
+    //        @DisplayName("Reading a varint from a full read sequence throws
+    // BufferUnderflowException")
+    //        void readFromFullyReadDataThrows(final boolean zigZag) {
+    //            final var seq = fullyUsedSequence();
+    //            assertThatThrownBy(() -> seq.readVarInt(zigZag)).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading a varint past the limit throws BufferUnderflowException")
+    //        void readPastLimit() {
+    //            // Given a sequence of bytes with a limit where position == limit
+    //            final var seq = sequence(TEST_BYTES);
+    //            seq.limit(5);
+    //            seq.skip(5);
+    //            // When we try to read a varint, then we get a BufferUnderflowException
+    //            assertThatThrownBy(() -> seq.readVarInt(false)).isInstanceOf(BufferUnderflowException.class);
+    //            assertThatThrownBy(() -> seq.readVarInt(true)).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @ParameterizedTest
+    //        @ValueSource(booleans = {false, true})
+    //        @DisplayName("Reading a varint when less than 4 bytes are available throws
+    // BufferUnderflowException")
+    //        void readInsufficientDataThrows(final boolean zigZag) {
+    //            final var seq = sequence(new byte[] { (byte) 0b10101100 });
+    //            assertThatThrownBy(() -> seq.readVarInt(zigZag)).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Read a varint")
+    //        void read() {
+    //            final var seq = sequence(new byte[] { (byte) 0b10101100, 0b00000010 });
+    //            final var pos = seq.position();
+    //            final var value = seq.readVarInt(false);
+    //            assertThat(value).isEqualTo(300);
+    //            assertThat(seq.position()).isEqualTo(pos + 2);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Read a varint with zig zag encoding")
+    //        void readZigZag() {
+    //            final var seq = sequence(new byte[] { (byte) 0b10101101, 0b00000010 });
+    //            final var pos = seq.position();
+    //            final var value = seq.readVarInt(true);
+    //            assertThat(value).isEqualTo(-151);
+    //            assertThat(seq.position()).isEqualTo(pos + 2);
+    //        }
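+    //
+    //        // Note: the expected values above follow the standard protobuf varint encoding. Dropping
+    //        // the continuation bit of 0b10101100 leaves 44, and the second byte contributes
+    //        // 2 << 7 = 256, so the plain varint decodes to 300. The zig-zag bytes decode to a raw 301,
+    //        // and (301 >>> 1) ^ -(301 & 1) = 150 ^ -1 = -151. For example:
+    //        //     int raw = (0b00000010 << 7) | (0b10101101 & 0x7F); // 301
+    //        //     int decoded = (raw >>> 1) ^ -(raw & 1);            // -151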
+    //    }
+    //
+    //    @Nested
+    //    @DisplayName("readVarLong()")
+    //    final class ReadVarLongTest {
+    //        @ParameterizedTest
+    //        @ValueSource(booleans = {false, true})
+    //        @DisplayName("Reading a varlong from an empty sequence throws
+    // BufferUnderflowException")
+    //        void readFromEmptyDataThrows(final boolean zigZag) {
+    //            final var seq = emptySequence();
+    //            assertThatThrownBy(() -> seq.readVarLong(zigZag)).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @ParameterizedTest
+    //        @ValueSource(booleans = {false, true})
+    //        @DisplayName("Reading a varlong from a full read sequence throws
+    // BufferUnderflowException")
+    //        void readFromFullyReadDataThrows(final boolean zigZag) {
+    //            final var seq = fullyUsedSequence();
+    //            assertThatThrownBy(() -> seq.readVarLong(zigZag)).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Reading a varlong past the limit throws BufferUnderflowException")
+    //        void readPastLimit() {
+    //            // Given a sequence of bytes with a limit where position == limit
+    //            final var seq = sequence(TEST_BYTES);
+    //            seq.limit(5);
+    //            seq.skip(5);
+    //            // When we try to read a varlong, then we get a BufferUnderflowException
+    //            assertThatThrownBy(() -> seq.readVarLong(false)).isInstanceOf(BufferUnderflowException.class);
+    //            assertThatThrownBy(() -> seq.readVarLong(true)).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @ParameterizedTest
+    //        @ValueSource(booleans = {false, true})
+    //        @DisplayName("Reading a varlong when less than 4 bytes are available throws
+    // BufferUnderflowException")
+    //        void readInsufficientDataThrows(final boolean zigZag) {
+    //            final var seq = sequence(new byte[] { (byte) 0b10101100 });
+    //            assertThatThrownBy(() -> seq.readVarLong(zigZag)).isInstanceOf(BufferUnderflowException.class);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Read a varlong")
+    //        void read() {
+    //            final var seq = sequence(new byte[] { (byte) 0b10101100, 0b00000010 });
+    //            final var pos = seq.position();
+    //            final var value = seq.readVarLong(false);
+    //            assertThat(value).isEqualTo(300);
+    //            assertThat(seq.position()).isEqualTo(pos + 2);
+    //        }
+    //
+    //        @Test
+    //        @DisplayName("Read a varlong with zig zag encoding")
+    //        void readZigZag() {
+    //            final var seq = sequence(new byte[] { (byte) 0b10101101, 0b00000010 });
+    //            final var pos = seq.position();
+    //            final var value = seq.readVarLong(true);
+    //            assertThat(value).isEqualTo(-151);
+    //            assertThat(seq.position()).isEqualTo(pos + 2);
+    //        }
+    //    }
 }
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/UnsafeUtilsTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/UnsafeUtilsTest.java
index 50affdcb..08e2dd43 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/UnsafeUtilsTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/UnsafeUtilsTest.java
@@ -26,14 +26,14 @@ private static long getLong(final byte[] arr, final int offset) {
         final byte b6 = arr[offset + 5];
         final byte b7 = arr[offset + 6];
         final byte b8 = arr[offset + 7];
-        return (((long)b1 << 56) +
-                ((long)(b2 & 255) << 48) +
-                ((long)(b3 & 255) << 40) +
-                ((long)(b4 & 255) << 32) +
-                ((long)(b5 & 255) << 24) +
-                ((b6 & 255) << 16) +
-                ((b7 & 255) <<  8) +
-                (b8 & 255));
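+        // Assemble the value in big-endian order: b1 is the most significant byte.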
+        return (((long) b1 << 56)
+                + ((long) (b2 & 255) << 48)
+                + ((long) (b3 & 255) << 40)
+                + ((long) (b4 & 255) << 32)
+                + ((long) (b5 & 255) << 24)
+                + ((b6 & 255) << 16)
+                + ((b7 & 255) << 8)
+                + (b8 & 255));
     }
 
     // Tests that UnsafeUtils.getInt() and RandomAccessData.getInt() produce the same results
@@ -61,5 +61,4 @@ void getLongTest() {
             assertEquals(getLong(src, i), UnsafeUtils.getLong(src, i));
         }
     }
-    
 }
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableSequentialDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableSequentialDataTest.java
index e349ec24..bba3476e 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableSequentialDataTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableSequentialDataTest.java
@@ -2,7 +2,6 @@
 package com.hedera.pbj.runtime.io;
 
 import edu.umd.cs.findbugs.annotations.NonNull;
-
 import java.io.UncheckedIOException;
 import java.nio.BufferOverflowException;
 import java.nio.BufferUnderflowException;
@@ -39,7 +38,6 @@ private StubbedWritableSequentialData(@NonNull final byte[] bytes) {
             this.limit = this.bytes.length;
         }
 
-
         @Override
         public long capacity() {
             return bytes.length;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableTestBase.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableTestBase.java
index e67c106d..0fa9215b 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableTestBase.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/WritableTestBase.java
@@ -1,17 +1,20 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io;
 
+import static java.nio.ByteOrder.BIG_ENDIAN;
+import static java.nio.ByteOrder.LITTLE_ENDIAN;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
 import com.hedera.pbj.runtime.io.buffer.Bytes;
 import com.hedera.pbj.runtime.io.buffer.RandomAccessData;
 import com.hedera.pbj.runtime.io.stream.WritableStreamingData;
 import edu.umd.cs.findbugs.annotations.NonNull;
-import org.junit.jupiter.api.DisplayName;
-import org.junit.jupiter.api.Nested;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.params.ParameterizedTest;
-import org.junit.jupiter.params.provider.CsvSource;
-import org.junit.jupiter.params.provider.ValueSource;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
@@ -21,19 +24,17 @@
 import java.nio.ByteBuffer;
 import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
-import static java.nio.ByteOrder.BIG_ENDIAN;
-import static java.nio.ByteOrder.LITTLE_ENDIAN;
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.assertj.core.api.Assertions.assertThatThrownBy;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.anyInt;
-import static org.mockito.Mockito.doThrow;
-import static org.mockito.Mockito.mock;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
+import org.junit.jupiter.params.provider.ValueSource;
 
 /**
  * Base test class for testing {@link WritableSequentialData}.
  *
- * <p> I will implement this test in terms of a {@link WritableSequentialData}, which will apply to
+ * <p>This test is written in terms of a {@link WritableSequentialData}, and therefore applies to
  * {@link WritableStreamingData} and {@link BufferedData}.
  */
 public abstract class WritableTestBase extends SequentialTestBase {
@@ -58,7 +59,8 @@ void writeToEofDataThrows() {
             // Given an eof sequence
             final var seq = eofSequence();
             // When we try to write a byte, then we get a BufferOverflowException
-            assertThatThrownBy(() -> seq.writeByte((byte) 1)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeByte((byte) 1))
+                    .isInstanceOf(BufferOverflowException.class);
         }
 
         @Test
@@ -69,7 +71,8 @@ void writePastLimit() {
             seq.limit(5);
             seq.skip(5);
             // When we try to write a byte, then we get a BufferOverflowException
-            assertThatThrownBy(() -> seq.writeByte((byte) 1)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeByte((byte) 1))
+                    .isInstanceOf(BufferOverflowException.class);
         }
 
         @Test
@@ -99,7 +102,8 @@ void writeToEofDataThrows() {
             // Given an eof sequence
             final var seq = eofSequence();
             // When we try to write an unsigned byte, then we get a BufferOverflowException
-            assertThatThrownBy(() -> seq.writeUnsignedByte(0b1101_0011)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeUnsignedByte(0b1101_0011))
+                    .isInstanceOf(BufferOverflowException.class);
         }
 
         @Test
@@ -110,7 +114,8 @@ void writePastLimit() {
             seq.limit(5);
             seq.skip(5);
             // When we try to read an unsigned byte, then we get a BufferOverflowException
-            assertThatThrownBy(() -> seq.writeUnsignedByte(0b1101_0011)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeUnsignedByte(0b1101_0011))
+                    .isInstanceOf(BufferOverflowException.class);
         }
 
         @Test
@@ -118,13 +123,14 @@ void writePastLimit() {
         void write() {
             // Given a sequence
             final var seq = sequence();
-            // When we write the byte (with a single byte that could be interpreted as negative if signed),
+            // When we write the byte (with a single byte that could be interpreted as negative if
+            // signed),
             final var pos = seq.position();
             seq.writeUnsignedByte(0b1110_0011);
             // then the position forward by a single byte
             assertThat(seq.position()).isEqualTo(pos + 1);
             // and the byte was written unmodified
-            final var expected = new byte[] { (byte) 0b1110_0011 };
+            final var expected = new byte[] {(byte) 0b1110_0011};
             assertThat(extractWrittenBytes(seq)).isEqualTo(expected);
         }
     }
@@ -138,27 +144,38 @@ void readNullSrcThrows() {
             // Given a sequence
             final var seq = sequence();
 
-            // When we try to write bytes using a null byte array, then we get a NullPointerException
+            // When we try to write bytes using a null byte array, then we get a
+            // NullPointerException
             //noinspection DataFlowIssue
-            assertThatThrownBy(() -> seq.writeBytes((byte[]) null)).isInstanceOf(NullPointerException.class);
+            assertThatThrownBy(() -> seq.writeBytes((byte[]) null))
+                    .isInstanceOf(NullPointerException.class);
             //noinspection DataFlowIssue
-            assertThatThrownBy(() -> seq.writeBytes(null, 0, 10)).isInstanceOf(NullPointerException.class);
+            assertThatThrownBy(() -> seq.writeBytes(null, 0, 10))
+                    .isInstanceOf(NullPointerException.class);
 
-            // When we try to write bytes using a null ByteBuffer, then we get a NullPointerException
+            // When we try to write bytes using a null ByteBuffer, then we get a
+            // NullPointerException
             //noinspection DataFlowIssue
-            assertThatThrownBy(() -> seq.writeBytes((ByteBuffer) null)).isInstanceOf(NullPointerException.class);
+            assertThatThrownBy(() -> seq.writeBytes((ByteBuffer) null))
+                    .isInstanceOf(NullPointerException.class);
 
-            // When we try to write bytes using a null BufferedData, then we get a NullPointerException
+            // When we try to write bytes using a null BufferedData, then we get a
+            // NullPointerException
             //noinspection DataFlowIssue
-            assertThatThrownBy(() -> seq.writeBytes((BufferedData) null)).isInstanceOf(NullPointerException.class);
+            assertThatThrownBy(() -> seq.writeBytes((BufferedData) null))
+                    .isInstanceOf(NullPointerException.class);
 
-            // When we try to write bytes using a null RandomAccessData, then we get a NullPointerException
+            // When we try to write bytes using a null RandomAccessData, then we get a
+            // NullPointerException
             //noinspection DataFlowIssue
-            assertThatThrownBy(() -> seq.writeBytes((RandomAccessData) null)).isInstanceOf(NullPointerException.class);
+            assertThatThrownBy(() -> seq.writeBytes((RandomAccessData) null))
+                    .isInstanceOf(NullPointerException.class);
 
-            // When we try to write bytes using a null InputStream, then we get a NullPointerException
+            // When we try to write bytes using a null InputStream, then we get a
+            // NullPointerException
             //noinspection DataFlowIssue
-            assertThatThrownBy(() -> seq.writeBytes(null, 10)).isInstanceOf(NullPointerException.class);
+            assertThatThrownBy(() -> seq.writeBytes(null, 10))
+                    .isInstanceOf(NullPointerException.class);
         }
 
         @Test
@@ -166,12 +183,15 @@ void readNullSrcThrows() {
         void negativeOffsetThrows() {
             // Given a sequence
             final var seq = sequence();
-            // When we try to write bytes using a byte array with a negative offset, then we get an IndexOutOfBoundsException
-            assertThatThrownBy(() -> seq.writeBytes(new byte[10], -1, 10)).isInstanceOf(IndexOutOfBoundsException.class);
+            // When we try to write bytes using a byte array with a negative offset, then we get an
+            // IndexOutOfBoundsException
+            assertThatThrownBy(() -> seq.writeBytes(new byte[10], -1, 10))
+                    .isInstanceOf(IndexOutOfBoundsException.class);
         }
 
         @Test
-        @DisplayName("Writing bytes with an offset that is too large throws IndexOutOfBoundsException")
+        @DisplayName(
+                "Writing bytes with an offset that is too large throws IndexOutOfBoundsException")
         void tooLargeOffsetThrows() {
             // Given a sequence
             final var seq = sequence();
@@ -179,10 +199,12 @@ void tooLargeOffsetThrows() {
             // then we get an IndexOutOfBoundsException
             assertThatThrownBy(() -> seq.writeBytes(new byte[10], 11, 10))
                     .isInstanceOf(IndexOutOfBoundsException.class);
-            // When we try to write bytes using a byte array with an offset + length that is too large,
+            // When we try to write bytes using a byte array with an offset + length that is too
+            // large,
             // then we get either an IndexOutOfBoundsException or an BufferUnderflowException
             assertThatThrownBy(() -> seq.writeBytes(new byte[10], 9, 2))
-                    .isInstanceOfAny(IndexOutOfBoundsException.class, BufferUnderflowException.class);
+                    .isInstanceOfAny(
+                            IndexOutOfBoundsException.class, BufferUnderflowException.class);
         }
 
         @Test
@@ -190,11 +212,15 @@ void tooLargeOffsetThrows() {
         void negativeLengthThrows() {
             // Given a sequence
             final var seq = sequence();
-            // When we try to write bytes using a byte array with a negative length, then we get an IllegalArgumentException
-            assertThatThrownBy(() -> seq.writeBytes(new byte[10], 0, -1)).isInstanceOf(IllegalArgumentException.class);
-            // When we try to write bytes using an input stream with a negative length, then we get an IllegalArgumentException
+            // When we try to write bytes using a byte array with a negative length, then we get an
+            // IllegalArgumentException
+            assertThatThrownBy(() -> seq.writeBytes(new byte[10], 0, -1))
+                    .isInstanceOf(IllegalArgumentException.class);
+            // When we try to write bytes using an input stream with a negative length, then we get
+            // an IllegalArgumentException
             final var stream = new ByteArrayInputStream(new byte[10]);
-            assertThatThrownBy(() -> seq.writeBytes(stream,  -1)).isInstanceOf(IllegalArgumentException.class);
+            assertThatThrownBy(() -> seq.writeBytes(stream, -1))
+                    .isInstanceOf(IllegalArgumentException.class);
         }
 
         @Test
@@ -204,24 +230,31 @@ void writeToEofThrows() {
             final var seq = eofSequence();
 
             // When we try to write a byte array, then we get a BufferOverflowException
-            assertThatThrownBy(() -> seq.writeBytes(new byte[10])).isInstanceOf(BufferOverflowException.class);
-            assertThatThrownBy(() -> seq.writeBytes(new byte[10], 0, 10)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeBytes(new byte[10]))
+                    .isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeBytes(new byte[10], 0, 10))
+                    .isInstanceOf(BufferOverflowException.class);
 
             // When we try to write a ByteBuffer, then we get a BufferOverflowException
             final var byteBuffer = ByteBuffer.allocate(10);
-            assertThatThrownBy(() -> seq.writeBytes(byteBuffer)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeBytes(byteBuffer))
+                    .isInstanceOf(BufferOverflowException.class);
 
             // When we try to write a BufferedData, then we get a BufferOverflowException
             final var bufferedData = BufferedData.allocate(10);
-            assertThatThrownBy(() -> seq.writeBytes(bufferedData)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeBytes(bufferedData))
+                    .isInstanceOf(BufferOverflowException.class);
 
             // When we try to write Bytes, then we get a BufferOverflowException
             final var bytes = Bytes.wrap("abc");
-            assertThatThrownBy(() -> seq.writeBytes(bytes)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeBytes(bytes))
+                    .isInstanceOf(BufferOverflowException.class);
         }
 
         @Test
-        @DisplayName("Writing bytes where the sequence position is at the limit throws BufferOverflowException")
+        @DisplayName(
+                "Writing bytes where the sequence position is at the limit throws"
+                        + " BufferOverflowException")
         void writePastLimit() {
             // Given a sequence with a limit where position == limit
             final var seq = sequence();
@@ -229,34 +262,42 @@ void writePastLimit() {
             seq.skip(5);
 
             // When we try to write a byte array, then we get a BufferOverflowException
-            assertThatThrownBy(() -> seq.writeBytes(new byte[10])).isInstanceOf(BufferOverflowException.class);
-            assertThatThrownBy(() -> seq.writeBytes(new byte[10], 0, 10)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeBytes(new byte[10]))
+                    .isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeBytes(new byte[10], 0, 10))
+                    .isInstanceOf(BufferOverflowException.class);
 
             // When we try to write a ByteBuffer, then we get a BufferOverflowException
             final var byteBuffer = ByteBuffer.allocate(10);
-            assertThatThrownBy(() -> seq.writeBytes(byteBuffer)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeBytes(byteBuffer))
+                    .isInstanceOf(BufferOverflowException.class);
 
             // When we try to write a BufferedData, then we get a BufferOverflowException
             final var bufferedData = BufferedData.allocate(10);
-            assertThatThrownBy(() -> seq.writeBytes(bufferedData)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeBytes(bufferedData))
+                    .isInstanceOf(BufferOverflowException.class);
 
             // When we try to write Bytes, then we get a BufferOverflowException
             final var bytes = Bytes.wrap("abc");
-            assertThatThrownBy(() -> seq.writeBytes(bytes)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeBytes(bytes))
+                    .isInstanceOf(BufferOverflowException.class);
         }
 
-
         @Test
-        @DisplayName("Writing bytes from an InputStream with less data than the maxLength returns number of bytes written")
+        @DisplayName(
+                "Writing bytes from an InputStream with less data than the maxLength returns number"
+                        + " of bytes written")
         void writingFromInputStreamWithInsufficientData() {
             // Given a sequence and an input stream with some data
             final var seq = sequence();
-            final var bytes = new byte[] { 1, 2, 3, 4, 5 };
+            final var bytes = new byte[] {1, 2, 3, 4, 5};
             final var stream = new ByteArrayInputStream(bytes);
-            // When we write the stream data to the sequence, and the max length is larger than the number
+            // When we write the stream data to the sequence, and the max length is larger than the
+            // number
             // of bytes we have to write,
             final var numBytesWritten = seq.writeBytes(stream, 10);
-            // Then only the bytes available in the stream are written and the number of bytes written are returned.
+            // Then only the bytes available in the stream are written and the number of bytes
+            // written are returned.
             assertThat(numBytesWritten).isEqualTo(5);
             assertThat(extractWrittenBytes(seq)).isEqualTo(bytes);
         }
@@ -266,7 +307,7 @@ void writingFromInputStreamWithInsufficientData() {
         void writingFromInputStreamWithNoData() {
             // Given a sequence and an input stream with no data
             final var seq = sequence();
-            final var bytes = new byte[] { };
+            final var bytes = new byte[] {};
             final var stream = new ByteArrayInputStream(bytes);
             // When we write the stream data to the sequence
             final var numBytesWritten = seq.writeBytes(stream, 10);
@@ -279,7 +320,7 @@ void writingFromInputStreamWithNoData() {
         void writingFromInputStreamWithLotsOfData() {
             // Given a sequence and an input stream with lots of data
             final var seq = sequence();
-            final var bytes = new byte[1024*1024];
+            final var bytes = new byte[1024 * 1024];
             for (int i = 0; i < bytes.length; i++) {
                 bytes[i] = (byte) i;
             }
@@ -307,13 +348,15 @@ void writeZeroSrcByteArray() {
         }
 
         @Test
-        @DisplayName("Writing bytes from a src byte array with offset and length where the length is 0")
+        @DisplayName(
+                "Writing bytes from a src byte array with offset and length where the length is 0")
         void writeZeroSrcByteArrayWithOffset() {
             // Given a sequence and a src byte array
             final var seq = sequence();
             final var src = new byte[10];
             final var pos = seq.position();
-            // When we try to write bytes from the src but with a 0 length, then the position does not change,
+            // When we try to write bytes from the src but with a 0 length, then the position does
+            // not change,
             // and the sequence is unchanged.
             seq.writeBytes(src, 5, 0);
             assertThat(seq.position()).isEqualTo(pos);
@@ -334,7 +377,7 @@ void writeZeroSrcByteBuffer() {
             assertThat(extractWrittenBytes(seq)).isEmpty();
         }
 
-        @Test 
+        @Test
         @DisplayName("Writing bytes from a src BufferedData where the src has length of 0")
         void writeZeroSrcBufferedData() {
             // Given a sequence and an empty src BufferedData
@@ -349,13 +392,15 @@ void writeZeroSrcBufferedData() {
         }
 
         @Test
-        @DisplayName("Writing bytes from a src byte array where the src is smaller than the sequence limit")
+        @DisplayName(
+                "Writing bytes from a src byte array where the src is smaller than the sequence"
+                        + " limit")
         void writeSmallerSrcByteArray() {
             // Given a sequence with a src byte array who's size is less than the limit
             final var seq = sequence();
             seq.limit(10);
             // When we try writing bytes from the src
-            final var src = new byte[] { 1, 2, 3, 4, 5 };
+            final var src = new byte[] {1, 2, 3, 4, 5};
             final var pos = seq.position();
             seq.writeBytes(src);
             // Then the sequence received those bytes and the position is updated
@@ -364,28 +409,32 @@ void writeSmallerSrcByteArray() {
         }
 
         @Test
-        @DisplayName("Writing bytes from a src byte array with offset where the src is smaller than the sequence limit")
+        @DisplayName(
+                "Writing bytes from a src byte array with offset where the src is smaller than the"
+                        + " sequence limit")
         void writeSmallerSrcByteArrayWithOffset() {
             // Given a sequence with a src byte array who's size is less than the limit
             final var seq = sequence();
             seq.limit(10);
-            final var src = new byte[] { 1, 2, 3, 4, 5 };
+            final var src = new byte[] {1, 2, 3, 4, 5};
             // When we try writing bytes from the src
             final var pos = seq.position();
             seq.writeBytes(src, 2, 2);
             // Then the sequence received those bytes and the position is updated
-            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 3, 4 });
+            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {3, 4});
             assertThat(seq.position()).isEqualTo(pos + 2);
         }
 
         @Test
-        @DisplayName("Writing bytes from a src ByteBuffer where the src is smaller than the sequence limit")
+        @DisplayName(
+                "Writing bytes from a src ByteBuffer where the src is smaller than the sequence"
+                        + " limit")
         void writeSmallerSrcByteBuffer() {
             // Given a sequence with a src ByteBuffer who's size is less than the limit
             final var seq = sequence();
             seq.limit(10);
             // When we try writing bytes from the src
-            final var src = ByteBuffer.wrap(new byte[] { 1, 2, 3, 4, 5 });
+            final var src = ByteBuffer.wrap(new byte[] {1, 2, 3, 4, 5});
             final var pos = seq.position();
             seq.writeBytes(src);
             // Then the sequence received those bytes and the position is updated
@@ -394,93 +443,112 @@ void writeSmallerSrcByteBuffer() {
         }
 
         @Test
-        @DisplayName("Writing bytes from a src ByteBuffer with offset where the src is smaller than the sequence limit")
+        @DisplayName(
+                "Writing bytes from a src ByteBuffer with offset where the src is smaller than the"
+                        + " sequence limit")
         void writeSmallerSrcByteBufferWithOffset() {
             // Given a sequence with a src ByteBuffer who's size is less than the limit
             final var seq = sequence();
             seq.limit(10);
-            final var src = ByteBuffer.wrap(new byte[] { 1, 2, 3, 4, 5 });
+            final var src = ByteBuffer.wrap(new byte[] {1, 2, 3, 4, 5});
             src.position(2);
             // When we try writing bytes from the src
             final var pos = seq.position();
             seq.writeBytes(src);
             // Then the sequence received those bytes and the position is updated
-            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 3, 4, 5 });
+            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {3, 4, 5});
             assertThat(seq.position()).isEqualTo(pos + 3);
         }
 
         @Test
-        @DisplayName("Writing bytes from a src BufferedData where the src is smaller than the sequence limit")
+        @DisplayName(
+                "Writing bytes from a src BufferedData where the src is smaller than the sequence"
+                        + " limit")
         void writeSmallerSrcBufferedData() {
             // Given a sequence with a src BufferedData who's size is less than the limit
             final var seq = sequence();
             seq.limit(10);
             // When we try writing bytes from the src
-            final var src = BufferedData.wrap(new byte[] { 1, 2, 3, 4, 5 });
+            final var src = BufferedData.wrap(new byte[] {1, 2, 3, 4, 5});
             final var pos = seq.position();
             seq.writeBytes(src);
             // Then the sequence received those bytes and the position is updated
-            final var writtenBytes = new byte[1024]; // make large enough to hold extra bytes should they have been written
+            // make large enough to hold extra bytes should they have been written
+            final var writtenBytes = new byte[1024];
             assertThat(src.getBytes(0, writtenBytes)).isEqualTo(5);
             assertThat(extractWrittenBytes(seq)).isEqualTo(Arrays.copyOfRange(writtenBytes, 0, 5));
             assertThat(seq.position()).isEqualTo(pos + 5);
         }
 
         @Test
-        @DisplayName("Writing bytes from a src BufferedData with offset where the src is smaller than the sequence limit")
+        @DisplayName(
+                "Writing bytes from a src BufferedData with offset where the src is smaller than"
+                        + " the sequence limit")
         void writeSmallerSrcBufferedDataWithOffset() {
             // Given a sequence with a src ByteBuffer who's size is less than the limit
             final var seq = sequence();
             seq.limit(10);
-            final var src = BufferedData.wrap(new byte[] { 1, 2, 3, 4, 5 });
+            final var src = BufferedData.wrap(new byte[] {1, 2, 3, 4, 5});
             src.position(2);
             // When we try writing bytes from the src
             final var pos = seq.position();
             seq.writeBytes(src);
             // Then the sequence received those bytes and the position is updated
-            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 3, 4, 5 });
+            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {3, 4, 5});
             assertThat(seq.position()).isEqualTo(pos + 3);
         }
 
         @Test
-        @DisplayName("Writing bytes from a src RandomAccessData where the src is smaller than the sequence limit")
+        @DisplayName(
+                "Writing bytes from a src RandomAccessData where the src is smaller than the"
+                        + " sequence limit")
         void writeSmallerSrcRandomAccessData() {
             // Given a sequence with a src RandomAccessData who's size is less than the limit
             final var seq = sequence();
             seq.limit(10);
             // When we try writing bytes from the src
-            final var src = Bytes.wrap(new byte[] { 1, 2, 3, 4, 5 });
+            final var src = Bytes.wrap(new byte[] {1, 2, 3, 4, 5});
             final var pos = seq.position();
             seq.writeBytes(src);
             // Then the sequence received those bytes and the position is updated
-            final var writtenBytes = new byte[1024]; // make large enough to hold extra bytes should they have been written
+            // make large enough to hold extra bytes should they have been written
+            final var writtenBytes = new byte[1024];
             assertThat(src.getBytes(0, writtenBytes)).isEqualTo(5);
             assertThat(extractWrittenBytes(seq)).isEqualTo(Arrays.copyOfRange(writtenBytes, 0, 5));
             assertThat(seq.position()).isEqualTo(pos + 5);
         }
 
         @Test
-        @DisplayName("Writing bytes from a src InputStream where the maxLength is smaller than the sequence limit")
+        @DisplayName(
+                "Writing bytes from a src InputStream where the maxLength is smaller than the"
+                        + " sequence limit")
         void writeSmallerSrcInputStream() {
             // Given a sequence with a src InputStream with lots of items
             final var seq = sequence();
             seq.limit(10);
-            final var srcBytes = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20 };
+            final var srcBytes =
+                    new byte[] {
+                        1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20
+                    };
             final var stream = new ByteArrayInputStream(srcBytes);
             // When we try writing bytes from the src with a maxLength less than the limit
             final var pos = seq.position();
             seq.writeBytes(stream, 5);
             // Then the sequence received those fewer bytes and the position is updated
-            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 1, 2, 3, 4, 5 });
+            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {1, 2, 3, 4, 5});
             assertThat(seq.position()).isEqualTo(pos + 5);
         }
 
         @Test
-        @DisplayName("Writing bytes from a src byte array where the src is the same length as the sequence limit")
+        @DisplayName(
+                "Writing bytes from a src byte array where the src is the same length as the"
+                        + " sequence limit")
         void writeSrcByteArray() {
             final var seq = sequence();
             seq.limit(10);
-            final var src = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
+            final var src = new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
             final var pos = seq.position();
             seq.writeBytes(src);
             assertThat(extractWrittenBytes(seq)).isEqualTo(src);
@@ -488,23 +556,27 @@ void writeSrcByteArray() {
         }
 
         @Test
-        @DisplayName("Writing bytes from a src byte array with offset where the src is the same length as the sequence limit")
+        @DisplayName(
+                "Writing bytes from a src byte array with offset where the src is the same length"
+                        + " as the sequence limit")
         void writeSrcByteArrayWithOffset() {
             final var seq = sequence();
             seq.limit(5);
-            final var src = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
+            final var src = new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
             final var pos = seq.position();
             seq.writeBytes(src, 5, 5);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 6, 7, 8, 9, 10 });
+            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {6, 7, 8, 9, 10});
             assertThat(seq.position()).isEqualTo(pos + 5);
         }
 
         @Test
-        @DisplayName("Writing bytes from a src ByteBuffer where the src is the same length as the sequence limit")
+        @DisplayName(
+                "Writing bytes from a src ByteBuffer where the src is the same length as the"
+                        + " sequence limit")
         void writeSrcByteBuffer() {
             final var seq = sequence();
             seq.limit(10);
-            final var src = ByteBuffer.wrap(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
+            final var src = ByteBuffer.wrap(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10});
             final var pos = seq.position();
             seq.writeBytes(src);
             assertThat(extractWrittenBytes(seq)).isEqualTo(src.array());
@@ -512,16 +584,18 @@ void writeSrcByteBuffer() {
         }
 
         @Test
-        @DisplayName("Writing bytes from a src ByteBuffer with offset where the src is the same length as the sequence limit")
+        @DisplayName(
+                "Writing bytes from a src ByteBuffer with offset where the src is the same length"
+                        + " as the sequence limit")
         void writeSrcByteBufferWithOffset() {
             final var seq = sequence();
             seq.limit(5);
-            final var src = ByteBuffer.wrap(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
+            final var src = ByteBuffer.wrap(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10});
             src.position(2);
             src.limit(7);
             final var pos = seq.position();
             seq.writeBytes(src);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 3, 4, 5, 6, 7 });
+            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {3, 4, 5, 6, 7});
             assertThat(seq.position()).isEqualTo(pos + 5);
         }
 
@@ -532,90 +606,110 @@ void writeSrcDirectByteBufferWithOffset() {
             final int LEN = 10;
             seq.limit(LEN);
             final var src = ByteBuffer.allocateDirect(LEN);
-            src.put(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
+            src.put(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10});
             src.flip();
             final var pos = seq.position();
             seq.writeBytes(src);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10});
             assertThat(seq.position()).isEqualTo(pos + 10);
         }
 
         @Test
-        @DisplayName("Writing bytes from a src BufferedData where the src is the same length as the sequence limit")
+        @DisplayName(
+                "Writing bytes from a src BufferedData where the src is the same length as the"
+                        + " sequence limit")
         void writeSrcBufferedData() {
             final var seq = sequence();
             seq.limit(10);
-            final var src = BufferedData.wrap(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
+            final var src = BufferedData.wrap(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10});
             final var pos = seq.position();
             seq.writeBytes(src);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10});
             assertThat(seq.position()).isEqualTo(pos + 10);
         }
 
         @Test
-        @DisplayName("Writing bytes from a src BufferedData with offset where the src is the same length as the sequence limit")
+        @DisplayName(
+                "Writing bytes from a src BufferedData with offset where the src is the same length"
+                        + " as the sequence limit")
         void writeSrcBufferedDataWithOffset() {
             final var seq = sequence();
             seq.limit(5);
-            final var src = BufferedData.wrap(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
+            final var src = BufferedData.wrap(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10});
             src.position(2);
             src.limit(7);
             final var pos = seq.position();
             seq.writeBytes(src);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 3, 4, 5, 6, 7 });
+            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] {3, 4, 5, 6, 7});
             assertThat(seq.position()).isEqualTo(pos + 5);
         }
 
         @Test
-        @DisplayName("Writing bytes from a src RandomAccessData where the src is the same length as the sequence limit")
+        @DisplayName(
+                "Writing bytes from a src RandomAccessData where the src is the same length as the"
+                        + " sequence limit")
         void writeSrcRandomAccessData() {
             final var seq = sequence();
             seq.limit(10);
-            final var src = Bytes.wrap(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
+            final var src = Bytes.wrap(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10});
             final var pos = seq.position();
             seq.writeBytes(src);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10});
             assertThat(seq.position()).isEqualTo(pos + 10);
         }
 
         @Test
-        @DisplayName("Writing bytes from a src InputStream where the maxLength is the same length as the sequence limit")
+        @DisplayName(
+                "Writing bytes from a src InputStream where the maxLength is the same length as the"
+                        + " sequence limit")
         void writeSrcInputStream() {
             // Given a sequence with a src InputStream with the same number of items as the limit
             final var seq = sequence();
             seq.limit(10);
-            final var srcBytes = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
+            final var srcBytes = new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
             final var stream = new ByteArrayInputStream(srcBytes);
             // When we try writing bytes from the src with a maxLength equal to limit
             final var pos = seq.position();
             seq.writeBytes(stream, 10);
             // Then the sequence received those bytes and the position is updated
-            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10});
             assertThat(seq.position()).isEqualTo(pos + 10);
         }
 
         @Test
-        @DisplayName("Writing bytes from a src InputStream where the maxLength is the larger than the sequence limit")
+        @DisplayName(
+                "Writing bytes from a src InputStream where the maxLength is the larger than the"
+                        + " sequence limit")
         void writeSrcInputStreamLargerThanLimit() {
             // Given a sequence with a src InputStream with more items than the limit
             final var seq = sequence();
             seq.limit(10);
-            final var srcBytes = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20 };
+            final var srcBytes =
+                    new byte[] {
+                        1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20
+                    };
             final var stream = new ByteArrayInputStream(srcBytes);
             // When we try writing bytes from the src with a maxLength greater than the limit
             final var pos = seq.position();
             seq.writeBytes(stream, 15);
             // Then the sequence received only up to `limit` bytes and the position is updated
-            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10});
             assertThat(seq.position()).isEqualTo(pos + 10);
         }
 
         @Test
-        @DisplayName("Writing bytes from a src InputStream with offset where the maxLength is 0 does nothing")
+        @DisplayName(
+                "Writing bytes from a src InputStream with offset where the maxLength is 0 does"
+                        + " nothing")
         void writeSrcInputStreamWithTooSmallMaxLength() {
             // Given a sequence with a src input stream
             final var seq = sequence();
-            final var arr = new byte[] { 1, 2, 3, 4, 5 };
+            final var arr = new byte[] {1, 2, 3, 4, 5};
             final var src = new ByteArrayInputStream(arr);
             // When we try writing bytes from the src with a maxLength that is == 0
             final var pos = seq.position();
@@ -626,12 +720,14 @@ void writeSrcInputStreamWithTooSmallMaxLength() {
         }
 
         @Test
-        @DisplayName("Writing bytes from a src InputStream where nothing is remaining in the seq does nothing")
+        @DisplayName(
+                "Writing bytes from a src InputStream where nothing is remaining in the seq does"
+                        + " nothing")
         void writeSrcInputStreamWithNothingRemaining() {
             // Given a sequence with a src input stream and a seq with nothing remaining
             final var seq = sequence();
             seq.limit(0);
-            final var arr = new byte[] { 1, 2, 3, 4, 5 };
+            final var arr = new byte[] {1, 2, 3, 4, 5};
             final var src = new ByteArrayInputStream(arr);
             // When we try writing bytes from the src with a maxLength that is > 0
             final var pos = seq.position();
@@ -648,21 +744,24 @@ void closed() throws IOException {
             final var seq = sequence();
             final var src = mock(InputStream.class);
             doThrow(IOException.class).when(src).read(any(), anyInt(), anyInt());
-            // When we try to write some bytes, then we get an exception because the stream throws IOException
-            assertThatThrownBy(() -> seq.writeBytes(src, 5)).isInstanceOf(UncheckedIOException.class);
+            // When we try to write some bytes, then we get an exception because the stream throws
+            // IOException
+            assertThatThrownBy(() -> seq.writeBytes(src, 5))
+                    .isInstanceOf(UncheckedIOException.class);
         }
 
         @ParameterizedTest(name = "offset={0}, length={1}")
         @CsvSource({
-                "-1, 1", // Negative offset
-                "100, 10", // Offset larger than the src array size
-                "5, 10", // Offset+Length larger than the src array size
+            "-1, 1", // Negative offset
+            "100, 10", // Offset larger than the src array size
+            "5, 10", // Offset+Length larger than the src array size
         })
         @DisplayName("Writing bytes where the src offset and length are bad")
         void badOffsetLength(int offset, int length) {
             final var seq = sequence();
             assertThatThrownBy(() -> seq.writeBytes(new byte[10], offset, length))
-                    .isInstanceOfAny(IndexOutOfBoundsException.class, BufferUnderflowException.class);
+                    .isInstanceOfAny(
+                            IndexOutOfBoundsException.class, BufferUnderflowException.class);
         }
     }
 
@@ -684,12 +783,16 @@ void writePastLimit() {
             seq.limit(5);
             // When we try to write an int, then we get a BufferOverflowException
             seq.skip(4); // Only 1 byte left, not enough
-            assertThatThrownBy(() -> seq.writeInt(1234)).isInstanceOf(BufferOverflowException.class);
-            assertThatThrownBy(() -> seq.writeInt(1234, LITTLE_ENDIAN)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeInt(1234))
+                    .isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeInt(1234, LITTLE_ENDIAN))
+                    .isInstanceOf(BufferOverflowException.class);
         }
 
         @Test
-        @DisplayName("Writing an int when less than 4 bytes are remaining throws BufferOverflowException")
+        @DisplayName(
+                "Writing an int when less than 4 bytes are remaining throws"
+                        + " BufferOverflowException")
         void writeInsufficientDataThrows() {
             // Given a sequence with a limit where position == limit
             final var seq = sequence();
@@ -698,7 +801,8 @@ void writeInsufficientDataThrows() {
             final var pos = 10 - Integer.BYTES + 1; // A position that doesn't reserve enough bytes
             seq.skip(pos);
             for (int i = pos; i < 10; i++, seq.skip(1)) {
-                assertThatThrownBy(() -> seq.writeInt(1)).isInstanceOf(BufferOverflowException.class);
+                assertThatThrownBy(() -> seq.writeInt(1))
+                        .isInstanceOf(BufferOverflowException.class);
             }
         }
 
@@ -709,7 +813,8 @@ void write(int value) {
             // Given a sequence
             final var seq = sequence();
             final var pos = seq.position();
-            // When we write an int, then it is the same as the one we wrote, and the position has moved forward
+            // When we write an int, then it is the same as the one we wrote, and the position has
+            // moved forward
             // by 4 bytes
             seq.writeInt(value);
             assertThat(seq.position()).isEqualTo(pos + Integer.BYTES);
@@ -724,7 +829,8 @@ void writeLittleEndian(int value) {
             final var pos = seq.position();
             seq.writeInt(value, LITTLE_ENDIAN);
             assertThat(seq.position()).isEqualTo(pos + Integer.BYTES);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(asBytes(c -> c.putInt(value), LITTLE_ENDIAN));
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(asBytes(c -> c.putInt(value), LITTLE_ENDIAN));
         }
 
         @ParameterizedTest(name = "value={0}")
@@ -735,7 +841,8 @@ void writeBigEndian(int value) {
             final var pos = seq.position();
             seq.writeInt(value, BIG_ENDIAN);
             assertThat(seq.position()).isEqualTo(pos + Integer.BYTES);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(asBytes(c -> c.putInt(value), BIG_ENDIAN));
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(asBytes(c -> c.putInt(value), BIG_ENDIAN));
         }
 
         @Test
@@ -746,15 +853,18 @@ void writeMixedEndian() {
             seq.writeInt(0x05060708, LITTLE_ENDIAN);
             seq.writeInt(0x090A0B0C);
             seq.writeInt(0x0D0E0F10, LITTLE_ENDIAN);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(asBytes(c -> {
-                c.putInt(0x01020304);
-                c.order(LITTLE_ENDIAN);
-                c.putInt(0x05060708);
-                c.order(BIG_ENDIAN);
-                c.putInt(0x090A0B0C);
-                c.order(LITTLE_ENDIAN);
-                c.putInt(0x0D0E0F10);
-            }));
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(
+                            asBytes(
+                                    c -> {
+                                        c.putInt(0x01020304);
+                                        c.order(LITTLE_ENDIAN);
+                                        c.putInt(0x05060708);
+                                        c.order(BIG_ENDIAN);
+                                        c.putInt(0x090A0B0C);
+                                        c.order(LITTLE_ENDIAN);
+                                        c.putInt(0x0D0E0F10);
+                                    }));
         }
     }
 
@@ -765,7 +875,8 @@ final class WriteUnsignedIntTest {
         @DisplayName("Writing an unsigned int to an eof sequence throws BufferOverflowException")
         void writeToEofSequenceThrows() {
             final var seq = eofSequence();
-            assertThatThrownBy(() -> seq.writeUnsignedInt(1)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeUnsignedInt(1))
+                    .isInstanceOf(BufferOverflowException.class);
         }
 
         @Test
@@ -776,12 +887,16 @@ void writePastLimit() {
             seq.limit(5);
             // When we try to write an unsigned int, then we get a BufferOverflowException
             seq.skip(4); // Only 1 byte left, not enough
-            assertThatThrownBy(() -> seq.writeUnsignedInt(1)).isInstanceOf(BufferOverflowException.class);
-            assertThatThrownBy(() -> seq.writeUnsignedInt(1234, LITTLE_ENDIAN)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeUnsignedInt(1))
+                    .isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeUnsignedInt(1234, LITTLE_ENDIAN))
+                    .isInstanceOf(BufferOverflowException.class);
         }
 
         @Test
-        @DisplayName("Writing an unsigned int when less than 4 bytes are remaining throws BufferOverflowException")
+        @DisplayName(
+                "Writing an unsigned int when less than 4 bytes are remaining throws"
+                        + " BufferOverflowException")
         void writeInsufficientDataThrows() {
             // Given a sequence with a limit where position == limit
             final var seq = sequence();
@@ -790,7 +905,8 @@ void writeInsufficientDataThrows() {
             final var pos = 10 - Integer.BYTES + 1; // A position that doesn't reserve enough bytes
             seq.skip(pos);
             for (int i = pos; i < 10; i++, seq.skip(1)) {
-                assertThatThrownBy(() -> seq.writeUnsignedInt(1)).isInstanceOf(BufferOverflowException.class);
+                assertThatThrownBy(() -> seq.writeUnsignedInt(1))
+                        .isInstanceOf(BufferOverflowException.class);
             }
         }
 
@@ -813,7 +929,8 @@ void writeLittleEndian(long value) {
             final var pos = seq.position();
             seq.writeUnsignedInt(value, LITTLE_ENDIAN);
             assertThat(seq.position()).isEqualTo(pos + Integer.BYTES);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(asBytes(c -> c.putInt((int) value), LITTLE_ENDIAN));
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(asBytes(c -> c.putInt((int) value), LITTLE_ENDIAN));
         }
 
         @ParameterizedTest(name = "value={0}")
@@ -824,7 +941,8 @@ void writeBigEndian(long value) {
             final var pos = seq.position();
             seq.writeUnsignedInt(value, BIG_ENDIAN);
             assertThat(seq.position()).isEqualTo(pos + Integer.BYTES);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(asBytes(c -> c.putInt((int) value), BIG_ENDIAN));
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(asBytes(c -> c.putInt((int) value), BIG_ENDIAN));
         }
 
         @Test
@@ -835,15 +953,18 @@ void writeMixedEndian() {
             seq.writeUnsignedInt(0x95060708L, LITTLE_ENDIAN);
             seq.writeUnsignedInt(0x990A0B0CL);
             seq.writeUnsignedInt(0x9D0E0F10L, LITTLE_ENDIAN);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(asBytes(c -> {
-                c.putInt(0x91020304);
-                c.order(LITTLE_ENDIAN);
-                c.putInt(0x95060708);
-                c.order(BIG_ENDIAN);
-                c.putInt(0x990A0B0C);
-                c.order(LITTLE_ENDIAN);
-                c.putInt(0x9D0E0F10);
-            }));
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(
+                            asBytes(
+                                    c -> {
+                                        c.putInt(0x91020304);
+                                        c.order(LITTLE_ENDIAN);
+                                        c.putInt(0x95060708);
+                                        c.order(BIG_ENDIAN);
+                                        c.putInt(0x990A0B0C);
+                                        c.order(LITTLE_ENDIAN);
+                                        c.putInt(0x9D0E0F10);
+                                    }));
         }
     }
 
@@ -866,11 +987,14 @@ void writePastLimit() {
             // When we try to write a long, then we get a BufferOverflowException
             seq.skip(4); // Only 1 byte left, not enough
             assertThatThrownBy(() -> seq.writeLong(1L)).isInstanceOf(BufferOverflowException.class);
-            assertThatThrownBy(() -> seq.writeLong(1234, LITTLE_ENDIAN)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeLong(1234, LITTLE_ENDIAN))
+                    .isInstanceOf(BufferOverflowException.class);
         }
 
         @Test
-        @DisplayName("Writing a long when less than 8 bytes are remaining throws BufferOverflowException")
+        @DisplayName(
+                "Writing a long when less than 8 bytes are remaining throws"
+                        + " BufferOverflowException")
         void writeInsufficientDataThrows() {
             // Given a sequence with a limit where position == limit
             final var seq = sequence();
@@ -879,7 +1003,8 @@ void writeInsufficientDataThrows() {
             final var pos = 10 - Long.BYTES + 1; // A position that doesn't reserve enough bytes
             seq.skip(pos);
             for (int i = pos; i < 10; i++, seq.skip(1)) {
-                assertThatThrownBy(() -> seq.writeLong(1L)).isInstanceOf(BufferOverflowException.class);
+                assertThatThrownBy(() -> seq.writeLong(1L))
+                        .isInstanceOf(BufferOverflowException.class);
             }
         }
 
@@ -902,7 +1027,8 @@ void writeLittleEndian(long value) {
             final var pos = seq.position();
             seq.writeLong(value, LITTLE_ENDIAN);
             assertThat(seq.position()).isEqualTo(pos + Long.BYTES);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(asBytes(c -> c.putLong(value), LITTLE_ENDIAN));
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(asBytes(c -> c.putLong(value), LITTLE_ENDIAN));
         }
 
         @ParameterizedTest(name = "value={0}")
@@ -913,7 +1039,8 @@ void writeBigEndian(long value) {
             final var pos = seq.position();
             seq.writeLong(value, BIG_ENDIAN);
             assertThat(seq.position()).isEqualTo(pos + Long.BYTES);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(asBytes(c -> c.putLong(value), BIG_ENDIAN));
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(asBytes(c -> c.putLong(value), BIG_ENDIAN));
         }
 
         @Test
@@ -924,15 +1051,18 @@ void writeMixedEndian() {
             seq.writeLong(0x05060708090A0B0CL, LITTLE_ENDIAN);
             seq.writeLong(0x990A0B0C0D0E0F10L);
             seq.writeLong(0x9D0E0F1011121314L, LITTLE_ENDIAN);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(asBytes(c -> {
-                c.putLong(0x0102030405060708L);
-                c.order(LITTLE_ENDIAN);
-                c.putLong(0x05060708090A0B0CL);
-                c.order(BIG_ENDIAN);
-                c.putLong(0x990A0B0C0D0E0F10L);
-                c.order(LITTLE_ENDIAN);
-                c.putLong(0x9D0E0F1011121314L);
-            }));
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(
+                            asBytes(
+                                    c -> {
+                                        c.putLong(0x0102030405060708L);
+                                        c.order(LITTLE_ENDIAN);
+                                        c.putLong(0x05060708090A0B0CL);
+                                        c.order(BIG_ENDIAN);
+                                        c.putLong(0x990A0B0C0D0E0F10L);
+                                        c.order(LITTLE_ENDIAN);
+                                        c.putLong(0x9D0E0F1011121314L);
+                                    }));
         }
     }
 
@@ -943,7 +1073,8 @@ final class WriteFloatTest {
         @DisplayName("Writing a float to an eof sequence throws BufferOverflowException")
         void writeToEofSequenceThrows() {
             final var seq = eofSequence();
-            assertThatThrownBy(() -> seq.writeFloat(1.2f)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeFloat(1.2f))
+                    .isInstanceOf(BufferOverflowException.class);
         }
 
         @Test
@@ -954,13 +1085,17 @@ void writePastLimit() {
             seq.limit(5);
             // When we try to write a float, then we get a BufferOverflowException
             seq.skip(4); // Only 1 byte left, not enough
-            assertThatThrownBy(() -> seq.writeFloat(1.2f)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeFloat(1.2f))
+                    .isInstanceOf(BufferOverflowException.class);
             seq.skip(1); // No bytes left, not enough
-            assertThatThrownBy(() -> seq.writeFloat(1.2f)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeFloat(1.2f))
+                    .isInstanceOf(BufferOverflowException.class);
         }
 
         @Test
-        @DisplayName("Writing a float when less than 4 bytes are remaining throws BufferOverflowException")
+        @DisplayName(
+                "Writing a float when less than 4 bytes are remaining throws"
+                        + " BufferOverflowException")
         void writeInsufficientDataThrows() {
             // Given a sequence with a limit where position == limit
             final var seq = sequence();
@@ -969,12 +1104,25 @@ void writeInsufficientDataThrows() {
             final var pos = 10 - Float.BYTES + 1; // A position that doesn't reserve enough bytes
             seq.skip(pos);
             for (int i = pos; i < 10; i++, seq.skip(1)) {
-                assertThatThrownBy(() -> seq.writeFloat(1.2f)).isInstanceOf(BufferOverflowException.class);
+                assertThatThrownBy(() -> seq.writeFloat(1.2f))
+                        .isInstanceOf(BufferOverflowException.class);
             }
         }
 
         @ParameterizedTest(name = "value={0}")
-        @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Float.MAX_VALUE, Float.POSITIVE_INFINITY})
+        @ValueSource(
+                floats = {
+                    Float.NaN,
+                    Float.NEGATIVE_INFINITY,
+                    Float.MIN_VALUE,
+                    -8.2f,
+                    -1.3f,
+                    0,
+                    1.4f,
+                    8.5f,
+                    Float.MAX_VALUE,
+                    Float.POSITIVE_INFINITY
+                })
         @DisplayName("Writing a float")
         void write(float value) {
             final var seq = sequence();
@@ -985,25 +1133,51 @@ void write(float value) {
         }
 
         @ParameterizedTest(name = "value={0}")
-        @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Float.MAX_VALUE, Float.POSITIVE_INFINITY})
+        @ValueSource(
+                floats = {
+                    Float.NaN,
+                    Float.NEGATIVE_INFINITY,
+                    Float.MIN_VALUE,
+                    -8.2f,
+                    -1.3f,
+                    0,
+                    1.4f,
+                    8.5f,
+                    Float.MAX_VALUE,
+                    Float.POSITIVE_INFINITY
+                })
         @DisplayName("Writing a float in Little Endian")
         void writeLittleEndian(float value) {
             final var seq = sequence();
             final var pos = seq.position();
             seq.writeFloat(value, LITTLE_ENDIAN);
             assertThat(seq.position()).isEqualTo(pos + Float.BYTES);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(asBytes(c -> c.putFloat(value), LITTLE_ENDIAN));
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(asBytes(c -> c.putFloat(value), LITTLE_ENDIAN));
         }
 
         @ParameterizedTest(name = "value={0}")
-        @ValueSource(floats = {Float.NaN, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Float.MAX_VALUE, Float.POSITIVE_INFINITY})
+        @ValueSource(
+                floats = {
+                    Float.NaN,
+                    Float.NEGATIVE_INFINITY,
+                    Float.MIN_VALUE,
+                    -8.2f,
+                    -1.3f,
+                    0,
+                    1.4f,
+                    8.5f,
+                    Float.MAX_VALUE,
+                    Float.POSITIVE_INFINITY
+                })
         @DisplayName("Writing a float in Big Endian")
         void writeBigEndian(float value) {
             final var seq = sequence();
             final var pos = seq.position();
             seq.writeFloat(value, BIG_ENDIAN);
             assertThat(seq.position()).isEqualTo(pos + Float.BYTES);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(asBytes(c -> c.putFloat(value), BIG_ENDIAN));
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(asBytes(c -> c.putFloat(value), BIG_ENDIAN));
         }
 
         @Test
@@ -1014,15 +1188,18 @@ void writeMixedEndian() {
             seq.writeFloat(0x05060708, LITTLE_ENDIAN);
             seq.writeFloat(0x990A0B0C);
             seq.writeFloat(0x9D0E0F10, LITTLE_ENDIAN);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(asBytes(c -> {
-                c.putFloat(0x01020304);
-                c.order(LITTLE_ENDIAN);
-                c.putFloat(0x05060708);
-                c.order(BIG_ENDIAN);
-                c.putFloat(0x990A0B0C);
-                c.order(LITTLE_ENDIAN);
-                c.putFloat(0x9D0E0F10);
-            }));
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(
+                            asBytes(
+                                    c -> {
+                                        c.putFloat(0x01020304);
+                                        c.order(LITTLE_ENDIAN);
+                                        c.putFloat(0x05060708);
+                                        c.order(BIG_ENDIAN);
+                                        c.putFloat(0x990A0B0C);
+                                        c.order(LITTLE_ENDIAN);
+                                        c.putFloat(0x9D0E0F10);
+                                    }));
         }
     }
 
@@ -1033,7 +1210,8 @@ final class WriteDoubleTest {
         @DisplayName("Writing a double to an eof sequence throws BufferOverflowException")
         void writeToEofSequenceThrows() {
             final var seq = eofSequence();
-            assertThatThrownBy(() -> seq.writeDouble(1.3)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeDouble(1.3))
+                    .isInstanceOf(BufferOverflowException.class);
         }
 
         @Test
@@ -1044,13 +1222,17 @@ void writePastLimit() {
             seq.limit(5);
             // When we try to write a double, then we get a BufferOverflowException
             seq.skip(4); // Only 1 byte left, not enough
-            assertThatThrownBy(() -> seq.writeDouble(1.3)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeDouble(1.3))
+                    .isInstanceOf(BufferOverflowException.class);
             seq.skip(1); // No bytes left, not enough
-            assertThatThrownBy(() -> seq.writeDouble(1.3)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeDouble(1.3))
+                    .isInstanceOf(BufferOverflowException.class);
         }
 
         @Test
-        @DisplayName("Writing a double when less than 8 bytes are remaining throws BufferOverflowException")
+        @DisplayName(
+                "Writing a double when less than 8 bytes are remaining throws"
+                        + " BufferOverflowException")
         void writeInsufficientDataThrows() {
             // Given a sequence with a limit where position == limit
             final var seq = sequence();
@@ -1059,12 +1241,25 @@ void writeInsufficientDataThrows() {
             final var pos = 10 - Double.BYTES + 1; // A position that doesn't reserve enough bytes
             seq.skip(pos);
             for (int i = pos; i < 10; i++, seq.skip(1)) {
-                assertThatThrownBy(() -> seq.writeDouble(1.3)).isInstanceOf(BufferOverflowException.class);
+                assertThatThrownBy(() -> seq.writeDouble(1.3))
+                        .isInstanceOf(BufferOverflowException.class);
             }
         }
 
         @ParameterizedTest(name = "value={0}")
-        @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY})
+        @ValueSource(
+                doubles = {
+                    Double.NaN,
+                    Double.NEGATIVE_INFINITY,
+                    Double.MIN_VALUE,
+                    -8.2f,
+                    -1.3f,
+                    0,
+                    1.4f,
+                    8.5f,
+                    Double.MAX_VALUE,
+                    Double.POSITIVE_INFINITY
+                })
         @DisplayName("Writing a double")
         void write(double value) {
             final var seq = sequence();
@@ -1075,25 +1270,51 @@ void write(double value) {
         }
 
         @ParameterizedTest(name = "value={0}")
-        @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY})
+        @ValueSource(
+                doubles = {
+                    Double.NaN,
+                    Double.NEGATIVE_INFINITY,
+                    Double.MIN_VALUE,
+                    -8.2f,
+                    -1.3f,
+                    0,
+                    1.4f,
+                    8.5f,
+                    Double.MAX_VALUE,
+                    Double.POSITIVE_INFINITY
+                })
         @DisplayName("Writing a double in Little Endian")
         void writeLittleEndian(double value) {
             final var seq = sequence();
             final var pos = seq.position();
             seq.writeDouble(value, LITTLE_ENDIAN);
             assertThat(seq.position()).isEqualTo(pos + Double.BYTES);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(asBytes(c -> c.putDouble(value), LITTLE_ENDIAN));
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(asBytes(c -> c.putDouble(value), LITTLE_ENDIAN));
         }
 
         @ParameterizedTest(name = "value={0}")
-        @ValueSource(doubles = {Double.NaN, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -8.2f, -1.3f, 0, 1.4f, 8.5f, Double.MAX_VALUE, Double.POSITIVE_INFINITY})
+        @ValueSource(
+                doubles = {
+                    Double.NaN,
+                    Double.NEGATIVE_INFINITY,
+                    Double.MIN_VALUE,
+                    -8.2f,
+                    -1.3f,
+                    0,
+                    1.4f,
+                    8.5f,
+                    Double.MAX_VALUE,
+                    Double.POSITIVE_INFINITY
+                })
         @DisplayName("Writing a double in Big Endian")
         void writeBigEndian(double value) {
             final var seq = sequence();
             final var pos = seq.position();
             seq.writeDouble(value, BIG_ENDIAN);
             assertThat(seq.position()).isEqualTo(pos + Double.BYTES);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(asBytes(c -> c.putDouble(value), BIG_ENDIAN));
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(asBytes(c -> c.putDouble(value), BIG_ENDIAN));
         }
 
         @Test
@@ -1104,18 +1325,25 @@ void writeMixedEndian() {
             seq.writeDouble(0x990A0B0C0D0E0F10L, LITTLE_ENDIAN);
             seq.writeDouble(0x1112131415161718L);
             seq.writeDouble(0x191A1B1C1D1E1F20L, LITTLE_ENDIAN);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(asBytes(c -> {
-                c.putDouble(0x9102030405060708L);
-                c.order(LITTLE_ENDIAN);
-                c.putDouble(0x990A0B0C0D0E0F10L); // Same bytes but in little endian
-                c.order(BIG_ENDIAN);
-                c.putDouble(0x1112131415161718L);
-                c.order(LITTLE_ENDIAN);
-                c.putDouble(0x191A1B1C1D1E1F20L); // Same bytes but in little endian
-            }));
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(
+                            asBytes(
+                                    c -> {
+                                        c.putDouble(0x9102030405060708L);
+                                        c.order(LITTLE_ENDIAN);
+                                        // Same bytes but in little endian
+                                        c.putDouble(0x990A0B0C0D0E0F10L);
+                                        c.order(BIG_ENDIAN);
+                                        c.putDouble(0x1112131415161718L);
+                                        c.order(LITTLE_ENDIAN);
+                                        // Same bytes but in little endian
+                                        c.putDouble(0x191A1B1C1D1E1F20L);
+                                    }));
         }
     }
-    
+
     @Nested
     @DisplayName("writeVarInt()")
     final class WriteVarIntTest {
@@ -1124,7 +1352,8 @@ final class WriteVarIntTest {
         @DisplayName("Writing a varint to an eof sequence throws BufferOverflowException")
         void writeToEofSequenceThrows(final boolean zigZag) {
             final var seq = eofSequence();
-            assertThatThrownBy(() -> seq.writeVarInt(1234, zigZag)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeVarInt(1234, zigZag))
+                    .isInstanceOf(BufferOverflowException.class);
         }
 
         @Test
@@ -1135,13 +1364,17 @@ void writePastLimit() {
             seq.limit(5);
             seq.skip(5);
             // When we try to write a varint, then we get a BufferOverflowException
-            assertThatThrownBy(() -> seq.writeVarInt(1234, false)).isInstanceOf(BufferOverflowException.class);
-            assertThatThrownBy(() -> seq.writeVarInt(1234, true)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeVarInt(1234, false))
+                    .isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeVarInt(1234, true))
+                    .isInstanceOf(BufferOverflowException.class);
         }
 
         @ParameterizedTest
         @ValueSource(booleans = {false, true})
-        @DisplayName("Writing a varint when less than 4 bytes are remaining throws BufferOverflowException")
+        @DisplayName(
+                "Writing a varint when less than 4 bytes are remaining throws"
+                        + " BufferOverflowException")
         void writeInsufficientDataThrows(final boolean zigZag) {
             // Given a sequence with a limit where position == limit
             final var seq = sequence();
@@ -1149,12 +1382,11 @@ void writeInsufficientDataThrows(final boolean zigZag) {
             // When we try to write an int, then we get a BufferOverflowException
             final var pos = 10 - 1; // A position that doesn't reserve enough bytes
             seq.skip(pos);
-            assertThatThrownBy(() -> seq.writeVarInt(1234, zigZag)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeVarInt(1234, zigZag))
+                    .isInstanceOf(BufferOverflowException.class);
             // A subsequent skip() will also throw an exception now that we hit the end of buffer
-            assertThatThrownBy(() -> seq.skip(1)).isInstanceOfAny(
-                    BufferUnderflowException.class,
-                    BufferOverflowException.class
-            );
+            assertThatThrownBy(() -> seq.skip(1))
+                    .isInstanceOfAny(BufferUnderflowException.class, BufferOverflowException.class);
         }
 
         @Test
@@ -1163,7 +1395,8 @@ void write() {
             final var seq = sequence();
             final var pos = seq.position();
             seq.writeVarInt(300, false);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { (byte) 0b10101100, 0b00000010 });
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(new byte[] {(byte) 0b10101100, 0b00000010});
             assertThat(seq.position()).isEqualTo(pos + 2);
         }
 
@@ -1173,13 +1406,37 @@ void writeZigZag() {
             final var seq = sequence();
             final var pos = seq.position();
             seq.writeVarInt(-151, true);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { (byte) 0b10101101, 0b00000010 });
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(new byte[] {(byte) 0b10101101, 0b00000010});
             assertThat(seq.position()).isEqualTo(pos + 2);
         }
 
         @ParameterizedTest
-        @ValueSource(ints = {0, 1, 2, 3, 7, 8, 9, 1023, 1024, 1025, 65535, 65536, 0x7FFFFFFF,
-                -1, -2, -7, -1023, -1024, -65535, -65536, -0x7FFFFFFF, -0x80000000})
+        @ValueSource(
+                ints = {
+                    0,
+                    1,
+                    2,
+                    3,
+                    7,
+                    8,
+                    9,
+                    1023,
+                    1024,
+                    1025,
+                    65535,
+                    65536,
+                    0x7FFFFFFF,
+                    -1,
+                    -2,
+                    -7,
+                    -1023,
+                    -1024,
+                    -65535,
+                    -65536,
+                    -0x7FFFFFFF,
+                    -0x80000000
+                })
         @DisplayName("Varints must be encoded with less than 5 bytes")
         void checkVarIntLen(final int num) {
             final var seq = sequence();
@@ -1206,7 +1463,8 @@ final class WriteVarLongTest {
         @DisplayName("Writing a varlong to an eof sequence throws BufferOverflowException")
         void writeToEofSequenceThrows(final boolean zigZag) {
             final var seq = eofSequence();
-            assertThatThrownBy(() -> seq.writeVarLong(3882918382L, zigZag)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeVarLong(3882918382L, zigZag))
+                    .isInstanceOf(BufferOverflowException.class);
         }
 
         @Test
@@ -1217,13 +1475,17 @@ void writePastLimit() {
             seq.limit(5);
             seq.skip(5);
             // When we try to write a varlong, then we get a BufferOverflowException
-            assertThatThrownBy(() -> seq.writeVarLong(3882918382L, false)).isInstanceOf(BufferOverflowException.class);
-            assertThatThrownBy(() -> seq.writeVarLong(3882918382L, true)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeVarLong(3882918382L, false))
+                    .isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeVarLong(3882918382L, true))
+                    .isInstanceOf(BufferOverflowException.class);
         }
 
         @ParameterizedTest
         @ValueSource(booleans = {false, true})
-        @DisplayName("Writing a varlong when less than 4 bytes are remaining throws BufferOverflowException")
+        @DisplayName(
+                "Writing a varlong when less than 4 bytes are remaining throws"
+                        + " BufferOverflowException")
         void writeInsufficientDataThrows(final boolean zigZag) {
             // Given a sequence with a limit where position == limit
             final var seq = sequence();
@@ -1231,12 +1493,11 @@ void writeInsufficientDataThrows(final boolean zigZag) {
             // When we try to write an int, then we get a BufferOverflowException
             final var pos = 10 - 1; // A position that doesn't reserve enough bytes
             seq.skip(pos);
-            assertThatThrownBy(() -> seq.writeVarLong(3882918382L, zigZag)).isInstanceOf(BufferOverflowException.class);
+            assertThatThrownBy(() -> seq.writeVarLong(3882918382L, zigZag))
+                    .isInstanceOf(BufferOverflowException.class);
             // A subsequent skip() will also throw an exception now that we hit the end of buffer
-            assertThatThrownBy(() -> seq.skip(1)).isInstanceOfAny(
-                    BufferUnderflowException.class,
-                    BufferOverflowException.class
-            );
+            assertThatThrownBy(() -> seq.skip(1))
+                    .isInstanceOfAny(BufferUnderflowException.class, BufferOverflowException.class);
         }
 
         @Test
@@ -1245,7 +1506,8 @@ void write() {
             final var seq = sequence();
             final var pos = seq.position();
             seq.writeVarLong(300, false);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { (byte) 0b10101100, 0b00000010 });
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(new byte[] {(byte) 0b10101100, 0b00000010});
             assertThat(seq.position()).isEqualTo(pos + 2);
         }
 
@@ -1255,7 +1517,8 @@ void writeZigZag() {
             final var seq = sequence();
             final var pos = seq.position();
             seq.writeVarLong(-151, true);
-            assertThat(extractWrittenBytes(seq)).isEqualTo(new byte[] { (byte) 0b10101101, 0b00000010 });
+            assertThat(extractWrittenBytes(seq))
+                    .isEqualTo(new byte[] {(byte) 0b10101101, 0b00000010});
             assertThat(seq.position()).isEqualTo(pos + 2);
         }
     }
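
The writeVarInt()/writeVarLong() assertions above expect 300 to encode as {0b10101100, 0b00000010} and -151 with zig-zag as {0b10101101, 0b00000010}. The following is a minimal, illustrative encoder, not the PBJ implementation; it assumes the value is treated as an unsigned 32-bit quantity when zigZag is false, and the method name varInt is made up for this sketch.

import java.io.ByteArrayOutputStream;

// Illustrative protobuf-style varint encoder: 7 data bits per byte, least-significant
// group first, continuation bit (0x80) set on every byte except the last.
static byte[] varInt(final int value, final boolean zigZag) {
    // Zig-zag maps small negative numbers to small unsigned numbers: -151 -> 301
    long v = zigZag ? (((long) value << 1) ^ (value >> 31)) : Integer.toUnsignedLong(value);
    final ByteArrayOutputStream out = new ByteArrayOutputStream();
    while ((v & ~0x7FL) != 0) {
        out.write((int) ((v & 0x7F) | 0x80)); // low 7 bits plus continuation bit
        v >>>= 7;
    }
    out.write((int) v); // final byte, continuation bit clear
    return out.toByteArray();
}

// varInt(300, false) -> {(byte) 0b10101100, 0b00000010}
// varInt(-151, true) -> {(byte) 0b10101101, 0b00000010}, since zig-zag(-151) == 301

Under this sketch an int never needs more than five bytes (ceil(32 / 7)); the checkVarIntLen cases above assert a similarly small bound on PBJ's encoding.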
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTest.java
index 9bf3c503..00d66591 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTest.java
@@ -14,7 +14,7 @@ protected BufferedData allocate(final int size) {
 
     @NonNull
     @Override
-    protected  BufferedData wrap(final byte[] arr) {
+    protected BufferedData wrap(final byte[] arr) {
         return new BufferedData(ByteBuffer.wrap(arr));
     }
 
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTestBase.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTestBase.java
index f9c5d170..6e375c1f 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTestBase.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BufferedDataTestBase.java
@@ -90,9 +90,47 @@ void toStringWithSlice() {
     }
 
     @ParameterizedTest
-    @ValueSource(ints = {0, 1, 2, 4, 7, 8, 15, 16, 31, 32, 33, 127, 128, 512, 1000, 1024, 4000,
-                16384, 65535, 65536, 65537, 0xFFFFFF, 0x1000000, 0x1000001, 0x7FFFFFFF,
-                -1, -7, -8, -9, -127, -128, -129, -65535, -65536, -0xFFFFFF, -0x1000000, -0x1000001, -0x80000000})
+    @ValueSource(
+            ints = {
+                0,
+                1,
+                2,
+                4,
+                7,
+                8,
+                15,
+                16,
+                31,
+                32,
+                33,
+                127,
+                128,
+                512,
+                1000,
+                1024,
+                4000,
+                16384,
+                65535,
+                65536,
+                65537,
+                0xFFFFFF,
+                0x1000000,
+                0x1000001,
+                0x7FFFFFFF,
+                -1,
+                -7,
+                -8,
+                -9,
+                -127,
+                -128,
+                -129,
+                -65535,
+                -65536,
+                -0xFFFFFF,
+                -0x1000000,
+                -0x1000001,
+                -0x80000000
+            })
     @DisplayName("readVarInt() works with views")
     void sliceThenReadVarInt(final int num) {
         final var buf = allocate(100);
@@ -108,9 +146,47 @@ void sliceThenReadVarInt(final int num) {
     }
 
     @ParameterizedTest
-    @ValueSource(ints = {0, 1, 2, 4, 7, 8, 15, 16, 31, 32, 33, 127, 128, 512, 1000, 1024, 4000,
-            16384, 65535, 65536, 65537, 0xFFFFFF, 0x1000000, 0x1000001, 0x7FFFFFFF,
-            -1, -7, -8, -9, -127, -128, -129, -65535, -65536, -0xFFFFFF, -0x1000000, -0x1000001, -0x80000000})
+    @ValueSource(
+            ints = {
+                0,
+                1,
+                2,
+                4,
+                7,
+                8,
+                15,
+                16,
+                31,
+                32,
+                33,
+                127,
+                128,
+                512,
+                1000,
+                1024,
+                4000,
+                16384,
+                65535,
+                65536,
+                65537,
+                0xFFFFFF,
+                0x1000000,
+                0x1000001,
+                0x7FFFFFFF,
+                -1,
+                -7,
+                -8,
+                -9,
+                -127,
+                -128,
+                -129,
+                -65535,
+                -65536,
+                -0xFFFFFF,
+                -0x1000000,
+                -0x1000001,
+                -0x80000000
+            })
     @DisplayName("readVar() won't read beyond 10 bytes")
     void readVarFromLargeBuffer(final int num) {
         final var buf = allocate(100);
@@ -124,9 +200,42 @@ void readVarFromLargeBuffer(final int num) {
     }
 
     @ParameterizedTest
-    @ValueSource(longs = {0, 1, 7, 8, 9, 127, 128, 129, 1023, 1024, 1025, 65534, 65535, 65536,
-                0xFFFFFFFFL, 0x100000000L, 0x100000001L, 0xFFFFFFFFFFFFL, 0x1000000000000L, 0x1000000000001L,
-                -1, -7, -8, -9, -127, -128, -129, -65534, -65535, -65536, -0xFFFFFFFFL, -0x100000000L, -0x100000001L})
+    @ValueSource(
+            longs = {
+                0,
+                1,
+                7,
+                8,
+                9,
+                127,
+                128,
+                129,
+                1023,
+                1024,
+                1025,
+                65534,
+                65535,
+                65536,
+                0xFFFFFFFFL,
+                0x100000000L,
+                0x100000001L,
+                0xFFFFFFFFFFFFL,
+                0x1000000000000L,
+                0x1000000000001L,
+                -1,
+                -7,
+                -8,
+                -9,
+                -127,
+                -128,
+                -129,
+                -65534,
+                -65535,
+                -65536,
+                -0xFFFFFFFFL,
+                -0x100000000L,
+                -0x100000001L
+            })
     @DisplayName("readVarLong() works with views")
     void sliceThenReadVarLong(final long num) {
         final var buf = allocate(256);
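
The "readVar() won't read beyond 10 bytes" cases rely on the standard varint bound: 64 bits split into 7-bit groups fit in at most ten bytes. Below is a hedged decoding sketch that illustrates the bound; it is not PBJ's reader (PBJ decodes from BufferedData, and readVarLong here is a made-up helper reading from a plain InputStream).

import java.io.ByteArrayInputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;

// Illustrative varint64 decoder: accumulate 7 bits per byte until the continuation
// bit is clear; anything still continuing after 10 bytes cannot be a valid varint64.
static long readVarLong(final InputStream in) throws IOException {
    long result = 0;
    for (int i = 0; i < 10; i++) { // 10 * 7 bits covers all 64 bits
        final int b = in.read();
        if (b < 0) {
            throw new EOFException("stream ended inside a varint");
        }
        result |= (long) (b & 0x7F) << (7 * i); // least-significant group first
        if ((b & 0x80) == 0) {
            return result; // continuation bit clear: last byte
        }
    }
    throw new IOException("malformed varint: longer than 10 bytes");
}

// Example: readVarLong(new ByteArrayInputStream(new byte[] {(byte) 0b10101100, 0b00000010})) == 300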
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/ByteArrayBufferedDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/ByteArrayBufferedDataTest.java
index 2e1c28ee..472d36ed 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/ByteArrayBufferedDataTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/ByteArrayBufferedDataTest.java
@@ -14,7 +14,7 @@ protected BufferedData allocate(final int size) {
 
     @NonNull
     @Override
-    protected  BufferedData wrap(final byte[] arr) {
+    protected BufferedData wrap(final byte[] arr) {
         return new ByteArrayBufferedData(ByteBuffer.wrap(arr));
     }
 
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BytesTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BytesTest.java
index 6625b1c2..96c048b9 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BytesTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/BytesTest.java
@@ -79,7 +79,7 @@ void toStringWorks1() {
 
     @Test
     void testReplicate() {
-        byte[] arr = new byte[] { 0, 1, 2 };
+        byte[] arr = new byte[] {0, 1, 2};
 
         // Only wrap the last two elements:
         Bytes bytes = Bytes.wrap(arr, 1, 2);
@@ -94,7 +94,8 @@ void testReplicate() {
         arr[1] = 90;
         arr[2] = 80;
 
-        // First check if the original wrapped Bytes object sees the changes and keeps its length intact:
+        // First check if the original wrapped Bytes object sees the changes and keeps its length
+        // intact:
         assertEquals(2, bytes.length());
         assertEquals(90, bytes.getByte(0));
         assertEquals(80, bytes.getByte(1));
@@ -114,9 +115,10 @@ final class ByteWrappingTest {
         static Stream<byte[]> byteArraysTestCases() {
             return Stream.of(
                     new byte[0],
-                    new byte[] { 0 },
-                    new byte[] { Byte.MIN_VALUE, -100, -66, -7, -1, 0, 1, 9, 12, 51, 101, Byte.MAX_VALUE }
-            );
+                    new byte[] {0},
+                    new byte[] {
+                        Byte.MIN_VALUE, -100, -66, -7, -1, 0, 1, 9, 12, 51, 101, Byte.MAX_VALUE
+                    });
         }
 
         @Test
@@ -130,7 +132,7 @@ void nullArrayThrows() {
         @DisplayName("Getting a byte with a negative offset throws")
         void getByteWithNegativeOffsetThrows() {
             // Given a Bytes instance
-            final RandomAccessData bytes = Bytes.wrap(new byte[] { 1, 2, 3, 4 });
+            final RandomAccessData bytes = Bytes.wrap(new byte[] {1, 2, 3, 4});
             // When getting a byte with a negative offset
             // Then an IndexOutOfBoundsException is thrown
             assertThrows(IndexOutOfBoundsException.class, () -> bytes.getByte(-1));
@@ -168,17 +170,18 @@ void toByteArrayNon0() {
             assertNotEquals(byteArray, bytes.toByteArray(0, 5));
         }
 
-//        @Test
-//        @DisplayName("Getting a byte with to large of an offset throws")
-//        void getByteWithLargeOffsetThrows() {
-//            // Given a Bytes instance
-//            final RandomAccessData bytes = Bytes.wrap(new byte[] { 1, 2, 3, 4 });
-//            // When getting a byte from an offset that is too large
-//            // Then an IndexOutOfBoundsException is thrown
-//            assertThrows(IndexOutOfBoundsException.class, () -> bytes.getByte(4));
-//            assertThrows(IndexOutOfBoundsException.class, () -> bytes.getByte(5));
-//            assertThrows(IndexOutOfBoundsException.class, () -> bytes.getByte(Integer.MAX_VALUE));
-//        }
+        //        @Test
+        //        @DisplayName("Getting a byte with too large of an offset throws")
+        //        void getByteWithLargeOffsetThrows() {
+        //            // Given a Bytes instance
+        //            final RandomAccessData bytes = Bytes.wrap(new byte[] { 1, 2, 3, 4 });
+        //            // When getting a byte from an offset that is too large
+        //            // Then an IndexOutOfBoundsException is thrown
+        //            assertThrows(IndexOutOfBoundsException.class, () -> bytes.getByte(4));
+        //            assertThrows(IndexOutOfBoundsException.class, () -> bytes.getByte(5));
+        //            assertThrows(IndexOutOfBoundsException.class, () ->
+        // bytes.getByte(Integer.MAX_VALUE));
+        //        }
 
         @ParameterizedTest
         @MethodSource("byteArraysTestCases")
@@ -219,7 +222,7 @@ void equality(final byte[] value) {
         void notEqual(final byte[] value) {
             // Given two byte arrays with different bytes, when wrapped
             final RandomAccessData bytes1 = Bytes.wrap(value);
-            final RandomAccessData bytes2 = Bytes.wrap(new byte[]{ 1, 39, 28, 92 });
+            final RandomAccessData bytes2 = Bytes.wrap(new byte[] {1, 39, 28, 92});
             // Then they have different lengths
             assertNotEquals(bytes1.length(), bytes2.length());
             // And they are not equal
@@ -239,8 +242,7 @@ static Stream<String> stringTestCases() {
             return Stream.of(
                     "",
                     "This is a test of the emergency broadcast system",
-                    "Some crazy unicode characters here 🤪"
-            );
+                    "Some crazy unicode characters here 🤪");
         }
 
         @Test
@@ -257,7 +259,8 @@ void wrappedStringsAreUsed(final String value) {
             // Given a String, when it is wrapped
             final RandomAccessData bytes1 = Bytes.wrap(value);
 
-            // Then the length of the Bytes matches (when interpreted as UTF-8) and each byte matches
+            // Then the length of the Bytes matches (when interpreted as UTF-8) and each byte
+            // matches
             final var expected = value.getBytes(StandardCharsets.UTF_8);
             assertEquals(expected.length, bytes1.length());
             for (int i = 0; i < expected.length; i++) {
@@ -299,12 +302,18 @@ void notEqual(final String value) {
             assertNotEquals(bytes1.hashCode(), bytes2.hashCode());
         }
     }
+
     @Test
     @DisplayName("Get Unsigned Bytes")
     void getUnsignedBytes() {
-        // Given a Bytes instance with bytes that are within the range of signed bytes and some that are
+        // Given a Bytes instance with bytes that are within the range of signed bytes and some
+        // that are
         // outside the range of signed bytes but within the range of unsigned bytes
-        final RandomAccessData bytes = Bytes.wrap(new byte[]{0b0000_0000, 0b0000_0001, (byte) 0b1000_0000, (byte) 0b1111_1111});
+        final RandomAccessData bytes =
+                Bytes.wrap(
+                        new byte[] {
+                            0b0000_0000, 0b0000_0001, (byte) 0b1000_0000, (byte) 0b1111_1111
+                        });
         // Then reading them as unsigned bytes returns the expected values
         assertEquals(0, bytes.getUnsignedByte(0));
         assertEquals(1, bytes.getUnsignedByte(1));
@@ -352,6 +361,7 @@ void writeToOutputStreamNo0OffsPartial() throws IOException {
         byte[] comp = {0, 1, 2, 3, 4};
         assertArrayEquals(comp, res);
     }
+
     @Test
     @DisplayName("Write to OutputStream")
     void writeToWritableSequentialData() throws IOException {
@@ -491,7 +501,8 @@ void updateSignatureBoundsChecks() throws InvalidKeyException {
         final Bytes bytes = Bytes.wrap(byteArray);
 
         final Signature signature = mockSignature();
-        assertThrows(IndexOutOfBoundsException.class, () -> bytes.updateSignature(signature, 3, 10));
+        assertThrows(
+                IndexOutOfBoundsException.class, () -> bytes.updateSignature(signature, 3, 10));
         assertThrows(IndexOutOfBoundsException.class, () -> bytes.updateSignature(signature, 0, 6));
         assertThrows(IndexOutOfBoundsException.class, () -> bytes.updateSignature(signature, 1, 5));
         assertThrows(IllegalArgumentException.class, () -> bytes.updateSignature(signature, 0, -5));
@@ -544,7 +555,8 @@ void verifySignatureBoundsChecks() throws InvalidKeyException {
         final Bytes bytes = Bytes.wrap(byteArray);
 
         final Signature signature = mockSignature();
-        assertThrows(IndexOutOfBoundsException.class, () -> bytes.verifySignature(signature, 3, 10));
+        assertThrows(
+                IndexOutOfBoundsException.class, () -> bytes.verifySignature(signature, 3, 10));
         assertThrows(IndexOutOfBoundsException.class, () -> bytes.verifySignature(signature, 0, 6));
         assertThrows(IndexOutOfBoundsException.class, () -> bytes.verifySignature(signature, 1, 5));
         assertThrows(IllegalArgumentException.class, () -> bytes.verifySignature(signature, 0, -5));
@@ -556,8 +568,9 @@ void verifySignatureBoundsChecks() throws InvalidKeyException {
 
     @Test
     @DisplayName("Tests the signature verification without a mock")
-    void realSignatureTest() throws NoSuchAlgorithmException, InvalidKeyException, SignatureException {
-        final Bytes bytes = Bytes.wrap(new byte[]{1, 2, 3, 4, 5});
+    void realSignatureTest()
+            throws NoSuchAlgorithmException, InvalidKeyException, SignatureException {
+        final Bytes bytes = Bytes.wrap(new byte[] {1, 2, 3, 4, 5});
         final KeyPair keyPair = KeyPairGenerator.getInstance("RSA").generateKeyPair();
         // sign the data
         final Signature signer = Signature.getInstance("SHA256withRSA");
@@ -572,157 +585,163 @@ void realSignatureTest() throws NoSuchAlgorithmException, InvalidKeyException, S
         // test a bad signature
         final Signature verifier2 = Signature.getInstance("SHA256withRSA");
         verifier2.initVerify(keyPair.getPublic());
-        Bytes.wrap(new byte[]{123, 1, 2, 3}).updateSignature(verifier2);
+        Bytes.wrap(new byte[] {123, 1, 2, 3}).updateSignature(verifier2);
         assertFalse(signature.verifySignature(verifier2));
     }
 
-    // asUtf8String throws with null (no offset here? That's wierd. Should have offset, or we should have non-offset
+    // asUtf8String throws with null (no offset here? That's weird. Should have offset, or we should
+    // have non-offset
     // versions of everything else Or at least "getBytes").
 
     // matches prefix....
 
-//
-//
-//
-//
-//    static Stream<Byte> bytesTestCases() {
-//        return Stream.of(Byte.MIN_VALUE,-100,-66,-7,-1,0,1,9,51,101,Byte.MAX_VALUE).map(Number::byteValue);
-//    }
-//
-//    @ParameterizedTest
-//    @MethodSource("bytesTestCases")
-//    void byteTest(Byte value) {
-//        final int length = Byte.BYTES;
-//        DataBuffer db = DataBuffer.allocate(length, false);
-//        db.writeByte(value);
-//        db.reset();
-//        final Bytes bytes = db.readBytes(length);
-//        assertEquals(value, bytes.getByte(0));
-//    }
-//
-//    static Stream<Integer> unsignedBytesTestCases() {
-//        return Stream.of(0,1,9,51,101,127,128,255).map(Number::intValue);
-//    }
-//
-//    @ParameterizedTest
-//    @MethodSource("unsignedBytesTestCases")
-//    void unsignedByteTest(Integer value) {
-//        final int length = Byte.BYTES;
-//        DataBuffer db = DataBuffer.allocate(length, false);
-//        db.writeUnsignedByte(value);
-//        db.reset();
-//        final Bytes bytes = db.readBytes(length);
-//        assertEquals(value, bytes.getUnsignedByte(0));
-//    }
-//
-//    static Stream<Integer> intsTestCases() {
-//        return Stream.of(Integer.MIN_VALUE,-100,-66,-7,-1,0,1,9,51,101,Integer.MAX_VALUE).map(Number::intValue);
-//    }
-//
-//    @ParameterizedTest
-//    @MethodSource("intsTestCases")
-//    void intTest(Integer value) {
-//        final int length = Integer.BYTES*2;
-//        DataBuffer db = DataBuffer.allocate(length, false);
-//        db.writeInt(value);
-//        db.writeInt(value, ByteOrder.LITTLE_ENDIAN);
-//        db.reset();
-//        final Bytes bytes = db.readBytes(length);
-//        assertEquals(value, bytes.getInt(0));
-//        assertEquals(value, bytes.getInt(Integer.BYTES, ByteOrder.LITTLE_ENDIAN));
-//    }
-//
-//    @ParameterizedTest
-//    @MethodSource("intsTestCases")
-//    void varIntTest(Integer value) {
-//        DataBuffer db = DataBuffer.allocate(20, false);
-//        db.writeVarInt(value, false);
-//        final int varInt1Size = (int)db.position();
-//        db.writeVarInt(value, true);
-//        db.flip();
-//        final Bytes bytes = db.readBytes((int)db.remaining());
-//        assertEquals(value, bytes.getVarInt(0, false));
-//        assertEquals(value, bytes.getVarInt(varInt1Size, true));
-//    }
-//
-//    static Stream<Long> unsignedIntsTestCases() {
-//        return Stream.of(0,1,9,51,127,Integer.MAX_VALUE*2L).map(Number::longValue);
-//    }
-//
-//    @ParameterizedTest
-//    @MethodSource("unsignedIntsTestCases")
-//    void unsignedIntTest(Long value) {
-//        final int length = Integer.BYTES*2;
-//        DataBuffer db = DataBuffer.allocate(length, false);
-//        db.writeUnsignedInt(value);
-//        db.writeUnsignedInt(value, ByteOrder.LITTLE_ENDIAN);
-//        db.reset();
-//        final Bytes bytes = db.readBytes(length);
-//        assertEquals(value, bytes.getUnsignedInt(0));
-//        assertEquals(value, bytes.getUnsignedInt(Integer.BYTES, ByteOrder.LITTLE_ENDIAN));
-//    }
-//
-//    static Stream<Long> longsTestCases() {
-//        return Stream.of(Long.MIN_VALUE, Integer.MIN_VALUE-1L,-100,-66,-7,-1,0,1,9,51,101,Integer.MAX_VALUE+1L,Long.MAX_VALUE).map(Number::longValue);
-//    }
-//    @ParameterizedTest
-//    @MethodSource("longsTestCases")
-//    void longTest(Long value) {
-//        final int length = Long.BYTES*2;
-//        DataBuffer db = DataBuffer.allocate(length, false);
-//        db.writeLong(value);
-//        db.writeLong(value, ByteOrder.LITTLE_ENDIAN);
-//        db.reset();
-//        final Bytes bytes = db.readBytes(length);
-//        assertEquals(value, bytes.getLong(0));
-//        assertEquals(value, bytes.getLong(Long.BYTES, ByteOrder.LITTLE_ENDIAN));
-//    }
-//
-//    @ParameterizedTest
-//    @MethodSource("longsTestCases")
-//    void varLongTest(Long value) {
-//        DataBuffer db = DataBuffer.allocate(20, false);
-//        db.writeVarLong(value, false);
-//        final int varInt1Size = (int)db.position();
-//        db.writeVarLong(value, true);
-//        db.flip();
-//        final Bytes bytes = db.readBytes((int)db.remaining());
-//        assertEquals(value, bytes.getVarLong(0, false));
-//        assertEquals(value, bytes.getVarLong(varInt1Size, true));
-//    }
-//
-//    static Stream<Float> floatsTestCases() {
-//        return Stream.of(Float.MIN_VALUE, Integer.MIN_VALUE-1L,-100,-66,-7,-1,0,1,9,51,101,Integer.MAX_VALUE+1L,Float.MAX_VALUE).map(Number::floatValue);
-//    }
-//    @ParameterizedTest
-//    @MethodSource("floatsTestCases")
-//    void floatTest(Float value) {
-//        final int length = Float.BYTES*2;
-//        DataBuffer db = DataBuffer.allocate(length, false);
-//        db.writeFloat(value);
-//        db.writeFloat(value, ByteOrder.LITTLE_ENDIAN);
-//        db.reset();
-//        final Bytes bytes = db.readBytes(length);
-//        assertEquals(value, bytes.getFloat(0));
-//        assertEquals(value, bytes.getFloat(Float.BYTES, ByteOrder.LITTLE_ENDIAN));
-//    }
-//
-//    static Stream<Double> doublesTestCases() {
-//        return Stream.of(Double.MIN_VALUE, Integer.MIN_VALUE-1L,-100,-66,-7,-1,0,1,9,51,101,Integer.MAX_VALUE+1L,Double.MAX_VALUE).map(Number::doubleValue);
-//    }
-//
-//    @ParameterizedTest
-//    @MethodSource("doublesTestCases")
-//    void doubleTest(Double value) {
-//        final int length = Double.BYTES * 2;
-//        DataBuffer db = DataBuffer.allocate(length, false);
-//        db.writeDouble(value);
-//        db.writeDouble(value, ByteOrder.LITTLE_ENDIAN);
-//        db.reset();
-//        final Bytes bytes = db.readBytes(length);
-//        assertEquals(value, bytes.getDouble(0));
-//        assertEquals(value, bytes.getDouble(Double.BYTES, ByteOrder.LITTLE_ENDIAN));
-//    }
+    //
+    //
+    //
+    //
+    //    static Stream<Byte> bytesTestCases() {
+    //        return
+    // Stream.of(Byte.MIN_VALUE,-100,-66,-7,-1,0,1,9,51,101,Byte.MAX_VALUE).map(Number::byteValue);
+    //    }
+    //
+    //    @ParameterizedTest
+    //    @MethodSource("bytesTestCases")
+    //    void byteTest(Byte value) {
+    //        final int length = Byte.BYTES;
+    //        DataBuffer db = DataBuffer.allocate(length, false);
+    //        db.writeByte(value);
+    //        db.reset();
+    //        final Bytes bytes = db.readBytes(length);
+    //        assertEquals(value, bytes.getByte(0));
+    //    }
+    //
+    //    static Stream<Integer> unsignedBytesTestCases() {
+    //        return Stream.of(0,1,9,51,101,127,128,255).map(Number::intValue);
+    //    }
+    //
+    //    @ParameterizedTest
+    //    @MethodSource("unsignedBytesTestCases")
+    //    void unsignedByteTest(Integer value) {
+    //        final int length = Byte.BYTES;
+    //        DataBuffer db = DataBuffer.allocate(length, false);
+    //        db.writeUnsignedByte(value);
+    //        db.reset();
+    //        final Bytes bytes = db.readBytes(length);
+    //        assertEquals(value, bytes.getUnsignedByte(0));
+    //    }
+    //
+    //    static Stream<Integer> intsTestCases() {
+    //        return
+    // Stream.of(Integer.MIN_VALUE,-100,-66,-7,-1,0,1,9,51,101,Integer.MAX_VALUE).map(Number::intValue);
+    //    }
+    //
+    //    @ParameterizedTest
+    //    @MethodSource("intsTestCases")
+    //    void intTest(Integer value) {
+    //        final int length = Integer.BYTES*2;
+    //        DataBuffer db = DataBuffer.allocate(length, false);
+    //        db.writeInt(value);
+    //        db.writeInt(value, ByteOrder.LITTLE_ENDIAN);
+    //        db.reset();
+    //        final Bytes bytes = db.readBytes(length);
+    //        assertEquals(value, bytes.getInt(0));
+    //        assertEquals(value, bytes.getInt(Integer.BYTES, ByteOrder.LITTLE_ENDIAN));
+    //    }
+    //
+    //    @ParameterizedTest
+    //    @MethodSource("intsTestCases")
+    //    void varIntTest(Integer value) {
+    //        DataBuffer db = DataBuffer.allocate(20, false);
+    //        db.writeVarInt(value, false);
+    //        final int varInt1Size = (int)db.position();
+    //        db.writeVarInt(value, true);
+    //        db.flip();
+    //        final Bytes bytes = db.readBytes((int)db.remaining());
+    //        assertEquals(value, bytes.getVarInt(0, false));
+    //        assertEquals(value, bytes.getVarInt(varInt1Size, true));
+    //    }
+    //
+    //    static Stream<Long> unsignedIntsTestCases() {
+    //        return Stream.of(0,1,9,51,127,Integer.MAX_VALUE*2L).map(Number::longValue);
+    //    }
+    //
+    //    @ParameterizedTest
+    //    @MethodSource("unsignedIntsTestCases")
+    //    void unsignedIntTest(Long value) {
+    //        final int length = Integer.BYTES*2;
+    //        DataBuffer db = DataBuffer.allocate(length, false);
+    //        db.writeUnsignedInt(value);
+    //        db.writeUnsignedInt(value, ByteOrder.LITTLE_ENDIAN);
+    //        db.reset();
+    //        final Bytes bytes = db.readBytes(length);
+    //        assertEquals(value, bytes.getUnsignedInt(0));
+    //        assertEquals(value, bytes.getUnsignedInt(Integer.BYTES, ByteOrder.LITTLE_ENDIAN));
+    //    }
+    //
+    //    static Stream<Long> longsTestCases() {
+    //        return Stream.of(Long.MIN_VALUE,
+    // Integer.MIN_VALUE-1L,-100,-66,-7,-1,0,1,9,51,101,Integer.MAX_VALUE+1L,Long.MAX_VALUE).map(Number::longValue);
+    //    }
+    //    @ParameterizedTest
+    //    @MethodSource("longsTestCases")
+    //    void longTest(Long value) {
+    //        final int length = Long.BYTES*2;
+    //        DataBuffer db = DataBuffer.allocate(length, false);
+    //        db.writeLong(value);
+    //        db.writeLong(value, ByteOrder.LITTLE_ENDIAN);
+    //        db.reset();
+    //        final Bytes bytes = db.readBytes(length);
+    //        assertEquals(value, bytes.getLong(0));
+    //        assertEquals(value, bytes.getLong(Long.BYTES, ByteOrder.LITTLE_ENDIAN));
+    //    }
+    //
+    //    @ParameterizedTest
+    //    @MethodSource("longsTestCases")
+    //    void varLongTest(Long value) {
+    //        DataBuffer db = DataBuffer.allocate(20, false);
+    //        db.writeVarLong(value, false);
+    //        final int varInt1Size = (int)db.position();
+    //        db.writeVarLong(value, true);
+    //        db.flip();
+    //        final Bytes bytes = db.readBytes((int)db.remaining());
+    //        assertEquals(value, bytes.getVarLong(0, false));
+    //        assertEquals(value, bytes.getVarLong(varInt1Size, true));
+    //    }
+    //
+    //    static Stream<Float> floatsTestCases() {
+    //        return Stream.of(Float.MIN_VALUE,
+    // Integer.MIN_VALUE-1L,-100,-66,-7,-1,0,1,9,51,101,Integer.MAX_VALUE+1L,Float.MAX_VALUE).map(Number::floatValue);
+    //    }
+    //    @ParameterizedTest
+    //    @MethodSource("floatsTestCases")
+    //    void floatTest(Float value) {
+    //        final int length = Float.BYTES*2;
+    //        DataBuffer db = DataBuffer.allocate(length, false);
+    //        db.writeFloat(value);
+    //        db.writeFloat(value, ByteOrder.LITTLE_ENDIAN);
+    //        db.reset();
+    //        final Bytes bytes = db.readBytes(length);
+    //        assertEquals(value, bytes.getFloat(0));
+    //        assertEquals(value, bytes.getFloat(Float.BYTES, ByteOrder.LITTLE_ENDIAN));
+    //    }
+    //
+    //    static Stream<Double> doublesTestCases() {
+    //        return Stream.of(Double.MIN_VALUE,
+    // Integer.MIN_VALUE-1L,-100,-66,-7,-1,0,1,9,51,101,Integer.MAX_VALUE+1L,Double.MAX_VALUE).map(Number::doubleValue);
+    //    }
+    //
+    //    @ParameterizedTest
+    //    @MethodSource("doublesTestCases")
+    //    void doubleTest(Double value) {
+    //        final int length = Double.BYTES * 2;
+    //        DataBuffer db = DataBuffer.allocate(length, false);
+    //        db.writeDouble(value);
+    //        db.writeDouble(value, ByteOrder.LITTLE_ENDIAN);
+    //        db.reset();
+    //        final Bytes bytes = db.readBytes(length);
+    //        assertEquals(value, bytes.getDouble(0));
+    //        assertEquals(value, bytes.getDouble(Double.BYTES, ByteOrder.LITTLE_ENDIAN));
+    //    }
 
     @Test
     void malformedVarTest() {
@@ -761,7 +780,9 @@ protected RandomAccessData randomAccessData(@NonNull byte[] bytes) {
     }
 
     @ParameterizedTest
-    @CsvSource(textBlock = """
+    @CsvSource(
+            textBlock =
+                    """
             "", "", 0
             "a", "", 1
             "", "a", -1
@@ -790,18 +811,17 @@ void compareByUnsignedBytes(byte[] arr1, byte[] arr2, int expected) {
     static Stream<Arguments> compareByUnsignedBytes() {
         return Stream.of(
                 Arguments.of(new byte[0], new byte[0], 0),
-                Arguments.of(new byte[0], new byte[]{1}, -1),
-                Arguments.of(new byte[]{1}, new byte[0], 1),
-                Arguments.of(new byte[]{1}, new byte[]{2}, -1),
-                Arguments.of(new byte[]{2}, new byte[]{1}, 1),
-                Arguments.of(new byte[]{-1}, new byte[]{2}, 253),
-                Arguments.of(new byte[]{2}, new byte[]{-1}, -253),
-                Arguments.of(new byte[]{-1}, new byte[]{-2}, 1),
-                Arguments.of(new byte[]{-2}, new byte[]{-1}, -1),
-                Arguments.of(new byte[]{-2, -1}, new byte[]{-2, -1}, 0),
-                Arguments.of(new byte[]{-2}, new byte[]{-2, -1}, -1),
-                Arguments.of(new byte[]{-2, -1}, new byte[]{-1, -2}, -1)
-        );
+                Arguments.of(new byte[0], new byte[] {1}, -1),
+                Arguments.of(new byte[] {1}, new byte[0], 1),
+                Arguments.of(new byte[] {1}, new byte[] {2}, -1),
+                Arguments.of(new byte[] {2}, new byte[] {1}, 1),
+                Arguments.of(new byte[] {-1}, new byte[] {2}, 253),
+                Arguments.of(new byte[] {2}, new byte[] {-1}, -253),
+                Arguments.of(new byte[] {-1}, new byte[] {-2}, 1),
+                Arguments.of(new byte[] {-2}, new byte[] {-1}, -1),
+                Arguments.of(new byte[] {-2, -1}, new byte[] {-2, -1}, 0),
+                Arguments.of(new byte[] {-2}, new byte[] {-2, -1}, -1),
+                Arguments.of(new byte[] {-2, -1}, new byte[] {-1, -2}, -1));
     }
 
     @ParameterizedTest
@@ -816,68 +836,67 @@ void compareBySignedBytes(byte[] arr1, byte[] arr2, int expected) {
     static Stream<Arguments> compareBySignedBytes() {
         return Stream.of(
                 Arguments.of(new byte[0], new byte[0], 0),
-                Arguments.of(new byte[0], new byte[]{1}, -1),
-                Arguments.of(new byte[]{1}, new byte[0], 1),
-                Arguments.of(new byte[]{1}, new byte[]{2}, -1),
-                Arguments.of(new byte[]{2}, new byte[]{1}, 1),
-                Arguments.of(new byte[]{-1}, new byte[]{2}, -3),
-                Arguments.of(new byte[]{2}, new byte[]{-1}, 3),
-                Arguments.of(new byte[]{-1}, new byte[]{-2}, 1),
-                Arguments.of(new byte[]{-2}, new byte[]{-1}, -1),
-                Arguments.of(new byte[]{-2, -1}, new byte[]{-2, -1}, 0),
-                Arguments.of(new byte[]{-2}, new byte[]{-2, -1}, -1),
-                Arguments.of(new byte[]{-2, -1}, new byte[]{-1, -2}, -1)
-        );
+                Arguments.of(new byte[0], new byte[] {1}, -1),
+                Arguments.of(new byte[] {1}, new byte[0], 1),
+                Arguments.of(new byte[] {1}, new byte[] {2}, -1),
+                Arguments.of(new byte[] {2}, new byte[] {1}, 1),
+                Arguments.of(new byte[] {-1}, new byte[] {2}, -3),
+                Arguments.of(new byte[] {2}, new byte[] {-1}, 3),
+                Arguments.of(new byte[] {-1}, new byte[] {-2}, 1),
+                Arguments.of(new byte[] {-2}, new byte[] {-1}, -1),
+                Arguments.of(new byte[] {-2, -1}, new byte[] {-2, -1}, 0),
+                Arguments.of(new byte[] {-2}, new byte[] {-2, -1}, -1),
+                Arguments.of(new byte[] {-2, -1}, new byte[] {-1, -2}, -1));
     }
 
     @Test
     @DisplayName("Appends two Bytes objects")
     void appendBytes() {
-        Bytes b1 = Bytes.wrap(new byte[]{0, 1, 2, 3});
-        Bytes b2 = Bytes.wrap(new byte[]{4, 5, 6});
+        Bytes b1 = Bytes.wrap(new byte[] {0, 1, 2, 3});
+        Bytes b2 = Bytes.wrap(new byte[] {4, 5, 6});
         Bytes appended = b1.append(b2);
         byte[] res = new byte[7];
         appended.getBytes(0, res);
-        assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 5, 6}, res);
+        assertArrayEquals(new byte[] {0, 1, 2, 3, 4, 5, 6}, res);
     }
 
     @Test
     @DisplayName("Appends two Bytes objects, one empty")
     void appendEmptyBytes() {
-        Bytes b1 = Bytes.wrap(new byte[]{0, 1, 2, 3});
+        Bytes b1 = Bytes.wrap(new byte[] {0, 1, 2, 3});
         Bytes appended = b1.append(Bytes.EMPTY);
         byte[] res = new byte[4];
         appended.getBytes(0, res);
-        assertArrayEquals(new byte[]{0, 1, 2, 3}, res);
+        assertArrayEquals(new byte[] {0, 1, 2, 3}, res);
     }
 
     @Test
     @DisplayName("Appends RandomAccessData")
     void appendRandomAccessData() {
-        Bytes b1 = Bytes.wrap(new byte[]{0, 1, 2, 3});
-        RandomAccessData rad = BufferedData.wrap(new byte[]{4, 5, 6});
+        Bytes b1 = Bytes.wrap(new byte[] {0, 1, 2, 3});
+        RandomAccessData rad = BufferedData.wrap(new byte[] {4, 5, 6});
         Bytes appended = b1.append(rad);
         byte[] res = new byte[7];
         appended.getBytes(0, res);
-        assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 5, 6}, res);
+        assertArrayEquals(new byte[] {0, 1, 2, 3, 4, 5, 6}, res);
     }
 
     @Test
     @DisplayName("Changed toString")
     void changedToString() {
-        Bytes b1 = Bytes.wrap(new byte[]{0, 0, (byte)0xFF});
+        Bytes b1 = Bytes.wrap(new byte[] {0, 0, (byte) 0xFF});
         assertEquals("0000ff", b1.toString());
     }
 
     @Test
     @DisplayName("Changed toString2")
     void changedToString2() {
-        Bytes b1 = Bytes.wrap(new byte[]{(byte)0x0f, 0, (byte)0x0a});
+        Bytes b1 = Bytes.wrap(new byte[] {(byte) 0x0f, 0, (byte) 0x0a});
         assertEquals("0f000a", b1.toString());
     }
 
     @ParameterizedTest
-    @ValueSource(strings = { "", "a", "ab", "abc", "abc123", "✅" })
+    @ValueSource(strings = {"", "a", "ab", "abc", "abc123", "✅"})
     @DisplayName("Overridden asUtf8String")
     void asUtf8StringTest(final String value) {
         final Bytes bytes = Bytes.wrap(value.getBytes(StandardCharsets.UTF_8));
@@ -906,27 +925,22 @@ void writeToByteBufferTest() {
         final ByteBuffer bb = ByteBuffer.allocate(1);
 
         testWriteToFromOffset(
-                bb,
-                (b, d) -> b.writeTo(d, 1, 1),
-                ByteBuffer::position,
-                d -> d.get(0));
+                bb, (b, d) -> b.writeTo(d, 1, 1), ByteBuffer::position, d -> d.get(0));
     }
 
     @Test
     void writeToOutputStreamTest() {
         final List<Integer> data = new ArrayList<>();
-        final OutputStream os = new OutputStream() {
-            @Override
-            public void write(int b) throws IOException {
-                data.add(b);
-            }
-        };
+        final OutputStream os =
+                new OutputStream() {
+                    @Override
+                    public void write(int b) throws IOException {
+                        data.add(b);
+                    }
+                };
 
         testWriteToFromOffset(
-                os,
-                (b, d) -> b.writeTo(d, 1, 1),
-                d -> data.size(),
-                d -> data.get(0).byteValue());
+                os, (b, d) -> b.writeTo(d, 1, 1), d -> data.size(), d -> data.get(0).byteValue());
     }
 
     @Test
@@ -952,7 +966,6 @@ void writeToMessageDigestDataTest() throws NoSuchAlgorithmException {
                     ai.set(md.digest()[0]);
                 },
                 d -> ai.get() == 0 ? 0 : 1,
-                d -> (byte) (ai.get() + 121)
-        );
+                d -> (byte) (ai.get() + 121));
     }
 }
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/RandomAccessTestBase.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/RandomAccessTestBase.java
index 05331ae8..8fd6a5e1 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/RandomAccessTestBase.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/RandomAccessTestBase.java
@@ -28,17 +28,53 @@ public abstract class RandomAccessTestBase extends ReadableTestBase {
     protected abstract RandomAccessData randomAccessData(@NonNull final byte[] bytes);
 
     static IntStream testIntegers() {
-        return IntStream.of(Integer.MIN_VALUE, Integer.MIN_VALUE + 1,
-                -65536, -65535, -101, -9, -1, 0, 1, 4, 59, 255, 1023, 1024, 1025, 10000,
-                Integer.MAX_VALUE - 1, Integer.MAX_VALUE);
+        return IntStream.of(
+                Integer.MIN_VALUE,
+                Integer.MIN_VALUE + 1,
+                -65536,
+                -65535,
+                -101,
+                -9,
+                -1,
+                0,
+                1,
+                4,
+                59,
+                255,
+                1023,
+                1024,
+                1025,
+                10000,
+                Integer.MAX_VALUE - 1,
+                Integer.MAX_VALUE);
     }
 
     static LongStream testLongs() {
-        return LongStream.of(Long.MIN_VALUE, Long.MIN_VALUE + 1,
-                (long) Integer.MIN_VALUE - 1, Integer.MIN_VALUE, Integer.MIN_VALUE + 1,
-                -65536, -65535, -101, -9, -1, 0, 1, 4, 59, 255, 1023, 1024, 1025, 10000,
-                Integer.MAX_VALUE - 1, Integer.MAX_VALUE, (long) Integer.MAX_VALUE + 1,
-                Long.MAX_VALUE - 1, Long.MAX_VALUE);
+        return LongStream.of(
+                Long.MIN_VALUE,
+                Long.MIN_VALUE + 1,
+                (long) Integer.MIN_VALUE - 1,
+                Integer.MIN_VALUE,
+                Integer.MIN_VALUE + 1,
+                -65536,
+                -65535,
+                -101,
+                -9,
+                -1,
+                0,
+                1,
+                4,
+                59,
+                255,
+                1023,
+                1024,
+                1025,
+                10000,
+                Integer.MAX_VALUE - 1,
+                Integer.MAX_VALUE,
+                (long) Integer.MAX_VALUE + 1,
+                Long.MAX_VALUE - 1,
+                Long.MAX_VALUE);
     }
 
     @Test
@@ -48,7 +84,7 @@ void sliceLength() {
     }
 
     @ParameterizedTest
-    @ValueSource(strings = { "", "a", "ab", "abc", "✅" })
+    @ValueSource(strings = {"", "a", "ab", "abc", "✅"})
     void utf8Strings(final String s) {
         final var buf = randomAccessData(s.getBytes(StandardCharsets.UTF_8));
         assertThat(buf.asUtf8String()).isEqualTo(s);
@@ -83,29 +119,54 @@ void getBytesExtraDstLength() {
 
     @Test
     void matchesPrefixByteArray() {
-        final RandomAccessData data = randomAccessData(new byte[]{0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09});
-
-        assertTrue(data.matchesPrefix(new byte[]{0x01}));
-        assertTrue(data.matchesPrefix(new byte[]{0x01,0x02}));
-        assertTrue(data.matchesPrefix(new byte[]{0x01,0x02,0x03,0x04,}));
-        assertTrue(data.matchesPrefix(new byte[]{0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09}));
-
-        assertFalse(data.matchesPrefix(new byte[]{0x02}));
-        assertFalse(data.matchesPrefix(new byte[]{0x01,0x02,0x03,0x02}));
-        assertFalse(data.matchesPrefix(new byte[]{0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x00}));
+        final RandomAccessData data =
+                randomAccessData(new byte[] {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09});
+
+        assertTrue(data.matchesPrefix(new byte[] {0x01}));
+        assertTrue(data.matchesPrefix(new byte[] {0x01, 0x02}));
+        assertTrue(
+                data.matchesPrefix(
+                        new byte[] {
+                            0x01, 0x02, 0x03, 0x04,
+                        }));
+        assertTrue(
+                data.matchesPrefix(
+                        new byte[] {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09}));
+
+        assertFalse(data.matchesPrefix(new byte[] {0x02}));
+        assertFalse(data.matchesPrefix(new byte[] {0x01, 0x02, 0x03, 0x02}));
+        assertFalse(
+                data.matchesPrefix(
+                        new byte[] {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x00}));
     }
 
     @Test
     void matchesPrefixBytes() {
-        final RandomAccessData data = randomAccessData(new byte[]{0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09});
-        assertTrue(data.matchesPrefix(Bytes.wrap(new byte[]{0x01})));
-        assertTrue(data.matchesPrefix(Bytes.wrap(new byte[]{0x01,0x02})));
-        assertTrue(data.matchesPrefix(Bytes.wrap(new byte[]{0x01,0x02,0x03,0x04,})));
-        assertTrue(data.matchesPrefix(Bytes.wrap(new byte[]{0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09})));
-
-        assertFalse(data.matchesPrefix(Bytes.wrap(new byte[]{0x02})));
-        assertFalse(data.matchesPrefix(Bytes.wrap(new byte[]{0x01,0x02,0x03,0x02})));
-        assertFalse(data.matchesPrefix(Bytes.wrap(new byte[]{0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x00})));
+        final RandomAccessData data =
+                randomAccessData(new byte[] {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09});
+        assertTrue(data.matchesPrefix(Bytes.wrap(new byte[] {0x01})));
+        assertTrue(data.matchesPrefix(Bytes.wrap(new byte[] {0x01, 0x02})));
+        assertTrue(
+                data.matchesPrefix(
+                        Bytes.wrap(
+                                new byte[] {
+                                    0x01, 0x02, 0x03, 0x04,
+                                })));
+        assertTrue(
+                data.matchesPrefix(
+                        Bytes.wrap(
+                                new byte[] {
+                                    0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09
+                                })));
+
+        assertFalse(data.matchesPrefix(Bytes.wrap(new byte[] {0x02})));
+        assertFalse(data.matchesPrefix(Bytes.wrap(new byte[] {0x01, 0x02, 0x03, 0x02})));
+        assertFalse(
+                data.matchesPrefix(
+                        Bytes.wrap(
+                                new byte[] {
+                                    0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x00
+                                })));
     }
 
     @Test
@@ -117,45 +178,48 @@ void matchesPrefixEmpty_issue37() {
 
     @Test
     void containsZeroOffset() {
-        final RandomAccessData data = randomAccessData(new byte[]{0x01,0x02,0x03,0x04,0x05,0x06});
-        assertTrue(data.contains(0, new byte[]{0x01}));
-        assertTrue(data.contains(0, new byte[]{0x01,0x02}));
-        assertTrue(data.contains(0, new byte[]{0x01,0x02,0x03,0x04,0x05,0x06}));
-        assertFalse(data.contains(0, new byte[]{0x01,0x02,0x02}));
-        assertFalse(data.contains(0, new byte[]{0x02,0x02}));
-        assertFalse(data.contains(0, new byte[]{0x01,0x02,0x03,0x04,0x05,0x06,0x07}));
+        final RandomAccessData data =
+                randomAccessData(new byte[] {0x01, 0x02, 0x03, 0x04, 0x05, 0x06});
+        assertTrue(data.contains(0, new byte[] {0x01}));
+        assertTrue(data.contains(0, new byte[] {0x01, 0x02}));
+        assertTrue(data.contains(0, new byte[] {0x01, 0x02, 0x03, 0x04, 0x05, 0x06}));
+        assertFalse(data.contains(0, new byte[] {0x01, 0x02, 0x02}));
+        assertFalse(data.contains(0, new byte[] {0x02, 0x02}));
+        assertFalse(data.contains(0, new byte[] {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07}));
 
         final RandomAccessData slice = data.slice(1, 4);
-        assertTrue(slice.contains(0, new byte[]{0x02}));
-        assertTrue(slice.contains(0, new byte[]{0x02,0x03}));
-        assertTrue(slice.contains(0, new byte[]{0x02,0x03,0x04,0x05}));
-        assertFalse(slice.contains(0, new byte[]{0x01}));
-        assertFalse(slice.contains(0, new byte[]{0x02,0x02}));
-        assertFalse(slice.contains(0, new byte[]{0x02,0x03,0x04,0x05,0x06}));
+        assertTrue(slice.contains(0, new byte[] {0x02}));
+        assertTrue(slice.contains(0, new byte[] {0x02, 0x03}));
+        assertTrue(slice.contains(0, new byte[] {0x02, 0x03, 0x04, 0x05}));
+        assertFalse(slice.contains(0, new byte[] {0x01}));
+        assertFalse(slice.contains(0, new byte[] {0x02, 0x02}));
+        assertFalse(slice.contains(0, new byte[] {0x02, 0x03, 0x04, 0x05, 0x06}));
     }
 
     @Test
     void containsNonZeroOffset() {
-        final RandomAccessData data = randomAccessData(new byte[]{0x01,0x02,0x03,0x04,0x05,0x06});
-        assertTrue(data.contains(1, new byte[]{0x02}));
-        assertTrue(data.contains(1, new byte[]{0x02,0x03}));
-        assertTrue(data.contains(1, new byte[]{0x02,0x03,0x04,0x05,0x06}));
-        assertFalse(data.contains(1, new byte[]{0x02,0x03,0x03}));
-        assertFalse(data.contains(1, new byte[]{0x03,0x03}));
-        assertFalse(data.contains(1, new byte[]{0x02,0x03,0x04,0x05,0x06,0x07}));
+        final RandomAccessData data =
+                randomAccessData(new byte[] {0x01, 0x02, 0x03, 0x04, 0x05, 0x06});
+        assertTrue(data.contains(1, new byte[] {0x02}));
+        assertTrue(data.contains(1, new byte[] {0x02, 0x03}));
+        assertTrue(data.contains(1, new byte[] {0x02, 0x03, 0x04, 0x05, 0x06}));
+        assertFalse(data.contains(1, new byte[] {0x02, 0x03, 0x03}));
+        assertFalse(data.contains(1, new byte[] {0x03, 0x03}));
+        assertFalse(data.contains(1, new byte[] {0x02, 0x03, 0x04, 0x05, 0x06, 0x07}));
 
         final RandomAccessData slice = data.slice(1, 4);
-        assertTrue(slice.contains(1, new byte[]{0x03}));
-        assertTrue(slice.contains(1, new byte[]{0x03,0x04}));
-        assertTrue(slice.contains(1, new byte[]{0x03,0x04,0x05}));
-        assertFalse(slice.contains(1, new byte[]{0x02}));
-        assertFalse(slice.contains(1, new byte[]{0x03,0x03}));
-        assertFalse(slice.contains(1, new byte[]{0x03,0x04,0x05,0x06}));
+        assertTrue(slice.contains(1, new byte[] {0x03}));
+        assertTrue(slice.contains(1, new byte[] {0x03, 0x04}));
+        assertTrue(slice.contains(1, new byte[] {0x03, 0x04, 0x05}));
+        assertFalse(slice.contains(1, new byte[] {0x02}));
+        assertFalse(slice.contains(1, new byte[] {0x03, 0x03}));
+        assertFalse(slice.contains(1, new byte[] {0x03, 0x04, 0x05, 0x06}));
     }
 
     @Test
     void getInt() {
-        final RandomAccessData data = randomAccessData(new byte[]{0x01,0x02,0x03,0x04,0x05,0x06});
+        final RandomAccessData data =
+                randomAccessData(new byte[] {0x01, 0x02, 0x03, 0x04, 0x05, 0x06});
         assertEquals(0x01020304, data.getInt(0));
         assertEquals(0x02030405, data.getInt(1));
 
@@ -166,7 +230,9 @@ void getInt() {
 
     @Test
     void getLong() {
-        final RandomAccessData data = randomAccessData(new byte[]{0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0A});
+        final RandomAccessData data =
+                randomAccessData(
+                        new byte[] {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A});
         assertEquals(0x0102030405060708L, data.getLong(0));
         assertEquals(0x0203040506070809L, data.getLong(1));
 
@@ -238,5 +304,4 @@ void getVarLongZigZag(final long num) throws IOException {
         data = randomAccessData(writtenBytes);
         assertEquals(num, data.getVarLong(0, true));
     }
-
 }
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/StubbedRandomAccessDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/StubbedRandomAccessDataTest.java
index 0923ec7c..bc77a3f3 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/StubbedRandomAccessDataTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/buffer/StubbedRandomAccessDataTest.java
@@ -7,8 +7,6 @@
 import java.io.OutputStream;
 import java.io.UncheckedIOException;
 
-import static org.assertj.core.api.Assertions.assertThat;
-
 public class StubbedRandomAccessDataTest extends RandomAccessTestBase {
 
     @NonNull
@@ -20,7 +18,9 @@ protected ReadableSequentialData emptySequence() {
     @NonNull
     @Override
     protected ReadableSequentialData fullyUsedSequence() {
-        final var buf = new RandomAccessSequenceAdapter(new StubbedRandomAccessData(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }));
+        final var buf =
+                new RandomAccessSequenceAdapter(
+                        new StubbedRandomAccessData(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}));
         buf.skip(10);
         return buf;
     }
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingDataTest.java
index 02557276..091ea052 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingDataTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/ReadableStreamingDataTest.java
@@ -1,34 +1,32 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.stream;
 
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+
 import com.hedera.pbj.runtime.io.ReadableSequentialData;
 import com.hedera.pbj.runtime.io.ReadableSequentialTestBase;
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
 import edu.umd.cs.findbugs.annotations.NonNull;
-
 import java.io.BufferedInputStream;
 import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.UncheckedIOException;
+import java.nio.BufferUnderflowException;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.StandardOpenOption;
+import java.util.concurrent.atomic.AtomicBoolean;
 import org.junit.jupiter.api.DisplayName;
 import org.junit.jupiter.api.Test;
 
-import java.nio.BufferUnderflowException;
-import java.nio.charset.StandardCharsets;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.assertj.core.api.Assertions.assertThatThrownBy;
-import static org.junit.jupiter.api.Assertions.assertArrayEquals;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertThrows;
-
 final class ReadableStreamingDataTest extends ReadableSequentialTestBase {
 
     @NonNull
@@ -41,35 +39,46 @@ protected ReadableStreamingData emptySequence() {
 
     @NonNull
     private ReadableSequentialData throwingSequence() {
-        return new ReadableStreamingData(new InputStream() {
-            @Override
-            public int read() throws IOException {
-                throw new IOException("testing here");
-            }
-        });
+        return new ReadableStreamingData(
+                new InputStream() {
+                    @Override
+                    public int read() throws IOException {
+                        throw new IOException("testing here");
+                    }
+                });
     }
 
     @NonNull
     private ReadableSequentialData oneByteSequence() {
-        return new ReadableStreamingData(new InputStream() {
-            private int pos = 0;
-            @Override
-            public int read() throws IOException {
-                switch (pos) {
-                    case 0: pos++; return 7;
-                    case 1: pos++; return -1;
-                    default: throw new IOException("EOF");
-                }
-            }
-
-            @Override
-            public int readNBytes(byte[] b, int off, int len) throws IOException {
-                switch (pos) {
-                    case 0: b[off] = (byte) read(); return 1;
-                    default: return super.readNBytes(b, off, len);
-                }
-            }
-        });
+        return new ReadableStreamingData(
+                new InputStream() {
+                    private int pos = 0;
+
+                    @Override
+                    public int read() throws IOException {
+                        switch (pos) {
+                            case 0:
+                                pos++;
+                                return 7;
+                            case 1:
+                                pos++;
+                                return -1;
+                            default:
+                                throw new IOException("EOF");
+                        }
+                    }
+
+                    @Override
+                    public int readNBytes(byte[] b, int off, int len) throws IOException {
+                        switch (pos) {
+                            case 0:
+                                b[off] = (byte) read();
+                                return 1;
+                            default:
+                                return super.readNBytes(b, off, len);
+                        }
+                    }
+                });
     }
 
     @Test
@@ -125,7 +134,7 @@ protected ReadableStreamingData fullyUsedSequence() {
 
     @Override
     @NonNull
-    protected ReadableStreamingData sequence(@NonNull byte [] arr) {
+    protected ReadableStreamingData sequence(@NonNull byte[] arr) {
         final var stream = new ReadableStreamingData(arr);
         stream.limit(arr.length);
         return stream;
@@ -147,8 +156,7 @@ void closedStreamHasNoBytesRemaining() {
     void closedStreamCannotBeRead() {
         try (var stream = sequence("0123456789".getBytes(StandardCharsets.UTF_8))) {
             stream.close();
-            assertThatThrownBy(stream::readByte)
-                    .isInstanceOf(BufferUnderflowException.class);
+            assertThatThrownBy(stream::readByte).isInstanceOf(BufferUnderflowException.class);
         }
     }
 
@@ -158,60 +166,60 @@ void closeTwice() {
         try (var stream = sequence("0123456789".getBytes(StandardCharsets.UTF_8))) {
             stream.close();
             stream.close();
-            assertThatThrownBy(stream::readByte)
-                    .isInstanceOf(BufferUnderflowException.class);
+            assertThatThrownBy(stream::readByte).isInstanceOf(BufferUnderflowException.class);
         }
     }
 
     @Test
     @DisplayName("Bad InputStream will fail on skip")
     void inputStreamFailsDuringSkip() {
-        final var byteStream = new ByteArrayInputStream(new byte[] { 1, 2, 3, 4, 5, 6, 7 });
-        final var inputStream = new BufferedInputStream(byteStream) {
-            @Override
-            public synchronized long skip(long n) throws IOException {
-                throw new IOException("Failed");
-            }
-        };
+        final var byteStream = new ByteArrayInputStream(new byte[] {1, 2, 3, 4, 5, 6, 7});
+        final var inputStream =
+                new BufferedInputStream(byteStream) {
+                    @Override
+                    public synchronized long skip(long n) throws IOException {
+                        throw new IOException("Failed");
+                    }
+                };
 
         final var stream = new ReadableStreamingData(inputStream);
-        assertThatThrownBy(() -> stream.skip(5))
-                .isInstanceOf(UncheckedIOException.class);
+        assertThatThrownBy(() -> stream.skip(5)).isInstanceOf(UncheckedIOException.class);
     }
 
     @Test
     @DisplayName("Bad InputStream will fail on read")
     void inputStreamFailsDuringRead() {
         final var throwNow = new AtomicBoolean(false);
-        final var byteStream = new ByteArrayInputStream(new byte[] { 1, 2, 3, 4, 5, 6, 7 });
-        final var inputStream = new BufferedInputStream(byteStream) {
-            @Override
-            public int read() throws IOException {
-                if (throwNow.get()) {
-                    throw new IOException("Failed");
-                } else {
-                    return super.read();
-                }
-            }
-        };
+        final var byteStream = new ByteArrayInputStream(new byte[] {1, 2, 3, 4, 5, 6, 7});
+        final var inputStream =
+                new BufferedInputStream(byteStream) {
+                    @Override
+                    public int read() throws IOException {
+                        if (throwNow.get()) {
+                            throw new IOException("Failed");
+                        } else {
+                            return super.read();
+                        }
+                    }
+                };
 
         final var stream = new ReadableStreamingData(inputStream);
         stream.skip(5);
 
         throwNow.set(true);
-        assertThatThrownBy(stream::readByte)
-                .isInstanceOf(UncheckedIOException.class);
+        assertThatThrownBy(stream::readByte).isInstanceOf(UncheckedIOException.class);
     }
 
     @Test
     @DisplayName("Bad InputStream during close is ignored")
     void inputStreamFailsDuringClose() {
-        final var inputStream = new ByteArrayInputStream(new byte[0]) {
-            @Override
-            public void close() throws IOException {
-                throw new IOException("Failed");
-            }
-        };
+        final var inputStream =
+                new ByteArrayInputStream(new byte[0]) {
+                    @Override
+                    public void close() throws IOException {
+                        throw new IOException("Failed");
+                    }
+                };
 
         final var stream = new ReadableStreamingData(inputStream);
         stream.close();
@@ -221,26 +229,30 @@ public void close() throws IOException {
     @Test
     @DisplayName("Bad InputStream empty when read")
     void inputStreamEmptyReadBytes() {
-        final var inputStream = new ByteArrayInputStream(new byte[0]) {
-            @Override
-            public void close() throws IOException {
-                throw new IOException("Failed");
-            }
-        };
+        final var inputStream =
+                new ByteArrayInputStream(new byte[0]) {
+                    @Override
+                    public void close() throws IOException {
+                        throw new IOException("Failed");
+                    }
+                };
 
         byte[] read = new byte[5];
         final var stream = new ReadableStreamingData(inputStream);
-        assertThrows(EOFException.class, () -> {
-            final var i = stream.readInt();
-        });
+        assertThrows(
+                EOFException.class,
+                () -> {
+                    final var i = stream.readInt();
+                });
         assertEquals(0, stream.readBytes(read));
     }
 
     @Test
     @DisplayName("Bad InputStream empty when read")
     void inputStreamEmptyReadVarLong() {
-        final var inputStream = new ByteArrayInputStream(new byte[] {
-                (byte) 128, (byte) 129, (byte) 130, (byte) 131});
+        final var inputStream =
+                new ByteArrayInputStream(
+                        new byte[] {(byte) 128, (byte) 129, (byte) 130, (byte) 131});
 
         final var stream = new ReadableStreamingData(inputStream);
 
@@ -249,8 +261,9 @@ void inputStreamEmptyReadVarLong() {
 
     @Test
     void incompleteStreamToByteBuffer() {
-        final var inputStream = new ByteArrayInputStream(new byte[] {
-                (byte) 128, (byte) 129, (byte) 130, (byte) 131});
+        final var inputStream =
+                new ByteArrayInputStream(
+                        new byte[] {(byte) 128, (byte) 129, (byte) 130, (byte) 131});
 
         final var stream = new TestReadeableSequentialData(new ReadableStreamingData(inputStream));
         ByteBuffer buffer = ByteBuffer.allocate(8);
@@ -260,8 +273,9 @@ void incompleteStreamToByteBuffer() {
 
     @Test
     void incompleteStreamToBufferedData() {
-        final var inputStream = new ByteArrayInputStream(new byte[] {
-                (byte) 128, (byte) 129, (byte) 130, (byte) 131});
+        final var inputStream =
+                new ByteArrayInputStream(
+                        new byte[] {(byte) 128, (byte) 129, (byte) 130, (byte) 131});
 
         final var stream = new TestReadeableSequentialData(new ReadableStreamingData(inputStream));
         stream.limit(8);
@@ -273,7 +287,7 @@ void incompleteStreamToBufferedData() {
     @Test
     @DisplayName("Reusing an input stream on two ReadableStreamingData does not lose any data")
     void reuseStream() {
-        final var byteStream = new ByteArrayInputStream(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
+        final var byteStream = new ByteArrayInputStream(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10});
 
         final var bytes1 = new byte[5];
         final var stream1 = new ReadableStreamingData(byteStream);
@@ -334,7 +348,8 @@ void dataOnTopOfByteArrayLimits() {
 
     @Test
     void readDirectoryFile() throws IOException {
-        final Path dir = Files.createTempDirectory(getClass().getSimpleName() + "_readDirectoryFile");
+        final Path dir =
+                Files.createTempDirectory(getClass().getSimpleName() + "_readDirectoryFile");
         try {
             assertThrows(IOException.class, () -> new ReadableStreamingData(dir));
         } finally {
@@ -344,15 +359,17 @@ void readDirectoryFile() throws IOException {
 
     @Test
     void readFileThatDoesntExist() throws IOException {
-        final Path file = Files.createTempFile(getClass().getSimpleName(), "readFileThatDoesntExist");
+        final Path file =
+                Files.createTempFile(getClass().getSimpleName(), "readFileThatDoesntExist");
         Files.delete(file);
         assertThrows(IOException.class, () -> new ReadableStreamingData(file));
     }
 
     /**
-     * The sole purpose of this class is to allow testing of
-     * `{@link ReadableStreamingData#readBytes(ByteBuffer)}` and `{@link ReadableStreamingData#readBytes(BufferedData)}`.
-     * This methods are overriddin in other implementation and not possible to test ortherwise.
+     * The sole purpose of this class is to allow testing of `{@link
+     * ReadableStreamingData#readBytes(ByteBuffer)}` and `{@link
+     * ReadableStreamingData#readBytes(BufferedData)}`. These methods are overridden in other
+     * implementations and are not possible to test otherwise.
      */
     private static class TestReadeableSequentialData implements ReadableSequentialData {
         private ReadableStreamingData readableStreamingData;
diff --git a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/WritableStreamingDataTest.java b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/WritableStreamingDataTest.java
index a9714f74..54c1bf7b 100644
--- a/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/WritableStreamingDataTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/com/hedera/pbj/runtime/io/stream/WritableStreamingDataTest.java
@@ -1,20 +1,6 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.runtime.io.stream;
 
-import com.hedera.pbj.runtime.io.WritableSequentialData;
-import com.hedera.pbj.runtime.io.WritableTestBase;
-import com.hedera.pbj.runtime.io.buffer.RandomAccessData;
-import edu.umd.cs.findbugs.annotations.NonNull;
-import org.junit.jupiter.api.DisplayName;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.params.ParameterizedTest;
-import org.junit.jupiter.params.provider.ValueSource;
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.UncheckedIOException;
-import java.nio.charset.StandardCharsets;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.assertThatThrownBy;
 import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -28,6 +14,21 @@
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.verifyNoMoreInteractions;
 
+import com.hedera.pbj.runtime.io.WritableSequentialData;
+import com.hedera.pbj.runtime.io.WritableTestBase;
+import com.hedera.pbj.runtime.io.buffer.RandomAccessData;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.UncheckedIOException;
+import java.nio.charset.StandardCharsets;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
+
 public class WritableStreamingDataTest extends WritableTestBase {
 
     private ByteArrayOutputStream out;
@@ -73,7 +74,7 @@ void specifiedCapacity() throws IOException {
     }
 
     @ParameterizedTest
-    @ValueSource(ints = { -1, 0, 2, 1024, 1025, 2048, 3000 })
+    @ValueSource(ints = {-1, 0, 2, 1024, 1025, 2048, 3000})
     @DisplayName("Skip inserts empty bytes into the output stream")
     void skip(final int numBytesToSkip) {
         // Given a sequence
@@ -93,7 +94,8 @@ void skipClosed() throws IOException {
         final var stream = mock(OutputStream.class);
         final var seq = new WritableStreamingData(stream);
         doThrow(IOException.class).when(stream).write(any(), anyInt(), anyInt());
-        // When we try to skip some bytes, then we get an exception because the stream throws IOException
+        // When we try to skip some bytes, then we get an exception because the stream throws
+        // IOException
         assertThatThrownBy(() -> seq.skip(1)).isInstanceOf(UncheckedIOException.class);
     }
 
@@ -105,7 +107,8 @@ void closed() throws IOException {
         final var seq = new WritableStreamingData(stream);
         doThrow(IOException.class).when(stream).write(any(), anyInt(), anyInt());
         final var src = new ByteArrayInputStream("Gonna Throw".getBytes(StandardCharsets.UTF_8));
-        // When we try to write some bytes, then we get an exception because the stream throws IOException
+        // When we try to write some bytes, then we get an exception because the stream throws
+        // IOException
         assertThatThrownBy(() -> seq.writeBytes(src, 5)).isInstanceOf(UncheckedIOException.class);
     }
 
@@ -124,7 +127,9 @@ void testFlushable() throws IOException {
     }
 
     @Test
-    @DisplayName("writeBytes(RandomAccessData) should delegate to RandomAccessData.writeTo(OutputStream)")
+    @DisplayName(
+            "writeBytes(RandomAccessData) should delegate to"
+                    + " RandomAccessData.writeTo(OutputStream)")
     void testWriteBytesFastPath() {
         final OutputStream out = mock(OutputStream.class);
         final RandomAccessData data = mock(RandomAccessData.class);
@@ -141,5 +146,4 @@ void testWriteBytesFastPath() {
 
         assertEquals(10L, seq.position());
     }
-
 }
diff --git a/pbj-core/pbj-runtime/src/test/java/tests/ComparableOneOfTest.java b/pbj-core/pbj-runtime/src/test/java/tests/ComparableOneOfTest.java
index b15516d2..381ed789 100644
--- a/pbj-core/pbj-runtime/src/test/java/tests/ComparableOneOfTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/tests/ComparableOneOfTest.java
@@ -1,14 +1,14 @@
 // SPDX-License-Identifier: Apache-2.0
 package tests;
 
-import com.hedera.pbj.runtime.ComparableOneOf;
-import com.hedera.pbj.runtime.EnumWithProtoMetadata;
-import org.junit.jupiter.api.Test;
-
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertNull;
 import static org.junit.jupiter.api.Assertions.assertThrows;
 
+import com.hedera.pbj.runtime.ComparableOneOf;
+import com.hedera.pbj.runtime.EnumWithProtoMetadata;
+import org.junit.jupiter.api.Test;
+
 class ComparableOneOfTest {
     @Test
     void nullNameIsOK() {
@@ -30,8 +30,9 @@ void asReturnsValue() {
     @Test
     void hashCodeReturnsHashCode() {
         final var oneOf = new ComparableOneOf<>(TestEnum.KIND1, "Value");
-        assertEquals((31 + Integer.hashCode(TestEnum.KIND1.protoOrdinal())) * 31
-                + "Value".hashCode(), oneOf.hashCode());
+        assertEquals(
+                (31 + Integer.hashCode(TestEnum.KIND1.protoOrdinal())) * 31 + "Value".hashCode(),
+                oneOf.hashCode());
     }
 
     @Test
@@ -39,7 +40,8 @@ void equalsWorks() {
         final var oneOf = new ComparableOneOf<>(TestEnum.KIND1, "Value");
         final var sameComparableOneOf = new ComparableOneOf<>(TestEnum.KIND1, "Value");
         final var differentComparableOneOf = new ComparableOneOf<>(TestEnum.KIND2, "Value");
-        final var anotherDifferentComparableOneOf = new ComparableOneOf<>(TestEnum.KIND1, "AnotherValue");
+        final var anotherDifferentComparableOneOf =
+                new ComparableOneOf<>(TestEnum.KIND1, "AnotherValue");
 
         assertEquals(true, oneOf.equals(oneOf));
         assertEquals(true, oneOf.equals(sameComparableOneOf));
@@ -63,5 +65,4 @@ public String protoName() {
             return name();
         }
     }
-
 }
diff --git a/pbj-core/pbj-runtime/src/test/java/tests/FieldDefinitionTest.java b/pbj-core/pbj-runtime/src/test/java/tests/FieldDefinitionTest.java
index 25069987..9b320983 100644
--- a/pbj-core/pbj-runtime/src/test/java/tests/FieldDefinitionTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/tests/FieldDefinitionTest.java
@@ -1,28 +1,29 @@
 // SPDX-License-Identifier: Apache-2.0
 package tests;
 
+import static org.junit.jupiter.api.Assertions.assertThrows;
+
 import com.hedera.pbj.runtime.FieldDefinition;
 import com.hedera.pbj.runtime.FieldType;
 import org.junit.jupiter.api.Test;
 
-import static org.junit.jupiter.api.Assertions.assertThrows;
-
 class FieldDefinitionTest {
     @Test
     void nullNameThrows() {
-        assertThrows(NullPointerException.class, () ->
-                new FieldDefinition(null, FieldType.STRING, false, 1));
+        assertThrows(
+                NullPointerException.class,
+                () -> new FieldDefinition(null, FieldType.STRING, false, 1));
     }
 
     @Test
     void nullTypeThrows() {
-        assertThrows(NullPointerException.class, () ->
-                new FieldDefinition("Name", null, false, 1));
+        assertThrows(NullPointerException.class, () -> new FieldDefinition("Name", null, false, 1));
     }
 
     @Test
     void negativeNumberThrows() {
-        assertThrows(IllegalArgumentException.class, () ->
-                new FieldDefinition("Name", FieldType.STRING, false, -1));
+        assertThrows(
+                IllegalArgumentException.class,
+                () -> new FieldDefinition("Name", FieldType.STRING, false, -1));
     }
 }
diff --git a/pbj-core/pbj-runtime/src/test/java/tests/FuzzTest.java b/pbj-core/pbj-runtime/src/test/java/tests/FuzzTest.java
index e200715f..425a9893 100644
--- a/pbj-core/pbj-runtime/src/test/java/tests/FuzzTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/tests/FuzzTest.java
@@ -2,6 +2,6 @@
 package tests;
 
 public class FuzzTest {
-	// Need a test where we take an arbitrary array of bytes from length 1 to 10,000
-	// with arbitrary values and send it to a parser. It should always throw an exception.
+    // Need a test where we take an arbitrary array of bytes from length 1 to 10,000
+    // with arbitrary values and send it to a parser. It should always throw an exception.
 }
diff --git a/pbj-core/pbj-runtime/src/test/java/tests/NegativeTest.java b/pbj-core/pbj-runtime/src/test/java/tests/NegativeTest.java
index a91dbe6e..bf7fb107 100644
--- a/pbj-core/pbj-runtime/src/test/java/tests/NegativeTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/tests/NegativeTest.java
@@ -2,27 +2,40 @@
 package tests;
 
 public class NegativeTest {
-    // Take a valid protobuf, and send 1 byte, then 2 bytes, and so forth until all bytes - 1. All calls
+    // Take a valid protobuf, and send 1 byte, then 2 bytes, and so forth until all bytes - 1. All
+    // calls
     // should fail, though they may fail in different ways.
 
     // There should also be a test that specifically send a varint of 10+ bytes in a row with the
     // continuation bit set.
 
-    // There should be a test for forwards compatibility where valid protobuf is sent to a parser that
+    // There should be a test for forwards compatibility where valid protobuf is sent to a parser
+    // that
     // doesn't know about all the different types of fields.
 
-    // Test where a duplicate field is included in the protobuf bytes (for each different field type)
-    // The last data in the stream should win. See https://developers.google.com/protocol-buffers/docs/encoding#optional
-    // "Normally, an encoded message would never have more than one instance of a non-repeated field. However,
-    // parsers are expected to handle the case in which they do. For numeric types and strings, if the same field
-    // appears multiple times, the parser accepts the last value it sees. For embedded message fields, the parser
-    // merges multiple instances of the same field, as if with the Message::MergeFrom method – that is, all singular
-    // scalar fields in the latter instance replace those in the former, singular embedded messages are merged, and
-    // repeated fields are concatenated. The effect of these rules is that parsing the concatenation of two encoded
-    // messages produces exactly the same result as if you had parsed the two messages separately and merged the
+    // Test where a duplicate field is included in the protobuf bytes (for each different field
+    // type)
+    // The last data in the stream should win. See
+    // https://developers.google.com/protocol-buffers/docs/encoding#optional
+    // "Normally, an encoded message would never have more than one instance of a non-repeated
+    // field. However,
+    // parsers are expected to handle the case in which they do. For numeric types and strings, if
+    // the same field
+    // appears multiple times, the parser accepts the last value it sees. For embedded message
+    // fields, the parser
+    // merges multiple instances of the same field, as if with the Message::MergeFrom method – that
+    // is, all singular
+    // scalar fields in the latter instance replace those in the former, singular embedded messages
+    // are merged, and
+    // repeated fields are concatenated. The effect of these rules is that parsing the concatenation
+    // of two encoded
+    // messages produces exactly the same result as if you had parsed the two messages separately
+    // and merged the
     // resulting objects." - The Spec
     //
-    // "Note that although there's usually no reason to encode more than one key-value pair for a packed repeated field,
-    // parsers must be prepared to accept multiple key-value pairs. In this case, the payloads should be concatenated.
+    // "Note that although there's usually no reason to encode more than one key-value pair for a
+    // packed repeated field,
+    // parsers must be prepared to accept multiple key-value pairs. In this case, the payloads
+    // should be concatenated.
     // Each pair must contain a whole number of elements." - The Spec
 }
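
For reference, not part of this patch: a minimal sketch of the "last value wins / messages merge" rule quoted in the comments above, assuming only a protoc-generated message such as the com.hederahashgraph.api.proto.java.Timestamp already used elsewhere in these integration tests (class name and values are illustrative):

    import com.hederahashgraph.api.proto.java.Timestamp;

    public final class LastFieldWinsSketch {
        public static void main(String[] args) throws Exception {
            // Two encodings of the same message type: both set seconds, only the first sets nanos.
            byte[] first = Timestamp.newBuilder().setSeconds(1L).setNanos(111).build().toByteArray();
            byte[] second = Timestamp.newBuilder().setSeconds(2L).build().toByteArray();
            byte[] concatenated = new byte[first.length + second.length];
            System.arraycopy(first, 0, concatenated, 0, first.length);
            System.arraycopy(second, 0, concatenated, first.length, second.length);
            // Parsing the concatenation behaves like parsing the two messages and merging them:
            // the scalar "seconds" takes the last value seen (2), while "nanos" keeps 111 because
            // the second encoding never writes that field.
            Timestamp merged = Timestamp.parseFrom(concatenated);
            System.out.println(merged.getSeconds() + " " + merged.getNanos());
        }
    }
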
diff --git a/pbj-core/pbj-runtime/src/test/java/tests/OneOfTest.java b/pbj-core/pbj-runtime/src/test/java/tests/OneOfTest.java
index 5e3055c7..581a037a 100644
--- a/pbj-core/pbj-runtime/src/test/java/tests/OneOfTest.java
+++ b/pbj-core/pbj-runtime/src/test/java/tests/OneOfTest.java
@@ -1,16 +1,14 @@
 // SPDX-License-Identifier: Apache-2.0
 package tests;
 
-import com.hedera.pbj.runtime.EnumWithProtoMetadata;
-import com.hedera.pbj.runtime.OneOf;
-import org.junit.jupiter.api.Test;
-
-import java.util.Objects;
-
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertNull;
 import static org.junit.jupiter.api.Assertions.assertThrows;
 
+import com.hedera.pbj.runtime.EnumWithProtoMetadata;
+import com.hedera.pbj.runtime.OneOf;
+import org.junit.jupiter.api.Test;
+
 class OneOfTest {
     @Test
     void nullNameIsOK() {
@@ -32,7 +30,9 @@ void asReturnsValue() {
     @Test
     void hashCodeReturnsHashCode() {
         final var oneOf = new OneOf<>(TestEnum.KIND1, "Value");
-        assertEquals((31 + Integer.hashCode(TestEnum.KIND1.protoOrdinal())) * 31 + "Value".hashCode(), oneOf.hashCode());
+        assertEquals(
+                (31 + Integer.hashCode(TestEnum.KIND1.protoOrdinal())) * 31 + "Value".hashCode(),
+                oneOf.hashCode());
     }
 
     @Test
@@ -64,5 +64,4 @@ public String protoName() {
             return name();
         }
     }
-
 }
diff --git a/pbj-integration-tests/build.gradle.kts b/pbj-integration-tests/build.gradle.kts
index 0b49929a..1f9baecb 100644
--- a/pbj-integration-tests/build.gradle.kts
+++ b/pbj-integration-tests/build.gradle.kts
@@ -8,6 +8,8 @@ plugins {
     id("com.google.protobuf").version("0.9.4")
     // add jmh for performance benchmarks
     id("me.champeau.jmh").version("0.7.2")
+    // Add spotless
+    id("com.diffplug.spotless").version("6.25.0")
 }
 
 group = "com.hedera.pbj.integration-tests"
@@ -167,3 +169,26 @@ tasks.jacocoTestReport.configure {
 tasks.named("check").configure {
     dependsOn(tasks.named<JacocoReport>("jacocoTestReport"))
 }
+
+spotless {
+    java {
+        targetExclude("build/generated/sources/**/*.java")
+        // enable toggle comment support
+        toggleOffOn()
+        // don't need to set target, it is inferred from java
+        // apply a specific flavor of google-java-format
+        googleJavaFormat("1.17.0").aosp().reflowLongStrings()
+        // make sure every file has the following copyright header.
+        // optionally, Spotless can set copyright years by digging
+        // through git history (see "license" section below).
+        // The delimiter override below is required to support some
+        // of our test classes which are in the default package.
+        licenseHeader(
+            """
+            // SPDX-License-Identifier: Apache-2.0
+            """
+                .trimIndent(),
+            "(package|import)"
+        )
+    }
+}
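
For reference, not part of this patch: toggleOffOn() above enables Spotless's default markers, so a deliberately hand-formatted region survives `./gradlew spotlessApply` while the rest of the file is rewritten. A small illustrative sketch (the class is hypothetical):

    public final class ToggleExample {
        // spotless:off
        private static final int[] LOOKUP = {
                1,   2,   4,
                8,  16,  32,
        };
        // spotless:on
    }
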
diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/ComplexEqualsHashCodeBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/ComplexEqualsHashCodeBench.java
index fd0ebfcf..411dfc6c 100644
--- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/ComplexEqualsHashCodeBench.java
+++ b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/ComplexEqualsHashCodeBench.java
@@ -58,8 +58,7 @@ public record HashevalJavaRecord(
             Suit enumSuit,
             @Nullable TimestampTest subObject,
             String text,
-            Bytes bytesField
-    ){}
+            Bytes bytesField) {}
 
     private final Hasheval hasheval;
     private final Hasheval hasheval1;
@@ -69,42 +68,120 @@ public record HashevalJavaRecord(
     private final HashevalJavaRecord hashevalJavaRecordDifferent;
 
     public ComplexEqualsHashCodeBench() {
-        hasheval = new Hasheval(123, 123, 123,
-                123, 123, 1.23f, 123L, 123L,
-                123L, 123L, 123L, 1.23D, true,
-                Suit.ACES, new TimestampTest(987L, 123),
-                "FooBarKKKKHHHHOIOIOI",
-                Bytes.wrap(new byte[]{1, 2, 3, 4, 5, 6, 7, (byte)255}));
-        hasheval1 = new Hasheval(123, 123, 123,
-                123, 123, 1.23f, 123L, 123L,
-                123L, 123L, 123L, 1.23D, true,
-                Suit.ACES, new TimestampTest(987L, 123),
-                "FooBarKKKKHHHHOIOIOI",
-                Bytes.wrap(new byte[]{1, 2, 3, 4, 5, 6, 7, (byte)255}));
-        hashevalDifferent = new Hasheval(123, 123, 123,
-                123, 123, 1.23f, 123L, 123L,
-                123L, 123L, 123L, 1.23D, true,
-                Suit.ACES, new TimestampTest(987L, 123),
-                "Different",
-                Bytes.wrap(new byte[]{1, 2, 3, 4, 5, 6, 7, (byte)255}));
-        hashevalJavaRecord = new HashevalJavaRecord(123, 123, 123,
-                123, 123, 1.23f, 123L, 123L,
-                123L, 123L, 123L, 1.23D, true,
-                Suit.ACES, new TimestampTest(987L, 123),
-                "FooBarKKKKHHHHOIOIOI",
-                Bytes.wrap(new byte[]{1, 2, 3, 4, 5, 6, 7, (byte)255}));
-        hashevalJavaRecord1 = new HashevalJavaRecord(123, 123, 123,
-                123, 123, 1.23f, 123L, 123L,
-                123L, 123L, 123L, 1.23D, true,
-                Suit.ACES, new TimestampTest(987L, 123),
-                "FooBarKKKKHHHHOIOIOI",
-                Bytes.wrap(new byte[]{1, 2, 3, 4, 5, 6, 7, (byte)255}));
-        hashevalJavaRecordDifferent = new HashevalJavaRecord(123, 123, 123,
-                123, 123, 1.23f, 123L, 123L,
-                123L, 123L, 123L, 1.23D, true,
-                Suit.ACES, new TimestampTest(987L, 123),
-                "Different",
-                Bytes.wrap(new byte[]{1, 2, 3, 4, 5, 6, 7, (byte)255}));
+        hasheval =
+                new Hasheval(
+                        123,
+                        123,
+                        123,
+                        123,
+                        123,
+                        1.23f,
+                        123L,
+                        123L,
+                        123L,
+                        123L,
+                        123L,
+                        1.23D,
+                        true,
+                        Suit.ACES,
+                        new TimestampTest(987L, 123),
+                        "FooBarKKKKHHHHOIOIOI",
+                        Bytes.wrap(new byte[] {1, 2, 3, 4, 5, 6, 7, (byte) 255}));
+        hasheval1 =
+                new Hasheval(
+                        123,
+                        123,
+                        123,
+                        123,
+                        123,
+                        1.23f,
+                        123L,
+                        123L,
+                        123L,
+                        123L,
+                        123L,
+                        1.23D,
+                        true,
+                        Suit.ACES,
+                        new TimestampTest(987L, 123),
+                        "FooBarKKKKHHHHOIOIOI",
+                        Bytes.wrap(new byte[] {1, 2, 3, 4, 5, 6, 7, (byte) 255}));
+        hashevalDifferent =
+                new Hasheval(
+                        123,
+                        123,
+                        123,
+                        123,
+                        123,
+                        1.23f,
+                        123L,
+                        123L,
+                        123L,
+                        123L,
+                        123L,
+                        1.23D,
+                        true,
+                        Suit.ACES,
+                        new TimestampTest(987L, 123),
+                        "Different",
+                        Bytes.wrap(new byte[] {1, 2, 3, 4, 5, 6, 7, (byte) 255}));
+        hashevalJavaRecord =
+                new HashevalJavaRecord(
+                        123,
+                        123,
+                        123,
+                        123,
+                        123,
+                        1.23f,
+                        123L,
+                        123L,
+                        123L,
+                        123L,
+                        123L,
+                        1.23D,
+                        true,
+                        Suit.ACES,
+                        new TimestampTest(987L, 123),
+                        "FooBarKKKKHHHHOIOIOI",
+                        Bytes.wrap(new byte[] {1, 2, 3, 4, 5, 6, 7, (byte) 255}));
+        hashevalJavaRecord1 =
+                new HashevalJavaRecord(
+                        123,
+                        123,
+                        123,
+                        123,
+                        123,
+                        1.23f,
+                        123L,
+                        123L,
+                        123L,
+                        123L,
+                        123L,
+                        1.23D,
+                        true,
+                        Suit.ACES,
+                        new TimestampTest(987L, 123),
+                        "FooBarKKKKHHHHOIOIOI",
+                        Bytes.wrap(new byte[] {1, 2, 3, 4, 5, 6, 7, (byte) 255}));
+        hashevalJavaRecordDifferent =
+                new HashevalJavaRecord(
+                        123,
+                        123,
+                        123,
+                        123,
+                        123,
+                        1.23f,
+                        123L,
+                        123L,
+                        123L,
+                        123L,
+                        123L,
+                        1.23D,
+                        true,
+                        Suit.ACES,
+                        new TimestampTest(987L, 123),
+                        "Different",
+                        Bytes.wrap(new byte[] {1, 2, 3, 4, 5, 6, 7, (byte) 255}));
     }
 
     @Benchmark
@@ -130,6 +207,7 @@ public void benchEquals(Blackhole blackhole) {
             blackhole.consume(hasheval.equals(hasheval1));
         }
     }
+
     @Benchmark
     @OperationsPerInvocation(1050)
     public void benchJavaRecordEquals(Blackhole blackhole) {
diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/EqualsHashCodeBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/EqualsHashCodeBench.java
index 8705997f..bcc8bb2d 100644
--- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/EqualsHashCodeBench.java
+++ b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/EqualsHashCodeBench.java
@@ -2,6 +2,7 @@
 package com.hedera.pbj.integration.jmh;
 
 import com.hedera.pbj.test.proto.pbj.TimestampTest;
+import java.util.concurrent.TimeUnit;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
@@ -9,13 +10,11 @@
 import org.openjdk.jmh.annotations.Mode;
 import org.openjdk.jmh.annotations.OperationsPerInvocation;
 import org.openjdk.jmh.annotations.OutputTimeUnit;
-import org.openjdk.jmh.annotations.State;
 import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.State;
 import org.openjdk.jmh.annotations.Warmup;
 import org.openjdk.jmh.infra.Blackhole;
 
-import java.util.concurrent.TimeUnit;
-
 /*
 Mac Results
 
@@ -38,7 +37,7 @@
 @OutputTimeUnit(TimeUnit.NANOSECONDS)
 @BenchmarkMode(Mode.AverageTime)
 public class EqualsHashCodeBench {
-    public record TimestampStandardRecord(long seconds, int nanos){}
+    public record TimestampStandardRecord(long seconds, int nanos) {}
 
     private final TimestampTest testStamp;
     private final TimestampTest testStamp1;
@@ -79,6 +78,7 @@ public void benchEquals(Blackhole blackhole) {
             blackhole.consume(testStamp.equals(testStamp1));
         }
     }
+
     @Benchmark
     @OperationsPerInvocation(1050)
     public void benchJavaRecordEquals(Blackhole blackhole) {
diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/HashBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/HashBench.java
index ebdd9530..3c0d2e97 100644
--- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/HashBench.java
+++ b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/HashBench.java
@@ -6,6 +6,8 @@
 import com.hedera.pbj.test.proto.pbj.Hasheval;
 import com.hedera.pbj.test.proto.pbj.Suit;
 import com.hedera.pbj.test.proto.pbj.TimestampTest;
+import java.io.IOException;
+import java.util.concurrent.TimeUnit;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
@@ -18,9 +20,6 @@
 import org.openjdk.jmh.annotations.Warmup;
 import org.openjdk.jmh.infra.Blackhole;
 
-import java.io.IOException;
-import java.util.concurrent.TimeUnit;
-
 @SuppressWarnings("unused")
 @State(Scope.Benchmark)
 @Fork(1)
@@ -33,10 +32,25 @@ public class HashBench {
 
     public HashBench() {
         TimestampTest tst = new TimestampTest(987L, 123);
-        hasheval = new Hasheval(1, -1, 2, 3, -2,
-                                123f, 7L, -7L, 123L, 234L,
-                                -345L, 456.789D, true, Suit.ACES, tst, "FooBarKKKKHHHHOIOIOI",
-                                 Bytes.wrap(new byte[]{1, 2, 3, 4, 5, 6, 7, (byte)255}));
+        hasheval =
+                new Hasheval(
+                        1,
+                        -1,
+                        2,
+                        3,
+                        -2,
+                        123f,
+                        7L,
+                        -7L,
+                        123L,
+                        234L,
+                        -345L,
+                        456.789D,
+                        true,
+                        Suit.ACES,
+                        tst,
+                        "FooBarKKKKHHHHOIOIOI",
+                        Bytes.wrap(new byte[] {1, 2, 3, 4, 5, 6, 7, (byte) 255}));
     }
 
     @Benchmark
diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/JsonBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/JsonBench.java
index f6ada547..7025b613 100644
--- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/JsonBench.java
+++ b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/JsonBench.java
@@ -2,7 +2,6 @@
 package com.hedera.pbj.integration.jmh;
 
 import com.google.protobuf.GeneratedMessage;
-import com.google.protobuf.GeneratedMessageV3;
 import com.google.protobuf.InvalidProtocolBufferException;
 import com.google.protobuf.util.JsonFormat;
 import com.hedera.hapi.node.base.Timestamp;
@@ -15,6 +14,9 @@
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
 import com.hedera.pbj.test.proto.pbj.Everything;
 import com.hederahashgraph.api.proto.java.GetAccountDetailsResponse;
+import java.io.IOException;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Supplier;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
@@ -27,132 +29,159 @@
 import org.openjdk.jmh.annotations.Warmup;
 import org.openjdk.jmh.infra.Blackhole;
 
-import java.io.IOException;
-import java.util.concurrent.TimeUnit;
-import java.util.function.Supplier;
-
 @SuppressWarnings("unused")
 @Fork(1)
 @Warmup(iterations = 2, time = 2)
 @Measurement(iterations = 5, time = 2)
 @OutputTimeUnit(TimeUnit.NANOSECONDS)
 @BenchmarkMode(Mode.AverageTime)
-public abstract class JsonBench<P extends Record,G extends GeneratedMessage> {
-
-	@SuppressWarnings("rawtypes")
-	@State(Scope.Benchmark)
-	public static class JsonBenchmarkState<P extends Record,G extends GeneratedMessage> {
-		private JsonCodec<P> pbjJsonCodec;
-		private Supplier<GeneratedMessage.Builder> builderSupplier;
-		// input objects
-		private P pbjModelObject;
-		private G googleModelObject;
-
-		// input bytes
-		private BufferedData jsonDataBuffer;
-		private String jsonString;
-
-		// output buffers
-		private BufferedData outDataBuffer;
-		public void configure(P pbjModelObject, Codec<P> pbjProtoCodec, JsonCodec<P> pbjJsonCodec,
-							  ProtobufObjectBench.ProtobufParseFunction<byte[],G> googleByteArrayParseMethod,
-							  Supplier<GeneratedMessage.Builder> builderSupplier) {
-			try {
-				this.pbjModelObject = pbjModelObject;
-				this.pbjJsonCodec = pbjJsonCodec;
-				this.builderSupplier = builderSupplier;
-				// write to JSON for parse tests
-				jsonDataBuffer = BufferedData.allocate(5 * 1024 * 1024);
-				pbjJsonCodec.write(pbjModelObject, jsonDataBuffer);
-				jsonDataBuffer.flip();
-				// get as string for parse tests
-				jsonString = jsonDataBuffer.asUtf8String();
-
-				// write to temp data buffer and then read into byte array
-				BufferedData tempDataBuffer = BufferedData.allocate(5 * 1024 * 1024);
-				pbjProtoCodec.write(pbjModelObject, tempDataBuffer);
-				tempDataBuffer.flip();
-				byte[] protoBytes = new byte[(int)tempDataBuffer.length()];
-				tempDataBuffer.getBytes(0,protoBytes);
-				// convert to protobuf
-				googleModelObject = googleByteArrayParseMethod.parse(protoBytes);
-
-				// input buffers
-				// output buffers
-				this.outDataBuffer = BufferedData.allocate(jsonString.length());
-			} catch (IOException e) {
-				e.getStackTrace();
-				System.err.flush();
-				throw new RuntimeException(e);
-			}
-		}
-	}
-
-	/** Same as parsePbjByteBuffer because DataBuffer.wrap(byte[]) uses ByteBuffer today, added this because makes result plotting easier */
-	@Benchmark
-	public void parsePbj(JsonBenchmarkState<P,G> benchmarkState, Blackhole blackhole) throws ParseException {
-		benchmarkState.jsonDataBuffer.position(0);
-		blackhole.consume(benchmarkState.pbjJsonCodec.parse(benchmarkState.jsonDataBuffer));
-	}
-
-	@Benchmark
-	public void parseProtoC(JsonBenchmarkState<P,G> benchmarkState, Blackhole blackhole) throws IOException {
-		var builder = benchmarkState.builderSupplier.get();
-		JsonFormat.parser().merge(benchmarkState.jsonString, builder);
-		blackhole.consume(builder.build());
-	}
-
-	/** Same as writePbjByteBuffer because DataBuffer.wrap(byte[]) uses ByteBuffer today, added this because makes result plotting easier */
-	@Benchmark
-	public void writePbj(JsonBenchmarkState<P,G> benchmarkState, Blackhole blackhole) throws IOException {
-		benchmarkState.outDataBuffer.reset();
-		benchmarkState.pbjJsonCodec.write(benchmarkState.pbjModelObject, benchmarkState.outDataBuffer);
-		blackhole.consume(benchmarkState.outDataBuffer);
-	}
-
-	@Benchmark
-	public void writeProtoC(JsonBenchmarkState<P,G> benchmarkState, Blackhole blackhole) throws InvalidProtocolBufferException {
-		blackhole.consume(JsonFormat.printer().print(benchmarkState.googleModelObject));
-	}
-
-	/** Custom interface for method references as java.util.Function does not throw IOException */
-	public interface ProtobufParseFunction<D, G> {
-		G parse(D data) throws IOException;
-	}
-
-	@State(Scope.Benchmark)
-	public static class EverythingBench extends JsonBench<Everything, com.hedera.pbj.test.proto.java.Everything> {
-		@Setup
-		public void setup(JsonBenchmarkState<Everything, com.hedera.pbj.test.proto.java.Everything> benchmarkState) {
-			benchmarkState.configure(EverythingTestData.EVERYTHING,
-					Everything.PROTOBUF,
-					Everything.JSON,
-					com.hedera.pbj.test.proto.java.Everything::parseFrom,
-					com.hedera.pbj.test.proto.java.Everything::newBuilder);
-		}
-	}
-
-	@State(Scope.Benchmark)
-	public static class TimeStampBench extends JsonBench<Timestamp , com.hederahashgraph.api.proto.java.Timestamp> {
-		@Setup
-		public void setup(JsonBenchmarkState<Timestamp , com.hederahashgraph.api.proto.java.Timestamp> benchmarkState) {
-			benchmarkState.configure(new Timestamp(5678L, 1234),
-					Timestamp.PROTOBUF,
-					Timestamp.JSON,
-					com.hederahashgraph.api.proto.java.Timestamp::parseFrom,
-					com.hederahashgraph.api.proto.java.Timestamp::newBuilder);
-		}
-	}
-
-	@State(Scope.Benchmark)
-	public static class AccountDetailsBench extends JsonBench<com.hedera.hapi.node.token.AccountDetails, GetAccountDetailsResponse.AccountDetails> {
-		@Setup
-		public void setup(JsonBenchmarkState<com.hedera.hapi.node.token.AccountDetails, GetAccountDetailsResponse.AccountDetails> benchmarkState) {
-			benchmarkState.configure(AccountDetailsPbj.ACCOUNT_DETAILS,
-					AccountDetails.PROTOBUF,
-					AccountDetails.JSON,
-					GetAccountDetailsResponse.AccountDetails::parseFrom,
-					GetAccountDetailsResponse.AccountDetails::newBuilder);
-		}
-	}
+public abstract class JsonBench<P extends Record, G extends GeneratedMessage> {
+
+    @SuppressWarnings("rawtypes")
+    @State(Scope.Benchmark)
+    public static class JsonBenchmarkState<P extends Record, G extends GeneratedMessage> {
+        private JsonCodec<P> pbjJsonCodec;
+        private Supplier<GeneratedMessage.Builder> builderSupplier;
+        // input objects
+        private P pbjModelObject;
+        private G googleModelObject;
+
+        // input bytes
+        private BufferedData jsonDataBuffer;
+        private String jsonString;
+
+        // output buffers
+        private BufferedData outDataBuffer;
+
+        public void configure(
+                P pbjModelObject,
+                Codec<P> pbjProtoCodec,
+                JsonCodec<P> pbjJsonCodec,
+                ProtobufObjectBench.ProtobufParseFunction<byte[], G> googleByteArrayParseMethod,
+                Supplier<GeneratedMessage.Builder> builderSupplier) {
+            try {
+                this.pbjModelObject = pbjModelObject;
+                this.pbjJsonCodec = pbjJsonCodec;
+                this.builderSupplier = builderSupplier;
+                // write to JSON for parse tests
+                jsonDataBuffer = BufferedData.allocate(5 * 1024 * 1024);
+                pbjJsonCodec.write(pbjModelObject, jsonDataBuffer);
+                jsonDataBuffer.flip();
+                // get as string for parse tests
+                jsonString = jsonDataBuffer.asUtf8String();
+
+                // write to temp data buffer and then read into byte array
+                BufferedData tempDataBuffer = BufferedData.allocate(5 * 1024 * 1024);
+                pbjProtoCodec.write(pbjModelObject, tempDataBuffer);
+                tempDataBuffer.flip();
+                byte[] protoBytes = new byte[(int) tempDataBuffer.length()];
+                tempDataBuffer.getBytes(0, protoBytes);
+                // convert to protobuf
+                googleModelObject = googleByteArrayParseMethod.parse(protoBytes);
+
+                // input buffers
+                // output buffers
+                this.outDataBuffer = BufferedData.allocate(jsonString.length());
+            } catch (IOException e) {
+                e.getStackTrace();
+                System.err.flush();
+                throw new RuntimeException(e);
+            }
+        }
+    }
+
+    /**
+     * Same as parsePbjByteBuffer because DataBuffer.wrap(byte[]) uses ByteBuffer today; added this
+     * because it makes result plotting easier
+     */
+    @Benchmark
+    public void parsePbj(JsonBenchmarkState<P, G> benchmarkState, Blackhole blackhole)
+            throws ParseException {
+        benchmarkState.jsonDataBuffer.position(0);
+        blackhole.consume(benchmarkState.pbjJsonCodec.parse(benchmarkState.jsonDataBuffer));
+    }
+
+    @Benchmark
+    public void parseProtoC(JsonBenchmarkState<P, G> benchmarkState, Blackhole blackhole)
+            throws IOException {
+        var builder = benchmarkState.builderSupplier.get();
+        JsonFormat.parser().merge(benchmarkState.jsonString, builder);
+        blackhole.consume(builder.build());
+    }
+
+    /**
+     * Same as writePbjByteBuffer because DataBuffer.wrap(byte[]) uses ByteBuffer today; added this
+     * because it makes result plotting easier
+     */
+    @Benchmark
+    public void writePbj(JsonBenchmarkState<P, G> benchmarkState, Blackhole blackhole)
+            throws IOException {
+        benchmarkState.outDataBuffer.reset();
+        benchmarkState.pbjJsonCodec.write(
+                benchmarkState.pbjModelObject, benchmarkState.outDataBuffer);
+        blackhole.consume(benchmarkState.outDataBuffer);
+    }
+
+    @Benchmark
+    public void writeProtoC(JsonBenchmarkState<P, G> benchmarkState, Blackhole blackhole)
+            throws InvalidProtocolBufferException {
+        blackhole.consume(JsonFormat.printer().print(benchmarkState.googleModelObject));
+    }
+
+    /** Custom interface for method references as java.util.Function does not throw IOException */
+    public interface ProtobufParseFunction<D, G> {
+        G parse(D data) throws IOException;
+    }
+
+    @State(Scope.Benchmark)
+    public static class EverythingBench
+            extends JsonBench<Everything, com.hedera.pbj.test.proto.java.Everything> {
+        @Setup
+        public void setup(
+                JsonBenchmarkState<Everything, com.hedera.pbj.test.proto.java.Everything>
+                        benchmarkState) {
+            benchmarkState.configure(
+                    EverythingTestData.EVERYTHING,
+                    Everything.PROTOBUF,
+                    Everything.JSON,
+                    com.hedera.pbj.test.proto.java.Everything::parseFrom,
+                    com.hedera.pbj.test.proto.java.Everything::newBuilder);
+        }
+    }
+
+    @State(Scope.Benchmark)
+    public static class TimeStampBench
+            extends JsonBench<Timestamp, com.hederahashgraph.api.proto.java.Timestamp> {
+        @Setup
+        public void setup(
+                JsonBenchmarkState<Timestamp, com.hederahashgraph.api.proto.java.Timestamp>
+                        benchmarkState) {
+            benchmarkState.configure(
+                    new Timestamp(5678L, 1234),
+                    Timestamp.PROTOBUF,
+                    Timestamp.JSON,
+                    com.hederahashgraph.api.proto.java.Timestamp::parseFrom,
+                    com.hederahashgraph.api.proto.java.Timestamp::newBuilder);
+        }
+    }
+
+    @State(Scope.Benchmark)
+    public static class AccountDetailsBench
+            extends JsonBench<
+                    com.hedera.hapi.node.token.AccountDetails,
+                    GetAccountDetailsResponse.AccountDetails> {
+        @Setup
+        public void setup(
+                JsonBenchmarkState<
+                                com.hedera.hapi.node.token.AccountDetails,
+                                GetAccountDetailsResponse.AccountDetails>
+                        benchmarkState) {
+            benchmarkState.configure(
+                    AccountDetailsPbj.ACCOUNT_DETAILS,
+                    AccountDetails.PROTOBUF,
+                    AccountDetails.JSON,
+                    GetAccountDetailsResponse.AccountDetails::parseFrom,
+                    GetAccountDetailsResponse.AccountDetails::newBuilder);
+        }
+    }
 }
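
For reference, not part of this patch: these JMH classes are normally run through the me.champeau.jmh plugin's `jmh` task, which also handles JMH's annotation processing. A hand-rolled runner would look roughly like the sketch below (class name illustrative), using the standard JMH runner API:

    import org.openjdk.jmh.runner.Runner;
    import org.openjdk.jmh.runner.options.Options;
    import org.openjdk.jmh.runner.options.OptionsBuilder;

    public final class RunJsonBench {
        public static void main(String[] args) throws Exception {
            // Matches the nested @State classes, e.g. JsonBench.EverythingBench and JsonBench.TimeStampBench.
            Options options = new OptionsBuilder()
                    .include("JsonBench")
                    .forks(1)
                    .build();
            new Runner(options).run();
        }
    }
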
diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/ProtobufObjectBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/ProtobufObjectBench.java
index adbd182d..53ef208c 100644
--- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/ProtobufObjectBench.java
+++ b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/ProtobufObjectBench.java
@@ -16,6 +16,10 @@
 import com.hedera.pbj.runtime.io.stream.WritableStreamingData;
 import com.hedera.pbj.test.proto.pbj.Everything;
 import com.hederahashgraph.api.proto.java.GetAccountDetailsResponse;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+import java.util.concurrent.TimeUnit;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
@@ -29,270 +33,322 @@
 import org.openjdk.jmh.annotations.Warmup;
 import org.openjdk.jmh.infra.Blackhole;
 
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.ByteBuffer;
-import java.util.concurrent.TimeUnit;
-
 @SuppressWarnings("unused")
 @Fork(1)
 @Warmup(iterations = 2, time = 2)
 @Measurement(iterations = 5, time = 2)
 @OutputTimeUnit(TimeUnit.NANOSECONDS)
 @BenchmarkMode(Mode.AverageTime)
-public abstract class ProtobufObjectBench<P extends Record,G extends GeneratedMessage> {
-	/** we repeat all operations 1000 times so that measured times are nig enough */
-	private static final int OPERATION_COUNT = 1000;
+public abstract class ProtobufObjectBench<P extends Record, G extends GeneratedMessage> {
+    /** we repeat all operations 1000 times so that measured times are big enough */
+    private static final int OPERATION_COUNT = 1000;
+
+    @State(Scope.Benchmark)
+    public static class BenchmarkState<P extends Record, G extends GeneratedMessage> {
+        private Codec<P> pbjCodec;
+        private ProtobufParseFunction<byte[], G> googleByteArrayParseMethod;
+        private ProtobufParseFunction<ByteBuffer, G> googleByteBufferParseMethod;
+        private ProtobufParseFunction<InputStream, G> googleInputStreamParseMethod;
+        // input objects
+        private P pbjModelObject;
+        private G googleModelObject;
+
+        // input bytes
+        private byte[] protobuf;
+        private ByteBuffer protobufByteBuffer;
+        private BufferedData protobufDataBuffer;
+        private ByteBuffer protobufByteBufferDirect;
+        private BufferedData protobufDataBufferDirect;
+        private NonSynchronizedByteArrayInputStream bin;
+
+        // output buffers
+        private NonSynchronizedByteArrayOutputStream bout;
+        private BufferedData outDataBuffer;
+        private BufferedData outDataBufferDirect;
+        private ByteBuffer bbout;
+        private ByteBuffer bboutDirect;
+
+        public void configure(
+                P pbjModelObject,
+                Codec<P> pbjCodec,
+                ProtobufParseFunction<byte[], G> googleByteArrayParseMethod,
+                ProtobufParseFunction<ByteBuffer, G> googleByteBufferParseMethod,
+                ProtobufParseFunction<InputStream, G> googleInputStreamParseMethod) {
+            try {
+                this.pbjModelObject = pbjModelObject;
+                this.pbjCodec = pbjCodec;
+                this.googleByteArrayParseMethod = googleByteArrayParseMethod;
+                this.googleByteBufferParseMethod = googleByteBufferParseMethod;
+                this.googleInputStreamParseMethod = googleInputStreamParseMethod;
+                // write to temp data buffer and then read into byte array
+                BufferedData tempDataBuffer = BufferedData.allocate(5 * 1024 * 1024);
+                pbjCodec.write(pbjModelObject, tempDataBuffer);
+                tempDataBuffer.flip();
+                this.protobuf = new byte[(int) tempDataBuffer.remaining()];
+                tempDataBuffer.readBytes(this.protobuf);
+                // start by parsing using protoc
+                this.googleModelObject = googleByteArrayParseMethod.parse(this.protobuf);
+
+                // input buffers
+                this.protobufByteBuffer = ByteBuffer.wrap(this.protobuf);
+                this.protobufDataBuffer = BufferedData.wrap(this.protobuf);
+                this.protobufByteBufferDirect = ByteBuffer.allocateDirect(this.protobuf.length);
+                this.protobufByteBufferDirect.put(this.protobuf);
+                this.protobufDataBufferDirect = BufferedData.wrap(this.protobufByteBufferDirect);
+                this.bin = new NonSynchronizedByteArrayInputStream(this.protobuf);
+                ReadableStreamingData din = new ReadableStreamingData(this.bin);
+                // output buffers
+                this.bout = new NonSynchronizedByteArrayOutputStream();
+                WritableStreamingData dout = new WritableStreamingData(this.bout);
+                this.outDataBuffer = BufferedData.allocate(this.protobuf.length);
+                this.outDataBufferDirect = BufferedData.allocateOffHeap(this.protobuf.length);
+                this.bbout = ByteBuffer.allocate(this.protobuf.length);
+                this.bboutDirect = ByteBuffer.allocateDirect(this.protobuf.length);
+            } catch (IOException e) {
+                e.getStackTrace();
+                System.err.flush();
+                throw new RuntimeException(e);
+            }
+        }
+    }
+
+    /**
+     * Same as parsePbjByteBuffer because DataBuffer.wrap(byte[]) uses ByteBuffer today; added this
+     * because it makes result plotting easier
+     */
+    @Benchmark
+    @OperationsPerInvocation(OPERATION_COUNT)
+    public void parsePbjByteArray(BenchmarkState<P, G> benchmarkState, Blackhole blackhole)
+            throws ParseException {
+        for (int i = 0; i < 1000; i++) {
+            benchmarkState.protobufDataBuffer.resetPosition();
+            blackhole.consume(benchmarkState.pbjCodec.parse(benchmarkState.protobufDataBuffer));
+        }
+    }
+
+    @Benchmark
+    @OperationsPerInvocation(OPERATION_COUNT)
+    public void parsePbjByteBuffer(BenchmarkState<P, G> benchmarkState, Blackhole blackhole)
+            throws ParseException {
+        for (int i = 0; i < 1000; i++) {
+            benchmarkState.protobufDataBuffer.resetPosition();
+            blackhole.consume(benchmarkState.pbjCodec.parse(benchmarkState.protobufDataBuffer));
+        }
+    }
 
-	@State(Scope.Benchmark)
-	public static class BenchmarkState<P extends Record,G extends GeneratedMessage> {
-		private Codec<P> pbjCodec;
-		private ProtobufParseFunction<byte[], G> googleByteArrayParseMethod;
-		private ProtobufParseFunction<ByteBuffer, G> googleByteBufferParseMethod;
-		private ProtobufParseFunction<InputStream, G> googleInputStreamParseMethod;
-		// input objects
-		private P pbjModelObject;
-		private G googleModelObject;
+    @Benchmark
+    @OperationsPerInvocation(OPERATION_COUNT)
+    public void parsePbjByteBufferDirect(BenchmarkState<P, G> benchmarkState, Blackhole blackhole)
+            throws ParseException {
+        for (int i = 0; i < 1000; i++) {
+            benchmarkState.protobufDataBufferDirect.resetPosition();
+            blackhole.consume(
+                    benchmarkState.pbjCodec.parse(benchmarkState.protobufDataBufferDirect));
+        }
+    }
 
-		// input bytes
-		private byte[] protobuf;
-		private ByteBuffer protobufByteBuffer;
-		private BufferedData protobufDataBuffer;
-		private ByteBuffer protobufByteBufferDirect;
-		private BufferedData protobufDataBufferDirect;
-		private NonSynchronizedByteArrayInputStream bin;
+    @Benchmark
+    @OperationsPerInvocation(OPERATION_COUNT)
+    public void parsePbjInputStream(BenchmarkState<P, G> benchmarkState, Blackhole blackhole)
+            throws ParseException {
+        for (int i = 0; i < 1000; i++) {
+            benchmarkState.bin.resetPosition();
+            blackhole.consume(
+                    benchmarkState.pbjCodec.parse(new ReadableStreamingData(benchmarkState.bin)));
+        }
+    }
 
-		// output buffers
-		private NonSynchronizedByteArrayOutputStream bout;
-		private BufferedData outDataBuffer;
-		private BufferedData outDataBufferDirect;
-		private ByteBuffer bbout;
-		private ByteBuffer bboutDirect;
-		public void configure(P pbjModelObject, Codec<P> pbjCodec,
-							   ProtobufParseFunction<byte[],G> googleByteArrayParseMethod,
-							   ProtobufParseFunction<ByteBuffer,G> googleByteBufferParseMethod,
-							   ProtobufParseFunction<InputStream,G> googleInputStreamParseMethod) {
-			try {
-				this.pbjModelObject = pbjModelObject;
-				this.pbjCodec = pbjCodec;
-				this.googleByteArrayParseMethod = googleByteArrayParseMethod;
-				this.googleByteBufferParseMethod = googleByteBufferParseMethod;
-				this.googleInputStreamParseMethod = googleInputStreamParseMethod;
-				// write to temp data buffer and then read into byte array
-				BufferedData tempDataBuffer = BufferedData.allocate(5 * 1024 * 1024);
-				pbjCodec.write(pbjModelObject, tempDataBuffer);
-				tempDataBuffer.flip();
-				this.protobuf = new byte[(int) tempDataBuffer.remaining()];
-				tempDataBuffer.readBytes(this.protobuf);
-				// start by parsing using protoc
-				this.googleModelObject = googleByteArrayParseMethod.parse(this.protobuf);
+    @Benchmark
+    @OperationsPerInvocation(OPERATION_COUNT)
+    public void parseProtoCByteArray(BenchmarkState<P, G> benchmarkState, Blackhole blackhole)
+            throws IOException {
+        for (int i = 0; i < 1000; i++) {
+            blackhole.consume(
+                    benchmarkState.googleByteArrayParseMethod.parse(benchmarkState.protobuf));
+        }
+    }
 
-				// input buffers
-				this.protobufByteBuffer = ByteBuffer.wrap(this.protobuf);
-				this.protobufDataBuffer = BufferedData.wrap(this.protobuf);
-				this.protobufByteBufferDirect = ByteBuffer.allocateDirect(this.protobuf.length);
-				this.protobufByteBufferDirect.put(this.protobuf);
-				this.protobufDataBufferDirect = BufferedData.wrap(this.protobufByteBufferDirect);
-				this.bin = new NonSynchronizedByteArrayInputStream(this.protobuf);
-				ReadableStreamingData din = new ReadableStreamingData(this.bin);
-				// output buffers
-				this.bout = new NonSynchronizedByteArrayOutputStream();
-				WritableStreamingData dout = new WritableStreamingData(this.bout);
-				this.outDataBuffer = BufferedData.allocate(this.protobuf.length);
-				this.outDataBufferDirect = BufferedData.allocateOffHeap(this.protobuf.length);
-				this.bbout = ByteBuffer.allocate(this.protobuf.length);
-				this.bboutDirect = ByteBuffer.allocateDirect(this.protobuf.length);
-			} catch (IOException e) {
-				e.getStackTrace();
-				System.err.flush();
-				throw new RuntimeException(e);
-			}
-		}
-	}
+    @Benchmark
+    @OperationsPerInvocation(OPERATION_COUNT)
+    public void parseProtoCByteBufferDirect(
+            BenchmarkState<P, G> benchmarkState, Blackhole blackhole) throws IOException {
+        for (int i = 0; i < 1000; i++) {
+            benchmarkState.protobufByteBufferDirect.position(0);
+            blackhole.consume(
+                    benchmarkState.googleByteBufferParseMethod.parse(
+                            benchmarkState.protobufByteBufferDirect));
+        }
+    }
 
-	/** Same as parsePbjByteBuffer because DataBuffer.wrap(byte[]) uses ByteBuffer today, added this because makes result plotting easier */
-	@Benchmark
-	@OperationsPerInvocation(OPERATION_COUNT)
-	public void parsePbjByteArray(BenchmarkState<P,G> benchmarkState, Blackhole blackhole) throws ParseException {
-		for (int i = 0; i < 1000; i++) {
-			benchmarkState.protobufDataBuffer.resetPosition();
-			blackhole.consume(benchmarkState.pbjCodec.parse(benchmarkState.protobufDataBuffer));
-		}
-	}
+    @Benchmark
+    @OperationsPerInvocation(OPERATION_COUNT)
+    public void parseProtoCByteBuffer(BenchmarkState<P, G> benchmarkState, Blackhole blackhole)
+            throws IOException {
+        for (int i = 0; i < 1000; i++) {
+            blackhole.consume(
+                    benchmarkState.googleByteBufferParseMethod.parse(
+                            benchmarkState.protobufByteBuffer));
+        }
+    }
 
-	@Benchmark
-	@OperationsPerInvocation(OPERATION_COUNT)
-	public void parsePbjByteBuffer(BenchmarkState<P,G> benchmarkState, Blackhole blackhole) throws ParseException {
-		for (int i = 0; i < 1000; i++) {
-			benchmarkState.protobufDataBuffer.resetPosition();
-			blackhole.consume(benchmarkState.pbjCodec.parse(benchmarkState.protobufDataBuffer));
-		}
-	}
+    @Benchmark
+    @OperationsPerInvocation(OPERATION_COUNT)
+    public void parseProtoCInputStream(BenchmarkState<P, G> benchmarkState, Blackhole blackhole)
+            throws IOException {
+        for (int i = 0; i < 1000; i++) {
+            benchmarkState.bin.resetPosition();
+            blackhole.consume(
+                    benchmarkState.googleInputStreamParseMethod.parse(benchmarkState.bin));
+        }
+    }
 
-	@Benchmark
-	@OperationsPerInvocation(OPERATION_COUNT)
-	public void parsePbjByteBufferDirect(BenchmarkState<P,G> benchmarkState, Blackhole blackhole)
-			throws ParseException {
-		for (int i = 0; i < 1000; i++) {
-			benchmarkState.protobufDataBufferDirect.resetPosition();
-			blackhole.consume(benchmarkState.pbjCodec.parse(benchmarkState.protobufDataBufferDirect));
-		}
-	}
-	@Benchmark
-	@OperationsPerInvocation(OPERATION_COUNT)
-	public void parsePbjInputStream(BenchmarkState<P,G> benchmarkState, Blackhole blackhole) throws ParseException {
-		for (int i = 0; i < 1000; i++) {
-			benchmarkState.bin.resetPosition();
-			blackhole.consume(benchmarkState.pbjCodec.parse(new ReadableStreamingData(benchmarkState.bin)));
-		}
-	}
+    /**
+     * Same as writePbjByteBuffer because DataBuffer.wrap(byte[]) uses ByteBuffer today; added this
+     * because it makes result plotting easier
+     */
+    @Benchmark
+    @OperationsPerInvocation(OPERATION_COUNT)
+    public void writePbjByteArray(BenchmarkState<P, G> benchmarkState, Blackhole blackhole)
+            throws IOException {
+        for (int i = 0; i < 1000; i++) {
+            benchmarkState.outDataBuffer.reset();
+            benchmarkState.pbjCodec.write(
+                    benchmarkState.pbjModelObject, benchmarkState.outDataBuffer);
+            blackhole.consume(benchmarkState.outDataBuffer);
+        }
+    }
 
-	@Benchmark
-	@OperationsPerInvocation(OPERATION_COUNT)
-	public void parseProtoCByteArray(BenchmarkState<P,G> benchmarkState, Blackhole blackhole) throws IOException {
-		for (int i = 0; i < 1000; i++) {
-			blackhole.consume(benchmarkState.googleByteArrayParseMethod.parse(benchmarkState.protobuf));
-		}
-	}
-	@Benchmark
-	@OperationsPerInvocation(OPERATION_COUNT)
-	public void parseProtoCByteBufferDirect(BenchmarkState<P,G> benchmarkState, Blackhole blackhole) throws IOException {
-		for (int i = 0; i < 1000; i++) {
-			benchmarkState.protobufByteBufferDirect.position(0);
-			blackhole.consume(benchmarkState.googleByteBufferParseMethod.parse(benchmarkState.protobufByteBufferDirect));
-		}
-	}
-	@Benchmark
-	@OperationsPerInvocation(OPERATION_COUNT)
-	public void parseProtoCByteBuffer(BenchmarkState<P,G> benchmarkState, Blackhole blackhole) throws IOException {
-		for (int i = 0; i < 1000; i++) {
-			blackhole.consume(benchmarkState.googleByteBufferParseMethod.parse(benchmarkState.protobufByteBuffer));
-		}
-	}
-	@Benchmark
-	@OperationsPerInvocation(OPERATION_COUNT)
-	public void parseProtoCInputStream(BenchmarkState<P,G> benchmarkState, Blackhole blackhole) throws IOException {
-		for (int i = 0; i < 1000; i++) {
-			benchmarkState.bin.resetPosition();
-			blackhole.consume(benchmarkState.googleInputStreamParseMethod.parse(benchmarkState.bin));
-		}
-	}
+    @Benchmark
+    @OperationsPerInvocation(OPERATION_COUNT)
+    public void writePbjByteBuffer(BenchmarkState<P, G> benchmarkState, Blackhole blackhole)
+            throws IOException {
+        for (int i = 0; i < 1000; i++) {
+            benchmarkState.outDataBuffer.reset();
+            benchmarkState.pbjCodec.write(
+                    benchmarkState.pbjModelObject, benchmarkState.outDataBuffer);
+            blackhole.consume(benchmarkState.outDataBuffer);
+        }
+    }
 
-	/** Same as writePbjByteBuffer because DataBuffer.wrap(byte[]) uses ByteBuffer today, added this because makes result plotting easier */
-	@Benchmark
-	@OperationsPerInvocation(OPERATION_COUNT)
-	public void writePbjByteArray(BenchmarkState<P,G> benchmarkState, Blackhole blackhole) throws IOException {
-		for (int i = 0; i < 1000; i++) {
-			benchmarkState.outDataBuffer.reset();
-			benchmarkState.pbjCodec.write(benchmarkState.pbjModelObject, benchmarkState.outDataBuffer);
-			blackhole.consume(benchmarkState.outDataBuffer);
-		}
-	}
+    @Benchmark
+    @OperationsPerInvocation(OPERATION_COUNT)
+    public void writePbjByteDirect(BenchmarkState<P, G> benchmarkState, Blackhole blackhole)
+            throws IOException {
+        for (int i = 0; i < 1000; i++) {
+            benchmarkState.outDataBufferDirect.reset();
+            benchmarkState.pbjCodec.write(
+                    benchmarkState.pbjModelObject, benchmarkState.outDataBufferDirect);
+            blackhole.consume(benchmarkState.outDataBufferDirect);
+        }
+    }
 
-	@Benchmark
-	@OperationsPerInvocation(OPERATION_COUNT)
-	public void writePbjByteBuffer(BenchmarkState<P,G> benchmarkState, Blackhole blackhole) throws IOException {
-		for (int i = 0; i < 1000; i++) {
-			benchmarkState.outDataBuffer.reset();
-			benchmarkState.pbjCodec.write(benchmarkState.pbjModelObject, benchmarkState.outDataBuffer);
-			blackhole.consume(benchmarkState.outDataBuffer);
-		}
-	}
-	@Benchmark
-	@OperationsPerInvocation(OPERATION_COUNT)
-	public void writePbjByteDirect(BenchmarkState<P,G> benchmarkState, Blackhole blackhole) throws IOException {
-		for (int i = 0; i < 1000; i++) {
-			benchmarkState.outDataBufferDirect.reset();
-			benchmarkState.pbjCodec.write(benchmarkState.pbjModelObject, benchmarkState.outDataBufferDirect);
-			blackhole.consume(benchmarkState.outDataBufferDirect);
-		}
-	}
-	@Benchmark
-	@OperationsPerInvocation(OPERATION_COUNT)
-	public void writePbjOutputStream(BenchmarkState<P,G> benchmarkState, Blackhole blackhole) throws IOException {
-		for (int i = 0; i < 1000; i++) {
-			benchmarkState.bout.reset();
-			benchmarkState.pbjCodec.write(benchmarkState.pbjModelObject, new WritableStreamingData(benchmarkState.bout));
-			blackhole.consume(benchmarkState.bout.toByteArray());
-		}
-	}
+    @Benchmark
+    @OperationsPerInvocation(OPERATION_COUNT)
+    public void writePbjOutputStream(BenchmarkState<P, G> benchmarkState, Blackhole blackhole)
+            throws IOException {
+        for (int i = 0; i < 1000; i++) {
+            benchmarkState.bout.reset();
+            benchmarkState.pbjCodec.write(
+                    benchmarkState.pbjModelObject, new WritableStreamingData(benchmarkState.bout));
+            blackhole.consume(benchmarkState.bout.toByteArray());
+        }
+    }
 
-	@Benchmark
-	@OperationsPerInvocation(OPERATION_COUNT)
-	public void writeProtoCByteArray(BenchmarkState<P,G> benchmarkState, Blackhole blackhole) {
-		for (int i = 0; i < 1000; i++) {
-			blackhole.consume(benchmarkState.googleModelObject.toByteArray());
-		}
-	}
+    @Benchmark
+    @OperationsPerInvocation(OPERATION_COUNT)
+    public void writeProtoCByteArray(BenchmarkState<P, G> benchmarkState, Blackhole blackhole) {
+        for (int i = 0; i < 1000; i++) {
+            blackhole.consume(benchmarkState.googleModelObject.toByteArray());
+        }
+    }
 
-	@Benchmark
-	@OperationsPerInvocation(OPERATION_COUNT)
-	public void writeProtoCByteBuffer(BenchmarkState<P,G> benchmarkState, Blackhole blackhole) throws IOException {
-		for (int i = 0; i < 1000; i++) {
-			CodedOutputStream cout = CodedOutputStream.newInstance(benchmarkState.bbout);
-			benchmarkState.googleModelObject.writeTo(cout);
-			blackhole.consume(benchmarkState.bbout);
-		}
-	}
+    @Benchmark
+    @OperationsPerInvocation(OPERATION_COUNT)
+    public void writeProtoCByteBuffer(BenchmarkState<P, G> benchmarkState, Blackhole blackhole)
+            throws IOException {
+        for (int i = 0; i < 1000; i++) {
+            CodedOutputStream cout = CodedOutputStream.newInstance(benchmarkState.bbout);
+            benchmarkState.googleModelObject.writeTo(cout);
+            blackhole.consume(benchmarkState.bbout);
+        }
+    }
 
-	@Benchmark
-	@OperationsPerInvocation(OPERATION_COUNT)
-	public void writeProtoCByteBufferDirect(BenchmarkState<P,G> benchmarkState, Blackhole blackhole) throws IOException {
-		for (int i = 0; i < 1000; i++) {
-			CodedOutputStream cout = CodedOutputStream.newInstance(benchmarkState.bboutDirect);
-			benchmarkState.googleModelObject.writeTo(cout);
-			blackhole.consume(benchmarkState.bbout);
-		}
-	}
+    @Benchmark
+    @OperationsPerInvocation(OPERATION_COUNT)
+    public void writeProtoCByteBufferDirect(
+            BenchmarkState<P, G> benchmarkState, Blackhole blackhole) throws IOException {
+        for (int i = 0; i < 1000; i++) {
+            CodedOutputStream cout = CodedOutputStream.newInstance(benchmarkState.bboutDirect);
+            benchmarkState.googleModelObject.writeTo(cout);
+            blackhole.consume(benchmarkState.bbout);
+        }
+    }
 
-	@Benchmark
-	@OperationsPerInvocation(OPERATION_COUNT)
-	public void writeProtoCOutputStream(BenchmarkState<P,G> benchmarkState, Blackhole blackhole) throws IOException {
-		for (int i = 0; i < 1000; i++) {
-			benchmarkState.bout.reset();
-			benchmarkState.googleModelObject.writeTo(benchmarkState.bout);
-			blackhole.consume(benchmarkState.bout.toByteArray());
-		}
-	}
+    @Benchmark
+    @OperationsPerInvocation(OPERATION_COUNT)
+    public void writeProtoCOutputStream(BenchmarkState<P, G> benchmarkState, Blackhole blackhole)
+            throws IOException {
+        for (int i = 0; i < 1000; i++) {
+            benchmarkState.bout.reset();
+            benchmarkState.googleModelObject.writeTo(benchmarkState.bout);
+            blackhole.consume(benchmarkState.bout.toByteArray());
+        }
+    }
 
-	/** Custom interface for method references as java.util.Function does not throw IOException */
-	public interface ProtobufParseFunction<D, G> {
-		G parse(D data) throws IOException;
-	}
+    /** Custom functional interface for method references, since java.util.function.Function cannot throw IOException. */
+    public interface ProtobufParseFunction<D, G> {
+        G parse(D data) throws IOException;
+    }
 
-	@State(Scope.Benchmark)
-	public static class EverythingBench extends ProtobufObjectBench<Everything, com.hedera.pbj.test.proto.java.Everything> {
-		@Setup
-		public void setup(BenchmarkState<Everything, com.hedera.pbj.test.proto.java.Everything> benchmarkState) {
-			benchmarkState.configure(EverythingTestData.EVERYTHING,
-					Everything.PROTOBUF,
-					com.hedera.pbj.test.proto.java.Everything::parseFrom,
-					com.hedera.pbj.test.proto.java.Everything::parseFrom,
-					com.hedera.pbj.test.proto.java.Everything::parseFrom);
-		}
-	}
+    @State(Scope.Benchmark)
+    public static class EverythingBench
+            extends ProtobufObjectBench<Everything, com.hedera.pbj.test.proto.java.Everything> {
+        @Setup
+        public void setup(
+                BenchmarkState<Everything, com.hedera.pbj.test.proto.java.Everything>
+                        benchmarkState) {
+            benchmarkState.configure(
+                    EverythingTestData.EVERYTHING,
+                    Everything.PROTOBUF,
+                    com.hedera.pbj.test.proto.java.Everything::parseFrom,
+                    com.hedera.pbj.test.proto.java.Everything::parseFrom,
+                    com.hedera.pbj.test.proto.java.Everything::parseFrom);
+        }
+    }
 
-	@State(Scope.Benchmark)
-	public static class TimeStampBench extends ProtobufObjectBench<Timestamp , com.hederahashgraph.api.proto.java.Timestamp> {
-		@Setup
-		public void setup(BenchmarkState<Timestamp , com.hederahashgraph.api.proto.java.Timestamp> benchmarkState) {
-			benchmarkState.configure(new Timestamp(5678L, 1234),
-					Timestamp.PROTOBUF,
-					com.hederahashgraph.api.proto.java.Timestamp::parseFrom,
-					com.hederahashgraph.api.proto.java.Timestamp::parseFrom,
-					com.hederahashgraph.api.proto.java.Timestamp::parseFrom);
-		}
-	}
+    @State(Scope.Benchmark)
+    public static class TimeStampBench
+            extends ProtobufObjectBench<Timestamp, com.hederahashgraph.api.proto.java.Timestamp> {
+        @Setup
+        public void setup(
+                BenchmarkState<Timestamp, com.hederahashgraph.api.proto.java.Timestamp>
+                        benchmarkState) {
+            benchmarkState.configure(
+                    new Timestamp(5678L, 1234),
+                    Timestamp.PROTOBUF,
+                    com.hederahashgraph.api.proto.java.Timestamp::parseFrom,
+                    com.hederahashgraph.api.proto.java.Timestamp::parseFrom,
+                    com.hederahashgraph.api.proto.java.Timestamp::parseFrom);
+        }
+    }
 
-	@State(Scope.Benchmark)
-	public static class AccountDetailsBench extends ProtobufObjectBench<com.hedera.hapi.node.token.AccountDetails, GetAccountDetailsResponse.AccountDetails> {
-		@Setup
-		public void setup(BenchmarkState<com.hedera.hapi.node.token.AccountDetails, GetAccountDetailsResponse.AccountDetails> benchmarkState) {
-			benchmarkState.configure(AccountDetailsPbj.ACCOUNT_DETAILS,
-					AccountDetails.PROTOBUF,
-					GetAccountDetailsResponse.AccountDetails::parseFrom,
-					GetAccountDetailsResponse.AccountDetails::parseFrom,
-					GetAccountDetailsResponse.AccountDetails::parseFrom);
-		}
-	}
+    @State(Scope.Benchmark)
+    public static class AccountDetailsBench
+            extends ProtobufObjectBench<
+                    com.hedera.hapi.node.token.AccountDetails,
+                    GetAccountDetailsResponse.AccountDetails> {
+        @Setup
+        public void setup(
+                BenchmarkState<
+                                com.hedera.hapi.node.token.AccountDetails,
+                                GetAccountDetailsResponse.AccountDetails>
+                        benchmarkState) {
+            benchmarkState.configure(
+                    AccountDetailsPbj.ACCOUNT_DETAILS,
+                    AccountDetails.PROTOBUF,
+                    GetAccountDetailsResponse.AccountDetails::parseFrom,
+                    GetAccountDetailsResponse.AccountDetails::parseFrom,
+                    GetAccountDetailsResponse.AccountDetails::parseFrom);
+        }
+    }
 }
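For reference, the benchmark methods above all follow the same shape: loop OPERATION_COUNT times inside one @Benchmark invocation and declare @OperationsPerInvocation(OPERATION_COUNT) so JMH reports the average time of a single write rather than of the whole 1000-iteration batch. A minimal sketch of that pattern is below; it assumes only JMH on the classpath, and the class, field, and method names (PatternSketch, payload, copyPayload) are illustrative, not part of this patch.

    import java.util.concurrent.TimeUnit;
    import org.openjdk.jmh.annotations.*;
    import org.openjdk.jmh.infra.Blackhole;

    @State(Scope.Benchmark)
    @OutputTimeUnit(TimeUnit.NANOSECONDS)
    @BenchmarkMode(Mode.AverageTime)
    public class PatternSketch {
        private static final int OPERATION_COUNT = 1000;
        private final byte[] payload = new byte[64];

        @Benchmark
        @OperationsPerInvocation(OPERATION_COUNT)
        public void copyPayload(Blackhole blackhole) {
            for (int i = 0; i < OPERATION_COUNT; i++) {
                // stand-in for "reset buffer, write the model object, consume the output"
                blackhole.consume(payload.clone());
            }
        }
    }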
diff --git a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/VarIntBench.java b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/VarIntBench.java
index 4ae4a8f2..ea0aa9a0 100644
--- a/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/VarIntBench.java
+++ b/pbj-integration-tests/src/jmh/java/com/hedera/pbj/integration/jmh/VarIntBench.java
@@ -9,15 +9,14 @@
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
 import com.hedera.pbj.runtime.io.buffer.Bytes;
 import com.hedera.pbj.runtime.io.stream.ReadableStreamingData;
-import java.io.InputStream;
-import org.openjdk.jmh.annotations.*;
-import org.openjdk.jmh.infra.Blackhole;
-
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.nio.*;
 import java.util.Random;
 import java.util.concurrent.TimeUnit;
+import org.openjdk.jmh.annotations.*;
+import org.openjdk.jmh.infra.Blackhole;
 
 @SuppressWarnings("unused")
 @State(Scope.Benchmark)
@@ -28,233 +27,238 @@
 @BenchmarkMode(Mode.AverageTime)
 public class VarIntBench {
 
-	ByteBuffer buffer = ByteBuffer.allocate(256*1024);
-	final ByteBuffer bufferDirect = ByteBuffer.allocateDirect(256*1024);
-	final BufferedData dataBuffer = BufferedData.wrap(buffer);
-	final BufferedData dataBufferDirect = BufferedData.wrap(bufferDirect);
-
-	Bytes bytes = Bytes.EMPTY;
+    ByteBuffer buffer = ByteBuffer.allocate(256 * 1024);
+    final ByteBuffer bufferDirect = ByteBuffer.allocateDirect(256 * 1024);
+    final BufferedData dataBuffer = BufferedData.wrap(buffer);
+    final BufferedData dataBufferDirect = BufferedData.wrap(bufferDirect);
 
-	InputStream bais = null;
-	ReadableStreamingData rsd = null;
+    Bytes bytes = Bytes.EMPTY;
 
-	InputStream baisNonSync = null;
-	ReadableStreamingData rsdNonSync = null;
+    InputStream bais = null;
+    ReadableStreamingData rsd = null;
 
-	private final int[] offsets = new int[1201];
+    InputStream baisNonSync = null;
+    ReadableStreamingData rsdNonSync = null;
 
-	public VarIntBench() {
-		try {
-			CodedOutputStream cout = CodedOutputStream.newInstance(buffer);
-			Random random = new Random(9387498731984L);
-			int pos = 0;
-			offsets[pos++] = 0;
-			for (int i = 0; i < 600; i++) {
-				cout.writeUInt64NoTag(random.nextLong(0,128));
-				offsets[pos++] = cout.getTotalBytesWritten();
-			}
-			for (int i = 0; i < 150; i++) {
-				cout.writeUInt64NoTag(random.nextLong(128,256));
-				offsets[pos++] = cout.getTotalBytesWritten();
-			}
-			for (int i = 0; i < 150; i++) {
-				cout.writeUInt64NoTag(random.nextLong(256, Integer.MAX_VALUE));
-				offsets[pos++] = cout.getTotalBytesWritten();
-			}
-			for (int i = 0; i < 150; i++) {
-				cout.writeUInt64NoTag(random.nextLong(Integer.MIN_VALUE, Integer.MAX_VALUE));
-				offsets[pos++] = cout.getTotalBytesWritten();
-			}
-			for (int i = 0; i < 150; i++) {
-				cout.writeUInt64NoTag(random.nextLong(0, Long.MAX_VALUE));
-				offsets[pos++] = cout.getTotalBytesWritten();
-			}
-			cout.flush();
-			// copy to direct buffer
-			buffer.flip();
-			bufferDirect.put(buffer);
-			byte[] bts = new byte[buffer.limit()];
-			for (int i = 0; i < buffer.limit(); i++) {
-				bts[i] = buffer.get(i);
-			}
-			bytes = Bytes.wrap(bts);
-			bais = new ByteArrayInputStream(bts.clone());
-			rsd = new ReadableStreamingData(bais);
-			baisNonSync = new NonSynchronizedByteArrayInputStream(bts.clone());
-			rsdNonSync = new ReadableStreamingData(baisNonSync);
-		} catch (IOException e){
-			e.printStackTrace();
-		}
-	}
+    private final int[] offsets = new int[1201];
 
-	@Benchmark
-	@OperationsPerInvocation(1200)
-	public void dataBufferRead(Blackhole blackhole) throws IOException {
-		dataBuffer.reset();
-		for (int i = 0; i < 1200; i++) {
-			blackhole.consume(dataBuffer.readVarLong(false));
-		}
-	}
+    public VarIntBench() {
+        try {
+            CodedOutputStream cout = CodedOutputStream.newInstance(buffer);
+            Random random = new Random(9387498731984L);
+            int pos = 0;
+            offsets[pos++] = 0;
+            for (int i = 0; i < 600; i++) {
+                cout.writeUInt64NoTag(random.nextLong(0, 128));
+                offsets[pos++] = cout.getTotalBytesWritten();
+            }
+            for (int i = 0; i < 150; i++) {
+                cout.writeUInt64NoTag(random.nextLong(128, 256));
+                offsets[pos++] = cout.getTotalBytesWritten();
+            }
+            for (int i = 0; i < 150; i++) {
+                cout.writeUInt64NoTag(random.nextLong(256, Integer.MAX_VALUE));
+                offsets[pos++] = cout.getTotalBytesWritten();
+            }
+            for (int i = 0; i < 150; i++) {
+                cout.writeUInt64NoTag(random.nextLong(Integer.MIN_VALUE, Integer.MAX_VALUE));
+                offsets[pos++] = cout.getTotalBytesWritten();
+            }
+            for (int i = 0; i < 150; i++) {
+                cout.writeUInt64NoTag(random.nextLong(0, Long.MAX_VALUE));
+                offsets[pos++] = cout.getTotalBytesWritten();
+            }
+            cout.flush();
+            // copy to direct buffer
+            buffer.flip();
+            bufferDirect.put(buffer);
+            byte[] bts = new byte[buffer.limit()];
+            for (int i = 0; i < buffer.limit(); i++) {
+                bts[i] = buffer.get(i);
+            }
+            bytes = Bytes.wrap(bts);
+            bais = new ByteArrayInputStream(bts.clone());
+            rsd = new ReadableStreamingData(bais);
+            baisNonSync = new NonSynchronizedByteArrayInputStream(bts.clone());
+            rsdNonSync = new ReadableStreamingData(baisNonSync);
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+    }
 
-	@Benchmark
-	@OperationsPerInvocation(1200)
-	public void dataBufferGet(Blackhole blackhole) throws IOException {
-		dataBuffer.reset();
-		int offset = 0;
-		for (int i = 0; i < 1200; i++) {
-			blackhole.consume(dataBuffer.getVarLong(offsets[offset++], false));
-		}
-	}
+    @Benchmark
+    @OperationsPerInvocation(1200)
+    public void dataBufferRead(Blackhole blackhole) throws IOException {
+        dataBuffer.reset();
+        for (int i = 0; i < 1200; i++) {
+            blackhole.consume(dataBuffer.readVarLong(false));
+        }
+    }
 
-	@Benchmark
-	@OperationsPerInvocation(1200)
-	public void dataBufferDirectRead(Blackhole blackhole) throws IOException {
-		dataBufferDirect.reset();
-		for (int i = 0; i < 1200; i++) {
-			blackhole.consume(dataBufferDirect.readVarLong(false));
-		}
-	}
+    @Benchmark
+    @OperationsPerInvocation(1200)
+    public void dataBufferGet(Blackhole blackhole) throws IOException {
+        dataBuffer.reset();
+        int offset = 0;
+        for (int i = 0; i < 1200; i++) {
+            blackhole.consume(dataBuffer.getVarLong(offsets[offset++], false));
+        }
+    }
 
-	@Benchmark
-	@OperationsPerInvocation(1200)
-	public void dataBytesGet(Blackhole blackhole) throws IOException {
-		int offset = 0;
-		for (int i = 0; i < 1200; i++) {
-			blackhole.consume(bytes.getVarLong(offsets[offset++], false));
-		}
-	}
+    @Benchmark
+    @OperationsPerInvocation(1200)
+    public void dataBufferDirectRead(Blackhole blackhole) throws IOException {
+        dataBufferDirect.reset();
+        for (int i = 0; i < 1200; i++) {
+            blackhole.consume(dataBufferDirect.readVarLong(false));
+        }
+    }
 
-	@Benchmark
-	@OperationsPerInvocation(1200)
-	public void dataSyncInputStreamRead(Blackhole blackhole) throws IOException {
-		bais.reset();
-		for (int i = 0; i < 1200; i++) {
-			blackhole.consume(rsd.readVarLong(false));
-		}
-	}
+    @Benchmark
+    @OperationsPerInvocation(1200)
+    public void dataBytesGet(Blackhole blackhole) throws IOException {
+        int offset = 0;
+        for (int i = 0; i < 1200; i++) {
+            blackhole.consume(bytes.getVarLong(offsets[offset++], false));
+        }
+    }
 
-	@Benchmark
-	@OperationsPerInvocation(1200)
-	public void dataNonSyncInputStreamRead(Blackhole blackhole) throws IOException {
-		baisNonSync.reset();
-		for (int i = 0; i < 1200; i++) {
-			blackhole.consume(rsdNonSync.readVarLong(false));
-		}
-	}
+    @Benchmark
+    @OperationsPerInvocation(1200)
+    public void dataSyncInputStreamRead(Blackhole blackhole) throws IOException {
+        bais.reset();
+        for (int i = 0; i < 1200; i++) {
+            blackhole.consume(rsd.readVarLong(false));
+        }
+    }
 
-	@Benchmark
-	@OperationsPerInvocation(1200)
-	public void richardGet(Blackhole blackhole) throws MalformedProtobufException {
-		int offset = 0;
-		buffer.clear();
-		for (int i = 0; i < 1200; i++) {
-			blackhole.consume(getVarLongRichard(offsets[offset++], buffer));
-		}
-	}
+    @Benchmark
+    @OperationsPerInvocation(1200)
+    public void dataNonSyncInputStreamRead(Blackhole blackhole) throws IOException {
+        baisNonSync.reset();
+        for (int i = 0; i < 1200; i++) {
+            blackhole.consume(rsdNonSync.readVarLong(false));
+        }
+    }
 
-	@Benchmark
-	@OperationsPerInvocation(1200)
-	public void googleRead(Blackhole blackhole) throws IOException {
-		buffer.clear();
-		final CodedInputStream codedInputStream = CodedInputStream.newInstance(buffer);
-		for (int i = 0; i < 1200; i++) {
-			blackhole.consume(codedInputStream.readRawVarint64());
-		}
-	}
+    @Benchmark
+    @OperationsPerInvocation(1200)
+    public void richardGet(Blackhole blackhole) throws MalformedProtobufException {
+        int offset = 0;
+        buffer.clear();
+        for (int i = 0; i < 1200; i++) {
+            blackhole.consume(getVarLongRichard(offsets[offset++], buffer));
+        }
+    }
 
-	@Benchmark
-	@OperationsPerInvocation(1200)
-	public void googleDirecRead(Blackhole blackhole) throws IOException {
-		bufferDirect.clear();
-		final CodedInputStream codedInputStream = CodedInputStream.newInstance(bufferDirect);
-		for (int i = 0; i < 1200; i++) {
-			blackhole.consume(codedInputStream.readRawVarint64());
-		}
-	}
+    @Benchmark
+    @OperationsPerInvocation(1200)
+    public void googleRead(Blackhole blackhole) throws IOException {
+        buffer.clear();
+        final CodedInputStream codedInputStream = CodedInputStream.newInstance(buffer);
+        for (int i = 0; i < 1200; i++) {
+            blackhole.consume(codedInputStream.readRawVarint64());
+        }
+    }
 
-	@Benchmark
-	@OperationsPerInvocation(1200)
-	public void googleSlowPathRead(Blackhole blackhole) throws MalformedProtobufException {
-		buffer.clear();
-		for (int i = 0; i < 1200; i++) {
-			blackhole.consume(readRawVarint64SlowPath(buffer));
-		}
-	}
+    @Benchmark
+    @OperationsPerInvocation(1200)
+    public void googleDirectRead(Blackhole blackhole) throws IOException {
+        bufferDirect.clear();
+        final CodedInputStream codedInputStream = CodedInputStream.newInstance(bufferDirect);
+        for (int i = 0; i < 1200; i++) {
+            blackhole.consume(codedInputStream.readRawVarint64());
+        }
+    }
 
-	@Benchmark
-	@OperationsPerInvocation(1200)
-	public void googleSlowPathDirectRead(Blackhole blackhole) throws MalformedProtobufException {
-		bufferDirect.clear();
-		for (int i = 0; i < 1200; i++) {
-			blackhole.consume(readRawVarint64SlowPath(bufferDirect));
-		}
-	}
+    @Benchmark
+    @OperationsPerInvocation(1200)
+    public void googleSlowPathRead(Blackhole blackhole) throws MalformedProtobufException {
+        buffer.clear();
+        for (int i = 0; i < 1200; i++) {
+            blackhole.consume(readRawVarint64SlowPath(buffer));
+        }
+    }
 
-	private static long readRawVarint64SlowPath(ByteBuffer buf) throws MalformedProtobufException {
-		long result = 0;
-		for (int shift = 0; shift < 64; shift += 7) {
-			final byte b = buf.get();
-			result |= (long) (b & 0x7F) << shift;
-			if ((b & 0x80) == 0) {
-				return result;
-			}
-		}
-		throw new MalformedProtobufException("Malformed varInt");
-	}
+    @Benchmark
+    @OperationsPerInvocation(1200)
+    public void googleSlowPathDirectRead(Blackhole blackhole) throws MalformedProtobufException {
+        bufferDirect.clear();
+        for (int i = 0; i < 1200; i++) {
+            blackhole.consume(readRawVarint64SlowPath(bufferDirect));
+        }
+    }
 
+    private static long readRawVarint64SlowPath(ByteBuffer buf) throws MalformedProtobufException {
+        long result = 0;
+        for (int shift = 0; shift < 64; shift += 7) {
+            final byte b = buf.get();
+            result |= (long) (b & 0x7F) << shift;
+            if ((b & 0x80) == 0) {
+                return result;
+            }
+        }
+        throw new MalformedProtobufException("Malformed varInt");
+    }
 
-	private static final int VARINT_CONTINUATION_MASK = 0b1000_0000;
-	private static final int VARINT_DATA_MASK = 0b0111_1111;
-	private static final int NUM_BITS_PER_VARINT_BYTE = 7;
+    private static final int VARINT_CONTINUATION_MASK = 0b1000_0000;
+    private static final int VARINT_DATA_MASK = 0b0111_1111;
+    private static final int NUM_BITS_PER_VARINT_BYTE = 7;
 
-	public static long getVarLongRichard(int offset, ByteBuffer buf) throws MalformedProtobufException {
-		// Protobuf encodes smaller integers with fewer bytes than larger integers. It takes a full byte
-		// to encode 7 bits of information. So, if all 64 bits of a long are in use (for example, if the
-		// leading bit is 1, or even all bits are 1) then it will take 10 bytes to transmit what would
-		// have otherwise been 8 bytes of data!
-		//
-		// Thus, at most, reading a varint should involve reading 10 bytes of data.
-		//
-		// The leading bit of each byte is a continuation bit. If set, another byte will follow.
-		// If we read 10 bytes in sequence with a continuation bit set, then we have a malformed
-		// byte stream.
-		// The bytes come least to most significant 7 bits. So the first byte we read represents
-		// the lowest 7 bytes, then the next byte is the next highest 7 bytes, etc.
+    public static long getVarLongRichard(int offset, ByteBuffer buf)
+            throws MalformedProtobufException {
+        // Protobuf encodes smaller integers with fewer bytes than larger integers. It takes a
+        // full byte to encode 7 bits of information. So, if all 64 bits of a long are in use
+        // (for example, if the leading bit is 1, or even all bits are 1) then it will take
+        // 10 bytes to transmit what would have otherwise been 8 bytes of data!
+        //
+        // Thus, at most, reading a varint should involve reading 10 bytes of data.
+        //
+        // The leading bit of each byte is a continuation bit. If set, another byte will follow.
+        // If we read 10 bytes in sequence with a continuation bit set, then we have a malformed
+        // byte stream.
+        // The 7-bit groups arrive least-significant first: the first byte we read holds the
+        // lowest 7 bits, the next byte the next-highest 7 bits, and so on.
 
-		// The final value.
-		long value = 0;
-		// The amount to shift the bits we read by before AND with the value
-		int shift = -NUM_BITS_PER_VARINT_BYTE;
+        // The final value.
+        long value = 0;
+        // The amount to shift the bits we read by before OR-ing them into the value
+        int shift = -NUM_BITS_PER_VARINT_BYTE;
 
-		// This method works with heap byte buffers only
-		final byte[] arr = buf.array();
-		final int arrOffset = buf.arrayOffset() + offset;
+        // This method works with heap byte buffers only
+        final byte[] arr = buf.array();
+        final int arrOffset = buf.arrayOffset() + offset;
 
-		int i = 0;
-		for (; i < 10; i++) {
-			// Use UnsafeUtil instead of arr[arrOffset + i] to avoid array range checks
-			byte b = UnsafeUtils.getArrayByteNoChecks(arr, arrOffset + i);
-			value |= (long) (b & 0x7F) << (shift += NUM_BITS_PER_VARINT_BYTE);
+        int i = 0;
+        for (; i < 10; i++) {
+            // Use UnsafeUtils instead of arr[arrOffset + i] to avoid array range checks
+            byte b = UnsafeUtils.getArrayByteNoChecks(arr, arrOffset + i);
+            value |= (long) (b & 0x7F) << (shift += NUM_BITS_PER_VARINT_BYTE);
 
-			if (b >= 0) {
-				return value;
-			}
-		}
-		// If we read 10 in a row all with the leading continuation bit set, then throw a malformed
-		// protobuf exception
-		throw new MalformedProtobufException("Malformed var int");
-	}
+            if (b >= 0) {
+                return value;
+            }
+        }
+        // If we read 10 in a row all with the leading continuation bit set, then throw a malformed
+        // protobuf exception
+        throw new MalformedProtobufException("Malformed var int");
+    }
 
-	public static void main(String[] args) throws Exception {
-		final Blackhole blackhole = new Blackhole(
-				"Today's password is swordfish. I understand instantiating Blackholes directly is dangerous.");
-		final VarIntBench bench = new VarIntBench();
-		bench.dataBufferRead(blackhole);
-		bench.dataBufferGet(blackhole);
-		bench.dataBufferDirectRead(blackhole);
-		bench.dataBytesGet(blackhole);
-		bench.dataSyncInputStreamRead(blackhole);
-		bench.dataNonSyncInputStreamRead(blackhole);
-		bench.googleRead(blackhole);
-	}
+    public static void main(String[] args) throws Exception {
+        final Blackhole blackhole =
+                new Blackhole(
+                        "Today's password is swordfish. I understand instantiating Blackholes"
+                                + " directly is dangerous.");
+        final VarIntBench bench = new VarIntBench();
+        bench.dataBufferRead(blackhole);
+        bench.dataBufferGet(blackhole);
+        bench.dataBufferDirectRead(blackhole);
+        bench.dataBytesGet(blackhole);
+        bench.dataSyncInputStreamRead(blackhole);
+        bench.dataNonSyncInputStreamRead(blackhole);
+        bench.googleRead(blackhole);
+    }
 }
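For reference, a standalone sketch of the varint layout that getVarLongRichard's comments describe: each encoded byte carries 7 payload bits, least-significant group first, and the high bit flags continuation. Everything here (class and method names, the sample input) is illustrative and not project code.

    public final class VarintExample {
        static long decode(byte[] in) {
            long value = 0;
            for (int i = 0, shift = 0; i < in.length && shift < 64; i++, shift += 7) {
                final byte b = in[i];
                value |= (long) (b & 0x7F) << shift; // merge the next 7 bits
                if ((b & 0x80) == 0) {               // continuation bit clear: done
                    return value;
                }
            }
            throw new IllegalArgumentException("Malformed varint");
        }

        public static void main(String[] args) {
            // 150 encodes as 0x96 0x01: 0x96 = 1|0010110 (low 7 bits, continue),
            // 0x01 = 0|0000001 (next 7 bits, stop) -> 0b0000001_0010110 = 150.
            System.out.println(decode(new byte[] {(byte) 0x96, 0x01})); // prints 150
        }
    }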
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/AccountDetailsPbj.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/AccountDetailsPbj.java
index eac128b8..22ca6d4c 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/AccountDetailsPbj.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/AccountDetailsPbj.java
@@ -7,126 +7,160 @@
 import com.hedera.hapi.node.token.GrantedNftAllowance;
 import com.hedera.hapi.node.token.GrantedTokenAllowance;
 import com.hedera.pbj.runtime.io.buffer.Bytes;
-
 import java.util.List;
 import java.util.Random;
 
-/**
- * Create a complex account details we can use as benchmark
- */
+/** Creates a complex AccountDetails object we can use as a benchmark. */
 public class AccountDetailsPbj {
 
     /** Random for generating sample data */
     private static final Random RANDOM = new Random(351343135153L);
 
-    /**
-     * Sample test object for JMH benchmarks
-     */
+    /** Sample test object for JMH benchmarks */
     public static final AccountDetails ACCOUNT_DETAILS =
             new AccountDetails.Builder()
-                    .accountId(new AccountID.Builder()
-                            .shardNum(0)
-                            .realmNum(0)
-                            .accountNum(posLong())
-                            .build())
+                    .accountId(
+                            new AccountID.Builder()
+                                    .shardNum(0)
+                                    .realmNum(0)
+                                    .accountNum(posLong())
+                                    .build())
                     .contractAccountId(randomHex(64))
                     .deleted(false)
-                    .proxyAccountId(new AccountID.Builder()
-                            .shardNum(0)
-                            .realmNum(0)
-                            .accountNum(posLong())
-                            .alias(randomBytes(32))
-                            .build())
+                    .proxyAccountId(
+                            new AccountID.Builder()
+                                    .shardNum(0)
+                                    .realmNum(0)
+                                    .accountNum(posLong())
+                                    .alias(randomBytes(32))
+                                    .build())
                     .proxyReceived(RANDOM.nextLong())
-                    .key(new Key.Builder()
-                            .keyList(new KeyList.Builder()
-                                    .keys(List.of(
-                                            new Key.Builder()
-                                                    .ed25519(randomBytes(32))
-                                                    .build(),
-                                            new Key.Builder()
-                                                    .ecdsa384(randomBytes(48))
-                                                    .build(),
-                                            new Key.Builder()
-                                                    .contractID(new ContractID.Builder()
-                                                            .shardNum(0)
-                                                            .realmNum(0)
-                                                            .contractNum(posLong())
-                                                            .build())
-                                                    .build()
-                                    ))
+                    .key(
+                            new Key.Builder()
+                                    .keyList(
+                                            new KeyList.Builder()
+                                                    .keys(
+                                                            List.of(
+                                                                    new Key.Builder()
+                                                                            .ed25519(
+                                                                                    randomBytes(32))
+                                                                            .build(),
+                                                                    new Key.Builder()
+                                                                            .ecdsa384(
+                                                                                    randomBytes(48))
+                                                                            .build(),
+                                                                    new Key.Builder()
+                                                                            .contractID(
+                                                                                    new ContractID
+                                                                                                    .Builder()
+                                                                                            .shardNum(
+                                                                                                    0)
+                                                                                            .realmNum(
+                                                                                                    0)
+                                                                                            .contractNum(
+                                                                                                    posLong())
+                                                                                            .build())
+                                                                            .build()))
+                                                    .build())
                                     .build())
-                            .build())
                     .balance(RANDOM.nextLong())
                     .receiverSigRequired(true)
-                    .expirationTime(new Timestamp.Builder()
-                            .nanos(RANDOM.nextInt(0, Integer.MAX_VALUE))
-                            .seconds(RANDOM.nextLong(0, Long.MAX_VALUE))
-                            .build())
-                    .autoRenewPeriod(new Duration.Builder()
-                            .seconds(RANDOM.nextLong(0, Long.MAX_VALUE))
-                            .build())
-                    .tokenRelationships(List.of(
-                            new TokenRelationship.Builder()
-                                    .balance(RANDOM.nextLong(1, Long.MAX_VALUE))
-                                    .decimals(RANDOM.nextInt(0, Integer.MAX_VALUE))
-                                    .automaticAssociation(true)
-                                    .symbol(randomHex(3))
-                                    .tokenId(new TokenID(posLong(),posLong(),posLong()))
-                                    .build(),
-                            new TokenRelationship.Builder()
-                                    .balance(RANDOM.nextLong(1, Long.MAX_VALUE))
-                                    .decimals(RANDOM.nextInt(0, Integer.MAX_VALUE))
-                                    .automaticAssociation(true)
-                                    .symbol(randomHex(3))
-                                    .tokenId(new TokenID(posLong(),posLong(),posLong()))
-                                    .build()
-                    ))
+                    .expirationTime(
+                            new Timestamp.Builder()
+                                    .nanos(RANDOM.nextInt(0, Integer.MAX_VALUE))
+                                    .seconds(RANDOM.nextLong(0, Long.MAX_VALUE))
+                                    .build())
+                    .autoRenewPeriod(
+                            new Duration.Builder()
+                                    .seconds(RANDOM.nextLong(0, Long.MAX_VALUE))
+                                    .build())
+                    .tokenRelationships(
+                            List.of(
+                                    new TokenRelationship.Builder()
+                                            .balance(RANDOM.nextLong(1, Long.MAX_VALUE))
+                                            .decimals(RANDOM.nextInt(0, Integer.MAX_VALUE))
+                                            .automaticAssociation(true)
+                                            .symbol(randomHex(3))
+                                            .tokenId(new TokenID(posLong(), posLong(), posLong()))
+                                            .build(),
+                                    new TokenRelationship.Builder()
+                                            .balance(RANDOM.nextLong(1, Long.MAX_VALUE))
+                                            .decimals(RANDOM.nextInt(0, Integer.MAX_VALUE))
+                                            .automaticAssociation(true)
+                                            .symbol(randomHex(3))
+                                            .tokenId(new TokenID(posLong(), posLong(), posLong()))
+                                            .build()))
                     .memo(randomHex(80))
                     .ownedNfts(RANDOM.nextLong(10, Integer.MAX_VALUE))
                     .maxAutomaticTokenAssociations(RANDOM.nextInt(10, Integer.MAX_VALUE))
                     .alias(randomBytes(32))
                     .ledgerId(randomBytes(32))
-                    .grantedCryptoAllowances(List.of(
-                            new GrantedCryptoAllowance(new AccountID.Builder()
-                                    .shardNum(0).realmNum(0).accountNum(posLong())
-                                    .build()
-                                    ,posLong()),
-                            new GrantedCryptoAllowance(new AccountID.Builder()
-                                    .shardNum(0).realmNum(0).accountNum(posLong())
-                                    .build()
-                                    ,posLong())
-                    ))
-                    .grantedNftAllowances(List.of(
-                            new GrantedNftAllowance(new TokenID.Builder()
-                                    .shardNum(0).realmNum(0).tokenNum(posLong())
-                                    .build()
-                                    ,new AccountID.Builder()
-                                    .shardNum(0).realmNum(0).accountNum(posLong())
-                                    .build()),
-                            new GrantedNftAllowance(new TokenID.Builder()
-                                    .shardNum(0).realmNum(0).tokenNum(posLong())
-                                    .build()
-                                    ,new AccountID.Builder()
-                                    .shardNum(0).realmNum(0).accountNum(posLong())
-                                    .build())
-                    ))
-                    .grantedTokenAllowances(List.of(
-                            new GrantedTokenAllowance(new TokenID.Builder()
-                                    .shardNum(0).realmNum(0).tokenNum(posLong())
-                                    .build()
-                                    ,new AccountID.Builder()
-                                    .shardNum(0).realmNum(0).accountNum(posLong())
-                                    .build(),
-                                    posLong()),
-                            new GrantedTokenAllowance(new TokenID.Builder()
-                                    .shardNum(0).realmNum(0).tokenNum(posLong())
-                                    .build()
-                                    ,new AccountID.Builder()
-                                    .shardNum(0).realmNum(0).accountNum(posLong())
-                                    .build(),
-                                    posLong())
-                    ))
+                    .grantedCryptoAllowances(
+                            List.of(
+                                    new GrantedCryptoAllowance(
+                                            new AccountID.Builder()
+                                                    .shardNum(0)
+                                                    .realmNum(0)
+                                                    .accountNum(posLong())
+                                                    .build(),
+                                            posLong()),
+                                    new GrantedCryptoAllowance(
+                                            new AccountID.Builder()
+                                                    .shardNum(0)
+                                                    .realmNum(0)
+                                                    .accountNum(posLong())
+                                                    .build(),
+                                            posLong())))
+                    .grantedNftAllowances(
+                            List.of(
+                                    new GrantedNftAllowance(
+                                            new TokenID.Builder()
+                                                    .shardNum(0)
+                                                    .realmNum(0)
+                                                    .tokenNum(posLong())
+                                                    .build(),
+                                            new AccountID.Builder()
+                                                    .shardNum(0)
+                                                    .realmNum(0)
+                                                    .accountNum(posLong())
+                                                    .build()),
+                                    new GrantedNftAllowance(
+                                            new TokenID.Builder()
+                                                    .shardNum(0)
+                                                    .realmNum(0)
+                                                    .tokenNum(posLong())
+                                                    .build(),
+                                            new AccountID.Builder()
+                                                    .shardNum(0)
+                                                    .realmNum(0)
+                                                    .accountNum(posLong())
+                                                    .build())))
+                    .grantedTokenAllowances(
+                            List.of(
+                                    new GrantedTokenAllowance(
+                                            new TokenID.Builder()
+                                                    .shardNum(0)
+                                                    .realmNum(0)
+                                                    .tokenNum(posLong())
+                                                    .build(),
+                                            new AccountID.Builder()
+                                                    .shardNum(0)
+                                                    .realmNum(0)
+                                                    .accountNum(posLong())
+                                                    .build(),
+                                            posLong()),
+                                    new GrantedTokenAllowance(
+                                            new TokenID.Builder()
+                                                    .shardNum(0)
+                                                    .realmNum(0)
+                                                    .tokenNum(posLong())
+                                                    .build(),
+                                            new AccountID.Builder()
+                                                    .shardNum(0)
+                                                    .realmNum(0)
+                                                    .accountNum(posLong())
+                                                    .build(),
+                                            posLong())))
                     .build();
 
     private static long posLong() {
@@ -142,7 +176,7 @@ private static Bytes randomBytes(int size) {
     private static String randomHex(int size) {
         final StringBuilder sb = new StringBuilder();
         for (int i = 0; i < size; i++) {
-            sb.append(Integer.toHexString(RANDOM.nextInt(0,15)));
+            sb.append(Integer.toHexString(RANDOM.nextInt(0, 15)));
         }
         return sb.toString();
     }
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/AccountDetailsWriter.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/AccountDetailsWriter.java
index 1809fcab..90c6f119 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/AccountDetailsWriter.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/AccountDetailsWriter.java
@@ -1,15 +1,14 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration;
 
+import static com.hedera.pbj.integration.AccountDetailsPbj.ACCOUNT_DETAILS;
+
 import com.hedera.hapi.node.token.AccountDetails;
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
 import com.hederahashgraph.api.proto.java.GetAccountDetailsResponse;
 import java.util.Arrays;
-import static com.hedera.pbj.integration.AccountDetailsPbj.ACCOUNT_DETAILS;
 
-/**
- * Testing main class for profiling parser and writer performance
- */
+/** Testing main class for profiling parser and writer performance */
 @SuppressWarnings("unused")
 public class AccountDetailsWriter {
 
@@ -20,7 +19,7 @@ public class AccountDetailsWriter {
      * @throws Exception if there was a problem
      */
     public static void main(String[] args) throws Exception {
-        final BufferedData outDataBuffer = BufferedData.allocate(1024*1024);
+        final BufferedData outDataBuffer = BufferedData.allocate(1024 * 1024);
 
         for (int i = 0; i < 10_000_000; i++) {
             outDataBuffer.reset();
@@ -45,12 +44,13 @@ public static void main2(String[] args) throws Exception {
         final byte[] protobuf = new byte[(int) tempDataBuffer.remaining()];
         tempDataBuffer.readBytes(protobuf);
         // write out with protoc
-        final GetAccountDetailsResponse.AccountDetails accountDetailsProtoC = GetAccountDetailsResponse.AccountDetails.parseFrom(protobuf);
-//
-//        final ByteBuffer bbout = ByteBuffer.allocate(1024*1024);
+        final GetAccountDetailsResponse.AccountDetails accountDetailsProtoC =
+                GetAccountDetailsResponse.AccountDetails.parseFrom(protobuf);
+        //
+        //        final ByteBuffer bbout = ByteBuffer.allocate(1024*1024);
 
         for (int i = 0; i < 10_000_000; i++) {
-//            bbout.clear();
+            //            bbout.clear();
             final byte[] writtenData = accountDetailsProtoC.toByteArray();
             if (writtenData.length != protobuf.length) {
                 System.out.println("writtenData = " + Arrays.toString(writtenData));
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingTestData.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingTestData.java
index e36391bd..d022fd98 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingTestData.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingTestData.java
@@ -2,81 +2,20 @@
 package com.hedera.pbj.integration;
 
 import com.hedera.pbj.runtime.io.buffer.Bytes;
-import com.hedera.pbj.runtime.io.buffer.RandomAccessData;
 import com.hedera.pbj.test.proto.pbj.Everything;
 import com.hedera.pbj.test.proto.pbj.InnerEverything;
 import com.hedera.pbj.test.proto.pbj.Suit;
 import com.hedera.pbj.test.proto.pbj.TimestampTest;
-
 import java.util.List;
 import java.util.stream.IntStream;
 import java.util.stream.LongStream;
 
-/**
- * Sample test data for everything object
- */
+/** Sample test data for the Everything object. */
 public class EverythingTestData {
 
     // input objects
-    public static final Everything EVERYTHING = new Everything.Builder()
-            .int32Number(1234)
-            .sint32Number(-1234)
-            .uint32Number(Integer.MAX_VALUE)
-            .fixed32Number(644534)
-            .sfixed32Number(-31345)
-            .floatNumber(15834.213581f)
-            .int64Number(53451121355L)
-            .sint64Number(-53451121355L)
-            .uint64Number(2451326663131L)
-            .fixed64Number(33626188515L)
-            .sfixed64Number(-531311551515L)
-            .doubleNumber(135581531681.1535151)
-            .booleanField(true)
-            .enumSuit(Suit.SPADES)
-            .subObject(new TimestampTest.Builder().seconds(5155135L).nanos(44513).build())
-            .text("Hello Everything!")
-            .bytesField(Bytes.wrap(new byte[]{12,29,19,120,127,0,-127}))
-            .int32NumberList(IntStream.range(0,10).boxed().toList())
-            .sint32NumberList(IntStream.range(-10,10).boxed().toList())
-            .uint32NumberList(IntStream.range(0,100).boxed().toList())
-            .fixed32NumberList(IntStream.range(0,25).boxed().toList())
-            .sfixed32NumberList(IntStream.range(-10,25).boxed().toList())
-            .floatNumberList(List.of(513.51f,55535351.3545841f,0f,-1f))
-            .floatNumberList(List.of(513.51f,55535351.3545841f,0f,-1f))
-            .int64NumberList(LongStream.range(0,10).boxed().toList())
-            .sint64NumberList(LongStream.range(-10,10).boxed().toList())
-            .uint64NumberList(LongStream.range(0,10).boxed().toList())
-            .fixed64NumberList(LongStream.range(0,10).boxed().toList())
-            .sfixed64NumberList(LongStream.range(-10,10).boxed().toList())
-            .doubleNumberList(List.of(513.51,55535351.3545841,0d,-1d))
-            .booleanList(List.of(true, false, true, true, false))
-            .enumSuitList(List.of(Suit.ACES, Suit.CLUBS, Suit.DIAMONDS))
-            .subObjectList(List.of(
-                    new TimestampTest.Builder().seconds(5155135L).nanos(44513).build(),
-                    new TimestampTest.Builder().seconds(486486).nanos(31315).build(),
-                    new TimestampTest.Builder().seconds(0).nanos(58).build()
-            ))
-            .textList(List.of(
-                    "صِف خَلقَ خَودِ كَمِثلِ الشَمسِ إِذ بَزَغَت — يَحظى الضَجيعُ بِها نَجلاءَ مِعطارِ",
-                    "ऋषियों को सताने वाले दुष्ट राक्षसों के राजा रावण का सर्वनाश करने वाले विष्णुवतार भगवान श्रीराम, अयोध्या के महाराज दशरथ के बड़े सपुत्र थे।",
-                    "A quick brown fox jumps over the lazy dog"
-            ))
-            .bytesExampleList(List.of(
-                    Bytes.wrap(new byte[]{12,29,19,120,127,0,-127}),
-                    Bytes.wrap(new byte[]{13,15,65,98,-65}),
-                    Bytes.wrap(new byte[]{127,0,-127})
-            ))
-            .int32Boxed(1234)
-            .uint32Boxed(Integer.MAX_VALUE)
-            .floatBoxed(15834.213581f)
-            .int64Boxed(53451121355L)
-            .uint64Boxed(2451326663131L)
-            .doubleBoxed(135581531681.1535151)
-            .boolBoxed(true)
-            .bytesBoxed(Bytes.wrap(new byte[]{13,15,65,98,-65}))
-            .stringBoxed("Hello Everything!")
-            .doubleNumberOneOf(29292.299d)
-            .innerEverything(new InnerEverything.Builder()
+    public static final Everything EVERYTHING =
+            new Everything.Builder()
                     .int32Number(1234)
                     .sint32Number(-1234)
                     .uint32Number(Integer.MAX_VALUE)
@@ -93,37 +32,46 @@ public class EverythingTestData {
                     .enumSuit(Suit.SPADES)
                     .subObject(new TimestampTest.Builder().seconds(5155135L).nanos(44513).build())
                     .text("Hello Everything!")
-                    .bytesField(Bytes.wrap(new byte[]{12,29,19,120,127,0,-127}))
-                    .int32NumberList(IntStream.range(0,10).boxed().toList())
-                    .sint32NumberList(IntStream.range(-10,10).boxed().toList())
-                    .uint32NumberList(IntStream.range(0,100).boxed().toList())
-                    .fixed32NumberList(IntStream.range(0,25).boxed().toList())
-                    .sfixed32NumberList(IntStream.range(-10,25).boxed().toList())
-                    .floatNumberList(List.of(513.51f,55535351.3545841f,0f,-1f))
-                    .floatNumberList(List.of(513.51f,55535351.3545841f,0f,-1f))
-                    .int64NumberList(LongStream.range(0,10).boxed().toList())
-                    .sint64NumberList(LongStream.range(-10,10).boxed().toList())
-                    .uint64NumberList(LongStream.range(0,10).boxed().toList())
-                    .fixed64NumberList(LongStream.range(0,10).boxed().toList())
-                    .sfixed64NumberList(LongStream.range(-10,10).boxed().toList())
-                    .doubleNumberList(List.of(513.51,55535351.3545841,0d,-1d))
+                    .bytesField(Bytes.wrap(new byte[] {12, 29, 19, 120, 127, 0, -127}))
+                    .int32NumberList(IntStream.range(0, 10).boxed().toList())
+                    .sint32NumberList(IntStream.range(-10, 10).boxed().toList())
+                    .uint32NumberList(IntStream.range(0, 100).boxed().toList())
+                    .fixed32NumberList(IntStream.range(0, 25).boxed().toList())
+                    .sfixed32NumberList(IntStream.range(-10, 25).boxed().toList())
+                    .floatNumberList(List.of(513.51f, 55535351.3545841f, 0f, -1f))
+                    .floatNumberList(List.of(513.51f, 55535351.3545841f, 0f, -1f))
+                    .int64NumberList(LongStream.range(0, 10).boxed().toList())
+                    .sint64NumberList(LongStream.range(-10, 10).boxed().toList())
+                    .uint64NumberList(LongStream.range(0, 10).boxed().toList())
+                    .fixed64NumberList(LongStream.range(0, 10).boxed().toList())
+                    .sfixed64NumberList(LongStream.range(-10, 10).boxed().toList())
+                    .doubleNumberList(List.of(513.51, 55535351.3545841, 0d, -1d))
                     .booleanList(List.of(true, false, true, true, false))
                     .enumSuitList(List.of(Suit.ACES, Suit.CLUBS, Suit.DIAMONDS))
-                    .subObjectList(List.of(
-                            new TimestampTest.Builder().seconds(5155135L).nanos(44513).build(),
-                            new TimestampTest.Builder().seconds(486486).nanos(31315).build(),
-                            new TimestampTest.Builder().seconds(0).nanos(58).build()
-                    ))
-                    .textList(List.of(
-                            "صِف خَلقَ خَودِ كَمِثلِ الشَمسِ إِذ بَزَغَت — يَحظى الضَجيعُ بِها نَجلاءَ مِعطارِ",
-                            "ऋषियों को सताने वाले दुष्ट राक्षसों के राजा रावण का सर्वनाश करने वाले विष्णुवतार भगवान श्रीराम, अयोध्या के महाराज दशरथ के बड़े सपुत्र थे।",
-                            "A quick brown fox jumps over the lazy dog"
-                    ))
-                    .bytesExampleList(List.of(
-                            Bytes.wrap(new byte[]{12,29,19,120,127,0,-127}),
-                            Bytes.wrap(new byte[]{13,15,65,98,-65}),
-                            Bytes.wrap(new byte[]{127,0,-127})
-                    ))
+                    .subObjectList(
+                            List.of(
+                                    new TimestampTest.Builder()
+                                            .seconds(5155135L)
+                                            .nanos(44513)
+                                            .build(),
+                                    new TimestampTest.Builder()
+                                            .seconds(486486)
+                                            .nanos(31315)
+                                            .build(),
+                                    new TimestampTest.Builder().seconds(0).nanos(58).build()))
+                    .textList(
+                            List.of(
+                                    "صِف خَلقَ خَودِ كَمِثلِ الشَمسِ إِذ بَزَغَت — يَحظى الضَجيعُ"
+                                            + " بِها نَجلاءَ مِعطارِ",
+                                    "ऋषियों को सताने वाले दुष्ट राक्षसों के राजा रावण का सर्वनाश"
+                                        + " करने वाले विष्णुवतार भगवान श्रीराम, अयोध्या के महाराज"
+                                        + " दशरथ के बड़े सपुत्र थे।",
+                                    "A quick brown fox jumps over the lazy dog"))
+                    .bytesExampleList(
+                            List.of(
+                                    Bytes.wrap(new byte[] {12, 29, 19, 120, 127, 0, -127}),
+                                    Bytes.wrap(new byte[] {13, 15, 65, 98, -65}),
+                                    Bytes.wrap(new byte[] {127, 0, -127})))
                     .int32Boxed(1234)
                     .uint32Boxed(Integer.MAX_VALUE)
                     .floatBoxed(15834.213581f)
@@ -131,9 +79,89 @@ public class EverythingTestData {
                     .uint64Boxed(2451326663131L)
                     .doubleBoxed(135581531681.1535151)
                     .boolBoxed(true)
-                    .bytesBoxed(Bytes.wrap(new byte[]{13,15,65,98,-65}))
+                    .bytesBoxed(Bytes.wrap(new byte[] {13, 15, 65, 98, -65}))
                     .stringBoxed("Hello Everything!")
                     .doubleNumberOneOf(29292.299d)
-                    .build())
-            .build();
+                    .innerEverything(
+                            new InnerEverything.Builder()
+                                    .int32Number(1234)
+                                    .sint32Number(-1234)
+                                    .uint32Number(Integer.MAX_VALUE)
+                                    .fixed32Number(644534)
+                                    .sfixed32Number(-31345)
+                                    .floatNumber(15834.213581f)
+                                    .int64Number(53451121355L)
+                                    .sint64Number(-53451121355L)
+                                    .uint64Number(2451326663131L)
+                                    .fixed64Number(33626188515L)
+                                    .sfixed64Number(-531311551515L)
+                                    .doubleNumber(135581531681.1535151)
+                                    .booleanField(true)
+                                    .enumSuit(Suit.SPADES)
+                                    .subObject(
+                                            new TimestampTest.Builder()
+                                                    .seconds(5155135L)
+                                                    .nanos(44513)
+                                                    .build())
+                                    .text("Hello Everything!")
+                                    .bytesField(
+                                            Bytes.wrap(new byte[] {12, 29, 19, 120, 127, 0, -127}))
+                                    .int32NumberList(IntStream.range(0, 10).boxed().toList())
+                                    .sint32NumberList(IntStream.range(-10, 10).boxed().toList())
+                                    .uint32NumberList(IntStream.range(0, 100).boxed().toList())
+                                    .fixed32NumberList(IntStream.range(0, 25).boxed().toList())
+                                    .sfixed32NumberList(IntStream.range(-10, 25).boxed().toList())
+                                    .floatNumberList(List.of(513.51f, 55535351.3545841f, 0f, -1f))
+                                    .floatNumberList(List.of(513.51f, 55535351.3545841f, 0f, -1f))
+                                    .int64NumberList(LongStream.range(0, 10).boxed().toList())
+                                    .sint64NumberList(LongStream.range(-10, 10).boxed().toList())
+                                    .uint64NumberList(LongStream.range(0, 10).boxed().toList())
+                                    .fixed64NumberList(LongStream.range(0, 10).boxed().toList())
+                                    .sfixed64NumberList(LongStream.range(-10, 10).boxed().toList())
+                                    .doubleNumberList(List.of(513.51, 55535351.3545841, 0d, -1d))
+                                    .booleanList(List.of(true, false, true, true, false))
+                                    .enumSuitList(List.of(Suit.ACES, Suit.CLUBS, Suit.DIAMONDS))
+                                    .subObjectList(
+                                            List.of(
+                                                    new TimestampTest.Builder()
+                                                            .seconds(5155135L)
+                                                            .nanos(44513)
+                                                            .build(),
+                                                    new TimestampTest.Builder()
+                                                            .seconds(486486)
+                                                            .nanos(31315)
+                                                            .build(),
+                                                    new TimestampTest.Builder()
+                                                            .seconds(0)
+                                                            .nanos(58)
+                                                            .build()))
+                                    .textList(
+                                            List.of(
+                                                    "صِف خَلقَ خَودِ كَمِثلِ الشَمسِ إِذ بَزَغَت —"
+                                                        + " يَحظى الضَجيعُ بِها نَجلاءَ مِعطارِ",
+                                                    "ऋषियों को सताने वाले दुष्ट राक्षसों के राजा"
+                                                        + " रावण का सर्वनाश करने वाले विष्णुवतार"
+                                                        + " भगवान श्रीराम, अयोध्या के महाराज दशरथ"
+                                                        + " के बड़े सपुत्र थे।",
+                                                    "A quick brown fox jumps over the lazy dog"))
+                                    .bytesExampleList(
+                                            List.of(
+                                                    Bytes.wrap(
+                                                            new byte[] {
+                                                                12, 29, 19, 120, 127, 0, -127
+                                                            }),
+                                                    Bytes.wrap(new byte[] {13, 15, 65, 98, -65}),
+                                                    Bytes.wrap(new byte[] {127, 0, -127})))
+                                    .int32Boxed(1234)
+                                    .uint32Boxed(Integer.MAX_VALUE)
+                                    .floatBoxed(15834.213581f)
+                                    .int64Boxed(53451121355L)
+                                    .uint64Boxed(2451326663131L)
+                                    .doubleBoxed(135581531681.1535151)
+                                    .boolBoxed(true)
+                                    .bytesBoxed(Bytes.wrap(new byte[] {13, 15, 65, 98, -65}))
+                                    .stringBoxed("Hello Everything!")
+                                    .doubleNumberOneOf(29292.299d)
+                                    .build())
+                    .build();
 }
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingWriterPerfTest.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingWriterPerfTest.java
index c516cb98..51c660a4 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingWriterPerfTest.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/EverythingWriterPerfTest.java
@@ -9,7 +9,7 @@
 public class EverythingWriterPerfTest {
 
     public static void main(String[] args) throws Exception {
-        final BufferedData outDataBuffer = BufferedData.allocate(1024*1024);
+        final BufferedData outDataBuffer = BufferedData.allocate(1024 * 1024);
 
         for (int i = 0; i < 10_000_000; i++) {
             outDataBuffer.reset();
@@ -19,6 +19,7 @@ public static void main(String[] args) throws Exception {
             }
         }
     }
+
     public static void main2(String[] args) throws Exception {
         // write to temp data buffer and then read into byte array
         BufferedData tempDataBuffer = BufferedData.allocate(5 * 1024 * 1024);
@@ -27,12 +28,13 @@ public static void main2(String[] args) throws Exception {
         final byte[] protobuf = new byte[(int) tempDataBuffer.remaining()];
         tempDataBuffer.readBytes(protobuf);
         // write out with protoc
-        final GetAccountDetailsResponse.AccountDetails accountDetailsProtoC = GetAccountDetailsResponse.AccountDetails.parseFrom(protobuf);
-//
-//        final ByteBuffer bbout = ByteBuffer.allocate(1024*1024);
+        final GetAccountDetailsResponse.AccountDetails accountDetailsProtoC =
+                GetAccountDetailsResponse.AccountDetails.parseFrom(protobuf);
+        //
+        //        final ByteBuffer bbout = ByteBuffer.allocate(1024*1024);
 
         for (int i = 0; i < 10_000_000; i++) {
-//            bbout.clear();
+            //            bbout.clear();
             final byte[] writtenData = accountDetailsProtoC.toByteArray();
             if (writtenData.length != protobuf.length) {
                 System.out.println("writtenData = " + writtenData);
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/NonSynchronizedByteArrayInputStream.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/NonSynchronizedByteArrayInputStream.java
index 1af80d4d..ef422c89 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/NonSynchronizedByteArrayInputStream.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/NonSynchronizedByteArrayInputStream.java
@@ -7,9 +7,7 @@
 import java.util.Arrays;
 import java.util.Objects;
 
-/**
- * Faster non-synchronized ByteArrayInputStream
- */
+/** Faster non-synchronized ByteArrayInputStream */
 public final class NonSynchronizedByteArrayInputStream extends InputStream {
     private final byte[] buf;
     private int pos;
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/NonSynchronizedByteArrayOutputStream.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/NonSynchronizedByteArrayOutputStream.java
index 9108d01a..baf68d12 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/NonSynchronizedByteArrayOutputStream.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/NonSynchronizedByteArrayOutputStream.java
@@ -6,9 +6,7 @@
 import java.util.Arrays;
 import java.util.Objects;
 
-/**
- * Faster non-synchronized ByteArrayOutputStream
- */
+/** Faster non-synchronized ByteArrayOutputStream */
 public final class NonSynchronizedByteArrayOutputStream extends OutputStream {
 
     private ByteBuffer byteBuffer = null;
@@ -26,8 +24,8 @@ public NonSynchronizedByteArrayOutputStream(int size) {
     }
 
     /**
-     * get a reused bytebuffer directly over the internal buffer. It will have position reset and limit set to
-     * current data size.
+     * get a reused bytebuffer directly over the internal buffer. It will have position reset and
+     * limit set to current data size.
      */
     public ByteBuffer getByteBuffer() {
         if (byteBuffer == null || byteBuffer.array() != buf) {
@@ -43,8 +41,10 @@ private void ensureCapacity(int minCapacity) {
         int oldCapacity = buf.length;
         int minGrowth = minCapacity - oldCapacity;
         if (minGrowth > 0) {
-            buf = Arrays.copyOf(buf, newLength(oldCapacity,
-                    minGrowth, oldCapacity /* preferred growth */));
+            buf =
+                    Arrays.copyOf(
+                            buf,
+                            newLength(oldCapacity, minGrowth, oldCapacity /* preferred growth */));
         }
     }
 
@@ -73,6 +73,5 @@ public byte[] toByteArray() {
         return Arrays.copyOf(buf, count);
     }
 
-    public void close() {
-    }
+    public void close() {}
 }
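
The getByteBuffer() contract reformatted above (position reset, limit clamped to the bytes written so far) is easiest to read next to a concrete call sequence. A minimal sketch, assuming the stream tracks written bytes the way java.io.ByteArrayOutputStream does; the demo method itself is hypothetical, while the no-arg constructor, getByteBuffer() and toByteArray() are the members shown in this patch:

    static void byteBufferViewDemo() throws Exception {
        NonSynchronizedByteArrayOutputStream out = new NonSynchronizedByteArrayOutputStream();
        out.write(new byte[] {1, 2, 3}); // three bytes of payload
        java.nio.ByteBuffer view = out.getByteBuffer(); // reused view over the internal buffer
        System.out.println(view.position() + "/" + view.limit()); // expected "0/3" per the javadoc
        System.out.println(java.util.Arrays.toString(out.toByteArray())); // expected "[1, 2, 3]"
    }
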
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/Test.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/Test.java
index b20a30d8..27d971c3 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/Test.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/Test.java
@@ -13,52 +13,65 @@
 import com.hedera.services.stream.proto.SignatureObject;
 import com.hedera.services.stream.proto.SignatureType;
 import com.hederahashgraph.api.proto.java.AccountID;
-
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.List;
 
-/**
- * Random experimental code
- */
+/** Random experimental code */
 @SuppressWarnings("unused")
 public class Test {
 
     public static void main(String[] args) throws Exception {
-        AccountID accountID = AccountID.newBuilder().setAccountNum(1).setRealmNum(2).setShardNum(3).build();
+        AccountID accountID =
+                AccountID.newBuilder().setAccountNum(1).setRealmNum(2).setShardNum(3).build();
         System.out.println(accountID);
 
         System.out.println("Json = " + JsonFormat.printer().print(accountID));
 
-        SignatureObject signatureObject = SignatureObject.newBuilder()
-                .setType(SignatureType.SHA_384_WITH_RSA)
-                .setLength(48)
-                .setChecksum(123)
-                .setSignature(ByteString.copyFrom(new byte[]{
-                                0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E,
-                                0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E,
-                                0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E
-                        }))
-                .setHashObject(HashObject.newBuilder()
-                        .setAlgorithm(HashAlgorithm.SHA_384)
+        SignatureObject signatureObject =
+                SignatureObject.newBuilder()
+                        .setType(SignatureType.SHA_384_WITH_RSA)
                         .setLength(48)
-                        .setHash(ByteString.copyFrom(new byte[]{
-                                0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E,
-                                0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E,
-                                0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E
-                        })))
-                .build();
-
-//        SignatureType type,
-//        int length,
-//        int checksum,
-//        Bytes signature,
-//        @Nullable com.hedera.hapi.streams.HashObject hashObject
+                        .setChecksum(123)
+                        .setSignature(
+                                ByteString.copyFrom(
+                                        new byte[] {
+                                            0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
+                                                    0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E,
+                                            0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
+                                                    0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E,
+                                            0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
+                                                    0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E
+                                        }))
+                        .setHashObject(
+                                HashObject.newBuilder()
+                                        .setAlgorithm(HashAlgorithm.SHA_384)
+                                        .setLength(48)
+                                        .setHash(
+                                                ByteString.copyFrom(
+                                                        new byte[] {
+                                                            0x00, 0x01, 0x02, 0x03, 0x04, 0x05,
+                                                                    0x06, 0x07, 0x08, 0x09, 0x0A,
+                                                                    0x0B, 0x0C, 0x0D, 0x0E,
+                                                            0x00, 0x01, 0x02, 0x03, 0x04, 0x05,
+                                                                    0x06, 0x07, 0x08, 0x09, 0x0A,
+                                                                    0x0B, 0x0C, 0x0D, 0x0E,
+                                                            0x00, 0x01, 0x02, 0x03, 0x04, 0x05,
+                                                                    0x06, 0x07, 0x08, 0x09, 0x0A,
+                                                                    0x0B, 0x0C, 0x0D, 0x0E
+                                                        })))
+                        .build();
+
+        //        SignatureType type,
+        //        int length,
+        //        int checksum,
+        //        Bytes signature,
+        //        @Nullable com.hedera.hapi.streams.HashObject hashObject
 
         System.out.println(signatureObject);
         System.out.println("Json = " + JsonFormat.printer().print(signatureObject));
     }
-    
+
     public static void main2(String[] args) throws Exception {
         NonSynchronizedByteArrayOutputStream out = new NonSynchronizedByteArrayOutputStream();
         WritableStreamingData dout = new WritableStreamingData(out);
@@ -73,16 +86,14 @@ public static void main2(String[] args) throws Exception {
         int read = din.readVarInt(false);
         System.out.println("read = " + read);
 
-
-
-        final TimestampTest modelObj = new TimestampTest(4L,8 );
+        final TimestampTest modelObj = new TimestampTest(4L, 8);
         // get reusable thread buffers
-        final BufferedData dataBuffer = BufferedData.allocate(1024*1024);
-        final BufferedData dataBuffer2 = BufferedData.allocate(1024*1024);
-        final ByteBuffer byteBuffer = ByteBuffer.allocate(1024*1024);
+        final BufferedData dataBuffer = BufferedData.allocate(1024 * 1024);
+        final BufferedData dataBuffer2 = BufferedData.allocate(1024 * 1024);
+        final ByteBuffer byteBuffer = ByteBuffer.allocate(1024 * 1024);
 
         // model to bytes with PBJ
-        TimestampTest.PROTOBUF.write(modelObj,dataBuffer);
+        TimestampTest.PROTOBUF.write(modelObj, dataBuffer);
 
         // clamp limit to bytes written and reset position
         dataBuffer.flip();
@@ -91,15 +102,17 @@ public static void main2(String[] args) throws Exception {
         dataBuffer.readBytes(byteBuffer);
         byteBuffer.flip();
 
-        // read proto bytes with ProtoC to make sure it is readable and no parse exceptions are thrown
-        final com.hedera.pbj.test.proto.java.TimestampTest protoCModelObj = com.hedera.pbj.test.proto.java.TimestampTest.parseFrom(byteBuffer);
+        // read proto bytes with ProtoC to make sure it is readable and no parse exceptions are
+        // thrown
+        final com.hedera.pbj.test.proto.java.TimestampTest protoCModelObj =
+                com.hedera.pbj.test.proto.java.TimestampTest.parseFrom(byteBuffer);
 
         // read proto bytes with PBJ parser
         dataBuffer.resetPosition();
         final TimestampTest modelObj2 = TimestampTest.PROTOBUF.parse(dataBuffer);
 
         // check the read back object is equal to written original one
-        //assertEquals(modelObj.toString(), modelObj2.toString());
+        // assertEquals(modelObj.toString(), modelObj2.toString());
         System.out.println(modelObj.equals(modelObj2));
 
         // model to bytes with ProtoC writer
@@ -124,10 +137,10 @@ public static void main2(String[] args) throws Exception {
         byteBuffer.position(0);
         byte[] protoBytes = new byte[byteBuffer.remaining()];
         byteBuffer.get(protoBytes);
-        NonSynchronizedByteArrayInputStream bin = new NonSynchronizedByteArrayInputStream(protoBytes);
+        NonSynchronizedByteArrayInputStream bin =
+                new NonSynchronizedByteArrayInputStream(protoBytes);
         TimestampTest.PROTOBUF.parse(new ReadableStreamingData(bin));
     }
-    public record Everything(
-            List<String> textList
-    ){}
+
+    public record Everything(List<String> textList) {}
 }
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/Elapsed.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/Elapsed.java
index f3a583e3..f496fcb2 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/Elapsed.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/Elapsed.java
@@ -2,20 +2,17 @@
 package com.hedera.pbj.integration.fuzz;
 
 import com.hedera.pbj.runtime.test.Sneaky;
-
 import java.util.concurrent.Callable;
 import java.util.concurrent.TimeUnit;
 
 /**
  * A utility class to measure elapsed time of a running code.
+ *
  * @param result the return value of the code, if any
  * @param nanos the time the code took to run, in nanos
  * @param <T> the return type of the code, or Void for Runnables.
  */
-public final record Elapsed<T>(
-        T result,
-        long nanos
-) {
+public final record Elapsed<T>(T result, long nanos) {
 
     /**
      * Measure the time the provided Callable takes to run.
@@ -41,23 +38,25 @@ public static <T> Elapsed<T> time(final Callable<T> callable) {
      * @return an Elapsed record with the time. The result is set to null.
      */
     public static Elapsed<Void> time(final Runnable runnable) {
-        return time(() -> { runnable.run(); return null; });
+        return time(
+                () -> {
+                    runnable.run();
+                    return null;
+                });
     }
 
     /**
      * Format the elapsed time in a human-readable form.
      *
-     * The current implementation translates the nanos to seconds
-     * and returns a string of the form "X seconds".
+     * <p>The current implementation translates the nanos to seconds and returns a string of the
+     * form "X seconds".
      *
-     * The returned value is suitable for reporting/logging purposes only.
-     * Callers should NOT rely on the exact format of the returned
-     * string because it may change in the future.
+     * <p>The returned value is suitable for reporting/logging purposes only. Callers should NOT
+     * rely on the exact format of the returned string because it may change in the future.
      *
      * @return a string describing the elapsed time
      */
     public String format() {
         return TimeUnit.SECONDS.convert(nanos(), TimeUnit.NANOSECONDS) + " seconds";
     }
-
 }
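
Since the Elapsed record above is described only through its javadoc, a short hedged usage sketch may help (statements to drop into any main or test method). The workload lambda is invented for illustration; time(Callable), result(), nanos() and format() are the members visible in this diff:

    // Time an arbitrary computation with the Callable overload; the Runnable overload behaves the
    // same but records null as the result.
    Elapsed<Long> elapsed = Elapsed.time(() -> {
        long sum = 0; // purely illustrative workload
        for (int i = 0; i < 1_000_000; i++) sum += i;
        return sum;
    });
    System.out.println(elapsed.result() + " computed in " + elapsed.nanos() + " ns, i.e. " + elapsed.format());
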
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzTest.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzTest.java
index 3732c8bd..0c25f385 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzTest.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzTest.java
@@ -2,7 +2,6 @@
 package com.hedera.pbj.integration.fuzz;
 
 import com.hedera.pbj.runtime.Codec;
-
 import java.io.InputStream;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
@@ -14,24 +13,22 @@
 
 /**
  * A fuzz test runner for a single object/codec.
- * <p>
- * This class exposes a single public static method that runs a comprehensive fuzz test
- * for a given object and its codec. Note that the codec must be valid for the given
- * object (see the SingleFuzzTest javadoc for more details.)
- * <p>
- * Ultimately, the result of the test is a map that describes how often a particular
- * SingleFuzzTest outcome occurred in percentages. The provided threshold specifies
- * the percentage of the DESERIALIZATION_FAILED outcome for the test to be considered
- * as passed.
- * <p>
- * The method returns a FuzzTestResult record that describes the results in full.
+ *
+ * <p>This class exposes a single public static method that runs a comprehensive fuzz test for a
+ * given object and its codec. Note that the codec must be valid for the given object (see the
+ * SingleFuzzTest javadoc for more details.)
+ *
+ * <p>Ultimately, the result of the test is a map that describes how often a particular
+ * SingleFuzzTest outcome occurred in percentages. The provided threshold specifies the percentage
+ * of the DESERIALIZATION_FAILED outcome for the test to be considered as passed.
+ *
+ * <p>The method returns a FuzzTestResult record that describes the results in full.
  */
 public class FuzzTest {
 
     /**
-     * Run a fuzz test for a given object and codec, and use the provided threshold
-     * for the most desirable DESERIALIZATION_FAILED outcome to determine
-     * if the test passed or not.
+     * Run a fuzz test for a given object and codec, and use the provided threshold for the most
+     * desirable DESERIALIZATION_FAILED outcome to determine if the test passed or not.
      */
     public static <T> FuzzTestResult<T> fuzzTest(
             final T object,
@@ -48,45 +45,47 @@ public static <T> FuzzTestResult<T> fuzzTest(
             // Certain objects result in zero-size payload, so there's nothing to test.
             // Mark it as passed.
             return new FuzzTestResult<>(
-                    object,
-                    true,
-                    Map.of(),
-                    repeatCount,
-                    System.nanoTime() - startNanoTime
-            );
+                    object, true, Map.of(), repeatCount, System.nanoTime() - startNanoTime);
         }
 
-        final Map<SingleFuzzTestResult, Long> resultCounts = IntStream.range(0, repeatCount)
-                // Note that we must run this stream sequentially to enable
-                // reproducing the tests for a given random seed.
-                .mapToObj(n -> SingleFuzzTest.fuzzTest(object, codec, random, protocParser))
-                .collect(Collectors.groupingBy(Function.identity(), Collectors.counting()));
+        final Map<SingleFuzzTestResult, Long> resultCounts =
+                IntStream.range(0, repeatCount)
+                        // Note that we must run this stream sequentially to enable
+                        // reproducing the tests for a given random seed.
+                        .mapToObj(n -> SingleFuzzTest.fuzzTest(object, codec, random, protocParser))
+                        .collect(Collectors.groupingBy(Function.identity(), Collectors.counting()));
 
-        final Map<SingleFuzzTestResult, Double> statsMap = computePercentageMap(resultCounts, repeatCount);
+        final Map<SingleFuzzTestResult, Double> statsMap =
+                computePercentageMap(resultCounts, repeatCount);
 
         return new FuzzTestResult<>(
                 object,
                 statsMap.getOrDefault(SingleFuzzTestResult.DESERIALIZATION_FAILED, 0.) >= threshold,
                 statsMap,
                 repeatCount,
-                System.nanoTime() - startNanoTime
-        );
+                System.nanoTime() - startNanoTime);
     }
 
     private static Function<InputStream, ?> getProtocParser(Class<?> protocModelClass) {
         final Function<InputStream, ?> protocParser;
         try {
-            final Method method = protocModelClass.getDeclaredMethod("parseFrom", InputStream.class);
-            protocParser = inputStream -> {
-                try {
-                    return method.invoke(null, inputStream);
-                } catch (IllegalAccessException | InvocationTargetException e) {
-                    throw new FuzzTestException("Failed to invoke protocModelClass.parseFrom(InputStream)", e);
-                }
-            };
+            final Method method =
+                    protocModelClass.getDeclaredMethod("parseFrom", InputStream.class);
+            protocParser =
+                    inputStream -> {
+                        try {
+                            return method.invoke(null, inputStream);
+                        } catch (IllegalAccessException | InvocationTargetException e) {
+                            throw new FuzzTestException(
+                                    "Failed to invoke protocModelClass.parseFrom(InputStream)", e);
+                        }
+                    };
         } catch (NoSuchMethodException e) {
-            throw new FuzzTestException("Protoc model " + protocModelClass.getName()
-                    + " doesn't have the parseFrom(InputStream) method", e);
+            throw new FuzzTestException(
+                    "Protoc model "
+                            + protocModelClass.getName()
+                            + " doesn't have the parseFrom(InputStream) method",
+                    e);
         }
         return protocParser;
     }
@@ -103,12 +102,11 @@ private static <T> int estimateRepeatCount(final T object, final Codec<T> codec)
     }
 
     private static Map<SingleFuzzTestResult, Double> computePercentageMap(
-            final Map<SingleFuzzTestResult, Long> resultCounts,
-            final int repeatCount) {
+            final Map<SingleFuzzTestResult, Long> resultCounts, final int repeatCount) {
         return resultCounts.entrySet().stream()
-                .collect(Collectors.toMap(
-                        Map.Entry::getKey,
-                        entry -> entry.getValue().doubleValue() / (double) repeatCount)
-                );
+                .collect(
+                        Collectors.toMap(
+                                Map.Entry::getKey,
+                                entry -> entry.getValue().doubleValue() / (double) repeatCount));
     }
 }
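
A hedged sketch of how FuzzTest.fuzzTest might be invoked, pieced together from the values this diff shows it using: the object, its codec, a pass threshold compared against the DESERIALIZATION_FAILED percentage, a Random, and a protoc model class resolved to a parseFrom(InputStream) parser. The exact parameter order and the 0.8 threshold are assumptions; TimestampTest and its PROTOBUF codec appear elsewhere in this patch:

    // Assumed call shape — the real signature may order these parameters differently:
    //   fuzzTest(pbjModel, pbjCodec, passThreshold, random, protocModelClass)
    FuzzTestResult<TimestampTest> result = FuzzTest.fuzzTest(
            new TimestampTest(4L, 8), // a valid PBJ model object used elsewhere in this patch
            TimestampTest.PROTOBUF, // its PBJ codec
            0.8, // assumed minimum share of DESERIALIZATION_FAILED outcomes to pass
            new java.util.Random(42), // fixed seed; the test stream runs sequentially, so runs are reproducible
            com.hedera.pbj.test.proto.java.TimestampTest.class); // protoc model used for cross-parsing
    System.out.println(result.format()); // PASSED/FAILED plus the per-outcome percentage map
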
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzTestResult.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzTestResult.java
index 1663697b..29353e5a 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzTestResult.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzTestResult.java
@@ -8,6 +8,7 @@
 
 /**
  * A record that describes the result of running a fuzz test.
+ *
  * @param object an object for which this test was run.
  * @param passed indicates if the test passed or not. See the FuzzTest class for the definition.
  * @param percentageMap a map with percentage statistics of occurred outcomes.
@@ -18,26 +19,33 @@ public record FuzzTestResult<T>(
         boolean passed,
         Map<SingleFuzzTestResult, Double> percentageMap,
         int repeatCount,
-        long nanoDuration
-) {
+        long nanoDuration) {
     private static final NumberFormat PERCENTAGE_FORMAT = NumberFormat.getPercentInstance();
 
-    /**
-     * Format the FuzzTestResult object for printing/logging.
-     */
+    /** Format the FuzzTestResult object for printing/logging. */
     public String format() {
-        return "A fuzz test " + (passed ? "PASSED" : "FAILED")
-                + " with " + repeatCount + " runs took "
-                + TimeUnit.MILLISECONDS.convert(nanoDuration, TimeUnit.NANOSECONDS) + " ms"
-                + " for " + object
-                + " with:" + System.lineSeparator()
+        return "A fuzz test "
+                + (passed ? "PASSED" : "FAILED")
+                + " with "
+                + repeatCount
+                + " runs took "
+                + TimeUnit.MILLISECONDS.convert(nanoDuration, TimeUnit.NANOSECONDS)
+                + " ms"
+                + " for "
+                + object
+                + " with:"
+                + System.lineSeparator()
                 + formatResultsStats();
     }
 
     private String formatResultsStats() {
         return percentageMap.entrySet().stream()
                 .sorted(Map.Entry.comparingByKey())
-                .map(entry -> entry.getKey().name() + ": " + PERCENTAGE_FORMAT.format(entry.getValue()))
+                .map(
+                        entry ->
+                                entry.getKey().name()
+                                        + ": "
+                                        + PERCENTAGE_FORMAT.format(entry.getValue()))
                 .collect(Collectors.joining(System.lineSeparator()));
     }
 }
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzUtil.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzUtil.java
index 0dcf4243..5ea61c3c 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzUtil.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/FuzzUtil.java
@@ -3,9 +3,7 @@
 
 import java.lang.reflect.Field;
 
-/**
- * A utility class used in the fuzz testing framework.
- */
+/** A utility class used in the fuzz testing framework. */
 public final class FuzzUtil {
     /**
      * Get a value of a static field named `name` in a class `clz`.
@@ -20,7 +18,8 @@ public static <T> T getStaticFieldValue(final Class<?> clz, final String name) {
             final Field field = clz.getField(name);
             return (T) field.get(null);
         } catch (NoSuchFieldException | IllegalAccessException e) {
-            throw new FuzzTestException("Failed to get field " + name + " from " + clz.getName(), e);
+            throw new FuzzTestException(
+                    "Failed to get field " + name + " from " + clz.getName(), e);
         }
     }
 }
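
As a usage note for getStaticFieldValue above: the caller picks the expected type and the cast happens unchecked inside the helper. A one-line sketch; reading the public static PROTOBUF field (used elsewhere in this patch) as a Codec<TimestampTest> is an assumption made for illustration:

    // Reflectively fetch a public static field by name; a missing or inaccessible field surfaces
    // as a FuzzTestException.
    Codec<TimestampTest> codec = FuzzUtil.getStaticFieldValue(TimestampTest.class, "PROTOBUF");
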
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/SingleFuzzTest.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/SingleFuzzTest.java
index b7e617ad..de03ba72 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/SingleFuzzTest.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/SingleFuzzTest.java
@@ -3,7 +3,6 @@
 
 import com.hedera.pbj.runtime.Codec;
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
-
 import java.io.InputStream;
 import java.util.Random;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -11,33 +10,33 @@
 
 /**
  * A single fuzz test.
- * <p>
- * This class exposes a single public static method that allows a caller to pass
- * a valid object, a Protobuf codec for this object, as well as a random numbers
- * generator. The method will then perform a single fuzz test run with the given
- * data and return a SingleFuzzTestResult describing the outcome of the test run.
- * <p>
- * A fuzz testing framework is expected to use this class to execute multiple runs
- * of the same test (each being random per the given random numbers generator)
- * until the set of outcomes satisfies the testing framework threshold,
- * or the framework runs out of the maximum number of attempts, or a timeout
- * occurs.
- * <p>
- * This class is thread-safe and can be called concurrently from multiple threads
- * as long as the input parameters are immutable or thread-safe.
+ *
+ * <p>This class exposes a single public static method that allows a caller to pass a valid object,
+ * a Protobuf codec for this object, as well as a random numbers generator. The method will then
+ * perform a single fuzz test run with the given data and return a SingleFuzzTestResult describing
+ * the outcome of the test run.
+ *
+ * <p>A fuzz testing framework is expected to use this class to execute multiple runs of the same
+ * test (each being random per the given random numbers generator) until the set of outcomes
+ * satisfies the testing framework threshold, or the framework runs out of the maximum number of
+ * attempts, or a timeout occurs.
+ *
+ * <p>This class is thread-safe and can be called concurrently from multiple threads as long as the
+ * input parameters are immutable or thread-safe.
  */
 public final class SingleFuzzTest {
     // When set to true, the test will print debugging info to System.out,
     // including payloads, for every single run. This may produce a lot of console output.
-    private final static boolean debug = false;
+    private static final boolean debug = false;
 
-    private final static AtomicInteger TEST_ID_GENERATOR = new AtomicInteger(0);
+    private static final AtomicInteger TEST_ID_GENERATOR = new AtomicInteger(0);
 
     public static int getNumberOfRuns() {
         return TEST_ID_GENERATOR.get();
     }
 
-    private static <T> BufferedData write(final T object, final Codec<T> codec, final int size) throws Exception {
+    private static <T> BufferedData write(final T object, final Codec<T> codec, final int size)
+            throws Exception {
         final BufferedData dataBuffer = BufferedData.allocate(size);
         codec.write(object, dataBuffer);
         return dataBuffer;
@@ -50,32 +49,34 @@ private static void tryProtocParser(
             final Object deserializedObject,
             final Function<InputStream, ?> protocParser,
             Exception pbjException,
-            boolean doThrow
-    ) {
+            boolean doThrow) {
         dataBuffer.reset();
         try {
             Object protocObject = protocParser.apply(dataBuffer.toInputStream());
             if (pbjException != null) {
-                System.out.println(prefix + "NOTE: Protoc was able to parse this payload w/o exceptions as "
-                        + protocObject
-                        + " , but PBJ errored out with "
-                        + pbjException.toString()
-                );
+                System.out.println(
+                        prefix
+                                + "NOTE: Protoc was able to parse this payload w/o exceptions as "
+                                + protocObject
+                                + " , but PBJ errored out with "
+                                + pbjException.toString());
             }
         } catch (Exception ex) {
             // Protoc didn't like the bytes.
             if (doThrow) {
                 throw new FuzzTestException(
-                        prefix + "Protoc threw an exception "
-                                // Fetch the actual cause because this was a call via Java Reflection:
+                        prefix
+                                + "Protoc threw an exception "
+                                // Fetch the actual cause because this was a call via Java
+                                // Reflection:
                                 + ex.getCause().getCause()
                                 + ", while PBJ didn't for original object: "
                                 + originalObject
-                                + " and fuzzBytes " + dataBuffer
-                                + " that PBJ parsed as: " + deserializedObject
-                        ,
-                        ex
-                );
+                                + " and fuzzBytes "
+                                + dataBuffer
+                                + " that PBJ parsed as: "
+                                + deserializedObject,
+                        ex);
             }
         }
     }
@@ -101,19 +102,19 @@ private static int estimateNumberOfBytesToModify(final Random random, final int
     }
 
     /**
-     * Perform a fuzz test for a given input object of type T and its codec
-     * using a provided random numbers generator.
-     * <p>
-     * The input object is expected to be valid (i.e. serializable using the given codec),
+     * Perform a fuzz test for a given input object of type T and its codec using a provided random
+     * numbers generator.
+     *
+     * <p>The input object is expected to be valid (i.e. serializable using the given codec),
      * otherwise an exception is thrown.
-     * <p>
-     * A comparison with Google Protoc parser is performed as well. A log output is generated
-     * if PBJ fails to parse data that Protoc is able to parse. Conversely, the test run
-     * fails with an exception if Protoc fails to parse malformed data that PBJ parses successfully.
-     * <p>
-     * The test run produces debugging output on stdout with a prefix that is unique
-     * to this particular run, allowing one to identify all the debugging output related
-     * to this specific run even if multiple runs are running concurrently.
+     *
+     * <p>A comparison with Google Protoc parser is performed as well. A log output is generated if
+     * PBJ fails to parse data that Protoc is able to parse. Conversely, the test run fails with an
+     * exception if Protoc fails to parse malformed data that PBJ parses successfully.
+     *
+     * <p>The test run produces debugging output on stdout with a prefix that is unique to this
+     * particular run, allowing one to identify all the debugging output related to this specific
+     * run even if multiple runs are running concurrently.
      *
      * @return a SingleFuzzTestResult
      */
@@ -123,7 +124,11 @@ public static <T> SingleFuzzTestResult fuzzTest(
             final Random random,
             final Function<InputStream, ?> protocParser) {
         // Generate a unique test ID prefix for this particular run to tag debugging output:
-        final String prefix = SingleFuzzTest.class.getSimpleName() + " " + TEST_ID_GENERATOR.getAndIncrement() + ": ";
+        final String prefix =
+                SingleFuzzTest.class.getSimpleName()
+                        + " "
+                        + TEST_ID_GENERATOR.getAndIncrement()
+                        + ": ";
 
         if (debug) System.out.println(prefix + "Object: " + object);
         final int size = codec.measureRecord(object);
@@ -158,7 +163,9 @@ public static <T> SingleFuzzTestResult fuzzTest(
 
         final int deserializedSize = codec.measureRecord(deserializedObject);
         if (deserializedSize != size) {
-            if (debug) System.out.println(prefix + "Original size: " + size + " , fuzz size: " + deserializedSize);
+            if (debug)
+                System.out.println(
+                        prefix + "Original size: " + size + " , fuzz size: " + deserializedSize);
             return SingleFuzzTestResult.DESERIALIZED_SIZE_MISMATCHED;
         }
 
@@ -180,7 +187,8 @@ public static <T> SingleFuzzTestResult fuzzTest(
         return SingleFuzzTestResult.RESERIALIZATION_PASSED;
     }
 
-    private static void modifyBufferedData(final Random random, final int size, final BufferedData dataBuffer) {
+    private static void modifyBufferedData(
+            final Random random, final int size, final BufferedData dataBuffer) {
         final int actualNumberOfBytesToModify = estimateNumberOfBytesToModify(random, size);
         for (int i = 0; i < actualNumberOfBytesToModify; i++) {
             final int randomPosition = random.nextInt(size);
@@ -192,10 +200,7 @@ private static void modifyBufferedData(final Random random, final int size, fina
     }
 
     private static <T> BufferedData createBufferedData(
-            final T object,
-            final Codec<T> codec,
-            final int size,
-            final String prefix) {
+            final T object, final Codec<T> codec, final int size, final String prefix) {
         final BufferedData dataBuffer;
         try {
             dataBuffer = write(object, codec, size);
@@ -205,5 +210,4 @@ private static <T> BufferedData createBufferedData(
         }
         return dataBuffer;
     }
-
 }
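
A hedged sketch of a single run, using the parameter order visible where FuzzTest calls SingleFuzzTest.fuzzTest(object, codec, random, protocParser) earlier in this patch; TimestampTest and its PROTOBUF codec also come from this patch, and the surrounding statements are illustrative:

    // Parser function over the protoc-generated model, mirroring what FuzzTest.getProtocParser
    // builds via reflection; parseFrom(InputStream) is the standard protoc API.
    java.util.function.Function<java.io.InputStream, Object> protocParser = in -> {
        try {
            return com.hedera.pbj.test.proto.java.TimestampTest.parseFrom(in);
        } catch (java.io.IOException e) {
            throw new RuntimeException(e);
        }
    };
    SingleFuzzTestResult outcome = SingleFuzzTest.fuzzTest(
            new TimestampTest(4L, 8), TimestampTest.PROTOBUF, new java.util.Random(42), protocParser);
    System.out.println("outcome = " + outcome); // DESERIALIZATION_FAILED is the most desirable result
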
diff --git a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/SingleFuzzTestResult.java b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/SingleFuzzTestResult.java
index bf4eaabf..37de92b9 100644
--- a/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/SingleFuzzTestResult.java
+++ b/pbj-integration-tests/src/main/java/com/hedera/pbj/integration/fuzz/SingleFuzzTestResult.java
@@ -4,58 +4,57 @@
 /**
  * An enum describing possible outcomes of a fuzz test run for a Protobuf codec.
  *
- * A typical fuzz test performs the following actions:
+ * <p>A typical fuzz test performs the following actions:
+ *
  * <ol>
- * <li> 1. Serializes a valid object into a byte array
- * <li> 2. Modifies a random element of the byte array randomly
- * <li> 3. Deserializes the modified byte array. <b>This is where an exception thrown
- * by the codec is considered to be the best possible outcome of a test run.</b>
- * <li> 4. Compares the measured size of the deserialized object to the measured size
- * of the original object. The test ends if they differ.
- * <li> 5. Reserializes the previously deserialized object into a new byte array.
- * The test ends if codec throws an exception.
- * <li> 6. Compares the bytes of the modified array from step #2 and the new array
- * from step #5. The test ends if the contents differ.
- * <li> 7. Finally, the test ends unconditionally w/o any conclusion because the
- * test was unable to make the codec fail.
+ *   <li>1. Serializes a valid object into a byte array
+ *   <li>2. Modifies a random element of the byte array randomly
+ *   <li>3. Deserializes the modified byte array. <b>This is where an exception thrown by the codec
+ *       is considered to be the best possible outcome of a test run.</b>
+ *   <li>4. Compares the measured size of the deserialized object to the measured size of the
+ *       original object. The test ends if they differ.
+ *   <li>5. Reserializes the previously deserialized object into a new byte array. The test ends if
+ *       codec throws an exception.
+ *   <li>6. Compares the bytes of the modified array from step #2 and the new array from step #5.
+ *       The test ends if the contents differ.
+ *   <li>7. Finally, the test ends unconditionally w/o any conclusion because the test was unable to
+ *       make the codec fail.
  * </ol>
  */
 public enum SingleFuzzTestResult {
     /**
      * codec.parse() threw an exception at step #3.
      *
-     * This indicates that the codec fails on malformed data
-     * which is exactly what we want it to do.
+     * <p>This indicates that the codec fails on malformed data which is exactly what we want it to
+     * do.
      */
     DESERIALIZATION_FAILED,
 
     /**
-     * codec.parse() with fuzz bytes returned an object whose measured size
-     * differs from the measured size of the original object at step #4.
+     * codec.parse() with fuzz bytes returned an object whose measured size differs from the
+     * measured size of the original object at step #4.
      *
-     * This indicates that the fuzz data appears to be a correct
-     * binary message for an object that may differ from the original input object.
-     * There may or may not be bugs in the codec, but this test run
-     * failed to ultimately reveal any.
+     * <p>This indicates that the fuzz data appears to be a correct binary message for an object
+     * that may differ from the original input object. There may or may not be bugs in the codec,
+     * but this test run failed to ultimately reveal any.
      */
     DESERIALIZED_SIZE_MISMATCHED,
 
     /**
      * codec.write() threw an exception for a previously deserialized object at step #5.
      *
-     * This means that the deserialized object produced at step #3 is invalid from the serializer
-     * perspective, which means that the deserializer can read malformed data and produce
-     * such malformed objects which may be a potential bug in the deserializer.
+     * <p>This means that the deserialized object produced at step #3 is invalid from the serializer
+     * perspective, which means that the deserializer can read malformed data and produce such
+     * malformed objects which may be a potential bug in the deserializer.
      */
     RESERIALIZATION_FAILED,
 
     /**
      * codec.write() produced bytes different from the fuzz bytes at step #6.
      *
-     * This means that the deserializer at step #3 may have ignored fuzz data
-     * producing an object that doesn't match its binary representation from step #2.
-     * Alternatively, the serializer at step #5 may have ignored a certain invalid
-     * state of the deserialized object from step #3.
+     * <p>This means that the deserializer at step #3 may have ignored fuzz data producing an object
+     * that doesn't match its binary representation from step #2. Alternatively, the serializer at
+     * step #5 may have ignored a certain invalid state of the deserialized object from step #3.
      * This may be a potential bug in the codec.
      */
     RESERIALIZATION_MISMATCHED,
@@ -63,10 +62,8 @@ public enum SingleFuzzTestResult {
     /**
      * codec.write() produced bytes identical to the fuzz bytes at step #6.
      *
-     * This means that the fuzz data resulted in a correct binary message.
-     * It's unclear if there are any bugs, but this test run was unable to
-     * reveal any.
+     * <p>This means that the fuzz data resulted in a correct binary message. It's unclear if there
+     * are any bugs, but this test run was unable to reveal any.
      */
     RESERIALIZATION_PASSED;
-
 }
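
The seven steps enumerated in the javadoc above map onto a small decision tree; a compilable paraphrase follows. The enum constants are the real ones from this file, while the method name and the functional parameters are illustrative placeholders so the sketch stays independent of the codec API:

    static <T> SingleFuzzTestResult outcomeOf(
            T original,
            java.util.function.Function<T, byte[]> serialize,
            java.util.function.Function<byte[], T> deserialize,
            java.util.function.ToIntFunction<T> measureSize,
            java.util.Random random) {
        byte[] fuzz = serialize.apply(original); // step 1: serialize a valid object
        // step 2: flip one random byte (assumes a non-empty payload); XOR with a non-zero value
        // always changes it
        fuzz[random.nextInt(fuzz.length)] ^= (byte) (1 + random.nextInt(255));
        final T parsed;
        try {
            parsed = deserialize.apply(fuzz); // step 3: parse the damaged payload
        } catch (Exception e) {
            return SingleFuzzTestResult.DESERIALIZATION_FAILED; // the most desirable outcome
        }
        if (measureSize.applyAsInt(parsed) != measureSize.applyAsInt(original)) {
            return SingleFuzzTestResult.DESERIALIZED_SIZE_MISMATCHED; // step 4
        }
        final byte[] reserialized;
        try {
            reserialized = serialize.apply(parsed); // step 5
        } catch (Exception e) {
            return SingleFuzzTestResult.RESERIALIZATION_FAILED;
        }
        return java.util.Arrays.equals(fuzz, reserialized) // steps 6 and 7
                ? SingleFuzzTestResult.RESERIALIZATION_PASSED
                : SingleFuzzTestResult.RESERIALIZATION_MISMATCHED;
    }
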
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/CompareToNegativeTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/CompareToNegativeTest.java
index 11faa73d..259b1da7 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/CompareToNegativeTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/CompareToNegativeTest.java
@@ -5,39 +5,49 @@
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertThrows;
 
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.io.TempDir;
-
 import java.io.File;
 import java.net.URL;
 import java.util.List;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
 
 class CompareToNegativeTest {
 
-    @TempDir
-    private static File outputDir;
+    @TempDir private static File outputDir;
 
     @Test
     void testNonComparableSubObj() {
-        IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () ->
-                getCompileFilesIn("non_compilable_comparable_sub_obj.proto"));
-        assertEquals("Field NonComparableSubObj.subObject specified in `pbj.comparable` option must implement `Comparable` interface but it doesn't.",
+        IllegalArgumentException exception =
+                assertThrows(
+                        IllegalArgumentException.class,
+                        () -> getCompileFilesIn("non_compilable_comparable_sub_obj.proto"));
+        assertEquals(
+                "Field NonComparableSubObj.subObject specified in `pbj.comparable` option must"
+                        + " implement `Comparable` interface but it doesn't.",
                 exception.getMessage());
     }
 
     @Test
     void testRepeatedField() {
-        IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () ->
-                getCompileFilesIn("non_compilable_comparable_repeated.proto"));
-        assertEquals("Field `int32List` specified in `pbj.comparable` option is repeated. Repeated fields are not supported by this option.",
+        IllegalArgumentException exception =
+                assertThrows(
+                        IllegalArgumentException.class,
+                        () -> getCompileFilesIn("non_compilable_comparable_repeated.proto"));
+        assertEquals(
+                "Field `int32List` specified in `pbj.comparable` option is repeated. Repeated"
+                        + " fields are not supported by this option.",
                 exception.getMessage());
     }
 
     @Test
     void testNonComparableOneOfField() {
-        IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () ->
-                getCompileFilesIn("non_compilable_comparable_oneOf.proto"));
-        assertEquals("Field NonComparableSubObj.subObject specified in `pbj.comparable` option must implement `Comparable` interface but it doesn't.",
+        IllegalArgumentException exception =
+                assertThrows(
+                        IllegalArgumentException.class,
+                        () -> getCompileFilesIn("non_compilable_comparable_oneOf.proto"));
+        assertEquals(
+                "Field NonComparableSubObj.subObject specified in `pbj.comparable` option must"
+                        + " implement `Comparable` interface but it doesn't.",
                 exception.getMessage());
     }
 
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/CompareToTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/CompareToTest.java
index f4f7419e..c30d4994 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/CompareToTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/CompareToTest.java
@@ -22,15 +22,13 @@
 import com.hedera.pbj.test.proto.pbj.LimitedComparableTestWithOneOf;
 import com.hedera.pbj.test.proto.pbj.StringValueComparableTest;
 import com.hedera.pbj.test.proto.pbj.UnsignedComparableTest;
-import org.junit.jupiter.api.Test;
-
 import java.util.ArrayList;
-
 import java.util.random.RandomGenerator;
+import org.junit.jupiter.api.Test;
 
 /**
- * Unit test for {@link ComparableTest} and {@link LimitedComparableTest} objects.
- * The goal is to test the generated {@link Comparable} interface implementation.
+ * Unit test for {@link ComparableTest} and {@link LimitedComparableTest} objects. The goal is to
+ * test the generated {@link Comparable} interface implementation.
  */
 class CompareToTest {
 
@@ -41,115 +39,116 @@ void testCompareTo_int32() {
                 new ComparableTest(2, 0.0, false, null, null, null, null),
                 new ComparableTest(3, 0.0, false, null, null, null, null));
     }
+
     @Test
     void testCompareTo_double() {
         assertComparables(
                 new ComparableTest(1, 0.0, false, null, null, null, null),
                 new ComparableTest(2, 1.5, false, null, null, null, null),
-                new ComparableTest(3, 2.66, false, null, null, null, null)
-        );
+                new ComparableTest(3, 2.66, false, null, null, null, null));
     }
+
     @Test
     void testCompareTo_bool() {
         assertComparables(
                 new ComparableTest(0, 0.0, false, null, null, null, null),
-                new ComparableTest(0, 0.0, true, null, null, null, null)
-        );
+                new ComparableTest(0, 0.0, true, null, null, null, null));
     }
+
     @Test
     void testCompareTo_string() {
         assertComparables(
                 new ComparableTest(0, 0.0, false, "a", null, null, null),
                 new ComparableTest(0, 0.0, false, "b", null, null, null),
-                new ComparableTest(0, 0.0, false, "c", null, null, null)
-        );
+                new ComparableTest(0, 0.0, false, "c", null, null, null));
     }
+
     @Test
     void testCompareTo_bytes() {
         assertComparables(
                 new ComparableTest(0, 0.0, false, null, null, null, Bytes.wrap("a")),
                 new ComparableTest(0, 0.0, false, null, null, null, Bytes.wrap("aa")),
-                new ComparableTest(0, 0.0, false, null, null, null, Bytes.wrap("aaa"))
-        );
+                new ComparableTest(0, 0.0, false, null, null, null, Bytes.wrap("aaa")));
     }
+
     @Test
     void testCompareTo_bytes_same_lenth() {
-         assertComparables(
+        assertComparables(
                 new ComparableTest(0, 0.0, false, null, null, null, Bytes.wrap("aba")),
                 new ComparableTest(0, 0.0, false, null, null, null, Bytes.wrap("abb")),
-                new ComparableTest(0, 0.0, false, null, null, null, Bytes.wrap("abc"))
-        );
+                new ComparableTest(0, 0.0, false, null, null, null, Bytes.wrap("abc")));
     }
+
     @Test
-    void testCompareTo_enum(){
+    void testCompareTo_enum() {
         assertComparables(
                 new ComparableTest(0, 0.0, false, null, ComparableEnum.ONE, null, null),
                 new ComparableTest(0, 0.0, false, null, ComparableEnum.TWO, null, null),
-                new ComparableTest(0, 0.0, false, null, ComparableEnum.THREE, null, null)
-        );
+                new ComparableTest(0, 0.0, false, null, ComparableEnum.THREE, null, null));
     }
+
     @Test
-    void testCompareTo_subObject(){
+    void testCompareTo_subObject() {
         assertComparables(
                 new ComparableTest(0, 0.0, false, null, null, new ComparableSubObj(1), null),
-                new ComparableTest(0, 0.0, false, null, null,  new ComparableSubObj(2), null),
-                new ComparableTest(0, 0.0, false, null, null,  new ComparableSubObj(3), null)
-        );
+                new ComparableTest(0, 0.0, false, null, null, new ComparableSubObj(2), null),
+                new ComparableTest(0, 0.0, false, null, null, new ComparableSubObj(3), null));
     }
+
     @Test
-     void compareTo_mixed() {
-         assertComparables(
-                 new ComparableTest(1, 0.0, false, null, null, new ComparableSubObj(1), null),
-                 new ComparableTest(1, 0.0, false, null, null, new ComparableSubObj(2), null),
-                 new ComparableTest(2, 0.0, false, null, null, new ComparableSubObj(1), null),
-                 new ComparableTest(2, 0.0, false, null, null,  new ComparableSubObj(2), null)
-         );
-     }
-     @Test
-     void compareTo_StringValue() {
-            assertComparables(
-                    new StringValueComparableTest(null),
-                    new StringValueComparableTest("a"),
-                    new StringValueComparableTest("b"),
-                    new StringValueComparableTest("c")
-            );
-     }
-     @Test
-     void compareTo_BoolValue() {
-            assertComparables(
-                    new BoolValueComparableTest(null),
-                    new BoolValueComparableTest(false),
-                    new BoolValueComparableTest(true)
-            );
-     }
-     @Test
-     void compareTo_Int32Value() {
-            assertComparables(
-                    new Int32ValueComparableTest(null, null),
-                    new Int32ValueComparableTest(1, null),
-                    new Int32ValueComparableTest(2, null),
-                    new Int32ValueComparableTest(3, null)
-            );
-     }
-     @Test
-     void compareTo_Uint32Value() {
-            assertComparables(
-                    new Int32ValueComparableTest(null, null),
-                    new Int32ValueComparableTest(null, 0),
-                    new Int32ValueComparableTest(null, 3),
-                    new Int32ValueComparableTest(null, -2),
-                    new Int32ValueComparableTest(null, -1)
-            );
-     }
+    void compareTo_mixed() {
+        assertComparables(
+                new ComparableTest(1, 0.0, false, null, null, new ComparableSubObj(1), null),
+                new ComparableTest(1, 0.0, false, null, null, new ComparableSubObj(2), null),
+                new ComparableTest(2, 0.0, false, null, null, new ComparableSubObj(1), null),
+                new ComparableTest(2, 0.0, false, null, null, new ComparableSubObj(2), null));
+    }
+
+    @Test
+    void compareTo_StringValue() {
+        assertComparables(
+                new StringValueComparableTest(null),
+                new StringValueComparableTest("a"),
+                new StringValueComparableTest("b"),
+                new StringValueComparableTest("c"));
+    }
+
+    @Test
+    void compareTo_BoolValue() {
+        assertComparables(
+                new BoolValueComparableTest(null),
+                new BoolValueComparableTest(false),
+                new BoolValueComparableTest(true));
+    }
+
+    @Test
+    void compareTo_Int32Value() {
+        assertComparables(
+                new Int32ValueComparableTest(null, null),
+                new Int32ValueComparableTest(1, null),
+                new Int32ValueComparableTest(2, null),
+                new Int32ValueComparableTest(3, null));
+    }
+
+    @Test
+    void compareTo_Uint32Value() {
+        assertComparables(
+                new Int32ValueComparableTest(null, null),
+                new Int32ValueComparableTest(null, 0),
+                new Int32ValueComparableTest(null, 3),
+                new Int32ValueComparableTest(null, -2),
+                new Int32ValueComparableTest(null, -1));
+    }
+
     @Test
     void compareTo_Int64Value() {
         assertComparables(
                 new Int64ValueComparableTest(null, null),
                 new Int64ValueComparableTest(1L, null),
                 new Int64ValueComparableTest(2L, null),
-                new Int64ValueComparableTest(3L, null)
-        );
+                new Int64ValueComparableTest(3L, null));
     }
+
     @Test
     void compareTo_UInt64Value() {
         assertComparables(
@@ -157,9 +156,9 @@ void compareTo_UInt64Value() {
                 new Int64ValueComparableTest(null, 0L),
                 new Int64ValueComparableTest(null, 3L),
                 new Int64ValueComparableTest(null, -2L),
-                new Int64ValueComparableTest(null, -1L)
-        );
+                new Int64ValueComparableTest(null, -1L));
     }
+
     @Test
     void compareTo_FloatValue() {
         assertComparables(
@@ -168,6 +167,7 @@ void compareTo_FloatValue() {
                 new FloatValueComparableTest(2.5f),
                 new FloatValueComparableTest(7.7f));
     }
+
     @Test
     void compareTo_DoubleValue() {
         assertComparables(
@@ -176,6 +176,7 @@ void compareTo_DoubleValue() {
                 new DoubleValueComparableTest(2.5),
                 new DoubleValueComparableTest(7.7));
     }
+
     @Test
     void compareTo_ByteValue() {
         assertComparables(
@@ -184,6 +185,7 @@ void compareTo_ByteValue() {
                 new BytesValueComparableTest(Bytes.wrap("b")),
                 new BytesValueComparableTest(Bytes.wrap("c")));
     }
+
     @Test
     void comareTo_unsigned() {
         assertComparables(
@@ -195,70 +197,86 @@ void comareTo_unsigned() {
                 new UnsignedComparableTest(0, 0L),
                 new UnsignedComparableTest(0, 3L),
                 new UnsignedComparableTest(0, -2L),
-                new UnsignedComparableTest(0, -1L)
-        );
+                new UnsignedComparableTest(0, -1L));
+    }
+
+    @Test
+    void limitedCompareTo_nonComparableOneOf() {
+        // This code is only here to be compiled. The OneOf field is not listed in the
+        // `pbj.comparable` option and therefore it has `OneOf` type,
+        // not `ComparableOneOf`.
+        new LimitedComparableTestWithOneOf(
+                0,
+                new OneOf<>(
+                        LimitedComparableTestWithOneOf.OneOfExampleOneOfType.ONE_OF_SUB_OBJECT,
+                        new ComparableSubObj(1)));
+    }
+
+    @Test
+    void limitedCompareTo_int32() {
+        assertComparables(
+                new LimitedComparableTest(1, 0L, false, null, null, null),
+                new LimitedComparableTest(2, 0L, false, null, null, null),
+                new LimitedComparableTest(3, 0L, false, null, null, null));
+    }
+
+    @Test
+    void limitedCompareTo_text() {
+        assertComparables(
+                new LimitedComparableTest(0, 0L, false, "1", null, null),
+                new LimitedComparableTest(0, 0L, false, "2", null, null),
+                new LimitedComparableTest(0, 0L, false, "3", null, null));
+    }
+
+    @Test
+    void limitedCompareTo_subObj() {
+        assertComparables(
+                new LimitedComparableTest(0, 0L, false, null, null, new ComparableSubObj(1)),
+                new LimitedComparableTest(0, 0L, false, null, null, new ComparableSubObj(2)),
+                new LimitedComparableTest(0, 0L, false, null, null, new ComparableSubObj(3)));
     }
+
     @Test
-     void limitedCompareTo_nonComparableOneOf() {
-        // This code is only here to be compiled. OneOf field is not listed in `pbj.comparable` option
-         // and therefore it has `OneOf` type, not `ComparableOneOf`.
-        new LimitedComparableTestWithOneOf(0, new OneOf<>(LimitedComparableTestWithOneOf.OneOfExampleOneOfType.ONE_OF_SUB_OBJECT, new ComparableSubObj(1)));
-     }
-
-     @Test
-     void limitedCompareTo_int32() {
-         assertComparables(
-                 new LimitedComparableTest(1, 0L, false, null, null, null),
-                 new LimitedComparableTest(2, 0L, false, null, null, null),
-                 new LimitedComparableTest(3, 0L, false, null, null, null));
-     }
-
-     @Test
-     void limitedCompareTo_text() {
-         assertComparables(
-                 new LimitedComparableTest(0, 0L, false, "1", null, null),
-                 new LimitedComparableTest(0, 0L, false, "2", null, null),
-                 new LimitedComparableTest(0, 0L, false, "3", null, null));
-     }
-
-     @Test
-     void limitedCompareTo_subObj() {
-         assertComparables(
-                 new LimitedComparableTest(0, 0L, false, null, null,  new ComparableSubObj(1)),
-                 new LimitedComparableTest(0, 0L, false, null, null,  new ComparableSubObj(2)),
-                 new LimitedComparableTest(0, 0L, false, null, null,  new ComparableSubObj(3)));
-     }
-
-     @Test
-     void limitedCompareTo_mixed() {
+    void limitedCompareTo_mixed() {
         // note that only field 1, 4 and 6 are comparable, others are ignored
-         assertComparables(
-                 new LimitedComparableTest(1, nextLong(), nextBoolean(), "1", nextEnum(),  new ComparableSubObj(1)),
-                 new LimitedComparableTest(1, nextLong(), nextBoolean(), "1", nextEnum(),  new ComparableSubObj(2)),
-                 new LimitedComparableTest(1, nextLong(), nextBoolean(), "2", nextEnum(),  new ComparableSubObj(1)),
-                 new LimitedComparableTest(1, nextLong(), nextBoolean(), "2", nextEnum(),  new ComparableSubObj(2)),
-                 new LimitedComparableTest(2, nextLong(), nextBoolean(), "1", nextEnum(),  new ComparableSubObj(1)),
-                 new LimitedComparableTest(2, nextLong(), nextBoolean(), "1", nextEnum(),  new ComparableSubObj(2)),
-                 new LimitedComparableTest(2, nextLong(), nextBoolean(), "2", nextEnum(),  new ComparableSubObj(1)),
-                 new LimitedComparableTest(2, nextLong(), nextBoolean(), "2", nextEnum(),  new ComparableSubObj(2))
-         );
-     }
-
-     @Test
-     void oneOfCompareTo() {
-         assertComparables(
-                 createOneOf(ComparableOneOfTest.OneofExampleOneOfType.TEXT1_ONE_OF, "a"),
-                 createOneOf(ComparableOneOfTest.OneofExampleOneOfType.TEXT1_ONE_OF, "b"),
-                 createOneOf(ComparableOneOfTest.OneofExampleOneOfType.TEXT2_ONE_OF, "a"),
-                 createOneOf(ComparableOneOfTest.OneofExampleOneOfType.TEXT2_ONE_OF, "b"),
-                 createOneOf(ComparableOneOfTest.OneofExampleOneOfType.SUB_OBJECT, new ComparableSubObj(1)),
-                 createOneOf(ComparableOneOfTest.OneofExampleOneOfType.SUB_OBJECT, new ComparableSubObj(2))
-         );
-     }
-
-     private ComparableOneOfTest createOneOf(ComparableOneOfTest.OneofExampleOneOfType type, Comparable value) {
-         return new ComparableOneOfTest(new ComparableOneOf<>(type, value));
-     }
+        assertComparables(
+                new LimitedComparableTest(
+                        1, nextLong(), nextBoolean(), "1", nextEnum(), new ComparableSubObj(1)),
+                new LimitedComparableTest(
+                        1, nextLong(), nextBoolean(), "1", nextEnum(), new ComparableSubObj(2)),
+                new LimitedComparableTest(
+                        1, nextLong(), nextBoolean(), "2", nextEnum(), new ComparableSubObj(1)),
+                new LimitedComparableTest(
+                        1, nextLong(), nextBoolean(), "2", nextEnum(), new ComparableSubObj(2)),
+                new LimitedComparableTest(
+                        2, nextLong(), nextBoolean(), "1", nextEnum(), new ComparableSubObj(1)),
+                new LimitedComparableTest(
+                        2, nextLong(), nextBoolean(), "1", nextEnum(), new ComparableSubObj(2)),
+                new LimitedComparableTest(
+                        2, nextLong(), nextBoolean(), "2", nextEnum(), new ComparableSubObj(1)),
+                new LimitedComparableTest(
+                        2, nextLong(), nextBoolean(), "2", nextEnum(), new ComparableSubObj(2)));
+    }
+
+    @Test
+    void oneOfCompareTo() {
+        assertComparables(
+                createOneOf(ComparableOneOfTest.OneofExampleOneOfType.TEXT1_ONE_OF, "a"),
+                createOneOf(ComparableOneOfTest.OneofExampleOneOfType.TEXT1_ONE_OF, "b"),
+                createOneOf(ComparableOneOfTest.OneofExampleOneOfType.TEXT2_ONE_OF, "a"),
+                createOneOf(ComparableOneOfTest.OneofExampleOneOfType.TEXT2_ONE_OF, "b"),
+                createOneOf(
+                        ComparableOneOfTest.OneofExampleOneOfType.SUB_OBJECT,
+                        new ComparableSubObj(1)),
+                createOneOf(
+                        ComparableOneOfTest.OneofExampleOneOfType.SUB_OBJECT,
+                        new ComparableSubObj(2)));
+    }
+
+    private ComparableOneOfTest createOneOf(
+            ComparableOneOfTest.OneofExampleOneOfType type, Comparable value) {
+        return new ComparableOneOfTest(new ComparableOneOf<>(type, value));
+    }
 
     private static long nextLong() {
         return RandomGenerator.getDefault().nextLong();
@@ -269,18 +287,20 @@ private static boolean nextBoolean() {
     }
 
     private static ComparableEnum nextEnum() {
-        return ComparableEnum.fromProtobufOrdinal(RandomGenerator.getDefault().nextInt(ComparableEnum.values().length));
+        return ComparableEnum.fromProtobufOrdinal(
+                RandomGenerator.getDefault().nextInt(ComparableEnum.values().length));
     }
 
     @SuppressWarnings({"rawtypes", "unchecked"})
     private static void assertComparables(final Comparable... objs) {
-        final var list = new ArrayList<Comparable>() {
-            {
-                for (Comparable<?> obj : objs) {
-                    add(obj);
-                }
-            }
-        };
+        final var list =
+                new ArrayList<Comparable>() {
+                    {
+                        for (Comparable<?> obj : objs) {
+                            add(obj);
+                        }
+                    }
+                };
         // randomize list first before sort it
         shuffle(list);
         sort(list);
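
    As an aside for readers skimming the reformatted helper above: assertComparables builds a list from its
    varargs, shuffles it, sorts it, and then relies on the sorted order matching the order in which the
    arguments were passed. A minimal, self-contained sketch of that same pattern in plain Java follows; the
    class and method names are illustrative only and are not part of this patch or of PBJ.

    import static java.util.Collections.shuffle;
    import static java.util.Collections.sort;

    import java.util.ArrayList;
    import java.util.List;

    final class SortOrderSketch {
        // Copies the expected (ascending) values, shuffles the copy, sorts it,
        // and verifies the sorted result matches the original order.
        static <T extends Comparable<T>> void assertSortedOrder(final List<T> expectedAscending) {
            final List<T> list = new ArrayList<>(expectedAscending);
            shuffle(list);
            sort(list);
            if (!list.equals(expectedAscending)) {
                throw new AssertionError("expected " + expectedAscending + " but was " + list);
            }
        }

        public static void main(String[] args) {
            assertSortedOrder(List.of(1, 2, 3)); // passes silently for a correct compareTo
        }
    }
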
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/ExtendedUtf8MessageWithStringTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/ExtendedUtf8MessageWithStringTest.java
index 64987db4..b054ef87 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/ExtendedUtf8MessageWithStringTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/ExtendedUtf8MessageWithStringTest.java
@@ -1,92 +1,92 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
+import static com.hedera.pbj.runtime.ProtoTestTools.getThreadLocalByteBuffer;
+import static com.hedera.pbj.runtime.ProtoTestTools.getThreadLocalDataBuffer;
+import static com.hedera.pbj.runtime.ProtoTestTools.getThreadLocalDataBuffer2;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
 import com.google.protobuf.CodedOutputStream;
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
 import com.hedera.pbj.runtime.test.NoToStringWrapper;
 import com.hedera.pbj.test.proto.pbj.MessageWithString;
-import org.junit.jupiter.params.ParameterizedTest;
-import org.junit.jupiter.params.provider.MethodSource;
 import java.nio.ByteBuffer;
 import java.util.List;
 import java.util.stream.IntStream;
 import java.util.stream.Stream;
-import static com.hedera.pbj.runtime.ProtoTestTools.getThreadLocalByteBuffer;
-import static com.hedera.pbj.runtime.ProtoTestTools.getThreadLocalDataBuffer;
-import static com.hedera.pbj.runtime.ProtoTestTools.getThreadLocalDataBuffer2;
-import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
 
-/**
- * Unit Test for MessageWithString model object. Generate based on protobuf schema.
- */
+/** Unit Test for MessageWithString model object. Generated based on protobuf schema. */
 public final class ExtendedUtf8MessageWithStringTest {
-	@ParameterizedTest
+    @ParameterizedTest
     @MethodSource("createModelTestArguments")
-    public void testMessageWithStringAgainstProtoC(final NoToStringWrapper<MessageWithString> modelObjWrapper) throws Exception {
-    	final MessageWithString modelObj = modelObjWrapper.getValue();
-    	// get reusable thread buffers
-    	final BufferedData dataBuffer = getThreadLocalDataBuffer();
-    	final BufferedData dataBuffer2 = getThreadLocalDataBuffer2();
-    	final ByteBuffer byteBuffer = getThreadLocalByteBuffer();
-    
-    	// model to bytes with PBJ
-    	MessageWithString.PROTOBUF.write(modelObj,dataBuffer);
-    	// clamp limit to bytes written
-    	dataBuffer.limit(dataBuffer.position());
-    
-    	// copy bytes to ByteBuffer
-    	dataBuffer.resetPosition();
-    	dataBuffer.readBytes(byteBuffer);
-    	byteBuffer.flip();
-    
-    	// read proto bytes with ProtoC to make sure it is readable and no parse exceptions are thrown
-    	final com.hedera.pbj.test.proto.java.MessageWithString protoCModelObj = com.hedera.pbj.test.proto.java.MessageWithString.parseFrom(byteBuffer);
-    
-    	// read proto bytes with PBJ parser
-    	dataBuffer.resetPosition();
-    	final MessageWithString modelObj2 = MessageWithString.PROTOBUF.parse(dataBuffer);
-    
-    	// check the read back object is equal to written original one
-    	//assertEquals(modelObj.toString(), modelObj2.toString());
-    	assertEquals(modelObj, modelObj2);
-    
-    	// model to bytes with ProtoC writer
-    	byteBuffer.clear();
-    	final CodedOutputStream codedOutput = CodedOutputStream.newInstance(byteBuffer);
-    	protoCModelObj.writeTo(codedOutput);
-    	codedOutput.flush();
-    	byteBuffer.flip();
-    	// copy to a data buffer
-    	dataBuffer2.writeBytes(byteBuffer);
-    	dataBuffer2.flip();
-    
-    	// compare written bytes
-    	assertEquals(dataBuffer, dataBuffer2);
-    
-    	// parse those bytes again with PBJ
-    	dataBuffer2.resetPosition();
-    	final MessageWithString modelObj3 = MessageWithString.PROTOBUF.parse(dataBuffer2);
-    	assertEquals(modelObj, modelObj3);
+    public void testMessageWithStringAgainstProtoC(
+            final NoToStringWrapper<MessageWithString> modelObjWrapper) throws Exception {
+        final MessageWithString modelObj = modelObjWrapper.getValue();
+        // get reusable thread buffers
+        final BufferedData dataBuffer = getThreadLocalDataBuffer();
+        final BufferedData dataBuffer2 = getThreadLocalDataBuffer2();
+        final ByteBuffer byteBuffer = getThreadLocalByteBuffer();
+
+        // model to bytes with PBJ
+        MessageWithString.PROTOBUF.write(modelObj, dataBuffer);
+        // clamp limit to bytes written
+        dataBuffer.limit(dataBuffer.position());
+
+        // copy bytes to ByteBuffer
+        dataBuffer.resetPosition();
+        dataBuffer.readBytes(byteBuffer);
+        byteBuffer.flip();
+
+        // read proto bytes with ProtoC to make sure it is readable and no parse
+        // exceptions are thrown
+        final com.hedera.pbj.test.proto.java.MessageWithString protoCModelObj =
+                com.hedera.pbj.test.proto.java.MessageWithString.parseFrom(byteBuffer);
+
+        // read proto bytes with PBJ parser
+        dataBuffer.resetPosition();
+        final MessageWithString modelObj2 = MessageWithString.PROTOBUF.parse(dataBuffer);
+
+        // check that the read-back object is equal to the original one written
+        // assertEquals(modelObj.toString(), modelObj2.toString());
+        assertEquals(modelObj, modelObj2);
+
+        // model to bytes with ProtoC writer
+        byteBuffer.clear();
+        final CodedOutputStream codedOutput = CodedOutputStream.newInstance(byteBuffer);
+        protoCModelObj.writeTo(codedOutput);
+        codedOutput.flush();
+        byteBuffer.flip();
+        // copy to a data buffer
+        dataBuffer2.writeBytes(byteBuffer);
+        dataBuffer2.flip();
+
+        // compare written bytes
+        assertEquals(dataBuffer, dataBuffer2);
+
+        // parse those bytes again with PBJ
+        dataBuffer2.resetPosition();
+        final MessageWithString modelObj3 = MessageWithString.PROTOBUF.parse(dataBuffer2);
+        assertEquals(modelObj, modelObj3);
     }
-    
-	/**
-     * List of all valid arguments for testing, built as a static list, so we can reuse it.
-     */
+
+    /** List of all valid arguments for testing, built as a static list, so we can reuse it. */
     public static final List<MessageWithString> ARGUMENTS;
-    
+
     /**
-     * Create a stream of all test permutations of the MessageWithString class we are testing. This is reused by other tests
-     * as well that have model objects with fields of this type.
+     * Create a stream of all test permutations of the MessageWithString class we are testing. This
+     * is reused by other tests as well that have model objects with fields of this type.
      *
      * @return stream of model objects for all test cases
      */
     public static Stream<NoToStringWrapper<MessageWithString>> createModelTestArguments() {
-    	return ARGUMENTS.stream().map(NoToStringWrapper::new);
+        return ARGUMENTS.stream().map(NoToStringWrapper::new);
     }
 
-
-	/** Simple multi-line text test block */
-	private static final String SAMPLE_TEXT_BLOCK = """
+    /** Simple multi-line text test block */
+    private static final String SAMPLE_TEXT_BLOCK =
+            """
                     To be, or not to be, that is the question:
                     Whether ’tis nobler in the mind to suffer
                     The slings and arrows of outrageous fortune,
@@ -102,8 +102,9 @@ public static Stream<NoToStringWrapper<MessageWithString>> createModelTestArgume
                     Must give us pause—there’s the respect
                     That makes calamity of so long life…""";
 
-	/** UTF-8 language test block containing pangrams in a bunch of languages */
-	private static final String UTF8_LANGUAGES_TEXT_BLOCK_1 = """
+    /** UTF-8 language test block containing pangrams in a bunch of languages */
+    private static final String UTF8_LANGUAGES_TEXT_BLOCK_1 =
+            """
             English : A quick brown fox jumps over the lazy dog
             Arabic : صِف خَلقَ خَودِ كَمِثلِ الشَمسِ إِذ بَزَغَت — يَحظى الضَجيعُ بِها نَجلاءَ مِعطارِ
             Arabic : نصٌّ حكيمٌ لهُ سِرٌّ قاطِعٌ وَذُو شَأنٍ عَظيمٍ مكتوبٌ على ثوبٍ أخضرَ ومُغلفٌ بجلدٍ أزرق
@@ -130,8 +131,9 @@ public static Stream<NoToStringWrapper<MessageWithString>> createModelTestArgume
             Japanese : あめ つち ほし そら / やま かは みね たに / くも きり むろ こけ / ひと いぬ うへ すゑ / ゆわ さる おふ せよ / えのえを なれ ゐて
             """;
 
-	/** UTF-8 language test block containing pangrams in a bunch of languages, continued */
-	private static final String UTF8_LANGUAGES_TEXT_BLOCK_2 = """
+    /** UTF-8 language test block containing pangrams in a bunch of languages, continued */
+    private static final String UTF8_LANGUAGES_TEXT_BLOCK_2 =
+            """
             Japanese : あめ つち ほし そら / やま かは みね たに / くも きり むろ こけ / ひと いぬ うへ すゑ / ゆわ さる おふ せよ / えのえを なれ ゐて
             Japanese : 天 地 星 空 / 山 川 峰 谷 / 雲 霧 室 苔 / 人 犬 上 末 / 硫黄 猿 生ふ 為よ / 榎の 枝を 馴れ 居て
             Japanese : いろはにほへと ちりぬるを わかよたれそ つねならむ うゐのおくやま けふこえて あさきゆめみし ゑひもせす(ん)
@@ -161,8 +163,9 @@ public static Stream<NoToStringWrapper<MessageWithString>> createModelTestArgume
             Welsh : Parciais fy jac codi baw hud llawn dŵr ger tŷ Mabon.
             """;
 
-	/** Example Unicode Math symbols */
-	private static final String MATH_SYMBOLS = """
+    /** Example Unicode Math symbols */
+    private static final String MATH_SYMBOLS =
+            """
             U+220x  ∀	∁	∂	∃	∄	∅	∆	∇	∈	∉	∊	∋	∌	∍	∎	∏
             U+221x	∐	∑	−	∓	∔	∕	∖	∗	∘	∙	√	∛	∜	∝	∞	∟
             U+222x	∠	∡	∢	∣	∤	∥	∦	∧	∨	∩	∪	∫	∬	∭	∮	∯
@@ -196,7 +199,9 @@ public static Stream<NoToStringWrapper<MessageWithString>> createModelTestArgume
             U+2AEx	⫠	⫡	⫢	⫣	⫤	⫥	⫦	⫧	⫨	⫩	⫪	⫫	⫬	⫭	⫮	⫯
             U+2AFx	⫰	⫱	⫲	⫳	⫴	⫵	⫶	⫷	⫸	⫹	⫺	⫻	⫼	⫽	⫾	⫿
             """;
-	private static final String ARROW_SYMBOLS = """
+
+    private static final String ARROW_SYMBOLS =
+            """
             U+219x	←	↑	→	↓	↔	↕	↖	↗	↘	↙	↚	↛	↜	↝	↞	↟
             U+21Ax	↠	↡	↢	↣	↤	↥	↦	↧	↨	↩	↪	↫	↬	↭	↮	↯
             U+21Bx	↰	↱	↲	↳	↴	↵	↶	↷	↸	↹	↺	↻	↼	↽	↾	↿
@@ -213,32 +218,34 @@ public static Stream<NoToStringWrapper<MessageWithString>> createModelTestArgume
             U+296x	⥠	⥡	⥢	⥣	⥤	⥥	⥦	⥧	⥨	⥩	⥪	⥫	⥬	⥭	⥮	⥯
             U+297x	⥰	⥱	⥲	⥳	⥴	⥵	⥶	⥷	⥸	⥹	⥺	⥻	⥼	⥽	⥾	⥿
             """;
-	/** string type test cases */
-	public static final List<String> EXTENDED_STRING_TESTS_LIST = List.of(
-			"",
-			"Dude",
-			"©«",
-			"\n",
-			"I need some HBAR to run work on Hedera!",
-			"I need some ℏ to run work on Hedera!",
-			SAMPLE_TEXT_BLOCK,
-			UTF8_LANGUAGES_TEXT_BLOCK_1,
-			UTF8_LANGUAGES_TEXT_BLOCK_2,
-			MATH_SYMBOLS,
-			ARROW_SYMBOLS
-	);
-
-
-	static {
-		final var aTestStringList = EXTENDED_STRING_TESTS_LIST;
-		// work out the longest of all the lists of args as that is how many test cases we need
-		final int maxValues = IntStream.of(
-				aTestStringList.size()
-		).max().getAsInt();
-		// create new stream of model objects using lists above as constructor params
-		ARGUMENTS = IntStream.range(0,maxValues)
-				.mapToObj(i -> new MessageWithString(
-						aTestStringList.get(Math.min(i, aTestStringList.size()-1))
-				)).toList();
-	}
+
+    /** string type test cases */
+    public static final List<String> EXTENDED_STRING_TESTS_LIST =
+            List.of(
+                    "",
+                    "Dude",
+                    "©«",
+                    "\n",
+                    "I need some HBAR to run work on Hedera!",
+                    "I need some ℏ to run work on Hedera!",
+                    SAMPLE_TEXT_BLOCK,
+                    UTF8_LANGUAGES_TEXT_BLOCK_1,
+                    UTF8_LANGUAGES_TEXT_BLOCK_2,
+                    MATH_SYMBOLS,
+                    ARROW_SYMBOLS);
+
+    static {
+        final var aTestStringList = EXTENDED_STRING_TESTS_LIST;
+        // work out the longest of all the lists of args as that is how many test cases we need
+        final int maxValues = IntStream.of(aTestStringList.size()).max().getAsInt();
+        // create new stream of model objects using lists above as constructor params
+        ARGUMENTS =
+                IntStream.range(0, maxValues)
+                        .mapToObj(
+                                i ->
+                                        new MessageWithString(
+                                                aTestStringList.get(
+                                                        Math.min(i, aTestStringList.size() - 1))))
+                        .toList();
+    }
 }
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/FieldsNonNullTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/FieldsNonNullTest.java
index fc086b02..9320cc93 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/FieldsNonNullTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/FieldsNonNullTest.java
@@ -1,6 +1,10 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
 import com.hedera.hapi.node.base.FeeSchedule;
 import com.hedera.hapi.node.base.TransactionFeeSchedule;
 import com.hedera.pbj.runtime.io.buffer.Bytes;
@@ -8,10 +12,6 @@
 import com.hedera.pbj.test.proto.pbj.MessageWithString;
 import org.junit.jupiter.api.Test;
 
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotNull;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-
 public class FieldsNonNullTest {
     @Test
     void testBytesNeverNull() {
@@ -71,7 +71,10 @@ void testRepeatedNeverNull() {
         assertNotNull(msg.transactionFeeSchedule());
         assertTrue(msg.transactionFeeSchedule().isEmpty());
 
-        msg = FeeSchedule.newBuilder().transactionFeeSchedule((TransactionFeeSchedule[])null).build();
+        msg =
+                FeeSchedule.newBuilder()
+                        .transactionFeeSchedule((TransactionFeeSchedule[]) null)
+                        .build();
         assertNotNull(msg.transactionFeeSchedule());
         assertTrue(msg.transactionFeeSchedule().isEmpty());
     }
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/HashEqualsTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/HashEqualsTest.java
index edf02637..482220fe 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/HashEqualsTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/HashEqualsTest.java
@@ -5,10 +5,9 @@
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertNotEquals;
 
-import org.junit.jupiter.api.Test;
-
 import com.hedera.pbj.test.proto.pbj.TimestampTest;
 import com.hedera.pbj.test.proto.pbj.TimestampTest2;
+import org.junit.jupiter.api.Test;
 
 class HashEqualsTest {
     @Test
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/JsonCodecTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/JsonCodecTest.java
index 7097e1ea..07e036c5 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/JsonCodecTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/JsonCodecTest.java
@@ -1,6 +1,8 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
 import com.google.protobuf.ByteString;
 import com.google.protobuf.util.JsonFormat;
 import com.hedera.hapi.node.base.Key;
@@ -12,13 +14,10 @@
 import com.hedera.pbj.runtime.io.stream.WritableStreamingData;
 import com.hedera.pbj.test.proto.pbj.Everything;
 import com.hederahashgraph.api.proto.java.GetAccountDetailsResponse;
-import org.junit.jupiter.api.Test;
-
 import java.io.ByteArrayOutputStream;
 import java.nio.charset.StandardCharsets;
 import java.util.HexFormat;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
 
 /**
  * Base set of tests to make sure that JSON is round tripped correctly with ProtoC Generated code
@@ -28,14 +27,17 @@ public class JsonCodecTest {
     @Test
     public void simpleTimestampTest() throws Exception {
         // build with protoc
-        com.hederahashgraph.api.proto.java.Timestamp t = com.hederahashgraph.api.proto.java.Timestamp.newBuilder()
-                .setSeconds(1234)
-                .setNanos(567)
-                .build();
+        com.hederahashgraph.api.proto.java.Timestamp t =
+                com.hederahashgraph.api.proto.java.Timestamp.newBuilder()
+                        .setSeconds(1234)
+                        .setNanos(567)
+                        .build();
         // write to JSON with protoc
         String protoCJson = JsonFormat.printer().print(t);
         // parse with pbj
-        Timestamp tPbj = Timestamp.JSON.parse(BufferedData.wrap(protoCJson.getBytes(StandardCharsets.UTF_8)));
+        Timestamp tPbj =
+                Timestamp.JSON.parse(
+                        BufferedData.wrap(protoCJson.getBytes(StandardCharsets.UTF_8)));
         // check
         assertEquals(t.getSeconds(), tPbj.seconds());
         assertEquals(t.getNanos(), tPbj.nanos());
@@ -47,19 +49,23 @@ public void simpleTimestampTest() throws Exception {
         System.out.println("pbjJson = " + pbjJson);
         assertEquals(protoCJson, pbjJson);
     }
+
     @Test
     public void simpleKeyTest() throws Exception {
         // build with protoc
-        com.hederahashgraph.api.proto.java.Key keyProtoC = com.hederahashgraph.api.proto.java.Key.newBuilder()
-                .setECDSA384(ByteString.copyFrom(new byte[]{0,1,2,3}))
-                .build();
+        com.hederahashgraph.api.proto.java.Key keyProtoC =
+                com.hederahashgraph.api.proto.java.Key.newBuilder()
+                        .setECDSA384(ByteString.copyFrom(new byte[] {0, 1, 2, 3}))
+                        .build();
         // write to JSON with protoc
         String protoCJson = JsonFormat.printer().print(keyProtoC);
         System.out.println("protoCJson = " + protoCJson);
         // parse with pbj
         Key tPbj = Key.JSON.parse(BufferedData.wrap(protoCJson.getBytes(StandardCharsets.UTF_8)));
         // check
-        assertEquals(HexFormat.of().formatHex(keyProtoC.getECDSA384().toByteArray()), tPbj.ecdsa384().toHex());
+        assertEquals(
+                HexFormat.of().formatHex(keyProtoC.getECDSA384().toByteArray()),
+                tPbj.ecdsa384().toHex());
         // write with pbj
         ByteArrayOutputStream bout = new ByteArrayOutputStream();
         WritableStreamingData out = new WritableStreamingData(bout);
@@ -82,7 +88,9 @@ public void accountDetailsTest() throws Exception {
         // write to JSON with protoC
         String protoCJson = JsonFormat.printer().print(accountDetailsProtoC);
         // parse with pbj
-        AccountDetails accountDetailsPbj2 = AccountDetails.JSON.parse(BufferedData.wrap(protoCJson.getBytes(StandardCharsets.UTF_8)));
+        AccountDetails accountDetailsPbj2 =
+                AccountDetails.JSON.parse(
+                        BufferedData.wrap(protoCJson.getBytes(StandardCharsets.UTF_8)));
         // check
         assertEquals(accountDetailsPbj, accountDetailsPbj2);
         // write with pbj
@@ -106,7 +114,9 @@ public void everythingTest() throws Exception {
         String protoCJson = JsonFormat.printer().print(accountDetailsProtoC);
         System.out.println("protoCJson = " + protoCJson);
         // parse with pbj
-        Everything everythingPbj2 = Everything.JSON.parse(BufferedData.wrap(protoCJson.getBytes(StandardCharsets.UTF_8)));
+        Everything everythingPbj2 =
+                Everything.JSON.parse(
+                        BufferedData.wrap(protoCJson.getBytes(StandardCharsets.UTF_8)));
         // check
         assertEquals(everythingPbj, everythingPbj2);
         // write with pbj
@@ -115,6 +125,4 @@ public void everythingTest() throws Exception {
         String pbjJson = bout.toString();
         assertEquals(protoCJson, pbjJson);
     }
-
-
 }
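
    As context for the JSON round-trip checks above, here is a minimal, self-contained sketch of the protoc
    side of such a round trip. It uses the well-known com.google.protobuf.Timestamp as a stand-in for the
    Hedera messages and assumes protobuf-java and protobuf-java-util are on the classpath; it is not part of
    this patch.

    import com.google.protobuf.Timestamp;
    import com.google.protobuf.util.JsonFormat;

    public final class JsonRoundTripSketch {
        public static void main(String[] args) throws Exception {
            // build a message with protoc-generated code
            Timestamp t = Timestamp.newBuilder().setSeconds(1234).setNanos(567).build();

            // print it as JSON, analogous to the protoCJson produced in the tests above
            String json = JsonFormat.printer().print(t);
            System.out.println(json); // the well-known Timestamp prints as an RFC 3339 string

            // parse the JSON back and check the round trip preserved the fields
            Timestamp.Builder builder = Timestamp.newBuilder();
            JsonFormat.parser().merge(json, builder);
            Timestamp parsed = builder.build();
            if (parsed.getSeconds() != t.getSeconds() || parsed.getNanos() != t.getNanos()) {
                throw new AssertionError("JSON round trip changed the timestamp");
            }
        }
    }

    The tests above do the analogous thing, except they parse the protoc JSON with the PBJ JSON codec and
    then compare the re-serialized JSON strings.
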
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MalformedMessageTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MalformedMessageTest.java
index a6f9e233..16d0b829 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MalformedMessageTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MalformedMessageTest.java
@@ -8,12 +8,11 @@
 import com.hedera.pbj.test.proto.pbj.Everything;
 import com.hedera.pbj.test.proto.pbj.TimestampTest;
 import com.hedera.pbj.test.proto.pbj.codec.EverythingProtoCodec;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.random.RandomGenerator;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 class MalformedMessageTest {
 
@@ -41,7 +40,9 @@ void parseMalformedEverything_parse_fail() throws IOException {
         buffer.limit(10); // we trick the parser into thinking that there is more to process
         buffer.array()[9] = 0; // but the byte is not valid
         buffer.array()[1] += 1; // artificially increase message size
-        assertThrows(ParseException.class, () -> codec.parse(data)); // parser fails because of an unknown tag
+        assertThrows(
+                ParseException.class,
+                () -> codec.parse(data)); // parser fails because of an unknown tag
     }
 
     @Test
@@ -66,12 +67,9 @@ private BufferedData prepareTestData(final ByteBuffer byteBuffer) throws IOExcep
         final BufferedData data = BufferedData.wrap(byteBuffer);
         byte[] bytes = new byte[8];
         rng.nextBytes(bytes);
-        final TimestampTest bytesTest = TimestampTest.newBuilder()
-                .seconds(System.currentTimeMillis())
-                .build();
-        final Everything obj = Everything.newBuilder()
-                .subObject(bytesTest)
-                .build();
+        final TimestampTest bytesTest =
+                TimestampTest.newBuilder().seconds(System.currentTimeMillis()).build();
+        final Everything obj = Everything.newBuilder().subObject(bytesTest).build();
         codec.write(obj, data);
         data.flip();
         return data;
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MaxDepthTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MaxDepthTest.java
index 711299a1..cd7a423b 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MaxDepthTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MaxDepthTest.java
@@ -1,13 +1,13 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
+import static org.junit.jupiter.api.Assertions.assertThrows;
+
 import com.hedera.pbj.runtime.ParseException;
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
 import com.hedera.pbj.test.proto.pbj.MessageWithMessage;
 import org.junit.jupiter.api.Test;
 
-import static org.junit.jupiter.api.Assertions.assertThrows;
-
 public class MaxDepthTest {
     @Test
     void testMaxDepth_depth0() throws Exception {
@@ -27,11 +27,13 @@ void testMaxDepth_depth0() throws Exception {
     void testMaxDepth_depth1_actually0() throws Exception {
         MessageWithMessage msg;
 
-        msg = MessageWithMessage.newBuilder()
-                // NOTE: this is a "default" message, and its serialized size is zero,
-                // so parse() wouldn't be called to read it, and hence the actual depth is still 0
-                .message(MessageWithMessage.newBuilder().build())
-                .build();
+        msg =
+                MessageWithMessage.newBuilder()
+                        // NOTE: this is a "default" message, and its serialized size is zero,
+                        // so parse() wouldn't be called to read it, and hence the actual depth is
+                        // still 0
+                        .message(MessageWithMessage.newBuilder().build())
+                        .build();
         BufferedData bd = BufferedData.allocate(MessageWithMessage.PROTOBUF.measureRecord(msg));
         MessageWithMessage.PROTOBUF.write(msg, bd);
 
@@ -48,13 +50,17 @@ void testMaxDepth_depth1_actually0() throws Exception {
     void testMaxDepth_depth2_actually1() throws Exception {
         MessageWithMessage msg;
 
-        msg = MessageWithMessage.newBuilder()
-                .message(MessageWithMessage.newBuilder()
-                        // NOTE: this is a "default" message, and its serialized size is zero,
-                        // so parse() wouldn't be called to read it, and hence the actual depth is only 1
-                        .message(MessageWithMessage.newBuilder().build())
-                        .build())
-                .build();
+        msg =
+                MessageWithMessage.newBuilder()
+                        .message(
+                                MessageWithMessage.newBuilder()
+                                        // NOTE: this is a "default" message, and its serialized
+                                        // size is zero,
+                                        // so parse() wouldn't be called to read it, and hence the
+                                        // actual depth is only 1
+                                        .message(MessageWithMessage.newBuilder().build())
+                                        .build())
+                        .build();
         BufferedData bd = BufferedData.allocate(MessageWithMessage.PROTOBUF.measureRecord(msg));
         MessageWithMessage.PROTOBUF.write(msg, bd);
 
@@ -71,15 +77,22 @@ void testMaxDepth_depth2_actually1() throws Exception {
     void testMaxDepth_depth3_actually2() throws Exception {
         MessageWithMessage msg;
 
-        msg = MessageWithMessage.newBuilder()
-                .message(MessageWithMessage.newBuilder()
-                        .message(MessageWithMessage.newBuilder()
-                                // NOTE: this is a "default" message, and its serialized size is zero,
-                                // so parse() wouldn't be called to read it, and hence the actual depth is only 2
-                                .message(MessageWithMessage.newBuilder().build())
-                                .build())
-                        .build())
-                .build();
+        msg =
+                MessageWithMessage.newBuilder()
+                        .message(
+                                MessageWithMessage.newBuilder()
+                                        .message(
+                                                MessageWithMessage.newBuilder()
+                                                        // NOTE: this is a "default" message, and
+                                                        // its serialized size is zero,
+                                                        // so parse() wouldn't be called to read it,
+                                                        // and hence the actual depth is only 2
+                                                        .message(
+                                                                MessageWithMessage.newBuilder()
+                                                                        .build())
+                                                        .build())
+                                        .build())
+                        .build();
         BufferedData bd = BufferedData.allocate(MessageWithMessage.PROTOBUF.measureRecord(msg));
         MessageWithMessage.PROTOBUF.write(msg, bd);
 
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MaxSizeTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MaxSizeTest.java
index 04743dfc..efa2f20f 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MaxSizeTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/MaxSizeTest.java
@@ -1,6 +1,8 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
+import static org.junit.jupiter.api.Assertions.assertThrows;
+
 import com.hedera.pbj.runtime.ParseException;
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
 import com.hedera.pbj.runtime.io.buffer.Bytes;
@@ -8,14 +10,14 @@
 import com.hedera.pbj.test.proto.pbj.MessageWithString;
 import org.junit.jupiter.api.Test;
 
-import static org.junit.jupiter.api.Assertions.assertThrows;
-
 public class MaxSizeTest {
     @Test
     void testBytesMaxSize() throws Exception {
-        final Bytes bytes = Bytes.wrap("test string long enough to hold Integer.MAX_VALUE as VarInt");
+        final Bytes bytes =
+                Bytes.wrap("test string long enough to hold Integer.MAX_VALUE as VarInt");
         final MessageWithBytes msg = MessageWithBytes.newBuilder().bytesField(bytes).build();
-        final BufferedData data = BufferedData.allocate(MessageWithBytes.PROTOBUF.measureRecord(msg));
+        final BufferedData data =
+                BufferedData.allocate(MessageWithBytes.PROTOBUF.measureRecord(msg));
         MessageWithBytes.PROTOBUF.write(msg, data);
 
         // That's where the Bytes length is stored
@@ -32,7 +34,8 @@ void testBytesMaxSize() throws Exception {
     void testStringMaxSize() throws Exception {
         final String string = "test string long enough to hold Integer.MAX_VALUE as VarInt";
         final MessageWithString msg = MessageWithString.newBuilder().aTestString(string).build();
-        final BufferedData data = BufferedData.allocate(MessageWithString.PROTOBUF.measureRecord(msg));
+        final BufferedData data =
+                BufferedData.allocate(MessageWithString.PROTOBUF.measureRecord(msg));
         MessageWithString.PROTOBUF.write(msg, data);
 
         // That's where the string length is stored
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/ParserNeverWrapsTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/ParserNeverWrapsTest.java
index a269a210..aff7e3eb 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/ParserNeverWrapsTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/ParserNeverWrapsTest.java
@@ -1,15 +1,16 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+
 import com.hedera.pbj.runtime.Codec;
 import com.hedera.pbj.runtime.io.WritableSequentialData;
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
 import com.hedera.pbj.runtime.io.buffer.Bytes;
 import com.hedera.pbj.runtime.test.UncheckedThrowingFunction;
 import com.hedera.pbj.test.proto.pbj.MessageWithBytes;
-import org.junit.jupiter.params.ParameterizedTest;
-import org.junit.jupiter.params.provider.MethodSource;
-
 import java.io.IOException;
 import java.io.UncheckedIOException;
 import java.nio.ByteBuffer;
@@ -19,10 +20,8 @@
 import java.util.function.Function;
 import java.util.function.Supplier;
 import java.util.stream.Stream;
-
-import static org.junit.jupiter.api.Assertions.assertArrayEquals;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
 
 public class ParserNeverWrapsTest {
 
@@ -75,8 +74,7 @@ private static record WrapTestData<T>(
             Function<Codec<T>, T> parser,
             Runnable resetter,
             Supplier<byte[]> getter,
-            BiConsumer<Integer, byte[]> setter
-    ) {
+            BiConsumer<Integer, byte[]> setter) {
         static <T> WrapTestData<T> createByteArrayBufferedData(int size) {
             // The current implementation creates ByteArrayBufferedData:
             final BufferedData seq = BufferedData.allocate(size);
@@ -88,8 +86,7 @@ static <T> WrapTestData<T> createByteArrayBufferedData(int size) {
                     (pos, bytes) -> {
                         seq.position(pos);
                         seq.writeBytes(bytes);
-                    }
-            );
+                    });
         }
 
         static <T> WrapTestData<T> createDirectBufferedData(int size) {
@@ -103,8 +100,7 @@ static <T> WrapTestData<T> createDirectBufferedData(int size) {
                     (pos, bytes) -> {
                         seq.position(pos);
                         seq.writeBytes(bytes);
-                    }
-            );
+                    });
         }
 
         static <T> WrapTestData<T> createBytes(int size) {
@@ -122,8 +118,7 @@ static <T> WrapTestData<T> createBytes(int size) {
                         for (int i = 0; i < arr.length; i++) {
                             byteArray[pos + i] = arr[i];
                         }
-                    }
-            );
+                    });
         }
     }
 
@@ -131,8 +126,7 @@ static Stream<Function<Integer, WrapTestData>> provideWrapTestArguments() {
         return Stream.of(
                 WrapTestData::createByteArrayBufferedData,
                 WrapTestData::createDirectBufferedData,
-                WrapTestData::createBytes
-        );
+                WrapTestData::createBytes);
     }
 
     @ParameterizedTest
@@ -141,9 +135,8 @@ void testNoWrap(Function<Integer, WrapTestData> config) throws IOException {
         final String randomString = UUID.randomUUID().toString();
         final byte[] originalBytes = randomString.getBytes(StandardCharsets.UTF_8);
 
-        final MessageWithBytes originalMessage = MessageWithBytes.newBuilder()
-                .bytesField(Bytes.wrap(originalBytes))
-                .build();
+        final MessageWithBytes originalMessage =
+                MessageWithBytes.newBuilder().bytesField(Bytes.wrap(originalBytes)).build();
         final int size = MessageWithBytes.PROTOBUF.measureRecord(originalMessage);
         final WrapTestData<MessageWithBytes> data = config.apply(size);
         MessageWithBytes.PROTOBUF.write(originalMessage, data.wSeq().get());
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/SampleFuzzTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/SampleFuzzTest.java
index 03832cc0..79db483e 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/SampleFuzzTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/SampleFuzzTest.java
@@ -1,6 +1,9 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assumptions.assumeFalse;
+
 import com.hedera.hapi.node.base.tests.AccountIDTest;
 import com.hedera.hapi.node.base.tests.ContractIDTest;
 import com.hedera.pbj.integration.fuzz.Elapsed;
@@ -17,27 +20,22 @@
 import com.hedera.pbj.test.proto.pbj.tests.TimestampTestSeconds2Test;
 import com.hedera.pbj.test.proto.pbj.tests.TimestampTestSecondsTest;
 import com.hedera.pbj.test.proto.pbj.tests.TimestampTestTest;
-import org.junit.jupiter.api.Tag;
-import org.junit.jupiter.api.Test;
-
 import java.text.NumberFormat;
 import java.util.List;
 import java.util.Random;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
-
-import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.junit.jupiter.api.Assumptions.assumeFalse;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
 
 /**
- * This is a sample fuzz test just to demonstrate the usage of the FuzzTest class.
- * It will be replaced with a more elaborate fuzz testing framework in the future.
- * See javadoc for FuzzTest for more details.
+ * This is a sample fuzz test just to demonstrate the usage of the FuzzTest class. It will be
+ * replaced with a more elaborate fuzz testing framework in the future. See javadoc for FuzzTest for
+ * more details.
  *
- * Three thresholds defined at the beginning of the class below
- * determine whether an individual test for a specific model object
- * is considered passed and whether the fuzz test as a whole
- * is considered passed or failed.
+ * <p>Three thresholds defined at the beginning of the class below determine whether an individual
+ * test for a specific model object is considered passed and whether the fuzz test as a whole is
+ * considered passed or failed.
  */
 public class SampleFuzzTest {
     // Flip to true to print out results stats for every tested model object.
@@ -45,91 +43,76 @@ public class SampleFuzzTest {
     private static final boolean debug = false;
 
     /**
-     * A percentage threshold for the share of DESERIALIZATION_FAILED outcomes
-     * when running tests for a given model object.
+     * A percentage threshold for the share of DESERIALIZATION_FAILED outcomes when running tests
+     * for a given model object.
      *
-     * A test for that specific model object is considered passed
-     * if random modifications of the object's payload produce
-     * that many DESERIALIZATION_FAILED outcomes.
+     * <p>A test for that specific model object is considered passed if random modifications of the
+     * object's payload produce that many DESERIALIZATION_FAILED outcomes.
      */
     private static final double THRESHOLD = .95;
 
     /**
-     * A percentage threshold for the pass rate across tests
-     * for all model objects.
+     * A percentage threshold for the pass rate across tests for all model objects.
      *
-     * The fuzz test as a whole is considered passed
-     * if that many individual model tests pass.
+     * <p>The fuzz test as a whole is considered passed if that many individual model tests pass.
      */
     private static final double PASS_RATE_THRESHOLD = 1.;
 
     /**
-     * A threshold for the mean value of the shares of DESERIALIZATION_FAILED
-     * outcomes across tests for all model objects.
+     * A threshold for the mean value of the shares of DESERIALIZATION_FAILED outcomes across tests
+     * for all model objects.
      *
-     * The fuzz test as a whole is considered passed
-     * if the mean value of all the individual DESERIALIZATION_FAILED
-     * shares is greater than this threshold.
+     * <p>The fuzz test as a whole is considered passed if the mean value of all the individual
+     * DESERIALIZATION_FAILED shares is greater than this threshold.
      */
     private static final double DESERIALIZATION_FAILED_MEAN_THRESHOLD = .9829;
 
     /**
-     * Fuzz tests are tagged with this tag to allow Gradle/JUnit
-     * to disable assertions when running these tests.
-     * This enables us to catch the actual codec failures.
+     * Fuzz tests are tagged with this tag to allow Gradle/JUnit to disable assertions when running
+     * these tests. This enables us to catch the actual codec failures.
      */
     private static final String FUZZ_TEST_TAG = "FUZZ_TEST";
 
     /**
-     * A fixed seed for a random numbers generator when
-     * we want to run the tests in a reproducible way.
+     * A fixed seed for a random numbers generator when we want to run the tests in a reproducible
+     * way.
      *
-     * Use the randomFuzzTest Gradle target to use a random seed
-     * instead, which will run the tests in a random way
-     * allowing one to potentially discover new and unknown issues.
+     * <p>Use the randomFuzzTest Gradle target to use a random seed instead, which will run the
+     * tests in a random way allowing one to potentially discover new and unknown issues.
      *
-     * This number is completely random. However, the threshold
-     * values above may need changing if this value changes.
+     * <p>This number is completely random. However, the threshold values above may need changing if
+     * this value changes.
      */
     private static final long FIXED_RANDOM_SEED = 837582698436792L;
 
-    private static final List<Class<?>> MODEL_TEST_CLASSES = List.of(
-            AccountIDTest.class,
-            ContractIDTest.class,
-            EverythingTest.class,
-            HashevalTest.class,
-            InnerEverythingTest.class,
-            MessageWithStringTest.class,
-            TimestampTest2Test.class,
-            TimestampTestSeconds2Test.class,
-            TimestampTestSecondsTest.class,
-            TimestampTestTest.class
-    );
-
-    private static record FuzzTestParams<T, P>(
-            T object,
-            Class<P> protocModelClass
-    ) {
-    }
+    private static final List<Class<?>> MODEL_TEST_CLASSES =
+            List.of(
+                    AccountIDTest.class,
+                    ContractIDTest.class,
+                    EverythingTest.class,
+                    HashevalTest.class,
+                    InnerEverythingTest.class,
+                    MessageWithStringTest.class,
+                    TimestampTest2Test.class,
+                    TimestampTestSeconds2Test.class,
+                    TimestampTestSecondsTest.class,
+                    TimestampTestTest.class);
+
+    private static record FuzzTestParams<T, P>(T object, Class<P> protocModelClass) {}
 
     private static Stream<? extends FuzzTestParams<?, ?>> testCases() {
         return MODEL_TEST_CLASSES.stream()
-                .flatMap(clz -> {
-                    final Class<?> protocModelClass = FuzzUtil.getStaticFieldValue(clz, "PROTOC_MODEL_CLASS");
-
-                    return FuzzUtil.<List<?>>getStaticFieldValue(clz, "ARGUMENTS")
-                            .stream()
-                            .map(object -> new FuzzTestParams<>(
-                                    object,
-                                    protocModelClass
-                            ));
-                });
+                .flatMap(
+                        clz -> {
+                            final Class<?> protocModelClass =
+                                    FuzzUtil.getStaticFieldValue(clz, "PROTOC_MODEL_CLASS");
+
+                            return FuzzUtil.<List<?>>getStaticFieldValue(clz, "ARGUMENTS").stream()
+                                    .map(object -> new FuzzTestParams<>(object, protocModelClass));
+                        });
     }
 
-    private static record ResultStats(
-            double passRate,
-            double deserializationFailedMean
-    ) {
+    private static record ResultStats(double passRate, double deserializationFailedMean) {
         private static final NumberFormat PERCENTAGE_FORMAT = NumberFormat.getPercentInstance();
 
         static {
@@ -142,9 +125,12 @@ boolean passed() {
         }
 
         String format() {
-            return "Fuzz tests " + (passed() ? "PASSED" : "FAILED")
-                    + " with passRate = " + PERCENTAGE_FORMAT.format(passRate)
-                    + " and deserializationFailedMean = " + PERCENTAGE_FORMAT.format(deserializationFailedMean);
+            return "Fuzz tests "
+                    + (passed() ? "PASSED" : "FAILED")
+                    + " with passRate = "
+                    + PERCENTAGE_FORMAT.format(passRate)
+                    + " and deserializationFailedMean = "
+                    + PERCENTAGE_FORMAT.format(deserializationFailedMean);
         }
     }
 
@@ -153,63 +139,80 @@ String format() {
     void fuzzTest() {
         assumeFalse(
                 this.getClass().desiredAssertionStatus(),
-                "Fuzz tests run with assertions disabled only. Use the fuzzTest Gradle target."
-        );
+                "Fuzz tests run with assertions disabled only. Use the fuzzTest Gradle target.");
 
         final Random random = buildRandom();
 
-        Elapsed<ResultStats> elapsedResultStats = Elapsed.time(() -> {
-            final List<? extends FuzzTestResult<?>> results = testCases()
-                    // Note that we must run this stream sequentially to enable
-                    // reproducing the tests for a given random seed.
-                    .map(testCase -> FuzzTest.fuzzTest(
-                            testCase.object(),
-                            THRESHOLD,
-                            random,
-                            testCase.protocModelClass()))
-                    .peek(result -> { if (debug) System.out.println(result.format()); })
-                    .collect(Collectors.toList());
-
-            return results.stream()
-                    .map(result -> new ResultStats(
-                                    result.passed() ? 1. : 0.,
-                                    result.percentageMap().getOrDefault(SingleFuzzTestResult.DESERIALIZATION_FAILED, 0.)
-                            )
-                    )
-                    .reduce(
-                            (r1, r2) -> new ResultStats(
-                                    r1.passRate() + r2.passRate(),
-                                    r1.deserializationFailedMean() + r2.deserializationFailedMean())
-                    )
-                    .map(stats -> new ResultStats(
-                                    stats.passRate() / (double) results.size(),
-                                    stats.deserializationFailedMean() / (double) results.size()
-                            )
-                    )
-                    .orElse(new ResultStats(0., 0.));
-
-        });
+        Elapsed<ResultStats> elapsedResultStats =
+                Elapsed.time(
+                        () -> {
+                            final List<? extends FuzzTestResult<?>> results =
+                                    testCases()
+                                            // Note that we must run this stream sequentially to
+                                            // enable
+                                            // reproducing the tests for a given random seed.
+                                            .map(
+                                                    testCase ->
+                                                            FuzzTest.fuzzTest(
+                                                                    testCase.object(),
+                                                                    THRESHOLD,
+                                                                    random,
+                                                                    testCase.protocModelClass()))
+                                            .peek(
+                                                    result -> {
+                                                        if (debug)
+                                                            System.out.println(result.format());
+                                                    })
+                                            .collect(Collectors.toList());
+
+                            return results.stream()
+                                    .map(
+                                            result ->
+                                                    new ResultStats(
+                                                            result.passed() ? 1. : 0.,
+                                                            result.percentageMap()
+                                                                    .getOrDefault(
+                                                                            SingleFuzzTestResult
+                                                                                    .DESERIALIZATION_FAILED,
+                                                                            0.)))
+                                    .reduce(
+                                            (r1, r2) ->
+                                                    new ResultStats(
+                                                            r1.passRate() + r2.passRate(),
+                                                            r1.deserializationFailedMean()
+                                                                    + r2
+                                                                            .deserializationFailedMean()))
+                                    .map(
+                                            stats ->
+                                                    new ResultStats(
+                                                            stats.passRate()
+                                                                    / (double) results.size(),
+                                                            stats.deserializationFailedMean()
+                                                                    / (double) results.size()))
+                                    .orElse(new ResultStats(0., 0.));
+                        });
 
         final String statsMessage = elapsedResultStats.result().format();
         System.out.println(statsMessage);
-        System.out.println("Total number of SingleFuzzTest runs: " + SingleFuzzTest.getNumberOfRuns());
+        System.out.println(
+                "Total number of SingleFuzzTest runs: " + SingleFuzzTest.getNumberOfRuns());
         System.out.println("Elapsed time: " + elapsedResultStats.format());
 
         assertTrue(elapsedResultStats.result().passed(), statsMessage);
     }
 
     private Random buildRandom() {
-        final boolean useRandomSeed
-                = Boolean.valueOf(System.getProperty("com.hedera.pbj.integration.test.fuzz.useRandomSeed"));
+        final boolean useRandomSeed =
+                Boolean.valueOf(
+                        System.getProperty("com.hedera.pbj.integration.test.fuzz.useRandomSeed"));
         final long seed = useRandomSeed ? new Random().nextLong() : FIXED_RANDOM_SEED;
 
-        System.out.println("Fuzz tests are configured to use a "
-                + (useRandomSeed ? "RANDOM" : "FIXED")
-                + " seed for `new Random(seed)`, and the seed value for this run is: "
-                + seed
-        );
+        System.out.println(
+                "Fuzz tests are configured to use a "
+                        + (useRandomSeed ? "RANDOM" : "FIXED")
+                        + " seed for `new Random(seed)`, and the seed value for this run is: "
+                        + seed);
 
         return new Random(seed);
     }
-
 }
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TestHashFunctions.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TestHashFunctions.java
index 48be11a6..5bbf801a 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TestHashFunctions.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TestHashFunctions.java
@@ -3,19 +3,16 @@
 
 import com.hedera.pbj.test.proto.pbj.Hasheval;
 import com.hedera.pbj.test.proto.pbj.TimestampTest;
-import org.junit.jupiter.params.ParameterizedTest;
-import java.nio.ByteBuffer;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 
-/**
- * Unit Test for TimestampTest model object. Generate based on protobuf schema.
- */
+/** Hash functions used by unit tests for the Hasheval model object. */
 public final class TestHashFunctions {
     public static int hash1(Hasheval hashEval) {
         try {
-            byte[] hash = MessageDigest.getInstance("SHA-256").digest(
-                    Hasheval.PROTOBUF.toBytes(hashEval).toByteArray());
+            byte[] hash =
+                    MessageDigest.getInstance("SHA-256")
+                            .digest(Hasheval.PROTOBUF.toBytes(hashEval).toByteArray());
             int res = hash[0] << 24 | hash[1] << 16 | hash[2] << 8 | hash[3];
             return processForBetterDistribution(res);
         } catch (NoSuchAlgorithmException e) {
@@ -82,7 +79,11 @@ public static int hash2(Hasheval hashEval) {
             result = 31 * result + hashEval.text().hashCode();
         }
         if (hashEval.bytesField() != Hasheval.DEFAULT.bytesField()) {
-            result = 31 * result + (hashEval.bytesField() == null ? 0 : hashEval.bytesField().hashCode());
+            result =
+                    31 * result
+                            + (hashEval.bytesField() == null
+                                    ? 0
+                                    : hashEval.bytesField().hashCode());
         }
 
         return processForBetterDistribution(result);
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TimestampTestTest.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TimestampTestTest.java
index 4b5b5255..59a4e28c 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TimestampTestTest.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TimestampTestTest.java
@@ -1,105 +1,105 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
+import static com.hedera.pbj.runtime.ProtoTestTools.INTEGER_TESTS_LIST;
+import static com.hedera.pbj.runtime.ProtoTestTools.LONG_TESTS_LIST;
+import static com.hedera.pbj.runtime.ProtoTestTools.getThreadLocalByteBuffer;
+import static com.hedera.pbj.runtime.ProtoTestTools.getThreadLocalDataBuffer;
+import static com.hedera.pbj.runtime.ProtoTestTools.getThreadLocalDataBuffer2;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
 import com.google.protobuf.CodedOutputStream;
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
 import com.hedera.pbj.runtime.test.NoToStringWrapper;
 import com.hedera.pbj.test.proto.pbj.TimestampTest;
-import org.junit.jupiter.params.ParameterizedTest;
-import org.junit.jupiter.params.provider.MethodSource;
 import java.nio.ByteBuffer;
 import java.util.List;
 import java.util.stream.IntStream;
 import java.util.stream.Stream;
-import static com.hedera.pbj.runtime.ProtoTestTools.INTEGER_TESTS_LIST;
-import static com.hedera.pbj.runtime.ProtoTestTools.LONG_TESTS_LIST;
-import static com.hedera.pbj.runtime.ProtoTestTools.getThreadLocalByteBuffer;
-import static com.hedera.pbj.runtime.ProtoTestTools.getThreadLocalDataBuffer;
-import static com.hedera.pbj.runtime.ProtoTestTools.getThreadLocalDataBuffer2;
-import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
 
-/**
- * Unit Test for TimestampTest model object. Generate based on protobuf schema.
- */
+/** Unit Test for TimestampTest model object. Generated based on protobuf schema. */
 public final class TimestampTestTest {
-	@ParameterizedTest
-	@MethodSource("createModelTestArguments")
-	public void testTimestampTestAgainstProtoC(final NoToStringWrapper<TimestampTest> modelObjWrapper) throws Exception {
-		final TimestampTest modelObj = modelObjWrapper.getValue();
-		// get reusable thread buffers
-		final BufferedData dataBuffer = getThreadLocalDataBuffer();
-		final BufferedData dataBuffer2 = getThreadLocalDataBuffer2();
-		final ByteBuffer byteBuffer = getThreadLocalByteBuffer();
-
-		// model to bytes with PBJ
-		TimestampTest.PROTOBUF.write(modelObj,dataBuffer);
-		// clamp limit to bytes written
-		dataBuffer.limit(dataBuffer.position());
+    @ParameterizedTest
+    @MethodSource("createModelTestArguments")
+    public void testTimestampTestAgainstProtoC(
+            final NoToStringWrapper<TimestampTest> modelObjWrapper) throws Exception {
+        final TimestampTest modelObj = modelObjWrapper.getValue();
+        // get reusable thread buffers
+        final BufferedData dataBuffer = getThreadLocalDataBuffer();
+        final BufferedData dataBuffer2 = getThreadLocalDataBuffer2();
+        final ByteBuffer byteBuffer = getThreadLocalByteBuffer();
 
-		// copy bytes to ByteBuffer
-		dataBuffer.resetPosition();
-		dataBuffer.readBytes(byteBuffer);
-		byteBuffer.flip();
+        // model to bytes with PBJ
+        TimestampTest.PROTOBUF.write(modelObj, dataBuffer);
+        // clamp limit to bytes written
+        dataBuffer.limit(dataBuffer.position());
 
-		// read proto bytes with ProtoC to make sure it is readable and no parse exceptions are thrown
-		final com.hedera.pbj.test.proto.java.TimestampTest protoCModelObj = com.hedera.pbj.test.proto.java.TimestampTest.parseFrom(byteBuffer);
+        // copy bytes to ByteBuffer
+        dataBuffer.resetPosition();
+        dataBuffer.readBytes(byteBuffer);
+        byteBuffer.flip();
 
-		// read proto bytes with PBJ parser
-		dataBuffer.resetPosition();
-		final TimestampTest modelObj2 = TimestampTest.PROTOBUF.parse(dataBuffer);
+        // read proto bytes with ProtoC to make sure it is readable and no parse exceptions are
+        // thrown
+        final com.hedera.pbj.test.proto.java.TimestampTest protoCModelObj =
+                com.hedera.pbj.test.proto.java.TimestampTest.parseFrom(byteBuffer);
 
-		// check the read back object is equal to written original one
-		//assertEquals(modelObj.toString(), modelObj2.toString());
-		assertEquals(modelObj, modelObj2);
+        // read proto bytes with PBJ parser
+        dataBuffer.resetPosition();
+        final TimestampTest modelObj2 = TimestampTest.PROTOBUF.parse(dataBuffer);
 
-		// model to bytes with ProtoC writer
-		byteBuffer.clear();
-		final CodedOutputStream codedOutput = CodedOutputStream.newInstance(byteBuffer);
-		protoCModelObj.writeTo(codedOutput);
-		codedOutput.flush();
-		byteBuffer.flip();
-		// copy to a data buffer
-		dataBuffer2.writeBytes(byteBuffer);
-		dataBuffer2.flip();
+        // check the read back object is equal to written original one
+        // assertEquals(modelObj.toString(), modelObj2.toString());
+        assertEquals(modelObj, modelObj2);
 
-		// compare written bytes
-		assertEquals(dataBuffer, dataBuffer2);
+        // model to bytes with ProtoC writer
+        byteBuffer.clear();
+        final CodedOutputStream codedOutput = CodedOutputStream.newInstance(byteBuffer);
+        protoCModelObj.writeTo(codedOutput);
+        codedOutput.flush();
+        byteBuffer.flip();
+        // copy to a data buffer
+        dataBuffer2.writeBytes(byteBuffer);
+        dataBuffer2.flip();
 
-		// parse those bytes again with PBJ
-		dataBuffer2.resetPosition();
-		final TimestampTest modelObj3 = TimestampTest.PROTOBUF.parse(dataBuffer2);
-		assertEquals(modelObj, modelObj3);
-	}
+        // compare written bytes
+        assertEquals(dataBuffer, dataBuffer2);
 
-	/**
-	 * List of all valid arguments for testing, built as a static list, so we can reuse it.
-	 */
-	public static final List<TimestampTest> ARGUMENTS;
+        // parse those bytes again with PBJ
+        dataBuffer2.resetPosition();
+        final TimestampTest modelObj3 = TimestampTest.PROTOBUF.parse(dataBuffer2);
+        assertEquals(modelObj, modelObj3);
+    }
 
-	static {
-		final var secondsList = LONG_TESTS_LIST;
-		final var nanosList = INTEGER_TESTS_LIST;
-		// work out the longest of all the lists of args as that is how many test cases we need
-		final int maxValues = IntStream.of(
-				secondsList.size(),
-				nanosList.size()
-		).max().getAsInt();
-		// create new stream of model objects using lists above as constructor params
-		ARGUMENTS = IntStream.range(0,maxValues)
-				.mapToObj(i -> new TimestampTest(
-						secondsList.get(Math.min(i, secondsList.size()-1)),
-						nanosList.get(Math.min(i, nanosList.size()-1))
-				)).toList();
-	}
+    /** List of all valid arguments for testing, built as a static list, so we can reuse it. */
+    public static final List<TimestampTest> ARGUMENTS;
 
-	/**
-	 * Create a stream of all test permutations of the TimestampTest class we are testing. This is reused by other tests
-	 * as well that have model objects with fields of this type.
-	 *
-	 * @return stream of model objects for all test cases
-	 */
-	public static Stream<NoToStringWrapper<TimestampTest>> createModelTestArguments() {
-		return ARGUMENTS.stream().map(NoToStringWrapper::new);
-	}
+    static {
+        final var secondsList = LONG_TESTS_LIST;
+        final var nanosList = INTEGER_TESTS_LIST;
+        // work out the longest of all the lists of args as that is how many test cases we need
+        final int maxValues = IntStream.of(secondsList.size(), nanosList.size()).max().getAsInt();
+        // create new stream of model objects using lists above as constructor params
+        ARGUMENTS =
+                IntStream.range(0, maxValues)
+                        .mapToObj(
+                                i ->
+                                        new TimestampTest(
+                                                secondsList.get(
+                                                        Math.min(i, secondsList.size() - 1)),
+                                                nanosList.get(Math.min(i, nanosList.size() - 1))))
+                        .toList();
+    }
 
+    /**
+     * Create a stream of all test permutations of the TimestampTest class we are testing. This is
+     * reused by other tests as well that have model objects with fields of this type.
+     *
+     * @return stream of model objects for all test cases
+     */
+    public static Stream<NoToStringWrapper<TimestampTest>> createModelTestArguments() {
+        return ARGUMENTS.stream().map(NoToStringWrapper::new);
+    }
 }
diff --git a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TruncatedDataTests.java b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TruncatedDataTests.java
index d8dd1ff2..970e22f1 100644
--- a/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TruncatedDataTests.java
+++ b/pbj-integration-tests/src/test/java/com/hedera/pbj/integration/test/TruncatedDataTests.java
@@ -1,21 +1,20 @@
 // SPDX-License-Identifier: Apache-2.0
 package com.hedera.pbj.integration.test;
 
+import static org.junit.jupiter.api.Assertions.*;
+
 import com.google.protobuf.InvalidProtocolBufferException;
 import com.hedera.pbj.runtime.ProtoParserTools;
 import com.hedera.pbj.runtime.io.buffer.BufferedData;
 import com.hedera.pbj.runtime.io.buffer.Bytes;
 import com.hedera.pbj.test.proto.pbj.Everything;
+import java.util.HexFormat;
+import java.util.stream.Stream;
 import org.junit.jupiter.api.DisplayName;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.MethodSource;
 
-import java.util.HexFormat;
-import java.util.stream.Stream;
-
-import static org.junit.jupiter.api.Assertions.*;
-
 final class TruncatedDataTests {
     // ================================================================================================================
     // Verify common comments.
@@ -24,12 +23,16 @@ final class TruncatedDataTests {
     @DisplayName("Issue 96")
     void issue96() {
         final var unhexed = HexFormat.of().parseHex(testData);
-        assertThrows(InvalidProtocolBufferException.class, () ->
-                com.hederahashgraph.api.proto.java.ServicesConfigurationList.parseFrom(unhexed));
-        assertThrows(Exception.class, () ->
-                com.hedera.hapi.node.base.ServicesConfigurationList.PROTOBUF.parseStrict(
-                        BufferedData.wrap(unhexed)));
-
+        assertThrows(
+                InvalidProtocolBufferException.class,
+                () ->
+                        com.hederahashgraph.api.proto.java.ServicesConfigurationList.parseFrom(
+                                unhexed));
+        assertThrows(
+                Exception.class,
+                () ->
+                        com.hedera.hapi.node.base.ServicesConfigurationList.PROTOBUF.parseStrict(
+                                BufferedData.wrap(unhexed)));
     }
 
     @ParameterizedTest
@@ -38,11 +41,12 @@ void issue96() {
     void testBoxedFloat(final int value) {
         Everything ev = Everything.newBuilder().floatBoxed(123.0f).build();
         Bytes bytes = Everything.PROTOBUF.toBytes(ev);
-        assertThrows(Exception.class, () -> {
-            final Bytes b = bytes.slice(0, bytes.length() + value);
-            Everything.PROTOBUF.parseStrict(
-                    BufferedData.wrap(b.toByteArray()));
-        });
+        assertThrows(
+                Exception.class,
+                () -> {
+                    final Bytes b = bytes.slice(0, bytes.length() + value);
+                    Everything.PROTOBUF.parseStrict(BufferedData.wrap(b.toByteArray()));
+                });
     }
 
     @ParameterizedTest
@@ -51,11 +55,12 @@ void testBoxedFloat(final int value) {
     void testBoxedInt32(final int value) {
         Everything ev = Everything.newBuilder().int32Boxed(123).build();
         Bytes bytes = Everything.PROTOBUF.toBytes(ev);
-        assertThrows(Exception.class, () -> {
-            final Bytes b = bytes.slice(0, bytes.length() + value);
-            Everything.PROTOBUF.parseStrict(
-                    BufferedData.wrap(b.toByteArray()));
-        });
+        assertThrows(
+                Exception.class,
+                () -> {
+                    final Bytes b = bytes.slice(0, bytes.length() + value);
+                    Everything.PROTOBUF.parseStrict(BufferedData.wrap(b.toByteArray()));
+                });
     }
 
     @ParameterizedTest
@@ -64,11 +69,12 @@ void testBoxedInt32(final int value) {
     void testBoxedInt64(final int value) {
         Everything ev = Everything.newBuilder().int64Boxed(12345678L).build();
         Bytes bytes = Everything.PROTOBUF.toBytes(ev);
-        assertThrows(Exception.class, () -> {
-            final Bytes b = bytes.slice(0, bytes.length() + value);
-            Everything.PROTOBUF.parseStrict(
-                    BufferedData.wrap(b.toByteArray()));
-        });
+        assertThrows(
+                Exception.class,
+                () -> {
+                    final Bytes b = bytes.slice(0, bytes.length() + value);
+                    Everything.PROTOBUF.parseStrict(BufferedData.wrap(b.toByteArray()));
+                });
     }
 
     @ParameterizedTest
@@ -77,24 +83,27 @@ void testBoxedInt64(final int value) {
     void testNumberInt64(final int value) {
         Everything ev = Everything.newBuilder().int64Boxed(123456789L).build();
         Bytes bytes = Everything.PROTOBUF.toBytes(ev);
-        assertThrows(Exception.class, () -> {
-            final Bytes b = bytes.slice(0, bytes.length() + value);
-            Everything.PROTOBUF.parseStrict(
-                    BufferedData.wrap(b.toByteArray()));
-        });
+        assertThrows(
+                Exception.class,
+                () -> {
+                    final Bytes b = bytes.slice(0, bytes.length() + value);
+                    Everything.PROTOBUF.parseStrict(BufferedData.wrap(b.toByteArray()));
+                });
     }
 
     @ParameterizedTest
     @MethodSource("createTruncateTestArguments")
     @DisplayName("Test String")
     void testString(final int value) {
-        // Given a buffer where the first varInt is the length, and is longer than the number of bytes in the buffer.
+        // Given a buffer where the first varInt is the length, and is longer than the number of
+        // bytes in the buffer.
         final var seq = BufferedData.allocate(10);
         seq.writeVarInt(10, false);
         seq.position(0);
         seq.limit(seq.length() + value);
 
-        // When we parse that sequence, then we fail because there are not enough bytes to continue parsing.
+        // When we parse that sequence, then we fail because there are not enough bytes to continue
+        // parsing.
         assertThrows(Exception.class, () -> ProtoParserTools.readString(seq));
     }
 
@@ -102,13 +111,15 @@ void testString(final int value) {
     @MethodSource("createTruncateTestArguments")
     @DisplayName("Test Bytes")
     void testBytes(final int value) {
-        // Given a buffer where the first varInt is the length, and is longer than the number of bytes in the buffer.
+        // Given a buffer where the first varInt is the length, and is longer than the number of
+        // bytes in the buffer.
         final var seq = BufferedData.allocate(10);
         seq.writeVarInt(10, false);
         seq.position(0);
         seq.limit(seq.length() + value);
 
-        // When we parse that sequence, then we fail because there are not enough bytes to continue parsing.
+        // When we parse that sequence, then we fail because there are not enough bytes to continue
+        // parsing.
         assertThrows(Exception.class, () -> ProtoParserTools.readBytes(seq));
     }
 
@@ -116,13 +127,15 @@ void testBytes(final int value) {
     @MethodSource("createTruncateTestArguments")
     @DisplayName("Test readInt32")
     void testInt32(final int value) {
-        // Given a buffer where the first varInt is the length, and is longer than the number of bytes in the buffer.
+        // Given a buffer where the first varInt is the length, and is longer than the number of
+        // bytes in the buffer.
         final var seq = BufferedData.allocate(10);
         seq.writeVarInt(0xFFFFFFFF, false);
         seq.position(0);
         seq.limit(seq.length() + value);
 
-        // When we parse that sequence, then we fail because there are not enough bytes to continue parsing.
+        // When we parse that sequence, then we fail because there are not enough bytes to continue
+        // parsing.
         assertThrows(Exception.class, () -> ProtoParserTools.readInt32(seq));
     }
 
@@ -130,13 +143,15 @@ void testInt32(final int value) {
     @MethodSource("createTruncateTestArguments")
     @DisplayName("Test readInt64")
     void testInt64(final int value) {
-        // Given a buffer where the first varInt is the length, and is longer than the number of bytes in the buffer.
+        // Given a buffer where the first varInt is the length, and is longer than the number of
+        // bytes in the buffer.
         final var seq = BufferedData.allocate(10);
         seq.writeVarLong(0xFFFFFFFFFFFFFFFFL, false);
         seq.position(0);
         seq.limit(seq.length() + value);
 
-        // When we parse that sequence, then we fail because there are not enough bytes to continue parsing.
+        // When we parse that sequence, then we fail because there are not enough bytes to continue
+        // parsing.
         assertThrows(Exception.class, () -> ProtoParserTools.readInt32(seq));
     }
 
@@ -144,12 +159,14 @@ void testInt64(final int value) {
     @MethodSource("createTruncateTestArguments")
     @DisplayName("Test readUint32")
     void testUint32(final int value) {
-        // Given a buffer where the first varInt is the length, and is longer than the number of bytes in the buffer.
+        // Given a buffer where the first varInt is the length, and is longer than the number of
+        // bytes in the buffer.
         final var seq = BufferedData.allocate(10);
         seq.writeVarInt(0xFFFFFFFF, false);
         seq.position(0);
         seq.limit(seq.length() + value);
-        // When we parse that sequence, then we fail because there are not enough bytes to continue parsing.
+        // When we parse that sequence, then we fail because there are not enough bytes to continue
+        // parsing.
         assertThrows(Exception.class, () -> ProtoParserTools.readUint32(seq));
     }
 
@@ -157,13 +174,15 @@ void testUint32(final int value) {
     @MethodSource("createTruncateTestArguments")
     @DisplayName("Test readUint64")
     void testUint64(final int value) {
-        // Given a buffer where the first varInt is the length, and is longer than the number of bytes in the buffer.
+        // Given a buffer where the first varInt is the length, and is longer than the number of
+        // bytes in the buffer.
         final var seq = BufferedData.allocate(10);
         seq.writeVarLong(0xFFFFFFFFFFFFFFFFL, false);
         seq.position(0);
         seq.limit(seq.length() + value);
 
-        // When we parse that sequence, then we fail because there are not enough bytes to continue parsing.
+        // When we parse that sequence, then we fail because there are not enough bytes to continue
+        // parsing.
         assertThrows(Exception.class, () -> ProtoParserTools.readUint64(seq));
     }
 
@@ -171,13 +190,15 @@ void testUint64(final int value) {
     @MethodSource("createTruncateTestArguments")
     @DisplayName("Test readBool")
     void testBool(final int value) {
-        // Given a buffer where the first varInt is the length, and is longer than the number of bytes in the buffer.
+        // Given a buffer where the first varInt is the length, and is longer than the number of
+        // bytes in the buffer.
         final var seq = BufferedData.allocate(1);
         seq.writeVarInt(0x1, false);
         seq.position(0);
         seq.limit(seq.length() + value);
 
-        // When we parse that sequence, then we fail because there are not enough bytes to continue parsing.
+        // When we parse that sequence, then we fail because there are not enough bytes to continue
+        // parsing.
         assertThrows(Exception.class, () -> ProtoParserTools.readBool(seq));
     }
 
@@ -185,13 +206,15 @@ void testBool(final int value) {
     @MethodSource("createTruncateTestArguments")
     @DisplayName("Test readEnum")
     void testEnum(final int value) {
-        // Given a buffer where the first varInt is the length, and is longer than the number of bytes in the buffer.
+        // Given a buffer where the first varInt is the length, and is longer than the number of
+        // bytes in the buffer.
         final var seq = BufferedData.allocate(1);
         seq.writeVarInt(0x3, false);
         seq.position(0);
         seq.limit(seq.length() + value);
 
-        // When we parse that sequence, then we fail because there are not enough bytes to continue parsing.
+        // When we parse that sequence, then we fail because there are not enough bytes to continue
+        // parsing.
         assertThrows(Exception.class, () -> ProtoParserTools.readEnum(seq));
     }
 
@@ -199,13 +222,15 @@ void testEnum(final int value) {
     @MethodSource("createTruncateTestArguments")
     @DisplayName("Test readSint32")
     void testSint32(final int value) {
-        // Given a buffer where the first varInt is the length, and is longer than the number of bytes in the buffer.
+        // Given a buffer where the first varInt is the length, and is longer than the number of
+        // bytes in the buffer.
         final var seq = BufferedData.allocate(10);
         seq.writeVarInt(0xFFFFFFFF, false);
         seq.position(0);
         seq.limit(seq.length() + value);
 
-        // When we parse that sequence, then we fail because there are not enough bytes to continue parsing.
+        // When we parse that sequence, then we fail because there are not enough bytes to continue
+        // parsing.
         assertThrows(Exception.class, () -> ProtoParserTools.readUint32(seq));
     }
 
@@ -213,13 +238,15 @@ void testSint32(final int value) {
     @MethodSource("createTruncateTestArguments")
     @DisplayName("Test readSint64")
     void testSint64(final int value) {
-        // Given a buffer where the first varInt is the length, and is longer than the number of bytes in the buffer.
+        // Given a buffer where the first varInt is the length, and is longer than the number of
+        // bytes in the buffer.
         final var seq = BufferedData.allocate(10);
         seq.writeVarLong(0xFFFFFFFFFFFFFFFFL, false);
         seq.position(0);
         seq.limit(seq.length() + value);
 
-        // When we parse that sequence, then we fail because there are not enough bytes to continue parsing.
+        // When we parse that sequence, then we fail because there are not enough bytes to continue
+        // parsing.
         assertThrows(Exception.class, () -> ProtoParserTools.readUint32(seq));
     }
 
@@ -227,13 +254,15 @@ void testSint64(final int value) {
     @MethodSource("createTruncateTestArguments")
     @DisplayName("Test readSFixedInt32")
     void testSFixedInt32(final int value) {
-        // Given a buffer where the first varInt is the length, and is longer than the number of bytes in the buffer.
+        // Given a buffer where the first varInt is the length, and is longer than the number of
+        // bytes in the buffer.
         final var seq = BufferedData.allocate(4);
         seq.writeInt(0xFFFFFFFF);
         seq.position(0);
         seq.limit(seq.length() + value);
 
-        // When we parse that sequence, then we fail because there are not enough bytes to continue parsing.
+        // When we parse that sequence, then we fail because there are not enough bytes to continue
+        // parsing.
         assertThrows(Exception.class, () -> ProtoParserTools.readSignedInt32(seq));
     }
 
@@ -241,13 +270,15 @@ void testSFixedInt32(final int value) {
     @MethodSource("createTruncateTestArguments")
     @DisplayName("Test readFixedInt32")
     void testFixedInt32(final int value) {
-        // Given a buffer where the first varInt is the length, and is longer than the number of bytes in the buffer.
+        // Given a buffer where the first varInt is the length, and is longer than the number of
+        // bytes in the buffer.
         final var seq = BufferedData.allocate(4);
         seq.writeUnsignedInt(0xFFFFFFF0);
         seq.position(0);
         seq.limit(seq.length() + value);
 
-        // When we parse that sequence, then we fail because there are not enough bytes to continue parsing.
+        // When we parse that sequence, then we fail because there are not enough bytes to continue
+        // parsing.
         assertThrows(Exception.class, () -> ProtoParserTools.readFixed32(seq));
     }
 
@@ -255,13 +286,15 @@ void testFixedInt32(final int value) {
     @MethodSource("createTruncateTestArguments")
     @DisplayName("Test readFloat")
     void testFloat(final int value) {
-        // Given a buffer where the first varInt is the length, and is longer than the number of bytes in the buffer.
+        // Given a buffer where the first varInt is the length, and is longer than the number of
+        // bytes in the buffer.
         final var seq = BufferedData.allocate(4);
         seq.writeFloat(0xFFFFFFFF);
         seq.position(0);
         seq.limit(seq.length() + value);
 
-        // When we parse that sequence, then we fail because there are not enough bytes to continue parsing.
+        // When we parse that sequence, then we fail because there are not enough bytes to continue
+        // parsing.
         assertThrows(Exception.class, () -> ProtoParserTools.readFloat(seq));
     }
 
@@ -269,13 +302,15 @@ void testFloat(final int value) {
     @MethodSource("createTruncateTestArguments")
     @DisplayName("Test readDouble")
     void testDouble(final int value) {
-        // Given a buffer where the first varInt is the length, and is longer than the number of bytes in the buffer.
+        // Given a buffer where the first varInt is the length, and is longer than the number of
+        // bytes in the buffer.
         final var seq = BufferedData.allocate(8);
         seq.writeUnsignedInt(0xFFFFFFF0FFFFFFFFL);
         seq.position(0);
         seq.limit(seq.length() + value);
 
-        // When we parse that sequence, then we fail because there are not enough bytes to continue parsing.
+        // When we parse that sequence, then we fail because there are not enough bytes to continue
+        // parsing.
         assertThrows(Exception.class, () -> ProtoParserTools.readDouble(seq));
     }
 
@@ -283,13 +318,15 @@ void testDouble(final int value) {
     @MethodSource("createTruncateTestArguments")
     @DisplayName("Test readSFixedInt64")
     void testSFixedInt64(final int value) {
-        // Given a buffer where the first varInt is the length, and is longer than the number of bytes in the buffer.
+        // Given a buffer where the first varInt is the length, and is longer than the number of
+        // bytes in the buffer.
         final var seq = BufferedData.allocate(8);
         seq.writeLong(0xFFFFFFFFFFFFFFFFL);
         seq.position(0);
         seq.limit(seq.length() + value);
 
-        // When we parse that sequence, then we fail because there are not enough bytes to continue parsing.
+        // When we parse that sequence, then we fail because there are not enough bytes to continue
+        // parsing.
         assertThrows(Exception.class, () -> ProtoParserTools.readSignedInt64(seq));
     }
 
@@ -297,13 +334,15 @@ void testSFixedInt64(final int value) {
     @MethodSource("createTruncateTestArguments")
     @DisplayName("Test readFixedInt64")
     void testFixedInt64(final int value) {
-        // Given a buffer where the first varInt is the length, and is longer than the number of bytes in the buffer.
+        // Given a buffer where the first varInt is the length, and is longer than the number of
+        // bytes in the buffer.
         final var seq = BufferedData.allocate(8);
         seq.writeLong(0xFFFFFFF0FFFFFFFFL);
         seq.position(0);
         seq.limit(seq.length() + value);
 
-        // When we parse that sequence, then we fail because there are not enough bytes to continue parsing.
+        // When we parse that sequence, then we fail because there are not enough bytes to continue
+        // parsing.
         assertThrows(Exception.class, () -> ProtoParserTools.readFixed64(seq));
     }
 
@@ -311,29 +350,47 @@ void testFixedInt64(final int value) {
     @DisplayName("Test readUint32Strict")
     void testUint32Strict() {
         final var unhexed = HexFormat.of().parseHex("");
-        assertDoesNotThrow(() ->
-                com.hederahashgraph.api.proto.java.ServicesConfigurationList.parseFrom(unhexed));
+        assertDoesNotThrow(
+                () ->
+                        com.hederahashgraph.api.proto.java.ServicesConfigurationList.parseFrom(
+                                unhexed));
         // empty object is valid
-        assertDoesNotThrow(() ->
-                com.hedera.hapi.node.base.ServicesConfigurationList.PROTOBUF.parseStrict(
-                        BufferedData.wrap(unhexed)));
+        assertDoesNotThrow(
+                () ->
+                        com.hedera.hapi.node.base.ServicesConfigurationList.PROTOBUF.parseStrict(
+                                BufferedData.wrap(unhexed)));
     }
 
     @Test
     @DisplayName("Test readUint32NonStrict")
     void testUint32NonStrict() {
         final var unhexed = HexFormat.of().parseHex("");
-        assertDoesNotThrow(() ->
-                com.hederahashgraph.api.proto.java.ServicesConfigurationList.parseFrom(unhexed));
-        assertDoesNotThrow(() ->
-                com.hedera.hapi.node.base.ServicesConfigurationList.PROTOBUF.parse(
-                        BufferedData.wrap(unhexed)));
+        assertDoesNotThrow(
+                () ->
+                        com.hederahashgraph.api.proto.java.ServicesConfigurationList.parseFrom(
+                                unhexed));
+        assertDoesNotThrow(
+                () ->
+                        com.hedera.hapi.node.base.ServicesConfigurationList.PROTOBUF.parse(
+                                BufferedData.wrap(unhexed)));
     }
 
     static Stream<Integer> createTruncateTestArguments() {
         return Stream.of(
-                Integer.MIN_VALUE, (int)Byte.MIN_VALUE, -100, -66, -1, -2, -3, -4, -6, -7, -999, -1000);
+                Integer.MIN_VALUE,
+                (int) Byte.MIN_VALUE,
+                -100,
+                -66,
+                -1,
+                -2,
+                -3,
+                -4,
+                -6,
+                -7,
+                -999,
+                -1000);
     }
 
-    public static final String testData = "0a190a1266696c65732e6665655363686564756c657312033131310a310a29636f6e7472616374732e707265636f6d70696c652e687473456e61626c65546f6b656e4372656174651204747275650a230a1c746f6b656e732e6d6178546f6b656e4e616d6555746638427974657312033130300a1f0a16746f6b656e732e73746f726552656c734f6e4469736b120566616c73650a260a2072617465732e696e7472616461794368616e67654c696d697450657263656e74120232350a230a1e7374616b696e672e72657761726442616c616e63655468726573686f6c641201300a2a0a24636f6e7472616374732e6d6178526566756e6450657263656e744f664761734c696d6974120232300a2d0a267374616b696e672e726577617264486973746f72792e6e756d53746f726564506572696f647312033336350a1a0a146163636f756e74732e73797374656d41646d696e120235300a280a21666565732e746f6b656e5472616e7366657255736167654d756c7469706c69657212033338300a1c0a146175746f4372656174696f6e2e656e61626c65641204747275650a1e0a18666565732e6d696e436f6e67657374696f6e506572696f64120236300a1a0a1366696c65732e65786368616e6765526174657312033131320a280a1a626f6f7473747261702e72617465732e6e657874457870697279120a343130323434343830300a1a0a146163636f756e74732e667265657a6541646d696e120235380a1e0a166865646572612e666972737455736572456e746974791204313030310a370a1f636f6e7472616374732e73746f72616765536c6f745072696365546965727312143074696c3130304d2c3230303074696c3435304d0a270a2174726163656162696c6974792e6d61784578706f727473506572436f6e73536563120231300a220a1c6163636f756e74732e73797374656d556e64656c65746541646d696e120236300a280a1f636f6e7472616374732e616c6c6f774175746f4173736f63696174696f6e73120566616c73650a320a2b6865646572612e7265636f726453747265616d2e6e756d4f66426c6f636b486173686573496e537461746512033235360a2e0a256865646572612e776f726b666c6f772e766572696669636174696f6e54696d656f75744d53120532303030300a1c0a146163636f756e74732e73746f72654f6e4469736b1204747275650a280a216865646572612e616c6c6f77616e6365732e6d61784163636f756e744c696d697412033130300a2b0a256865646572612e616c6c6f77616e6365732e6d61785472616e73616374696f6e4c696d6974120232300a2b0a25636f6e73656e7375732e6d6573736167652e6d6178466f6c6c6f77696e675265636f726473120235300a2a0a236865646572612e7472616e73616374696f6e2e6d617856616c69644475726174696f6e12033138300a490a0c76657273696f6e2e68617069123953656d616e74696356657273696f6e5b6d616a6f723d302c206d696e6f723d34302c2070617463683d302c207072653d2c206275696c643d5d0a240a1d6163636f756e74732e7374616b696e675265776172644163636f756e7412033830300a310a2c6175746f72656e65772e6d61784e756d6265724f66456e746974696573546f52656e65774f7244656c6574651201320a380a217374616b696e672e6d61784461696c795374616b655265776172645468506572481213393232333337323033363835343737353830370a2b0a1f636f6e7472616374732e7265666572656e6365536c6f744c69666574696d65120833313533363030300a2d0a226c65646765722e6175746f52656e6577506572696f642e6d696e4475726174696f6e1207323539323030300a4d0a1076657273696f6e2e7365727669636573123953656d616e74696356657273696f6e5b6d616a6f723d302c206d696e6f723d34302c2070617463683d302c207072653d2c206275696c643d5d0a3a0a31636f6e7472616374732e707265636f6d70696c652e61746f6d696343727970746f5472616e736665722e656e61626c6564120566616c73650a220a14656e7469746965732e6d61784c69666574696d65120a333135333630303030300a260a1d636f6e7472616374732e65766d2e76657273696f6e2e64796e616d6963120566616c73650a2b0a22636f6e7472616374732e7369646563617256616c69646174696f6e456e61626c6564120566616c73650a210a1a6163636f756e74732e6e6f64655265776172644163636f756e7412033830310a180a11636f6e7472616374732e636861696e496412033239350a270a216c65646765722e6368616e6765486973746f7269616e2e6d656d6f727953656373120232300a290a21636f6e73656e7375732e6d657373616
7652e6d61784279746573416c6c6f7765641204313032340a180a1166696c65732e61646472657373426f6f6b12033130310a200a1a6163636f756e74732e73797374656d44656c65746541646d696e120235390a380a30636f6e7472616374732e707265636f6d70696c652e6872634661636164652e6173736f63696174652e656e61626c65641204747275650a220a1b6163636f756e74732e6c6173745468726f74746c654578656d707412033130300a1e0a16746f6b656e732e6e6674732e617265456e61626c65641204747275650a1b0a10746f706963732e6d61784e756d6265721207313030303030300a200a1a6c65646765722e6e66745472616e73666572732e6d61784c656e120231300a2a0a25636f6e73656e7375732e6d6573736167652e6d6178507265636564696e675265636f7264731201330a190a117374616b696e672e6973456e61626c65641204747275650a260a1b746f6b656e732e6e6674732e6d6178416c6c6f7765644d696e74731207353030303030300a2f0a187374616b696e672e6d61785374616b6552657761726465641213353030303030303030303030303030303030300a2b0a1d626f6f7473747261702e72617465732e63757272656e74457870697279120a343130323434343830300a1e0a1766696c65732e7570677261646546696c654e756d62657212033135300a240a19636f6e7472616374732e64656661756c744c69666574696d651207373839303030300a260a217374616b696e672e666565732e6e6f646552657761726450657263656e746167651201300a200a19746f6b656e732e6d617853796d626f6c55746638427974657312033130300a250a1d736967732e657870616e6446726f6d496d6d757461626c6553746174651204747275650a170a127374616b696e672e726577617264526174651201300a2b0a1d626f6f7473747261702e73797374656d2e656e74697479457870697279120a313831323633373638360a1f0a196163636f756e74732e61646472657373426f6f6b41646d696e120235350a2b0a246865646572612e7265636f726453747265616d2e736964656361724d617853697a654d6212033235360a300a257363686564756c696e672e6d617845787069726174696f6e4675747572655365636f6e64731207353335363830300a2a0a21636f6e7472616374732e656e666f7263654372656174696f6e5468726f74746c65120566616c73650a1c0a14746f6b656e732e6d61785065724163636f756e741204313030300a1c0a1566696c65732e686170695065726d697373696f6e7312033132320a2d0a286865646572612e7265636f726453747265616d2e7369676e617475726546696c6556657273696f6e1201360a200a19746f6b656e732e6e6674732e6d6178517565727952616e676512033130300a1d0a176c65646765722e7472616e73666572732e6d61784c656e120231300a230a1a6163636f756e74732e626c6f636b6c6973742e656e61626c6564120566616c73650a200a1b72617465732e6d69646e69676874436865636b496e74657276616c1201310a2f0a2a74726163656162696c6974792e6d696e46726565546f557365644761735468726f74746c65526174696f1201390a340a266865646572612e7265636f726453747265616d2e73747265616d46696c6550726f6475636572120a636f6e63757272656e740a220a1c746f6b656e732e6e6674732e6d6178426174636853697a6557697065120231300a330a2b6865646572612e7265636f726453747265616d2e636f6d707265737346696c65734f6e4372656174696f6e1204747275650a1a0a127374616b696e672e706572696f644d696e731204313434300a240a1b6175746f72656e65772e6772616e744672656552656e6577616c73120566616c73650a2b0a1e636f6e7472616374732e6d61784b7650616972732e61676772656761746512093530303030303030300a220a1c746f6b656e732e6e6674732e6d6178426174636853697a654d696e74120231300a240a1d7374616b696e672e73756d4f66436f6e73656e7375735765696768747312033530300a210a1b746f6b656e732e6d6178437573746f6d46656573416c6c6f776564120231300a1c0a146c617a794372656174696f6e2e656e61626c65641204747275650a1b0a10746f6b656e732e6d61784e756d6265721207313030303030300a1d0a126163636f756e74732e6d61784e756d6265721207353030303030300a240a1c636f6e7472616374732e6974656d697a6553746f72616765466565731204747275650a230a1b6865646572612e616c6c6f77616e6365732e6973456e61626c65641204747275650a380a23626f6f7473747261702e6665655363686564756c65734a736f6e2e7265736f7572636512116665655363686564756c65732e6a
736f6e0a2b0a246c65646765722e7265636f7264732e6d6178517565727961626c6542794163636f756e7412033138300a220a16636f6e7472616374732e6d6178476173506572536563120831353030303030300a300a28636f6e7472616374732e707265636f6d70696c652e6578706f72745265636f7264526573756c74731204747275650a1b0a156175746f52656e65772e746172676574547970657312025b5d0a270a22636f6e7472616374732e6d61784e756d5769746848617069536967734163636573731201300a280a20636f6e7472616374732e7468726f74746c652e7468726f74746c6542794761731204747275650a230a17746f6b656e732e6d617841676772656761746552656c73120831303030303030300a260a20626f6f7473747261702e72617465732e63757272656e7443656e744571756976120231320a290a236865646572612e7472616e73616374696f6e2e6d696e56616c69644475726174696f6e120231350a510a12636f6e7472616374732e7369646563617273123b5b434f4e54524143545f53544154455f4348414e47452c20434f4e54524143545f414354494f4e2c20434f4e54524143545f42595445434f44455d0a1b0a156c65646765722e66756e64696e674163636f756e74120239380a230a1a7363686564756c696e672e6c6f6e675465726d456e61626c6564120566616c73650a220a1a6c65646765722e6d61784175746f4173736f63696174696f6e731204353030300a1e0a16636f6e7472616374";
+    public static final String testData =
+            "0a190a1266696c65732e6665655363686564756c657312033131310a310a29636f6e7472616374732e707265636f6d70696c652e687473456e61626c65546f6b656e4372656174651204747275650a230a1c746f6b656e732e6d6178546f6b656e4e616d6555746638427974657312033130300a1f0a16746f6b656e732e73746f726552656c734f6e4469736b120566616c73650a260a2072617465732e696e7472616461794368616e67654c696d697450657263656e74120232350a230a1e7374616b696e672e72657761726442616c616e63655468726573686f6c641201300a2a0a24636f6e7472616374732e6d6178526566756e6450657263656e744f664761734c696d6974120232300a2d0a267374616b696e672e726577617264486973746f72792e6e756d53746f726564506572696f647312033336350a1a0a146163636f756e74732e73797374656d41646d696e120235300a280a21666565732e746f6b656e5472616e7366657255736167654d756c7469706c69657212033338300a1c0a146175746f4372656174696f6e2e656e61626c65641204747275650a1e0a18666565732e6d696e436f6e67657374696f6e506572696f64120236300a1a0a1366696c65732e65786368616e6765526174657312033131320a280a1a626f6f7473747261702e72617465732e6e657874457870697279120a343130323434343830300a1a0a146163636f756e74732e667265657a6541646d696e120235380a1e0a166865646572612e666972737455736572456e746974791204313030310a370a1f636f6e7472616374732e73746f72616765536c6f745072696365546965727312143074696c3130304d2c3230303074696c3435304d0a270a2174726163656162696c6974792e6d61784578706f727473506572436f6e73536563120231300a220a1c6163636f756e74732e73797374656d556e64656c65746541646d696e120236300a280a1f636f6e7472616374732e616c6c6f774175746f4173736f63696174696f6e73120566616c73650a320a2b6865646572612e7265636f726453747265616d2e6e756d4f66426c6f636b486173686573496e537461746512033235360a2e0a256865646572612e776f726b666c6f772e766572696669636174696f6e54696d656f75744d53120532303030300a1c0a146163636f756e74732e73746f72654f6e4469736b1204747275650a280a216865646572612e616c6c6f77616e6365732e6d61784163636f756e744c696d697412033130300a2b0a256865646572612e616c6c6f77616e6365732e6d61785472616e73616374696f6e4c696d6974120232300a2b0a25636f6e73656e7375732e6d6573736167652e6d6178466f6c6c6f77696e675265636f726473120235300a2a0a236865646572612e7472616e73616374696f6e2e6d617856616c69644475726174696f6e12033138300a490a0c76657273696f6e2e68617069123953656d616e74696356657273696f6e5b6d616a6f723d302c206d696e6f723d34302c2070617463683d302c207072653d2c206275696c643d5d0a240a1d6163636f756e74732e7374616b696e675265776172644163636f756e7412033830300a310a2c6175746f72656e65772e6d61784e756d6265724f66456e746974696573546f52656e65774f7244656c6574651201320a380a217374616b696e672e6d61784461696c795374616b655265776172645468506572481213393232333337323033363835343737353830370a2b0a1f636f6e7472616374732e7265666572656e6365536c6f744c69666574696d65120833313533363030300a2d0a226c65646765722e6175746f52656e6577506572696f642e6d696e4475726174696f6e1207323539323030300a4d0a1076657273696f6e2e7365727669636573123953656d616e74696356657273696f6e5b6d616a6f723d302c206d696e6f723d34302c2070617463683d302c207072653d2c206275696c643d5d0a3a0a31636f6e7472616374732e707265636f6d70696c652e61746f6d696343727970746f5472616e736665722e656e61626c6564120566616c73650a220a14656e7469746965732e6d61784c69666574696d65120a333135333630303030300a260a1d636f6e7472616374732e65766d2e76657273696f6e2e64796e616d6963120566616c73650a2b0a22636f6e7472616374732e7369646563617256616c69646174696f6e456e61626c6564120566616c73650a210a1a6163636f756e74732e6e6f64655265776172644163636f756e7412033830310a180a11636f6e7472616374732e636861696e496412033239350a270a216c65646765722e6368616e6765486973746f7269616e2e6d656d6f727953656373120232300a290a21636f6e73656e7375732e6d6573736167652e6d61784279746573416c6c6f7
765641204313032340a180a1166696c65732e61646472657373426f6f6b12033130310a200a1a6163636f756e74732e73797374656d44656c65746541646d696e120235390a380a30636f6e7472616374732e707265636f6d70696c652e6872634661636164652e6173736f63696174652e656e61626c65641204747275650a220a1b6163636f756e74732e6c6173745468726f74746c654578656d707412033130300a1e0a16746f6b656e732e6e6674732e617265456e61626c65641204747275650a1b0a10746f706963732e6d61784e756d6265721207313030303030300a200a1a6c65646765722e6e66745472616e73666572732e6d61784c656e120231300a2a0a25636f6e73656e7375732e6d6573736167652e6d6178507265636564696e675265636f7264731201330a190a117374616b696e672e6973456e61626c65641204747275650a260a1b746f6b656e732e6e6674732e6d6178416c6c6f7765644d696e74731207353030303030300a2f0a187374616b696e672e6d61785374616b6552657761726465641213353030303030303030303030303030303030300a2b0a1d626f6f7473747261702e72617465732e63757272656e74457870697279120a343130323434343830300a1e0a1766696c65732e7570677261646546696c654e756d62657212033135300a240a19636f6e7472616374732e64656661756c744c69666574696d651207373839303030300a260a217374616b696e672e666565732e6e6f646552657761726450657263656e746167651201300a200a19746f6b656e732e6d617853796d626f6c55746638427974657312033130300a250a1d736967732e657870616e6446726f6d496d6d757461626c6553746174651204747275650a170a127374616b696e672e726577617264526174651201300a2b0a1d626f6f7473747261702e73797374656d2e656e74697479457870697279120a313831323633373638360a1f0a196163636f756e74732e61646472657373426f6f6b41646d696e120235350a2b0a246865646572612e7265636f726453747265616d2e736964656361724d617853697a654d6212033235360a300a257363686564756c696e672e6d617845787069726174696f6e4675747572655365636f6e64731207353335363830300a2a0a21636f6e7472616374732e656e666f7263654372656174696f6e5468726f74746c65120566616c73650a1c0a14746f6b656e732e6d61785065724163636f756e741204313030300a1c0a1566696c65732e686170695065726d697373696f6e7312033132320a2d0a286865646572612e7265636f726453747265616d2e7369676e617475726546696c6556657273696f6e1201360a200a19746f6b656e732e6e6674732e6d6178517565727952616e676512033130300a1d0a176c65646765722e7472616e73666572732e6d61784c656e120231300a230a1a6163636f756e74732e626c6f636b6c6973742e656e61626c6564120566616c73650a200a1b72617465732e6d69646e69676874436865636b496e74657276616c1201310a2f0a2a74726163656162696c6974792e6d696e46726565546f557365644761735468726f74746c65526174696f1201390a340a266865646572612e7265636f726453747265616d2e73747265616d46696c6550726f6475636572120a636f6e63757272656e740a220a1c746f6b656e732e6e6674732e6d6178426174636853697a6557697065120231300a330a2b6865646572612e7265636f726453747265616d2e636f6d707265737346696c65734f6e4372656174696f6e1204747275650a1a0a127374616b696e672e706572696f644d696e731204313434300a240a1b6175746f72656e65772e6772616e744672656552656e6577616c73120566616c73650a2b0a1e636f6e7472616374732e6d61784b7650616972732e61676772656761746512093530303030303030300a220a1c746f6b656e732e6e6674732e6d6178426174636853697a654d696e74120231300a240a1d7374616b696e672e73756d4f66436f6e73656e7375735765696768747312033530300a210a1b746f6b656e732e6d6178437573746f6d46656573416c6c6f776564120231300a1c0a146c617a794372656174696f6e2e656e61626c65641204747275650a1b0a10746f6b656e732e6d61784e756d6265721207313030303030300a1d0a126163636f756e74732e6d61784e756d6265721207353030303030300a240a1c636f6e7472616374732e6974656d697a6553746f72616765466565731204747275650a230a1b6865646572612e616c6c6f77616e6365732e6973456e61626c65641204747275650a380a23626f6f7473747261702e6665655363686564756c65734a736f6e2e7265736f7572636512116665655363686564756c65732e6a736f6e0a2b0a246c65646765722e72
65636f7264732e6d6178517565727961626c6542794163636f756e7412033138300a220a16636f6e7472616374732e6d6178476173506572536563120831353030303030300a300a28636f6e7472616374732e707265636f6d70696c652e6578706f72745265636f7264526573756c74731204747275650a1b0a156175746f52656e65772e746172676574547970657312025b5d0a270a22636f6e7472616374732e6d61784e756d5769746848617069536967734163636573731201300a280a20636f6e7472616374732e7468726f74746c652e7468726f74746c6542794761731204747275650a230a17746f6b656e732e6d617841676772656761746552656c73120831303030303030300a260a20626f6f7473747261702e72617465732e63757272656e7443656e744571756976120231320a290a236865646572612e7472616e73616374696f6e2e6d696e56616c69644475726174696f6e120231350a510a12636f6e7472616374732e7369646563617273123b5b434f4e54524143545f53544154455f4348414e47452c20434f4e54524143545f414354494f4e2c20434f4e54524143545f42595445434f44455d0a1b0a156c65646765722e66756e64696e674163636f756e74120239380a230a1a7363686564756c696e672e6c6f6e675465726d456e61626c6564120566616c73650a220a1a6c65646765722e6d61784175746f4173736f63696174696f6e731204353030300a1e0a16636f6e7472616374";
 }