diff --git a/src/app/blog/hashing-multiple-blobs-with-BLAKE3/page.mdx b/src/app/blog/hashing-multiple-blobs-with-BLAKE3/page.mdx index 819bbb8d..8ebf2905 100644 --- a/src/app/blog/hashing-multiple-blobs-with-BLAKE3/page.mdx +++ b/src/app/blog/hashing-multiple-blobs-with-BLAKE3/page.mdx @@ -4,7 +4,7 @@ import {ThemeImage} from '@/components/ThemeImage' export const post = { draft: false, author: 'Rüdiger Klaehn', - date: '2025-10-20', + date: '2025-10-15', title: 'Hashing multiple blobs with BLAKE3', description: "How to quickly hash multiple small blobs with BLAKE3", @@ -129,6 +129,8 @@ The hazmat API gives you the ability to use the `Hasher` to compute the intermed But the API still focuses around the `Hasher`, so it still works only for computing data for *individual* blobs. +We have written about the [new hazmat API](https://www.iroh.computer/blog/blake3-hazmat-api) previously. + ## Using the internal platform API So it looks like we have no choice but to dig deeper and see if we can implement this using existing internals.