From 0946c333fa3a6b400355c6f6beb2ddc7fe5c4ebc Mon Sep 17 00:00:00 2001
From: Kieran O'Neill
Date: Sun, 25 Feb 2024 17:33:10 +0200
Subject: [PATCH] feat: list files (#3)
* refactor: remove temp directory use and use a random hash
* refactor: revert directory naming to the merkle root, it just makes sense
* feat: implement merkle proof generation
* fix: merkle tree algorithm to correctly create trees when an odd number of leaves is found
* feat: add a page that lists files, fetching them and generating a merkle proof for each file (see the verification sketch below)
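
A generated proof is an ordered list of {hash, position} items: the leaf's own
hash first, followed by one sibling hash per tree level. Below is a minimal,
self-contained sketch of how such a proof can be folded back up to a root,
mirroring VerifyMerkleTreeProof; the helper names and the two-leaf values are
illustrative only, and it assumes position 0 means "left" and 1 means "right",
as in the web app's LeafPositionEnum.

    package main

    import (
        "crypto/sha256"
        "encoding/hex"
        "fmt"
    )

    // proofItem mirrors types.MerkleTreeProofItem.
    type proofItem struct {
        Hash     string
        Position int // assumed: 0 = left, 1 = right
    }

    // verifyProof folds a proof back up to a root, pairing each sibling on the
    // side indicated by its position, and compares the result to the expected root.
    func verifyProof(root string, proof []proofItem) bool {
        if len(proof) == 0 {
            return false
        }

        current := proof[0].Hash // the leaf itself

        for _, item := range proof[1:] {
            pair := item.Hash + current // sibling sits on the left
            if item.Position == 1 {
                pair = current + item.Hash // sibling sits on the right
            }

            sum := sha256.Sum256([]byte(pair))
            current = hex.EncodeToString(sum[:])
        }

        return current == root
    }

    func main() {
        // two hypothetical leaf hashes; the root is the hash of their concatenation
        left, right := "aa", "bb"
        sum := sha256.Sum256([]byte(left + right))
        root := hex.EncodeToString(sum[:])

        // proof for the left leaf: the leaf itself, then its right-hand sibling
        proof := []proofItem{
            {Hash: left, Position: 0},
            {Hash: right, Position: 1},
        }

        fmt.Println(verifyProof(root, proof)) // true
    }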
---
README.md | 12 +-
app/@types/environment.d.ts | 1 +
app/components/Header/Header.tsx | 4 +-
app/components/Navigation/Navigation.tsx | 11 +-
.../UploadCompleteModal.tsx | 18 +-
.../UploadCompleteModal/types/IProps.ts | 5 +-
app/constants/Paths.ts | 3 +
app/constants/Routes.ts | 1 +
app/constants/index.ts | 1 +
app/enums/LeafPositionEnum.ts | 6 +
app/enums/index.ts | 1 +
app/files/page.tsx | 179 +++++++++++++++++
app/hooks/useFiles/index.ts | 2 +
app/hooks/useFiles/types/IUseFilesState.ts | 10 +
app/hooks/useFiles/types/index.ts | 1 +
app/hooks/useFiles/useFiles.ts | 50 +++++
app/types/IFileDirectoryItem.ts | 6 +
app/types/IFileProofItem.ts | 9 +
app/types/IFileResponse.ts | 9 +
app/types/IUploadResponse.ts | 4 +
app/types/index.ts | 3 +
app/upload/page.tsx | 14 +-
.../downloadJSONFile/downloadJSONFile.ts | 6 +-
cmd/core/main.go | 22 +--
configs/.env.core.example | 1 +
configs/.env.web.example | 7 +-
deployments/docker-compose.yml | 1 -
internal/constants/{storage.go => files.go} | 2 +-
internal/files/hashing.go | 21 --
internal/files/storage.go | 24 ---
internal/files/storage_test.go | 25 ---
internal/merkletree/merkletree.go | 179 ++++++++++++++---
internal/merkletree/merkletree_test.go | 140 ++++++++-----
internal/routes/files.go | 187 ++++++++----------
internal/routes/versions.go | 2 +-
internal/types/filedirectoryitem.go | 6 +
internal/types/filemetadata.go | 6 -
internal/types/filereaddata.go | 8 +
internal/types/fileresponse.go | 7 +
internal/types/filesuploadresponse.go | 3 +-
internal/types/merkletreeproofitem.go | 6 +
internal/utils/filedirectory.go | 71 +++++++
internal/utils/hash.go | 13 ++
internal/utils/storage.go | 42 ++++
package.json | 6 +-
tsconfig.json | 1 +
46 files changed, 824 insertions(+), 312 deletions(-)
create mode 100644 app/constants/Paths.ts
create mode 100644 app/enums/LeafPositionEnum.ts
create mode 100644 app/enums/index.ts
create mode 100644 app/files/page.tsx
create mode 100644 app/hooks/useFiles/index.ts
create mode 100644 app/hooks/useFiles/types/IUseFilesState.ts
create mode 100644 app/hooks/useFiles/types/index.ts
create mode 100644 app/hooks/useFiles/useFiles.ts
create mode 100644 app/types/IFileDirectoryItem.ts
create mode 100644 app/types/IFileProofItem.ts
create mode 100644 app/types/IFileResponse.ts
rename internal/constants/{storage.go => files.go} (59%)
delete mode 100644 internal/files/hashing.go
delete mode 100644 internal/files/storage.go
delete mode 100644 internal/files/storage_test.go
create mode 100644 internal/types/filedirectoryitem.go
delete mode 100644 internal/types/filemetadata.go
create mode 100644 internal/types/filereaddata.go
create mode 100644 internal/types/fileresponse.go
create mode 100644 internal/types/merkletreeproofitem.go
create mode 100644 internal/utils/filedirectory.go
create mode 100644 internal/utils/storage.go
diff --git a/README.md b/README.md
index c84b871..7077527 100644
--- a/README.md
+++ b/README.md
@@ -5,15 +5,15 @@
- Aether
+ aether
- Journey into the Aether where storage defies conventional bounds.
+ Journey into the aether where storage defies conventional bounds.
- Aether is a modern file storage platform that implements a state-of-the-art Merkle tree structure to store multiple files.
+ aether is a modern file storage platform that implements a state-of-the-art Merkle tree structure to store multiple files.
@@ -86,7 +86,7 @@ make setup
make
```
-> ⚠️ **NOTE:** The `make` command will run/re-run `make install`, but will not overwrite any `.env.*` that may have been edited in section [1.2.](#22-setting-up-environment-variables-optional)
+> ⚠️ **NOTE:** The `make` command will run/re-run `make setup`, but will not overwrite any `.env.*` that may have been edited in section [1.2.](#22-setting-up-environment-variables-optional)
2. Navigate to [http://localhost:8080](http://localhost:8080) to access the web portal.
@@ -105,8 +105,8 @@ make
| `make dev-web` | Runs the web app using `next dev`. Intended for development purposes only. |
| `make clean` | Deletes the build directory. |
| `make install` | Installs the yarn and golang dependencies. |
-| `make run` | Checks if the apps are correctly configured and runs Docker Compose. |
-| `make setup` | Creates the `.env.*` files to the `.config/` directory. |
+| `make run` | Checks if the apps are correctly configured and runs Docker Compose. Intended for development purposes only. |
+| `make setup` | Creates `.env.*` files in the `.config/` directory. |
[Back to top ^][table-of-contents]
diff --git a/app/@types/environment.d.ts b/app/@types/environment.d.ts
index bd0b276..61ec628 100644
--- a/app/@types/environment.d.ts
+++ b/app/@types/environment.d.ts
@@ -6,6 +6,7 @@ declare namespace NodeJS {
readonly PORT: string;
// public
+ readonly NEXT_PUBLIC_CORE_URL: string;
readonly NEXT_PUBLIC_DESCRIPTION: string;
readonly NEXT_PUBLIC_LOG_LEVEL: string;
readonly NEXT_PUBLIC_TAGLINE: string;
diff --git a/app/components/Header/Header.tsx b/app/components/Header/Header.tsx
index 5f20d7e..eb2efc8 100644
--- a/app/components/Header/Header.tsx
+++ b/app/components/Header/Header.tsx
@@ -36,7 +36,7 @@ const Header: FC = ({ onNavigationClick }) => {
w="full"
>
{/*open navigation menu button*/}
-
+
= ({ onNavigationClick }) => {
>
: }
onClick={handlerColorChangeClick}
diff --git a/app/components/Navigation/Navigation.tsx b/app/components/Navigation/Navigation.tsx
index bf3a37d..67ac770 100644
--- a/app/components/Navigation/Navigation.tsx
+++ b/app/components/Navigation/Navigation.tsx
@@ -18,6 +18,7 @@ import {
IoCheckmarkCircleOutline,
IoChevronBackOutline,
IoCloudUploadOutline,
+ IoListOutline,
} from 'react-icons/io5';
// components
@@ -27,6 +28,7 @@ import NavigationLinkItem from './NavigationLinkItem';
// constants
import {
DEFAULT_GAP,
+ FILES_ROUTE,
INDEX_ROUTE,
UPLOAD_ROUTE,
VERIFY_ROUTE,
@@ -53,6 +55,11 @@ const Navigation: FC = ({ isOpen, onClose }) => {
label: 'Upload',
route: UPLOAD_ROUTE,
},
+ {
+ icon: IoListOutline,
+ label: 'Files',
+ route: FILES_ROUTE,
+ },
{
icon: IoCheckmarkCircleOutline,
label: 'Verify',
@@ -82,7 +89,7 @@ const Navigation: FC = ({ isOpen, onClose }) => {
{/*icon*/}
}
onClick={handleHomeClick}
@@ -93,7 +100,7 @@ const Navigation: FC = ({ isOpen, onClose }) => {
{/*close navigation menu button*/}
-
+
-const UploadCompleteModal: FC<IProps> = ({ merkleTreeRootHash, onClose }) => {
+const UploadCompleteModal: FC<IProps> = ({ onClose, uploadResponse }) => {
// hooks
const defaultTextColor: string = useDefaultTextColor();
const logger: ILogger = useLogger();
@@ -44,7 +44,7 @@ const UploadCompleteModal: FC = ({ merkleTreeRootHash, onClose }) => {
const handleDownloadMerkleRoot = () => {
const _functionName: string = 'handleDownloadMerkleRoot';
- if (!merkleTreeRootHash) {
+ if (!uploadResponse) {
logger.debug(
`${UploadCompleteModal.displayName}#${_functionName}: no merkle tree root hash found, ignoring`
);
@@ -52,16 +52,14 @@ const UploadCompleteModal: FC = ({ merkleTreeRootHash, onClose }) => {
return;
}
- // create a data uri from the json and download ir
- downloadJSONFile('root', {
- root: merkleTreeRootHash,
- });
+ // create a data uri from the json and download it - use the merkle root as the file name
+ downloadJSONFile(uploadResponse.root, uploadResponse);
};
return (
@@ -82,19 +80,19 @@ const UploadCompleteModal: FC = ({ merkleTreeRootHash, onClose }) => {
{/*merkle tree root hash*/}
- {!merkleTreeRootHash ? (
+ {!uploadResponse ? (
) : (
<>
{/*merkle tree root hash*/}
- {merkleTreeRootHash}
+ {uploadResponse.root}
{/*copy button*/}
>
)}
diff --git a/app/components/UploadCompleteModal/types/IProps.ts b/app/components/UploadCompleteModal/types/IProps.ts
index 7471dea..7b9e2e5 100644
--- a/app/components/UploadCompleteModal/types/IProps.ts
+++ b/app/components/UploadCompleteModal/types/IProps.ts
@@ -1,6 +1,9 @@
+// types
+import type { IUploadResponse } from '@app/types';
+
interface IProps {
- merkleTreeRootHash: string | null;
onClose: () => void;
+ uploadResponse: IUploadResponse | null;
}
export default IProps;
diff --git a/app/constants/Paths.ts b/app/constants/Paths.ts
new file mode 100644
index 0000000..ed2f902
--- /dev/null
+++ b/app/constants/Paths.ts
@@ -0,0 +1,3 @@
+export const FILES_PATH: string = 'files';
+export const UPLOAD_PATH: string = 'upload';
+export const VERSIONS_PATH: string = 'versions';
diff --git a/app/constants/Routes.ts b/app/constants/Routes.ts
index 1ffcf2b..71548dd 100644
--- a/app/constants/Routes.ts
+++ b/app/constants/Routes.ts
@@ -1,3 +1,4 @@
+export const FILES_ROUTE: string = '/files';
export const INDEX_ROUTE: string = '/';
export const UPLOAD_ROUTE: string = '/upload';
export const VERIFY_ROUTE: string = '/verify';
diff --git a/app/constants/index.ts b/app/constants/index.ts
index ee645a6..1356330 100644
--- a/app/constants/index.ts
+++ b/app/constants/index.ts
@@ -1,4 +1,5 @@
export * from './Dimensions';
export * from './Links';
+export * from './Paths';
export * from './Routes';
export * from './Styles';
diff --git a/app/enums/LeafPositionEnum.ts b/app/enums/LeafPositionEnum.ts
new file mode 100644
index 0000000..c0bcbfb
--- /dev/null
+++ b/app/enums/LeafPositionEnum.ts
@@ -0,0 +1,6 @@
+enum LeafPositionEnum {
+ Left = 0,
+ Right = 1,
+}
+
+export default LeafPositionEnum;
diff --git a/app/enums/index.ts b/app/enums/index.ts
new file mode 100644
index 0000000..40dfb05
--- /dev/null
+++ b/app/enums/index.ts
@@ -0,0 +1 @@
+export { default as LeafPositionEnum } from './LeafPositionEnum';
diff --git a/app/files/page.tsx b/app/files/page.tsx
new file mode 100644
index 0000000..833cea3
--- /dev/null
+++ b/app/files/page.tsx
@@ -0,0 +1,179 @@
+'use client';
+import {
+ Accordion,
+ AccordionButton,
+ AccordionIcon,
+ AccordionItem,
+ AccordionPanel,
+ Code,
+ Heading,
+ HStack,
+ IconButton,
+ Skeleton,
+ Spacer,
+ Stack,
+ Text,
+ Tooltip,
+ VStack,
+} from '@chakra-ui/react';
+import { NextPage } from 'next';
+import React from 'react';
+import { IoDownloadOutline } from 'react-icons/io5';
+
+// components
+import CopyIconButton from '@app/components/CopyIconButton';
+
+// constants
+import { DEFAULT_GAP } from '@app/constants';
+
+// hooks
+import useButtonHoverBackgroundColor from '@app/hooks/useButtonHoverBackgroundColor';
+import useDefaultTextColor from '@app/hooks/useDefaultTextColor';
+import useFiles from '@app/hooks/useFiles';
+import useSubTextColor from '@app/hooks/useSubTextColor';
+
+// types
+import type { IFileResponse } from '@app/types';
+
+// utils
+import downloadJSONFile from '@app/utils/downloadJSONFile';
+
+const FilesPage: NextPage = () => {
+ // hooks
+ const buttonHoverBackgroundColor: string = useButtonHoverBackgroundColor();
+ const defaultTextColor: string = useDefaultTextColor();
+ const subTextColor: string = useSubTextColor();
+ const { files, loading } = useFiles();
+ // handlers
+ const handleDownloadProofClick = (file: IFileResponse) => () =>
+ downloadJSONFile(file.hash, file.proof);
+ // renders
+ const renderContent = () => {
+ let fileKeys: string[];
+
+ if (loading) {
+ return Array.from({ length: 3 }, (_, index) => (
+
+ ));
+ }
+
+ if (files) {
+ fileKeys = Object.keys(files);
+
+ if (fileKeys.length > 0) {
+ return (
+
+ {fileKeys.map((key, fileKeyIndex) => (
+
+ {/*accordian button*/}
+
+
+ {key}
+
+
+
+
+
+
+
+ {/*list of files*/}
+
+ {files[key].map((file, index) => (
+
+ {/*name*/}
+
+ {file.name}
+
+
+
+
+ {/*copy hash button*/}
+
+
+ {/*download proof button*/}
+
+ }
+ onClick={handleDownloadProofClick(file)}
+ size="md"
+ variant="ghost"
+ />
+
+
+ ))}
+
+
+ ))}
+
+ );
+ }
+ }
+
+ // when there are no files returned
+ return (
+
+
+ {`No files found!`}
+
+
+ );
+ };
+
+ return (
+
+ {/*heading*/}
+
+ {`Files`}
+
+
+ {/*description*/}
+
+ {`Below is a list of files, grouped by their merkle tree roots. You can download a file's proof and use the root you received to verify the file's integrity.`}
+
+
+
+ {renderContent()}
+
+
+ );
+};
+
+export default FilesPage;
diff --git a/app/hooks/useFiles/index.ts b/app/hooks/useFiles/index.ts
new file mode 100644
index 0000000..6d2ca39
--- /dev/null
+++ b/app/hooks/useFiles/index.ts
@@ -0,0 +1,2 @@
+export { default } from './useFiles';
+export * from './types';
diff --git a/app/hooks/useFiles/types/IUseFilesState.ts b/app/hooks/useFiles/types/IUseFilesState.ts
new file mode 100644
index 0000000..41a40a4
--- /dev/null
+++ b/app/hooks/useFiles/types/IUseFilesState.ts
@@ -0,0 +1,10 @@
+// types
+import { IFileResponse } from '@app/types';
+
+interface IUseFilesState {
+ error: string | null;
+ files: Record<string, IFileResponse[]> | null;
+ loading: boolean;
+}
+
+export default IUseFilesState;
diff --git a/app/hooks/useFiles/types/index.ts b/app/hooks/useFiles/types/index.ts
new file mode 100644
index 0000000..a3a7402
--- /dev/null
+++ b/app/hooks/useFiles/types/index.ts
@@ -0,0 +1 @@
+export type { default as IUseFilesState } from './IUseFilesState';
diff --git a/app/hooks/useFiles/useFiles.ts b/app/hooks/useFiles/useFiles.ts
new file mode 100644
index 0000000..2540278
--- /dev/null
+++ b/app/hooks/useFiles/useFiles.ts
@@ -0,0 +1,50 @@
+import axios, { AxiosResponse } from 'axios';
+import { useEffect, useState } from 'react';
+
+// constants
+import { FILES_PATH } from '@app/constants';
+
+// hooks
+import useLogger from '@app/hooks/useLogger';
+
+// types
+import type { IFileResponse, ILogger } from '@app/types';
+import type { IUseFilesState } from './types';
+
+export default function useFiles(): IUseFilesState {
+ const _functionName: string = 'useFiles';
+ // hooks
+ const logger: ILogger = useLogger();
+ // states
+ const [error, setError] = useState<string | null>(null);
+ const [files, setFiles] = useState<Record<string, IFileResponse[]> | null>(
+ null
+ );
+ const [loading, setLoading] = useState(true);
+
+ useEffect(() => {
+ (async () => {
+ let response: AxiosResponse<Record<string, IFileResponse[]>>;
+
+ try {
+ response = await axios.get(
+ `${process.env.NEXT_PUBLIC_CORE_URL}/${FILES_PATH}`
+ );
+
+ setFiles(response.data);
+ } catch (error) {
+ logger.error(`${_functionName}:`, error);
+
+ setError(error.message);
+ }
+
+ setLoading(false);
+ })();
+ }, []);
+
+ return {
+ error,
+ files,
+ loading,
+ };
+}
diff --git a/app/types/IFileDirectoryItem.ts b/app/types/IFileDirectoryItem.ts
new file mode 100644
index 0000000..575d83b
--- /dev/null
+++ b/app/types/IFileDirectoryItem.ts
@@ -0,0 +1,6 @@
+interface IFileDirectoryItem {
+ hash: string;
+ name: string;
+}
+
+export default IFileDirectoryItem;
diff --git a/app/types/IFileProofItem.ts b/app/types/IFileProofItem.ts
new file mode 100644
index 0000000..f4879f5
--- /dev/null
+++ b/app/types/IFileProofItem.ts
@@ -0,0 +1,9 @@
+// enums
+import { LeafPositionEnum } from '@app/enums';
+
+interface IFileProofItem {
+ hash: string;
+ position: LeafPositionEnum;
+}
+
+export default IFileProofItem;
diff --git a/app/types/IFileResponse.ts b/app/types/IFileResponse.ts
new file mode 100644
index 0000000..889c9e8
--- /dev/null
+++ b/app/types/IFileResponse.ts
@@ -0,0 +1,9 @@
+// types
+import IFileDirectoryItem from './IFileDirectoryItem';
+import IFileProofItem from './IFileProofItem';
+
+interface IFileResponse extends IFileDirectoryItem {
+ proof: IFileProofItem[];
+}
+
+export default IFileResponse;
diff --git a/app/types/IUploadResponse.ts b/app/types/IUploadResponse.ts
index f6b4c92..9a5cd6a 100644
--- a/app/types/IUploadResponse.ts
+++ b/app/types/IUploadResponse.ts
@@ -1,4 +1,8 @@
+// types
+import IFileDirectoryItem from './IFileDirectoryItem';
+
interface IUploadResponse {
+ directory: IFileDirectoryItem[];
root: string;
}
diff --git a/app/types/index.ts b/app/types/index.ts
index ae216f1..b97aa33 100644
--- a/app/types/index.ts
+++ b/app/types/index.ts
@@ -1,3 +1,6 @@
+export type { default as IFileDirectoryItem } from './IFileDirectoryItem';
+export type { default as IFileProofItem } from './IFileProofItem';
+export type { default as IFileResponse } from './IFileResponse';
export type { default as ILayoutProps } from './ILayoutProps';
export type { default as ILogger } from './ILogger';
export type { default as ILogLevel } from './ILogLevel';
diff --git a/app/upload/page.tsx b/app/upload/page.tsx
index 212b1fd..3ee7188 100644
--- a/app/upload/page.tsx
+++ b/app/upload/page.tsx
@@ -27,7 +27,7 @@ import { IoCloudUploadOutline, IoDocumentsOutline } from 'react-icons/io5';
import UploadCompleteModal from '@app/components/UploadCompleteModal/UploadCompleteModal';
// constants
-import { DEFAULT_GAP } from '@app/constants';
+import { DEFAULT_GAP, FILES_PATH, UPLOAD_PATH } from '@app/constants';
// hooks
import useDefaultTextColor from '@app/hooks/useDefaultTextColor';
@@ -54,13 +54,13 @@ const UploadPage: NextPage = () => {
const primaryColorScheme: string = usePrimaryColorScheme();
// state
const [fileList, setFileList] = useState<FileList | null>(null);
- const [merkleTreeRootHash, setMerkleTreeRootHash] = useState<string | null>(
+ const [uploadResponse, setUploadResponse] = useState<IUploadResponse | null>(
null
);
const [uploading, setUploading] = useState(false);
// misc
const reset = () => {
- setMerkleTreeRootHash(null);
+ setUploadResponse(null);
setFileList(null);
setUploading(false);
};
@@ -85,12 +85,12 @@ const UploadPage: NextPage = () => {
// create the form data
Array.from(fileList).forEach((file) =>
- formData.append('files', file, file.name)
+ formData.append('file', file, file.name)
);
try {
response = await axios.post(
- 'http://localhost:3000/files/upload',
+ `${process.env.NEXT_PUBLIC_CORE_URL}/${FILES_PATH}/${UPLOAD_PATH}`,
formData
);
@@ -99,7 +99,7 @@ const UploadPage: NextPage = () => {
response.data
);
- setMerkleTreeRootHash(response.data.root);
+ setUploadResponse(response.data);
setUploading(false);
} catch (error) {
logger.error(`${UploadPage.displayName}#${_functionName}:`, error);
@@ -130,8 +130,8 @@ const UploadPage: NextPage = () => {
return (
<>
} data - the actual JSON to encode in the data URI.
+ * @param {unknown} data - the actual JSON to encode in the data URI.
*/
-export default function downloadJSONFile(
+export default function downloadJSONFile<Data extends Record<string, unknown>>(
fileName: string,
- data: Record<string, unknown>
+ data: Data
): void {
const dataURI: string = `data:text/json;charset=utf-8,${encodeURIComponent(JSON.stringify(data))}`;
const anchorElement: HTMLAnchorElement = document.createElement('a');
diff --git a/cmd/core/main.go b/cmd/core/main.go
index b38fb68..4cdb298 100644
--- a/cmd/core/main.go
+++ b/cmd/core/main.go
@@ -2,8 +2,8 @@ package main
import (
"aether/internal/constants"
- "aether/internal/files"
"aether/internal/routes"
+ "aether/internal/utils"
"fmt"
"github.com/labstack/echo/v4"
"github.com/labstack/echo/v4/middleware"
@@ -14,28 +14,24 @@ func main() {
e := echo.New()
// create the root files directory
- writeError := files.CreateDirectory(constants.RootFileDirectory)
- if writeError != nil {
- e.Logger.Fatal(writeError.Error)
- }
-
- // create the temp files directory
- writeError = files.CreateDirectory(constants.TempFileDirectory)
- if writeError != nil {
- e.Logger.Fatal(writeError.Error)
+ err := utils.CreateDir(constants.RootFileDirectory)
+ if err != nil {
+ e.Logger.Fatal(err)
}
// middlewares
e.Use(middleware.Logger())
e.Use(middleware.CORS()) // allow any origin, obviously a major security loophole, but this is just an experiment :)
+ // /files
+ e.GET(constants.FilesRoute, routes.NewGetFilesRoute())
// /files/upload
- e.POST(fmt.Sprint(constants.FilesRoute, constants.UploadRoute), routes.NewFilesUploadRoute())
+ e.POST(fmt.Sprint(constants.FilesRoute, constants.UploadRoute), routes.NewPostFilesUploadRoute())
// /versions
- e.GET(constants.VersionsRoute, routes.NewVersionsRoute())
+ e.GET(constants.VersionsRoute, routes.NewGetVersionsRoute())
// start the server
- err := e.Start(fmt.Sprintf(":%s", os.Getenv("PORT")))
+ err = e.Start(fmt.Sprintf(":%s", os.Getenv("PORT")))
if err != nil {
e.Logger.Fatal(err)
}
diff --git a/configs/.env.core.example b/configs/.env.core.example
index 237850f..f6d40fc 100644
--- a/configs/.env.core.example
+++ b/configs/.env.core.example
@@ -2,3 +2,4 @@
ENVIRONMENT="development"
NAME="aether-core"
PORT="3000"
+VERSION=
diff --git a/configs/.env.web.example b/configs/.env.web.example
index 9e58978..8f1e90b 100644
--- a/configs/.env.web.example
+++ b/configs/.env.web.example
@@ -4,8 +4,9 @@ NAME="aether-web"
PORT="8080"
# public
-NEXT_PUBLIC_DESCRIPTION="Aether is a modern file storage platform that implements a state-of-the-art Merkle tree structure to store multiple files"
+NEXT_PUBLIC_CORE_URL="http://localhost:3000"
+NEXT_PUBLIC_DESCRIPTION="aether is a modern file storage platform that implements a state-of-the-art Merkle tree structure to store multiple files"
NEXT_PUBLIC_LOG_LEVEL="debug"
-NEXT_PUBLIC_TAGLINE="Journey into the Aether where storage defies conventional bounds"
-NEXT_PUBLIC_TITLE="Aether"
+NEXT_PUBLIC_TAGLINE="Journey into the aether where storage defies conventional bounds"
+NEXT_PUBLIC_TITLE="aether"
NEXT_PUBLIC_VERSION=
diff --git a/deployments/docker-compose.yml b/deployments/docker-compose.yml
index 7e887c5..8e765b7 100644
--- a/deployments/docker-compose.yml
+++ b/deployments/docker-compose.yml
@@ -19,7 +19,6 @@ services:
- "${CORE_APP_PORT:-3000}:${CORE_APP_PORT:-3000}"
volumes:
- ../.files:/usr/app/.files:cached
- - ../.temp:/usr/app/.temp:cached
- ../cmd:/usr/app/cmd:cached
- ../internal:/usr/app/internal:cached
diff --git a/internal/constants/storage.go b/internal/constants/files.go
similarity index 59%
rename from internal/constants/storage.go
rename to internal/constants/files.go
index 458b243..cef5e93 100644
--- a/internal/constants/storage.go
+++ b/internal/constants/files.go
@@ -1,6 +1,6 @@
package constants
const (
+ DirectoryFileName string = "directory.json"
RootFileDirectory string = ".files"
- TempFileDirectory string = ".temp"
)
diff --git a/internal/files/hashing.go b/internal/files/hashing.go
deleted file mode 100644
index d0d84df..0000000
--- a/internal/files/hashing.go
+++ /dev/null
@@ -1,21 +0,0 @@
-package files
-
-import (
- "aether/internal/errors"
- "crypto/sha256"
- "encoding/hex"
- "fmt"
- "io"
- "mime/multipart"
-)
-
-// HashFile Creates a hex encoded SHA-256 hash of the file
-func HashFile(f multipart.File, filename string) (string, *errors.HashError) {
- h := sha256.New()
-
- if _, err := io.Copy(h, f); err != nil {
- return "", errors.NewHashError(fmt.Sprintf("unable to hash file %s", filename), err)
- }
-
- return hex.EncodeToString(h.Sum(nil)), nil
-}
diff --git a/internal/files/storage.go b/internal/files/storage.go
deleted file mode 100644
index 8b50067..0000000
--- a/internal/files/storage.go
+++ /dev/null
@@ -1,24 +0,0 @@
-package files
-
-import (
- "aether/internal/errors"
- "fmt"
- "os"
-)
-
-// CreateDirectory Creates the directory if it doesn't exist
-func CreateDirectory(dirName string) *errors.WriteError {
- _, err := os.Stat(dirName)
- if os.IsNotExist(err) {
- fmt.Printf("file directory %s does not exist, creating a new one", dirName)
-
- err = os.Mkdir(dirName, 0755)
- if err != nil {
- return errors.NewWriteError(fmt.Sprintf("failed to create directory %s", dirName), err)
- }
-
- fmt.Printf("created file directory %s", dirName)
- }
-
- return nil
-}
diff --git a/internal/files/storage_test.go b/internal/files/storage_test.go
deleted file mode 100644
index e29e77f..0000000
--- a/internal/files/storage_test.go
+++ /dev/null
@@ -1,25 +0,0 @@
-package files
-
-import (
- "aether/internal/constants"
- "log"
- "os"
- "testing"
-)
-
-func TestGetRootFilesDirectory(t *testing.T) {
- rootFileDir, writeError := GetRootFilesDirectory()
- if writeError != nil {
- log.Fatal(writeError)
- }
-
- if rootFileDir != constants.RootFileDirectory {
- t.Errorf("expect result: %s, actual result: %s", rootFileDir, constants.RootFileDirectory)
- }
-
- // clean up
- err := os.RemoveAll(rootFileDir)
- if err != nil {
- log.Fatal(err)
- }
-}
diff --git a/internal/merkletree/merkletree.go b/internal/merkletree/merkletree.go
index c9f505d..e053aa9 100644
--- a/internal/merkletree/merkletree.go
+++ b/internal/merkletree/merkletree.go
@@ -2,20 +2,49 @@ package merkletree
import (
"aether/internal/constants"
+ "aether/internal/types"
"crypto/sha256"
"encoding/hex"
+ "math"
)
-// Given the hash and the merkletree, determine whether the leaf node is a left or
-// right node
-func determineLeafNodeDirectionFromHash(merkletree [][]string, leafHash string, level int) int {
- for nodeIndex, value := range merkletree[level] { // we are only concerned with the bottom level
- if value == leafHash {
- if nodeIndex%2 == 0 { // if the index is even, this will be a left node, otherwise (odd) will be a right leaf
- return constants.Left
- }
+/**
+ private functions
+*/
- return constants.Right
+func buildMerkleTree(hashes []string, tree [][]string) [][]string {
+ var newTreeLevel []string
+
+ // if the hashes only contain one hash, we are at the root, so return the tree with the root appended
+ if len(hashes) == 1 {
+ return append(tree, hashes)
+ }
+
+ // make sure there is an even number of hashes
+ preparedHashes := prepareHashes(hashes)
+ tree = append(tree, preparedHashes)
+
+ // for each left and right hash, concatenate and make a new hash and add it to the new level of the tree
+ for i := 0; i < len(hashes); i += 2 {
+ newTreeLevel = append(newTreeLevel, createHashPair(preparedHashes[i], preparedHashes[i+1]))
+ }
+
+ // recursively call the function with the new level
+ return buildMerkleTree(newTreeLevel, tree)
+}
+
+func createHashPair(leftHash string, rightHash string) string {
+ hashPair := leftHash + rightHash
+ hash := sha256.Sum256([]byte(hashPair))
+
+ return hex.EncodeToString(hash[:])
+}
+
+// findLeafIndexInTreeLevel Simply gets the index of the leaf in the level. If the leaf doesn't exist in the level, -1 is returned.
+func findLeafIndexInTreeLevel(leaf string, level []string) int {
+ for index, value := range level {
+ if value == leaf {
+ return index
}
}
@@ -23,7 +52,17 @@ func determineLeafNodeDirectionFromHash(merkletree [][]string, leafHash string,
return -1
}
-// A Merkle Tree must have an even number of nodes, if the number of hashes is odd, we append the last hash to the end
+// determineLeafPosition Convenience function that determines whether a leaf index is a left or right leaf. Even indexes are
+// left leaves whereas odd indexes equate to right leaves.
+func determineLeafPosition(index int) int {
+ if index%2 == 0 { // if the index is even, this will be a left node, otherwise (odd) will be a right leaf
+ return constants.Left
+ }
+
+ return constants.Right
+}
+
+// prepareHashes A Merkle Tree must have an even number of nodes, if the number of hashes is odd, we append the last hash to the end
// to make it even
func prepareHashes(hashes []string) []string {
if len(hashes)%2 != 0 {
@@ -33,32 +72,118 @@ func prepareHashes(hashes []string) []string {
return hashes
}
-// GenerateMerkleTreeRoot Generates a Merkle Tree root from a list of hashes. This function takes several steps:
+// generateMerkleTree Generates a Merkle Tree from a list of hashes. This function takes several steps:
// 1. Ensures the list of hashes is even by duplicating the last hash and appending it to the end if it is an odd length
-// 2. Concatenates each pair and hashes this pair to create a new parent node
-// 3. Recursively repeats the above steps until a root node (a level with one node) is achieved. This root is returned.
-func GenerateMerkleTreeRoot(hashes []string) string {
- var newTreeLevel []string
+// 2. Concatenates each pair and hashes this pair to create a new parent node.
+// 3. Recursively repeats the above steps until a root node (a level with one node) is achieved.
+func generateMerkleTree(hashes []string) [][]string {
+ // for empty hashes, return nil, we cannot make a merkle tree with no hashes
+ if len(hashes) <= 0 {
+ return nil
+ }
+
+ // recursively build the merkle tree
+ return buildMerkleTree(hashes, [][]string{})
+}
+
+/**
+ public functions
+*/
+
+func GenerateMerkleTreeProof(hash string, hashes []string) []*types.MerkleTreeProofItem {
+ var siblingIndex int
+
+ if len(hashes) <= 0 {
+ return nil
+ }
+
+ // generate the merkle tree
+ tree := generateMerkleTree(hashes)
+
+ // get the index of the leaf
+ leafIndex := findLeafIndexInTreeLevel(hash, tree[0])
+
+ // if the leaf is not in the tree, the proof is invalid
+ if leafIndex < 0 {
+ return nil
+ }
+
+ // add the current hash to the proof
+ proof := []*types.MerkleTreeProofItem{{
+ Hash: hash,
+ Position: determineLeafPosition(leafIndex),
+ }}
+ // traverse the tree adding the necessary nodes to the proof
+ for i := 0; i < len(tree)-1; i++ {
+ // find out the position of this leaf
+ leafPosition := determineLeafPosition(leafIndex)
+
+ // get the index of the leaf's sibling: for left leaves, the sibling is to the right in the slice (index + 1); for right leaves, the sibling is to the left (index - 1)
+ if leafPosition == constants.Left {
+ siblingIndex = leafIndex + 1
+ } else {
+ siblingIndex = leafIndex - 1
+ }
+
+ // add the sibling to the proof
+ proof = append(proof, &types.MerkleTreeProofItem{
+ Hash: tree[i][siblingIndex],
+ Position: determineLeafPosition(siblingIndex),
+ })
+
+ leafIndex = int(math.Floor(float64(leafIndex / 2)))
+ }
+
+ return proof
+}
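+
+// Illustrative example: for four hypothetical leaf hashes [hA, hB, hC, hD], the
+// tree levels are [[hA, hB, hC, hD], [hAB, hCD], [root]], and the proof for hC is
+//   [{hC, Left}, {hD, Right}, {hAB, Left}]
+// i.e. the leaf itself followed by one sibling per level, each tagged with the
+// side it occupies when the pair is concatenated and re-hashed.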
+
+// GenerateMerkleTreeRoot Generates a Merkle Tree from a list of hashes and simply returns the root.
+func GenerateMerkleTreeRoot(hashes []string) string {
+ // if there are no hashes, return an empty string
if len(hashes) <= 0 {
return ""
}
- // make sure there is an even number of hashes
- preparedHashes := prepareHashes(hashes)
+ // generate the merkle tree
+ tree := generateMerkleTree(hashes)
- // for each left and right hash, concatenate and make a new hash and add it to the new level of the tree
- for i := 0; i < len(preparedHashes); i += 2 {
- hashPair := preparedHashes[i] + preparedHashes[i+1]
- hash := sha256.Sum256([]byte(hashPair))
- newTreeLevel = append(newTreeLevel, hex.EncodeToString(hash[:]))
+ // the root node will be the last element
+ rootNode := tree[len(tree)-1]
+
+ // if there is more than one node in the last level, something went wrong
+ if len(rootNode) > 1 {
+ return ""
+ }
+
+ return rootNode[0]
+}
+
+func VerifyMerkleTreeProof(root string, proof []*types.MerkleTreeProofItem) bool {
+ var leafHash string
+
+ // if the merkle tree proof is empty, the root is not in there :P
+ if len(proof) <= 0 {
+ return false
}
- // if the new tree level has only one node, we have the root
- if len(newTreeLevel) == 1 {
- return newTreeLevel[0]
+ for index, value := range proof {
+ // if we are at the first element just use the first hash
+ if index == 0 {
+ leafHash = value.Hash
+ continue
+ }
+
+ // if the next leaf is a left leaf, create the parent
+ if value.Position == constants.Left {
+ leafHash = createHashPair(value.Hash, leafHash)
+ continue
+ }
+
+ // if the next leaf is a right leaf, create the parent
+ leafHash = createHashPair(leafHash, value.Hash)
}
- // if there is more than one level, we have more levels to go to get to the root
- return GenerateMerkleTreeRoot(newTreeLevel)
+ // check if the traversed hash matches the supplied root
+ return leafHash == root
}
diff --git a/internal/merkletree/merkletree_test.go b/internal/merkletree/merkletree_test.go
index 9b98bce..10a4b49 100644
--- a/internal/merkletree/merkletree_test.go
+++ b/internal/merkletree/merkletree_test.go
@@ -1,30 +1,59 @@
package merkletree
import (
- "fmt"
"testing"
)
-var hashes []string
+var emptyHashes []string
+var oneHashInHashes = []string{
+ "d9f893bed0c563e78a5b225dbfd642b3957a56879ee7afed605479025847af50",
+}
+var pairOfHashes = []string{
+ "e9616a8f682133fe550840eedecb0492a8c209044b6644dc999738b64a6a11aa",
+ "08e8378e98dd1b8c81992a113c73e3b50a42aa24f744f984adc3b5b28fc690ed",
+}
+var oddNumberOfHashes = []string{
+ "e9616a8f682133fe550840eedecb0492a8c209044b6644dc999738b64a6a11aa",
+ "08e8378e98dd1b8c81992a113c73e3b50a42aa24f744f984adc3b5b28fc690ed",
+ "f17a98f8dcb95ea1fb3a7016ef08301f3482eb89ec7d1e43164bdcf1cfac323e",
+ "522b2aa04d3541d8d67d382d659c992314620d34807b9439090708c2519fb232",
+ "59fa57badd1a68045672bf90360e8a7fb401709149878e837a91ac85ccf5031d",
+ "79a61238173fb912c6a9e251081768b8f13bf80d81afdafe8572269bd352c58e",
+ "66da398be63468af7d410cd03df2f0c6def65ef78542f91a3c02dc955c7c0205",
+ "3a38a370a8fd060daf623e7985c55c994d1387a0a7b5ef2d740e89a9b3cb73d3",
+ "d681f09b4e03ee9e887e30dc3fb7307df143ed7c213272f87192714ecbab2f63",
+ "d90ac811fa0da57444d997a770e0d7fa296c6b7978f384ba82bdd70fa3f50776",
+ "9291313a9f9f9cff6760d868726135a6af2a82d70cf549de65d33f4362230a98",
+}
+var evenNumberOfHashes = []string{
+ "e9616a8f682133fe550840eedecb0492a8c209044b6644dc999738b64a6a11aa",
+ "08e8378e98dd1b8c81992a113c73e3b50a42aa24f744f984adc3b5b28fc690ed",
+ "f17a98f8dcb95ea1fb3a7016ef08301f3482eb89ec7d1e43164bdcf1cfac323e",
+ "522b2aa04d3541d8d67d382d659c992314620d34807b9439090708c2519fb232",
+ "59fa57badd1a68045672bf90360e8a7fb401709149878e837a91ac85ccf5031d",
+ "79a61238173fb912c6a9e251081768b8f13bf80d81afdafe8572269bd352c58e",
+ "66da398be63468af7d410cd03df2f0c6def65ef78542f91a3c02dc955c7c0205",
+ "3a38a370a8fd060daf623e7985c55c994d1387a0a7b5ef2d740e89a9b3cb73d3",
+ "d681f09b4e03ee9e887e30dc3fb7307df143ed7c213272f87192714ecbab2f63",
+ "d90ac811fa0da57444d997a770e0d7fa296c6b7978f384ba82bdd70fa3f50776",
+ "9291313a9f9f9cff6760d868726135a6af2a82d70cf549de65d33f4362230a98",
+ "68e6cdf0cae7fb8eef39cc899c8882e34dd1727a2d08f2303811886949c539e6",
+}
func TestGenerateMerkleTreeRootForEmptyHashes(t *testing.T) {
const expectedMerkleRoot string = ""
- hashes = []string{}
- actualMerkleRoot := GenerateMerkleTreeRoot(hashes)
+ actualMerkleRoot := GenerateMerkleTreeRoot(emptyHashes)
if expectedMerkleRoot != actualMerkleRoot {
t.Errorf("expect result: %s, actual result: %s", expectedMerkleRoot, actualMerkleRoot)
}
}
-func TestGenerateMerkleTreeRootForHash(t *testing.T) {
+func TestGenerateMerkleTreeRootForOneHash(t *testing.T) {
const expectedMerkleRoot string = "d9f893bed0c563e78a5b225dbfd642b3957a56879ee7afed605479025847af50"
- hashes = []string{
- "e9616a8f682133fe550840eedecb0492a8c209044b6644dc999738b64a6a11aa",
- }
- actualMerkleRoot := GenerateMerkleTreeRoot(hashes)
+ actualMerkleRoot := GenerateMerkleTreeRoot(oneHashInHashes)
if expectedMerkleRoot != actualMerkleRoot {
t.Errorf("expect result: %s, actual result: %s", expectedMerkleRoot, actualMerkleRoot)
@@ -34,11 +63,7 @@ func TestGenerateMerkleTreeRootForHash(t *testing.T) {
func TestGenerateMerkleTreeRootForAPairOfHashes(t *testing.T) {
const expectedMerkleRoot string = "e818f4f035a41a36a574e42dc6986e730b70b7c8473715c0e6171c3ee6e50f26"
- hashes = []string{
- "e9616a8f682133fe550840eedecb0492a8c209044b6644dc999738b64a6a11aa",
- "08e8378e98dd1b8c81992a113c73e3b50a42aa24f744f984adc3b5b28fc690ed",
- }
- actualMerkleRoot := GenerateMerkleTreeRoot(hashes)
+ actualMerkleRoot := GenerateMerkleTreeRoot(pairOfHashes)
if expectedMerkleRoot != actualMerkleRoot {
t.Errorf("expect result: %s, actual result: %s", expectedMerkleRoot, actualMerkleRoot)
@@ -48,20 +73,7 @@ func TestGenerateMerkleTreeRootForAPairOfHashes(t *testing.T) {
func TestGenerateMerkleTreeRootForOddNumberOfHashes(t *testing.T) {
const expectedMerkleRoot string = "1ddc0cfe8640ab380b5dbb186b1e84011c3b6bcf7a79be2b9257fec31ca606c3"
- hashes = []string{
- "e9616a8f682133fe550840eedecb0492a8c209044b6644dc999738b64a6a11aa",
- "08e8378e98dd1b8c81992a113c73e3b50a42aa24f744f984adc3b5b28fc690ed",
- "f17a98f8dcb95ea1fb3a7016ef08301f3482eb89ec7d1e43164bdcf1cfac323e",
- "522b2aa04d3541d8d67d382d659c992314620d34807b9439090708c2519fb232",
- "59fa57badd1a68045672bf90360e8a7fb401709149878e837a91ac85ccf5031d",
- "79a61238173fb912c6a9e251081768b8f13bf80d81afdafe8572269bd352c58e",
- "66da398be63468af7d410cd03df2f0c6def65ef78542f91a3c02dc955c7c0205",
- "3a38a370a8fd060daf623e7985c55c994d1387a0a7b5ef2d740e89a9b3cb73d3",
- "d681f09b4e03ee9e887e30dc3fb7307df143ed7c213272f87192714ecbab2f63",
- "d90ac811fa0da57444d997a770e0d7fa296c6b7978f384ba82bdd70fa3f50776",
- "9291313a9f9f9cff6760d868726135a6af2a82d70cf549de65d33f4362230a98",
- }
- actualMerkleRoot := GenerateMerkleTreeRoot(hashes)
+ actualMerkleRoot := GenerateMerkleTreeRoot(oddNumberOfHashes)
if expectedMerkleRoot != actualMerkleRoot {
t.Errorf("expect result: %s, actual result: %s", expectedMerkleRoot, actualMerkleRoot)
@@ -71,25 +83,63 @@ func TestGenerateMerkleTreeRootForOddNumberOfHashes(t *testing.T) {
func TestGenerateMerkleTreeRootForEvenNumberOfHashes(t *testing.T) {
const expectedMerkleRoot string = "0c2fdf0d51ab5a8f9577ba8909c9815d73c4845fab332af9c671e98d8a3a3971"
- hashes = []string{
- "e9616a8f682133fe550840eedecb0492a8c209044b6644dc999738b64a6a11aa",
- "08e8378e98dd1b8c81992a113c73e3b50a42aa24f744f984adc3b5b28fc690ed",
- "f17a98f8dcb95ea1fb3a7016ef08301f3482eb89ec7d1e43164bdcf1cfac323e",
- "522b2aa04d3541d8d67d382d659c992314620d34807b9439090708c2519fb232",
- "59fa57badd1a68045672bf90360e8a7fb401709149878e837a91ac85ccf5031d",
- "79a61238173fb912c6a9e251081768b8f13bf80d81afdafe8572269bd352c58e",
- "66da398be63468af7d410cd03df2f0c6def65ef78542f91a3c02dc955c7c0205",
- "3a38a370a8fd060daf623e7985c55c994d1387a0a7b5ef2d740e89a9b3cb73d3",
- "d681f09b4e03ee9e887e30dc3fb7307df143ed7c213272f87192714ecbab2f63",
- "d90ac811fa0da57444d997a770e0d7fa296c6b7978f384ba82bdd70fa3f50776",
- "9291313a9f9f9cff6760d868726135a6af2a82d70cf549de65d33f4362230a98",
- "68e6cdf0cae7fb8eef39cc899c8882e34dd1727a2d08f2303811886949c539e6",
- }
- actualMerkleRoot := GenerateMerkleTreeRoot(hashes)
-
- fmt.Println(actualMerkleRoot)
+ actualMerkleRoot := GenerateMerkleTreeRoot(evenNumberOfHashes)
if expectedMerkleRoot != actualMerkleRoot {
t.Errorf("expect result: %s, actual result: %s", expectedMerkleRoot, actualMerkleRoot)
}
}
+
+func TestVerifyMerkleTreeProofForOneHash(t *testing.T) {
+ merkleRoot := GenerateMerkleTreeRoot(oneHashInHashes)
+ merkleProof := GenerateMerkleTreeProof(oneHashInHashes[0], oneHashInHashes)
+
+ if !VerifyMerkleTreeProof(merkleRoot, merkleProof) {
+ t.Errorf("expect root: %s, to be verified in proof", merkleRoot)
+ }
+}
+
+func TestVerifyMerkleTreeProofForAPairOfHashes(t *testing.T) {
+ merkleRoot := GenerateMerkleTreeRoot(pairOfHashes)
+ merkleProof := GenerateMerkleTreeProof(pairOfHashes[0], pairOfHashes)
+
+ if !VerifyMerkleTreeProof(merkleRoot, merkleProof) {
+ t.Errorf("expect root: %s, to be verified in proof", merkleRoot)
+ }
+}
+
+func TestVerifyMerkleTreeProofForOddNumberOfHashes(t *testing.T) {
+ merkleRoot := GenerateMerkleTreeRoot(oddNumberOfHashes)
+ merkleProof := GenerateMerkleTreeProof(oddNumberOfHashes[0], oddNumberOfHashes)
+
+ if !VerifyMerkleTreeProof(merkleRoot, merkleProof) {
+ t.Errorf("expect root: %s, to be verified in proof", merkleRoot)
+ }
+}
+
+func TestVerifyMerkleTreeProofForEvenNumberOfHashes(t *testing.T) {
+ merkleRoot := GenerateMerkleTreeRoot(evenNumberOfHashes)
+ merkleProof := GenerateMerkleTreeProof(evenNumberOfHashes[0], evenNumberOfHashes)
+
+ if !VerifyMerkleTreeProof(merkleRoot, merkleProof) {
+ t.Errorf("expect root: %s, to be verified in proof", merkleRoot)
+ }
+}
+
+func TestVerifyMerkleTreeProofWithTheLastHashForAnOddNumberOfHashes(t *testing.T) {
+ merkleRoot := GenerateMerkleTreeRoot(oddNumberOfHashes)
+ merkleProof := GenerateMerkleTreeProof(oddNumberOfHashes[len(oddNumberOfHashes)-1], oddNumberOfHashes)
+
+ if !VerifyMerkleTreeProof(merkleRoot, merkleProof) {
+ t.Errorf("expect root: %s, to be verified in proof", merkleRoot)
+ }
+}
+
+func TestVerifyMerkleTreeProofWithTheLastHashForAnEvenNumberOfHashes(t *testing.T) {
+ merkleRoot := GenerateMerkleTreeRoot(evenNumberOfHashes)
+ merkleProof := GenerateMerkleTreeProof(evenNumberOfHashes[len(evenNumberOfHashes)-1], evenNumberOfHashes)
+
+ if !VerifyMerkleTreeProof(merkleRoot, merkleProof) {
+ t.Errorf("expect root: %s, to be verified in proof", merkleRoot)
+ }
+}
diff --git a/internal/routes/files.go b/internal/routes/files.go
index f68d142..be73aa5 100644
--- a/internal/routes/files.go
+++ b/internal/routes/files.go
@@ -3,80 +3,56 @@ package routes
import (
"aether/internal/constants"
"aether/internal/errors"
- internalfiles "aether/internal/files"
"aether/internal/merkletree"
"aether/internal/types"
"aether/internal/utils"
"fmt"
"github.com/labstack/echo/v4"
- "io"
"net/http"
- "os"
+ "sort"
)
-func copyFiles(srcDir string, destDir string, fileMetadatas []*types.FileMetadata) error {
- // create a new directory at the root .files directory with the merkle tree root as the name
- writeError := internalfiles.CreateDirectory(destDir)
- if writeError != nil {
- fmt.Println(writeError)
+func NewGetFilesRoute() echo.HandlerFunc {
+ return func(c echo.Context) error {
+ var filesResponse map[string][]*types.FileResponse
+ var hashes []string
+ var readError *errors.ReadError
- // attempt to clean up
- err := removeTempFiles(srcDir)
+ fileDirectories, err := utils.GetAllFileDirectories()
if err != nil {
- return err
- }
+ readError = errors.NewReadError("failed to get file directories", err)
- return writeError.Error
- }
-
- for _, fileMetadata := range fileMetadatas {
- // read the source file
- srcFile, err := os.Open(fmt.Sprintf("%s/%s", srcDir, fileMetadata.FileName))
- if err != nil {
- return err
- }
- defer srcFile.Close()
+ fmt.Println(readError)
- // create the destination file
- destFile, err := os.Create(fmt.Sprintf("%s/%s", destDir, fileMetadata.FileName))
- if err != nil {
- return err
+ return c.JSON(http.StatusInternalServerError, readError)
}
- defer destFile.Close()
- // copy the contents over
- _, err = io.Copy(destFile, srcFile)
- if err != nil {
- return err
- }
- }
+ filesResponse = map[string][]*types.FileResponse{}
- return nil
-}
-
-func extractFileHashesFromFileMetadata(fileMetadatas []*types.FileMetadata) []string {
- hashes := make([]string, len(fileMetadatas))
-
- for i := range fileMetadatas {
- hashes[i] = fileMetadatas[i].Hash
- }
+ // add the merkle proofs to the file directories
+ for key, fileDirectory := range fileDirectories {
+ hashes = utils.ExtractHashesFromFileDirectory(fileDirectory)
+ sort.Slice(hashes, func(i int, j int) bool {
+ return hashes[i] < hashes[j]
+ })
- return hashes
-}
+ for _, value := range fileDirectory {
+ filesResponse[key] = append(filesResponse[key], &types.FileResponse{
+ Hash: value.Hash,
+ Name: value.Name,
+ Proof: merkletree.GenerateMerkleTreeProof(value.Hash, hashes),
+ })
+ }
+ }
-func removeTempFiles(tempDir string) error {
- err := os.RemoveAll(fmt.Sprintf("%s", tempDir)) // the entire .temp/ subdirectory
- if err != nil {
- return err
+ return c.JSON(http.StatusOK, filesResponse)
}
-
- return nil
}
-func NewFilesUploadRoute() echo.HandlerFunc {
+func NewPostFilesUploadRoute() echo.HandlerFunc {
return func(c echo.Context) error {
- var fileMetadatas []*types.FileMetadata
- var hashError *errors.HashError
+ var fileDirectory []*types.FileDirectoryItem
+ var files []*types.FileReadData
var readError *errors.ReadError
var writeError *errors.WriteError
@@ -87,27 +63,9 @@ func NewFilesUploadRoute() echo.HandlerFunc {
return c.JSON(http.StatusInternalServerError, err)
}
- formFiles := form.File["files"]
- tempSubDir, err := utils.CreateRandomSha256Hash()
- if err != nil {
- hashError = errors.NewHashError("failed to create temp sub-directory hash", err)
+ formFiles := form.File["file"]
- fmt.Println(hashError)
-
- return c.JSON(http.StatusInternalServerError, hashError)
- }
-
- tempDir := fmt.Sprintf("%s/%s", constants.TempFileDirectory, tempSubDir)
-
- // create the temp subdirectory where the files will be stored before the merkle root is calculated
- writeError = internalfiles.CreateDirectory(tempDir)
- if writeError != nil {
- fmt.Println(writeError)
-
- return c.JSON(http.StatusInternalServerError, writeError)
- }
-
- // iterate through each file, hash it and copy it to the .temp/ directory
+ // iterate through each file, hash it and copy it to the .files/ directory
for _, fileHeader := range formFiles {
// read file
uploadFile, err := fileHeader.Open()
@@ -121,8 +79,10 @@ func NewFilesUploadRoute() echo.HandlerFunc {
defer uploadFile.Close()
// get a hash of the file
- fileHash, hashError := internalfiles.HashFile(uploadFile, fileHeader.Filename)
- if hashError != nil {
+ fileHash, err := utils.HashFile(uploadFile, fileHeader.Filename)
+ if err != nil {
+ hashError := errors.NewHashError(fmt.Sprintf("unable to hash file %s", fileHeader.Filename), err)
+
fmt.Println(hashError)
return c.JSON(http.StatusInternalServerError, hashError)
@@ -130,64 +90,71 @@ func NewFilesUploadRoute() echo.HandlerFunc {
fmt.Println(fmt.Sprintf("file %s has a hash of %s", fileHeader.Filename, fileHash))
- fileMetadatas = append(fileMetadatas, &types.FileMetadata{
- FileName: fileHeader.Filename,
- Hash: fileHash,
+ // rewind the file so its full contents can be copied to storage later (hashing read it to EOF)
+ if _, err = uploadFile.Seek(0, 0); err != nil {
+ return c.JSON(http.StatusInternalServerError, err)
+ }
+
+ // add files so they can be written to storage when we have the merkle root
+ files = append(files, &types.FileReadData{
+ File: uploadFile,
+ Name: fileHeader.Filename,
})
- // store the file in the .temp/ directory
- tempFile, err := os.Create(fmt.Sprintf("%s/%s", tempDir, fileHeader.Filename))
- if err != nil {
- writeError = errors.NewWriteError(fmt.Sprintf("failed to write file %s", fileHeader.Filename), err)
+ // add to the directory
+ fileDirectory = append(fileDirectory, &types.FileDirectoryItem{
+ Hash: fileHash,
+ Name: fileHeader.Filename,
+ })
+ }
- fmt.Println(writeError)
+ // sort the files by hash
+ sort.Slice(fileDirectory, func(i int, j int) bool {
+ return fileDirectory[i].Hash < fileDirectory[j].Hash
+ })
- return c.JSON(http.StatusInternalServerError, writeError)
- }
- defer tempFile.Close()
+ // create a merkle root
+ merkleRoot := merkletree.GenerateMerkleTreeRoot(utils.ExtractHashesFromFileDirectory(fileDirectory))
- // copy file contents to of the files
- if _, err = io.Copy(tempFile, uploadFile); err != nil {
- writeError = errors.NewWriteError(fmt.Sprintf("failed to write contents of file %s", fileHeader.Filename), err)
+ fmt.Println(fmt.Sprintf("created merkle root %s", merkleRoot))
- fmt.Println(writeError)
+ // if we have an empty merkle root, no files were uploaded
+ if merkleRoot == "" {
+ fmt.Println("empty merkle root there was no files added to directory")
- return c.JSON(http.StatusInternalServerError, writeError)
- }
+ return c.NoContent(http.StatusBadRequest)
}
- // create a merkle tree root
- merkleTreeRoot := merkletree.GenerateMerkleTreeRoot(extractFileHashesFromFileMetadata(fileMetadatas))
- merkleTreeRootDir := fmt.Sprintf("%s/%s", constants.RootFileDirectory, merkleTreeRoot)
+ // create a subdirectory to store the files using the merkle root hash as the directory name
+ dirName := fmt.Sprintf("%s/%s", constants.RootFileDirectory, merkleRoot)
+ err = utils.CreateDir(dirName)
+ if err != nil {
+ writeError = errors.NewWriteError(fmt.Sprintf("failed create the %s directory", dirName), err)
- fmt.Println(fmt.Sprintf("created merkle tree root %s", merkleTreeRoot))
- fmt.Println(fmt.Sprintf("copying files from %s to %s", tempDir, merkleTreeRootDir))
+ fmt.Println(writeError)
- // copy the files from the .temp/ directory to the .files/ merkle tree directory
- err = copyFiles(
- tempDir,
- merkleTreeRootDir,
- fileMetadatas,
- )
+ return c.JSON(http.StatusInternalServerError, writeError)
+ }
+
+ // add the files to the directory
+ err = utils.SaveFilesToDir(dirName, files)
if err != nil {
- writeError = errors.NewWriteError(fmt.Sprintf("failed to copy files from %s to %s", tempDir, merkleTreeRootDir), err)
+ writeError = errors.NewWriteError(fmt.Sprintf("failed write files to %s", dirName), err)
fmt.Println(writeError)
return c.JSON(http.StatusInternalServerError, writeError)
}
- fmt.Println(fmt.Sprintf("cleaning up files from %s", tempDir))
+ fmt.Println(fmt.Sprintf("added files to directory %s", dirName))
- // remove the temp directory files
- err = removeTempFiles(tempDir)
+ // create a file directory json in the directory
+ err = utils.WriteFileDirectoryJSONToStorage(dirName, fileDirectory)
if err != nil {
- fmt.Println(err)
+ fmt.Println(fmt.Sprintf("failed to create file directory json at %s", dirName))
}
- // finally return the merkle tree root
+ fmt.Println(fmt.Sprintf("created file directory json at %s, with %d entries", dirName, len(fileDirectory)))
+
+ // finally return the merkle root and the directory
return c.JSON(http.StatusOK, types.FilesUploadResponse{
- Root: merkleTreeRoot,
+ Directory: fileDirectory,
+ Root: merkleRoot,
})
}
}
diff --git a/internal/routes/versions.go b/internal/routes/versions.go
index cb83c32..5142a96 100644
--- a/internal/routes/versions.go
+++ b/internal/routes/versions.go
@@ -7,7 +7,7 @@ import (
"os"
)
-func NewVersionsRoute() echo.HandlerFunc {
+func NewGetVersionsRoute() echo.HandlerFunc {
return func(c echo.Context) error {
return c.JSON(http.StatusOK, types.VersionsResponse{
Environment: os.Getenv("ENVIRONMENT"),
diff --git a/internal/types/filedirectoryitem.go b/internal/types/filedirectoryitem.go
new file mode 100644
index 0000000..b43cabe
--- /dev/null
+++ b/internal/types/filedirectoryitem.go
@@ -0,0 +1,6 @@
+package types
+
+type FileDirectoryItem struct {
+ Hash string `json:"hash"`
+ Name string `json:"name"`
+}
diff --git a/internal/types/filemetadata.go b/internal/types/filemetadata.go
deleted file mode 100644
index 95942e6..0000000
--- a/internal/types/filemetadata.go
+++ /dev/null
@@ -1,6 +0,0 @@
-package types
-
-type FileMetadata struct {
- FileName string
- Hash string
-}
diff --git a/internal/types/filereaddata.go b/internal/types/filereaddata.go
new file mode 100644
index 0000000..9a86855
--- /dev/null
+++ b/internal/types/filereaddata.go
@@ -0,0 +1,8 @@
+package types
+
+import "io"
+
+type FileReadData struct {
+ File io.Reader
+ Name string
+}
diff --git a/internal/types/fileresponse.go b/internal/types/fileresponse.go
new file mode 100644
index 0000000..b3ebbce
--- /dev/null
+++ b/internal/types/fileresponse.go
@@ -0,0 +1,7 @@
+package types
+
+type FileResponse struct {
+ Hash string `json:"hash"`
+ Name string `json:"name"`
+ Proof []*MerkleTreeProofItem `json:"proof"`
+}
diff --git a/internal/types/filesuploadresponse.go b/internal/types/filesuploadresponse.go
index c9af728..722a939 100644
--- a/internal/types/filesuploadresponse.go
+++ b/internal/types/filesuploadresponse.go
@@ -1,5 +1,6 @@
package types
type FilesUploadResponse struct {
- Root string `json:"root"`
+ Directory []*FileDirectoryItem `json:"directory"`
+ Root string `json:"root"`
}
diff --git a/internal/types/merkletreeproofitem.go b/internal/types/merkletreeproofitem.go
new file mode 100644
index 0000000..a6f8a1d
--- /dev/null
+++ b/internal/types/merkletreeproofitem.go
@@ -0,0 +1,6 @@
+package types
+
+type MerkleTreeProofItem struct {
+ Hash string `json:"hash"`
+ Position int `json:"position"`
+}
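+
+// a single proof item serialises in the /files response as, e.g. (hash truncated,
+// position 0 assumed to mean a left leaf): {"hash":"e9616a8f…","position":0}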
diff --git a/internal/utils/filedirectory.go b/internal/utils/filedirectory.go
new file mode 100644
index 0000000..a6a4043
--- /dev/null
+++ b/internal/utils/filedirectory.go
@@ -0,0 +1,71 @@
+package utils
+
+import (
+ "aether/internal/constants"
+ "aether/internal/types"
+ "encoding/json"
+ "fmt"
+ "os"
+)
+
+func ExtractHashesFromFileDirectory(fileDirectory []*types.FileDirectoryItem) []string {
+ hashes := make([]string, len(fileDirectory))
+
+ for i := range fileDirectory {
+ hashes[i] = fileDirectory[i].Hash
+ }
+
+ return hashes
+}
+
+func GetAllFileDirectories() (map[string][]*types.FileDirectoryItem, error) {
+ var fileDirectories map[string][]*types.FileDirectoryItem
+
+ directories, err := os.ReadDir(constants.RootFileDirectory)
+ if err != nil {
+ return nil, err
+ }
+
+ fileDirectories = map[string][]*types.FileDirectoryItem{}
+
+ for _, dir := range directories {
+ // declared per iteration so json.Unmarshal cannot reuse a previous entry's backing slice
+ var fileDirectory []*types.FileDirectoryItem
+
+ // read the directory file
+ data, err := os.ReadFile(fmt.Sprintf("%s/%s/%s", constants.RootFileDirectory, dir.Name(), constants.DirectoryFileName))
+ if err != nil {
+ return nil, err
+ }
+
+ err = json.Unmarshal(data, &fileDirectory)
+ if err != nil {
+ return nil, err
+ }
+
+ fileDirectories[dir.Name()] = fileDirectory
+ }
+
+ return fileDirectories, nil
+}
+
+func WriteFileDirectoryJSONToStorage(dir string, fileDirectory []*types.FileDirectoryItem) error {
+ // create a file at the directory
+ file, err := os.Create(fmt.Sprintf("%s/%s", dir, constants.DirectoryFileName))
+ if err != nil {
+ return err
+ }
+ defer file.Close()
+
+ // convert the file directory to a json
+ data, err := json.Marshal(fileDirectory)
+ if err != nil {
+ return err
+ }
+
+ // write the json data to the file
+ _, err = file.Write(data)
+ if err != nil {
+ return err
+ }
+
+ return nil
+}
diff --git a/internal/utils/hash.go b/internal/utils/hash.go
index 6f27074..80f9d58 100644
--- a/internal/utils/hash.go
+++ b/internal/utils/hash.go
@@ -3,7 +3,9 @@ package utils
import (
"crypto/rand"
"crypto/sha256"
+ "encoding/hex"
"fmt"
+ "io"
)
// CreateRandomSha256Hash Creates a hex encoded SHA-256 hash of 32 random bytes
@@ -16,3 +18,14 @@ func CreateRandomSha256Hash() (string, error) {
return fmt.Sprintf("%x", sha256.Sum256(bytes)), nil
}
+
+// HashFile Creates a hex encoded SHA-256 hash of the file
+func HashFile(f io.Reader, filename string) (string, error) {
+ h := sha256.New()
+
+ if _, err := io.Copy(h, f); err != nil {
+ return "", err
+ }
+
+ return hex.EncodeToString(h.Sum(nil)), nil
+}
diff --git a/internal/utils/storage.go b/internal/utils/storage.go
new file mode 100644
index 0000000..5da683f
--- /dev/null
+++ b/internal/utils/storage.go
@@ -0,0 +1,42 @@
+package utils
+
+import (
+ "aether/internal/types"
+ "fmt"
+ "io"
+ "os"
+)
+
+func SaveFilesToDir(dirName string, files []*types.FileReadData) error {
+ for _, file := range files {
+ destFile, err := os.Create(fmt.Sprintf("%s/%s", dirName, file.Name))
+ if err != nil {
+ return err
+ }
+ defer destFile.Close()
+
+ _, err = io.Copy(destFile, file.File)
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// CreateDir Creates a directory in storage if it doesn't exist
+func CreateDir(dirName string) error {
+ _, err := os.Stat(dirName)
+ if os.IsNotExist(err) {
+ fmt.Printf("directory %s does not exist, creating a new one", dirName)
+
+ err = os.Mkdir(dirName, 0755)
+ if err != nil {
+ return err
+ }
+
+ fmt.Printf("created new directory %s", dirName)
+ }
+
+ return nil
+}
diff --git a/package.json b/package.json
index 0d87486..e0b93ce 100644
--- a/package.json
+++ b/package.json
@@ -1,7 +1,7 @@
{
"name": "@kieranroneill/aether",
"version": "1.0.0",
- "description": "Aether is a modern file storage platform that implements a state-of-the-art Merkle tree structure to store multiple files",
+ "description": "aether is a modern file storage platform that implements a state-of-the-art Merkle tree structure to store multiple files",
"main": "./.build/index.js",
"repository": {
"type": "git",
@@ -26,8 +26,8 @@
"node": ">=20.9.0"
},
"scripts": {
- "build": "next build",
- "dev": "next dev",
+ "build": "EXT_PUBLIC_VERSION=$npm_package_version next build",
+ "dev": "NEXT_PUBLIC_VERSION=$npm_package_version next dev",
"lint": "next lint",
"prepare": "husky install",
"prettier": "prettier --config .prettierrc --write \"**/*.{js,json,ts,tsx}\"",
diff --git a/tsconfig.json b/tsconfig.json
index bd11d35..743ad97 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -23,6 +23,7 @@
"paths": {
"@app/components/*": ["app/components/*"],
"@app/constants": ["app/constants"],
+ "@app/enums": ["app/enums"],
"@app/fonts": ["app/fonts"],
"@app/hooks/*": ["app/hooks/*"],
"@app/theme": ["app/theme"],