From 82470f48dee348b501359537bc4ba86887ea7f16 Mon Sep 17 00:00:00 2001
From: cosmoquester
Date: Sat, 30 Sep 2023 18:11:11 +0900
Subject: [PATCH 1/7] build: Add build informations

---
 setup.py | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index bd14f9d..d833f52 100644
--- a/setup.py
+++ b/setup.py
@@ -1,12 +1,22 @@
 from setuptools import find_packages, setup
 
+with open("README.md", "r") as f:
+    long_description = f.read()
+
 setup(
     name="memoria",
     version="0.0.1",
-    description="This repository is template for my python project.",
+    description="Memoria is a Hebbian memory architecture for neural networks.",
+    long_description=long_description,
     python_requires=">=3.7",
     install_requires=["torch"],
     url="https://github.com/cosmoquester/memoria.git",
     author="Park Sangjun",
+    keywords=["memoria", "hebbian", "memory", "transformer"],
+    classifiers=[
+        "Programming Language :: Python :: 3",
+        "License :: OSI Approved :: MIT License",
+        "Topic :: Scientific/Engineering :: Artificial Intelligence",
+    ],
     packages=find_packages(exclude=["tests", "experiment"]),
 )

From 8e67a31d2ed965462aaed86c989b9b74304012cc Mon Sep 17 00:00:00 2001
From: cosmoquester
Date: Sat, 30 Sep 2023 18:11:22 +0900
Subject: [PATCH 2/7] chore: Add __version__

---
 memoria/__init__.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/memoria/__init__.py b/memoria/__init__.py
index 16a68c5..0c68217 100644
--- a/memoria/__init__.py
+++ b/memoria/__init__.py
@@ -5,3 +5,4 @@ from .sparse_tensor import SparseTensor
 
 __all__ = ["utils", "Abstractor", "Engrams", "EngramType", "Memoria", "SparseTensor"]
+__version__ = "0.0.1"

From 734dba0715c66bd0f1f25ab28951aa653a4a11b5 Mon Sep 17 00:00:00 2001
From: cosmoquester
Date: Sat, 30 Sep 2023 18:11:51 +0900
Subject: [PATCH 3/7] build: Set version 1.0.0

---
 memoria/__init__.py | 2 +-
 setup.py            | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/memoria/__init__.py b/memoria/__init__.py
index 0c68217..ede4039 100644
--- a/memoria/__init__.py
+++ b/memoria/__init__.py
@@ -5,4 +5,4 @@ from .sparse_tensor import SparseTensor
 
 __all__ = ["utils", "Abstractor", "Engrams", "EngramType", "Memoria", "SparseTensor"]
-__version__ = "0.0.1"
+__version__ = "1.0.0"
diff --git a/setup.py b/setup.py
index d833f52..05340d0 100644
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@
 
 setup(
     name="memoria",
-    version="0.0.1",
+    version="1.0.0",
     description="Memoria is a Hebbian memory architecture for neural networks.",
     long_description=long_description,
     python_requires=">=3.7",

From 36be54eaf1dca851f97a9a8bddee2209b27bfedd Mon Sep 17 00:00:00 2001
From: cosmoquester
Date: Sat, 30 Sep 2023 18:20:10 +0900
Subject: [PATCH 4/7] docs: Change memoria pip name

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index b737d77..0d265a2 100644
--- a/README.md
+++ b/README.md
@@ -14,7 +14,7 @@ Memoria is an independant module which can be applied to neural network models i
 ## Installation
 
 ```sh
-$ pip install git+ssh://git@github.com/cosmoquester/memoria
+$ pip install memoria-pytorch
 ```
 
 You can install memoria by pip command above.
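After the rename in PATCH 4/7, a quick check of an installed copy is possible. This is a minimal sketch, not part of the patch series, assuming the `memoria-pytorch` wheel ships the same `memoria` package with the `__version__` attribute added in PATCH 2/7 and bumped in PATCH 3/7:

```python
# Post-install sanity check (a sketch; assumes `pip install memoria-pytorch`
# provides the `memoria` package with the `__version__` attribute added above).
import memoria
from memoria import Memoria  # main entry point used in the README tutorial

print(memoria.__version__)  # expected: "1.0.0" after PATCH 3/7
print(Memoria)              # the class should be importable from the top-level package
```

From 02a57d4dfad850c88509cbc8952844b9ec4d01aa Mon Sep 17 00:00:00 2001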
From: cosmoquester
Date: Wed, 4 Oct 2023 15:26:37 +0900
Subject: [PATCH 5/7] docs: Add License badge

---
 README.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/README.md b/README.md
index 0d265a2..4642078 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,6 @@
 # Memoria
 
+[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
 [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
 [![Imports: isort](https://img.shields.io/badge/%20imports-isort-%231674b1?style=flat&labelColor=ef8336)](https://pycqa.github.io/isort/)
 [![CircleCI](https://dl.circleci.com/status-badge/img/gh/cosmoquester/memoria/tree/master.svg?style=svg&circle-token=513f0f5e9a706a51509d198359fe0e016a227ce9)](https://dl.circleci.com/status-badge/redirect/gh/cosmoquester/memoria/tree/master)

From bb8cd3ed72244315762e3b63713422100c708dd6 Mon Sep 17 00:00:00 2001
From: cosmoquester
Date: Wed, 4 Oct 2023 15:40:35 +0900
Subject: [PATCH 6/7] docs: Elaborate on Tutorial

---
 README.md | 69 +++++++++++++++++++++++++++++++++++++++++++------------
 1 file changed, 54 insertions(+), 15 deletions(-)

diff --git a/README.md b/README.md
index 4642078..a0ff035 100644
--- a/README.md
+++ b/README.md
@@ -23,7 +23,8 @@ You can install memoria by pip command above.
 
 ## Tutorial
 
 This is a tutorial to help to understand the concept and mechanism of Memoria.
-Fake random data and lifespan delta are used for simplification.
+
+#### 1. Import Memoria and Set Parameters
 
 ```python
 import torch
 from memoria import EngramType, Memoria
 
 # Memoria Parameters
 num_reminded_stm = 4
 stm_capacity = 16
 ltm_search_depth = 5
 initial_lifespan = 3
 num_final_ltms = 4
 
 # Data Parameters
 batch_size = 2
 sequence_length = 8
 hidden_dim = 64
+```
+
+#### 2. Initialize Memoria and Dummy Data
+
+- Fake random data and lifespan delta are used for simplification.
+
+```python
 memoria = Memoria(
     num_reminded_stm=num_reminded_stm,
     stm_capacity=stm_capacity,
     ltm_search_depth=ltm_search_depth,
     initial_lifespan=initial_lifespan,
     num_final_ltms=num_final_ltms,
 )
 data = torch.rand(batch_size, sequence_length, hidden_dim)
+```
+#### 3. Add Data as Working Memory
+
+```python
 # Add data as working memory
 memoria.add_working_memory(data)
+```
+```python
 # Expected values
-"""
 >>> len(memoria.engrams)
 16
 >>> memoria.engrams.data.shape
 torch.Size([2, 8, 64])
 >>> memoria.engrams.lifespan
 tensor([[3., 3., 3., 3., 3., 3., 3., 3.],
         [3., 3., 3., 3., 3., 3., 3., 3.]])
-"""
+```
+#### 4. Remind Memories
+
+- Empty memories are reminded because there is no engrams in STM/LTM yet
+
+```python
 reminded_memories, reminded_indices = memoria.remind()
+```
+```python
 # No reminded memories because there is no STM/LTM engrams yet
-"""
 >>> reminded_memories
 tensor([], size=(2, 0, 64))
 >>> reminded_indices
 tensor([], size=(2, 0), dtype=torch.int64)
-"""
+```
+
+#### 5. Adjust Lifespan and Memories
+
+- In this step, no engrams earn lifespan because there is no reminded memories
+
+```python
 memoria.adjust_lifespan_and_memories(reminded_indices, torch.zeros_like(reminded_indices))
+```
+```python
 # Decreases lifespan for all engrams & working memories have changed into shortterm memory
-"""
 >>> memoria.engrams.lifespan
 tensor([[2., 2., 2., 2., 2., 2., 2., 2.],
         [2., 2., 2., 2., 2., 2., 2., 2.]])
 >>> memoria.engrams.engrams_types
 tensor([[2, 2, 2, 2, 2, 2, 2, 2],
         [2, 2, 2, 2, 2, 2, 2, 2]], dtype=torch.uint8)
 >>> EngramType.SHORTTERM
-"""
+```
+
+#### 6. Repeat one more time
+- Now, there are some engrams in STM, remind and adjustment from STM will work
+
+```python
 data2 = torch.rand(batch_size, sequence_length, hidden_dim)
 memoria.add_working_memory(data2)
+```
-"""
+```python
 >>> len(memoria.engrams)
 32
 >>> memoria.engrams.lifespan
 tensor([[2., 2., 2., 2., 2., 2., 2., 2., 3., 3., 3., 3., 3., 3., 3., 3.],
         [2., 2., 2., 2., 2., 2., 2., 2., 3., 3., 3., 3., 3., 3., 3., 3.]])
-"""
+```
+```python
 reminded_memories, reminded_indices = memoria.remind()
+```
+```python
 # Remind memories from STM
-"""
 >>> reminded_memories.shape
 torch.Size([2, 6, 64])
 >>> reminded_indices.shape
 torch.Size([2, 6])
 >>> reminded_indices
 tensor([[ 0, 6, 4, 3, 2, -1],
         [ 0, 7, 6, 5, 4, -1]])
-"""
+```
+```python
 # Increase lifespan of all the reminded engrams by 5
 memoria.adjust_lifespan_and_memories(reminded_indices, torch.full_like(reminded_indices, 5))
+```
+```python
 # Reminded engrams got lifespan by 5, other engrams have got older
-"""
 >>> memoria.engrams.lifespan
 >>> memoria.engrams.lifespan
 tensor([[6., 1., 6., 6., 6., 1., 6., 1., 2., 2., 2., 2., 2., 2., 2., 2.],
         [6., 1., 1., 1., 6., 6., 6., 6., 2., 2., 2., 2., 2., 2., 2., 2.]])
-"""
+```
+#### 7. Repeat
+
+- Repeat 10 times to see the dynamics of LTM
+
+```python
 # This is default process to utilize Memoria
 for _ in range(10):
     data = torch.rand(batch_size, sequence_length, hidden_dim)
     memoria.add_working_memory(data)
     reminded_memories, reminded_indices = memoria.remind()
     lifespan_delta = torch.randint_like(reminded_indices, 0, 6).float()
     memoria.adjust_lifespan_and_memories(reminded_indices, lifespan_delta)
+```
+```python
 # After 10 iteration, some engrams have changed into longterm memory and got large lifespan
 # Engram type zero means those engrams are deleted
-"""
 >>> len(memoria.engrams)
 72
 >>> memoria.engrams.engrams_types
 tensor([[ 9.,  1.,  8.,  2., 16.,  5., 13.,  7.,  7.,  3.,  3.,  4.,  3.,  3.,
           3.,  3.,  2.,  2.,  5.,  1.,  1.,  1.,  2.,  1.,  1.,  1.,  1.,  2.,
           1.,  1.,  2.,  2.,  2.,  2.,  2.,  2.],
         [-1., -1.,  3.,  2., 19., 21., 11.,  6., 14.,  1.,  5.,  1.,  5.,  1.,
           5.,  1.,  1.,  8.,  2.,  1.,  1.,  1.,  2.,  1.,  1.,  1.,  1.,  1.,
           2.,  2.,  2.,  2.,  2.,  2.,  2.,  2.]])
-"""
 ```

From 8300e038b434520210eb6de3a7001ed7a5e98a9a Mon Sep 17 00:00:00 2001
From: Sangjun Park
Date: Fri, 6 Oct 2023 10:04:01 +0900
Subject: [PATCH 7/7] docs: Add CITATION information

---
 CITATION.bib |  8 ++++++++
 README.md    | 15 +++++++++++++++
 2 files changed, 23 insertions(+)
 create mode 100644 CITATION.bib

diff --git a/CITATION.bib b/CITATION.bib
new file mode 100644
index 0000000..a086dbb
--- /dev/null
+++ b/CITATION.bib
@@ -0,0 +1,8 @@
+@misc{park2023memoria,
+    title = {Memoria: Hebbian Memory Architecture for Human-Like Sequential Processing},
+    author = {Sangjun Park and JinYeong Bak},
+    year = {2023},
+    eprint = {2310.03052},
+    archiveprefix = {arXiv},
+    primaryclass = {cs.LG}
+}
diff --git a/README.md b/README.md
index a0ff035..63075df 100644
--- a/README.md
+++ b/README.md
@@ -12,6 +12,8 @@ Memoria is a general memory network that applies Hebbian theory which is a major
 
 Memoria is an independant module which can be applied to neural network models in various ways and the experiment code of the paper is in the `experiment` directory.
 
+Please refer to [Memoria: Hebbian Memory Architecture for Human-Like Sequential Processing](https://arxiv.org/abs/2310.03052) for more details about Memoria.
+
 ## Installation
 
 ```sh
@@ -198,3 +200,16 @@ tensor([[ 9.,  1.,  8.,  2., 16.,  5., 13.,  7.,  7.,  3.,  3.,  4.,  3.,  3.,
           5.,  1.,  1.,  8.,  2.,  1.,  1.,  1.,  2.,  1.,  1.,  1.,  1.,  1.,
           2.,  2.,  2.,  2.,  2.,  2.,  2.,  2.]])
 ```
+
+# Citation
+
+```
+@misc{park2023memoria,
+    title = {Memoria: Hebbian Memory Architecture for Human-Like Sequential Processing},
+    author = {Sangjun Park and JinYeong Bak},
+    year = {2023},
+    eprint = {2310.03052},
+    archiveprefix = {arXiv},
+    primaryclass = {cs.LG}
+}
+```