diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py
index a0ea1eee..fc9cc63a 100644
--- a/backend/app/models/__init__.py
+++ b/backend/app/models/__init__.py
@@ -10,7 +10,8 @@ def init_app(app):
     from .user import User
     from .sign_in_logs import SignInLogs
     from .log_records import LogRecords
-    from .tag import Tag
+    from .tags import Tag
+    from .log_record_tags import LogRecordTag
     from .residents import Residents
 
     app.app_context().push()
diff --git a/backend/app/models/log_record_tags.py b/backend/app/models/log_record_tags.py
new file mode 100644
index 00000000..2096f0e3
--- /dev/null
+++ b/backend/app/models/log_record_tags.py
@@ -0,0 +1,32 @@
+from sqlalchemy import inspect
+from sqlalchemy.orm.properties import ColumnProperty
+
+from . import db
+
+
+class LogRecordTag(db.Model):
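+    """Junction table backing the many-to-many relationship between log records and tags."""
+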
+    __tablename__ = "log_record_tag"
+
+    log_record_tag_id = db.Column(db.Integer, primary_key=True, nullable=False)
+    log_record_id = db.Column(db.Integer, db.ForeignKey("log_records.log_id"), nullable=False)
+    tag_id = db.Column(db.Integer, db.ForeignKey("tags.tag_id"), nullable=False)
+
+    def to_dict(self, include_relationships=False):
+        # define the entities table
+        cls = type(self)
+
+        mapper = inspect(cls)
+        formatted = {}
+        for column in mapper.attrs:
+            field = column.key
+            attr = getattr(self, field)
+            # if it's a regular column, extract the value
+            if isinstance(column, ColumnProperty):
+                formatted[field] = attr
+            # otherwise, it's a relationship field
+            # (currently not applicable, but may be useful for entity groups)
+            elif include_relationships:
+                # recursively format the relationship
+                # don't format the relationship's relationships
+                formatted[field] = [obj.to_dict() for obj in attr]
+        return formatted
diff --git a/backend/app/models/log_records.py b/backend/app/models/log_records.py
index eece709f..2bf7c878 100644
--- a/backend/app/models/log_records.py
+++ b/backend/app/models/log_records.py
@@ -13,8 +13,8 @@ class LogRecords(db.Model):
     attn_to = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=True)
     # TODO: replace open String fields with VarChar(NUM_CHARS)
     note = db.Column(db.String, nullable=False)
-    tags = db.Column(db.ARRAY(db.String), nullable=True)
     building = db.Column(db.String, nullable=False)
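+    # tags are linked through the log_record_tag junction table (many-to-many)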
+    tags = db.relationship("Tag", secondary="log_record_tag", back_populates="log_records")
 
     def to_dict(self, include_relationships=False):
         # define the entities table
diff --git a/backend/app/models/tag.py b/backend/app/models/tags.py
similarity index 92%
rename from backend/app/models/tag.py
rename to backend/app/models/tags.py
index ee7da1e0..7636d127 100644
--- a/backend/app/models/tag.py
+++ b/backend/app/models/tags.py
@@ -10,6 +10,7 @@ class Tag(db.Model):
     tag_id = db.Column(db.Integer, primary_key=True, nullable=False)
     name = db.Column(db.String, nullable=False)
     status = db.Column(db.Enum("Deleted", "Active", name="status"), nullable=False)
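+    # reverse side of the many-to-many relationship declared on LogRecords.tags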
+    log_records = db.relationship("LogRecords", secondary="log_record_tag", back_populates="tags")
 
     def to_dict(self, include_relationships=False):
         # define the entities table
diff --git a/backend/app/rest/log_records_routes.py b/backend/app/rest/log_records_routes.py
index f3143164..c2137b64 100644
--- a/backend/app/rest/log_records_routes.py
+++ b/backend/app/rest/log_records_routes.py
@@ -67,7 +67,7 @@ def get_log_records():
 
 
 @blueprint.route("/count", methods=["GET"], strict_slashes=False)
-# @require_authorization_by_role({"Relief Staff", "Regular Staff", "Admin"})
+@require_authorization_by_role({"Relief Staff", "Regular Staff", "Admin"})
 def count_log_records():
     """
     Get number of log records. Can optionally add filters.
diff --git a/backend/app/services/implementations/log_records_service.py b/backend/app/services/implementations/log_records_service.py
index 171e1092..fe7397a7 100644
--- a/backend/app/services/implementations/log_records_service.py
+++ b/backend/app/services/implementations/log_records_service.py
@@ -1,6 +1,6 @@
 from ..interfaces.log_records_service import ILogRecordsService
 from ...models.log_records import LogRecords
-from ...models.user import User
+from ...models.tags import Tag
 from ...models import db
 from datetime import datetime
 from pytz import timezone
@@ -23,15 +23,28 @@ def __init__(self, logger):
         self.logger = logger
 
     def add_record(self, log_record):
-        new_log_record = log_record
+        new_log_record = log_record.copy()
+
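+        # tag names are resolved separately via construct_tags rather than passed to the LogRecords constructor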
+        tag_names = new_log_record.pop("tags", [])
 
         try:
             new_log_record = LogRecords(**new_log_record)
+            self.construct_tags(new_log_record, tag_names)
+
             db.session.add(new_log_record)
             db.session.commit()
             return log_record
         except Exception as postgres_error:
             raise postgres_error
+
+    def construct_tags(self, log_record, tag_names):
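+        # resolve each tag name to an existing Tag row and attach it to the log record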
+        for tag_name in tag_names:
+            tag = Tag.query.filter_by(name=tag_name).first()
+
+            if not tag:
+                raise Exception(f"Tag with name {tag_name} does not exist")
+            log_record.tags.append(tag)
 
     def to_json_list(self, logs):
         try:
@@ -54,7 +67,7 @@ def to_json_list(self, logs):
                             "last_name": log[10]
                         },
                         "note": log[6],
-                        "tags": log[7],
+                        "tags": log[7],
                         "building": log[8],
                     }
                 )
@@ -109,10 +122,12 @@ def filter_by_date_range(self, date_range):
         return sql
 
     def filter_by_tags(self, tags):
-        sql_statement = f"\n'{tags[0]}'=ANY (tags)"
-        for i in range(1, len(tags)):
-            sql_statement = sql_statement + f"\nOR '{tags[i]}'=ANY (tags)"
-        return sql_statement
+        if len(tags) >= 1:
+            sql_statement = f"\n'{tags[0]}'=ANY (tag_names)"
+            for i in range(1, len(tags)):
+                sql_statement = sql_statement + f"\nAND '{tags[i]}'=ANY (tag_names)"
+            return sql_statement
+        return ""
 
     def filter_by_flagged(self, flagged):
         print(flagged)
@@ -142,30 +157,39 @@ def filter_log_records(self, filters=None):
                         if filters.get(filter):
                             sql = sql + "\nAND " + options[filter](filters.get(filter))
         return sql
-
+
+    def join_tag_attributes(self):
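+        # LEFT JOIN a subquery that aggregates each log record's tag names into an array,
+        # exposed as t.tag_names for the outer SELECT and for tag filters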
+        return "\nLEFT JOIN\n \
+                    (SELECT logs.log_id, ARRAY_AGG(tags.name) AS tag_names FROM log_records logs\n \
+                    JOIN log_record_tag lrt ON logs.log_id = lrt.log_record_id\n \
+                    JOIN tags ON lrt.tag_id = tags.tag_id\n \
+                    GROUP BY logs.log_id \n \
+                ) t ON logs.log_id = t.log_id\n"
+
     def get_log_records(
         self, page_number, return_all, results_per_page=10, filters=None
     ):
         try:
             sql = "SELECT\n \
-            logs.log_id,\n \
-            logs.employee_id,\n \
-            CONCAT(residents.initial, residents.room_num) AS resident_id,\n \
-            logs.datetime,\n \
-            logs.flagged,\n \
-            logs.attn_to,\n \
-            logs.note,\n \
-            logs.tags,\n \
-            logs.building,\n \
-            employees.first_name AS employee_first_name,\n \
-            employees.last_name AS employee_last_name,\n \
-            attn_tos.first_name AS attn_to_first_name,\n \
-            attn_tos.last_name AS attn_to_last_name\n \
-            FROM log_records logs\n \
-            LEFT JOIN users attn_tos ON logs.attn_to = attn_tos.id\n \
-            JOIN users employees ON logs.employee_id = employees.id \n \
-            JOIN residents ON logs.resident_id = residents.id"
-
+                logs.log_id,\n \
+                logs.employee_id,\n \
+                CONCAT(residents.initial, residents.room_num) AS resident_id,\n \
+                logs.datetime,\n \
+                logs.flagged,\n \
+                logs.attn_to,\n \
+                logs.note,\n \
+                t.tag_names,\n \
+                logs.building,\n \
+                employees.first_name AS employee_first_name,\n \
+                employees.last_name AS employee_last_name,\n \
+                attn_tos.first_name AS attn_to_first_name,\n \
+                attn_tos.last_name AS attn_to_last_name\n \
+                FROM log_records logs\n \
+                LEFT JOIN users attn_tos ON logs.attn_to = attn_tos.id\n \
+                JOIN users employees ON logs.employee_id = employees.id\n \
+                JOIN residents ON logs.resident_id = residents.id"
+
+            sql += self.join_tag_attributes()
             sql += self.filter_log_records(filters)
 
             sql += "\nORDER BY datetime DESC"
@@ -191,6 +215,8 @@ def count_log_records(self, filters=None):
             FROM log_records logs\n \
             LEFT JOIN users attn_tos ON logs.attn_to = attn_tos.id\n \
             JOIN users employees ON logs.employee_id = employees.id"
+
+            sql += self.join_tag_attributes()
 
             sql += self.filter_log_records(filters)
 
@@ -204,11 +230,13 @@ def count_log_records(self, filters=None):
             raise postgres_error
 
     def delete_log_record(self, log_id):
-        deleted_log_record = LogRecords.query.filter_by(log_id=log_id).delete()
-        if not deleted_log_record:
+        log_record_to_delete = LogRecords.query.filter_by(log_id=log_id).first()
+        if not log_record_to_delete:
             raise Exception(
                 "Log record with id {log_id} not found".format(log_id=log_id)
             )
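+        # detach all tags first so the log_record_tag junction rows are removed with the record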
+        log_record_to_delete.tags = []
+        db.session.delete(log_record_to_delete)
         db.session.commit()
 
     def update_log_record(self, log_id, updated_log_record):
@@ -225,9 +253,10 @@ def update_log_record(self, log_id, updated_log_record):
                 }
             )
         if "tags" in updated_log_record:
-            LogRecords.query.filter_by(log_id=log_id).update(
-                {LogRecords.tags: updated_log_record["tags"]}
-            )
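+            # replace the record's tags: clear the existing links, then rebuild them from the given names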
+            log_record = LogRecords.query.filter_by(log_id=log_id).first()
+            if log_record:
+                log_record.tags = []
+                self.construct_tags(log_record, updated_log_record["tags"])
         else:
             LogRecords.query.filter_by(log_id=log_id).update(
                 {
diff --git a/backend/app/services/implementations/tags_service.py b/backend/app/services/implementations/tags_service.py
index a9389929..6d81f363 100644
--- a/backend/app/services/implementations/tags_service.py
+++ b/backend/app/services/implementations/tags_service.py
@@ -1,5 +1,5 @@
 from ..interfaces.tags_service import ITagsService
-from ...models.tag import Tag
+from ...models.tags import Tag
 from ...models import db
 
 
diff --git a/backend/migrations/versions/65a56c245ad7_create_junction_table_between_log_.py b/backend/migrations/versions/65a56c245ad7_create_junction_table_between_log_.py
new file mode 100644
index 00000000..b9c70c29
--- /dev/null
+++ b/backend/migrations/versions/65a56c245ad7_create_junction_table_between_log_.py
@@ -0,0 +1,41 @@
+"""create junction table between log records and tags
+
+Revision ID: 65a56c245ad7
+Revises: 82f36cdf325f
+Create Date: 2023-05-20 04:29:49.322186
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = '65a56c245ad7'
+down_revision = '82f36cdf325f'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('log_record_tag',
+    sa.Column('log_record_tag_id', sa.Integer(), nullable=False),
+    sa.Column('log_record_id', sa.Integer(), nullable=False),
+    sa.Column('tag_id', sa.Integer(), nullable=False),
+    sa.ForeignKeyConstraint(['log_record_id'], ['log_records.log_id'], ),
+    sa.ForeignKeyConstraint(['tag_id'], ['tags.tag_id'], ),
+    sa.PrimaryKeyConstraint('log_record_tag_id')
+    )
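+    # note: values in the old log_records.tags array column are dropped here rather than migrated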
+    with op.batch_alter_table('log_records', schema=None) as batch_op:
+        batch_op.drop_column('tags')
+
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('log_records', schema=None) as batch_op:
+        batch_op.add_column(sa.Column('tags', postgresql.ARRAY(sa.VARCHAR()), autoincrement=False, nullable=True))
+
+    op.drop_table('log_record_tag')
+    # ### end Alembic commands ###
diff --git a/backend/migrations/versions/8b5132609f1f_merging.py b/backend/migrations/versions/8b5132609f1f_merging.py
new file mode 100644
index 00000000..99188538
--- /dev/null
+++ b/backend/migrations/versions/8b5132609f1f_merging.py
@@ -0,0 +1,24 @@
+"""merging
+
+Revision ID: 8b5132609f1f
+Revises: 24fad25f60e3, 65a56c245ad7
+Create Date: 2023-10-04 23:41:43.310280
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '8b5132609f1f'
+down_revision = ('24fad25f60e3', '65a56c245ad7')
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    pass
+
+
+def downgrade():
+    pass