diff --git a/pyperformance/tests/__init__.py b/pyperformance/tests/__init__.py
index 1925203d..4cd20dcb 100644
--- a/pyperformance/tests/__init__.py
+++ b/pyperformance/tests/__init__.py
@@ -1,8 +1,28 @@
 import contextlib
 import errno
 import os
+import subprocess
+import sys
 import tempfile
 
+DATA_DIR = os.path.realpath(os.path.join(os.path.dirname(__file__), 'data'))
+
+
+def run_cmd(cmd):
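+    """Run *cmd*, wait for it, and exit with its code if it fails."""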
+    print("Execute: %s" % ' '.join(cmd))
+    proc = subprocess.Popen(cmd)
+    try:
+        proc.wait()
+    except BaseException:  # kill the child even on KeyboardInterrupt
+        proc.kill()
+        proc.wait()
+        raise
+
+    exitcode = proc.returncode
+    if exitcode:
+        sys.exit(exitcode)
+
 
 @contextlib.contextmanager
 def temporary_file(**kwargs):
diff --git a/pyperformance/tests/data/user_defined_bm/MANIFEST b/pyperformance/tests/data/user_defined_bm/MANIFEST
new file mode 100644
index 00000000..4c944937
--- /dev/null
+++ b/pyperformance/tests/data/user_defined_bm/MANIFEST
@@ -0,0 +1,4 @@
+[benchmarks]
+
+name	metafile
+1	<local>
diff --git a/pyperformance/tests/data/user_defined_bm/base.toml b/pyperformance/tests/data/user_defined_bm/base.toml
new file mode 100644
index 00000000..abdfe357
--- /dev/null
+++ b/pyperformance/tests/data/user_defined_bm/base.toml
@@ -0,0 +1,4 @@
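+# Shared [project] metadata for the sample benchmark definitions.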
+[project]
+dynamic = ["name"]
+version = "1.0.0"
diff --git a/pyperformance/tests/data/user_defined_bm/bm_1/pyproject.toml b/pyperformance/tests/data/user_defined_bm/bm_1/pyproject.toml
new file mode 100644
index 00000000..32b36cbb
--- /dev/null
+++ b/pyperformance/tests/data/user_defined_bm/bm_1/pyproject.toml
@@ -0,0 +1,12 @@
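+# Minimal user-defined benchmark used by test_user_defined_bm.py.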
+[project]
+name = "test_bm_1"
+requires-python = ">=3.8"
+dependencies = ["pyperf"]
+urls = { repository = "https://github.com/python/pyperformance" }
+dynamic = ["version"]
+
+[tool.pyperformance]
+name = "1"
+tags = "test"
+inherits = ".."
diff --git a/pyperformance/tests/test_compare.py b/pyperformance/tests/test_compare.py
index 938add99..14204405 100755
--- a/pyperformance/tests/test_compare.py
+++ b/pyperformance/tests/test_compare.py
@@ -7,24 +7,7 @@
 import unittest
 
 from pyperformance import tests
-
-
-DATA_DIR = os.path.realpath(os.path.join(os.path.dirname(__file__), 'data'))
-
-
-def run_cmd(cmd):
-    print("Execute: %s" % ' '.join(cmd))
-    proc = subprocess.Popen(cmd)
-    try:
-        proc.wait()
-    except:   # noqa
-        proc.kill()
-        proc.wait()
-        raise
-
-    exitcode = proc.returncode
-    if exitcode:
-        sys.exit(exitcode)
+from pyperformance.tests import DATA_DIR, run_cmd
 
 
 class CompareTests(unittest.TestCase):
diff --git a/pyperformance/tests/test_user_defined_bm.py b/pyperformance/tests/test_user_defined_bm.py
new file mode 100644
index 00000000..584367e6
--- /dev/null
+++ b/pyperformance/tests/test_user_defined_bm.py
@@ -0,0 +1,20 @@
+import os.path
+import sys
+import unittest
+
+from pyperformance.tests import DATA_DIR, run_cmd
+
+USER_DEFINED_MANIFEST = os.path.join(DATA_DIR, 'user_defined_bm', 'MANIFEST')
+
+
+class TestBM(unittest.TestCase):
+    def test_user_defined_bm(self):
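+        # Run the full "pyperformance run" pipeline on the sample manifest.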
+        cmd = [sys.executable, '-m', 'pyperformance', 'run',
+               f'--manifest={USER_DEFINED_MANIFEST}']
+
+        run_cmd(cmd)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/runtests.py b/runtests.py
index 5c044e62..8deca355 100755
--- a/runtests.py
+++ b/runtests.py
@@ -77,8 +77,8 @@ def run_bench(*cmd):
 
 def main():
     # Unit tests
-    cmd = [sys.executable, '-u',
-           os.path.join('pyperformance', 'tests', 'test_compare.py')]
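+    # With no arguments, "python -m unittest" runs test discovery.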
+    cmd = [sys.executable, '-u', '-m', 'unittest']
     run_cmd(cmd)
 
     # Functional tests