
Commit 7ad329c: Add run_benchmark to environments
Krigpl committed Jun 24, 2019 (1 parent: f1ac1a3)
Showing 32 changed files with 88 additions and 15 deletions.
Empty file.
codecov.yml: 2 changes (1 addition, 1 deletion)
@@ -14,9 +14,9 @@ coverage:
target: auto
changes: false
ignore:
- "apps/core/benchmark/minilight/*"
- "apps/rendering/resources/taskcollector/*"
- "golem/database/schemas"
- "golem/envs/docker/benchmark/minilight/*"
- "golem/testutils.py"
- "golem/tools/pyuic.py"
- "save/*"
golem/environments/environment.py: 7 changes (2 additions, 5 deletions)
@@ -3,10 +3,9 @@

from os import path

from apps.rendering.benchmark.minilight.src.minilight import make_perf_test

from golem.core.common import get_golem_path
from golem.environments.minperformancemultiplier import MinPerformanceMultiplier
from golem.envs.docker.benchmark.minilight import make_perf_test
from golem.model import Performance


@@ -113,9 +112,7 @@ def get_min_accepted_performance(cls) -> float:
def run_default_benchmark(cls, save=False):
logger = logging.getLogger('golem.task.benchmarkmanager')
logger.info('Running benchmark for %s', cls.get_id())
test_file = path.join(get_golem_path(), 'apps', 'rendering',
'benchmark', 'minilight', 'cornellbox.ml.txt')
performance = make_perf_test(test_file)
performance = make_perf_test()
logger.info('%s performance is %.2f', cls.get_id(), performance)
if save:
Performance.update_or_create(cls.get_id(), performance)
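
For reference, a minimal sketch of the new call pattern, based only on the import and package layout introduced in this commit: the relocated make_perf_test resolves cornellbox.ml.txt relative to its own package, so callers such as run_default_benchmark no longer pass a scene path.

    from golem.envs.docker.benchmark.minilight import make_perf_test

    # The helper locates cornellbox.ml.txt itself and returns a float score.
    score = make_perf_test()
    print('minilight performance: %.2f' % score)
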
golem/envs/__init__.py: 11 changes (11 additions, 0 deletions)
@@ -281,6 +281,12 @@ def start(self) -> Deferred:
""" Start the computation. Assumes current status is 'PREPARED'. """
raise NotImplementedError

@abstractmethod
def wait_until_stopped(self) -> Deferred:
""" Can be called after calling `start` to wait until the runtime has
stopped """
raise NotImplementedError

@abstractmethod
def stop(self) -> Deferred:
""" Interrupt the computation. Assumes current status is 'RUNNING'. """
@@ -450,6 +456,11 @@ def clean_up(self) -> Deferred:
'ERROR'. """
raise NotImplementedError

@abstractmethod
def run_benchmark(self) -> Deferred:
""" Get the general performace score for this environment. """
raise NotImplementedError

@classmethod
@abstractmethod
def metadata(cls) -> EnvMetadata:
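
Both abstract methods added above (Runtime.wait_until_stopped and Environment.run_benchmark) return Deferreds. A hedged sketch of the intended runtime lifecycle, assuming a concrete Runtime implementation obtained from an environment (as the Docker CPU runtime below); the helper name is illustrative:

    from twisted.internet.defer import inlineCallbacks

    @inlineCallbacks
    def run_to_completion(runtime):
        yield runtime.prepare()
        yield runtime.start()
        yield runtime.wait_until_stopped()   # replaces manual status polling
        output = list(runtime.stdout('utf-8'))
        yield runtime.clean_up()
        return output
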
golem/envs/docker/benchmark/Dockerfile: 8 changes (8 additions, 0 deletions)
@@ -0,0 +1,8 @@
FROM golemfactory/base:1.5

MAINTAINER Golem Tech <tech@golem.network>

COPY minilight /golem/minilight
COPY entrypoint.py /golem/

ENTRYPOINT ["python3", "/golem/entrypoint.py"]
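
As a rough illustration (not part of the commit), the image could be built and exercised locally with docker-py; the build path and this manual invocation are assumptions, while the tag matches BENCHMARK_IMAGE defined in golem/envs/docker/cpu.py below:

    import docker

    client = docker.from_env()
    # Build from the directory containing the Dockerfile above (assumed path).
    client.images.build(path='golem/envs/docker/benchmark',
                        tag='igorgolem/cpu_benchmark:1.0')
    # entrypoint.py prints a single float score on stdout.
    output = client.containers.run('igorgolem/cpu_benchmark:1.0', remove=True)
    print(float(output.decode().strip()))
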
golem/envs/docker/benchmark/entrypoint.py: 6 changes (6 additions, 0 deletions)
@@ -0,0 +1,6 @@
from minilight import make_perf_test


if __name__ == '__main__':
score = make_perf_test()
print(score)
golem/envs/docker/benchmark/minilight/__init__.py: 9 changes (9 additions, 0 deletions)
@@ -0,0 +1,9 @@
from pathlib import Path

from .src.minilight import make_perf_test as make_perf_test_impl

TESTFILE = Path(__file__).parent / 'cornellbox.ml.txt'


def make_perf_test() -> float:
return make_perf_test_impl(str(TESTFILE))
golem/envs/docker/cpu.py: 35 changes (33 additions, 2 deletions)
@@ -1,4 +1,5 @@
import logging
import os
from pathlib import Path
from socket import socket, SocketIO, SHUT_WR
from threading import Thread, Lock
@@ -7,7 +8,7 @@
NamedTuple, Tuple, Iterator, Union, Iterable

from docker.errors import APIError
from twisted.internet.defer import Deferred
from twisted.internet.defer import Deferred, inlineCallbacks
from twisted.internet.threads import deferToThread
from urllib3.contrib.pyopenssl import WrappedSocket

@@ -285,6 +286,12 @@ def _spawn_status_update_thread(_):
f"Starting container '{self._container_id}' failed."))
return deferred_start

def wait_until_stopped(self) -> Deferred:
def _wait_until_stopped():
while self.status() == RuntimeStatus.RUNNING:
sleep(1)
return deferToThread(_wait_until_stopped)

def stop(self) -> Deferred:
with self._status_lock:
self._assert_status(self._status, RuntimeStatus.RUNNING)
@@ -404,7 +411,7 @@ class DockerCPUEnvironment(Environment):
MIN_MEMORY_MB: ClassVar[int] = 1024
MIN_CPU_COUNT: ClassVar[int] = 1

SHARED_DIR_PATH: ClassVar[str] = '/golem'
SHARED_DIR_PATH: ClassVar[str] = '/golem/work'

NETWORK_MODE: ClassVar[str] = 'none'
DNS_SERVERS: ClassVar[List[str]] = []
@@ -431,6 +438,8 @@ class DockerCPUEnvironment(Environment):
'sys_tty_config'
]

BENCHMARK_IMAGE = 'igorgolem/cpu_benchmark:1.0'

@classmethod
def supported(cls) -> EnvSupportStatus:
logger.info('Checking environment support status...')
@@ -505,6 +514,28 @@ def _clean_up():

return deferToThread(_clean_up)

@inlineCallbacks
def run_benchmark(self) -> Deferred:
image, tag = self.BENCHMARK_IMAGE.split(':')
yield self.install_prerequisites(DockerPrerequisites(
image=image,
tag=tag,
))
payload = DockerPayload(
image=image,
tag=tag,
user=None if is_windows() else str(os.getuid()),
env={},
)
runtime = self.runtime(payload)
yield runtime.prepare()
yield runtime.start()
yield runtime.wait_until_stopped()
# Benchmark is supposed to output a single line containing a float value
score = float(list(runtime.stdout('utf-8'))[0])
yield runtime.clean_up()
return score

@classmethod
def metadata(cls) -> EnvMetadata:
return EnvMetadata(
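
A hedged sketch of driving the new DockerCPUEnvironment.run_benchmark() end to end, mirroring the integration test further down; the import path and helper name are assumptions:

    import tempfile
    from pathlib import Path
    from twisted.internet.defer import inlineCallbacks

    # Assumed import path for the classes shown in this file's diff.
    from golem.envs.docker.cpu import DockerCPUConfig, DockerCPUEnvironment

    @inlineCallbacks
    def docker_cpu_score():
        # NOTE: the image repository ('igorgolem') must be whitelisted first,
        # as the integration test below does via Whitelist.add(...).
        config = DockerCPUConfig(work_dir=Path(tempfile.gettempdir()))
        env = DockerCPUEnvironment(config)
        yield env.prepare()
        score = yield env.run_benchmark()
        yield env.clean_up()
        return score
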
scripts/pyinstaller/hooks/hook-golem.py: 4 changes (2 additions, 2 deletions)
@@ -11,8 +11,6 @@
('apps/*.ini', 'apps/'),
('apps/core/resources/images/*',
'apps/core/resources/images/'),
('apps/rendering/benchmark/minilight/cornellbox.ml.txt',
'apps/rendering/benchmark/minilight/'),
('apps/blender/resources/images/*.Dockerfile',
'apps/blender/resources/images/'),
('apps/blender/resources/images/entrypoints/scripts/render_tools/templates/'
@@ -29,6 +27,8 @@
('golem/RELEASE-VERSION', 'golem/'),
('golem/TERMS.html', 'golem/'),
('golem/database/schemas/*.py', 'golem/database/schemas/'),
('golem/envs/docker/benchmark/minilight/cornellbox.ml.txt',
'golem/envs/docker/benchmark/minilight/'),
('golem/network/concent/resources/ssl/certs/*.crt',
'golem/network/concent/resources/ssl/certs/'),
('scripts/docker/create-share.ps1', 'scripts/docker/'),
setup.py: 5 changes (3 additions, 2 deletions)
@@ -80,8 +80,9 @@
path.normpath('apps/registered_test.ini'),
path.normpath('apps/images.ini')
]),
(path.normpath('../../golem/apps/rendering/benchmark/minilight'), [
path.normpath('apps/rendering/benchmark/minilight/cornellbox.ml.txt'),
(path.normpath('../../golem/golem/envs/docker/benchmark/minilight'), [
path.normpath(
'golem/envs/docker/benchmark/minilight/cornellbox.ml.txt'),
]),
(path.normpath(
'../../golem/apps/blender/resources/images/entrypoints/scripts/'
tests/golem/envs/docker/cpu/test_env.py: 1 change (0 additions, 1 deletion)
@@ -1,6 +1,5 @@
from logging import Logger
from pathlib import Path
from subprocess import SubprocessError
from unittest.mock import patch as _patch, Mock, MagicMock, ANY

from twisted.trial.unittest import TestCase
tests/golem/envs/docker/cpu/test_integration.py: 15 changes (13 additions, 2 deletions)
@@ -72,12 +72,23 @@ def _clean_up_runtime():
self.assertEqual(test_input, test_output)

# Wait for exit and delete container
while runtime.status() == RuntimeStatus.RUNNING:
time.sleep(1)
yield runtime.wait_until_stopped()
self.assertEqual(runtime.status(), RuntimeStatus.STOPPED)
yield runtime.clean_up()
self.assertEqual(runtime.status(), RuntimeStatus.TORN_DOWN)

# Clean up the environment
yield env.clean_up()
self.assertEqual(env.status(), EnvStatus.DISABLED)

@inlineCallbacks
def test_benchmark(self):
config = DockerCPUConfig(work_dir=Path(tempfile.gettempdir()))
env = DockerCPUEnvironment(config)
yield env.prepare()

Whitelist.add(env.BENCHMARK_IMAGE.split('/')[0])
score = yield env.run_benchmark()
self.assertGreater(score, 0)

yield env.clean_up()
