Skip to content

Commit

Permalink
Build: pass api_client down to environment/builders/etc (#10390)
Browse files Browse the repository at this point in the history
For #10289
we won't have the credentials hardcoded in our settings anymore;
instead, we will be provided with a dynamic token for the
build to use. For this reason, we now have to pass the API client
object around.
  • Loading branch information
stsewd authored Jun 7, 2023
1 parent 21d60af commit 60b4133
Show file tree
Hide file tree
Showing 15 changed files with 99 additions and 134 deletions.
6 changes: 1 addition & 5 deletions readthedocs/api/v2/client.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
"""Simple client to access our API with Slumber credentials."""

import structlog

import requests
import structlog
from django.conf import settings
from rest_framework.renderers import JSONRenderer
from slumber import API, serialize
Expand Down Expand Up @@ -71,6 +70,3 @@ def setup_api():
else:
log.warning('SLUMBER_USERNAME/PASSWORD settings are not set')
return API(**api_config)


api = setup_api()
52 changes: 24 additions & 28 deletions readthedocs/doc_builder/backends/sphinx.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,13 +16,14 @@
from django.urls import reverse
from requests.exceptions import ConnectionError

from readthedocs.api.v2.client import api
from readthedocs.builds import utils as version_utils
from readthedocs.builds.models import APIVersion
from readthedocs.core.utils.filesystem import safe_open
from readthedocs.doc_builder.exceptions import PDFNotFound
from readthedocs.projects.constants import PUBLIC
from readthedocs.projects.exceptions import ProjectConfigurationError, UserFileNotFound
from readthedocs.projects.models import Feature
from readthedocs.projects.templatetags.projects_tags import sort_version_aware
from readthedocs.projects.utils import safe_write

from ..base import BaseBuilder
Expand Down Expand Up @@ -167,33 +168,28 @@ def get_config_params(self):
versions = []
downloads = []
subproject_urls = []
# Avoid hitting database and API if using Docker build environment
if settings.DONT_HIT_API:
if self.project.has_feature(Feature.ALL_VERSIONS_IN_HTML_CONTEXT):
versions = self.project.active_versions()
else:
versions = self.project.active_versions().filter(
privacy_level=PUBLIC,
)
downloads = self.version.get_downloads(pretty=True)
subproject_urls = self.project.get_subproject_urls()
else:
try:
versions = self.project.api_versions()
if not self.project.has_feature(Feature.ALL_VERSIONS_IN_HTML_CONTEXT):
versions = [
v
for v in versions
if v.privacy_level == PUBLIC
]
downloads = api.version(self.version.pk).get()['downloads']
subproject_urls = self.project.get_subproject_urls()
except ConnectionError:
log.exception(
'Timeout while fetching versions/downloads/subproject_urls for Sphinx context.',
project_slug=self.project.slug,
version_slug=self.version.slug,
)
try:
active_versions_data = self.api_client.project(
self.project.pk
).active_versions.get()["versions"]
versions = sort_version_aware(
[APIVersion(**version_data) for version_data in active_versions_data]
)
if not self.project.has_feature(Feature.ALL_VERSIONS_IN_HTML_CONTEXT):
versions = [v for v in versions if v.privacy_level == PUBLIC]
downloads = self.api_client.version(self.version.pk).get()["downloads"]
subproject_urls = [
(project["slug"], project["canonical_url"])
for project in self.api_client.project(self.project.pk)
.subprojects()
.get()["subprojects"]
]
except ConnectionError:
log.exception(
"Timeout while fetching versions/downloads/subproject_urls for Sphinx context.",
project_slug=self.project.slug,
version_slug=self.version.slug,
)

build_id = self.build_env.build.get('id')
build_url = None
Expand Down
6 changes: 2 additions & 4 deletions readthedocs/doc_builder/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ def __init__(self, build_env, python_env):
self.project = build_env.project
self.config = python_env.config if python_env else None
self.project_path = self.project.checkout_path(self.version.slug)
self.api_client = self.build_env.api_client

def get_final_doctype(self):
"""Some builders may have a different doctype at build time."""
Expand All @@ -52,11 +53,8 @@ def build(self):
def _post_build(self):
"""Execute extra steps (e.g. create ZIP, rename PDF, etc) after building if required."""

def docs_dir(self, docs_dir=None, **__):
def docs_dir(self):
"""Handle creating a custom docs_dir if it doesn't exist."""
if docs_dir:
return docs_dir

for doc_dir_name in ['docs', 'doc', 'Doc', 'book']:
possible_path = os.path.join(self.project_path, doc_dir_name)
if os.path.exists(possible_path):
Expand Down
2 changes: 2 additions & 0 deletions readthedocs/doc_builder/director.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,7 @@ def create_vcs_environment(self):
# Force the ``container_image`` to use one that has the latest
# ca-certificate package which is compatible with Lets Encrypt
container_image=settings.RTD_DOCKER_BUILD_SETTINGS["os"]["ubuntu-20.04"],
api_client=self.data.api_client,
)

def create_build_environment(self):
Expand All @@ -132,6 +133,7 @@ def create_build_environment(self):
build=self.data.build,
environment=self.get_build_env_vars(),
use_gvisor=use_gvisor,
api_client=self.data.api_client,
)

def setup_environment(self):
Expand Down
23 changes: 15 additions & 8 deletions readthedocs/doc_builder/environments.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@
from requests.exceptions import ConnectionError, ReadTimeout
from requests_toolbelt.multipart.encoder import MultipartEncoder

from readthedocs.api.v2.client import api as api_v2
from readthedocs.builds.models import BuildCommandResultMixin
from readthedocs.core.utils import slugify
from readthedocs.projects.models import Feature
Expand Down Expand Up @@ -227,7 +226,7 @@ def get_command(self):
return ' '.join(self.command)
return self.command

def save(self):
def save(self, api_client):
"""Save this command and result via the API."""
# Force record this command as success to avoid Build reporting errors
    # on commands that are just for checking purposes and do not interfere
Expand All @@ -251,7 +250,7 @@ def save(self):
encoder = MultipartEncoder(
{key: str(value) for key, value in data.items()}
)
resource = api_v2.command
resource = api_client.command
resp = resource._store["session"].post(
resource._store["base_url"] + "/",
data=encoder,
Expand All @@ -261,7 +260,7 @@ def save(self):
)
log.debug('Post response via multipart form.', response=resp)
else:
resp = api_v2.command.post(data)
resp = api_client.command.post(data)
log.debug('Post response via JSON encoded data.', response=resp)


Expand Down Expand Up @@ -416,9 +415,12 @@ class BaseBuildEnvironment:
:param build: Build instance
:param environment: shell environment variables
:param record: whether or not to record build commands in the database via
the API. The only case where we want this to be `False` is when
instantiating this class from `sync_repository_task` because it's a
background task that does not expose commands to the user.
the API. The only case where we want this to be `False` is when
instantiating this class from `sync_repository_task` because it's a
background task that does not expose commands to the user.
:param api_client: API v2 client instance (readthedocs.api.v2.client).
    This is used to record commands in the database; if `record=True`,
    this argument is required.
"""

def __init__(
Expand All @@ -429,6 +431,7 @@ def __init__(
config=None,
environment=None,
record=True,
api_client=None,
**kwargs,
):
self.project = project
Expand All @@ -438,6 +441,10 @@ def __init__(
self.build = build
self.config = config
self.record = record
self.api_client = api_client

if self.record and not self.api_client:
raise ValueError("api_client is required when record=True")

# TODO: remove these methods, we are not using LocalEnvironment anymore. We
# need to find a way for tests to not require this anymore
Expand All @@ -449,7 +456,7 @@ def __exit__(self, exc_type, exc_value, tb):

def record_command(self, command):
if self.record:
command.save()
command.save(self.api_client)

def run(self, *cmd, **kwargs):
"""Shortcut to run command from environment."""
Expand Down
26 changes: 0 additions & 26 deletions readthedocs/projects/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@
from django_extensions.db.models import TimeStampedModel
from taggit.managers import TaggableManager

from readthedocs.api.v2.client import api
from readthedocs.builds.constants import EXTERNAL, INTERNAL, LATEST, STABLE
from readthedocs.constants import pattern_opts
from readthedocs.core.history import ExtraHistoricalRecords
Expand Down Expand Up @@ -609,23 +608,6 @@ def get_builds_url(self):
},
)

def get_canonical_url(self):
if settings.DONT_HIT_DB:
return api.project(self.pk).canonical_url().get()['url']
return self.get_docs_url()

def get_subproject_urls(self):
"""
List subproject URLs.
This is used in search result linking
"""
if settings.DONT_HIT_DB:
return [(proj['slug'], proj['canonical_url']) for proj in
(api.project(self.pk).subprojects().get()['subprojects'])]
return [(proj.child.slug, proj.child.get_docs_url())
for proj in self.subprojects.all()]

def get_storage_paths(self):
"""
Get the paths of all artifacts used by the project.
Expand Down Expand Up @@ -1110,14 +1092,6 @@ def get_latest_build(self, finished=True):
kwargs['state'] = 'finished'
return self.builds(manager=INTERNAL).filter(**kwargs).first()

def api_versions(self):
from readthedocs.builds.models import APIVersion
ret = []
for version_data in api.project(self.pk).active_versions.get()['versions']:
version = APIVersion(**version_data)
ret.append(version)
return sort_version_aware(ret)

def active_versions(self):
from readthedocs.builds.models import Version
versions = Version.internal.public(project=self, only_active=True)
Expand Down
35 changes: 19 additions & 16 deletions readthedocs/projects/tasks/builds.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,10 @@
from celery import Task
from django.conf import settings
from django.utils import timezone
from slumber import API
from slumber.exceptions import HttpClientError

from readthedocs.api.v2.client import api as api_v2
from readthedocs.api.v2.client import setup_api
from readthedocs.builds import tasks as build_tasks
from readthedocs.builds.constants import (
ARTIFACT_TYPES,
Expand Down Expand Up @@ -102,6 +103,9 @@ class TaskData:
build_pk: int = None
build_commit: str = None

# Slumber client to interact with the API v2.
api_client: API = None

start_time: timezone.datetime = None
# pylint: disable=unsubscriptable-object
environment_class: type[DockerBuildEnvironment] | type[LocalBuildEnvironment] = None
Expand Down Expand Up @@ -153,6 +157,8 @@ def before_start(self, task_id, args, kwargs):
# argument
self.data.version_pk = args[0]

self.data.api_client = setup_api()

# load all data from the API required for the build
self.data.version = self.get_version(self.data.version_pk)
self.data.project = self.data.version.project
Expand Down Expand Up @@ -339,8 +345,10 @@ def sigint_received(*args, **kwargs):

def _check_concurrency_limit(self):
try:
response = api_v2.build.concurrent.get(project__slug=self.data.project.slug)
concurrency_limit_reached = response.get('limit_reached', False)
response = self.data.api_client.build.concurrent.get(
project__slug=self.data.project.slug
)
concurrency_limit_reached = response.get("limit_reached", False)
max_concurrent_builds = response.get(
'max_concurrent',
settings.RTD_MAX_CONCURRENT_BUILDS,
Expand Down Expand Up @@ -390,6 +398,8 @@ def before_start(self, task_id, args, kwargs):
# anymore and we are not using it
self.data.environment_class = LocalBuildEnvironment

self.data.api_client = setup_api()

self.data.build = self.get_build(self.data.build_pk)
self.data.version = self.get_version(self.data.version_pk)
self.data.project = self.data.version.project
Expand Down Expand Up @@ -427,7 +437,7 @@ def _reset_build(self):
# Reset build only if it has some commands already.
if self.data.build.get("commands"):
log.info("Resetting build.")
api_v2.build(self.data.build["id"]).reset.post()
self.data.api_client.build(self.data.build["id"]).reset.post()

def on_failure(self, exc, task_id, args, kwargs, einfo):
"""
Expand Down Expand Up @@ -598,7 +608,7 @@ def on_success(self, retval, task_id, args, kwargs):
# TODO: remove this condition and *always* update the DB Version instance
if "html" in valid_artifacts:
try:
api_v2.version(self.data.version.pk).patch(
self.data.api_client.version(self.data.version.pk).patch(
{
"built": True,
"documentation_type": self.data.version.documentation_type,
Expand Down Expand Up @@ -709,7 +719,7 @@ def update_build(self, state=None):
# self.data.build[key] = val.decode('utf-8', 'ignore')

try:
api_v2.build(self.data.build['id']).patch(self.data.build)
self.data.api_client.build(self.data.build["id"]).patch(self.data.build)
except Exception:
# NOTE: we are updating the "Build" object on each `state`.
# Only if the last update fails, there may be some inconsistency
Expand Down Expand Up @@ -802,22 +812,15 @@ def save_build_data(self):
except Exception:
log.exception("Error while saving build data")

@staticmethod
def get_project(project_pk):
"""Get project from API."""
project_data = api_v2.project(project_pk).get()
return APIProject(**project_data)

@staticmethod
def get_build(build_pk):
def get_build(self, build_pk):
"""
Retrieve build object from API.
:param build_pk: Build primary key
"""
build = {}
if build_pk:
build = api_v2.build(build_pk).get()
build = self.data.api_client.build(build_pk).get()
private_keys = [
'project',
'version',
Expand All @@ -834,7 +837,7 @@ def get_build(build_pk):
# build has finished to reduce API calls.
def set_valid_clone(self):
"""Mark on the project that it has been cloned properly."""
api_v2.project(self.data.project.pk).patch(
self.data.api_client.project(self.data.project.pk).patch(
{'has_valid_clone': True}
)
self.data.project.has_valid_clone = True
Expand Down
Loading

0 comments on commit 60b4133

Please sign in to comment.