Blacken code by default #8611

Merged
2 commits merged on Oct 31, 2024
33 changes: 3 additions & 30 deletions .github/workflows/black.yml
@@ -5,38 +5,11 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
-      - id: files
-        uses: tj-actions/changed-files@v41.0.0
-        with:
-          files: |
-            cvat-sdk/**/*.py
-            cvat-cli/**/*.py
-            tests/python/**/*.py
-            cvat/apps/quality_control/**/*.py
-            cvat/apps/analytics_report/**/*.py
-          dir_names: true
 
       - name: Run checks
-        env:
-          PR_FILES_AM: ${{ steps.files.outputs.added_modified }}
-          PR_FILES_RENAMED: ${{ steps.files.outputs.renamed }}
         run: |
-          # If different modules use different Black configs,
-          # we need to run Black for each python component group separately.
-          # Otherwise, they all will use the same config.
+          pipx install $(grep "^black" ./cvat-cli/requirements/development.txt)
 
-          UPDATED_DIRS="${{steps.files.outputs.all_changed_files}}"
+          echo "Black version: $(black --version)"
 
-          if [[ ! -z $UPDATED_DIRS ]]; then
-            pipx install $(egrep "black.*" ./cvat-cli/requirements/development.txt)
-
-            echo "Black version: "$(black --version)
-            echo "The dirs will be checked: $UPDATED_DIRS"
-            EXIT_CODE=0
-            for DIR in $UPDATED_DIRS; do
-              black --check --diff $DIR || EXIT_CODE=$(($? | $EXIT_CODE)) || true
-            done
-            exit $EXIT_CODE
-          else
-            echo "No files with the \"py\" extension found"
-          fi
+          black --check --diff .
2 changes: 1 addition & 1 deletion cvat/__init__.py
@@ -4,6 +4,6 @@

from cvat.utils.version import get_version

VERSION = (2, 22, 0, 'alpha', 0)
VERSION = (2, 22, 0, "alpha", 0)

__version__ = get_version(VERSION)
5 changes: 4 additions & 1 deletion cvat/apps/profiler.py
@@ -1,13 +1,16 @@
from django.apps import apps

if apps.is_installed('silk'):
if apps.is_installed("silk"):
    from silk.profiling.profiler import silk_profile  # pylint: disable=unused-import
else:
    from functools import wraps

    def silk_profile(name=None):
        def profile(f):
            @wraps(f)
            def wrapped(*args, **kwargs):
                return f(*args, **kwargs)

            return wrapped

        return profile
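
For context, the fallback silk_profile defined above is a pass-through decorator used when the silk app is not installed. A minimal usage sketch (the decorated function and profile name below are hypothetical, not part of this PR):

from cvat.apps.profiler import silk_profile

@silk_profile(name="list tasks")  # hypothetical profile name
def list_tasks():
    # With silk installed the call is recorded by the profiler; with the
    # fallback above it simply runs the function unchanged.
    return ["task1", "task2"]

assert list_tasks() == ["task1", "task2"]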
1 change: 1 addition & 0 deletions cvat/asgi.py
@@ -24,6 +24,7 @@


if debug.is_debugging_enabled():

    class DebuggerApp(ASGIHandler):
        """
        Support for VS code debugger
20 changes: 14 additions & 6 deletions cvat/rq_patching.py
@@ -32,18 +32,25 @@ def custom_started_job_registry_cleanup(self, timestamp: Optional[float] = None)
    job_ids = self.get_expired_job_ids(score)

    if job_ids:
        failed_job_registry = rq.registry.FailedJobRegistry(self.name, self.connection, serializer=self.serializer)
        failed_job_registry = rq.registry.FailedJobRegistry(
            self.name, self.connection, serializer=self.serializer
        )
        queue = self.get_queue()

        with self.connection.pipeline() as pipeline:
            for job_id in job_ids:
                try:
                    job = self.job_class.fetch(job_id, connection=self.connection, serializer=self.serializer)
                    job = self.job_class.fetch(
                        job_id, connection=self.connection, serializer=self.serializer
                    )
                except NoSuchJobError:
                    continue

                job.execute_failure_callback(
                    self.death_penalty_class, AbandonedJobError, AbandonedJobError(), traceback.extract_stack()
                    self.death_penalty_class,
                    AbandonedJobError,
                    AbandonedJobError(),
                    traceback.extract_stack(),
                )

                retry = job.retries_left and job.retries_left > 0
@@ -54,8 +61,8 @@ def custom_started_job_registry_cleanup(self, timestamp: Optional[float] = None)
                else:
                    exc_string = f"due to {AbandonedJobError.__name__}"
                    rq.registry.logger.warning(
                        f'{self.__class__.__name__} cleanup: Moving job to {rq.registry.FailedJobRegistry.__name__} '
                        f'({exc_string})'
                        f"{self.__class__.__name__} cleanup: Moving job to {rq.registry.FailedJobRegistry.__name__} "
                        f"({exc_string})"
                    )
                    job.set_status(JobStatus.FAILED)
                    job._exc_info = f"Moved to {rq.registry.FailedJobRegistry.__name__}, {exc_string}, at {datetime.now()}"
@@ -69,7 +76,8 @@ def custom_started_job_registry_cleanup(self, timestamp: Optional[float] = None)

    return job_ids


def update_started_job_registry_cleanup() -> None:
    # don't forget to check if the issue https://github.com/rq/rq/issues/2006 has been resolved in upstream
    assert VERSION == '1.16.0'
    assert VERSION == "1.16.0"
    rq.registry.StartedJobRegistry.cleanup = custom_started_job_registry_cleanup
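
A brief activation sketch for the patch above, assuming rq 1.16.0 is installed and cvat.rq_patching is importable (the call site itself is not shown in this diff):

import rq.registry

from cvat.rq_patching import (
    custom_started_job_registry_cleanup,
    update_started_job_registry_cleanup,
)

# Applies the monkey-patch: every StartedJobRegistry now uses the cleanup
# defined above, which moves abandoned jobs to the FailedJobRegistry.
update_started_job_registry_cleanup()
assert rq.registry.StartedJobRegistry.cleanup is custom_started_job_registry_cleanup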
3 changes: 3 additions & 0 deletions cvat/rqworker.py
@@ -42,12 +42,14 @@ def execute_job(self, *args, **kwargs):
        # errors during debugging
        # https://stackoverflow.com/questions/8242837/django-multiprocessing-and-database-connections/10684672#10684672
        from django import db

        db.connections.close_all()

        return self.perform_job(*args, **kwargs)


if debug.is_debugging_enabled():

    class RemoteDebugWorker(SimpleWorker):
        """
        Support for VS code debugger
@@ -68,6 +70,7 @@ def execute_job(self, *args, **kwargs):

if os.environ.get("COVERAGE_PROCESS_START"):
    import coverage

    default_exit = os._exit

    def coverage_exit(*args, **kwargs):
38 changes: 19 additions & 19 deletions cvat/urls.py
@@ -23,31 +23,31 @@
from django.urls import path, include

urlpatterns = [
    path('admin/', admin.site.urls),
    path('', include('cvat.apps.engine.urls')),
    path('django-rq/', include('django_rq.urls')),
    path("admin/", admin.site.urls),
    path("", include("cvat.apps.engine.urls")),
    path("django-rq/", include("django_rq.urls")),
]

if apps.is_installed('cvat.apps.log_viewer'):
    urlpatterns.append(path('', include('cvat.apps.log_viewer.urls')))
if apps.is_installed("cvat.apps.log_viewer"):
    urlpatterns.append(path("", include("cvat.apps.log_viewer.urls")))

if apps.is_installed('cvat.apps.events'):
    urlpatterns.append(path('api/', include('cvat.apps.events.urls')))
if apps.is_installed("cvat.apps.events"):
    urlpatterns.append(path("api/", include("cvat.apps.events.urls")))

if apps.is_installed('cvat.apps.lambda_manager'):
    urlpatterns.append(path('', include('cvat.apps.lambda_manager.urls')))
if apps.is_installed("cvat.apps.lambda_manager"):
    urlpatterns.append(path("", include("cvat.apps.lambda_manager.urls")))

if apps.is_installed('cvat.apps.webhooks'):
    urlpatterns.append(path('api/', include('cvat.apps.webhooks.urls')))
if apps.is_installed("cvat.apps.webhooks"):
    urlpatterns.append(path("api/", include("cvat.apps.webhooks.urls")))

if apps.is_installed('cvat.apps.quality_control'):
    urlpatterns.append(path('api/', include('cvat.apps.quality_control.urls')))
if apps.is_installed("cvat.apps.quality_control"):
    urlpatterns.append(path("api/", include("cvat.apps.quality_control.urls")))

if apps.is_installed('silk'):
    urlpatterns.append(path('profiler/', include('silk.urls')))
if apps.is_installed("silk"):
    urlpatterns.append(path("profiler/", include("silk.urls")))

if apps.is_installed('health_check'):
    urlpatterns.append(path('api/server/health/', include('health_check.urls')))
if apps.is_installed("health_check"):
    urlpatterns.append(path("api/server/health/", include("health_check.urls")))

if apps.is_installed('cvat.apps.analytics_report'):
    urlpatterns.append(path('api/', include('cvat.apps.analytics_report.urls')))
if apps.is_installed("cvat.apps.analytics_report"):
    urlpatterns.append(path("api/", include("cvat.apps.analytics_report.urls")))
7 changes: 2 additions & 5 deletions cvat/utils/background_jobs.py
@@ -7,12 +7,9 @@

import django_rq


def schedule_job_with_throttling(
    queue_name: str,
    job_id_base: str,
    scheduled_time: datetime,
    func: Callable,
    **func_kwargs
    queue_name: str, job_id_base: str, scheduled_time: datetime, func: Callable, **func_kwargs
) -> None:
    """
    This function schedules an RQ job to run at `scheduled_time`,
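
The reflowed signature keeps the same parameters. A hedged usage sketch (queue name, job id, and the job function are hypothetical; the throttling behaviour itself is described in the docstring truncated above):

from datetime import datetime, timedelta, timezone

from cvat.utils.background_jobs import schedule_job_with_throttling

def send_reminder(user_id: int) -> None:  # hypothetical job function
    print(f"reminding user {user_id}")

schedule_job_with_throttling(
    "notifications",                                  # hypothetical queue name
    "reminder-42",                                    # job_id_base shared by repeated schedules
    datetime.now(timezone.utc) + timedelta(hours=1),  # scheduled_time
    send_reminder,
    user_id=42,
)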
7 changes: 4 additions & 3 deletions cvat/utils/http.py
@@ -19,11 +19,12 @@

if settings.SMOKESCREEN_ENABLED:
    PROXIES_FOR_UNTRUSTED_URLS = {
        'http': 'http://localhost:4750',
        'https': 'http://localhost:4750',
        "http": "http://localhost:4750",
        "https": "http://localhost:4750",
    }


def make_requests_session() -> requests.Session:
    session = requests.Session()
    session.headers['User-Agent'] = _CVAT_USER_AGENT
    session.headers["User-Agent"] = _CVAT_USER_AGENT
    return session
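
A possible usage sketch for the helpers above (the URL is hypothetical; PROXIES_FOR_UNTRUSTED_URLS is only defined when SMOKESCREEN_ENABLED is set):

from cvat.utils.http import PROXIES_FOR_UNTRUSTED_URLS, make_requests_session

session = make_requests_session()  # session carries the CVAT User-Agent header

# Route a user-supplied URL through the local Smokescreen proxy so that
# requests to internal addresses can be filtered.
response = session.get(
    "https://example.com/dataset.zip",  # hypothetical untrusted URL
    proxies=PROXIES_FOR_UNTRUSTED_URLS,
    timeout=60,
)
response.raise_for_status()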
11 changes: 6 additions & 5 deletions cvat/utils/remote_debugger.py
@@ -6,7 +6,8 @@


def is_debugging_enabled() -> bool:
    return os.environ.get('CVAT_DEBUG_ENABLED') == 'yes'
    return os.environ.get("CVAT_DEBUG_ENABLED") == "yes"


if is_debugging_enabled():
    import debugpy
@@ -21,8 +22,8 @@ class RemoteDebugger:
        Read more: https://modwsgi.readthedocs.io/en/develop/user-guides/debugging-techniques.html
        """

        ENV_VAR_PORT = 'CVAT_DEBUG_PORT'
        ENV_VAR_WAIT = 'CVAT_DEBUG_WAIT'
        ENV_VAR_PORT = "CVAT_DEBUG_PORT"
        ENV_VAR_WAIT = "CVAT_DEBUG_WAIT"
        __debugger_initialized = False

        @classmethod
@@ -35,7 +36,7 @@ def _singleton_init(cls):

                # The only intended use is in Docker.
                # Using 127.0.0.1 will not allow host connections
                addr = ('0.0.0.0', port)  # nosec - B104:hardcoded_bind_all_interfaces
                addr = ("0.0.0.0", port)  # nosec - B104:hardcoded_bind_all_interfaces

                # Debugpy is a singleton
                # We put it in the main thread of the process and then report new threads
@@ -45,7 +46,7 @@ def _singleton_init(cls):
                # Feel free to enable if needed.
                debugpy.configure({"subProcess": False})

                if os.environ.get(cls.ENV_VAR_WAIT) == 'yes':
                if os.environ.get(cls.ENV_VAR_WAIT) == "yes":
                    debugpy.wait_for_client()
            except Exception as ex:
                raise Exception("failed to set debugger") from ex
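
For reference, the debugger above is controlled entirely by environment variables; a sketch of the expected settings (the port value is an example, not taken from this PR):

import os

os.environ["CVAT_DEBUG_ENABLED"] = "yes"  # makes is_debugging_enabled() return True
os.environ["CVAT_DEBUG_PORT"] = "9090"    # ENV_VAR_PORT: port debugpy listens on (example value)
os.environ["CVAT_DEBUG_WAIT"] = "yes"     # ENV_VAR_WAIT: block until a debugger client attaches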
27 changes: 16 additions & 11 deletions cvat/utils/version.py
@@ -11,6 +11,7 @@
import os
import subprocess


def get_version(version):
    """Return a PEP 440-compliant version number from VERSION."""
    # Now build the two parts of the version number:
@@ -20,21 +21,23 @@ def get_version(version):

    main = get_main_version(version)

    sub = ''
    if version[3] == 'alpha' and version[4] == 0:
    sub = ""
    if version[3] == "alpha" and version[4] == 0:
        git_changeset = get_git_changeset()
        if git_changeset:
            sub = '.dev%s' % git_changeset
            sub = ".dev%s" % git_changeset

    elif version[3] != 'final':
        mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'rc'}
    elif version[3] != "final":
        mapping = {"alpha": "a", "beta": "b", "rc": "rc"}
        sub = mapping[version[3]] + str(version[4])

    return main + sub


def get_main_version(version):
    """Return main version (X.Y.Z) from VERSION."""
    return '.'.join(str(x) for x in version[:3])
    return ".".join(str(x) for x in version[:3])


def get_git_changeset():
    """Return a numeric identifier of the latest git changeset.
@@ -44,14 +47,16 @@ def get_git_changeset():
    so it's sufficient for generating the development version numbers.
    """
    repo_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    git_log = subprocess.Popen( # nosec: B603, B607
        ['git', 'log', '--pretty=format:%ct', '--quiet', '-1', 'HEAD'],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE,
        cwd=repo_dir, universal_newlines=True,
    git_log = subprocess.Popen(  # nosec: B603, B607
        ["git", "log", "--pretty=format:%ct", "--quiet", "-1", "HEAD"],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        cwd=repo_dir,
        universal_newlines=True,
    )
    timestamp = git_log.communicate()[0]
    try:
        timestamp = datetime.datetime.fromtimestamp(int(timestamp), tz=datetime.timezone.utc)
    except ValueError:
        return None
    return timestamp.strftime('%Y%m%d%H%M%S')
    return timestamp.strftime("%Y%m%d%H%M%S")
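
To make the version mapping concrete, a few hedged examples of what get_version produces (the .dev suffix depends on the local git checkout, so it is only printed here):

from cvat.utils.version import get_main_version, get_version

assert get_main_version((2, 22, 0, "alpha", 0)) == "2.22.0"

# "alpha" with serial 0 is a development tree: a ".devYYYYMMDDHHMMSS" suffix is
# appended when the git changeset timestamp is available.
print(get_version((2, 22, 0, "alpha", 0)))

# Other pre-releases map through {"alpha": "a", "beta": "b", "rc": "rc"}.
assert get_version((2, 22, 0, "beta", 1)) == "2.22.0b1"
assert get_version((2, 22, 0, "rc", 2)) == "2.22.0rc2"

# "final" releases carry no suffix.
assert get_version((2, 22, 0, "final", 0)) == "2.22.0"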
18 changes: 10 additions & 8 deletions dev/check_changelog_fragments.py
@@ -6,17 +6,18 @@

REPO_ROOT = Path(__file__).resolve().parents[1]


def main():
    scriv_config = configparser.ConfigParser()
    scriv_config.read(REPO_ROOT / 'changelog.d/scriv.ini')
    scriv_config.read(REPO_ROOT / "changelog.d/scriv.ini")

    scriv_section = scriv_config['scriv']
    assert scriv_section['format'] == 'md'
    scriv_section = scriv_config["scriv"]
    assert scriv_section["format"] == "md"

    md_header_level = int(scriv_section['md_header_level'])
    md_header_prefix = '#' * md_header_level + '# '
    md_header_level = int(scriv_section["md_header_level"])
    md_header_prefix = "#" * md_header_level + "# "

    categories = {s.strip() for s in scriv_section['categories'].split(',')}
    categories = {s.strip() for s in scriv_section["categories"].split(",")}

    success = True

@@ -25,12 +26,12 @@ def complain(message):
        success = False
        print(f"{fragment_path.relative_to(REPO_ROOT)}:{line_index+1}: {message}", file=sys.stderr)

    for fragment_path in REPO_ROOT.glob('changelog.d/*.md'):
    for fragment_path in REPO_ROOT.glob("changelog.d/*.md"):
        with open(fragment_path) as fragment_file:
            for line_index, line in enumerate(fragment_file):
                if not line.startswith(md_header_prefix):
                    # The first line should be a header, and all headers should be of appropriate level.
                    if line_index == 0 or line.startswith('#'):
                    if line_index == 0 or line.startswith("#"):
                        complain(f"line should start with {md_header_prefix!r}")
                    continue

@@ -40,4 +41,5 @@ def complain(message):

    sys.exit(0 if success else 1)


main()
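
As a concrete illustration of the header check above (md_header_level = 2 is an assumption; the real value comes from changelog.d/scriv.ini):

md_header_level = 2  # assumed; read from scriv.ini in the real script
md_header_prefix = "#" * md_header_level + "# "

assert md_header_prefix == "### "
assert "### Fixed\n".startswith(md_header_prefix)     # valid fragment header
assert not "## Fixed\n".startswith(md_header_prefix)  # wrong level, would trigger complain()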