Merged
1 change: 1 addition & 0 deletions .github/workflows/unit-tests.yml
@@ -23,6 +23,7 @@ jobs:
       matrix:
         python-version:
           - "3.11"
+          - "3.12"
         django-version:
           - "pinned"
         # When updating the shards, remember to make the same changes in
38 changes: 20 additions & 18 deletions cms/djangoapps/contentstore/video_storage_handlers.py
@@ -13,12 +13,11 @@
 import shutil
 import pathlib
 import zipfile
+import boto3

 from contextlib import closing
 from datetime import datetime, timedelta
 from uuid import uuid4
-from boto.s3.connection import S3Connection
-from boto import s3
 from django.conf import settings
 from django.contrib.staticfiles.storage import staticfiles_storage
 from django.http import FileResponse, HttpResponseNotFound, StreamingHttpResponse
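For readers mapping the removed boto2 imports onto their boto3 counterparts, a rough correspondence (illustrative only, not part of this diff; the bucket and key names are made up):

import boto3

s3 = boto3.resource("s3")             # replaces boto.s3.connection.S3Connection
bucket = s3.Bucket("example-bucket")  # replaces conn.get_bucket(...)
obj = bucket.Object("path/to/key")    # replaces boto.s3.key.Key(bucket, name)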
@@ -826,7 +825,7 @@ def videos_post(course, request):
         return {'error': error_msg}, 400

     edx_video_id = str(uuid4())
-    key = storage_service_key(bucket, file_name=edx_video_id)
+    key_name = storage_service_key(bucket, file_name=edx_video_id)

     metadata_list = [
         ('client_video_id', file_name),
@@ -846,12 +845,20 @@
     if transcript_preferences is not None:
         metadata_list.append(('transcript_preferences', json.dumps(transcript_preferences)))

-    for metadata_name, value in metadata_list:
-        key.set_metadata(metadata_name, value)
-    upload_url = key.generate_url(
-        KEY_EXPIRATION_IN_SECONDS,
-        'PUT',
-        headers={'Content-Type': req_file['content_type']}
+    # Prepare metadata for presigned URL
+    metadata = dict(metadata_list)
+
+    # Generate presigned URL using boto3
+    s3_client = bucket.meta.client
+    upload_url = s3_client.generate_presigned_url(
+        'put_object',
+        Params={
+            'Bucket': bucket.name,
+            'Key': key_name,
+            'ContentType': req_file['content_type'],
+            'Metadata': metadata
+        },
+        ExpiresIn=KEY_EXPIRATION_IN_SECONDS
     )

     # persist edx_video_id in VAL
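A note on consuming the URL: because the metadata and content type are baked into the signature, the eventual PUT must repeat them as headers, each metadata pair under an x-amz-meta- prefix, or S3 rejects the request with a signature mismatch. A minimal client-side sketch, assuming the requests library and hypothetical argument names (not part of this PR):

import requests

def upload_via_presigned_url(upload_url, file_obj, content_type, metadata):
    # The signed headers must match exactly: the Content-Type plus every
    # metadata pair sent as an x-amz-meta-<name> header.
    headers = {'Content-Type': content_type}
    headers.update({f'x-amz-meta-{name}': value for name, value in metadata.items()})
    response = requests.put(upload_url, data=file_obj, headers=headers)
    response.raise_for_status()
    return response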
@@ -886,24 +893,19 @@ def storage_service_bucket():
         'aws_secret_access_key': settings.AWS_SECRET_ACCESS_KEY
     }

-    conn = S3Connection(**params)
+    conn = boto3.resource("s3", **params)

     # We don't need to validate our bucket, it requires a very permissive IAM permission
     # set since behind the scenes it fires a HEAD request that is equivalent to get_all_keys()
     # meaning it would need ListObjects on the whole bucket, not just the path used in each
     # environment (since we share a single bucket for multiple deployments in some configurations)
-    return conn.get_bucket(settings.VIDEO_UPLOAD_PIPELINE['VEM_S3_BUCKET'], validate=False)
+    return conn.Bucket(settings.VIDEO_UPLOAD_PIPELINE['VEM_S3_BUCKET'])


 def storage_service_key(bucket, file_name):
     """
-    Returns an S3 key to the given file in the given bucket.
+    Returns an S3 key name for the given file in the given bucket.
     """
-    key_name = "{}/{}".format(
+    return "{}/{}".format(
         settings.VIDEO_UPLOAD_PIPELINE.get("ROOT_PATH", ""),
         file_name
     )
-    return s3.key.Key(bucket, key_name)


 def send_video_status_update(updates):
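The existing comment about skipping bucket validation still holds under boto3, arguably more cleanly: Bucket() just builds a lazy resource handle and performs no HEAD request, so there is no validate=False to pass. A small sketch illustrating the behavior (bucket name hypothetical, not from this PR):

import boto3

s3 = boto3.resource("s3")
bucket = s3.Bucket("vem-uploads")  # no network call happens here

# A request is only issued once the handle is used, for example:
# bucket.meta.client.head_bucket(Bucket=bucket.name)  # raises ClientError if missing/forbidden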