author | Matthew Lemon <y@yulqen.org> | 2024-10-15 21:01:31 +0100 |
---|---|---|
committer | Matthew Lemon <y@yulqen.org> | 2024-10-15 21:01:31 +0100 |
commit | eeaddb27560d723ca7d61359744ceb2709fccd2d (patch) | |
tree | 04ddbc49ae7b73d5f5a9e1716d7227aecd3b9f85 /pyblackbird_cc/resources/s3.py | |
parent | 7a3044c859043837e6c7c95bb4894d04e9b2cbc2 (diff) | |
Renamed from pyblackbird_cc to alphabetlearning - everywhere
Diffstat (limited to 'pyblackbird_cc/resources/s3.py')
-rw-r--r-- | pyblackbird_cc/resources/s3.py | 94 |
1 file changed, 0 insertions, 94 deletions
diff --git a/pyblackbird_cc/resources/s3.py b/pyblackbird_cc/resources/s3.py
deleted file mode 100644
index 3693b55..0000000
--- a/pyblackbird_cc/resources/s3.py
+++ /dev/null
@@ -1,94 +0,0 @@
-import logging
-from pathlib import Path
-from typing import Sequence
-
-import boto3
-from boto3 import Session
-from botocore.exceptions import ClientError
-from django.conf import settings
-
-from pyblackbird_cc.resources.utils import _get_pdf_collection_type
-
-logger = logging.getLogger(__name__)
-
-
-def get_presigned_obj_url(bucket_name, obj_name, expiration=3600) -> str | None:
-    client = boto3.client(
-        "s3",
-        endpoint_url=settings.AWS_S3_ENDPOINT_URL,
-        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
-        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
-        region_name=settings.AWS_S3_REGION_NAME,
-    )
-    try:
-        response = client.generate_presigned_url(
-            "get_object",
-            Params={"Bucket": bucket_name, "Key": obj_name},
-            ExpiresIn=expiration,
-        )
-    except ClientError as e:
-        logger.exception("Error generating presigned URL", extra={"error": e})
-        return None
-    return response
-
-
-def get_s3_client() -> Session.client:
-    return boto3.Session().client(
-        "s3",
-        endpoint_url=settings.AWS_S3_ENDPOINT_URL,
-        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
-        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
-        region_name=settings.AWS_S3_REGION_NAME,
-    )
-
-
-def upload_files_to_s3(files: Sequence, dir_name: str) -> None:
-    """
-    Generic upload function. Pass "thumbnails" or "pdfuploads" as dir_name to
-    dictate the type of file to upload.
-    :param files:
-    :param dir_name:
-    :return:
-    """
-    s3_client = get_s3_client()
-    for file in files:
-        logging.info(f"Uploading {file.name} to S3")
-        s3_client.upload_fileobj(file, settings.AWS_STORAGE_BUCKET_NAME, f"{dir_name}/{file.name}")
-
-
-def upload_snapshotted_pages_to_s3(snapshotted_pages) -> bool:
-    s3_client = get_s3_client()
-    collection_type = _get_pdf_collection_type(snapshotted_pages)
-    if collection_type in ["SINGLE_PDF_SINGLE_PAGE", "SINGLE_PDF_MULTI_PAGE"]:
-        for img in snapshotted_pages[0]:
-            logging.info(f"Uploading {img} to S3")
-            s3_client.upload_file(
-                img, settings.AWS_STORAGE_BUCKET_NAME, f"snapshotted_pages/{Path(img).name}"
-            )
-        return True
-    if collection_type in ["MULTI_PDF_SINGLE_PAGE", "MULTI_PDF_MULTI_PAGE"]:
-        for pdf in snapshotted_pages:
-            for img in pdf:
-                logging.info(f"Uploading {img} to S3")
-                s3_client.upload_file(
-                    img, settings.AWS_STORAGE_BUCKET_NAME, f"snapshotted_pages/{Path(img).name}"
-                )
-        return True
-    return False
-
-
-def upload_to_s3(pdf_files, thumbnail_files, snapshotted_pages) -> bool:
-    """
-
-    :param pdf_files: a list of PDF files
-    :param thumbnail_files: a list of thumbnail files
-    :param snapshotted_pages: a list of snapshotted pages
-    :return: True if the files was uploaded, False otherwise
-    """
-    try:
-        upload_files_to_s3(pdf_files, dir_name="pdfuploads")
-        upload_files_to_s3(thumbnail_files, dir_name="thumbnails")
-        return upload_snapshotted_pages_to_s3(snapshotted_pages)
-    except ClientError:
-        logging.exception("Error uploading files to S3")
-        return False
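For context, the deleted module is a thin boto3 wrapper driven by Django settings: `upload_to_s3` stores objects under `pdfuploads/`, `thumbnails/` and `snapshotted_pages/` keys and returns a bool, while `get_presigned_obj_url` hands back a time-limited download URL. Below is a minimal usage sketch, not part of this commit: the `alphabetlearning.resources.s3` import path is an assumption based on the rename in the commit message (the new location is not shown in this diff), and `publish_resource` plus its arguments are hypothetical.

```python
# Hypothetical sketch of how the deleted helpers are typically called.
# Assumes the module was moved to alphabetlearning/resources/s3.py by the
# rename commit; that path is an assumption, not shown in this diff.
from django.conf import settings

from alphabetlearning.resources.s3 import (  # assumed post-rename import path
    get_presigned_obj_url,
    upload_to_s3,
)


def publish_resource(pdf_files, thumbnail_files, snapshotted_pages):
    """Upload a resource's files, then return a temporary URL for the first PDF."""
    if not upload_to_s3(pdf_files, thumbnail_files, snapshotted_pages):
        return None  # upload failed; errors are already logged by the helpers
    # upload_files_to_s3 keys PDFs as "pdfuploads/<file name>", so presign that key.
    return get_presigned_obj_url(
        settings.AWS_STORAGE_BUCKET_NAME,
        f"pdfuploads/{pdf_files[0].name}",
        expiration=3600,
    )
```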