aboutsummaryrefslogtreecommitdiffstats
path: root/alphabetlearning/resources/s3.py
diff options
context:
space:
mode:
Diffstat (limited to 'alphabetlearning/resources/s3.py')
-rw-r--r--  alphabetlearning/resources/s3.py  94
1 file changed, 94 insertions(+), 0 deletions(-)
diff --git a/alphabetlearning/resources/s3.py b/alphabetlearning/resources/s3.py
new file mode 100644
index 0000000..af367e2
--- /dev/null
+++ b/alphabetlearning/resources/s3.py
@@ -0,0 +1,94 @@
import logging
from pathlib import Path
from typing import Sequence

import boto3
from boto3 import Session
from botocore.client import BaseClient
from botocore.exceptions import ClientError
from django.conf import settings

from alphabetlearning.resources.utils import _get_pdf_collection_type
+
+logger = logging.getLogger(__name__)
+
+
def get_presigned_obj_url(bucket_name: str, obj_name: str, expiration: int = 3600) -> str | None:
    """Return a presigned GET URL for an S3 object, or None on failure.

    :param bucket_name: name of the S3 bucket containing the object
    :param obj_name: key of the object within the bucket
    :param expiration: URL lifetime in seconds (default one hour)
    :return: the presigned URL string, or None if generation failed
    """
    # Reuse the shared factory instead of duplicating the endpoint and
    # credential wiring that get_s3_client() already centralises.
    client = get_s3_client()
    try:
        response = client.generate_presigned_url(
            "get_object",
            Params={"Bucket": bucket_name, "Key": obj_name},
            ExpiresIn=expiration,
        )
    except ClientError as e:
        logger.exception("Error generating presigned URL", extra={"error": e})
        return None
    return response
+
+
def get_s3_client() -> BaseClient:
    """Build a low-level S3 client from the Django settings.

    All AWS configuration (endpoint, credentials, region) comes from
    ``django.conf.settings``.

    :return: a configured boto3 S3 client
    """
    # NOTE: the previous annotation was ``Session.client`` — a bound method,
    # not a type. ``BaseClient`` is the class boto3 actually returns.
    return boto3.Session().client(
        "s3",
        endpoint_url=settings.AWS_S3_ENDPOINT_URL,
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
        region_name=settings.AWS_S3_REGION_NAME,
    )
+
+
def upload_files_to_s3(files: Sequence, dir_name: str) -> None:
    """
    Generic upload function. Pass "thumbnails" or "pdfuploads" as dir_name to
    dictate the type of file to upload.

    :param files: file-like objects exposing a ``name`` attribute
        (e.g. Django uploaded files)
    :param dir_name: destination key prefix ("directory") in the bucket
    :return: None
    """
    s3_client = get_s3_client()
    for file in files:
        # Use the module logger (not the root logger via ``logging.info``)
        # and lazy %-formatting, consistent with ``logger`` defined above.
        logger.info("Uploading %s to S3", file.name)
        s3_client.upload_fileobj(
            file, settings.AWS_STORAGE_BUCKET_NAME, f"{dir_name}/{file.name}"
        )
+
+
def upload_snapshotted_pages_to_s3(snapshotted_pages) -> bool:
    """Upload snapshotted page images under the ``snapshotted_pages/`` prefix.

    :param snapshotted_pages: nested sequence of image file paths, one inner
        sequence per source PDF
    :return: True if the collection type was recognised and every image was
        uploaded, False for an unknown collection type
    """
    s3_client = get_s3_client()
    collection_type = _get_pdf_collection_type(snapshotted_pages)
    # Normalise both layouts to a list of per-PDF page groups so a single
    # loop replaces the two duplicated upload loops.
    if collection_type in ("SINGLE_PDF_SINGLE_PAGE", "SINGLE_PDF_MULTI_PAGE"):
        pdf_groups = [snapshotted_pages[0]]
    elif collection_type in ("MULTI_PDF_SINGLE_PAGE", "MULTI_PDF_MULTI_PAGE"):
        pdf_groups = snapshotted_pages
    else:
        return False
    for pages in pdf_groups:
        for img in pages:
            # Module logger with lazy %-args, not the root logger.
            logger.info("Uploading %s to S3", img)
            s3_client.upload_file(
                img, settings.AWS_STORAGE_BUCKET_NAME, f"snapshotted_pages/{Path(img).name}"
            )
    return True
+
+
def upload_to_s3(pdf_files, thumbnail_files, snapshotted_pages) -> bool:
    """Upload PDFs, thumbnails and page snapshots in one pass.

    :param pdf_files: a list of PDF files
    :param thumbnail_files: a list of thumbnail files
    :param snapshotted_pages: a list of snapshotted pages
    :return: True if the files were uploaded, False otherwise
    """
    try:
        upload_files_to_s3(pdf_files, dir_name="pdfuploads")
        upload_files_to_s3(thumbnail_files, dir_name="thumbnails")
        return upload_snapshotted_pages_to_s3(snapshotted_pages)
    except ClientError:
        # Use the module logger, consistent with the rest of the module;
        # ``exception`` records the traceback automatically.
        logger.exception("Error uploading files to S3")
        return False