diff --git a/integration_tests/test_downloading.py b/integration_tests/test_downloading.py
index e9402bbc..e77feea4 100644
--- a/integration_tests/test_downloading.py
+++ b/integration_tests/test_downloading.py
@@ -70,9 +70,6 @@ def test_that_downloader_correctly_downloads_file_and_updates_database(
         )
     ).is_between(before_invocation, after_invocation)
 
-    today_str = now.strftime("%Y-%m-%d")
     bucket_objects = list(upload_bucket.objects.all())
     assert_that(bucket_objects).is_length(1)
-    assert_that(bucket_objects[0].key).is_equal_to(
-        f"{today_str}/integration-test-filename.SAFE"
-    )
+    assert_that(bucket_objects[0].key).is_equal_to("integration-test-filename.zip")
diff --git a/lambdas/downloader/handler.py b/lambdas/downloader/handler.py
index a9ce6f89..0e86e9f7 100644
--- a/lambdas/downloader/handler.py
+++ b/lambdas/downloader/handler.py
@@ -44,7 +44,7 @@ def handler(event, context):
     LOGGER.info(f"Received event to download image: {image_filename}")
 
     try:
-        granule = get_granule(image_id)
+        get_granule(image_id)
     except GranuleNotFoundException:
         return
     except GranuleAlreadyDownloadedException:
@@ -58,7 +58,6 @@ def handler(event, context):
             image_id,
             image_filename,
             download_url,
-            granule.beginposition,
         )
 
         LOGGER.info(f"Successfully downloaded image: {image_filename}")
@@ -178,7 +177,6 @@ def download_file(
     image_id: str,
     image_filename: str,
     download_url: str,
-    begin_position: datetime,
 ):
     """
     For a given image of id `image_id` and download location of `download_url`, make
@@ -192,8 +190,6 @@
         `granule` table
     :param download_url: str representing the SciHub URL to request the images file
         from
-    :param begin_position: datetime representing the begin_position of the image in the
-        `granule` table
     """
     session_maker = get_session_maker()
     with get_session(session_maker) as db:
@@ -204,14 +200,14 @@
         aws_checksum = generate_aws_checksum(image_checksum)
 
-        begin_position_str = begin_position.strftime("%Y-%m-%d")
-
         s3_client = get_s3_client()
         upload_bucket = os.environ["UPLOAD_BUCKET"]
+        root, ext = os.path.splitext(image_filename)
+        zip_key = f"{root}.zip"
         s3_client.put_object(
             Body=response.raw.read(),
             Bucket=upload_bucket,
-            Key=f"{begin_position_str}/{image_filename}",
+            Key=f"{zip_key}",
             ContentMD5=aws_checksum,
         )
diff --git a/lambdas/downloader/tests/test_downloader_handler.py b/lambdas/downloader/tests/test_downloader_handler.py
index 1f50b88e..54712e9b 100644
--- a/lambdas/downloader/tests/test_downloader_handler.py
+++ b/lambdas/downloader/tests/test_downloader_handler.py
@@ -216,7 +216,10 @@ def test_that_download_file_correctly_raises_exception_if_request_fails(
 
     with pytest.raises(FailedToDownloadFileException) as ex:
         download_file(
-            "ACHECKSUM", "test-id", "test-filename.SAFE", download_url, datetime.now()
+            "ACHECKSUM",
+            "test-id",
+            "test-filename.SAFE",
+            download_url,
         )
     assert_that(str(ex.value)).is_equal_to(
         (
@@ -260,7 +263,6 @@ def put_object(self, **args):
             "test-id",
             "test-filename.SAFE",
             download_url,
-            datetime.now(),
         )
     assert_that(str(ex.value)).is_equal_to(
         (
@@ -308,7 +310,6 @@ def test_that_download_file_correctly_raises_exception_if_db_update_fails(
             "test-id",
             "test-filename.SAFE",
             download_url,
-            datetime.now(),
         )
     assert_that(str(ex.value)).is_equal_to(
         (
@@ -356,15 +357,13 @@ def test_that_download_file_correctly_uploads_file_to_s3_and_updates_db(
     )
     patched_generate_aws_checksum.return_value = "an-aws-checksum"
 
-    download_file(
-        "ACHECKSUM", "test-id", "test-filename.SAFE", download_url, datetime.now()
-    )
+    download_file("ACHECKSUM", "test-id", "test-filename.SAFE", download_url)
 
     patched_generate_aws_checksum.assert_called_once_with("ACHECKSUM")
 
     bucket_objects = list(mock_s3_bucket.objects.all())
     assert_that(bucket_objects).is_length(1)
-    assert_that(bucket_objects[0].key).is_equal_to("2020-01-01/test-filename.SAFE")
+    assert_that(bucket_objects[0].key).is_equal_to("test-filename.zip")
     bucket_object_content = bucket_objects[0].get()["Body"].read().decode("utf-8")
     assert_that(bucket_object_content).contains("THIS IS A FAKE SAFE FILE")
 
@@ -873,7 +872,7 @@ def test_that_handler_correctly_downloads_file_and_updates_granule(
 
     bucket_objects = list(mock_s3_bucket.objects.all())
     assert_that(bucket_objects).is_length(1)
-    assert_that(bucket_objects[0].key).is_equal_to("2020-02-02/test-filename")
+    assert_that(bucket_objects[0].key).is_equal_to("test-filename.zip")
     bucket_object_content = bucket_objects[0].get()["Body"].read().decode("utf-8")
     assert_that(bucket_object_content).contains("THIS IS A FAKE SAFE FILE")
 
@@ -938,7 +937,7 @@ def test_that_handler_correctly_downloads_file_and_updates_granule_using_inthub2
     db_session.add(
         Granule(
             id="test-id",
-            filename="test-filename",
+            filename="test-filename.SAFE",
             tileid="NM901",
             size=100,
             beginposition=datetime.now(),
@@ -958,7 +957,7 @@
 
     bucket_objects = list(mock_s3_bucket.objects.all())
     assert_that(bucket_objects).is_length(1)
-    assert_that(bucket_objects[0].key).is_equal_to("2020-02-02/test-filename")
+    assert_that(bucket_objects[0].key).is_equal_to("test-filename.zip")
     bucket_object_content = bucket_objects[0].get()["Body"].read().decode("utf-8")
     assert_that(bucket_object_content).contains("THIS IS A FAKE SAFE FILE")