Versioning test (#40)
* Add new markers for bucket_versioning and bucket_versioning_cli

* Create tests for versioning

* add some docs to versioning tests

* update mgc_path to accept params.example.yaml without mgc_path

* create versioning cli tests

* update pull-request-test.yml to run versioning tests

* fix category name of versioning tests

* change if cmd[0] == "mgc" to if cmd[0] != "mgc"

* change assertions to use parametrize and add cli class

* add cli class to locking_cli_test

* Add cli mark

* Improve some of the docs

* update mgc

* update mgc_path to look up the mgc path if mgc_path isn't in params.yaml

* fix mgc_attribute and improve the docs
luizantonio26 authored Dec 18, 2024
1 parent 8aeb6cf commit 8bae286
Showing 10 changed files with 269 additions and 19 deletions.
1 change: 1 addition & 0 deletions .github/workflows/pull-request-test.yml
@@ -14,6 +14,7 @@ jobs:
      - cold_storage
      - basic
      - presign
      - bucket_versioning
      # - locking
      # - policy
    uses: ./.github/workflows/run-tests.yml
8 changes: 7 additions & 1 deletion .github/workflows/run-tests.yml
@@ -32,7 +32,13 @@ jobs:

      - name: Install the project
        run: uv sync --no-dev

      - name: Install MGC
        run: |
          curl -Lo mgc.tar.gz "https://github.com/MagaluCloud/mgccli/releases/download/v0.31.0/mgccli_0.31.0_linux_amd64.tar.gz"
          tar xzvf mgc.tar.gz
          rm mgc.tar.gz
          cp "./mgc" /usr/local/bin/mgc
      - name: Configure Profiles
        run: |
          echo "${{ secrets.PROFILES }}" > profiles.yaml
50 changes: 40 additions & 10 deletions docs/cold_storage_test.py
@@ -1,10 +1,27 @@
# ---
# jupyter:
# jupytext:
# cell_metadata_json: true
# notebook_metadata_filter: language_info
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.5'
# jupytext_version: 1.16.5
# kernelspec:
# name: s3-specs
# display_name: S3 Specs
# display_name: Python 3 (ipykernel)
# language: python
# name: python3
# language_info:
# codemirror_mode:
# name: ipython
# version: 3
# file_extension: .py
# mimetype: text/x-python
# name: python
# nbconvert_exporter: python
# pygments_lexer: ipython3
# version: 3.12.7
# ---

# # Cold Storage
@@ -19,9 +36,8 @@
# which offers a lower storage cost and a higher access cost. See the Pricing page
# to compare the different storage classes.

# + tags=["parameters"]
# + {"tags": ["parameters"]}
config = "../params/br-se1.yaml"
# -

# + {"jupyter": {"source_hidden": true}}
import logging
@@ -33,17 +49,19 @@

# ## Examples

# + tags=["parameters"]
# + {"tags": ["parameters"]}
config = "../params/aws-east-1.yaml"
# -

# ### Uploading an object using boto3
#
# Magalu Cloud supports the Python boto3 library for manipulating buckets and their objects.
# The example below demonstrates how to upload a new object to a bucket already configured with the cold storage class.
# boto3 accepts only a few specific classes as a parameter, such as "STANDARD", "GLACIER_IR", etc.
#
# On Magalu Cloud, the cold storage class is called **COLD_INSTANT**, but, for compatibility with boto3, the **GLACIER_IR** class is used to specify an object with the cold class.

# +
def test_boto_upload_object_with_cold_storage_class(s3_client, existing_bucket_name):
    bucket_name = existing_bucket_name
    object_key = "cold_file.txt"
@@ -66,12 +84,15 @@ def test_boto_upload_object_with_cold_storage_class(s3_client, existing_bucket_n
    assert storage_class == "GLACIER_IR" or storage_class == "COLD_INSTANT", "Expected StorageClass GLACIER_IR or COLD_INSTANT"

run_example(__name__, "test_boto_upload_object_with_cold_storage_class", config=config)
# -
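The hunk above collapses the body of this test, including the upload call itself. As a minimal sketch (assuming a configured boto3 client; the bucket and key names are illustrative, not the fixture values), the elided upload presumably looks like this:

import boto3

s3 = boto3.client("s3")  # assumes credentials and endpoint are configured elsewhere

# Upload a new object requesting the cold storage class; GLACIER_IR is the
# value boto3 accepts, which Magalu Cloud maps to COLD_INSTANT.
s3.put_object(
    Bucket="my-bucket",        # illustrative name
    Key="cold_file.txt",
    Body=b"file content",
    StorageClass="GLACIER_IR",
)

# head_object reports the storage class of the stored object.
head = s3.head_object(Bucket="my-bucket", Key="cold_file.txt")
print(head.get("StorageClass"))  # expected: GLACIER_IR or COLD_INSTANT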

# ### Changing the class of an existing object
#
# Besides uploading a new object with the class, it is also possible to change the storage class
# of an existing object using boto3's copy_object function. This is done by copying an object onto itself
# (same object key) while passing a different value for the StorageClass argument, as shown in the example below.

# +
def test_boto_change_object_class_to_cold_storage(s3_client, bucket_with_one_object):
    bucket_name, object_key, _ = bucket_with_one_object

@@ -90,12 +111,15 @@ def test_boto_change_object_class_to_cold_storage(s3_client, bucket_with_one_obj
    assert storage_class == "GLACIER_IR" or storage_class == "COLD_INSTANT", "Expected StorageClass GLACIER_IR or COLD_INSTANT"

run_example(__name__, "test_boto_change_object_class_to_cold_storage", config=config)
# -
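The collapsed hunk hides the copy itself; a minimal sketch of such a call (assuming a configured boto3 client and illustrative names):

import boto3

s3 = boto3.client("s3")  # assumes a configured client

# Copy the object onto itself (same key), changing only the storage class.
s3.copy_object(
    Bucket="my-bucket",  # illustrative name
    Key="file.txt",
    CopySource={"Bucket": "my-bucket", "Key": "file.txt"},
    StorageClass="GLACIER_IR",
)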

# ### Uploading an object with custom metadata and ACLs
#
# Uploading an object with a storage class is no different from a regular upload
# and accepts all the other possible attributes. The example below demonstrates the upload of an object
# with custom metadata, ACLs, and the cold storage class.

# +
def test_boto_object_with_custom_metadata_acls_and_storage_class(s3_client, existing_bucket_name):
    bucket_name = existing_bucket_name
    object_key = "cold_file.txt"
@@ -136,13 +160,16 @@ def test_boto_object_with_custom_metadata_acls_and_storage_class(s3_client, exis
    assert 'READ' == acl.get('Permission'), "Expected acl permission to be READ"

run_example(__name__, "test_boto_object_with_custom_metadata_acls_and_storage_class", config=config)
# -
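Since the hunk collapses the upload call, here is a minimal sketch of an upload combining custom metadata, a canned ACL, and the cold class (configured client and illustrative names assumed):

import boto3

s3 = boto3.client("s3")  # assumes a configured client

s3.put_object(
    Bucket="my-bucket",              # illustrative name
    Key="cold_file.txt",
    Body=b"content",
    Metadata={"origin": "example"},  # hypothetical custom metadata
    ACL="public-read",               # canned ACL granting READ to all users
    StorageClass="GLACIER_IR",
)

# get_object_acl should then list a READ grant among the object's ACLs.
acl = s3.get_object_acl(Bucket="my-bucket", Key="cold_file.txt")
print(acl["Grants"])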

# ### Listing objects
#
# In boto3, the list_objects_v2 function lists the objects of a bucket and, along with each object,
# returns some information about it, one item being the StorageClass. Although the COLD_INSTANT class
# cannot be used in put_object or copy_object calls against Magalu Cloud, when fetching
# an object or its information from Magalu Cloud, the class returned is COLD_INSTANT.

# +
def test_boto_list_objects_with_cold_storage_class(s3_client, bucket_with_one_storage_class_cold_object):
    bucket_name, _, _ = bucket_with_one_storage_class_cold_object

@@ -159,10 +186,13 @@ def test_boto_list_objects_with_cold_storage_class(s3_client, bucket_with_one_st
    assert obj_storage_class == 'COLD_INSTANT' or obj_storage_class == 'GLACIER_IR', "Expected GLACIER_IR or COLD_INSTANT as Storage Class"

run_example(__name__, "test_boto_multipart_upload_with_cold_storage_class", config=config)
# -
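A minimal sketch of such a listing (assuming a configured boto3 client; the bucket name is illustrative):

import boto3

s3 = boto3.client("s3")  # assumes a configured client

response = s3.list_objects_v2(Bucket="my-bucket")  # illustrative name
for obj in response.get("Contents", []):
    # On Magalu Cloud, cold objects are listed as COLD_INSTANT even when
    # they were uploaded with StorageClass="GLACIER_IR".
    print(obj["Key"], obj.get("StorageClass"))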

# ### Multipart Upload with the Cold Storage Class
#
# Another possibility is to perform a multipart upload using the cold storage class.

# +
def test_boto_multipart_upload_with_cold_storage_class(s3_client, existing_bucket_name, create_multipart_object_files):
    bucket_name = existing_bucket_name

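The rest of this test is collapsed above; a minimal sketch of a multipart upload with the cold class (configured client and illustrative names assumed; real parts must be at least 5 MB except the last):

import boto3

s3 = boto3.client("s3")  # assumes a configured client
bucket, key = "my-bucket", "big_cold_file.bin"  # illustrative names

# The storage class is set once, when the multipart upload is created.
mpu = s3.create_multipart_upload(Bucket=bucket, Key=key, StorageClass="GLACIER_IR")

# A single small part, for illustration only.
part = s3.upload_part(
    Bucket=bucket, Key=key,
    PartNumber=1, UploadId=mpu["UploadId"],
    Body=b"part data",
)

s3.complete_multipart_upload(
    Bucket=bucket, Key=key,
    UploadId=mpu["UploadId"],
    MultipartUpload={"Parts": [{"ETag": part["ETag"], "PartNumber": 1}]},
)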
10 changes: 8 additions & 2 deletions docs/conftest.py
@@ -5,6 +5,8 @@
import yaml
import logging
import subprocess
import shutil

from s3_helpers import (
    generate_unique_bucket_name,
    delete_bucket_and_wait,
@@ -57,8 +59,12 @@ def mgc_path(default_profile):
"""
Validates and returns the path to the 'mgc' binary.
"""
spec_dir = os.path.dirname(get_spec_path())
path = os.path.join(spec_dir, default_profile.get("mgc_path", "mgc"))
mgc_path_field_name = "mgc_path"
if not default_profile.get(mgc_path_field_name):
path = shutil.which("mgc")
else:
spec_dir = os.path.dirname(get_spec_path())
path = os.path.join(spec_dir, default_profile.get(mgc_path_field_name))
if not os.path.isfile(path):
pytest.fail(f"The specified mgc_path '{path}' (absolute: {os.path.abspath(path)}) does not exist or is not a file.")
return path
24 changes: 20 additions & 4 deletions docs/list-buckets_test.py
@@ -1,20 +1,36 @@
# ---
# jupyter:
# jupytext:
# cell_metadata_json: true
# notebook_metadata_filter: language_info
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.5'
# jupytext_version: 1.16.5
# kernelspec:
# name: s3-specs
# display_name: S3 Specs
# display_name: Python 3 (ipykernel)
# language: python
# name: python3
# language_info:
# codemirror_mode:
# name: ipython
# version: 3
# file_extension: .py
# mimetype: text/x-python
# name: python
# nbconvert_exporter: python
# pygments_lexer: ipython3
# version: 3.12.7
# ---

# # List buckets
#
# Lists the buckets of a profile[<sup>1</sup>](../glossary#profile)


# + tags=["parameters"]
# + {"tags": ["parameters"]}
config = "../params/br-ne1.yaml"
# -

# + {"jupyter": {"source_hidden": true}}
import pytest
2 changes: 1 addition & 1 deletion docs/locking_cli_test.py
@@ -28,7 +28,7 @@
from s3_helpers import run_example, get_spec_path
from datetime import datetime, timedelta, timezone
# -
pytestmark = pytest.mark.locking
pytestmark = [pytest.mark.locking, pytest.mark.cli]

# ## Exemplos

68 changes: 68 additions & 0 deletions docs/versioning_cli_test.py
@@ -0,0 +1,68 @@
# + {"jupyter": {"source_hidden": true}}
import logging
import pytest
from s3_helpers import run_example
from botocore.exceptions import ClientError
from shlex import split, quote
import subprocess

config = "../params/br-se1.yaml"


# + {"jupyter": {"source_hidden": true}}
pytestmark = [pytest.mark.bucket_versioning, pytest.mark.cli]


commands = [
    ("mgc object-storage objects delete {bucket_name}/{object_key} --no-confirm", ""),
    ("aws --profile {profile_name} s3 rm s3://{bucket_name}/{object_key}", "delete: s3://{bucket_name}/{object_key}\n"),
    ("rclone delete {profile_name}:{bucket_name}/{object_key}", "")
]

@pytest.mark.parametrize("cmd_template, expected", commands)
def test_delete_object_with_versions(cmd_template, expected, s3_client, versioned_bucket_with_one_object, profile_name, active_mgc_workspace):
    bucket_name, object_key, _ = versioned_bucket_with_one_object

    # Add a second version of this object
    s3_client.put_object(
        Bucket=bucket_name,
        Key=object_key,
        Body=b"second version of this object"
    )

    cmd = split(cmd_template.format(bucket_name=bucket_name, profile_name=profile_name, object_key=object_key))
    result = subprocess.run(cmd, capture_output=True, text=True)

    assert result.returncode == 0, f"Command failed with error: {result.stderr}"
    logging.info(f"Output from {cmd_template}: {result.stdout}")

    assert result.stdout == expected.format(bucket_name=bucket_name, object_key=object_key)

run_example(__name__, "test_delete_object_with_versions", config=config)

commands = [
    ("mgc object-storage buckets delete {bucket_name} --no-confirm --recursive --raw", "the bucket may not be empty"),
    ("aws --profile {profile_name} s3 rb s3://{bucket_name}", "BucketNotEmpty"),
    ("rclone rmdir {profile_name}:{bucket_name}", "BucketNotEmpty")
]

@pytest.mark.parametrize("cmd_template, expected", commands)
def test_delete_bucket_with_objects_with_versions(cmd_template, expected, s3_client, versioned_bucket_with_one_object, profile_name, active_mgc_workspace):
    bucket_name, object_key, _ = versioned_bucket_with_one_object

    s3_client.put_object(
        Bucket=bucket_name,
        Key=object_key,
        Body=b"v2"
    )

    cmd = split(cmd_template.format(bucket_name=bucket_name, profile_name=profile_name, object_key=object_key))
    result = subprocess.run(cmd, capture_output=True, text=True)

    assert result.returncode != 0, f"Expected the command to fail, but it succeeded with output: {result.stdout}"
    logging.info(f"Output from {cmd_template}: {result.stdout}")
    assert expected in result.stderr

run_example(__name__, "test_delete_bucket_with_objects_with_versions", config=config)
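These bucket deletions fail because, in a versioned bucket, a plain object delete only adds a delete marker; the older versions remain stored. A minimal boto3 sketch to inspect what is left behind (assuming a configured client; the bucket name is illustrative and this is not part of the test file):

import boto3

s3 = boto3.client("s3")  # assumes a configured client

versions = s3.list_object_versions(Bucket="my-versioned-bucket")  # illustrative name
for v in versions.get("Versions", []):
    print("version:", v["Key"], v["VersionId"])
for m in versions.get("DeleteMarkers", []):
    print("delete marker:", m["Key"], m["VersionId"])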