File: test_helper.py

package info (click to toggle)
aws-crt-python 0.16.8%2Bdfsg-1
  • links: PTS, VCS
  • area: main
  • in suites: bookworm
  • size: 78,328 kB
  • sloc: ansic: 330,743; python: 18,949; makefile: 6,271; sh: 3,712; asm: 754; cpp: 699; ruby: 208; java: 77; perl: 73; javascript: 46; xml: 11
file content (142 lines) | stat: -rw-r--r-- 4,684 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0.
import argparse
import boto3
import botocore
import sys
import os


# Shared boto3 handles: the resource API is used for bulk bucket operations
# (see cleanup), the low-level client for individual object/bucket calls.
s3 = boto3.resource('s3')
s3_client = boto3.client('s3')


# NOTE(review): created but never used in the visible code — presumably kept
# for account-level S3 Control operations; confirm before removing.
s3_control_client = boto3.client('s3control')


# Private bucket holding the pre-existing test objects.
BUCKET_NAME = 'aws-c-s3-test-bucket'
# Create a public bucket with one object for testing public access
PUBLIC_BUCKET_NAME = 'aws-c-s3-test-bucket-public'

# All test buckets live in this region; exported so boto3 resolves it by
# default for any client created without an explicit region.
REGION = 'us-west-2'
os.environ['AWS_DEFAULT_REGION'] = REGION


# Size constants in bytes, used to build object payloads below.
MB = 1024*1024
GB = 1024*1024*1024


def create_bytes(size):
    """Return a bytearray of ``size`` bytes, each set to 0x01.

    Uses bytearray repetition rather than ``bytearray([1] * size)``, which
    first materialized a throwaway Python list of ``size`` boxed ints.
    """
    return bytearray([1]) * size


def put_pre_existing_objects(size, keyname, bucket=BUCKET_NAME, sse=None, public_read=False):
    """Upload a test object of ``size`` bytes to ``bucket`` under ``keyname``.

    Args:
        size: payload size in bytes; 0 uploads an empty object.
        keyname: S3 object key.
        bucket: target bucket; defaults to the private test bucket.
        sse: server-side encryption mode — None, 'aes256' (SSE-S3 AES256),
            'aes256-c' (SSE-C with a random customer key), or 'kms'
            (SSE-KMS with the account's default S3 key alias).
        public_read: if True, apply the 'public-read' canned ACL.
    """
    if size == 0:
        # An empty object needs no Body argument.
        s3_client.put_object(Bucket=bucket, Key=keyname)
        return

    body = create_bytes(size)
    args = {'Bucket': bucket, 'Key': keyname, 'Body': body}
    if sse == 'aes256':
        args['ServerSideEncryption'] = 'AES256'
    elif sse == 'aes256-c':
        # SSE-C uses a random customer-provided key. The key is intentionally
        # not persisted: tests only need the object to exist, not to read it.
        random_key = os.urandom(32)
        args['SSECustomerKey'] = random_key
        args['SSECustomerAlgorithm'] = 'AES256'
    elif sse == 'kms':
        args['ServerSideEncryption'] = 'aws:kms'
        args['SSEKMSKeyId'] = 'alias/aws/s3'

    if public_read:
        args['ACL'] = 'public-read'

    s3_client.put_object(**args)
    # Consistency fix: every other status message in this script is written
    # to stderr; this one alone previously went to stdout.
    print(f"Object {keyname} uploaded", file=sys.stderr)


def create_bucket_with_lifecycle():
    """Create the private test bucket, attach a cleanup lifecycle, and seed it.

    The lifecycle rule expires non-pre-existing objects (prefix 'upload/')
    and aborts stale multipart uploads after one day, so test runs don't
    accumulate garbage. If the bucket already exists, seeding is skipped;
    any other client error propagates to the caller.
    """
    try:
        # Create the bucket. This returns an error if the bucket already exists.
        s3_client.create_bucket(
            Bucket=BUCKET_NAME, CreateBucketConfiguration={'LocationConstraint': REGION})
        s3_client.put_bucket_lifecycle_configuration(
            Bucket=BUCKET_NAME,
            LifecycleConfiguration={
                'Rules': [
                    {
                        'ID': 'clean up non-pre-existing objects',
                        'Expiration': {
                            'Days': 1,
                        },
                        'Filter': {
                            'Prefix': 'upload/',
                        },
                        'Status': 'Enabled',
                        'NoncurrentVersionExpiration': {
                            'NoncurrentDays': 1,
                        },
                        'AbortIncompleteMultipartUpload': {
                            'DaysAfterInitiation': 1,
                        },
                    },
                ],
            },
        )
        print(f"Bucket {BUCKET_NAME} created", file=sys.stderr)
        put_pre_existing_objects(
            10*MB, 'pre-existing-10MB-aes256-c', sse='aes256-c')
        put_pre_existing_objects(
            10*MB, 'pre-existing-10MB-aes256', sse='aes256')
        put_pre_existing_objects(
            10*MB, 'pre-existing-10MB-kms', sse='kms')
        put_pre_existing_objects(10*MB, 'pre-existing-10MB')
        put_pre_existing_objects(1*MB, 'pre-existing-1MB')
        put_pre_existing_objects(0, 'pre-existing-empty')

    except botocore.exceptions.ClientError as e:
        # The bucket already exists. That's fine.
        if e.response['Error']['Code'] in ('BucketAlreadyOwnedByYou', 'BucketAlreadyExists'):
            # Bug fix: this message previously reported PUBLIC_BUCKET_NAME,
            # copy-pasted from create_bucket_with_public_object.
            print(
                f"Bucket {BUCKET_NAME} not created, skip initializing.", file=sys.stderr)
            return
        raise e


def create_bucket_with_public_object():
    """Create the public test bucket and seed it with one public-read object.

    If the bucket already exists (owned by this account or globally), the
    seeding step is skipped; any other client error propagates.
    """
    try:
        s3_client.create_bucket(
            Bucket=PUBLIC_BUCKET_NAME,
            CreateBucketConfiguration={'LocationConstraint': REGION})
        print(f"Bucket {PUBLIC_BUCKET_NAME} created", file=sys.stderr)
        put_pre_existing_objects(
            1*MB, 'pre-existing-1MB', bucket=PUBLIC_BUCKET_NAME, public_read=True)
    except botocore.exceptions.ClientError as error:
        code = error.response['Error']['Code']
        if code not in ('BucketAlreadyOwnedByYou', 'BucketAlreadyExists'):
            raise error
        # The bucket already exists. That's fine.
        print(
            f"Bucket {PUBLIC_BUCKET_NAME} not created, skip initializing.", file=sys.stderr)


def cleanup(bucket_name):
    """Delete every object in ``bucket_name``, then the bucket itself."""
    target = s3.Bucket(bucket_name)
    # The bucket must be emptied before S3 will allow deleting it.
    target.objects.all().delete()
    s3_client.delete_bucket(Bucket=bucket_name)
    print(f"Bucket {bucket_name} deleted", file=sys.stderr)


def _main():
    """Parse the CLI action and initialize or tear down the test buckets."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'action',
        choices=['init', 'clean'],
        help='Initialize or clean up the test buckets')

    args = parser.parse_args()

    # The two choices are mutually exclusive, so elif is sufficient.
    if args.action == 'init':
        create_bucket_with_lifecycle()
        create_bucket_with_public_object()
    elif args.action == 'clean':
        cleanup(BUCKET_NAME)
        cleanup(PUBLIC_BUCKET_NAME)


if __name__ == '__main__':
    # Guard so importing this helper (e.g. from a test) does not parse argv
    # or touch AWS as a side effect; running it as a script is unchanged.
    _main()