AWS SDK for Python (Boto3)

Available in Classic and VPC

SDK for S3 API for Python

This example shows how to use Object Storage of NAVER Cloud Platform with the AWS SDK for Python (Boto3). The example is based on AWS Python SDK version 1.6.19.

Install SDK

pip install boto3==1.6.19

Example

Note

Enter your registered API authentication key information for the access_key and secret_key values used in the examples.
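
If you prefer not to hard-code credentials, they can be read from environment variables instead. A minimal sketch, assuming NCLOUD_ACCESS_KEY and NCLOUD_SECRET_KEY are variable names you define and export yourself:

import os

import boto3

# hypothetical variable names; export them in your shell before running
access_key = os.environ['NCLOUD_ACCESS_KEY']
secret_key = os.environ['NCLOUD_SECRET_KEY']

s3 = boto3.client('s3', endpoint_url='https://kr.object.ncloudstorage.com',
                  aws_access_key_id=access_key, aws_secret_access_key=secret_key)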

Create bucket

import boto3

service_name = 's3'
endpoint_url = 'https://kr.object.ncloudstorage.com'
region_name = 'kr-standard'
access_key = 'ACCESS_KEY'
secret_key = 'SECRET_KEY'

if __name__ == "__main__":
    s3 = boto3.client(service_name, endpoint_url=endpoint_url, aws_access_key_id=access_key,
                      aws_secret_access_key=secret_key)

    bucket_name = 'sample-bucket'

    s3.create_bucket(Bucket=bucket_name)
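
Bucket creation can fail, for example when the name is already in use. Catching botocore's ClientError is one way to handle this; a sketch, not part of the original example:

from botocore.exceptions import ClientError

try:
    s3.create_bucket(Bucket=bucket_name)
except ClientError as e:
    # e.response['Error']['Code'] holds the S3 error code, e.g. 'BucketAlreadyExists'
    print('create_bucket failed: %s' % e.response['Error']['Code'])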

View bucket list

import boto3

service_name = 's3'
endpoint_url = 'https://kr.object.ncloudstorage.com'
region_name = 'kr-standard'
access_key = 'ACCESS_KEY'
secret_key = 'SECRET_KEY'

if __name__ == "__main__":
    s3 = boto3.client(service_name, endpoint_url=endpoint_url, aws_access_key_id=access_key,
                      aws_secret_access_key=secret_key)

    response = s3.list_buckets()
    
    for bucket in response.get('Buckets', []):
        print(bucket.get('Name'))

Delete bucket

import boto3

service_name = 's3'
endpoint_url = 'https://kr.object.ncloudstorage.com'
region_name = 'kr-standard'
access_key = 'ACCESS_KEY'
secret_key = 'SECRET_KEY'

if __name__ == "__main__":
    s3 = boto3.client(service_name, endpoint_url=endpoint_url, aws_access_key_id=access_key,
                      aws_secret_access_key=secret_key)

    bucket_name = 'sample-bucket'

    s3.delete_bucket(Bucket=bucket_name)
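
In the S3 API, deleting a non-empty bucket fails. A minimal sketch that removes remaining objects first, assuming the bucket holds fewer objects than one list_objects page returns:

# empty the bucket, then delete it
response = s3.list_objects(Bucket=bucket_name)
for content in response.get('Contents', []):
    s3.delete_object(Bucket=bucket_name, Key=content['Key'])

s3.delete_bucket(Bucket=bucket_name)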

Upload files

import boto3

service_name = 's3'
endpoint_url = 'https://kr.object.ncloudstorage.com'
region_name = 'kr-standard'
access_key = 'ACCESS_KEY'
secret_key = 'SECRET_KEY'

if __name__ == "__main__":
    s3 = boto3.client(service_name, endpoint_url=endpoint_url, aws_access_key_id=access_key,
                      aws_secret_access_key=secret_key)

    bucket_name = 'sample-bucket'

    # create folder
    object_name = 'sample-folder/'

    s3.put_object(Bucket=bucket_name, Key=object_name)

    # upload file
    object_name = 'sample-object'
    local_file_path = '/tmp/test.txt'

    s3.upload_file(local_file_path, bucket_name, object_name)
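
upload_file also accepts an ExtraArgs dictionary for request parameters such as the ACL or Content-Type. A short sketch; the values shown are illustrative:

# upload with a public-read ACL and an explicit content type
s3.upload_file(local_file_path, bucket_name, object_name,
               ExtraArgs={'ACL': 'public-read', 'ContentType': 'text/plain'})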

View file list

import boto3

service_name = 's3'
endpoint_url = 'https://kr.object.ncloudstorage.com'
region_name = 'kr-standard'
access_key = 'ACCESS_KEY'
secret_key = 'SECRET_KEY'

if __name__ == "__main__":
    s3 = boto3.client(service_name, endpoint_url=endpoint_url, aws_access_key_id=access_key,
                      aws_secret_access_key=secret_key)

    bucket_name = 'sample-bucket'

    # list all in the bucket
    max_keys = 300
    response = s3.list_objects(Bucket=bucket_name, MaxKeys=max_keys)

    print('list all in the bucket')

    while True:
        print('IsTruncated=%r' % response.get('IsTruncated'))
        print('Marker=%s' % response.get('Marker'))
        print('NextMarker=%s' % response.get('NextMarker'))

        print('Object List')
        for content in response.get('Contents', []):
            print(' Name=%s, Size=%d, Owner=%s' % \
                  (content.get('Key'), content.get('Size'), content.get('Owner').get('ID')))

        if response.get('IsTruncated'):
            response = s3.list_objects(Bucket=bucket_name, MaxKeys=max_keys,
                                       Marker=response.get('NextMarker'))
        else:
            break

    # top level folders and files in the bucket
    delimiter = '/'
    max_keys = 300

    response = s3.list_objects(Bucket=bucket_name, Delimiter=delimiter, MaxKeys=max_keys)

    print('top level folders and files in the bucket')

    while True:
        print('IsTruncated=%r' % response.get('IsTruncated'))
        print('Marker=%s' % response.get('Marker'))
        print('NextMarker=%s' % response.get('NextMarker'))

        print('Folder List')
        for folder in response.get('CommonPrefixes', []):
            print(' Name=%s' % folder.get('Prefix'))

        print('File List')
        for content in response.get('Contents', []):
            print(' Name=%s, Size=%d, Owner=%s' % \
                  (content.get('Key'), content.get('Size'), content.get('Owner').get('ID')))

        if response.get('IsTruncated'):
            response = s3.list_objects(Bucket=bucket_name, Delimiter=delimiter, MaxKeys=max_keys,
                                       Marker=response.get('NextMarker'))
        else:
            break
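
The manual Marker loop above can also be written with boto3's built-in paginator, which handles continuation automatically. A minimal sketch of the same listing:

# paginate over all objects in the bucket
paginator = s3.get_paginator('list_objects')
for page in paginator.paginate(Bucket=bucket_name, PaginationConfig={'PageSize': max_keys}):
    for content in page.get('Contents', []):
        print(' Name=%s, Size=%d' % (content.get('Key'), content.get('Size')))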

Download files

import boto3

service_name = 's3'
endpoint_url = 'https://kr.object.ncloudstorage.com'
region_name = 'kr-standard'
access_key = 'ACCESS_KEY'
secret_key = 'SECRET_KEY'

if __name__ == "__main__":
    s3 = boto3.client(service_name, endpoint_url=endpoint_url, aws_access_key_id=access_key,
                      aws_secret_access_key=secret_key)
    bucket_name = 'sample-bucket'

    object_name = 'sample-object'
    local_file_path = '/tmp/test.txt'

    s3.download_file(bucket_name, object_name, local_file_path)
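
To read an object into memory instead of writing it to a local file, get_object returns a streaming body. A minimal sketch:

# download into memory
response = s3.get_object(Bucket=bucket_name, Key=object_name)
data = response['Body'].read()
print('downloaded %d bytes' % len(data))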

Delete files

import boto3

service_name = 's3'
endpoint_url = 'https://kr.object.ncloudstorage.com'
region_name = 'kr-standard'
access_key = 'ACCESS_KEY'
secret_key = 'SECRET_KEY'

if __name__ == "__main__":
    s3 = boto3.client(service_name, endpoint_url=endpoint_url, aws_access_key_id=access_key,
                      aws_secret_access_key=secret_key)
    
    bucket_name = 'sample-bucket'
    object_name = 'sample-object'

    s3.delete_object(Bucket=bucket_name, Key=object_name)
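
Several objects can be removed in one request with delete_objects, which accepts up to 1,000 keys per call. A sketch; the key names here are hypothetical:

# batch delete; the key names are illustrative
s3.delete_objects(Bucket=bucket_name, Delete={
    'Objects': [
        {'Key': 'sample-object-1'},
        {'Key': 'sample-object-2'}
    ]
})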

Set ACL

import boto3

service_name = 's3'
endpoint_url = 'https://kr.object.ncloudstorage.com'
region_name = 'kr-standard'
access_key = 'ACCESS_KEY'
secret_key = 'SECRET_KEY'

if __name__ == "__main__":
    s3 = boto3.client(service_name, endpoint_url=endpoint_url, aws_access_key_id=access_key,
                      aws_secret_access_key=secret_key)
    bucket_name = 'sample-bucket'

    # set bucket ACL
    # add read permission to anonymous
    s3.put_bucket_acl(Bucket=bucket_name, ACL='public-read')

    response = s3.get_bucket_acl(Bucket=bucket_name)

    # set object ACL
    # add read permission to user by ID
    object_name = 'sample-object'
    owner_id = 'test-owner-id'
    target_id = 'test-user-id'

    s3.put_object_acl(Bucket=bucket_name, Key=object_name,
                      AccessControlPolicy={
                          'Grants': [
                              {
                                  'Grantee': {
                                      'ID': owner_id,
                                      'Type': 'CanonicalUser'
                                  },
                                  'Permission': 'FULL_CONTROL'
                              },
                              {
                                  'Grantee': {
                                      'ID': target_id,
                                      'Type': 'CanonicalUser'
                                  },
                                  'Permission': 'READ'
                              }
                          ],
                          'Owner': {
                              'ID': owner_id
                          }
                      })

    response = s3.get_object_acl(Bucket=bucket_name, Key=object_name)
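
get_object_acl returns the owner and the list of grants. A short sketch that prints each grantee and permission from the response above:

for grant in response.get('Grants', []):
    print('Grantee=%s, Permission=%s' %
          (grant.get('Grantee', {}).get('ID'), grant.get('Permission')))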

Multipart upload

Caution

If a multipart upload is not completed, the parts already uploaded remain in the bucket. These residual parts are included in the bucket capacity and are charged. To prevent unnecessary charges, delete incomplete multipart objects using the following methods.

  1. Receive notifications for incomplete multipart upload objects through Cloud Advisor
    1. For more information on Cloud Advisor, see the Cloud Advisor Guide.
  2. Check and delete incomplete multipart objects
    1. View information on canceled or incomplete multipart uploads: ListMultipartUploads API Guide
    2. Delete incomplete multipart uploads: AbortMultipartUpload API Guide

import boto3

service_name = 's3'
endpoint_url = 'https://kr.object.ncloudstorage.com'
region_name = 'kr-standard'
access_key = 'ACCESS_KEY'
secret_key = 'SECRET_KEY'

if __name__ == "__main__":
    s3 = boto3.client(service_name, endpoint_url=endpoint_url, aws_access_key_id=access_key,
                      aws_secret_access_key=secret_key)
    bucket_name = 'sample-bucket'
    object_name = 'sample-large-object'
    
    local_file = '/tmp/sample.file'

    # initialize and get upload ID
    create_multipart_upload_response = s3.create_multipart_upload(Bucket=bucket_name, Key=object_name)
    upload_id = create_multipart_upload_response['UploadId']

    part_size = 10 * 1024 * 1024
    parts = []

    # upload parts
    with open(local_file, 'rb') as f:
        part_number = 1
        while True:
            data = f.read(part_size)
            if not len(data):
                break
            upload_part_response = s3.upload_part(Bucket=bucket_name, Key=object_name, PartNumber=part_number, UploadId=upload_id, Body=data)
            parts.append({
                'PartNumber': part_number,
                'ETag': upload_part_response['ETag']
            })
            part_number += 1

    multipart_upload = {'Parts': parts}

    # abort multipart upload
    # s3.abort_multipart_upload(Bucket=bucket_name, Key=object_name, UploadId=upload_id)

    # complete multipart upload
    s3.complete_multipart_upload(Bucket=bucket_name, Key=object_name, UploadId=upload_id, MultipartUpload=multipart_upload)
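
To find and remove incomplete uploads as described in the caution above, list_multipart_uploads and abort_multipart_upload can be combined. A minimal cleanup sketch; note that it aborts every in-progress upload in the bucket:

# abort all incomplete multipart uploads in the bucket
response = s3.list_multipart_uploads(Bucket=bucket_name)
for upload in response.get('Uploads', []):
    s3.abort_multipart_upload(Bucket=bucket_name, Key=upload['Key'],
                              UploadId=upload['UploadId'])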

Request encryption (SSE-C) using keys provided by customers

Note

Some console features are not available for objects encrypted with SSE-C.

import boto3
import secrets

service_name = 's3'
endpoint_url = 'https://kr.object.ncloudstorage.com'
region_name = 'kr-standard'
access_key = 'ACCESS_KEY'
secret_key = 'SECRET_KEY'

if __name__ == "__main__":
    # S3 client
    s3 = boto3.client(service_name, endpoint_url=endpoint_url, aws_access_key_id=access_key,
                      aws_secret_access_key=secret_key)

    # create encryption key
    sse_key = secrets.token_bytes(32)
    sse_conf = {'SSECustomerKey': sse_key, 'SSECustomerAlgorithm': 'AES256'}

    bucket_name = 'sample-bucket'
    object_name = 'sample-object'
    
    # upload object
    local_file_path = '/tmp/sample.txt'
    s3.upload_file(local_file_path, bucket_name, object_name, ExtraArgs=sse_conf)

    # download object
    download_file_path = '/tmp/sample-download.txt'
    s3.download_file(bucket_name, object_name, download_file_path, ExtraArgs=sse_conf)
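
Every later request against an SSE-C object must supply the same customer key, including metadata reads. A sketch using head_object with the configuration above:

# head_object also requires the customer key for SSE-C objects
response = s3.head_object(Bucket=bucket_name, Key=object_name,
                          SSECustomerKey=sse_key, SSECustomerAlgorithm='AES256')
print(response['ContentLength'])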

Set cross-origin resource sharing (CORS)

import boto3

service_name = 's3'
endpoint_url = 'https://kr.object.ncloudstorage.com'
region_name = 'kr-standard'
access_key = 'ACCESS_KEY'
secret_key = 'SECRET_KEY'

if __name__ == "__main__":
    s3 = boto3.client(service_name, endpoint_url=endpoint_url, aws_access_key_id=access_key,
                      aws_secret_access_key=secret_key)
                      
    bucket_name = 'sample-bucket'

    # Define the configuration rules
    cors_configuration = {
        'CORSRules': [{
            'AllowedHeaders': ['*'],
            'AllowedMethods': ['GET', 'PUT'],
            'AllowedOrigins': ['*'],
            'MaxAgeSeconds': 3000
        }]
    }

    # Set CORS configuration
    s3.put_bucket_cors(Bucket=bucket_name,
                       CORSConfiguration=cors_configuration)

    # Get CORS configuration
    response = s3.get_bucket_cors(Bucket=bucket_name)
    print(response['CORSRules'])
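
To remove the rules again, delete_bucket_cors clears the bucket's CORS configuration. A one-line sketch:

# remove all CORS rules from the bucket
s3.delete_bucket_cors(Bucket=bucket_name)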