Python용 AWS SDK (Boto3)

    • PDF

    Article Summary

    Classic/VPC 환경에서 이용 가능합니다.

    Python용 SDK for S3 API

    AWS S3에서 제공하는 Python용 SDK를 이용해 네이버 클라우드 플랫폼의 Object Storage를 사용하는 예제입니다. AWS Python SDK 1.6.19 버전을 기준으로 작성되었습니다.

    SDK 설치

    pip install boto3==1.6.19
    
    예제

    참고

    예제에서 사용하는 access_key, secret_key 값에는 등록한 API 인증키 정보를 입력해야 합니다.

    버킷 생성

    import boto3
    
    service_name = 's3'
    endpoint_url = 'https://kr.object.ncloudstorage.com'
    region_name = 'kr-standard'
    access_key = 'ACCESS_KEY'
    secret_key = 'SECRET_KEY'
    
    if __name__ == "__main__":
        # Object Storage is S3-compatible; point the client at the custom endpoint.
        client = boto3.client(
            service_name,
            endpoint_url=endpoint_url,
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
        )
    
        bucket_name = 'sample-bucket'
    
        # Create a new bucket with the given name.
        client.create_bucket(Bucket=bucket_name)
    
    

    버킷 목록 조회

    import boto3
    
    service_name = 's3'
    endpoint_url = 'https://kr.object.ncloudstorage.com'
    region_name = 'kr-standard'
    access_key = 'ACCESS_KEY'
    secret_key = 'SECRET_KEY'
    
    if __name__ == "__main__":
        client = boto3.client(
            service_name,
            endpoint_url=endpoint_url,
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
        )
    
        # Print the name of every bucket owned by these credentials.
        for bucket in client.list_buckets().get('Buckets', []):
            print(bucket.get('Name'))
    
    

    버킷 삭제

    import boto3
    
    service_name = 's3'
    endpoint_url = 'https://kr.object.ncloudstorage.com'
    region_name = 'kr-standard'
    access_key = 'ACCESS_KEY'
    secret_key = 'SECRET_KEY'
    
    if __name__ == "__main__":
        client = boto3.client(
            service_name,
            endpoint_url=endpoint_url,
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
        )
    
        bucket_name = 'sample-bucket'
    
        # Delete the bucket (it must already be empty for this call to succeed).
        client.delete_bucket(Bucket=bucket_name)
    
    

    파일 업로드

    import boto3
    
    service_name = 's3'
    endpoint_url = 'https://kr.object.ncloudstorage.com'
    region_name = 'kr-standard'
    access_key = 'ACCESS_KEY'
    secret_key = 'SECRET_KEY'
    
    if __name__ == "__main__":
        client = boto3.client(
            service_name,
            endpoint_url=endpoint_url,
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
        )
    
        bucket_name = 'sample-bucket'
    
        # A "folder" is just a zero-byte object whose key ends with '/'.
        client.put_object(Bucket=bucket_name, Key='sample-folder/')
    
        # Upload a local file as a regular object.
        object_name = 'sample-object'
        local_file_path = '/tmp/test.txt'
        client.upload_file(local_file_path, bucket_name, object_name)
    
    

    파일 목록 조회

    import boto3
    
    service_name = 's3'
    endpoint_url = 'https://kr.object.ncloudstorage.com'
    region_name = 'kr-standard'
    access_key = 'ACCESS_KEY'
    secret_key = 'SECRET_KEY'
    
    if __name__ == "__main__":
        s3 = boto3.client(service_name, endpoint_url=endpoint_url, aws_access_key_id=access_key,
                          aws_secret_access_key=secret_key)
    
        bucket_name = 'sample-bucket'
    
        # list all in the bucket
        max_keys = 300
        response = s3.list_objects(Bucket=bucket_name, MaxKeys=max_keys)
    
        print('list all in the bucket')
    
        while True:
            print('IsTruncated=%r' % response.get('IsTruncated'))
            print('Marker=%s' % response.get('Marker'))
            print('NextMarker=%s' % response.get('NextMarker'))
    
            print('Object List')
            # 'Contents' is absent from the response when the page has no objects,
            # so default to [] instead of iterating None.
            contents = response.get('Contents', [])
            for content in contents:
                print(' Name=%s, Size=%d, Owner=%s' %
                      (content.get('Key'), content.get('Size'), content.get('Owner').get('ID')))
    
            if response.get('IsTruncated'):
                # list_objects only returns NextMarker when a Delimiter is given;
                # without one, continue from the last key of the current page.
                marker = response.get('NextMarker') or contents[-1].get('Key')
                response = s3.list_objects(Bucket=bucket_name, MaxKeys=max_keys,
                                           Marker=marker)
            else:
                break
    
        # top level folders and files in the bucket
        delimiter = '/'
        max_keys = 300
    
        response = s3.list_objects(Bucket=bucket_name, Delimiter=delimiter, MaxKeys=max_keys)
    
        print('top level folders and files in the bucket')
    
        while True:
            print('IsTruncated=%r' % response.get('IsTruncated'))
            print('Marker=%s' % response.get('Marker'))
            print('NextMarker=%s' % response.get('NextMarker'))
    
            print('Folder List')
            # 'CommonPrefixes' is absent when no keys share a prefix up to the delimiter.
            for folder in response.get('CommonPrefixes', []):
                print(' Name=%s' % folder.get('Prefix'))
    
            print('File List')
            for content in response.get('Contents', []):
                print(' Name=%s, Size=%d, Owner=%s' %
                      (content.get('Key'), content.get('Size'), content.get('Owner').get('ID')))
    
            if response.get('IsTruncated'):
                response = s3.list_objects(Bucket=bucket_name, Delimiter=delimiter, MaxKeys=max_keys,
                                           Marker=response.get('NextMarker'))
            else:
                break
    
    

    파일 다운로드

    import boto3
    
    service_name = 's3'
    endpoint_url = 'https://kr.object.ncloudstorage.com'
    region_name = 'kr-standard'
    access_key = 'ACCESS_KEY'
    secret_key = 'SECRET_KEY'
    
    if __name__ == "__main__":
        client = boto3.client(
            service_name,
            endpoint_url=endpoint_url,
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
        )
    
        bucket_name = 'sample-bucket'
        object_name = 'sample-object'
        local_file_path = '/tmp/test.txt'
    
        # Download the object's content into the local file path.
        client.download_file(bucket_name, object_name, local_file_path)
    
    

    파일 삭제

    import boto3
    
    service_name = 's3'
    endpoint_url = 'https://kr.object.ncloudstorage.com'
    region_name = 'kr-standard'
    access_key = 'ACCESS_KEY'
    secret_key = 'SECRET_KEY'
    
    if __name__ == "__main__":
        client = boto3.client(
            service_name,
            endpoint_url=endpoint_url,
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
        )
    
        # Remove a single object from the bucket by key.
        client.delete_object(Bucket='sample-bucket', Key='sample-object')
    
    

    ACL 설정

    import boto3
    
    service_name = 's3'
    endpoint_url = 'https://kr.object.ncloudstorage.com'
    region_name = 'kr-standard'
    access_key = 'ACCESS_KEY'
    secret_key = 'SECRET_KEY'
    
    if __name__ == "__main__":
        s3 = boto3.client(service_name, endpoint_url=endpoint_url, aws_access_key_id=access_key,
                          aws_secret_access_key=secret_key)
        bucket_name = 'sample-bucket'
    
        # Bucket ACL: canned 'public-read' grants anonymous read access.
        s3.put_bucket_acl(Bucket=bucket_name, ACL='public-read')
        response = s3.get_bucket_acl(Bucket=bucket_name)
    
        # Object ACL: grant READ to a specific user ID while keeping the
        # owner's FULL_CONTROL (the policy replaces the whole grant list).
        object_name = 'sample-object'
        owner_id = 'test-owner-id'
        target_id = 'test-user-id'
    
        owner_grant = {
            'Grantee': {'ID': owner_id, 'Type': 'CanonicalUser'},
            'Permission': 'FULL_CONTROL'
        }
        reader_grant = {
            'Grantee': {'ID': target_id, 'Type': 'CanonicalUser'},
            'Permission': 'READ'
        }
        policy = {
            'Grants': [owner_grant, reader_grant],
            'Owner': {'ID': owner_id}
        }
    
        s3.put_object_acl(Bucket=bucket_name, Key=object_name, AccessControlPolicy=policy)
    
        response = s3.get_object_acl(Bucket=bucket_name, Key=object_name)
    
    

    멀티 파트 업로드

    주의

    멀티 파트 업로드를 완료하지 않을 경우 잔여 파일이 버킷에 남게 되며, 잔여 파일은 버킷 용량에 포함되어 과금됩니다. 아래 방법을 통해 불완전한 멀티파트 객체를 삭제하여 불필요한 과금이 되지 않도록 주의하시기 바랍니다.

    1. Cloud Advisor를 통해 불완전한 멀티파트 업로드 객체에 대해 알림 수신
      1. Cloud Advisor에 대한 자세한 내용은 Cloud Advisor 사용 가이드를 확인해 주십시오.
    2. 불완전한 멀티파트 객체 확인 및 삭제
      1. 취소 또는 완료되지 않은 멀티파트 업로드 정보 조회: ListMultipartUploads API 가이드
      2. 멀티파트 삭제: AbortMultipartUpload API 가이드
    import boto3
    
    service_name = 's3'
    endpoint_url = 'https://kr.object.ncloudstorage.com'
    region_name = 'kr-standard'
    access_key = 'ACCESS_KEY_ID'
    secret_key = 'SECRET_KEY'
    
    if __name__ == "__main__":
        s3 = boto3.client(service_name, endpoint_url=endpoint_url, aws_access_key_id=access_key,
                          aws_secret_access_key=secret_key)
        bucket_name = 'sample-bucket'
        object_name = 'sample-large-object'
    
        local_file = '/tmp/sample.file'
    
        # initialize and get upload ID
        create_multipart_upload_response = s3.create_multipart_upload(Bucket=bucket_name, Key=object_name)
        upload_id = create_multipart_upload_response['UploadId']
    
        part_size = 10 * 1024 * 1024
        parts = []
    
        try:
            # upload parts sequentially, collecting each part's ETag for completion
            with open(local_file, 'rb') as f:
                part_number = 1
                while True:
                    data = f.read(part_size)
                    if not data:
                        break
                    upload_part_response = s3.upload_part(Bucket=bucket_name, Key=object_name,
                                                          PartNumber=part_number, UploadId=upload_id,
                                                          Body=data)
                    parts.append({
                        'PartNumber': part_number,
                        'ETag': upload_part_response['ETag']
                    })
                    part_number += 1
    
            # complete multipart upload
            s3.complete_multipart_upload(Bucket=bucket_name, Key=object_name, UploadId=upload_id,
                                         MultipartUpload={'Parts': parts})
        except Exception:
            # Abort on any failure: incomplete multipart uploads remain in the
            # bucket and are billed until explicitly removed.
            s3.abort_multipart_upload(Bucket=bucket_name, Key=object_name, UploadId=upload_id)
            raise
    
    

    고객 제공 키를 사용한 암호화(SSE-C) 요청

    참고

    SSE-C 기반으로 객체를 암호화할 경우 콘솔에서 일부 요청을 사용할 수 없습니다.

    import boto3
    import secrets
    
    service_name = 's3'
    endpoint_url = 'https://kr.object.ncloudstorage.com'
    region_name = 'kr-standard'
    access_key = 'ACCESS_KEY_ID'
    secret_key = 'SECRET_KEY'
    
    if __name__ == "__main__":
        # S3 client
        s3 = boto3.client(service_name, endpoint_url=endpoint_url, aws_access_key_id=access_key,
                          aws_secret_access_key=secret_key)
    
        # Generate a random 256-bit customer-provided encryption key (SSE-C).
        sse_key = secrets.token_bytes(32)
        sse_conf = { "SSECustomerKey": sse_key, "SSECustomerAlgorithm": "AES256"}
    
        bucket_name = 'sample-bucket'
        object_name = 'sample-object'
    
        # Upload with the SSE-C parameters; the same key is required to read it back.
        local_file_path = '/tmp/sample.txt'
        s3.upload_file(local_file_path, bucket_name, object_name, ExtraArgs=sse_conf)
    
        # Download must supply the identical key/algorithm pair.
        download_file_path = '/tmp/sample-download.txt'
        s3.download_file(bucket_name, object_name, download_file_path, ExtraArgs=sse_conf)
    
    

    CORS(Cross-Origin Resource Sharing) 설정

    import boto3
    
    service_name = 's3'
    endpoint_url = 'https://kr.object.ncloudstorage.com'
    region_name = 'kr-standard'
    access_key = 'ACCESS_KEY_ID'
    secret_key = 'SECRET_KEY'
    
    if __name__ == "__main__":
        client = boto3.client(
            service_name,
            endpoint_url=endpoint_url,
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
        )
    
        bucket_name = 'sample-bucket'
    
        # One CORS rule: allow GET/PUT from any origin with any header,
        # and let browsers cache the preflight response for 3000 seconds.
        cors_rule = {
            'AllowedHeaders': ['*'],
            'AllowedMethods': ['GET', 'PUT'],
            'AllowedOrigins': ['*'],
            'MaxAgeSeconds': 3000
        }
    
        # Apply the CORS configuration to the bucket.
        client.put_bucket_cors(Bucket=bucket_name,
                               CORSConfiguration={'CORSRules': [cors_rule]})
    
        # Read the configuration back and show the active rules.
        response = client.get_bucket_cors(Bucket=bucket_name)
        print(response['CORSRules'])
    

    이 문서가 도움이 되었습니까?

    Changing your password will log you out immediately. Use the new password to log back in.
    First name must have at least 2 characters. Numbers and special characters are not allowed.
    Last name must have at least 1 character. Numbers and special characters are not allowed.
    Enter a valid email
    Enter a valid password
    Your profile has been successfully updated.