Bruno Favalli

Python/Django Developer

Upload and Download on Amazon S3: an example


import logging
import boto3
from botocore.exceptions import ClientError
import os
import errno

#File located on the local computer
file_name = "teste.csv"
#Key the file will have on Amazon S3, including the name of the folder to be created inside the bucket
object_name = "30/teste666.csv"
#Bucket name
bucket = "xxxxxxxxxxxx"

#UPLOAD
def upload_file(file_name, bucket, object_name=None):
    # If the S3 object_name was not specified, use file_name
    if object_name is None:
        object_name = file_name

    # Upload the file
    s3 = boto3.client('s3',
                      aws_access_key_id="xxxxxxxxxxxxxxxxxxxxxxxx",
                      aws_secret_access_key="xxxxxxxxxxxxxxxxxxxxxxxxxxxx")
    try:
        response = s3.upload_file(file_name, bucket, object_name)
    except ClientError as e:
        logging.error(e)
        return False
    return True

#Call the function
upload_file(file_name, bucket, object_name)
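
The keys are hard-coded above only to keep the example short. boto3 can also resolve credentials on its own, from the AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY environment variables, from ~/.aws/credentials, or from an IAM role, so a sketch of the same upload without keys in the source would look like this (assuming credentials are already configured in one of those places):

#Same upload, letting boto3 pick up credentials from its default chain
s3 = boto3.client('s3')
try:
    s3.upload_file(file_name, bucket, object_name)
except ClientError as e:
    logging.error(e)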

-----------------------------------------------------------------
#DOWNLOAD
def assert_dir_exists(path):
    """
    Checks whether the directory tree in path exists. If not, it creates it.
    :param path: the path to check
    """
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
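
#Note: on Python 3.2 or newer the same check can be done in a single call:
#os.makedirs(path, exist_ok=True)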

def download_dir(client, bucket, path, target):
    """
    Recursively downloads the given S3 path to the target directory.
    :param client: S3 client to use.
    :param bucket: the name of the bucket to download from.
    :param path: the S3 directory (key prefix) to download.
    :param target: the local directory to download the files to.
    """

    # Handle a missing / at the end of the prefix
    if not path.endswith('/'):
        path += '/'

    paginator = client.get_paginator('list_objects_v2')
    for result in paginator.paginate(Bucket=bucket, Prefix=path):
        # Print each page of results (useful for debugging)
        print(result)
        # Download each file individually; 'Contents' is absent when the prefix matches no keys
        for key in result.get('Contents', []):
            # Calculate the relative path
            rel_path = key['Key'][len(path):]
            # Skip keys ending in / (folder placeholders)
            if not key['Key'].endswith('/'):
                local_file_path = os.path.join(target, rel_path)
                # Make sure the local directories exist
                local_file_dir = os.path.dirname(local_file_path)
                assert_dir_exists(local_file_dir)
                client.download_file(bucket, key['Key'], local_file_path)

client = boto3.client('s3',
                      aws_access_key_id="xxxxxxxxxxxxxxxxxxxxxxxx",
                      aws_secret_access_key="xxxxxxxxxxxxxxxxxxxxxxxxxxxx")

download_dir(client, 'xxxxxxxxxxx', 'xxxxxxxxxxxxxxxxxxxxx', 'downloads')
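
For a single object, the recursive helper is not needed: the client's download_file method copies one key straight to a local file. A minimal sketch, reusing the bucket and object_name from the upload example above; the local file name "teste_download.csv" is just a placeholder:

#Download a single object; the local file name here is only an example
client.download_file(bucket, object_name, "teste_download.csv")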
