How to upload a file to a directory in an S3 bucket using boto

I want to copy a file to an S3 bucket using Python.

Example: I have a bucket named "test", and in the bucket I have two folders named "dump" and "input". Now I want to copy a file from a local directory to the S3 folder "dump" using Python. Can someone help me?

+87
python amazon-s3 amazon-web-services boto
Feb 26 '13 at 9:47
12 answers

Try this:

    import boto
    import boto.s3
    import boto.s3.connection
    import sys
    from boto.s3.key import Key

    AWS_ACCESS_KEY_ID = ''
    AWS_SECRET_ACCESS_KEY = ''

    bucket_name = AWS_ACCESS_KEY_ID.lower() + '-dump'
    conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    bucket = conn.create_bucket(bucket_name,
                                location=boto.s3.connection.Location.DEFAULT)

    testfile = "replace this with an actual filename"
    print 'Uploading %s to Amazon S3 bucket %s' % (testfile, bucket_name)

    def percent_cb(complete, total):
        sys.stdout.write('.')
        sys.stdout.flush()

    k = Key(bucket)
    k.key = 'my test file'
    k.set_contents_from_filename(testfile, cb=percent_cb, num_cb=10)

[UPDATE] I'm not a Pythonista, so thanks for the tips about the import statements. Also, I would not recommend placing credentials in your own source code. If you are running this inside AWS, use IAM credentials with instance profiles ( http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-ec2_instance-profiles.html ), and to keep the same behavior in your Dev/Test environment, use something like Hologram from AdRoll ( https://github.com/AdRoll/hologram ).
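For illustration, a minimal sketch of what that looks like in code: when no keys are passed, boto falls back to its credential chain (environment variables, config files, or the EC2 instance profile), so the upload code itself stays credential-free. The bucket and file names here are placeholders.

    import boto
    from boto.s3.key import Key

    # No keys passed: boto resolves credentials from the environment,
    # ~/.boto / ~/.aws config, or the EC2 instance profile.
    conn = boto.connect_s3()
    bucket = conn.get_bucket('your-bucket-name')

    k = Key(bucket)
    k.key = 'dump/my_test_file'
    k.set_contents_from_filename('local_file.txt')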

+94
Feb 26 '13 at 11:04

No need to make it complicated:

    import boto
    import boto.s3.key

    s3_connection = boto.connect_s3()
    bucket = s3_connection.get_bucket('your bucket name')
    key = boto.s3.key.Key(bucket, 'some_file.zip')
    with open('some_file.zip', 'rb') as f:
        key.send_file(f)
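If send_file gives you trouble (it is a fairly low-level call), set_contents_from_file is the documented high-level equivalent; a sketch with the same assumed bucket and file names:

    import boto
    import boto.s3.key

    s3_connection = boto.connect_s3()
    bucket = s3_connection.get_bucket('your bucket name')
    key = boto.s3.key.Key(bucket, 'some_file.zip')
    with open('some_file.zip', 'rb') as f:
        # handles content-length and MD5 for you
        key.set_contents_from_file(f)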
+42
Jun 29 '15 at 10:00

I used tinys3 and it's very simple to implement:

    import tinys3

    conn = tinys3.Connection('S3_ACCESS_KEY', 'S3_SECRET_KEY', tls=True)
    f = open('some_file.zip', 'rb')
    conn.upload('some_file.zip', f, 'my_bucket')

https://www.smore.com/labs/tinys3/

+34
Dec 24 '14 at 8:48
    import boto3

    s3 = boto3.resource('s3')
    BUCKET = "test"

    s3.Bucket(BUCKET).upload_file("your/local/file", "dump/file")
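To confirm the file landed under the "dump" folder, you can list that prefix afterwards (a sketch, reusing the bucket name from the question):

    import boto3

    s3 = boto3.resource('s3')
    # list every object under the "dump/" prefix of the "test" bucket
    for obj in s3.Bucket("test").objects.filter(Prefix="dump/"):
        print(obj.key)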
+32
Nov 03 '17 at 15:17
    from boto3.s3.transfer import S3Transfer
    import boto3

    # have all the variables populated which are required below
    client = boto3.client('s3',
                          aws_access_key_id=access_key,
                          aws_secret_access_key=secret_key)
    transfer = S3Transfer(client)
    transfer.upload_file(filepath, bucket_name, folder_name + "/" + filename)
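Note that current boto3 exposes the same managed transfer directly on the client, so the S3Transfer wrapper is usually unnecessary; a sketch with the same assumed variables (access_key, secret_key, filepath, bucket_name, folder_name, filename):

    import boto3

    client = boto3.client('s3',
                          aws_access_key_id=access_key,
                          aws_secret_access_key=secret_key)
    # upload_file on the client wraps the same multipart-capable transfer manager
    client.upload_file(filepath, bucket_name, folder_name + "/" + filename)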
+12
Jan 31 '17 at 12:34

This will also work:

    import os
    import boto
    import boto.s3.connection
    from boto.s3.key import Key

    try:
        conn = boto.s3.connect_to_region(
            'us-east-1',
            aws_access_key_id='AWS-Access-Key',
            aws_secret_access_key='AWS-Secret-Key',
            # host = 's3-website-us-east-1.amazonaws.com',
            # is_secure=False,  # uncomment if you are not using ssl
            calling_format=boto.s3.connection.OrdinaryCallingFormat(),
        )
        bucket = conn.get_bucket('YourBucketName')
        key_name = 'FileToUpload'
        path = 'images/holiday'  # directory under which the file should be uploaded
        full_key_name = os.path.join(path, key_name)
        k = bucket.new_key(full_key_name)
        k.set_contents_from_filename(key_name)
    except Exception, e:
        print str(e)
        print "error"
+10
Feb 01 '17 at 8:19
    import boto
    import boto.s3
    from boto.s3.key import Key

    AWS_ACCESS_KEY_ID = ''
    AWS_SECRET_ACCESS_KEY = ''
    END_POINT = ''   # eg. us-east-1
    S3_HOST = ''     # eg. s3.us-east-1.amazonaws.com
    BUCKET_NAME = 'test'
    FILENAME = 'upload.txt'
    UPLOADED_FILENAME = 'dumps/upload.txt'
    # include folders in the key; if they don't exist, they will be created

    s3 = boto.s3.connect_to_region(END_POINT,
                                   aws_access_key_id=AWS_ACCESS_KEY_ID,
                                   aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
                                   host=S3_HOST)
    bucket = s3.get_bucket(BUCKET_NAME)
    k = Key(bucket)
    k.key = UPLOADED_FILENAME
    k.set_contents_from_filename(FILENAME)
+4
Mar 02 '17 at 13:23

Upload the file to S3 within a session that carries your credentials.

    import boto3

    session = boto3.Session(
        aws_access_key_id='AWS_ACCESS_KEY_ID',
        aws_secret_access_key='AWS_SECRET_ACCESS_KEY',
    )
    s3 = session.resource('s3')

    # Filename - file to upload
    # Bucket - bucket to upload to (the top-level directory under AWS S3)
    # Key - S3 object name (can contain subdirectories); if not specified, the file name is used
    s3.meta.client.upload_file(Filename='input_file_path',
                               Bucket='bucket_name',
                               Key='s3_output_key')
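If you'd rather not embed keys in the script, boto3.Session also accepts a named profile from your AWS config; a sketch assuming a profile called "dev" exists in ~/.aws/credentials:

    import boto3

    # assumes a [dev] section in ~/.aws/credentials
    session = boto3.Session(profile_name='dev')
    s3 = session.resource('s3')
    s3.meta.client.upload_file(Filename='input_file_path',
                               Bucket='bucket_name',
                               Key='s3_output_key')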
+4
Feb 08 '19 at 15:22

This is a three-liner. Just follow the instructions in the boto3 documentation.

    import boto3

    s3 = boto3.resource(service_name='s3')
    s3.meta.client.upload_file(Filename='C:/foo/bar/baz.filetype',
                               Bucket='yourbucketname',
                               Key='baz.filetype')

Here are the important arguments:

- Filename (str) - the path to the file to upload.
- Bucket (str) - the name of the bucket to upload to.
- Key (str) - the name you want to assign to the file in your S3 bucket. It can be the same as the file name or a different name of your choice, but the file type should remain unchanged.

Note: I assume that you have saved your credentials in the ~/.aws folder, as suggested in the best configuration practices in the boto3 documentation.
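For reference, a minimal sketch of that credentials file (placeholder values):

    # ~/.aws/credentials
    [default]
    aws_access_key_id = YOUR_ACCESS_KEY_ID
    aws_secret_access_key = YOUR_SECRET_ACCESS_KEY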

+3
Nov 29 '18 at 0:20

An example of uploading an entire folder with the following code:

    import boto
    import boto.s3
    import boto.s3.connection
    from boto.s3.key import Key
    import os.path
    import sys

    # Fill in info on data to upload
    # destination bucket name
    bucket_name = 'willie20181121'
    # source directory
    sourceDir = '/home/willie/Desktop/x/'  # Linux path
    # destination directory name (on S3)
    destDir = '/test1/'  # S3 path

    # max size in bytes before uploading in parts. Between 1 and 5 GB recommended
    MAX_SIZE = 20 * 1000 * 1000
    # size of parts when uploading in parts
    PART_SIZE = 6 * 1000 * 1000

    access_key = 'MPBVAQ*******IT****'
    secret_key = '11t63yDV***********HgUcgMOSN*****'

    conn = boto.connect_s3(
        aws_access_key_id=access_key,
        aws_secret_access_key=secret_key,
        host='******.org.tw',
        is_secure=False,  # uncomment if you are not using ssl
        calling_format=boto.s3.connection.OrdinaryCallingFormat(),
    )
    bucket = conn.create_bucket(bucket_name,
                                location=boto.s3.connection.Location.DEFAULT)

    # collect the file names in the top level of the source directory
    uploadFileNames = []
    for (root, dirnames, filenames) in os.walk(sourceDir):
        uploadFileNames.extend(filenames)
        break

    def percent_cb(complete, total):
        sys.stdout.write('.')
        sys.stdout.flush()

    for filename in uploadFileNames:
        sourcepath = os.path.join(sourceDir, filename)
        destpath = os.path.join(destDir, filename)
        print('Uploading %s to Amazon S3 bucket %s' % (sourcepath, bucket_name))

        filesize = os.path.getsize(sourcepath)
        if filesize > MAX_SIZE:
            print("multipart upload")
            mp = bucket.initiate_multipart_upload(destpath)
            fp = open(sourcepath, 'rb')
            fp_num = 0
            while fp.tell() < filesize:
                fp_num += 1
                print("uploading part %i" % fp_num)
                mp.upload_part_from_file(fp, fp_num, cb=percent_cb, num_cb=10,
                                         size=PART_SIZE)
            mp.complete_upload()
        else:
            print("singlepart upload")
            k = Key(bucket)
            k.key = destpath
            k.set_contents_from_filename(sourcepath, cb=percent_cb, num_cb=10)


+1
Dec 03 '18 at 8:23
    import boto
    import boto.s3.connection
    import xml.etree.ElementTree as etree  # or lxml.etree

    # 'listings' is an XML element built elsewhere
    xmlstr = etree.tostring(listings, encoding='utf8', method='xml')

    conn = boto.connect_s3(
        aws_access_key_id=access_key,
        aws_secret_access_key=secret_key,
        # host = '<bucketName>.s3.amazonaws.com',
        host='bucket.s3.amazonaws.com',
        # is_secure=False,  # uncomment if you are not using ssl
        calling_format=boto.s3.connection.OrdinaryCallingFormat(),
    )
    conn.auth_region_name = 'us-west-1'

    bucket = conn.get_bucket('resources', validate=False)
    # new_key creates the key object locally; get_key would return None
    # if the object doesn't exist yet
    key = bucket.new_key('filename.txt')
    key.set_contents_from_string("SAMPLE TEXT")
    key.set_canned_acl('public-read')
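Since the object was given a public-read ACL, you can hand out an unsigned URL for it (a sketch, continuing from the key above):

    # an unsigned, non-expiring URL works because the ACL is public-read
    url = key.generate_url(expires_in=0, query_auth=False)
    print(url)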
0
Nov 13 '18 at 19:59

Using boto3

    import logging
    import boto3
    from botocore.exceptions import ClientError

    def upload_file(file_name, bucket, object_name=None):
        """Upload a file to an S3 bucket

        :param file_name: File to upload
        :param bucket: Bucket to upload to
        :param object_name: S3 object name. If not specified then file_name is used
        :return: True if file was uploaded, else False
        """
        # If S3 object_name was not specified, use file_name
        if object_name is None:
            object_name = file_name

        # Upload the file
        s3_client = boto3.client('s3')
        try:
            response = s3_client.upload_file(file_name, bucket, object_name)
        except ClientError as e:
            logging.error(e)
            return False
        return True
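A small usage sketch, reusing the bucket and folder names from the question (both assumed to exist):

    if __name__ == '__main__':
        # upload local 'upload.txt' into the "dump" folder of the "test" bucket
        ok = upload_file('upload.txt', 'test', 'dump/upload.txt')
        print('uploaded' if ok else 'upload failed')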

For more: https://boto3.amazonaws.com/v1/documentation/api/latest/guide/s3-uploading-files.html

0
Aug 22 '19 at 11:10


