How to use a Python script to copy files from one bucket to another bucket on Amazon S3 with boto


Question

How do I use a Python script to copy files from one bucket to another bucket on Amazon S3 with boto?

I know how to create a bucket, but not how to copy its files to another bucket.

import boto
import boto.s3.connection

#CREATING A CONNECTION
access_key = 'MPB**********ITMO'
secret_key = '11t63y************XojO7b'

conn = boto.connect_s3(
        aws_access_key_id = access_key,
        aws_secret_access_key = secret_key,
        host = 'twg****.org.tw',
        is_secure=False,               # set True if the endpoint uses SSL
        calling_format = boto.s3.connection.OrdinaryCallingFormat(),
        )

#CREATING A BUCKET
bucket = conn.create_bucket('aaaa')


Reference:
https://github.com/boto/boto/blob/develop/docs/source/s3_tut.rst
http://docs.ceph.com/docs/master/radosgw/s3/python/

Answer

import boto
import boto.s3.connection


#CREATING A CONNECTION
access_key = 'MPB*******MO'
secret_key = '11t6******rVYXojO7b'

conn = boto.connect_s3(
        aws_access_key_id = access_key,
        aws_secret_access_key = secret_key,
        host = 'twg******.tw',
        is_secure=False,               # set True if the endpoint uses SSL
        calling_format = boto.s3.connection.OrdinaryCallingFormat(),
        )
src = conn.get_bucket('roger123weddec052335422018')
# note: bucket names cannot contain '/'; use a key prefix for "subfolders"
dst = conn.get_bucket('aaa/aa/')

for k in src.list():
    # server-side copy: copy_key(dest_key_name, src_bucket_name, src_key_name)
    dst.copy_key(k.key, src.name, k.key)
    # then delete the source key to turn the copy into a move
    #k.delete()
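
If the copies should keep their ACLs as well, copy_key accepts a preserve_acl flag; a minimal sketch, assuming the same src and dst as above (boto re-reads and re-applies each source ACL, so it costs extra requests per key):

for k in src.list():
    # server-side copy that also carries the source ACL across
    dst.copy_key(k.key, src.name, k.key, preserve_acl=True)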

=========================================

List top-level folders

# "" = no prefix, "/" = delimiter: returns the top-level common prefixes
folders = bucket.list("", "/")
for folder in folders:
    print (folder.name)
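
The same delimiter trick works one level down; a minimal sketch, assuming some keys live under an image/ prefix (the prefix name is only an example):

# list the "subfolders" directly below image/
for prefix in bucket.list("image/", "/"):
    print (prefix.name)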

======================================

Create a "folder" key

k = bucket.new_key('abc/123/')
k.set_contents_from_string('')
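
S3 has no real folders: the zero-byte key ending in '/' created above is only a placeholder object whose name makes most clients render it as a directory.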

============================================

List your buckets

for bucket in conn.get_all_buckets():
    print ("{name}\t{created}".format(
        name = bucket.name,
        created = bucket.creation_date,
    ))

Create a bucket

#bucket = conn.create_bucket('willie20181121')
bucket = conn.create_bucket('roger123.Tuedec040445192018')
print(bucket.name)

========================================================

List keys (and collect the top-level folder names)

foldername = ','
for key in bucket.list():
    print ("{name}\t{size}\t{modified}\t{owner}\t{isimage}".format(
        name = key.name,          # same as key.key
        size = key.size,
        modified = key.last_modified,
        owner = key.owner.id,
        isimage = key.name.startswith('image'),
    ))
    # remember each distinct top-level "folder" name
    xxx = key.key
    if len(xxx.split('/')) == 2:
        if foldername.find(xxx.split('/')[0]) == -1:
            foldername = foldername + xxx.split('/')[0] + ","
#print(foldername)
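
The comma-string bookkeeping above is easier with a set, and S3 can also return the top-level prefixes directly; a minimal sketch, assuming the same bucket:

from boto.s3.prefix import Prefix

# option 1: collect distinct top-level folder names while scanning all keys
folders = set()
for key in bucket.list():
    parts = key.name.split('/')
    if len(parts) == 2:
        folders.add(parts[0])

# option 2: let S3 compute the common prefixes via a delimiter listing
folders = set(p.name.rstrip('/') for p in bucket.list("", "/")
              if isinstance(p, Prefix))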

Delete a bucket

#conn.delete_bucket('willietest20181121')

Create an object

#key = bucket.new_key('hello.txt')
#key.set_contents_from_string('Hello World!11:52')

Download an object (to a file)

#key = bucket.get_key('hello.txt')
#key.get_contents_to_filename('/home/willie/Desktop/hello.txt')

Delete an object

#bucket.delete_key('hello.txt')
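
Several objects can also be removed in one multi-object-delete request; a minimal sketch, assuming the key names exist in the bucket:

# delete_keys() batches the removals into a single request
result = bucket.delete_keys(['hello.txt', 'abc/123/'])
print(result.deleted)   # keys that were removed
print(result.errors)    # keys that could not be removed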

========================================================================== Insert files

import boto
import boto.s3
import boto.s3.connection
import boto.s3.key
import os.path
import sys

#https://gist.github.com/SavvyGuard/6115006

def percent_cb(complete, total):
    # progress callback: print a dot for each progress tick
    sys.stdout.write('.')
    sys.stdout.flush()

# Fill in info on data to upload
# destination bucket name
bucket_name = 'willie20181121_'
# source directory
sourceDir = '/home/willie/Desktop/x/'
# destination directory name (on s3)
destDir = '/test2/'

#max size in bytes before uploading in parts. between 1 and 5 GB recommended
MAX_SIZE = 20 * 1000 * 1000
#size of parts when uploading in parts
PART_SIZE = 6 * 1000 * 1000

access_key = 'MPBVAQPULDHZIFUQITMO'
secret_key = '11t63yDVZTlStKoBBxHl35HgUcgMOSNrVYXojO7b'

conn = boto.connect_s3(
        aws_access_key_id = access_key,
        aws_secret_access_key = secret_key,
        host = 'twgc-s3.nchc.org.tw',
        is_secure=False,               # set True if the endpoint uses SSL
        calling_format = boto.s3.connection.OrdinaryCallingFormat(),
        )
# get_bucket() takes no location argument; location= belongs to create_bucket()
bucket = conn.get_bucket(bucket_name)


uploadFileNames = []
for (dirpath, dirnames, filenames) in os.walk(sourceDir):
    #uploadFileNames.extend(filenames)   # uncomment to upload everything in sourceDir
    break   # top level only
# hard-coded test files instead of the full directory listing
uploadFileNames.extend(["1.jpg", "2.py"])

for filename in uploadFileNames:
    sourcepath = os.path.join(sourceDir, filename)
    destpath = os.path.join(destDir, filename)
    print ('Uploading %s to Amazon S3 bucket %s' % \
           (sourcepath, bucket_name))
    filesize = os.path.getsize(sourcepath)
    if filesize > MAX_SIZE:
        print ("multipart upload")
        mp = bucket.initiate_multipart_upload(destpath)
        fp = open(sourcepath, 'rb')
        fp_num = 0
        while (fp.tell() < filesize):
            fp_num += 1
            print ("uploading part %i" % fp_num)
            # the last part may be smaller than PART_SIZE
            part_bytes = min(PART_SIZE, filesize - fp.tell())
            mp.upload_part_from_file(fp, fp_num, cb=percent_cb, num_cb=10,
                                     size=part_bytes)
        fp.close()
        mp.complete_upload()

    else:
        print ("singlepart upload")
        k = boto.s3.key.Key(bucket)
        k.key = destpath
        k.set_contents_from_filename(sourcepath, cb=percent_cb, num_cb=10)
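
An alternative multipart pattern reads each part through FileChunkIO (pip install filechunkio), which gives exact part boundaries and lets a failed upload be cancelled cleanly; a minimal sketch, assuming bucket, sourcepath and destpath from the loop above:

import math
from filechunkio import FileChunkIO

part_size = 6 * 1000 * 1000
filesize = os.path.getsize(sourcepath)
part_count = int(math.ceil(filesize / float(part_size)))

mp = bucket.initiate_multipart_upload(destpath)
try:
    for i in range(part_count):
        offset = i * part_size
        nbytes = min(part_size, filesize - offset)
        # each part gets its own bounded file-like view of the source file
        with FileChunkIO(sourcepath, 'r', offset=offset, bytes=nbytes) as fp:
            mp.upload_part_from_file(fp, part_num=i + 1)
    mp.complete_upload()
except Exception:
    mp.cancel_upload()   # avoid leaving orphaned parts on the server
    raise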

================= Exception testing

try:
    key = bucket.get_key('Mail1.txt')
    # get_key returns None if the key does not exist, so the next line
    # raises AttributeError in that case and lands in the except block
    key.get_contents_to_filename('/home/willie/Desktop/mail.txt')
except Exception as e:
    result = "False"
    print("==" + str(e.args))

