From 9644ca46ec08cd5e7cd570c3cad04ee5a47fb8f6 Mon Sep 17 00:00:00 2001 From: Felix Hageloh Date: Mon, 5 Oct 2015 21:22:33 +0200 Subject: [PATCH] Added support for List Parts As described here http://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadListParts.html Cleaned up related code a bit as well. --- lib/fakes3/file_store.rb | 33 +++++++++++++++++++++++++-------- lib/fakes3/server.rb | 32 +++++++++++++++++++++----------- lib/fakes3/xml_adapter.rb | 34 ++++++++++++++++++++++++++++++++++ 3 files changed, 80 insertions(+), 19 deletions(-) diff --git a/lib/fakes3/file_store.rb b/lib/fakes3/file_store.rb index 03e98f3f..55a514fb 100644 --- a/lib/fakes3/file_store.rb +++ b/lib/fakes3/file_store.rb @@ -213,26 +213,43 @@ def do_store_object(bucket, object_name, filedata, request) end end - def combine_object_parts(bucket, upload_id, object_name, parts, request) - upload_path = File.join(@root, bucket.name) - base_path = File.join(upload_path, "#{upload_id}_#{object_name}") + def list_object_parts(bucket_name, object_name, upload_id) + pattern = File.join(@root, bucket_name, "#{upload_id}_#{object_name}*") + Dir.glob(pattern).map do |path| + file = File.open(File.join(path, SHUCK_METADATA_DIR, 'content'), 'rb') + part = file.read + file.close + { + part_num: path.match(/_part(\d+)$/)[1], + etag: Digest::MD5.hexdigest(part), + size: part.size, + last_mod: File.mtime(path).utc.iso8601(SUBSECOND_PRECISION) + } + end + end + + def combine_object_parts(bucket_name, object_name, upload_id, parts, request) + upload_path = File.join(@root, bucket_name) + base_path = File.join(upload_path, "#{upload_id}_#{object_name}") complete_file = "" - chunk = "" - part_paths = [] + part_paths = [] parts.sort_by { |part| part[:number] }.each do |part| part_path = "#{base_path}_part#{part[:number]}" content_path = File.join(part_path, SHUCK_METADATA_DIR, 'content') - File.open(content_path, 'rb') { |f| chunk = f.read } + file = File.open(content_path, 'rb') + chunk = file.read etag = 
Digest::MD5.hexdigest(chunk) - raise new Error "invalid file chunk" unless part[:etag] == etag + fail StandardError, "invalid part #{part[:etag]}" unless part[:etag] == etag complete_file << chunk - part_paths << part_path + part_paths << part_path + file.close end + bucket = get_bucket(bucket_name) object = do_store_object(bucket, object_name, complete_file, request) # clean up parts diff --git a/lib/fakes3/server.rb b/lib/fakes3/server.rb index 63a7814f..d5a0a7db 100644 --- a/lib/fakes3/server.rb +++ b/lib/fakes3/server.rb @@ -15,6 +15,7 @@ module FakeS3 class Request CREATE_BUCKET = "CREATE_BUCKET" LIST_BUCKETS = "LIST_BUCKETS" + LIST_PARTS = "LIST_PARTS" LS_BUCKET = "LS_BUCKET" HEAD = "HEAD" STORE = "STORE" @@ -91,6 +92,16 @@ def do_GET(request, response) response.status = 200 response.body = XmlAdapter.acl() response['Content-Type'] = 'application/xml' + when 'LIST_PARTS' + upload_id = s_req.query['uploadId'] + response.status = 200 + response.body = XmlAdapter.list_parts_result( + s_req.bucket, + s_req.object, + upload_id, + @store.list_object_parts(s_req.bucket, s_req.object, upload_id) + ) + response['Content-Type'] = 'application/xml' when 'GET' real_obj = @store.get_object(s_req.bucket,s_req.object,request) if !real_obj @@ -246,17 +257,14 @@ def do_POST(request,response) eos elsif query.has_key?('uploadId') - upload_id = query['uploadId'].first - bucket_obj = @store.get_bucket(s_req.bucket) - real_obj = @store.combine_object_parts( - bucket_obj, - upload_id, + combined_obj = @store.combine_object_parts( + s_req.bucket, s_req.object, - parse_complete_multipart_upload(request), - request + query['uploadId'].first, + parse_complete_multipart_upload(s_req.webrick_request), + s_req.webrick_request ) - - response.body = XmlAdapter.complete_multipart_result real_obj + response.body = XmlAdapter.complete_multipart_result combined_obj elsif request.content_type =~ /^multipart\/form-data; boundary=(.+)/ key = request.query['key'] @@ -373,6 +381,9 @@ def 
normalize_get(webrick_req, s_req) else if query["acl"] == "" s_req.type = Request::GET_ACL + elsif query.has_key?('uploadId') + s_req.type = Request::LIST_PARTS + s_req.query = query else s_req.type = Request::GET end @@ -423,8 +434,6 @@ def normalize_put(webrick_req, s_req) s_req.src_object = src_elems[1 + root_offset,src_elems.size].join("/") s_req.type = Request::COPY end - - s_req.webrick_request = webrick_req end def normalize_post(webrick_req,s_req) @@ -449,6 +458,7 @@ def normalize_request(webrick_req) host = host_header.split(':')[0] s_req = Request.new + s_req.webrick_request = webrick_req s_req.path = webrick_req.path s_req.is_path_style = true diff --git a/lib/fakes3/xml_adapter.rb b/lib/fakes3/xml_adapter.rb index f575df02..c7ca6aa4 100644 --- a/lib/fakes3/xml_adapter.rb +++ b/lib/fakes3/xml_adapter.rb @@ -200,6 +200,40 @@ def self.copy_object_result(object) output end + # + # example-bucket + # example-object + # upload-id + # 0 + # + # 1 + # 2010-11-10T20:48:34.000Z + # "7778aef83f66abc1fa1e8477f296d394" + # 10485760 + # + # + def self.list_parts_result(bucket_name, object_name, upload_id, parts) + output = "" + xml = Builder::XmlMarkup.new(:target => output) + xml.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8" + xml.ListPartsResult do |result| + result.Bucket(bucket_name) + result.Key(object_name) + result.UploadId(upload_id) + result.PartNumberMarker(0) + parts.each do |part| + result.Part do |part_xml| + part_xml.PartNumber(part[:part_num]) + part_xml.ETag(part[:etag].inspect) + part_xml.Size(part[:size]) + part_xml.LastModified(part[:last_mod]) + end + end + end + + output + end + # # http://Example-Bucket.s3.amazonaws.com/Example-Object # Example-Bucket