Skip to content
This repository has been archived by the owner on Feb 12, 2024. It is now read-only.

Added support for List Parts #119

Open
wants to merge 2 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
60 changes: 52 additions & 8 deletions lib/fakes3/file_store.rb
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@
require 'fakes3/rate_limitable_file'
require 'digest/md5'
require 'yaml'
require 'rexml/document'
include REXML

module FakeS3
class FileStore
Expand All @@ -24,6 +26,18 @@ def initialize(root)
bucket_obj = Bucket.new(bucket_name,Time.now,[])
@buckets << bucket_obj
@bucket_hash[bucket_name] = bucket_obj

#pre-load objects into bucket, so ListObjects calls work.
Dir[File.join(bucket,'/**/.fakes3_metadataFFF')].each do |fullpath|
content_file = Pathname.new("#{fullpath}/content")
metadata_file = Pathname.new("#{fullpath}/metadata")
next unless content_file.exist? && metadata_file.exist?

key = fullpath.sub('/.fakes3_metadataFFF', '').sub(bucket + '/', '')
object = get_object(bucket_name, key, 'norequest')
bucket_obj.add(object)
object.io.close
end
end
end

Expand Down Expand Up @@ -213,26 +227,43 @@ def do_store_object(bucket, object_name, filedata, request)
end
end

def combine_object_parts(bucket, upload_id, object_name, parts, request)
upload_path = File.join(@root, bucket.name)
base_path = File.join(upload_path, "#{upload_id}_#{object_name}")
# Lists the stored parts of an in-progress multipart upload, mirroring
# the S3 ListParts operation.
#
# bucket_name - name of the bucket holding the upload
# object_name - key of the object being assembled
# upload_id   - the multipart upload id; part directories live under the
#               bucket root as "<upload_id>_<object_name>_part<N>"
#
# Returns an array of hashes (:part_num, :etag, :size, :last_mod),
# sorted by ascending part number as S3 does.
def list_object_parts(bucket_name, object_name, upload_id)
  pattern = File.join(@root, bucket_name, "#{upload_id}_#{object_name}*")
  parts = Dir.glob(pattern).map do |path|
    # Guard: skip any glob hit that is not actually a "_part<N>" directory
    # (the trailing * can match unrelated entries).
    match = path.match(/_part(\d+)\z/)
    next unless match

    # Block form guarantees the file handle is closed even on error.
    data = File.open(File.join(path, SHUCK_METADATA_DIR, 'content'), 'rb', &:read)
    {
      # Integer part number so the sort below is numeric —
      # "part10" would otherwise sort before "part2".
      part_num: match[1].to_i,
      etag: Digest::MD5.hexdigest(data),
      size: data.size,
      last_mod: File.mtime(path).utc.iso8601(SUBSECOND_PRECISION)
    }
  end
  parts.compact.sort_by { |part| part[:part_num] }
end

def combine_object_parts(bucket_name, object_name, upload_id, parts, request)
upload_path = File.join(@root, bucket_name)
base_path = File.join(upload_path, "#{upload_id}_#{object_name}")

complete_file = ""
chunk = ""
part_paths = []

part_paths = []
parts.sort_by { |part| part[:number] }.each do |part|
part_path = "#{base_path}_part#{part[:number]}"
content_path = File.join(part_path, SHUCK_METADATA_DIR, 'content')

File.open(content_path, 'rb') { |f| chunk = f.read }
file = File.open(content_path, 'rb')
chunk = file.read
etag = Digest::MD5.hexdigest(chunk)

raise new Error "invalid file chunk" unless part[:etag] == etag
fail StandardError, "invalid part #{part[:etag]}" unless part[:etag] == etag
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why did you switch this to fail?

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

following the rule of thumb of only using raise in rescue blocks (in other words: because my linter told me so)

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

forgot to mention: I can change that to raise if that's more in line with you codebase

complete_file << chunk
part_paths << part_path
part_paths << part_path
file.close
end

bucket = get_bucket(bucket_name)
object = do_store_object(bucket, object_name, complete_file, request)

# clean up parts
Expand All @@ -256,6 +287,19 @@ def delete_object(bucket,object_name,request)
end
end

# Deletes multiple objects in a single request (S3 "Multi-Object Delete",
# POST /?delete).
#
# bucket  - the Bucket object to delete from
# request - the incoming request; its body is the Delete XML document
#           (<Delete><Object><Key>...</Key></Object>...</Delete>)
#
# Each listed key is removed via delete_object. On any parse or delete
# failure the error is logged and nil is returned (best-effort, so one
# bad body does not take the server down).
def delete_objects(bucket, request)
  # Fully-qualified constant: avoids depending on the top-level
  # `include REXML`, which pollutes Object.
  xmldoc = REXML::Document.new(request.body)
  xmldoc.elements.each("Delete/Object/Key") do |key|
    delete_object(bucket, key.text, request)
  end
rescue => e
  # Named exception variable instead of the cryptic $! global.
  puts e.message
  e.backtrace.each { |line| puts line }
  nil
end

# TODO: abstract getting meta data from request.
def create_metadata(content,request)
metadata = {}
Expand Down
44 changes: 33 additions & 11 deletions lib/fakes3/server.rb
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ module FakeS3
class Request
CREATE_BUCKET = "CREATE_BUCKET"
LIST_BUCKETS = "LIST_BUCKETS"
LIST_PARTS = "LIST_PARTS"
LS_BUCKET = "LS_BUCKET"
HEAD = "HEAD"
STORE = "STORE"
Expand All @@ -24,6 +25,7 @@ class Request
SET_ACL = "SET_ACL"
MOVE = "MOVE"
DELETE_OBJECT = "DELETE_OBJECT"
DELETE_OBJECTS = "DELETE_OBJECTS"
DELETE_BUCKET = "DELETE_BUCKET"

attr_accessor :bucket,:object,:type,:src_bucket,
Expand Down Expand Up @@ -91,6 +93,16 @@ def do_GET(request, response)
response.status = 200
response.body = XmlAdapter.acl()
response['Content-Type'] = 'application/xml'
when 'LIST_PARTS'
upload_id = s_req.query['uploadId']
response.status = 200
response.body = XmlAdapter.list_parts_result(
s_req.bucket,
s_req.object,
upload_id,
@store.list_object_parts(s_req.bucket, s_req.object, upload_id)
)
response['Content-Type'] = 'application/xml'
when 'GET'
real_obj = @store.get_object(s_req.bucket,s_req.object,request)
if !real_obj
Expand Down Expand Up @@ -230,6 +242,10 @@ def do_multipartPUT(request, response)
end

def do_POST(request,response)
if request.query_string =~ /delete/i
return do_DELETE(request, response)
end

s_req = normalize_request(request)
key = request.query['key']
query = CGI::parse(request.request_uri.query || "")
Expand All @@ -246,17 +262,14 @@ def do_POST(request,response)
</InitiateMultipartUploadResult>
eos
elsif query.has_key?('uploadId')
upload_id = query['uploadId'].first
bucket_obj = @store.get_bucket(s_req.bucket)
real_obj = @store.combine_object_parts(
bucket_obj,
upload_id,
combined_obj = @store.combine_object_parts(
s_req.bucket,
s_req.object,
parse_complete_multipart_upload(request),
request
query['uploadId'].first,
parse_complete_multipart_upload(s_req.webrick_request),
s_req.webrick_request
)

response.body = XmlAdapter.complete_multipart_result real_obj
response.body = XmlAdapter.complete_multipart_result combined_obj
elsif request.content_type =~ /^multipart\/form-data; boundary=(.+)/
key = request.query['key']

Expand Down Expand Up @@ -309,6 +322,9 @@ def do_DELETE(request, response)
@store.delete_object(bucket_obj,s_req.object,s_req.webrick_request)
when Request::DELETE_BUCKET
@store.delete_bucket(s_req.bucket)
when Request::DELETE_OBJECTS
bucket_obj = @store.get_bucket(s_req.bucket)
@store.delete_objects(bucket_obj, s_req.webrick_request)
end

response.status = 204
Expand Down Expand Up @@ -373,6 +389,9 @@ def normalize_get(webrick_req, s_req)
else
if query["acl"] == ""
s_req.type = Request::GET_ACL
elsif query.has_key?('uploadId')
s_req.type = Request::LIST_PARTS
s_req.query = query
else
s_req.type = Request::GET
end
Expand Down Expand Up @@ -423,8 +442,6 @@ def normalize_put(webrick_req, s_req)
s_req.src_object = src_elems[1 + root_offset,src_elems.size].join("/")
s_req.type = Request::COPY
end

s_req.webrick_request = webrick_req
end

def normalize_post(webrick_req,s_req)
Expand All @@ -441,6 +458,10 @@ def normalize_post(webrick_req,s_req)
else
s_req.object = path[1..-1]
end

if webrick_req.query_string =~ /delete/i
s_req.type = Request::DELETE_OBJECTS
end
end

# This method takes a webrick request and generates a normalized FakeS3 request
Expand All @@ -449,6 +470,7 @@ def normalize_request(webrick_req)
host = host_header.split(':')[0]

s_req = Request.new
s_req.webrick_request = webrick_req
s_req.path = webrick_req.path
s_req.is_path_style = true

Expand Down
34 changes: 34 additions & 0 deletions lib/fakes3/xml_adapter.rb
Original file line number Diff line number Diff line change
Expand Up @@ -200,6 +200,40 @@ def self.copy_object_result(object)
output
end

# <ListPartsResult>
# <Bucket>example-bucket</Bucket>
# <Key>example-object</Key>
# <UploadId>upload-id</UploadId>
# <PartNumberMarker>0</PartNumberMarker>
# <Part>
# <PartNumber>1</PartNumber>
# <LastModified>2010-11-10T20:48:34.000Z</LastModified>
# <ETag>"7778aef83f66abc1fa1e8477f296d394"</ETag>
# <Size>10485760</Size>
# </Part>
# </ListPartsResult>
# Renders the XML response body for the S3 ListParts operation
# (see the example document in the comment above).
#
# bucket_name - bucket containing the multipart upload
# object_name - key of the object being uploaded
# upload_id   - the multipart upload id
# parts       - array of hashes with :part_num, :etag, :size, :last_mod
#               (as produced by FileStore#list_object_parts)
#
# Returns the XML document as a String.
def self.list_parts_result(bucket_name, object_name, upload_id, parts)
  output = ""
  xml = Builder::XmlMarkup.new(:target => output)
  xml.instruct! :xml, :version => "1.0", :encoding => "UTF-8"
  xml.ListPartsResult do |result|
    # S3 element names are capitalized: <Bucket>/<Key>. Lowercase
    # result.bucket / result.key would emit elements clients never find.
    result.Bucket(bucket_name)
    result.Key(object_name)
    result.UploadId(upload_id)
    result.PartNumberMarker(0)
    parts.each do |part|
      result.Part do |part_xml|
        part_xml.PartNumber(part[:part_num])
        # S3 wraps ETags in double quotes; String#inspect supplies them.
        part_xml.ETag(part[:etag].inspect)
        part_xml.Size(part[:size])
        part_xml.LastModified(part[:last_mod])
      end
    end
  end

  output
end

# <CompleteMultipartUploadResult>
# <Location>http://Example-Bucket.s3.amazonaws.com/Example-Object</Location>
# <Bucket>Example-Bucket</Bucket>
Expand Down