diff --git a/.gitignore b/.gitignore
index ee63d49d..ff3b1f06 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,3 +6,5 @@ test_root
# Don't check in RVM/rbenv files
.ruby-version
+
+fakes3.sublime-workspace
diff --git a/Gemfile.lock b/Gemfile.lock
index a8fc2e95..4ac317a7 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -16,17 +16,14 @@ GEM
json (~> 1.4)
nokogiri (>= 1.4.4)
builder (3.2.2)
- byebug (4.0.1)
- columnize (= 0.9.0)
- rb-readline (= 0.5.2)
- columnize (0.9.0)
json (1.8.1)
mime-types (1.25)
mini_portile (0.6.1)
nokogiri (1.6.4.1)
mini_portile (~> 0.6.0)
+ nokogiri (1.6.4.1-x64-mingw32)
+ mini_portile (~> 0.6.0)
rake (10.1.0)
- rb-readline (0.5.2)
rest-client (1.6.7)
mime-types (>= 1.16)
right_aws (3.1.0)
@@ -37,13 +34,16 @@ GEM
PLATFORMS
ruby
+ x64-mingw32
DEPENDENCIES
aws-s3
aws-sdk-v1
bundler (>= 1.0.0)
- byebug
fakes3!
rake
rest-client
right_aws
+
+BUNDLED WITH
+ 1.10.6
diff --git a/Rakefile b/Rakefile
index a0b7faed..51ef32dd 100644
--- a/Rakefile
+++ b/Rakefile
@@ -6,7 +6,7 @@ Bundler::GemHelper.install_tasks
Rake::TestTask.new(:test) do |t|
t.libs << "."
- t.test_files =
+ t.test_files =
FileList['test/*_test.rb'].exclude('test/s3_commands_test.rb')
end
diff --git a/lib/fakes3/file_store.rb b/lib/fakes3/file_store.rb
index 3f2d4111..030df31b 100644
--- a/lib/fakes3/file_store.rb
+++ b/lib/fakes3/file_store.rb
@@ -5,6 +5,8 @@
require 'fakes3/rate_limitable_file'
require 'digest/md5'
require 'yaml'
+require 'rexml/document'
+include REXML
module FakeS3
class FileStore
@@ -24,6 +26,20 @@ def initialize(root)
bucket_obj = Bucket.new(bucket_name,Time.now,[])
@buckets << bucket_obj
@bucket_hash[bucket_name] = bucket_obj
+
+ #pre-load objects into bucket, so ListObjects calls work.
+ Dir[File.join(bucket,'/**/.fakes3_metadataFFF')].each do |fullpath|
+ key = fullpath.sub('/.fakes3_metadataFFF', '').sub(bucket + '/', '')
+ object = get_object(bucket_name, key, 'norequest')
+ bucket_obj.add(object)
+ object.io.close
+ end
+ end
+
+ puts "=================================================="
+ puts "Buckets initialized with contents:"
+ buckets.each do |b|
+ puts "#{b.name} - #{b.objects.count} items"
end
end
@@ -228,7 +244,7 @@ def combine_object_parts(bucket, upload_id, object_name, parts, request)
File.open(content_path, 'rb') { |f| chunk = f.read }
etag = Digest::MD5.hexdigest(chunk)
- raise new Error "invalid file chunk" unless part[:etag] == etag
+      raise RuntimeError, "invalid file chunk" unless part[:etag] == etag
complete_file << chunk
part_paths << part_path
end
@@ -256,6 +272,19 @@ def delete_object(bucket,object_name,request)
end
end
+ def delete_objects(bucket,request)
+ begin
+      xmldoc = REXML::Document.new(request.body)
+ xmldoc.elements.each("Delete/Object/Key") do |key|
+ delete_object(bucket, key.text, request)
+ end
+ rescue
+ puts $!
+ $!.backtrace.each { |line| puts line }
+ return nil
+ end
+ end
+
# TODO: abstract getting meta data from request.
def create_metadata(content,request)
metadata = {}
diff --git a/lib/fakes3/server.rb b/lib/fakes3/server.rb
index 00affde9..2fa7eff8 100644
--- a/lib/fakes3/server.rb
+++ b/lib/fakes3/server.rb
@@ -22,8 +22,9 @@ class Request
GET_ACL = "GET_ACL"
SET_ACL = "SET_ACL"
MOVE = "MOVE"
- DELETE_OBJECT = "DELETE_OBJECT"
DELETE_BUCKET = "DELETE_BUCKET"
+ DELETE_OBJECT = "DELETE_OBJECT"
+ DELETE_OBJECTS = "DELETE_OBJECTS"
attr_accessor :bucket,:object,:type,:src_bucket,
:src_object,:method,:webrick_request,
@@ -73,12 +74,7 @@ def do_GET(request, response)
if bucket_obj
response.status = 200
response['Content-Type'] = "application/xml"
- query = {
- :marker => s_req.query["marker"] ? s_req.query["marker"].to_s : nil,
- :prefix => s_req.query["prefix"] ? s_req.query["prefix"].to_s : nil,
- :max_keys => s_req.query["max_keys"] ? s_req.query["max_keys"].to_s : nil,
- :delimiter => s_req.query["delimiter"] ? s_req.query["delimiter"].to_s : nil
- }
+ query = get_options(s_req)
bq = bucket_obj.query_for_range(query)
response.body = XmlAdapter.bucket_query(bq)
else
@@ -229,6 +225,10 @@ def do_multipartPUT(request, response)
end
def do_POST(request,response)
+ if request.query_string =~ /delete/i
+ return do_DELETE(request, response)
+ end
+
s_req = normalize_request(request)
key = request.query['key']
query = CGI::parse(request.request_uri.query || "")
@@ -301,17 +301,25 @@ def do_POST(request,response)
def do_DELETE(request,response)
s_req = normalize_request(request)
-
+ response.status = 204
+ response.body = ""
+
case s_req.type
when Request::DELETE_OBJECT
bucket_obj = @store.get_bucket(s_req.bucket)
@store.delete_object(bucket_obj,s_req.object,s_req.webrick_request)
when Request::DELETE_BUCKET
@store.delete_bucket(s_req.bucket)
+ when Request::DELETE_OBJECTS
+ bucket_obj = @store.get_bucket(s_req.bucket)
+ @store.delete_objects(bucket_obj,s_req.webrick_request)
+ response.status = 200
+      response.body = <<-eos.strip
+        <?xml version="1.0" encoding="UTF-8"?>
+        <DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
+        </DeleteResult>
+      eos
end
-
- response.status = 204
- response.body = ""
end
def do_OPTIONS(request, response)
@@ -441,6 +449,10 @@ def normalize_post(webrick_req,s_req)
else
s_req.object = path[1..-1]
end
+
+ if webrick_req.query_string =~ /delete/i
+ s_req.type = Request::DELETE_OBJECTS
+ end
end
# This method takes a webrick request and generates a normalized FakeS3 request
@@ -477,6 +489,11 @@ def normalize_request(webrick_req)
return s_req
end
+ def gral_strip(string, chars)
+ chars = Regexp.escape(chars)
+ string.gsub(/\A[#{chars}]+|[#{chars}]+\z/, "")
+ end
+
def parse_complete_multipart_upload request
parts_xml = ""
request.body { |chunk| parts_xml << chunk }
@@ -487,7 +504,7 @@ def parse_complete_multipart_upload request
parts_xml.collect do |xml|
{
        number: xml[/\<PartNumber\>(\d+)\<\/PartNumber\>/, 1].to_i,
-        etag: xml[/\<ETag\>\"(.+)\"\<\/ETag\>/, 1]
+        etag: gral_strip(xml[/\<ETag\>(.+)\<\/ETag\>/, 1], "\"")
}
end
end
@@ -501,6 +518,15 @@ def dump_request(request)
end
puts "----------End Dump -------------"
end
+
+ def get_options(s_req)
+ return {
+ :marker => s_req.query["marker"] ? s_req.query["marker"].to_s : nil,
+ :prefix => s_req.query["prefix"] ? s_req.query["prefix"].to_s : nil,
+ :max_keys => s_req.query["max_keys"] ? s_req.query["max_keys"].to_s : nil,
+ :delimiter => s_req.query["delimiter"] ? s_req.query["delimiter"].to_s : nil
+ }
+ end
end
diff --git a/lib/fakes3/sorted_object_list.rb b/lib/fakes3/sorted_object_list.rb
index a84ed003..9afbbf5c 100644
--- a/lib/fakes3/sorted_object_list.rb
+++ b/lib/fakes3/sorted_object_list.rb
@@ -1,11 +1,12 @@
require 'set'
module FakeS3
class S3MatchSet
- attr_accessor :matches,:is_truncated,:common_prefixes
+ attr_accessor :matches,:is_truncated,:common_prefixes,:next_marker
def initialize
@matches = []
@is_truncated = false
@common_prefixes = []
+ @next_marker = ""
end
end
@@ -102,7 +103,7 @@ def list(options)
ms.common_prefixes << base_prefix + chunks[0] + delimiter
last_chunk = chunks[0]
else
- is_truncated = true
+ ms.is_truncated = true
break
end
end
@@ -117,7 +118,8 @@ def list(options)
if count <= max_keys
ms.matches << s3_object
else
- is_truncated = true
+ ms.is_truncated = true
+ ms.next_marker = s3_object.name
break
end
end
diff --git a/lib/fakes3/xml_adapter.rb b/lib/fakes3/xml_adapter.rb
index f575df02..13ee1179 100644
--- a/lib/fakes3/xml_adapter.rb
+++ b/lib/fakes3/xml_adapter.rb
@@ -218,5 +218,20 @@ def self.complete_multipart_result(object)
}
output
end
+
+  # <DeleteResult>
+  #   <LastModified>2009-10-28T22:32:00</LastModified>
+  #   <ETag>"9b2cf535f27731c974343645a3985328"</ETag>
+  # </DeleteResult>
+ def self.delete_objects_result(object)
+ output = ""
+ xml = Builder::XmlMarkup.new(:target => output)
+ xml.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
+ xml.DeleteResult(:xmlns => "http://s3.amazonaws.com/doc/2006-03-01/") { |result|
+
+ }
+ output
+ end
+
end
end
diff --git a/test/aws_sdk_commands_test.rb b/test/aws_sdk_commands_test.rb
index a08a7992..20b9d4dc 100644
--- a/test/aws_sdk_commands_test.rb
+++ b/test/aws_sdk_commands_test.rb
@@ -10,6 +10,21 @@ def setup
:use_ssl => false)
end
+ def test_list_objects
+ #assemble
+ bucket = @s3.buckets["test_list_objects"]
+ object = bucket.objects["key1"]
+ object.write("asdf")
+ object.copy_to("prefix1/sub1/key2")
+ object.copy_to("prefix1/sub2/key3")
+
+ #act & assert
+ assert_equal 3, bucket.objects.count
+ assert_equal 2, bucket.objects.with_prefix('prefix1/').count
+ assert_equal 1, bucket.objects.with_prefix('prefix1/sub2/').count
+ assert (not bucket.objects.with_prefix('prefix1/').collect(&:key).include? 'key1')
+ end
+
def test_copy_to
bucket = @s3.buckets["test_copy_to"]
object = bucket.objects["key1"]
@@ -28,4 +43,35 @@ def test_multipart_upload
assert object.exists?
assert_equal "thisisaverybigfile", object.read
end
+
+ def test_delete_multiple
+ #assemble
+ bucket = @s3.buckets["test_delete_multiple"]
+ object = bucket.objects["key1"]
+ object.write("asdf")
+ object.copy_to("key2")
+ assert_equal 2, bucket.objects.count
+
+ #act
+ bucket.objects.delete_all
+
+ #assert
+ assert_equal 0, bucket.objects.count
+ end
+
+ def test_delete_multiple_with_prefix
+ #assemble
+ bucket = @s3.buckets["test_delete_multiple"]
+ object = bucket.objects["key1"]
+ object.write("asdf")
+ object.copy_to("prefix1/key2")
+ object.copy_to("prefix1/key3")
+ assert_equal 3, bucket.objects.count
+
+ #act
+ bucket.objects.with_prefix('prefix1/').delete_all
+
+ #assert
+ assert_equal 1, bucket.objects.count
+ end
end