From 880d31bc78f74754a01d9bb55e5d4228e62b513d Mon Sep 17 00:00:00 2001 From: Jaime Melis Date: Wed, 3 Feb 2016 13:50:41 +0100 Subject: [PATCH] Feature #4217: Extend s3 drivers to support ceph/s3 --- src/datastore_mad/remotes/downloader.sh | 11 +++-- src/datastore_mad/remotes/fs/cp | 5 ++- src/market_mad/remotes/s3/S3.rb | 56 ++++++++++++++----------- src/market_mad/remotes/s3/delete | 22 +++++++--- src/market_mad/remotes/s3/import | 23 +++++++--- src/market_mad/remotes/s3/monitor | 20 +++++++-- 6 files changed, 92 insertions(+), 45 deletions(-) diff --git a/src/datastore_mad/remotes/downloader.sh b/src/datastore_mad/remotes/downloader.sh index e365a2beee..c4ff24d9b1 100755 --- a/src/datastore_mad/remotes/downloader.sh +++ b/src/datastore_mad/remotes/downloader.sh @@ -131,10 +131,11 @@ function s3_request { FROM="$1" + ENDPOINT=${S3_ENDPOINT:-https://s3.amazonaws.com} OBJECT=$(basename $FROM) BUCKET=$(basename $(dirname $FROM)) - DATE="`date +'%a, %d %b %Y %H:%M:%S %z'`" + DATE="`date -u +'%a, %d %b %Y %H:%M:%S GMT'`" AUTH_STRING="GET\n\n\n${DATE}\n/${BUCKET}/${OBJECT}" SIGNED_AUTH_STRING=`echo -en "$AUTH_STRING" | \ @@ -143,7 +144,7 @@ function s3_request echo " -H \"Date: ${DATE}\"" \ " -H \"Authorization: AWS ${S3_ACCESS_KEY_ID}:${SIGNED_AUTH_STRING}\"" \ - " https://${BUCKET}.s3.amazonaws.com/${OBJECT}" + " ${ENDPOINT}/${BUCKET}/${OBJECT}" } function get_rbd_cmd @@ -234,13 +235,15 @@ TO="$2" # File used by the hasher function to store the resulting hash export HASH_FILE="/tmp/downloader.hash.$$" +GLOBAL_CURL_ARGS="--fail -sS -k -L" + case "$FROM" in http://*|https://*) # -k so it does not check the certificate # -L to follow redirects # -sS to hide output except on failure # --limit_rate to limit the bw - curl_args="-sS -k -L $FROM" + curl_args="$GLOBAL_CURL_ARGS $FROM" if [ -n "$LIMIT_RATE" ]; then curl_args="--limit-rate $LIMIT_RATE $curl_args" @@ -265,7 +268,7 @@ s3://*) fi curl_args="$(s3_request $FROM)" - command="curl $curl_args" + command="curl $GLOBAL_CURL_ARGS $curl_args" ;; rbd://*) command="$(get_rbd_cmd $FROM)" diff --git a/src/datastore_mad/remotes/fs/cp b/src/datastore_mad/remotes/fs/cp index 3f779d5853..987d39f3c7 100755 --- a/src/datastore_mad/remotes/fs/cp +++ b/src/datastore_mad/remotes/fs/cp @@ -59,7 +59,8 @@ done < <($XPATH /DS_DRIVER_ACTION_DATA/DATASTORE/BASE_PATH \ /DS_DRIVER_ACTION_DATA/DATASTORE/TEMPLATE/NO_DECOMPRESS \ /DS_DRIVER_ACTION_DATA/DATASTORE/TEMPLATE/LIMIT_TRANSFER_BW \ /DS_DRIVER_ACTION_DATA/MARKETPLACE/TEMPLATE/ACCESS_KEY_ID \ - /DS_DRIVER_ACTION_DATA/MARKETPLACE/TEMPLATE/SECRET_ACCESS_KEY) + /DS_DRIVER_ACTION_DATA/MARKETPLACE/TEMPLATE/SECRET_ACCESS_KEY \ + /DS_DRIVER_ACTION_DATA/MARKETPLACE/TEMPLATE/ENDPOINT) unset i @@ -76,10 +77,12 @@ NO_DECOMPRESS="${XPATH_ELEMENTS[i++]}" LIMIT_TRANSFER_BW="${XPATH_ELEMENTS[i++]}" S3_ACCESS_KEY_ID="${XPATH_ELEMENTS[i++]}" S3_SECRET_ACCESS_KEY="${XPATH_ELEMENTS[i++]}" +S3_ENDPOINT="${XPATH_ELEMENTS[i++]}" # Must be made available to downloader.sh export S3_ACCESS_KEY_ID export S3_SECRET_ACCESS_KEY +export S3_ENDPOINT DST=`generate_image_path` IMAGE_HASH=`basename $DST` diff --git a/src/market_mad/remotes/s3/S3.rb b/src/market_mad/remotes/s3/S3.rb index 4ea8165c65..59510898a8 100644 --- a/src/market_mad/remotes/s3/S3.rb +++ b/src/market_mad/remotes/s3/S3.rb @@ -14,34 +14,40 @@ # limitations under the License. # # -------------------------------------------------------------------------- # -# This class is a generic wrapper to the s3 upload and delete facilities. 
+require 'aws-sdk' + +# This class is a generic wrapper to the s3 gem. # It can either handle simple or multipart uploads, but the logic to decide # which uploader to use is not included in this class. class S3 + attr_accessor :name, :client + def initialize(h) - @name = h[:name] + @client = Aws::S3::Client.new(h) + end - @config = { - :bucket => h[:bucket], - :md5 => h[:md5], - :region => h[:region], - :access_key_id => h[:access_key_id], - :secret_access_key => h[:secret_access_key] - } + def bucket=(bucket) + @bucket = bucket - @client = Aws::S3::Client.new({ - :region => @config[:region], - :access_key_id => @config[:access_key_id], - :secret_access_key => @config[:secret_access_key] - }) - - @parts = [] - @part_number = 1 + # Implicit creation of the bucket + begin + @client.head_bucket({ + :bucket => @bucket + }) + rescue Aws::S3::Errors::NotFound + puts "create" + @client.create_bucket({ + :bucket => @bucket + }) + end end def create_multipart_upload + @parts = [] + @part_number = 1 + resp = @client.create_multipart_upload({ - :bucket => @config[:bucket], + :bucket => @bucket, :key => @name }) @@ -50,7 +56,7 @@ class S3 def complete_multipart_upload @client.complete_multipart_upload({ - :bucket => @config[:bucket], + :bucket => @bucket, :key => @name, :upload_id => @upload_id, :multipart_upload => {:parts => @parts} @@ -61,7 +67,7 @@ class S3 @client.abort_multipart_upload({ :upload_id => @upload_id, :key => @name, - :bucket => @config[:bucket] + :bucket => @bucket }) end @@ -71,7 +77,7 @@ class S3 :upload_id => @upload_id, :part_number => @part_number, :key => @name, - :bucket => @config[:bucket] + :bucket => @bucket }) @parts << { @@ -85,14 +91,14 @@ class S3 def put_object(body) @client.put_object({ :body => body, - :bucket => @config[:bucket], + :bucket => @bucket, :key => @name }) end def delete_object @client.delete_object({ - :bucket => @config[:bucket], + :bucket => @bucket, :key => @name }) end @@ -100,7 +106,7 @@ class S3 def exists? begin !!@client.head_object({ - :bucket => @config[:bucket], + :bucket => @bucket, :key => @name }) rescue Aws::S3::Errors::NotFound @@ -110,7 +116,7 @@ class S3 def get_bucket_size resp = @client.list_objects({ - bucket: @config[:bucket] + bucket: @bucket }) size = 0 diff --git a/src/market_mad/remotes/s3/delete b/src/market_mad/remotes/s3/delete index 347773ab58..844018f11f 100755 --- a/src/market_mad/remotes/s3/delete +++ b/src/market_mad/remotes/s3/delete @@ -39,7 +39,6 @@ $: << File.dirname(__FILE__) require 'base64' require 'rexml/document' require 'getoptlong' -require 'aws-sdk' require 'pp' require 'S3' @@ -52,11 +51,17 @@ xml = REXML::Document.new(Base64::decode64(ARGV[0])).root source = xpath(xml, 'MARKETPLACEAPP/SOURCE') +# required access_key_id = xpath(xml, 'MARKETPLACE/TEMPLATE/ACCESS_KEY_ID') secret_access_key = xpath(xml, 'MARKETPLACE/TEMPLATE/SECRET_ACCESS_KEY') bucket = xpath(xml, 'MARKETPLACE/TEMPLATE/BUCKET') region = xpath(xml, 'MARKETPLACE/TEMPLATE/REGION') +# optional +signature_version = xpath(xml, 'MARKETPLACE/TEMPLATE/SIGNATURE_VERSION') +endpoint = xpath(xml, 'MARKETPLACE/TEMPLATE/ENDPOINT') +force_path_style = xpath(xml, 'MARKETPLACE/TEMPLATE/FORCE_PATH_STYLE') + name = File.basename(source) if name.empty? @@ -64,13 +69,20 @@ if name.empty? exit 1 end -s3 = S3.new( - :name => name, - :bucket => bucket, +s3_config = { :region => region, :access_key_id => access_key_id, :secret_access_key => secret_access_key -) +} + +s3_config[:signature_version] = signature_version if !signature_version.to_s.empty? 
+s3_config[:endpoint] = endpoint if !endpoint.to_s.empty? +s3_config[:force_path_style] = true if force_path_style.to_s.downcase == "yes" + +s3 = S3.new(s3_config) + +s3.name = name +s3.bucket = bucket if !s3.exists? STDERR.puts "Object '#{name}' does not exist." diff --git a/src/market_mad/remotes/s3/import b/src/market_mad/remotes/s3/import index 47e2a48057..2f1da80db5 100755 --- a/src/market_mad/remotes/s3/import +++ b/src/market_mad/remotes/s3/import @@ -39,7 +39,6 @@ $: << File.dirname(__FILE__) require 'base64' require 'rexml/document' require 'getoptlong' -require 'aws-sdk' require 'open3' require 'pp' @@ -59,11 +58,17 @@ md5 = xpath(xml, 'MD5') id = xpath(xml, 'MARKETPLACEAPP/ID') +# required access_key_id = xpath(xml, 'MARKETPLACE/TEMPLATE/ACCESS_KEY_ID') secret_access_key = xpath(xml, 'MARKETPLACE/TEMPLATE/SECRET_ACCESS_KEY') bucket = xpath(xml, 'MARKETPLACE/TEMPLATE/BUCKET') region = xpath(xml, 'MARKETPLACE/TEMPLATE/REGION') +# optional +signature_version = xpath(xml, 'MARKETPLACE/TEMPLATE/SIGNATURE_VERSION') +endpoint = xpath(xml, 'MARKETPLACE/TEMPLATE/ENDPOINT') +force_path_style = xpath(xml, 'MARKETPLACE/TEMPLATE/FORCE_PATH_STYLE') + name = "marketapp-#{id}" source = "s3://#{bucket}/#{name}" @@ -71,14 +76,20 @@ source = "s3://#{bucket}/#{name}" READ_LENGTH = 10*1024*1024 # Read in chunks of 10MB -s3 = S3.new( - :name => name, - :md5 => md5, - :bucket => bucket, +s3_config = { :region => region, :access_key_id => access_key_id, :secret_access_key => secret_access_key -) +} + +s3_config[:signature_version] = signature_version if !signature_version.to_s.empty? +s3_config[:endpoint] = endpoint if !endpoint.to_s.empty? +s3_config[:force_path_style] = true if force_path_style.to_s.downcase == "yes" + +s3 = S3.new(s3_config) + +s3.name = name +s3.bucket = bucket if s3.exists? STDERR.puts "Object '#{name}' already exists." diff --git a/src/market_mad/remotes/s3/monitor b/src/market_mad/remotes/s3/monitor index 1677b700f7..181ffb950d 100755 --- a/src/market_mad/remotes/s3/monitor +++ b/src/market_mad/remotes/s3/monitor @@ -41,7 +41,6 @@ TOTAL_MB_DEFAULT = 1048576 # Default maximum 1TB require 'base64' require 'rexml/document' require 'getoptlong' -require 'aws-sdk' require 'pp' require 'S3' @@ -52,18 +51,31 @@ end xml = REXML::Document.new(Base64::decode64(ARGV[0])).root +# required access_key_id = xpath(xml, 'MARKETPLACE/TEMPLATE/ACCESS_KEY_ID') secret_access_key = xpath(xml, 'MARKETPLACE/TEMPLATE/SECRET_ACCESS_KEY') bucket = xpath(xml, 'MARKETPLACE/TEMPLATE/BUCKET') region = xpath(xml, 'MARKETPLACE/TEMPLATE/REGION') total_mb = xpath(xml, 'MARKETPLACE/TEMPLATE/TOTAL_MB') || TOTAL_MB_DEFAULT -s3 = S3.new( - :bucket => bucket, +# optional +signature_version = xpath(xml, 'MARKETPLACE/TEMPLATE/SIGNATURE_VERSION') +endpoint = xpath(xml, 'MARKETPLACE/TEMPLATE/ENDPOINT') +force_path_style = xpath(xml, 'MARKETPLACE/TEMPLATE/FORCE_PATH_STYLE') + +s3_config = { :region => region, :access_key_id => access_key_id, :secret_access_key => secret_access_key -) +} + +s3_config[:signature_version] = signature_version if !signature_version.to_s.empty? +s3_config[:endpoint] = endpoint if !endpoint.to_s.empty? +s3_config[:force_path_style] = true if force_path_style.to_s.downcase == "yes" + +s3 = S3.new(s3_config) + +s3.bucket = bucket used_mb = (s3.get_bucket_size.to_f/1024/1024).ceil free_mb = total_mb - used_mb
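
For reference, a minimal usage sketch of the refactored S3 wrapper driving a Ceph RGW (or any S3-compatible) endpoint. This is illustrative only and not part of the patch: the endpoint URL, credentials, bucket and object names below are placeholders, and it assumes S3.rb is on the load path, as the driver scripts arrange with `$: << File.dirname(__FILE__)`.

# Illustrative sketch -- not part of the patch. Placeholder endpoint,
# credentials, bucket and key; assumes S3.rb (which requires 'aws-sdk')
# is in the same directory.
$: << File.dirname(__FILE__)
require 'S3'

s3_config = {
    :region            => 'us-east-1',                     # still required by aws-sdk
    :access_key_id     => 'EXAMPLEACCESSKEY',              # placeholder
    :secret_access_key => 'EXAMPLESECRETACCESSKEY',        # placeholder
    :endpoint          => 'https://rgw.example.com:8080',  # placeholder Ceph RGW URL
    :force_path_style  => true                             # path-style URLs for Ceph
}

s3 = S3.new(s3_config)       # wraps Aws::S3::Client.new(s3_config)

s3.name   = 'marketapp-42'   # object key (placeholder)
s3.bucket = 'opennebula'     # head_bucket + implicit create_bucket if missing

# Simple upload; the import driver streams large files through the
# multipart methods (create/upload_part/complete) instead.
s3.put_object(File.read('appliance.img')) unless s3.exists?

used_mb = (s3.get_bucket_size.to_f / 1024 / 1024).ceil    # as in the monitor script
puts used_mb

FORCE_PATH_STYLE = "yes" in the marketplace template maps to :force_path_style => true here, which is usually needed for Ceph RGW since virtual-hosted bucket addressing requires wildcard DNS; it matches the path-style URL (${ENDPOINT}/${BUCKET}/${OBJECT}) that the patched s3_request builds in downloader.sh.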