Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
139 changes: 93 additions & 46 deletions lib/cloud_controller/benchmark/blobstore.rb
Original file line number Diff line number Diff line change
@@ -1,48 +1,70 @@
# frozen_string_literal: true

require 'benchmark'
require 'find'
require 'zip'
require 'tempfile'
require 'fileutils'
require 'securerandom'

module VCAP::CloudController
module Benchmark
class Blobstore
# Droplet sizes exercised by the large-droplet benchmark, label => size in bytes.
# NOTE: the '500MB' entry previously mapped to 400 MiB; value fixed to match its label.
SIZES = {
  '0.005MB' => (0.005 * 1024 * 1024).to_i,
  '0.01MB' => (0.01 * 1024 * 1024).to_i,
  '0.1MB' => (0.1 * 1024 * 1024).to_i,
  '1MB' => 1 * 1024 * 1024,
  '10MB' => 10 * 1024 * 1024,
  '50MB' => 50 * 1024 * 1024,
  '100MB' => 100 * 1024 * 1024,
  '500MB' => 500 * 1024 * 1024,
  '1000MB' => 1000 * 1024 * 1024
}.freeze

# A single reusable 1 MiB binary ('ASCII-8BIT') chunk, so large benchmark files
# can be written incrementally without allocating the whole payload at once.
CHUNK_1MB = '0'.b * (1024 * 1024)

def perform
big_droplet_guids = []
resource_dir = generate_resources

resource_timing = resource_match(resource_dir)
puts("resource match timing: #{resource_timing * 1000}ms")
log_timing('resource match timing', resource_match(resource_dir))

zip_output_dir = Dir.mktmpdir
zip_file = zip_resources(resource_dir, zip_output_dir)

package_guid, resource_timing = upload_package(zip_file)
puts("package upload timing: #{resource_timing * 1000}ms")

resource_timing = download_package(package_guid, resource_dir)
puts("package download timing: #{resource_timing * 1000}ms")
package_guid, timing = upload_package(zip_file)
log_timing('package upload timing', timing)
log_timing('package download timing', download_package(package_guid, resource_dir))

bytes_read, resource_timing = download_buildpacks(resource_dir)
bytes_read, timing = download_buildpacks(resource_dir)
puts("downloaded #{Buildpack.count} buildpacks, total #{bytes_read} bytes read")
puts("buildpack download timing: #{resource_timing * 1000}ms")
log_timing('buildpack download timing', timing)

droplet_guid, resource_timing = upload_droplet(zip_file)
puts("droplet upload timing: #{resource_timing * 1000}ms")
upload_lines = []
download_lines = []

resource_timing = download_droplet(droplet_guid, resource_dir)
puts("droplet download timing: #{resource_timing * 1000}ms")
SIZES.each do |label, bytes|
Tempfile.create(["big-droplet-#{label}", '.bin'], resource_dir) do |tempfile|
write_file_of_size(tempfile.path, bytes)

big_droplet_file = Tempfile.new('big-droplet', resource_dir)
big_droplet_file.write('abc' * 1024 * 1024 * 100)
big_droplet_guid, resource_timing = upload_droplet(big_droplet_file.path)
puts("big droplet upload timing: #{resource_timing * 1000}ms")
guid, upload_timing = upload_droplet(tempfile.path)
big_droplet_guids << guid

download_timing = download_droplet(guid, resource_dir)

upload_lines << format_timing("droplet #{label} upload timing", upload_timing)
download_lines << format_timing("droplet #{label} download timing", download_timing)
end
end

resource_timing = download_droplet(big_droplet_guid, resource_dir)
puts("big droplet download timing: #{resource_timing * 1000}ms")
puts(upload_lines.join("\n"))
puts(download_lines.join("\n"))
ensure
FileUtils.remove_dir(resource_dir, true)
FileUtils.remove_dir(zip_output_dir, true)
package_blobstore_client.delete(package_guid) if package_guid
droplet_blobstore_client.delete(droplet_guid) if droplet_guid
droplet_blobstore_client.delete(big_droplet_guid) if big_droplet_guid

safe_delete(package_blobstore_client, package_guid)
Array(big_droplet_guids).each { |g| safe_delete(droplet_blobstore_client, g) }
end

def resource_match(dir_path)
Expand All @@ -60,46 +82,73 @@ def upload_package(package_path)
end

def download_package(package_guid, tmp_dir)
tempfile = Tempfile.new('package-download-benchmark', tmp_dir)
::Benchmark.realtime do
package_blobstore_client.download_from_blobstore(package_guid, tempfile.path)
Tempfile.create('package-download-benchmark', tmp_dir) do |tempfile|
::Benchmark.realtime do
package_blobstore_client.download_from_blobstore(package_guid, tempfile.path)
end
end
end

def download_buildpacks(tmp_dir)
tempfile = Tempfile.new('buildpack-download-benchmark', tmp_dir)
bytes_read = 0

timing = ::Benchmark.realtime do
bytes_read = Buildpack.map do |buildpack|
buildpack_blobstore_client.download_from_blobstore(buildpack.key, tempfile.path)
File.stat(tempfile.path).size
end.sum
Tempfile.create('buildpack-download-benchmark', tmp_dir) do |tempfile|
bytes_read = 0
timing = ::Benchmark.realtime do
bytes_read = Buildpack.map do |buildpack|
buildpack_blobstore_client.download_from_blobstore(buildpack.key, tempfile.path)
File.stat(tempfile.path).size
end.sum
end
[bytes_read, timing]
end

[bytes_read, timing]
end

# Uploads the file at +droplet_path+ to the droplet blobstore.
# Callers destructure the result as [guid, timing] (see the perform loop),
# so copy_to_blobstore — defined elsewhere in this class — presumably
# returns that pair; TODO confirm against its definition.
def upload_droplet(droplet_path)
copy_to_blobstore(droplet_path, droplet_blobstore_client)
end

def download_droplet(droplet_guid, tmp_dir)
tempfile = Tempfile.new('droplet-download-benchmark', tmp_dir)

::Benchmark.realtime do
droplet_blobstore_client.download_from_blobstore(droplet_guid, tempfile.path)
Tempfile.create('droplet-download-benchmark', tmp_dir) do |tempfile|
::Benchmark.realtime do
droplet_blobstore_client.download_from_blobstore(droplet_guid, tempfile.path)
end
end
end

private

# Prints one benchmark result line, converting +seconds+ (Float from
# Benchmark.realtime) to milliseconds rounded to 3 decimal places.
def log_timing(label, seconds)
  millis = (seconds * 1000).round(3)
  puts("#{label}: #{millis}ms")
end

# Returns (without printing) a benchmark result line in the same shape as
# log_timing: "<label>: <milliseconds rounded to 3 places>ms".
def format_timing(label, seconds)
  format('%s: %sms', label, (seconds * 1000).round(3))
end

# Best-effort blobstore cleanup: asks +client+ to delete +guid+, reporting
# (but never raising) any StandardError so a failed delete cannot abort the
# benchmark's ensure-block teardown. A nil guid is silently ignored.
def safe_delete(client, guid)
  return if guid.nil?

  begin
    client.delete(guid)
  rescue StandardError => error
    warn("cleanup failed for guid=#{guid}: #{error.class}: #{error.message}")
  end
end

# Writes exactly +bytes+ bytes of filler data to +path+ (binary mode,
# truncating any existing content), streaming in 1 MiB chunks so memory use
# stays bounded regardless of target size.
#
# BUG FIX: the previous version called f.write(CHUNK_1MB, to_write); Ruby's
# IO#write treats every argument as a string to output, so each iteration
# wrote the full 1 MiB chunk PLUS the decimal string of to_write, producing
# files of the wrong size. We now slice the chunk to the wanted length.
def write_file_of_size(path, bytes)
  File.open(path, 'wb') do |f|
    remaining = bytes
    while remaining > 0
      to_write = [CHUNK_1MB.bytesize, remaining].min
      f.write(CHUNK_1MB.byteslice(0, to_write))
      remaining -= to_write
    end
  end
end

def generate_resources
dir = Dir.mktmpdir

100.times.each do |i|
f = File.open(File.join(dir, i.to_s), 'w')
f.write('foo' * (65_536 + i))
100.times do |i|
File.write(File.join(dir, i.to_s), 'foo' * (65_536 + i))
end

dir
Expand All @@ -110,9 +159,7 @@ def zip_resources(resource_dir, output_dir)
Zip::File.open(zip_file, create: true) do |zipfile|
Find.find(resource_dir).
select { |f| File.file?(f) }.
each do |file|
zipfile.add(File.basename(file), file)
end
each { |file| zipfile.add(File.basename(file), file) }
end
zip_file
end
Expand Down
1 change: 1 addition & 0 deletions lib/cloud_controller/config.rb
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
require 'cloud_controller/config_schemas/worker_schema'
require 'cloud_controller/config_schemas/deployment_updater_schema'
require 'cloud_controller/config_schemas/rotate_database_key_schema'
require 'cloud_controller/config_schemas/blobstore_benchmarks_schema'
require 'utils/hash_utils'

module VCAP::CloudController
Expand Down
81 changes: 81 additions & 0 deletions lib/cloud_controller/config_schemas/blobstore_benchmarks_schema.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
require 'vcap/config'

# Config schema for the standalone blobstore-benchmarks errand: the minimal
# subset of Cloud Controller configuration (db, logging, and the four
# blobstore sections) that the benchmark task needs to run.
module VCAP::CloudController
module ConfigSchemas
class BlobstoreBenchmarksSchema < VCAP::Config
# rubocop:disable Metrics/BlockLength
define_schema do
# Shared shape for each of the four blobstore config sections
# (resource_pool, buildpacks, packages, droplets) below.
blobstore_section = {
blobstore_type: String,
blobstore_provider: String,

optional(:connection_config) => Hash,
optional(:fog_connection) => Hash,

fog_aws_storage_options: Hash,
fog_gcp_storage_options: Hash,

optional(:resource_directory_key) => String,
optional(:buildpack_directory_key) => String,
optional(:app_package_directory_key) => String,
optional(:droplet_directory_key) => String,

optional(:maximum_size) => Integer,
optional(:minimum_size) => Integer,
optional(:max_package_size) => Integer,
optional(:max_valid_packages_stored) => Integer,
optional(:max_staged_droplets_stored) => Integer
}

{
optional(:logging) => {
optional(:level) => String,
optional(:file) => String,
optional(:syslog) => String,
optional(:stdout_sink_enabled) => bool
},

db: {
optional(:database) => Hash, # db connection hash for sequel
max_connections: Integer, # max connections in the connection pool
pool_timeout: Integer, # timeout before raising an error when connection can't be established to the db
log_level: String, # debug, info, etc.
log_db_queries: bool,
ssl_verify_hostname: bool,
connection_validation_timeout: Integer,
optional(:ca_cert_path) => String
},
# Per-store config files for the external storage CLI.
storage_cli_config_file_resource_pool: String,
storage_cli_config_file_buildpacks: String,
storage_cli_config_file_packages: String,
storage_cli_config_file_droplets: String,

db_encryption_key: enum(String, NilClass),

optional(:database_encryption) => {
keys: Hash,
current_key_label: String,
optional(:pbkdf2_hmac_iterations) => Integer
},

# The four blobstores the benchmark exercises, all sharing the shape above.
resource_pool: blobstore_section,
buildpacks: blobstore_section,
packages: blobstore_section,
droplets: blobstore_section,

pid_filename: String,
index: Integer, # Component index (cc-0, cc-1, etc)
name: String, # Component name (api_z1, api_z2)
default_app_ssh_access: bool
}
end
# rubocop:enable Metrics/BlockLength

class << self
# Only the ResourcePool singleton is needed for the benchmark's
# resource-match step; no other components are configured.
def configure_components(config)
ResourcePool.instance = ResourcePool.new(config)
end
end
end
end
end
3 changes: 2 additions & 1 deletion lib/tasks/blobstore_benchmarks.rake
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,8 @@ require 'cloud_controller/benchmark/blobstore'
namespace :benchmarks do
desc 'Perform blobstore benchmark'
task perform_blobstore_benchmark: :environment do
BackgroundJobEnvironment.new(RakeConfig.config).setup_environment do
RakeConfig.context = :blobstore_benchmarks
BoshErrandEnvironment.new(RakeConfig.config).setup_environment do
VCAP::CloudController::Benchmark::Blobstore.new.perform
end
end
Expand Down