Class: ApceraApiHelper
Inherits: Object
Defined in: lib/apcera_api_helper.rb
Instance Attribute Summary

- #api_host ⇒ Object (readonly): Returns the value of attribute api_host.
- #docker_layer_hash ⇒ Object (readonly): Returns the value of attribute docker_layer_hash.
- #domain ⇒ Object (readonly): Returns the value of attribute domain.
- #sandbox ⇒ Object (readonly): Returns the value of attribute sandbox.
- #target ⇒ Object (readonly): Returns the value of attribute target.
Class Method Summary

- .notice(text) ⇒ Object
Instance Method Summary
- #_get_job_preferences_by_name(namespace, job_name) ⇒ Object
- #add_service_to_job(job_fqn, service_fqn, name) ⇒ Object
- #banner(text) ⇒ Object
- #build_docker_job_prototype(namespace, job_name, image_url, start_command) ⇒ Object
- #check_task_status(task_uuid) ⇒ Object
- #create_and_stage_git_repo(repo_name, clone_to, repo_subdir, namespace, job_name, change_to = "", pipeline = nil, tags = {}) ⇒ Object
- #create_and_stage_package(filename, namespace, job_name, change_to = "", pipeline = nil, tags = {}) ⇒ Object
  If filename is "*" then the whole directory is packaged. Example: create_and_stage_package("*", "/sandbox/fred.flintstone", "my_new_job").
- #create_binding(binding) ⇒ Object
  Binding operations.
- #create_docker_job_new_api(image_url, job_namespace, job_name, start_command, exposed_port = 0, environment = {}, allow_egress = false, ram_mb = 128, disk_mb = 16) ⇒ Object
- #create_job(job) ⇒ Object
- #create_package(package) ⇒ Object
- #debug_mode ⇒ Object
- #debug_mode=(mode) ⇒ Object
- #debug_mode? ⇒ Boolean
- #delete_job(job_name) ⇒ Object
- #delete_package(package_name) ⇒ Object
- #find_packages(opt = {}) ⇒ Object
  Package operations.
- #get_base_domain ⇒ Object
- #get_cluster_metrics(opts = {}) ⇒ Object
- #get_docker_namespace(job_name) ⇒ Object
- #get_info ⇒ Object
- #get_instance_manager_metrics(opts = {}) ⇒ Object
- #get_instance_managers ⇒ Object
- #get_job_by_name(name) ⇒ Object
- #get_job_logs(uuid, lines) ⇒ Object
  Get job logs.
- #get_job_metrics(opts = {}) ⇒ Object
  Metrics operations.
- #get_job_route(namespace, job_name) ⇒ Object
- #get_job_state_by_name(name) ⇒ Object
- #get_logs(key) ⇒ Object
  Log operations.
- #get_matching_staging_pipeline(name) ⇒ Object
- #get_namespace_metrics(opts = {}) ⇒ Object
- #get_package_by_name(fqn) ⇒ Object
- #get_package_by_tag(tag) ⇒ Object
- #get_package_by_uuid(uuid) ⇒ Object
- #get_package_for_docker_layer(docker_namespace, layer_id) ⇒ Object
- #get_package_uuid_for_docker_layer(docker_namespace, layer_id) ⇒ Object
- #get_route_metrics(opts = {}) ⇒ Object
- #get_sandbox ⇒ Object
- #get_server_info ⇒ Object
- #get_service_by_name(name) ⇒ Object
- #get_staging_pipeline_by_name(name) ⇒ Object
- #initialize(enable_debugging, target = "", bearer = "") ⇒ ApceraApiHelper (constructor)
  cache_file_path = File.expand_path("~/.apcera_api").
- #initiate_docker_job(namespace, job_name, image_url, start_command = "") ⇒ Object
  Docker operations.
- #job_exist?(name) ⇒ Boolean
- #job_from_docker(job_name, job_namespace, registry, image_name, image_tag, start_command = "", allow_egress = false, port = 0, environment = {}, user_name = "", password = "") ⇒ Object
- #job_from_package(namespace, job_name, allow_egress, port = 0, environment = {}, start_command = "") ⇒ Object
- #jobs_get(opt = {}) ⇒ Object
  Job operations.
- #jobs_routes ⇒ Object
  Some route operations.
- #jobs_routes_for_endpoint(endpoint_name) ⇒ Object
- #link_jobs(source_job, target_job, name, port) ⇒ Object
- #package_exist?(fqn) ⇒ Boolean
- #package_from_files(filename, namespace, job_name, port = 0) ⇒ Object
- #runtimes_get ⇒ Object
  Runtime operations.
- #scale_job(job_fqn, number_of_instances) ⇒ Object
- #service_exist?(service) ⇒ Boolean
- #show_wait_spinner(fps = 10) ⇒ Object
  Utility operations.
- #spin_until_package_staged(package_name) ⇒ Object
- #stagingpipelines_get(opts = {}) ⇒ Object
  Staging pipeline operations.
- #start_job(job_name, num_instances = 1) ⇒ Object
- #stop_job(job_name) ⇒ Object
- #update_job(job) ⇒ Object
- #update_package_by_uuid(uuid, package) ⇒ Object
- #update_package_resources_by_uuid(uuid, payload) ⇒ Object
- #wait_until_package_staged(package_name) ⇒ Object
Constructor Details
#initialize(enable_debugging, target = "", bearer = "") ⇒ ApceraApiHelper
cache_file_path = File.expand_path("~/.apcera_api")
# File 'lib/apcera_api_helper.rb', line 210

def initialize(enable_debugging, target = "", bearer = "")
  self.debug_mode = enable_debugging

  apc_env = ApceraAuth.new(target, bearer)
  if (self.debug_mode)
    puts apc_env.to_s
  end
  @domain = apc_env.domain

  Apcera.configure do |config|
    config.scheme = apc_env.scheme

    # If we are using APP_TOKEN these won't be set
    #
    if !apc_env.token.nil?
      config.api_key['authorization'] = apc_env.token
    end
    if !apc_env.token_prefix.nil?
      config.api_key_prefix['authorization'] = apc_env.token_prefix
    end

    config.host = apc_env.api_host
    config.base_path = '/v1'
    config.debugging = enable_debugging
    config.verify_ssl = false
  end

  @_bindings_api ||= Apcera::ServicesAndBindingsApi.new
  @_instances_api ||= Apcera::InstancesApi.new
  @_jobs_api ||= Apcera::JobsApi.new
  @_logs_api ||= Apcera::LogsApi.new
  @_metrics_api ||= Apcera::MetricsApi.new
  @_packages_api ||= Apcera::PackagesApi.new
  @_stagingpipeline_api ||= Apcera::StagingPipelinesApi.new
  @_utilities_api ||= Apcera::UtilitiesApi.new

  sb = self.get_sandbox()
  @sandbox = sb.namespace
end
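A minimal usage sketch, assuming the gem is on the load path and that credentials are resolved through ApceraAuth (for example from a cached APC token or the bearer argument); the target URL below is a placeholder:

require 'apcera_api_helper'

# Target and token are illustrative; substitute your cluster's values.
helper = ApceraApiHelper.new(false, "https://example-cluster.apcera.example.com", "")

puts "Default namespace (sandbox): #{helper.sandbox}"
puts "Cluster base domain:         #{helper.get_base_domain}"

The later examples in this page reuse this hypothetical helper instance.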
Instance Attribute Details
#api_host ⇒ Object (readonly)
Returns the value of attribute api_host.
# File 'lib/apcera_api_helper.rb', line 205

def api_host
  @api_host
end
#docker_layer_hash ⇒ Object (readonly)
Returns the value of attribute docker_layer_hash.
# File 'lib/apcera_api_helper.rb', line 202

def docker_layer_hash
  @docker_layer_hash
end
#domain ⇒ Object (readonly)
Returns the value of attribute domain.
# File 'lib/apcera_api_helper.rb', line 206

def domain
  @domain
end
#sandbox ⇒ Object (readonly)
Returns the value of attribute sandbox.
# File 'lib/apcera_api_helper.rb', line 203

def sandbox
  @sandbox
end
#target ⇒ Object (readonly)
Returns the value of attribute target.
# File 'lib/apcera_api_helper.rb', line 204

def target
  @target
end
Class Method Details
.notice(text) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 1106

def self.notice(text)
  puts text
end
Instance Method Details
#_get_job_preferences_by_name(namespace, job_name) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 1042

def _get_job_preferences_by_name(namespace, job_name)
  @_jobs_api.preferences_job_get("job::#{namespace}::#{job_name}")
end
#add_service_to_job(job_fqn, service_fqn, name) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 870

def add_service_to_job(job_fqn, service_fqn, name)
  self.banner("Trying to bind service #{service_fqn} to #{job_fqn} as #{name}")

  # first we need the provider for the service
  #
  provider = self.get_service_by_name(service_fqn)
  puts ">>>>Provider is #{provider.to_yaml}"

  binding = Apcera::Binding.new({
    :fqn => "binding::#{self.sandbox}::#{SecureRandom.uuid}",
    :job_fqn => job_fqn,
    :name => name,
    :provider_fqn => provider.provider_fqn,
    :service_fqn => service_fqn
  })

  if self.debug_mode?
    ApceraApiHelper.notice binding.to_yaml
  end

  self.create_binding(binding)
end
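A hypothetical call that binds an existing service to a job under the name "db"; both FQNs are placeholders:

# Bind a service to a job; the binding is created in the caller's sandbox namespace.
helper.add_service_to_job(
  "job::/sandbox/fred.flintstone::web",
  "service::/sandbox/fred.flintstone::mydb",
  "db"
)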
#banner(text) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 1099

def banner(text)
  puts "\n\n"
  puts "################################################################################"
  puts "# #{text}"
  puts "################################################################################"
end
#build_docker_job_prototype(namespace, job_name, image_url, start_command) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 973

def build_docker_job_prototype(namespace, job_name, image_url, start_command)
  docker_origin = Apcera::DockerOrigin.new(
    "ImageName" => "",
    "ImageTag" => "",
    "RegistryURL" => image_url,
    "Volumes" => nil
  )

  port = Apcera::Port.new({
    :optional => true,
    :number => 222
  })

  app = Apcera::ProcessObject.new({
    # :start_command_raw =>,
    :start_command => start_command,
    :start_command_timeout => 30,
    :stop_command_raw => [],
    :stop_command => "",
    :stop_timeout => 5,
    :user => "root"
  })

  process = Apcera::Process.new({ :app => app })

  resource = Apcera::Resource.new({
    :cpu => 0,
    :memory => 768 * 1024 * 1024,
    :disk => 256 * 1024 * 1024,
    :network => 1 * 1024 * 1024 * 1024,
    :netmax => 0
  })

  restart = Apcera::RestartConfig.new({
    :restart_mode => "no",
    :maximum_attempts => 0
  })

  rollout = Apcera::RolloutConfig.new({
    :force_stop_old_instances_after => 0,
    :flapping_minimum_restarts => 0,
    :flapping_percent => 0,
    :flapping_window => 0,
    :errored_state_window => 0
  })

  job_proto = Apcera::Job.new({
    :docker_origin => docker_origin,
    :fqn => "job::#{namespace}::#{job_name}",
    :name => job_name,
    :num_instances => 1,
    :packages => nil,
    :ports => [port],
    :processes => process,
    :resources => resource,
    :restart => restart,
    :rollout => rollout,
    :state => "unknown",
    :uuid => "",
    :version_id => 0,
    :weight => 0
  })

  job_proto.tags = {"app" => job_name, "docker" => job_name, "ssh" => "true"}

  job_proto
end
#check_task_status(task_uuid) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 736

def check_task_status(task_uuid)
  @_jobs_api.tasks_uuid_get(task_uuid)
end
#create_and_stage_git_repo(repo_name, clone_to, repo_subdir, namespace, job_name, change_to = "", pipeline = nil, tags = {}) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 431

def create_and_stage_git_repo(repo_name, clone_to, repo_subdir, namespace, job_name, change_to = "", pipeline = nil, tags = {})
  save_dir = Dir.pwd
  puts "save_dir is #{save_dir}"

  tmp_dir = Dir.mktmpdir("git-")
  puts "tmp_dir is #{tmp_dir}"

  Dir.chdir(tmp_dir)
  puts "now in #{Dir.pwd}"

  system("git clone #{repo_name} #{clone_to}")
  Dir.chdir("#{clone_to}/#{repo_subdir}")

  create_and_stage_package("*", namespace, job_name, change_to, pipeline, tags)

  Dir.chdir(save_dir)
  FileUtils.rm_rf(tmp_dir)
end
#create_and_stage_package(filename, namespace, job_name, change_to = "", pipeline = nil, tags = {}) ⇒ Object
If filename is "*" then the whole directory is packaged. Example:
create_and_stage_package("*", "/sandbox/fred.flintstone", "my_new_job")
# File 'lib/apcera_api_helper.rb', line 452

def create_and_stage_package(filename, namespace, job_name, change_to = "", pipeline = nil, tags = {})
  file_list = filename
  if ((filename != "*") && File.exist?("start.sh"))
    file_list = "#{filename} start.sh"
  end

  if pipeline.nil?
    pipeline = self.get_matching_staging_pipeline(filename)
  end
  if (pipeline.nil?)
    ApceraApiHelper.notice "Fatal ERROR, could not determine staging pipeline for #{filename}"
    exit
  end

  # when we stage, we only need the uuid
  #
  sp = Apcera::StagingPipeline.new({ :uuid => pipeline.uuid })

  self.banner("Tarring up the files")
  tarball_path = "/tmp/pkg-#{job_name.gsub("/", "-")}.tgz"

  base_dir_opt = ""
  if change_to.nonblank?
    base_dir_opt = "-C #{change_to}"
  end

  tar_command = "COPYFILE_DISABLE=1 tar #{base_dir_opt} -czf #{tarball_path} #{file_list}"
  system(tar_command)

  contents = File.binread(tarball_path)
  hex_digest = Digest::SHA1.hexdigest(contents.to_s)
  sha256_digest = Digest::SHA256.hexdigest(contents.to_s)
  length = File.size(tarball_path)

  # We are now done with the file
  #
  File.delete(tarball_path)

  # At some point will need to switch to the sha256 digest. Looks like that is now.
  #
  # resource = Apcera::PackageResource.new({:length => length, :digest => sha256_digest})
  #
  resource = Apcera::PackageResource.new()

  package_fqn = "package::#{namespace}::#{job_name}"
  new_pkg = Apcera::Package.new({
    :name => job_name,
    :fqn => package_fqn,
    :staging_pipeline => sp
  })
  new_pkg.resources = [resource]
  new_pkg.tags = tags

  if self.debug_mode?
    self.banner("Package Is")
    ApceraApiHelper.notice new_pkg.to_yaml
  end

  ApceraApiHelper.notice("Creating a placeholder for the new package #{new_pkg.fqn}")
  begin
    created_pkg = self.create_package(new_pkg)
  rescue
    # Handle the race condition where the package has been created since this function started
    #
    ApceraApiHelper.notice "post failed, validate presence"
    check_pkg = self.get_package_by_name(new_pkg.fqn)
    if !check_pkg.nil?
      ApceraApiHelper.notice "Package State is #{check_pkg.state}"
      return ### SHOULD I REALLY??
    end
  end

  ApceraApiHelper.notice("Uploading the new package... #{created_pkg.uuid} resource #{created_pkg.resource.uuid}")
  @_packages_api.packages_package_uuid_resources_resource_uuid_put(created_pkg.uuid, created_pkg.resource.uuid, contents, "sha256:#{sha256_digest}")
  # bar = self.update_package_resources_by_uuid(created_pkg.uuid, contents)
  # ApceraApiHelper.notice self.get_logs("staging.#{created_pkg.uuid}")

  ApceraApiHelper.notice("Waiting for package #{job_name} to stage (might take some time)...")
  pkg = self.spin_until_package_staged(package_fqn)

  ApceraApiHelper.notice "Package (#{job_name}) staging status: #{pkg.state}"
  pkg
end
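For example, to package and stage everything in the current working directory (the namespace and job name are the same illustrative values used above):

# Tars the current directory, uploads it, and waits for staging to finish.
pkg = helper.create_and_stage_package("*", "/sandbox/fred.flintstone", "my_new_job")
puts "Package #{pkg.fqn} finished staging in state #{pkg.state}"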
#create_binding(binding) ⇒ Object
Binding operations
# File 'lib/apcera_api_helper.rb', line 849

def create_binding(binding)
  @_bindings_api.bindings_post(binding)
end
#create_docker_job_new_api(image_url, job_namespace, job_name, start_command, exposed_port = 0, environment = {}, allow_egress = false, ram_mb = 128, disk_mb = 16) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 689

def create_docker_job_new_api(image_url, job_namespace, job_name, start_command, exposed_port = 0, environment = {}, allow_egress = false, ram_mb = 128, disk_mb = 16)
  job_fqn = "job::#{job_namespace}::#{job_name}"
  sc = start_command.split(" ")

  puts "Trying to do job #{job_fqn}"

  resource = Apcera::Resource.new({
    :cpu => 0,
    :memory => ram_mb * 1024 * 1024,
    :disk => disk_mb * 1024 * 1024,
    :network => 1 * 1024 * 1024 * 1024,
    :netmax => 0
  })

  restart = Apcera::RestartConfig.new({
    :restart_mode => "always",
    :maximum_attempts => 3
  })

  docker_job = Apcera::CreateDockerJobRequest.new({
    :'allow_egress' => allow_egress,
    :'image_url' => image_url,
    :'job_fqn' => job_fqn,
    :'start_command' => sc,
    :'resources' => resource,
    :'restart' => restart
  })
  docker_job.env = environment

  # Set up the port
  #
  if (!exposed_port.nil? && exposed_port != 0)
    docker_job.exposed_ports = [exposed_port]

    # need to also set the route
    #
    sub_route = job_namespace.split("/").reverse.join(".").chomp(".")
    route_name = "http://#{job_name}.#{sub_route}.#{self.get_base_domain}"
    docker_job.routes = {route_name => exposed_port}
  end

  task = @_jobs_api.docker_jobs_post(docker_job)
  task
end
#create_job(job) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 740

def create_job(job)
  @_jobs_api.jobs_post(job)
end
#create_package(package) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 381

def create_package(package)
  @_packages_api.packages_post(package)
end
#debug_mode ⇒ Object
# File 'lib/apcera_api_helper.rb', line 263

def debug_mode()
  @debug_mode
end
#debug_mode=(mode) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 254

def debug_mode=(mode)
  @debug_mode = mode
  Apcera::Configuration.debugging = mode
end
#debug_mode? ⇒ Boolean
# File 'lib/apcera_api_helper.rb', line 259

def debug_mode?()
  @debug_mode
end
#delete_job(job_name) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 806

def delete_job(job_name)
  job = self.get_job_by_name(job_name)
  ApceraApiHelper.notice("Trying to delete job w/ uuid #{job.uuid}")
  @_jobs_api.jobs_uuid_delete(job.uuid)
end
#delete_package(package_name) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 393

def delete_package(package_name)
  package = self.get_package_by_name(package_name)
  ApceraApiHelper.notice("Trying to delete package uuid #{package.uuid}")
  @_packages_api.packages_uuid_delete(package.uuid)
end
#find_packages(opt = {}) ⇒ Object
Package operations
# File 'lib/apcera_api_helper.rb', line 343

def find_packages(opt = {})
  @_packages_api.packages_get(opt)
end
#get_base_domain ⇒ Object
# File 'lib/apcera_api_helper.rb', line 1082

def get_base_domain()
  if @_base_domain.nil?
    foo = @_utilities_api.info_get()
    domain = foo.url.split(/[:\/]/).last
    domain.gsub!("api.", "")
    @_base_domain = domain
  end

  return @_base_domain
end
#get_cluster_metrics(opts = {}) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 329

def get_cluster_metrics(opts = {})
  @_metrics_api.metrics_cluster_get(opts)
end
#get_docker_namespace(job_name) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 1046

def get_docker_namespace(job_name)
  prefs = self._get_job_preferences_by_name(self.sandbox, job_name)
  prefs.docker_cache_namespace
end
#get_info ⇒ Object
# File 'lib/apcera_api_helper.rb', line 267

def get_info
  @_utilities_api.info_get()
end
#get_instance_manager_metrics(opts = {}) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 325

def get_instance_manager_metrics(opts = {})
  @_metrics_api.metrics_instance_managers_get(opts)
end
#get_instance_managers ⇒ Object
# File 'lib/apcera_api_helper.rb', line 271

def get_instance_managers()
  @_instances_api.instance_managers_get()
end
#get_job_by_name(name) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 767

def get_job_by_name(name)
  ret = @_jobs_api.jobs_get({:fqn => name})
  ret.length > 0 ? ret[0] : nil
end
#get_job_logs(uuid, lines) ⇒ Object
Get job logs.
# File 'lib/apcera_api_helper.rb', line 895

def get_job_logs(uuid, lines)
  @_jobs_api.jobs_uuid_logs_get(uuid, {:lines => lines})
end
#get_job_metrics(opts = {}) ⇒ Object
Metrics operations
# File 'lib/apcera_api_helper.rb', line 321

def get_job_metrics(opts = {})
  @_metrics_api.metrics_jobs_get(opts)
end
#get_job_route(namespace, job_name) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 830

def get_job_route(namespace, job_name)
  job = self.get_job_by_name("job::#{namespace}::#{job_name}")

  url = ""
  if !job.ports.nil?
    job.ports.each do |port|
      if !port.routes.nil?
        route = port.routes[0]
        url = "#{route.type}://#{route.endpoint}"
        break
      end
    end
  end
  url
end
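A quick sketch of looking up a job's public URL; the namespace and job name are placeholders:

url = helper.get_job_route("/sandbox/fred.flintstone", "web")
puts url.empty? ? "web has no route" : "web is reachable at #{url}"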
#get_job_state_by_name(name) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 778

def get_job_state_by_name(name)
  ret = @_jobs_api.jobs_get({:fqn => name})
  ret.length > 0 ? ret[0].state : "JOB_NOT_FOUND"
end
#get_logs(key) ⇒ Object
Log Operations
# File 'lib/apcera_api_helper.rb', line 903

def get_logs(key)
  @_logs_api.logs_channel_get(key).to_yaml
end
#get_matching_staging_pipeline(name) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 296

def get_matching_staging_pipeline(name)
  ApceraApiHelper.notice "get_matching_staging_pipeline for #{name}"
  runtimes = self.runtimes_get()
  staging_pipeline = ""

  # First find the matching runtime
  #
  runtimes.each do |runtime|
    runtime.patterns.each do |pattern|
      # If the name is a star, we are doing the whole directory
      #
      if ((name == "*") && (!Dir.glob(pattern).empty?) || (name != "*") && (File.fnmatch(pattern, name)))
        staging_pipeline = "stagpipe::/apcera::#{runtime.runtime}"
        break
      end
    end
  end

  pipeline = self.get_staging_pipeline_by_name(staging_pipeline)
  return pipeline
end
#get_namespace_metrics(opts = {}) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 337

def get_namespace_metrics(opts = {})
  @_metrics_api.metrics_namespace_get(opts)
end
#get_package_by_name(fqn) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 351

def get_package_by_name(fqn)
  ret = @_packages_api.packages_get({:fqn => fqn})
  ret.length > 0 ? ret[0] : nil
end
#get_package_by_tag(tag) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 371

def get_package_by_tag(tag)
  ret = @_packages_api.find_packages({:tag => tag})
  ret.length > 0 ? ret[0] : nil
end
#get_package_by_uuid(uuid) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 376

def get_package_by_uuid(uuid)
  ret = @_packages_api.packages_uuid_get(uuid)
  ret.length > 0 ? ret[0] : nil
end
#get_package_for_docker_layer(docker_namespace, layer_id) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 356

def get_package_for_docker_layer(docker_namespace, layer_id)
  # ret = @_packages_api.find_packages({:fqn => docker_namespace,
  #                                     :matchPartialFQN => true,
  #                                     :tag => "docker_layer_id,#{layer_id}" })
  ret = @_packages_api.packages_get({:fqn => "package::#{docker_namespace}",
                                     :match_partial_fqn => "true",
                                     :tag => "docker_layer_id,#{layer_id}" })
  ret.length > 0 ? ret[0] : nil
end
#get_package_uuid_for_docker_layer(docker_namespace, layer_id) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 365

def get_package_uuid_for_docker_layer(docker_namespace, layer_id)
  ret = self.get_package_for_docker_layer(docker_namespace, layer_id)
  uuid = !ret.nil? ? ret.uuid : ""
  uuid
end
#get_route_metrics(opts = {}) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 333

def get_route_metrics(opts = {})
  @_metrics_api.metrics_route_counters_get(opts)
end
#get_sandbox ⇒ Object
# File 'lib/apcera_api_helper.rb', line 281

def get_sandbox()
  @_utilities_api.namespace_default_get()
end
#get_server_info ⇒ Object
# File 'lib/apcera_api_helper.rb', line 1094

def get_server_info()
  foo = @_utilities_api.info_get()
  return foo
end
#get_service_by_name(name) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 772

def get_service_by_name(name)
  ret = @_bindings_api.services_get({:fqn => name})
  puts ret.to_yaml
  ret.length > 0 ? ret[0] : nil
end
#get_staging_pipeline_by_name(name) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 291

def get_staging_pipeline_by_name(name)
  ret = @_stagingpipeline_api.stagingpipelines_get({:fqn => name})
  ret.length > 0 ? ret[0] : nil
end
#initiate_docker_job(namespace, job_name, image_url, start_command = "") ⇒ Object
Docker Operations
# File 'lib/apcera_api_helper.rb', line 909

def initiate_docker_job(namespace, job_name, image_url, start_command = "")
  job_proto = self.build_docker_job_prototype(namespace, job_name, image_url, start_command)
  @_jobs_api.docker_job_check(job_proto)

  # return the prototype so we can use it later
  #
  job_proto
end
#job_exist?(name) ⇒ Boolean
# File 'lib/apcera_api_helper.rb', line 681

def job_exist?(name)
  !self.get_job_by_name(name).nil?
end
#job_from_docker(job_name, job_namespace, registry, image_name, image_tag, start_command = "", allow_egress = false, port = 0, environment = {}, user_name = "", password = "") ⇒ Object
# File 'lib/apcera_api_helper.rb', line 918

def job_from_docker(job_name, job_namespace, registry, image_name, image_tag, start_command = "", allow_egress = false, port = 0, environment = {}, user_name = "", password = "")
  # Make sure that the job isn't running
  #
  base_fqn = "#{job_namespace}::#{job_name}"
  job_fqn = "job::#{base_fqn}"

  # begin
  if self.job_exist?(job_fqn)
    ApceraApiHelper.notice "error, job #{job_fqn} exists already, aborting"
    exit
  end
  # rescue
  #   puts "crapped out checking #{job_name}"
  #   exit
  # end

  # This is the stuff from the scratchpad
  #
  parts = registry.split(/[:\/]/)
  scheme = parts.first
  domain = parts.last

  userinfo = ""
  if user_name.nonblank? && password.nonblank?
    userinfo = URI::encode("#{user_name}:#{password}@")
  end

  image_suffix = ""
  if image_tag.nonblank?
    image_suffix = ":#{image_tag}"
  end

  endpoint_scheme = "#{scheme}://#{userinfo}"
  registry_url = "#{endpoint_scheme}#{domain}"
  image_url = "#{registry_url}/#{image_name}#{image_suffix}"

  djt = self.create_docker_job_new_api(image_url, job_namespace, job_name, start_command, port, environment, allow_egress)
  puts "docker job task location for #{job_name} is #{djt.location}"

  location = djt.location
  uuid = location.split("/").last
  task = self.check_task_status(uuid)
  if task.nil?
    puts "YES, the task is nil for #{job_name}"
  end

  while task.state == "running"
    # sleep a random time between 5 and 10 seconds
    sleep rand(5..10)
    task = self.check_task_status(uuid)
    puts "Staging process (#{job_name}): *** #{task.state} ***"
  end

  return self.get_job_by_name(job_fqn)
end
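For instance, a hypothetical NGINX job created straight from Docker Hub; the namespace, registry, and image values are placeholders:

# Create and stage a job from a public Docker image, exposing port 80.
job = helper.job_from_docker(
  "my-nginx",                     # job_name
  "/sandbox/fred.flintstone",     # job_namespace
  "https://registry-1.docker.io", # registry
  "library/nginx",                # image_name
  "latest",                       # image_tag
  "nginx -g 'daemon off;'",       # start_command
  true,                           # allow_egress
  80                              # port
)
puts "Created #{job.fqn}, state: #{job.state}"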
#job_from_package(namespace, job_name, allow_egress, port = 0, environment = {}, start_command = "") ⇒ Object
# File 'lib/apcera_api_helper.rb', line 561

def job_from_package(namespace, job_name, allow_egress, port = 0, environment = {}, start_command = "")
  fqn_base = "#{namespace}::#{job_name}"
  package_fqn = "package::#{fqn_base}"
  job_fqn = "job::#{fqn_base}"

  if self.job_exist?(job_fqn)
    ApceraApiHelper.notice "Job #{job_fqn} already exists, aborting"
    return nil
  end

  self.banner("Getting package uuid for package #{package_fqn}")
  pkg = self.get_package_by_name(package_fqn)
  if pkg.nil?
    ApceraApiHelper.notice "Error, package #{package_fqn} not found, aborting"
    exit
  end

  # Need to build the route name
  #
  route = namespace.split("/").reverse.join(".").chomp(".")

  port_route = Apcera::Port.new({
    :optional => false,
    :number => port,
    :routes => [{:type => "http",
                 :endpoint => "#{job_name}.#{route}.#{@domain}",
                 :weight => 0}]
  })

  app = Apcera::ProcessObject.new({
    # :start_command_raw => "",
    :start_command => start_command,
    :start_command_timeout => 30,
    # :stop_command_raw => "",
    :stop_command => "",
    :stop_timeout => 5
  })
  app.environment = environment

  resource = Apcera::Resource.new({
    :cpu => 0,
    :memory => 256 * 1024 * 1024,
    :disk => 256 * 1024 * 1024,
    :network => 5 * 1024 * 1024,
    :netmax => 0
  })

  rollout = Apcera::RolloutConfig.new({
    :force_stop_old_instances_after => 0,
    :flapping_minimum_restarts => 0,
    :flapping_percent => 0,
    :flapping_window => 0,
    :errored_state_window => 0
  })

  process = Apcera::Process.new({ :app => app })

  # If restart mode is always, max needs to be set - if it is set to
  restart = Apcera::RestartConfig.new({
    :restart_mode => "always",
    :maximum_attempts => 0
  })

  # Gather up all of the above
  #
  job_proto = Apcera::Job.new({
    :uuid => "",
    :ports => [port_route],
    :name => job_name,
    :fqn => job_fqn,
    :num_instances => 1,
    :packages => [{:uuid => pkg.uuid}],
    :processes => process,
    :resources => resource,
    :rollout => rollout,
    :restart => restart,
    :state => "unknown"
  })

  # Ruby doesn't seem to like setting the tags in-line
  #
  job_proto.tags = {"app" => job_name}

  if self.debug_mode?
    self.banner("NEW JOB #{job_proto.to_yaml}")
  end

  new_job = self.create_job(job_proto)
  self.banner("CREATED JOB #{job_name}")

  tmp_pkg = self.get_package_by_name(package_fqn)
  tmp_pkg.tags = {"linked-job" => "#{new_job.uuid}"}
  self.update_package_by_uuid(tmp_pkg.uuid, tmp_pkg)

  # Then post the bindings
  #
  if allow_egress
    self.banner("Posting the Allow-Egress binding for #{job_name}")
    ename = fqn_base.gsub("/", "_").gsub("::", "_").gsub(".", "_")
    binding = Apcera::Binding.new({
      :name => "egress_for#{ename}",
      :fqn => "binding::#{namespace}::#{job_name}",
      :job_fqn => job_fqn,
      :service_fqn => "service::/apcera::outside"
    })
    bound = self.create_binding(binding)
  end

  # return the new job
  #
  self.get_job_by_name(job_fqn)
end
#jobs_get(opt = {}) ⇒ Object
Job operations
# File 'lib/apcera_api_helper.rb', line 677

def jobs_get(opt = {})
  @_jobs_api.jobs_get(opt)
end
#jobs_routes ⇒ Object
Some route operations
# File 'lib/apcera_api_helper.rb', line 1053

def jobs_routes()
  @_jobs_api.jobs_routes_get()
end
#jobs_routes_for_endpoint(endpoint_name) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 1057

def jobs_routes_for_endpoint(endpoint_name)
  # @_jobs_api.jobs_routes_get()
  ApceraApiHelper.notice Base64.urlsafe_encode64(endpoint_name)
  @_jobs_api.jobs_routes_endpoint_get(Base64.urlsafe_encode64(endpoint_name))
end
#link_jobs(source_job, target_job, name, port) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 853

def link_jobs(source_job, target_job, name, port)
  self.banner("Linking #{source_job} to #{target_job} via #{name} on port #{port}")

  binding = Apcera::Binding.new({
    :fqn => "binding::#{self.sandbox}::#{SecureRandom.uuid}",
    :job_fqn => source_job,
    :name => name,
    :target_job_fqn => target_job,
    :target_job_port => port
  })

  if self.debug_mode?
    ApceraApiHelper.notice binding.to_yaml
  end

  self.create_binding(binding)
end
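For example, to let a web job open connections to a database job on port 5432 (both FQNs are placeholders):

helper.link_jobs(
  "job::/sandbox/fred.flintstone::web",      # source job
  "job::/sandbox/fred.flintstone::postgres", # target job
  "db",                                      # binding name
  5432                                       # target port
)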
#package_exist?(fqn) ⇒ Boolean
# File 'lib/apcera_api_helper.rb', line 347

def package_exist?(fqn)
  !get_package_by_name(fqn).nil?
end
#package_from_files(filename, namespace, job_name, port = 0) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 543

def package_from_files(filename, namespace, job_name, port = 0)
  # 1) Make sure that a job with that name isn't running
  #
  if self.job_exist?("job::#{namespace}::#{job_name}")
    ApceraApiHelper.notice "Error, job #{namespace}::#{job_name} already exists"
    return nil
  end

  # 2) Make sure that a package with that name isn't running
  #
  if self.package_exist?("package::#{namespace}::#{job_name}")
    ApceraApiHelper.notice "Error, package #{namespace}::#{job_name} already exists"
    return nil
  end

  create_and_stage_package(filename, namespace, job_name)
end
#runtimes_get ⇒ Object
Runtime operations
# File 'lib/apcera_api_helper.rb', line 277

def runtimes_get()
  @_utilities_api.runtimes_get()
end
#scale_job(job_fqn, number_of_instances) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 744

def scale_job(job_fqn, number_of_instances)
  job = get_job_by_name(job_fqn)
  return if job.nil?

  puts "Scaling #{job_fqn} from #{job.num_instances} to #{number_of_instances}"
  job.num_instances = number_of_instances
  puts "Payload is #{job.to_json}"

  begin
    Timeout.timeout(10) do
      self.update_job(job)
    end
  rescue Timeout::Error
    puts '========== REQUEST TO API TIMED OUT =============='
  end

  STDOUT.flush
  puts "DONE Scaling #{job_fqn} from #{job.num_instances} to #{number_of_instances}"
end
#service_exist?(service) ⇒ Boolean
# File 'lib/apcera_api_helper.rb', line 685

def service_exist?(service)
  !self.get_service_by_name(service).nil?
end
#show_wait_spinner(fps = 10) ⇒ Object
Utility Operations
# File 'lib/apcera_api_helper.rb', line 1065

def show_wait_spinner(fps = 10)
  chars = %w[| / - \\]
  delay = 1.0 / fps
  iter = 0
  spinner = Thread.new do
    while iter do                  # Keep spinning until told otherwise
      print chars[(iter += 1) % chars.length]
      sleep delay
      print "\b"
    end
  end
  yield.tap {      # After yielding to the block, save the return value
    iter = false   # Tell the thread to exit, cleaning up after itself…
    spinner.join   # …and wait for it to do so.
  }                # Use the block's return value as the method's
end
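Usage sketch: the block's return value becomes the method's return value, so a slow call can be wrapped directly (the job FQN is a placeholder):

# Spin a console indicator while a slow call runs; its result is returned.
job = helper.show_wait_spinner(10) do
  helper.get_job_by_name("job::/sandbox/fred.flintstone::web")
end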
#spin_until_package_staged(package_name) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 410

def spin_until_package_staged(package_name)
  save_debug = Apcera::Configuration.debugging
  Apcera::Configuration.debugging = false

  pkg = get_package_by_name(package_name)

  show_wait_spinner {
    while (['staging', 'uploading'].include?(pkg.state))
      pkg = get_package_by_name(package_name)
      sleep 5
    end
  }

  Apcera::Configuration.debugging = save_debug

  # return the package
  #
  pkg
end
#stagingpipelines_get(opts = {}) ⇒ Object
Staging Pipeline operations
# File 'lib/apcera_api_helper.rb', line 287

def stagingpipelines_get(opts = {})
  @_stagingpipeline_api.stagingpipelines_get(opts)
end
#start_job(job_name, num_instances = 1) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 784

def start_job(job_name, num_instances = 1)
  job = self.get_job_by_name(job_name)
  if self.debug_mode?
    ApceraApiHelper.notice job.to_yaml
  end

  if job.nil?
    ApceraApiHelper.notice "Error, job #{job_name} was not found"
    return nil
  elsif (job.state == "started" && job.num_instances == num_instances)
    ApceraApiHelper.notice "#{job_name} is already running with requested number of instances (#{num_instances})"
    return job
  end

  job.state = "started"
  job.num_instances = num_instances

  job = self.update_job(job)
  ApceraApiHelper.notice "#{job_name} state is now #{job.state} with #{job.num_instances}"
  job
end
#stop_job(job_name) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 812

def stop_job(job_name)
  job = self.get_job_by_name(job_name)
  if self.debug_mode?
    ApceraApiHelper.notice "JOB is\n\n"
    ApceraApiHelper.notice job.to_yaml
  end

  if job.nil? || job.state == "stopped"
    ApceraApiHelper.notice "Error, job #{job_name} is either already stopped or not found"
    return job
  end

  job.state = "stopped"
  self.update_job(job)

  ApceraApiHelper.notice "Job #{job_name} state is now " + self.get_job_state_by_name(job_name)
end
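A lifecycle sketch tying together the start, scale, and stop helpers above; the FQN is a placeholder:

fqn = "job::/sandbox/fred.flintstone::web"   # illustrative job FQN

helper.start_job(fqn, 1)   # ensure the job is running with one instance
helper.scale_job(fqn, 3)   # scale out to three instances
helper.stop_job(fqn)       # and stop it again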
#update_job(job) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 763

def update_job(job)
  @_jobs_api.jobs_uuid_put(job.uuid, job)
end
#update_package_by_uuid(uuid, package) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 389

def update_package_by_uuid(uuid, package)
  @_packages_api.packages_uuid_put(uuid, package)
end
#update_package_resources_by_uuid(uuid, payload) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 385

def update_package_resources_by_uuid(uuid, payload)
  @_packages_api.packages_resources_uuid_put(uuid, payload)
end
#wait_until_package_staged(package_name) ⇒ Object
# File 'lib/apcera_api_helper.rb', line 399

def wait_until_package_staged(package_name)
  Apcera::Configuration.debugging = false

  ApceraApiHelper.notice("Checking for #{package_name}...")
  pkg = get_package_by_name(package_name)
  if (['staging', 'uploading'].include?(pkg.state))
    self.spin_until_package_staged(package_name)
  end
end