Class: Application
- Inherits:
-
OpenShift::Cartridge
- Object
- OpenShift::Cartridge
- Application
- Defined in:
- app/models/application.rb
Constant Summary collapse
- APP_NAME_MAX_LENGTH =
32
- DEFAULT_NODE_PROFILE =
"small"
- UNSCALABLE_FRAMEWORKS =
["jenkins-1.4", "diy-0.1"]
- SCALABLE_EMBEDDED_CARTS =
["mysql-5.1", "mongodb-2.2", "postgresql-8.4", "jenkins-client-1.4"]
Instance Attribute Summary collapse
-
#aliases ⇒ Object
Returns the value of attribute aliases.
-
#cart_data ⇒ Object
Returns the value of attribute cart_data.
-
#comp_instance_map ⇒ Object
Returns the value of attribute comp_instance_map.
-
#configure_order ⇒ Object
Returns the value of attribute configure_order.
-
#conn_endpoints_list ⇒ Object
Returns the value of attribute conn_endpoints_list.
-
#creation_time ⇒ Object
Returns the value of attribute creation_time.
-
#destroyed_gears ⇒ Object
Returns the value of attribute destroyed_gears.
-
#domain ⇒ Object
Returns the value of attribute domain.
-
#group_instance_map ⇒ Object
Returns the value of attribute group_instance_map.
-
#group_override_map ⇒ Object
Returns the value of attribute group_override_map.
-
#init_git_url ⇒ Object
Returns the value of attribute init_git_url.
-
#ngears ⇒ Object
Returns the value of attribute ngears.
-
#node_profile ⇒ Object
Returns the value of attribute node_profile.
-
#proxy_cartridge ⇒ Object
Returns the value of attribute proxy_cartridge.
-
#scalable ⇒ Object
Returns the value of attribute scalable.
-
#ssh_keys ⇒ Object
Returns the value of attribute ssh_keys.
-
#start_order ⇒ Object
Returns the value of attribute start_order.
-
#state ⇒ Object
Returns the value of attribute state.
-
#usage_records ⇒ Object
Returns the value of attribute usage_records.
-
#user ⇒ Object
Returns the value of attribute user.
-
#user_agent ⇒ Object
Returns the value of attribute user_agent.
-
#uuid ⇒ Object
Returns the value of attribute uuid.
-
#working_comp_inst_hash ⇒ Object
Returns the value of attribute working_comp_inst_hash.
-
#working_group_inst_hash ⇒ Object
Returns the value of attribute working_group_inst_hash.
Class Method Summary collapse
-
.find(user, app_name) ⇒ Application
Find an application to which the user has access.
-
.find_all(user) ⇒ Array<Application>
Find all applications to which the user has access.
- .find_by_gear_uuid(gear_uuid) ⇒ Object
- .find_by_uuid(uuid) ⇒ Object
- .get_available_cartridges(cart_type = nil) ⇒ Object
- .hash_to_obj(hash) ⇒ Object
Instance Method Summary collapse
- #add_alias(t_server_alias) ⇒ Object
- #add_dependency(dep) ⇒ Object
- #add_dns(appname, namespace, public_hostname) ⇒ Object
- #add_group_override(from, to) ⇒ Object
- #add_node_settings(gears = nil) ⇒ Object
- #add_to_requires_feature(feature) ⇒ Object
-
#cleanup_and_delete ⇒ Object
Convenience method to cleanup an application.
-
#comp_instances ⇒ Array<Hash>
Provides an array version of the component instance map for saving in the datastore.
-
#comp_instances=(data) ⇒ Object
Rebuilds the component instance map from an array of hashes or objects.
- #complete_namespace_update(new_ns, old_ns) ⇒ Object
- #conceal_port(dependency = nil) ⇒ Object
-
#configure_dependencies ⇒ Object
Elaborates the descriptor, configures cartridges that were added to the application dependencies.
- #container ⇒ ApplicationContainerProxy deprecated Deprecated.
-
#create ⇒ ResultIO
Processes the application descriptor and creates all the gears necessary to host the application.
- #create_dns ⇒ Object
-
#delete ⇒ Object
Deletes the application object from the datastore.
-
#destroy(force = false) ⇒ Object
Destroys all gears.
- #destroy_dns ⇒ Object
-
#elaborate_descriptor {|deleted_components_list| ... } ⇒ Object
Parse the descriptor and build or update the runtime descriptor structure.
- #embedded ⇒ Array<String> deprecated Deprecated.
- #execute_connections ⇒ Object
-
#expose_port(dependency = nil) ⇒ Object
Invokes expose_port for a particular dependency on all gears that host it.
- #extended_validator ⇒ Object
-
#force_stop(dependency = nil, throw_exception_on_failure = true) ⇒ Object
Force stop a particular dependency on all gears that host it.
- #framework ⇒ String deprecated Deprecated.
- #framework_cartridge ⇒ String deprecated Deprecated.
- #gear ⇒ Gear deprecated Deprecated.
- #gears ⇒ Object
- #get_name_prefix ⇒ Object
- #get_public_ip_address ⇒ Object
- #get_user_min_max(cart_group_map) ⇒ Object
-
#group_instances ⇒ Array<Hash>
Provides an array version of the group instance map for saving in the datastore.
-
#group_instances=(data) ⇒ Object
Rebuilds the group instance map from an array of hashes or objects.
-
#health_check_path ⇒ String
Get path for checking application health.
-
#initialize(user = nil, app_name = nil, uuid = nil, node_profile = nil, framework = nil, template = nil, will_scale = false, domain = nil, init_git_url = nil) ⇒ Application
constructor
A new instance of Application.
- #prepare_namespace_update(dns_service, new_ns, old_ns) ⇒ Object
- #process_cartridge_commands(result) ⇒ Object
- #recreate_dns ⇒ Object
-
#reload(dependency = nil) ⇒ Object
Reload a particular dependency on all gears that host it.
- #remove_alias(t_server_alias) ⇒ Object
- #remove_dependency(dep) ⇒ Object
- #remove_from_requires_feature(feature) ⇒ Object
-
#restart(dependency = nil) ⇒ Object
Restart a particular dependency on all gears that host it.
-
#save ⇒ Object
Saves the application object in the datastore.
- #scaledown(comp_name = nil) ⇒ Object
- #scaleup(comp_name = nil) ⇒ Object
- #scaling_limits(dependency = nil) ⇒ Object
- #set_embedded_cart_info(cart_name, info) ⇒ Object deprecated Deprecated.
- #set_user_max(cart_group_map, max_scale) ⇒ Object
- #set_user_min(cart_group_map, min_scale) ⇒ Object
- #set_user_min_max(cart_group_map, min_scale, max_scale) ⇒ Object
- #show_port(dependency = nil) ⇒ Object
-
#show_state ⇒ Object
Get the state of the application on all gears.
-
#start(dependency = nil, stop_on_failure = true) ⇒ Object
Start a particular dependency on all gears that host it.
-
#status(dependency = nil, ret_reply = true) ⇒ Object
Retrieves status for a particular dependency on all gears that host it.
-
#stop(dependency = nil, force_stop_on_failure = true, throw_exception_on_failure = true) ⇒ Object
Stop a particular dependency on all gears that host it.
-
#system_messages(dependency = nil) ⇒ Object
Invokes system_messages for a particular dependency on all gears that host it.
- #template_scalable_app(app_name, framework) ⇒ Object
-
#threaddump(dependency = nil) ⇒ Object
Invokes threaddump for a particular dependency on all gears that host it.
-
#tidy(dependency = nil) ⇒ Object
Invokes tidy for a particular dependency on all gears that host it.
- #track_usage(gear, event, usage_type = ) ⇒ Object
- #web_cart ⇒ Object
Constructor Details
#initialize(user = nil, app_name = nil, uuid = nil, node_profile = nil, framework = nil, template = nil, will_scale = false, domain = nil, init_git_url = nil) ⇒ Application
Returns a new instance of Application.
54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 |
# File 'app/models/application.rb', line 54 def initialize(user=nil, app_name=nil, uuid=nil, node_profile=nil, framework=nil, template=nil, will_scale=false, domain=nil, init_git_url=nil) self.user = user self.domain = domain self.node_profile = node_profile self.creation_time = DateTime::now().strftime self.uuid = uuid || OpenShift::Model.gen_uuid self.scalable = will_scale self.ngears = 0 if template.nil? if self.scalable descriptor_hash = YAML.load(template_scalable_app(app_name, framework)) from_descriptor(descriptor_hash) self.proxy_cartridge = "haproxy-1.4" else from_descriptor({"Name"=>app_name}) self.requires_feature = [] self.requires_feature << framework unless framework.nil? end @init_git_url = init_git_url unless init_git_url.nil? else template_descriptor = YAML.load(template.descriptor_yaml) template_descriptor["Name"] = app_name if not template_descriptor["Configure-Order"] requires_list = template_descriptor["Requires"] || [] template_descriptor["Configure-Order"] = requires_list end from_descriptor(template_descriptor) @init_git_url = template.git_url end self.categories -= ["cartridge"] end |
Instance Attribute Details
#aliases ⇒ Object
Returns the value of attribute aliases.
6 7 8 |
# File 'app/models/application.rb', line 6 def aliases @aliases end |
#cart_data ⇒ Object
Returns the value of attribute cart_data.
6 7 8 |
# File 'app/models/application.rb', line 6 def cart_data @cart_data end |
#comp_instance_map ⇒ Object
Returns the value of attribute comp_instance_map.
6 7 8 |
# File 'app/models/application.rb', line 6 def comp_instance_map @comp_instance_map end |
#configure_order ⇒ Object
Returns the value of attribute configure_order.
6 7 8 |
# File 'app/models/application.rb', line 6 def configure_order @configure_order end |
#conn_endpoints_list ⇒ Object
Returns the value of attribute conn_endpoints_list.
6 7 8 |
# File 'app/models/application.rb', line 6 def conn_endpoints_list @conn_endpoints_list end |
#creation_time ⇒ Object
Returns the value of attribute creation_time.
6 7 8 |
# File 'app/models/application.rb', line 6 def creation_time @creation_time end |
#destroyed_gears ⇒ Object
Returns the value of attribute destroyed_gears.
6 7 8 |
# File 'app/models/application.rb', line 6 def destroyed_gears @destroyed_gears end |
#domain ⇒ Object
Returns the value of attribute domain.
6 7 8 |
# File 'app/models/application.rb', line 6 def domain @domain end |
#group_instance_map ⇒ Object
Returns the value of attribute group_instance_map.
6 7 8 |
# File 'app/models/application.rb', line 6 def group_instance_map @group_instance_map end |
#group_override_map ⇒ Object
Returns the value of attribute group_override_map.
6 7 8 |
# File 'app/models/application.rb', line 6 def group_override_map @group_override_map end |
#init_git_url ⇒ Object
Returns the value of attribute init_git_url.
6 7 8 |
# File 'app/models/application.rb', line 6 def init_git_url @init_git_url end |
#ngears ⇒ Object
Returns the value of attribute ngears.
6 7 8 |
# File 'app/models/application.rb', line 6 def ngears @ngears end |
#node_profile ⇒ Object
Returns the value of attribute node_profile.
6 7 8 |
# File 'app/models/application.rb', line 6 def node_profile @node_profile end |
#proxy_cartridge ⇒ Object
Returns the value of attribute proxy_cartridge.
6 7 8 |
# File 'app/models/application.rb', line 6 def proxy_cartridge @proxy_cartridge end |
#scalable ⇒ Object
Returns the value of attribute scalable.
6 7 8 |
# File 'app/models/application.rb', line 6 def scalable @scalable end |
#ssh_keys ⇒ Object
Returns the value of attribute ssh_keys.
6 7 8 |
# File 'app/models/application.rb', line 6 def ssh_keys @ssh_keys end |
#start_order ⇒ Object
Returns the value of attribute start_order.
6 7 8 |
# File 'app/models/application.rb', line 6 def start_order @start_order end |
#state ⇒ Object
Returns the value of attribute state.
6 7 8 |
# File 'app/models/application.rb', line 6 def state @state end |
#usage_records ⇒ Object
Returns the value of attribute usage_records.
6 7 8 |
# File 'app/models/application.rb', line 6 def usage_records @usage_records end |
#user ⇒ Object
Returns the value of attribute user.
6 7 8 |
# File 'app/models/application.rb', line 6 def user @user end |
#user_agent ⇒ Object
Returns the value of attribute user_agent.
6 7 8 |
# File 'app/models/application.rb', line 6 def user_agent @user_agent end |
#uuid ⇒ Object
Returns the value of attribute uuid.
6 7 8 |
# File 'app/models/application.rb', line 6 def uuid @uuid end |
#working_comp_inst_hash ⇒ Object
Returns the value of attribute working_comp_inst_hash.
6 7 8 |
# File 'app/models/application.rb', line 6 def working_comp_inst_hash @working_comp_inst_hash end |
#working_group_inst_hash ⇒ Object
Returns the value of attribute working_group_inst_hash.
6 7 8 |
# File 'app/models/application.rb', line 6 def working_group_inst_hash @working_group_inst_hash end |
Class Method Details
.find(user, app_name) ⇒ Application
Find an application to which user has access
184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 |
# File 'app/models/application.rb', line 184 def self.find(user, app_name) return nil if app_name.nil? or app_name.empty? app = nil if user.applications user.applications.each do |next_app| if next_app.name.downcase == app_name.downcase app = next_app break end end else app = super(user.login, app_name) return nil unless app app.user = user app.reset_state end app end |
.find_all(user) ⇒ Array<Application>
Find an applications to which user has access
206 207 208 209 210 211 212 213 214 215 216 217 218 219 |
# File 'app/models/application.rb', line 206 def self.find_all(user) apps = nil if user.applications apps = user.applications else apps = super(user.login) apps.each do |app| app.user = user app.reset_state end user.applications = apps end apps end |
.find_by_gear_uuid(gear_uuid) ⇒ Object
221 222 223 224 225 226 227 228 229 230 231 232 233 |
# File 'app/models/application.rb', line 221 def self.find_by_gear_uuid(gear_uuid) hash = OpenShift::DataStore.instance.find_by_gear_uuid(gear_uuid) return nil unless hash user = CloudUser.hash_to_obj hash user.applications.each do |next_app| next_app.gears.each do |gear| if gear.uuid == gear_uuid return next_app,gear end end end return nil end |
.find_by_uuid(uuid) ⇒ Object
235 236 237 238 239 240 241 242 243 244 245 246 247 |
# File 'app/models/application.rb', line 235 def self.find_by_uuid(uuid) hash = OpenShift::DataStore.instance.find_by_uuid(self.name,uuid) return nil unless hash user = CloudUser.hash_to_obj hash app = nil user.applications.each do |next_app| if next_app.uuid == uuid app = next_app break end end return app end |
.Application.get_available_cartridges(cart_type) ⇒ Array<String> .Application.get_available_cartridges ⇒ Array<String>
267 268 269 |
# File 'app/models/application.rb', line 267 def self.get_available_cartridges(cart_type=nil) cart_names = CartridgeCache.cartridge_names(cart_type) end |
.hash_to_obj(hash) ⇒ Object
249 250 251 252 253 254 255 256 257 |
# File 'app/models/application.rb', line 249 def self.hash_to_obj(hash) domain = nil if hash["domain"] domain = Domain.hash_to_obj(hash["domain"]) end app = super(hash) app.domain = domain app end |
Instance Method Details
#add_alias(t_server_alias) ⇒ Object
1126 1127 1128 1129 1130 1131 1132 1133 1134 1135 1136 1137 1138 1139 1140 1141 1142 1143 1144 1145 1146 1147 1148 1149 1150 1151 1152 1153 1154 1155 1156 1157 |
# File 'app/models/application.rb', line 1126 def add_alias(t_server_alias) # Server aliases validate as DNS host names in accordance with RFC # 1123 and RFC 952. Additionally, OpenShift does not allow an # Alias to be an IP address or a host in the service domain. # Since DNS is case insensitive, all names are downcased for # indexing/compares. server_alias = t_server_alias.downcase if !(server_alias =~ /\A[0-9a-zA-Z\-\.]+\z/) or (server_alias =~ /#{Rails.configuration.openshift[:domain_suffix]}$/) or (server_alias.length > 255 ) or (server_alias.length == 0 ) or (server_alias =~ /^\d+\.\d+\.\d+\.\d+$/) raise OpenShift::UserException.new("Invalid Server Alias '#{t_server_alias}' specified", 105) end self.aliases = [] unless self.aliases raise OpenShift::UserException.new("Alias '#{server_alias}' already exists for '#{@name}'", 255) if self.aliases.include? server_alias reply = ResultIO.new begin self.aliases.push(server_alias) self.save reply.append self.container.add_alias(self, self.gear, server_alias) rescue Exception => e Rails.logger.debug e. Rails.logger.debug e.backtrace.inspect reply.append self.container.remove_alias(self, self.gear, server_alias) self.aliases.delete(server_alias) self.save raise end reply end |
#add_dependency(dep) ⇒ Object
1180 1181 1182 1183 1184 1185 1186 1187 1188 1189 1190 1191 1192 1193 1194 1195 1196 1197 1198 1199 1200 1201 1202 1203 1204 1205 |
# File 'app/models/application.rb', line 1180 def add_dependency(dep) reply = ResultIO.new self.class.notify_observers(:before_add_dependency, {:application => self, :dependency => dep, :reply => reply}) # Create persistent storage app entry on configure (one of the first things) Rails.logger.debug "DEBUG: Adding embedded app info from persistent storage: #{@name}:#{dep}" self.cart_data = {} if @cart_data.nil? raise OpenShift::UserException.new("#{dep} already embedded in '#{@name}'", 136) if self..include? dep if self.scalable allowed_cartridges = SCALABLE_EMBEDDED_CARTS & Application.get_available_cartridges.sort raise OpenShift::UserException.new("#{dep} cannot be embedded in scalable app '#{@name}'. Allowed cartridges: #{allowed_cartridges.join(', ')}", 108) if not SCALABLE_EMBEDDED_CARTS.include? dep end add_to_requires_feature(dep) begin reply.append self.configure_dependencies self.execute_connections rescue Exception => e remove_from_requires_feature(dep) self.elaborate_descriptor self.save raise e end self.class.notify_observers(:after_add_dependency, {:application => self, :dependency => dep, :reply => reply}) reply end |
#add_dns(appname, namespace, public_hostname) ⇒ Object
897 898 899 900 901 902 903 904 905 |
# File 'app/models/application.rb', line 897 def add_dns(appname, namespace, public_hostname) dns = OpenShift::DnsService.instance begin dns.register_application(appname, namespace, public_hostname) dns.publish ensure dns.close end end |
#add_group_override(from, to) ⇒ Object
1411 1412 1413 1414 1415 |
# File 'app/models/application.rb', line 1411 def add_group_override(from, to) prof = @profile_name_map[@default_profile] prof.group_overrides = [] if prof.group_overrides.nil? prof.group_overrides << [from, to] end |
#add_node_settings(gears = nil) ⇒ Object
861 862 863 864 865 866 867 868 869 870 871 872 873 874 875 876 877 878 879 880 881 882 883 884 885 886 887 888 889 890 891 892 893 894 895 |
# File 'app/models/application.rb', line 861 def add_node_settings(gears=nil) reply = ResultIO.new gears = self.gears unless gears self.ssh_keys = {} unless self.ssh_keys if @user.env_vars || @user.ssh_keys || @user.system_ssh_keys tag = "" handle = RemoteJob.create_parallel_job RemoteJob.run_parallel_on_gears(gears, handle) { |exec_handle, gear| @user.env_vars.each do |key, value| job = gear.env_var_job_add(key, value) RemoteJob.add_parallel_job(exec_handle, tag, gear, job) end if @user.env_vars @user.ssh_keys.each do |key_name, key_info| job = gear.ssh_key_job_add(key_info["key"], key_info["type"], key_name) RemoteJob.add_parallel_job(exec_handle, tag, gear, job) end if @user.ssh_keys @user.system_ssh_keys.each do |key_name, key_info| job = gear.ssh_key_job_add(key_info, nil, key_name) RemoteJob.add_parallel_job(exec_handle, tag, gear, job) end if @user.system_ssh_keys self.ssh_keys.each do |key_name, key_info| job = gear.ssh_key_job_add(key_info, nil, key_name) RemoteJob.add_parallel_job(exec_handle, tag, gear, job) end } RemoteJob.get_parallel_run_results(handle) { |tag, gear, output, status| if status != 0 raise OpenShift::NodeException.new("Error applying settings to gear: #{gear} with status: #{status} and output: #{output}", 143) end } end reply end |
#add_to_requires_feature(feature) ⇒ Object
97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 |
# File 'app/models/application.rb', line 97 def add_to_requires_feature(feature) prof = @profile_name_map[@default_profile] if self.scalable # add to the proxy component comp_name = "proxy" if comp_name.nil? prof = @profile_name_map[@default_profile] cinst = ComponentInstance::find_component_in_cart(prof, self, comp_name, self.get_name_prefix) raise OpenShift::NodeException.new("Cannot find component '#{comp_name}' in app #{self.name}.", 135, result_io) if cinst.nil? comp,profile,cart = cinst.get_component_definition(self) raise OpenShift::UserException.new("#{feature} already embedded in '#{@name}'", 136) if comp.depends.include? feature fcart = self.framework conn = OpenShift::Connection.new("#{feature}-web-#{fcart}") conn.components = ["proxy/#{feature}", "web/#{fcart}"] prof.add_connection(conn) conn = OpenShift::Connection.new("#{feature}-proxy-#{fcart}") conn.components = ["proxy/#{feature}", "proxy/#{fcart}"] prof.add_connection(conn) # FIXME: Booya - hacks galore -- fix this to be more generic when # scalable apps allow more components in SCALABLE_EMBEDDED_CARTS if feature == "jenkins-client-1.4" conn = OpenShift::Connection.new("#{feature}-proxy-haproxy-1.4") conn.components = ["proxy/#{feature}", "proxy/haproxy-1.4"] prof.add_connection(conn) end comp.depends << feature else self.requires_feature.each { |cart| conn = OpenShift::Connection.new("#{feature}-#{cart}") conn.components = [cart, feature] prof.add_connection(conn) } self.requires_feature << feature end end |
#cleanup_and_delete ⇒ Object
Convenience method to cleanup an application
329 330 331 332 333 334 335 |
# File 'app/models/application.rb', line 329 def cleanup_and_delete reply = ResultIO.new reply.append self.destroy_dns reply.append self.destroy self.delete reply end |
#comp_instances ⇒ Array<Hash>
Provides an array version of the component instance map for saving in the datastore.
1352 1353 1354 1355 |
# File 'app/models/application.rb', line 1352 def comp_instances @comp_instance_map = {} if @comp_instance_map.nil? @comp_instance_map.values end |
#comp_instances=(data) ⇒ Object
Rebuilds the component instance map from an array of hashes or objects
1359 1360 1361 1362 1363 1364 1365 1366 1367 1368 1369 1370 1371 |
# File 'app/models/application.rb', line 1359 def comp_instances=(data) comp_instance_map_will_change! @comp_instance_map = {} if @comp_instance_map.nil? data.each do |value| if value.class == ComponentInstance @comp_instance_map[value.name] = value else key = value["name"] @comp_instance_map[key] = ComponentInstance.new @comp_instance_map[key].attributes=value end end end |
#complete_namespace_update(new_ns, old_ns) ⇒ Object
1105 1106 1107 1108 1109 1110 1111 1112 1113 1114 1115 1116 1117 1118 1119 1120 1121 1122 1123 1124 |
# File 'app/models/application.rb', line 1105 def complete_namespace_update(new_ns, old_ns) self.comp_instances.each do |comp_inst| comp_inst.cart_properties.each do |prop_key, prop_value| comp_inst.cart_properties[prop_key] = prop_value.gsub(/-#{old_ns}.#{Rails.configuration.openshift[:domain_suffix]}/, "-#{new_ns}.#{Rails.configuration.openshift[:domain_suffix]}") end end self..each_key do |framework| if self.[framework].has_key?('info') info = self.[framework]['info'] info.gsub!(/-#{old_ns}.#{Rails.configuration.openshift[:domain_suffix]}/, "-#{new_ns}.#{Rails.configuration.openshift[:domain_suffix]}") self.[framework]['info'] = info end end # elaborate descriptor again to execute connections, because connections need to be renewed self.elaborate_descriptor self.execute_connections self.domain.namespace = new_ns self.save end |
#conceal_port(dependency = nil) ⇒ Object
792 793 794 795 796 797 798 799 800 801 802 803 804 805 806 807 808 809 810 811 812 813 |
# File 'app/models/application.rb', line 792 def conceal_port(dependency=nil) reply = ResultIO.new self.comp_instance_map.each do |comp_inst_name, comp_inst| next if !dependency.nil? and (comp_inst.parent_cart_name != dependency) next if comp_inst.name == "@@app" next if comp_inst.parent_cart_name == self.name next if comp_inst.parent_cart_name == self.proxy_cartridge group_inst = self.group_instance_map[comp_inst.group_instance_name] s,f = run_on_gears(group_inst.gears, reply, false) do |gear, r| r.append gear.conceal_port(comp_inst) end # Not all cartridges will have this hook. f.each do |fail| next if fail[:exception].resultIO.exitcode == 127 raise fail[:exception] end end reply end |
#configure_dependencies ⇒ Object
Elaborates the descriptor, configures cartridges that were added to the application dependencies. If a node is empty after removing components, then the gear is destroyed. Errors that occur while removing cartridges are logged but no exception is thrown. If an error occurs while configuring a cartridge, then the cartridge is deconfigured on all nodes and an exception is thrown.
444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 512 513 514 515 516 |
# File 'app/models/application.rb', line 444 def configure_dependencies reply = ResultIO.new self.class.notify_observers(:before_application_configure, {:application => self, :reply => reply}) elaborate_descriptor exceptions = [] Rails.logger.debug "Configure order is #{self.configure_order.inspect}" #process new additions #TODO: fix configure after framework cartridge is no longer a requirement for adding embedded cartridges self.configure_order.each do |comp_inst_name| comp_inst = self.comp_instance_map[comp_inst_name] next if comp_inst.parent_cart_name == self.name group_inst = self.group_instance_map[comp_inst.group_instance_name] begin group_inst.fulfil_requirements(self) run_on_gears(group_inst.get_unconfigured_gears(comp_inst), reply) do |gear, r| doExpose = false if self.scalable and comp_inst.parent_cart_name!=self.proxy_cartridge doExpose = true if not gear.configured_components.include? comp_inst.name end r.append gear.configure(comp_inst, @init_git_url) begin r.append gear.expose_port(comp_inst) if doExpose rescue Exception=>e end process_cartridge_commands(r) end rescue Exception => e Rails.logger.debug e. Rails.logger.debug e.backtrace.inspect if e.kind_of?(OpenShift::GearsException) successful_gears = [] successful_gears = e.successful.map{|g| g[:gear]} if e.successful failed_gears = [] failed_gears = e.failed.map{|g| g[:gear]} if e.failed gear_exception = e.exception #remove failed component from all gears run_on_gears(successful_gears, reply, false) do |gear, r| r.append gear.deconfigure(comp_inst) process_cartridge_commands(r) end run_on_gears(failed_gears, reply, false) do |gear, r| r.append gear.deconfigure(comp_inst, true) process_cartridge_commands(r) end else gear_exception = e end # destroy any unused gears # TODO : if the destroy fails below... the user still sees the error as configure failure # Then to recover, if we re-elaborate (like in add_dependency), then the group instance will get lost # and any failed gears below will leak (i.e. 
they exist on node, their destroy failed, but they do not have any handle in Mongo) run_on_gears(group_inst.gears, reply, false) do |gear, r| r.append group_inst.remove_gear(gear) if gear.configured_components.length == 0 end self.save exceptions << gear_exception end end unless exceptions.empty? raise exceptions.first end self.save self.class.notify_observers(:after_application_configure, {:application => self, :reply => reply}) reply end |
#container ⇒ ApplicationContainerProxy
Get the ApplicationContainerProxy object for the first gear the application is running on
1298 1299 1300 1301 |
# File 'app/models/application.rb', line 1298 def container return nil if self.gear.nil? return self.gear.get_proxy end |
#create ⇒ ResultIO
Processes the application descriptor and creates all the gears necessary to host the application. Destroys application on all gears if any gear fails
287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 |
# File 'app/models/application.rb', line 287 def create result_io = ResultIO.new gears_created = [] begin self.node_profile = DEFAULT_NODE_PROFILE unless self.node_profile elaborate_descriptor self.class.notify_observers(:before_application_create, {:application => self, :reply => result_io}) if self.scalable raise OpenShift::UserException.new("Scalable app cannot be of type #{UNSCALABLE_FRAMEWORKS.join(' ')}", "108", result_io) if UNSCALABLE_FRAMEWORKS.include? framework min_gear_count = 0 group_instances.uniq.each { |gi| min_gear_count += gi.min } if ((user.consumed_gears+min_gear_count) > user.max_gears) raise OpenShift::UserException.new("Creating this application requires #{min_gear_count} gears, and you are using #{user.consumed_gears} of your #{user.max_gears} available gears.", 104) end end user.applications = [] unless user.applications user.applications << self Rails.logger.debug "Creating gears" group_instances.uniq.each do |ginst| create_result, new_gear = ginst.add_gear(self) result_io.append create_result end self.gear.name = self.name unless scalable self.class.notify_observers(:application_creation_success, {:application => self, :reply => result_io}) rescue Exception => e Rails.logger.debug e. Rails.logger.debug e.backtrace.join("\n") Rails.logger.debug "Rolling back application gear creation" result_io.append self.destroy(true) self.class.notify_observers(:application_creation_failure, {:application => self, :reply => result_io}) raise ensure save end self.class.notify_observers(:after_application_create, {:application => self, :reply => result_io}) result_io end |
#create_dns ⇒ Object
907 908 909 910 911 912 913 914 915 916 |
# File 'app/models/application.rb', line 907 def create_dns reply = ResultIO.new self.class.notify_observers(:before_create_dns, {:application => self, :reply => reply}) public_hostname = self.container.get_public_hostname add_dns(@name, @domain.namespace, public_hostname) self.class.notify_observers(:after_create_dns, {:application => self, :reply => reply}) reply end |
#delete ⇒ Object
Deletes the application object from the datastore
280 281 282 |
# File 'app/models/application.rb', line 280 def delete super(user.login) end |
#destroy(force = false) ⇒ Object
Destroys all gears.
338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 |
# File 'app/models/application.rb', line 338 def destroy(force=false) reply = ResultIO.new self.class.notify_observers(:before_application_destroy, {:application => self, :reply => reply}) # Only need to destroy if application has been elaborated first unless self.configure_order.nil? # Destroy in the reverse order of configure. group_instances = [] self.configure_order.reverse.each do |comp_inst_name| comp_inst = self.comp_instance_map[comp_inst_name] next if comp_inst.parent_cart_name == self.name group_inst = self.group_instance_map[comp_inst.group_instance_name] group_instances.delete(group_inst) group_instances << group_inst end failures = [] group_instances.each do |group_inst| s,f = run_on_gears(group_inst.gears, reply, false) do |gear, r| r.append group_inst.remove_gear(gear, force) end failures += f end begin self.save if self.persisted? rescue Exception => e # pass on failure... because we maybe wanting a delete here instead anyway end failures.each do |data| Rails.logger.debug("Unable to clean up application on gear #{data[:gear]} due to exception #{data[:exception].}") Rails.logger.debug(data[:exception].backtrace.inspect) end raise OpenShift::NodeException.new("Could not destroy all gears of application.", 1, reply) if failures.length > 0 end self.class.notify_observers(:after_application_destroy, {:application => self, :reply => reply}) reply end |
#destroy_dns ⇒ Object
918 919 920 921 922 923 924 925 926 927 928 929 930 931 932 933 934 935 936 937 938 939 |
# File 'app/models/application.rb', line 918 def destroy_dns reply = ResultIO.new self.class.notify_observers(:before_destroy_dns, {:application => self, :reply => reply}) dns = OpenShift::DnsService.instance begin dns.deregister_application(@name,@domain.namespace) if self.scalable # find the group instance where the web-cartridge is residing self.group_instance_map.keys.each { |ginst_name| ginst = self.group_instance_map[ginst_name] ginst.gears.each { |gear| dns.deregister_application(gear.name,@domain.namespace) } } end dns.publish ensure dns.close end self.class.notify_observers(:after_destroy_dns, {:application => self, :reply => reply}) reply end |
#elaborate_descriptor {|deleted_components_list| ... } ⇒ Object
Parse the descriptor and build or update the runtime descriptor structure
1418 1419 1420 1421 1422 1423 1424 1425 1426 1427 1428 1429 1430 1431 1432 1433 1434 1435 1436 1437 1438 1439 1440 1441 1442 1443 1444 1445 1446 1447 1448 1449 1450 1451 1452 1453 1454 1455 1456 1457 1458 1459 1460 1461 1462 1463 1464 1465 1466 1467 1468 1469 1470 1471 1472 1473 1474 |
# File 'app/models/application.rb', line 1418
# Parse the descriptor and build or update the runtime descriptor structure:
# group/component instance maps, connection endpoints, co-locations and
# configure/start orders. Yields the names of component instances that no
# longer exist, then prunes both maps.
def elaborate_descriptor
  self.group_instance_map = {} if group_instance_map.nil?
  self.comp_instance_map = {} if comp_instance_map.nil?
  self.working_comp_inst_hash = {}
  self.working_group_inst_hash = {}
  self.group_override_map = {}
  self.conn_endpoints_list = []
  default_profile = @profile_name_map[@default_profile]

  default_profile.groups.each do |g|
    #gpath = self.name + "." + g.name
    gpath = self.get_name_prefix + g.get_name_prefix
    gi = working_group_inst_hash[gpath]
    if gi.nil?
      gi = self.group_instance_map[gpath]
      if gi.nil?
        gi = GroupInstance.new(self, self.name, self.default_profile, g.name, gpath)
      else
        gi.merge(self.name, self.default_profile, g.name, gpath)
      end
    else
      gi.merge(self.name, self.default_profile, g.name, gpath)
    end
    self.group_instance_map[gpath] = gi
    self.working_group_inst_hash[gpath] = gi
    gi.elaborate(default_profile, g, self.get_name_prefix, self)
  end

  # make connection_endpoints out of provided connections
  default_profile.connections.each do |conn|
    inst1 = ComponentInstance::find_component_in_cart(default_profile, self, conn.components[0], self.get_name_prefix)
    inst2 = ComponentInstance::find_component_in_cart(default_profile, self, conn.components[1], self.get_name_prefix)
    ComponentInstance::establish_connections(inst1, inst2, self)
  end

  # resolve group co-locations
  colocate_groups

  # get configure_order and start_order
  get_exec_order(default_profile)

  # Report component instances that vanished in this elaboration pass.
  deleted_components_list = []
  self.comp_instance_map.each do |k, v|
    deleted_components_list << k if self.working_comp_inst_hash[k].nil?
  end
  yield deleted_components_list if block_given?

  # delete entries in {group,comp}_instance_map that do
  # not exist in working_{group,comp}_inst_hash
  self.group_instance_map.delete_if do |k, v|
    v.component_instances.delete(k) if self.working_comp_inst_hash[k].nil? and v.component_instances.include?(k)
    self.working_group_inst_hash[k].nil?
  end
  self.comp_instance_map.delete_if { |k, v| self.working_comp_inst_hash[k].nil? }
end
#embedded ⇒ Array<String>
Provide a list of direct dependencies of the application that are hosted on the same gear as the “framework” cartridge.
1327 1328 1329 1330 1331 1332 1333 1334 1335 1336 1337 1338 1339 1340 |
# File 'app/models/application.rb', line 1327
# Provide a list of direct dependencies of the application that are hosted
# on the same gear as the "framework" cartridge.
# NOTE(review): the method name and the local holding the embedded cart list
# were stripped by extraction ("def = CartridgeCache..."); reconstructed from
# the documented signature "#embedded" — confirm against upstream file.
# @return [Hash] embedded cartridge name => cart properties (plus "info" when present)
def embedded
  embedded_carts = CartridgeCache.cartridge_names('embedded')
  retval = {}
  self.comp_instance_map.values.each do |comp_inst|
    if embedded_carts.include?(comp_inst.parent_cart_name)
      if comp_inst.cart_data.first.nil?
        retval[comp_inst.parent_cart_name] = comp_inst.cart_properties
      else
        retval[comp_inst.parent_cart_name] = comp_inst.cart_properties.merge({"info" => comp_inst.cart_data.first})
      end
    end
  end
  retval
end
#execute_connections ⇒ Object
518 519 520 521 522 523 524 525 526 527 528 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544 545 546 547 548 549 550 551 552 553 554 555 556 557 558 559 |
# File 'app/models/application.rb', line 518
# For each connection endpoint of a scalable app: run the publisher
# connector on the publishing group's gears, shell-escape the collected
# output, and feed it to the subscriber connector on the subscribing gears.
def execute_connections
  return unless self.scalable
  self.conn_endpoints_list.each do |conn|
    pub_inst = self.comp_instance_map[conn.from_comp_inst]
    pub_ginst = self.group_instance_map[pub_inst.group_instance_name]
    tag = ""

    handle = RemoteJob.create_parallel_job
    RemoteJob.run_parallel_on_gears(pub_ginst.gears, handle) do |exec_handle, gear|
      appname = gear.name
      connector_name = conn.from_connector.name
      cart = pub_inst.parent_cart_name
      input_args = [appname, self.domain.namespace, gear.uuid]
      job = gear.get_execute_connector_job(cart, connector_name, input_args)
      RemoteJob.add_parallel_job(exec_handle, tag, gear, job)
    end
    pub_out = []
    RemoteJob.get_parallel_run_results(handle) do |tag, gear, output, status|
      pub_out.push("'#{gear}'='#{output}'") if status == 0
    end
    input_to_subscriber = Shellwords::shellescape(pub_out.join(' '))
    Rails.logger.debug "Output of publisher - '#{pub_out}'"

    sub_inst = self.comp_instance_map[conn.to_comp_inst]
    sub_ginst = self.group_instance_map[sub_inst.group_instance_name]
    handle = RemoteJob.create_parallel_job
    RemoteJob.run_parallel_on_gears(sub_ginst.gears, handle) do |exec_handle, gear|
      appname = gear.name
      connector_name = conn.to_connector.name
      cart = sub_inst.parent_cart_name
      input_args = [appname, self.domain.namespace, gear.uuid, input_to_subscriber]
      job = gear.get_execute_connector_job(cart, connector_name, input_args)
      RemoteJob.add_parallel_job(exec_handle, tag, gear, job)
    end
    # we dont care about subscriber's output/status
  end
end
#expose_port(dependency = nil) ⇒ Object
Invokes expose_port for a particular dependency on all gears that host it.
769 770 771 772 773 774 775 776 777 778 779 780 781 782 783 784 785 786 787 788 789 790 |
# File 'app/models/application.rb', line 769
# Invokes expose_port for a particular dependency on all gears that host it.
# @param dependency [String, nil] restrict to this cartridge, or all when nil
# @return [ResultIO]
def expose_port(dependency=nil)
  reply = ResultIO.new
  self.comp_instance_map.each do |comp_inst_name, comp_inst|
    next if !dependency.nil? && comp_inst.parent_cart_name != dependency
    next if comp_inst.name == "@@app"
    next if comp_inst.parent_cart_name == self.name
    next if comp_inst.parent_cart_name == self.proxy_cartridge

    group_inst = self.group_instance_map[comp_inst.group_instance_name]
    _succeeded, failed = run_on_gears(group_inst.gears, reply, false) do |gear, r|
      r.append gear.expose_port(comp_inst)
    end
    # Not all cartridges will have this hook (exit code 127 = not found).
    failed.each do |fail|
      next if fail[:exception].resultIO.exitcode == 127
      raise fail[:exception]
    end
  end
  reply
end
#extended_validator ⇒ Object
45 46 47 |
# File 'app/models/application.rb', line 45
# Triggers the :validate_application observer hook for extended validation.
def extended_validator
  notify_observers(:validate_application)
end
#force_stop(dependency = nil, throw_exception_on_failure = true) ⇒ Object
Force stop a particular dependency on all gears that host it.
617 618 619 620 621 622 623 624 625 626 627 628 629 630 631 632 633 |
# File 'app/models/application.rb', line 617
# Force stop a particular dependency on all gears that host it.
# @param dependency [String, nil] restrict to this cartridge, or all when nil
# @param throw_exception_on_failure [Boolean] raise the first gear failure
# @return [ResultIO]
def force_stop(dependency=nil, throw_exception_on_failure=true)
  reply = ResultIO.new
  self.class.notify_observers(:before_force_stop, {:application => self, :reply => reply, :dependency => dependency})
  self.start_order.each do |comp_inst_name|
    comp_inst = self.comp_instance_map[comp_inst_name]
    next if !dependency.nil? && comp_inst.parent_cart_name != dependency

    group_inst = self.group_instance_map[comp_inst.group_instance_name]
    _succeeded, failed = run_on_gears(group_inst.gears, reply, false) do |gear, r|
      r.append gear.force_stop(comp_inst)
    end
    raise failed[0][:exception] if failed.length > 0 && throw_exception_on_failure
  end
  self.class.notify_observers(:after_force_stop, {:application => self, :reply => reply, :dependency => dependency})
  reply
end
#framework ⇒ String
Get the name of framework cartridge in use by the application
1315 1316 1317 1318 1319 1320 1321 1322 |
# File 'app/models/application.rb', line 1315
# Get the name of the framework (standalone) cartridge in use by the application.
# @return [String, nil]
def framework
  framework_carts = CartridgeCache.cartridge_names('standalone')
  self.comp_instance_map.each_value do |cinst|
    return cinst.parent_cart_name if framework_carts.include?(cinst.parent_cart_name)
  end
  nil
end
#framework_cartridge ⇒ String
Get the name of framework cartridge in use by the application without the version suffix
1306 1307 1308 1309 1310 |
# File 'app/models/application.rb', line 1306
# Get the name of the framework cartridge without the version suffix
# (e.g. "php-5.3" -> "php").
# @return [String, nil]
def framework_cartridge
  fcart = self.framework
  return nil if fcart.nil?
  fcart.split('-')[0..-2].join('-')
end
#gear ⇒ Gear
Returns the first Gear object on which the application is running
1262 1263 1264 1265 1266 1267 1268 1269 1270 1271 1272 1273 1274 1275 1276 1277 |
# File 'app/models/application.rb', line 1262
# Returns the first Gear object on which the application is running.
# For scalable apps this prefers the gear of the proxy group instance.
# @return [Gear, nil]
def gear
  elaborate_descriptor if self.group_instances.nil?

  if scalable
    self.group_instance_map.each do |ginst_name, ginst|
      return ginst.gears.first if ginst_name.include? self.proxy_cartridge
    end
  end

  group_instance = self.group_instances.first
  return nil unless group_instance
  group_instance.gears.first
end
#gears ⇒ Object
383 384 385 |
# File 'app/models/application.rb', line 383
# All gears across the application's (de-duplicated) group instances.
# @return [Array<Gear>]
def gears
  self.group_instances.uniq.flat_map { |ginst| ginst.gears }
end
#get_name_prefix ⇒ Object
1407 1408 1409 |
# File 'app/models/application.rb', line 1407
# Prefix used when building group/component instance paths for this app.
# @return [String]
def get_name_prefix
  "@@app"
end
#get_public_ip_address ⇒ Object
1250 1251 1252 1253 1254 1255 1256 1257 |
# File 'app/models/application.rb', line 1250
# Public IP address of the application's container; nil (with a debug log)
# on any failure.
# @return [String, nil]
def get_public_ip_address
  self.container.get_public_ip_address
rescue Exception => e
  Rails.logger.debug e.backtrace.inspect
  nil
end
#get_user_min_max(cart_group_map) ⇒ Object
956 957 958 959 960 961 962 963 964 965 966 967 968 969 970 971 972 973 974 975 976 977 |
# File 'app/models/application.rb', line 956
# Sums current and supported scaling limits over the given groups.
# A value of -1 anywhere means "unlimited" and is sticky for that total.
# @return [Array] cart_current_min, cart_current_max, sup_min, sup_max
def get_user_min_max(cart_group_map)
  sup_min = 0
  sup_max = nil
  cart_current_min = 0
  cart_current_max = nil
  cart_group_map.each do |group_name, component_instance_list|
    ginst = self.group_instance_map[group_name]
    sup_min += ginst.supported_min
    cart_current_min += ginst.min
    if sup_max.nil? or ginst.supported_max == -1
      sup_max = ginst.supported_max
    else
      sup_max += ginst.supported_max unless sup_max == -1
    end
    if cart_current_max.nil? or ginst.max == -1
      cart_current_max = ginst.max
    else
      cart_current_max += ginst.max unless cart_current_max == -1
    end
  end
  return cart_current_min, cart_current_max, sup_min, sup_max
end
#group_instances ⇒ Array<Hash>
Provides an array version of the group instance map for saving in the datastore.
1375 1376 1377 1378 1379 1380 1381 1382 1383 1384 1385 |
# File 'app/models/application.rb', line 1375
# Provides an array version of the group instance map for saving in the
# datastore; each instance's reused_by lists the map keys that point at it.
# @return [Array<GroupInstance>]
def group_instances
  @group_instance_map = {} if @group_instance_map.nil?
  keys = @group_instance_map.keys
  unique_instances = @group_instance_map.values.uniq
  unique_instances.each do |group_inst|
    group_inst.reused_by = keys.clone.delete_if { |k| @group_instance_map[k] != group_inst }
  end
  unique_instances
end
#group_instances=(data) ⇒ Object
Rebuilds the group instance map from an array of hashes or objects
1389 1390 1391 1392 1393 1394 1395 1396 1397 1398 1399 1400 1401 1402 1403 1404 1405 |
# File 'app/models/application.rb', line 1389
# Rebuilds the group instance map from an array of GroupInstance objects
# or attribute hashes, re-pointing every reused_by key at the instance.
def group_instances=(data)
  group_instance_map_will_change!
  @group_instance_map = {} if @group_instance_map.nil?
  data.each do |value|
    if value.class == GroupInstance
      ginst = value
    else
      ginst = GroupInstance.new(self)
      ginst.attributes = value
    end
    ginst.reused_by.each { |k| @group_instance_map[k] = ginst }
  end
end
#health_check_path ⇒ String
Get path for checking application health
1478 1479 1480 1481 1482 1483 1484 1485 1486 1487 1488 1489 |
# File 'app/models/application.rb', line 1478
# Get path for checking application health, based on the framework cartridge.
# @return [String]
def health_check_path
  case self.framework_cartridge
  when 'php', 'zend' then 'health_check.php'
  when 'perl'        then 'health_check.pl'
  else                    'health'
  end
end
#prepare_namespace_update(dns_service, new_ns, old_ns) ⇒ Object
1087 1088 1089 1090 1091 1092 1093 1094 1095 1096 1097 1098 1099 1100 1101 1102 1103 |
# File 'app/models/application.rb', line 1087
# Prepares every gear for a namespace change; aggregates per-gear output.
# @return [Hash] { :success => Boolean, :result_io => ResultIO }
def prepare_namespace_update(dns_service, new_ns, old_ns)
  updated = true
  result_io = ResultIO.new
  begin
    self.gears.each do |gear|
      gear_result_io = gear.prepare_namespace_update(dns_service, new_ns, old_ns)
      updated = false unless gear_result_io.exitcode == 0
      result_io.append gear_result_io
    end
  rescue Exception => e
    updated = false
    # NOTE(review): "e.message" restored — the extracted source showed "#{e.}".
    Rails.logger.debug "Exception caught updating namespace: #{e.message}"
    Rails.logger.debug e.backtrace
    result_io.append e.resultIO if e.respond_to?('resultIO') and e.resultIO
  end
  return { :success => updated, :result_io => result_io }
end
#process_cartridge_commands(result) ⇒ Object
1491 1492 1493 1494 1495 1496 1497 1498 1499 1500 1501 1502 1503 1504 1505 1506 1507 1508 1509 1510 1511 1512 1513 1514 1515 1516 1517 1518 1519 1520 1521 1522 1523 1524 1525 1526 1527 1528 1529 1530 1531 1532 1533 1534 1535 1536 1537 1538 1539 1540 1541 1542 1543 1544 1545 1546 1547 1548 1549 1550 1551 1552 1553 1554 1555 1556 1557 1558 1559 1560 1561 |
# File 'app/models/application.rb', line 1491
# Applies the cart_commands carried in +result+ (SSH key, env var and
# broker key changes): updates user/app state, then fans the gear-side work
# out as a single parallel remote job. Raises on any gear failure.
def process_cartridge_commands(result)
  commands = result.cart_commands
  self.ssh_keys = {} unless self.ssh_keys
  app_jobs = { 'add_ssh_keys' => [], 'remove_ssh_keys' => [], 'remove_env_vars' => [] }
  commands.each do |command_item|
    case command_item[:command]
    when "SYSTEM_SSH_KEY_ADD"
      key = command_item[:args][0]
      self.user.add_system_ssh_key(self.name, key)
    when "SYSTEM_SSH_KEY_REMOVE"
      self.user.remove_system_ssh_key(self.name)
    when "APP_SSH_KEY_ADD"
      key_name = command_item[:args][0]
      key = command_item[:args][1]
      self.ssh_keys[key_name] = key
      app_jobs['add_ssh_keys'] << [key_name, key]
    when "APP_SSH_KEY_REMOVE"
      key_name = command_item[:args][0]
      key = self.ssh_keys.delete(key_name)
      app_jobs['remove_ssh_keys'] << key unless key.nil?
    when "ENV_VAR_ADD"
      key = command_item[:args][0]
      value = command_item[:args][1]
      self.user.add_env_var(key, value)
    when "ENV_VAR_REMOVE"
      key = command_item[:args][0]
      self.user.remove_env_var(key)
    when "APP_ENV_VAR_REMOVE"
      key = command_item[:args][0]
      app_jobs['remove_env_vars'] << key unless key.nil?
    when "BROKER_KEY_ADD"
      iv, token = OpenShift::AuthService.instance.generate_broker_key(self)
      self.user.add_save_job('adds', 'broker_auth_keys', [self.uuid, iv, token])
    when "BROKER_KEY_REMOVE"
      self.user.add_save_job('removes', 'broker_auth_keys', [self.uuid])
    end
  end
  user.save if user.save_jobs

  handle = RemoteJob.create_parallel_job
  tag = ""
  RemoteJob.run_parallel_on_gears(self.gears, handle) do |exec_handle, gear|
    app_jobs.each do |action, value|
      case action
      when "remove_env_vars"
        value.each do |key|
          job = gear.env_var_job_remove(key)
          RemoteJob.add_parallel_job(exec_handle, tag, gear, job)
        end
      when "add_ssh_keys"
        value.each do |key_info|
          key_name, key = key_info
          job = gear.ssh_key_job_add(key, nil, key_name)
          RemoteJob.add_parallel_job(exec_handle, tag, gear, job)
        end
      when "remove_ssh_keys"
        value.each do |key|
          job = gear.ssh_key_job_remove(key, nil)
          RemoteJob.add_parallel_job(exec_handle, tag, gear, job)
        end
      end
    end
  end
  RemoteJob.get_parallel_run_results(handle) do |tag, gear, output, status|
    if status != 0
      raise OpenShift::NodeException.new("Error updating settings on gear: #{gear} with status: #{status} and output: #{output}", 143)
    end
  end
  commands.clear
end
#recreate_dns ⇒ Object
941 942 943 944 945 946 947 948 949 950 951 952 953 954 |
# File 'app/models/application.rb', line 941
# Re-points the application's DNS entry at the container's current public
# hostname and publishes; the DNS handle is always closed.
# @return [ResultIO]
def recreate_dns
  reply = ResultIO.new
  self.class.notify_observers(:before_recreate_dns, {:application => self, :reply => reply})
  dns = OpenShift::DnsService.instance
  begin
    public_hostname = self.container.get_public_hostname
    dns.modify_application(@name, @domain.namespace, public_hostname)
    dns.publish
  ensure
    dns.close
  end
  self.class.notify_observers(:after_recreate_dns, {:application => self, :reply => reply})
  reply
end
#reload(dependency = nil) ⇒ Object
Reload a particular dependency on all gears that host it.
657 658 659 660 661 662 663 664 665 666 667 668 669 670 671 672 673 674 |
# File 'app/models/application.rb', line 657
# Reload a particular dependency (default: the framework cartridge) on all
# gears that host it.
# @return [ResultIO]
def reload(dependency=nil)
  dependency = self.framework if dependency.nil?
  reply = ResultIO.new
  self.class.notify_observers(:before_reload, {:application => self, :reply => reply, :dependency => dependency})
  self.start_order.each do |comp_inst_name|
    comp_inst = self.comp_instance_map[comp_inst_name]
    next if !dependency.nil? && comp_inst.parent_cart_name != dependency

    group_inst = self.group_instance_map[comp_inst.group_instance_name]
    _succeeded, failed = run_on_gears(group_inst.gears, reply, false) do |gear, r|
      r.append gear.reload(comp_inst)
    end
    raise failed[0][:exception] if failed.length > 0
  end
  self.class.notify_observers(:after_reload, {:application => self, :reply => reply, :dependency => dependency})
  reply
end
#remove_alias(t_server_alias) ⇒ Object
1159 1160 1161 1162 1163 1164 1165 1166 1167 1168 1169 1170 1171 1172 1173 1174 1175 1176 1177 1178 |
# File 'app/models/application.rb', line 1159
# Removes a server alias from the application (container first, then the
# persisted alias list).
# @raise [OpenShift::UserException] if the alias is not registered
# @return [ResultIO]
def remove_alias(t_server_alias)
  server_alias = t_server_alias.downcase
  self.aliases = [] unless self.aliases
  reply = ResultIO.new
  begin
    reply.append self.container.remove_alias(self, self.gear, server_alias)
  rescue Exception => e
    # NOTE(review): "e.message" restored — the extracted source showed "debug e.".
    Rails.logger.debug e.message
    Rails.logger.debug e.backtrace.inspect
    raise
  ensure
    if self.aliases.include? server_alias
      self.aliases.delete(server_alias)
      self.save
    else
      raise OpenShift::UserException.new("Alias '#{server_alias}' does not exist for '#{@name}'", 255, reply)
    end
  end
  reply
end
#remove_dependency(dep) ⇒ Object
1207 1208 1209 1210 1211 1212 1213 1214 1215 1216 1217 1218 1219 1220 1221 1222 1223 1224 1225 1226 1227 1228 1229 1230 1231 1232 1233 1234 1235 1236 1237 1238 1239 1240 1241 1242 1243 1244 1245 1246 1247 1248 |
# File 'app/models/application.rb', line 1207
# Removes an embedded dependency: deconfigures its component on every gear,
# dropping gears left empty, then re-elaborates and saves.
# @raise [OpenShift::UserException] if not embedded, or required by scaling
# @return [ResultIO]
def remove_dependency(dep)
  reply = ResultIO.new
  self.class.notify_observers(:before_remove_dependency, {:application => self, :dependency => dep, :reply => reply})
  # NOTE(review): "embedded" restored on the next two lines — the identifier was
  # stripped by extraction ("self. = {} unless self."); confirm against upstream.
  self.embedded = {} unless self.embedded
  raise OpenShift::UserException.new("#{dep} not embedded in '#{@name}', try adding it first", 135) unless self.embedded.include? dep
  raise OpenShift::UserException.new("#{dep} is not allowed to be removed from '#{@name}'. It is a required dependency for a scalable application.", 137) if (self.scalable and self.proxy_cartridge == dep)
  remove_from_requires_feature(dep)
  elaborate_descriptor do |removed_component_instances|
    # remove unused components
    removed_component_instances.each do |comp_inst_name|
      comp_inst = self.comp_instance_map[comp_inst_name]
      next if comp_inst.parent_cart_name == self.name
      group_inst = self.group_instance_map[comp_inst.group_instance_name]
      s, f = run_on_gears(group_inst.gears, reply, false) do |gear, r|
        unless gear.configured_components.length == 1 && gear.configured_components.first == comp_inst.name
          reply.append gear.deconfigure(comp_inst)
          process_cartridge_commands(r)
        end
      end
      f.each do |failed_data|
        Rails.logger.debug("Failed to deconfigure cartridge #{comp_inst.parent_cart_name} on gear #{failed_data[:gear].server_identity}:#{failed_data[:gear].uuid}")
        # NOTE(review): ".message" restored — truncated in the extracted source.
        Rails.logger.debug("Exception #{failed_data[:exception].message}")
        Rails.logger.debug("#{failed_data[:exception].backtrace.inspect}")
      end
      run_on_gears(group_inst.gears, reply, false) do |gear, r|
        if gear.configured_components.empty? || (gear.configured_components.length == 1 && gear.configured_components.first == comp_inst.name)
          reply.append group_inst.remove_gear(gear)
        end
      end
      if f.length > 0
        raise Exception.new("Failed to remove #{dep} from application #{self.name}. Try again or report to OpenShift Support.")
      end
    end
  end
  self.save
  self.class.notify_observers(:after_remove_dependency, {:application => self, :dependency => dep, :reply => reply})
  reply
end
#remove_from_requires_feature(feature) ⇒ Object
162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 |
# File 'app/models/application.rb', line 162
# Removes a feature from the descriptor's requirements: drops connections
# that reference it and, for scalable apps, removes it from the proxy
# component's dependency list.
def remove_from_requires_feature(feature)
  prof = @profile_name_map[@default_profile]
  if prof.connection_name_map
    prof.connection_name_map.delete_if { |k, v| v.components[0].include? feature or v.components[1].include? feature }
  end
  if self.scalable
    # comp_name is always nil here, so this always resolves to "proxy".
    comp_name = "proxy" if comp_name.nil?
    prof = @profile_name_map[@default_profile]
    cinst = ComponentInstance::find_component_in_cart(prof, self, comp_name, self.get_name_prefix)
    # FIX: the original referenced an undefined local `result_io` here, which
    # would raise NameError instead of the intended NodeException.
    raise OpenShift::NodeException.new("Cannot find component '#{comp_name}' in app #{self.name}.", 135, ResultIO.new) if cinst.nil?
    comp, profile, cart = cinst.get_component_definition(self)
    raise OpenShift::UserException.new("#{feature} not embedded in '#{@name}', try adding it first", 135) if not comp.depends.include? feature
    comp.depends.delete(feature)
  else
    self.requires_feature.delete feature
  end
end
#restart(dependency = nil) ⇒ Object
Restart a particular dependency on all gears that host it.
637 638 639 640 641 642 643 644 645 646 647 648 649 650 651 652 653 |
# File 'app/models/application.rb', line 637
# Restart a particular dependency on all gears that host it.
# @return [ResultIO]
def restart(dependency=nil)
  reply = ResultIO.new
  self.class.notify_observers(:before_restart, {:application => self, :reply => reply, :dependency => dependency})
  self.start_order.each do |comp_inst_name|
    comp_inst = self.comp_instance_map[comp_inst_name]
    next if !dependency.nil? && comp_inst.parent_cart_name != dependency

    group_inst = self.group_instance_map[comp_inst.group_instance_name]
    _succeeded, failed = run_on_gears(group_inst.gears, reply, false) do |gear, r|
      r.append gear.restart(comp_inst)
    end
    raise failed[0][:exception] if failed.length > 0
  end
  self.class.notify_observers(:after_restart, {:application => self, :reply => reply, :dependency => dependency})
  reply
end
#save ⇒ Object
Saves the application object in the datastore
272 273 274 275 276 277 |
# File 'app/models/application.rb', line 272
# Saves the application object in the datastore, then resets the transient
# gear-count/usage tracking fields.
def save
  super(user.login)
  self.ngears = 0
  self.usage_records = nil
  self.destroyed_gears = []
end
#scaledown(comp_name = nil) ⇒ Object
409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 |
# File 'app/models/application.rb', line 409
# Removes one gear from the group instance hosting +comp_name+ (default
# "web"), deregistering its DNS entry first.
# @raise [OpenShift::UserException] for non-scalable apps or at minimum scale
# @return [ResultIO]
def scaledown(comp_name=nil)
  result_io = ResultIO.new
  raise OpenShift::UserException.new("Cannot scale a non-scalable application", 255, result_io) unless self.scalable

  comp_name = "web" if comp_name.nil?
  prof = @profile_name_map[@default_profile]
  cinst = ComponentInstance::find_component_in_cart(prof, self, comp_name, self.get_name_prefix)
  raise OpenShift::NodeException.new("Cannot find #{comp_name} in app #{self.name}.", 1, result_io) if cinst.nil?
  ginst = self.group_instance_map[cinst.group_instance_name]
  raise OpenShift::NodeException.new("Cannot find group #{cinst.group_instance_name} for #{comp_name} in app #{self.name}.", 1, result_io) if ginst.nil?
  # remove any gear out of this ginst
  raise OpenShift::UserException.new("Cannot scale below minimum gear requirements", 1, result_io) if ginst.gears.length <= ginst.min

  gear = ginst.gears.last
  dns = OpenShift::DnsService.instance
  begin
    dns.deregister_application(gear.name, @domain.namespace)
    dns.publish
  ensure
    dns.close
  end

  result_io.append ginst.remove_gear(gear)
  # inform anyone who needs to know that this gear is no more
  self.configure_dependencies
  self.execute_connections
  result_io
end
#scaleup(comp_name = nil) ⇒ Object
387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 |
# File 'app/models/application.rb', line 387
# Adds one gear to the group instance hosting +comp_name+ (default "web"),
# enforcing group and account gear limits.
# @return [ResultIO]
def scaleup(comp_name=nil)
  result_io = ResultIO.new
  raise OpenShift::UserException.new("Cannot scale a non-scalable application", 255, result_io) unless self.scalable

  comp_name = "web" if comp_name.nil?
  prof = @profile_name_map[@default_profile]
  cinst = ComponentInstance::find_component_in_cart(prof, self, comp_name, self.get_name_prefix)
  raise OpenShift::NodeException.new("Cannot find #{comp_name} in app #{self.name}.", 1, result_io) if cinst.nil?
  ginst = self.group_instance_map[cinst.group_instance_name]
  raise OpenShift::NodeException.new("Cannot find group #{cinst.group_instance_name} for #{comp_name} in app #{self.name}.", 1, result_io) if ginst.nil?
  raise OpenShift::UserException.new("Cannot scale up beyond maximum gear limit in app #{self.name}.", 104, result_io) if ginst.gears.length >= ginst.max and ginst.max > 0
  raise OpenShift::UserException.new("Cannot scale up beyond gear limit '#{user.max_gears}'", 104, result_io) if user.consumed_gears >= user.max_gears

  result, new_gear = ginst.add_gear(self)
  result_io.append result
  result_io.append self.configure_dependencies
  self.execute_connections
  result_io
end
#scaling_limits(dependency = nil) ⇒ Object
1279 1280 1281 1282 1283 1284 1285 1286 1287 1288 1289 1290 1291 1292 1293 |
# File 'app/models/application.rb', line 1279
# Returns the [min, max] scaling limits of the group hosting +dependency+
# (default: "web" for scalable apps, the framework cartridge otherwise).
def scaling_limits(dependency=nil)
  dependency ||= (self.scalable ? "web" : self.framework)
  prof = @profile_name_map[@default_profile]
  cinst = ComponentInstance::find_component_in_cart(prof, self, dependency, self.get_name_prefix)
  raise OpenShift::NodeException.new("Cannot find #{dependency} component in app #{self.name}.", 135, ResultIO.new) if cinst.nil?

  ginst = self.group_instance_map[cinst.group_instance_name]
  return ginst.min, ginst.max
end
#set_embedded_cart_info(cart_name, info) ⇒ Object
Provide a way of updating the component information for a given cartridge
1344 1345 1346 1347 1348 |
# File 'app/models/application.rb', line 1344
# Provide a way of updating the component information for a given cartridge.
# NOTE(review): the method name was stripped by extraction ("def (cart_name, info)");
# reconstructed from the documented signature "#set_embedded_cart_info".
def set_embedded_cart_info(cart_name, info)
  self.comp_instance_map.values.each do |comp_inst|
    comp_inst.cart_data = [info] if cart_name == comp_inst.parent_cart_name
  end
end
#set_user_max(cart_group_map, max_scale) ⇒ Object
1053 1054 1055 1056 1057 1058 1059 1060 1061 1062 1063 1064 1065 1066 1067 1068 1069 1070 1071 1072 1073 1074 1075 1076 1077 1078 1079 1080 1081 1082 1083 1084 1085 |
# File 'app/models/application.rb', line 1053
# Distributes a user-requested maximum scale across the given groups,
# validating it against the supported range first. -1 means unlimited
# (represented internally as 1000000 for arithmetic).
def set_user_max(cart_group_map, max_scale)
  return if not max_scale
  cart_current_min, cart_current_max, sup_min, sup_max = get_user_min_max(cart_group_map)
  sup_max = 1000000 if sup_max == -1
  max_scale_int = Integer(max_scale)
  max_scale_int = 1000000 if max_scale_int == -1
  if (max_scale_int and (max_scale_int > sup_max or max_scale_int < cart_current_min))
    raise OpenShift::UserException.new("Invalid scales_to factor #{max_scale} provided. Value out of allowed range ( #{cart_current_min} : #{sup_max==1000000 ? -1 : sup_max} ).", 168)
  end
  target_max = Integer(max_scale)
  # NOTE(review): iterating keys with a two-param block leaves the second
  # param nil; preserved as in the original.
  cart_group_map.keys.each { |group_name, component_instances|
    gi = self.group_instance_map[group_name]
    if target_max == -1
      next if gi.supported_max != -1
      gi.max = target_max
      break
    end
    if gi.supported_max == -1 or ((gi.supported_max - gi.min) > target_max)
      rest_total = 0
      cart_group_map.keys.each { |other_group_name|
        next if other_group_name == group_name
        other_gi = self.group_instance_map[other_group_name]
        if other_gi.max == -1
          # FIXME(review): comparison below has no effect; an assignment
          # (other_gi.max = other_gi.min) was probably intended. Preserved as-is.
          other_gi.max == other_gi.min
        end
        rest_total += other_gi.max
      }
      gi.max = (target_max - rest_total)
      break
    end
  }
  self.save
end
#set_user_min(cart_group_map, min_scale) ⇒ Object
1010 1011 1012 1013 1014 1015 1016 1017 1018 1019 1020 1021 1022 1023 1024 1025 1026 1027 1028 1029 1030 1031 1032 1033 1034 1035 1036 1037 1038 1039 1040 1041 1042 1043 1044 1045 1046 1047 1048 1049 1050 1051 |
# File 'app/models/application.rb', line 1010
# Distributes a user-requested minimum scale across the given groups,
# raising if the value is outside the supported range or cannot be fully
# distributed.
def set_user_min(cart_group_map, min_scale)
  return if not min_scale
  cart_current_min, cart_current_max, sup_min, sup_max = get_user_min_max(cart_group_map)
  cart_current_max = 1000000 if cart_current_max == -1
  if (Integer(min_scale) < sup_min or Integer(min_scale) > cart_current_max)
    raise OpenShift::UserException.new("Invalid scales_from factor #{min_scale} provided. Value out of allowed range ( #{sup_min} : #{cart_current_max==1000000 ? -1 : cart_current_max} ).", 168)
  end
  # Walk the groups, absorbing the delta into each group's min until spent.
  target_min = Integer(min_scale) - cart_current_min
  iter = cart_group_map.keys.each
  while target_min != 0 do
    begin
      group_name = iter.next
      break if group_name.nil?
    rescue Exception => e
      break
    end
    ginst = self.group_instance_map[group_name]
    ginst_max = ginst.max
    ginst_max = 1000000 if ginst.max == -1
    if target_min > 0
      if (ginst_max - ginst.min) > target_min
        ginst.min += target_min
        target_min = 0
      else
        target_min -= (ginst_max - ginst.min)
        ginst.min = ginst_max
      end
    else
      if (ginst.supported_min - ginst.min) < target_min
        ginst.min += target_min
        target_min = 0
      else
        target_min += (ginst.min - ginst.supported_min)
        ginst.min = ginst.supported_min
      end
    end
  end
  self.save
  if target_min != 0
    raise OpenShift::UserException.new("Could not completely distribute scales_from to all groups. Value constrained to #{Integer(min_scale)-target_min}", 169)
  end
end
#set_user_min_max(cart_group_map, min_scale, max_scale) ⇒ Object
979 980 981 982 983 984 985 986 987 988 989 990 991 992 993 994 995 996 997 998 999 1000 1001 1002 1003 1004 1005 1006 1007 1008 |
# File 'app/models/application.rb', line 979
# Applies user min/max scaling in an order that keeps the intermediate
# state valid, then reloads the proxy component on its gears for scalable apps.
def set_user_min_max(cart_group_map, min_scale, max_scale)
  if min_scale and max_scale and Integer(min_scale) > Integer(max_scale) and Integer(max_scale) != -1
    #raise OpenShift::UserException.new("Invalid scaling factors provided. Minimum (#{min_scale}) should always be less than maximum (#{max_scale}).", 170)
    min_scale, max_scale = max_scale, min_scale
  end
  cart_current_min, cart_current_max, sup_min, sup_max = get_user_min_max(cart_group_map)
  if min_scale and Integer(min_scale) - cart_current_min < 0
    # set min first
    set_user_min(cart_group_map, min_scale)
    set_user_max(cart_group_map, max_scale)
  else
    set_user_max(cart_group_map, max_scale)
    set_user_min(cart_group_map, min_scale)
  end
  if self.scalable
    prof = @profile_name_map[@default_profile]
    cinst = ComponentInstance::find_component_in_cart(prof, self, self.proxy_cartridge, self.get_name_prefix)
    if cinst
      group_inst = self.group_instance_map[cinst.group_instance_name]
      reply = ResultIO.new
      s, f = run_on_gears(group_inst.gears, reply, false) do |gear, r|
        gear.reload(cinst)
      end
    end
  end
end
#show_port(dependency = nil) ⇒ Object
815 816 817 818 819 820 821 822 823 824 825 826 827 828 829 830 831 832 833 834 835 836 837 838 839 |
# File 'app/models/application.rb', line 815
# Invokes show_port for a particular dependency on all gears that host it.
# @return [ResultIO]
def show_port(dependency=nil)
  reply = ResultIO.new
  self.comp_instance_map.each do |comp_inst_name, comp_inst|
    next if !dependency.nil? && comp_inst.parent_cart_name != dependency
    next if comp_inst.name == "@@app"
    next if comp_inst.parent_cart_name == self.name
    next if comp_inst.parent_cart_name == self.proxy_cartridge

    Rails.logger.debug( comp_inst.inspect )
    Rails.logger.debug( "\n" )
    group_inst = self.group_instance_map[comp_inst.group_instance_name]
    _succeeded, failed = run_on_gears(group_inst.gears, reply, false) do |gear, r|
      r.append gear.show_port(comp_inst)
    end
    # Not all cartridges will have this hook (exit code 127 = not found).
    failed.each do |fail|
      next if fail[:exception].resultIO.exitcode == 127
      raise fail[:exception]
    end
  end
  reply
end
#show_state ⇒ Object
Get the state of the application on all gears.
842 843 844 845 846 847 848 849 850 851 852 853 854 855 856 857 858 859 |
# File 'app/models/application.rb', line 842
# Get the state of the application on all gears, in parallel.
# @return [Hash] gear id => state string ('unknown' on error)
def show_state()
  gear_states = {}
  tag = ""
  handle = RemoteJob.create_parallel_job
  RemoteJob.run_parallel_on_gears(self.gears, handle) do |exec_handle, gear|
    RemoteJob.add_parallel_job(exec_handle, tag, gear, gear.app_state_job_show())
  end
  RemoteJob.get_parallel_run_results(handle) do |tag, gear, output, status|
    if status != 0
      Rails.logger.error("Error getting application state from gear: '#{gear}' with status: '#{status}' and output: #{output}")
      gear_states[gear] = 'unknown'
    else
      gear_states[gear] = output
    end
  end
  gear_states
end
#start(dependency = nil, stop_on_failure = true) ⇒ Object
Start a particular dependency on all gears that host it. If unable to start a component, the application is stopped on all gears
565 566 567 568 569 570 571 572 573 574 575 576 577 578 579 580 581 582 583 584 585 586 |
# File 'app/models/application.rb', line 565
# Start a particular dependency on all gears that host it. If unable to
# start a component, the application is stopped on all gears.
# @return [ResultIO]
def start(dependency=nil, stop_on_failure=true)
  reply = ResultIO.new
  self.class.notify_observers(:before_start, {:application => self, :reply => reply, :dependency => dependency})
  self.start_order.each do |comp_inst_name|
    comp_inst = self.comp_instance_map[comp_inst_name]
    next if !dependency.nil? and (comp_inst.parent_cart_name != dependency)
    next if comp_inst.parent_cart_name == self.name
    begin
      group_inst = self.group_instance_map[comp_inst.group_instance_name]
      run_on_gears(group_inst.gears, reply) do |gear, r|
        r.append gear.start(comp_inst)
      end
    rescue Exception => e
      # NOTE(review): "e.message[:exception]" restored — the extracted source
      # showed "e.[:exception]"; run_on_gears raises with a Hash message here.
      gear_exception = e.message[:exception]
      self.stop(dependency, false, false) if stop_on_failure
      raise gear_exception
    end
  end
  self.class.notify_observers(:after_start, {:application => self, :reply => reply, :dependency => dependency})
  reply
end
#status(dependency = nil, ret_reply = true) ⇒ Object
Retrieves status for a particular dependency on all gears that host it.
678 679 680 681 682 683 684 685 686 687 688 689 690 691 692 693 694 695 696 697 698 699 700 701 702 703 704 705 706 707 708 709 710 711 712 713 |
# File 'app/models/application.rb', line 678
# Retrieves status for a particular dependency on all gears that host it.
# @param ret_reply [Boolean] true => return a ResultIO, false => an Array of Hashes
def status(dependency=nil, ret_reply=true)
  reply = ResultIO.new
  app_status = []
  tag = ""
  handle = RemoteJob.create_parallel_job
  self.comp_instance_map.each do |comp_inst_name, comp_inst|
    next if !dependency.nil? && comp_inst.parent_cart_name != dependency
    next if comp_inst.parent_cart_name == self.name

    group_inst = self.group_instance_map[comp_inst.group_instance_name]
    group_inst.gears.each do |gear|
      RemoteJob.add_parallel_job(handle, tag, gear, gear.status_job(comp_inst))
    end
  end
  if RemoteJob.has_jobs(handle)
    RemoteJob.run_parallel_on_gears([], handle) { }
    RemoteJob.get_parallel_run_results(handle) do |tag, gear, output, rc|
      if rc != 0
        Rails.logger.error "Error: Getting '#{dependency}' status from gear '#{gear}', errcode: '#{rc}' and output: #{output}"
        raise OpenShift::UserException.new("Error: Getting '#{dependency}' status from gear '#{gear}', errcode: '#{rc}' and output: #{output}", 143)
      else
        r = ResultIO.new
        r.resultIO << "#{output}\n"
        reply.append r
        app_status.push({"gear_id" => gear, "message" => output}) unless ret_reply
      end
    end
  end
  ret_reply ? reply : app_status
end
#stop(dependency = nil, force_stop_on_failure = true, throw_exception_on_failure = true) ⇒ Object
Stop a particular dependency on all gears that host it.
592 593 594 595 596 597 598 599 600 601 602 603 604 605 606 607 608 609 610 611 612 |
# File 'app/models/application.rb', line 592

# Stop a particular dependency on all gears that host it, walking the
# components in reverse start order.
#
# @param dependency [String, nil] cartridge name to stop; nil stops every component
# @param force_stop_on_failure [Boolean] escalate to force_stop when a gear fails
# @param throw_exception_on_failure [Boolean] re-raise the first gear failure
# @return [ResultIO] aggregated output from all gears
def stop(dependency=nil, force_stop_on_failure=true, throw_exception_on_failure=true)
  reply = ResultIO.new
  self.class.notify_observers(:before_stop, {:application => self, :reply => reply, :dependency => dependency})
  self.start_order.reverse_each do |name|
    instance = self.comp_instance_map[name]
    # Only the requested cartridge's components; skip the app's own placeholder.
    next if dependency && instance.parent_cart_name != dependency
    next if instance.parent_cart_name == self.name
    gears = self.group_instance_map[instance.group_instance_name].gears
    _successes, failures = run_on_gears(gears, reply, false) do |gear, r|
      r.append gear.stop(instance)
    end
    unless failures.empty?
      self.force_stop(dependency, false) if force_stop_on_failure
      raise failures.first[:exception] if throw_exception_on_failure
    end
  end
  self.class.notify_observers(:after_stop, {:application => self, :reply => reply, :dependency => dependency})
  reply
end
#system_messages(dependency = nil) ⇒ Object
Invokes system_messages for a particular dependency on all gears that host it.
752 753 754 755 756 757 758 759 760 761 762 763 764 765 |
# File 'app/models/application.rb', line 752

# Invokes system_messages for a particular dependency on all gears that host it.
#
# NOTE: the extracted source had lost the identifier after `def` and after
# `gear.`; restored as `system_messages` per the documented signature.
#
# @param dependency [String, nil] cartridge name; nil invokes on every component
# @return [ResultIO] aggregated output from all gears
# @raise the first gear failure, when any gear fails
def system_messages(dependency=nil)
  reply = ResultIO.new
  self.comp_instance_map.each do |comp_inst_name, comp_inst|
    next if !dependency.nil? and (comp_inst.parent_cart_name != dependency)
    group_inst = self.group_instance_map[comp_inst.group_instance_name]
    s,f = run_on_gears(group_inst.gears, reply, false) do |gear, r|
      r.append gear.system_messages(comp_inst)
    end
    raise f[0][:exception] if(f.length > 0)
  end
  reply
end
#template_scalable_app(app_name, framework) ⇒ Object
134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 |
# File 'app/models/application.rb', line 134

# Builds the YAML application descriptor used to elaborate a scalable app:
# a proxy group (framework + haproxy-1.4) and a web group (framework only),
# wired together with auto-scale and proxy-web connections.
#
# NOTE: the extracted source had this descriptor collapsed onto one line,
# destroying the newlines/indentation YAML requires; reconstructed with a
# squiggly heredoc.
#
# @param app_name [String] application name placed in the descriptor
# @param framework [String] framework cartridge name, e.g. "php-5.3"
# @return [String] a YAML descriptor document
def template_scalable_app(app_name, framework)
  <<~DESCRIPTOR
    Name: #{app_name}
    Components:
      proxy:
        Dependencies: [#{framework}, "haproxy-1.4"]
      web:
        Dependencies: [#{framework}]
    Groups:
      proxy:
        Components:
          proxy: proxy
      web:
        Components:
          web: web
    GroupOverrides:
      - ["proxy", "proxy/haproxy-1.4"]
      - ["proxy", "proxy/#{framework}"]
      - ["web", "web/#{framework}"]
    Connections:
      auto-scale:
        Components: ["proxy/haproxy-1.4", "web/#{framework}"]
      proxy-web:
        Components: ["proxy/#{framework}", "web/#{framework}"]
    Configure-Order: ["proxy/#{framework}", "proxy/haproxy-1.4"]
  DESCRIPTOR
end
#threaddump(dependency = nil) ⇒ Object
Invokes threaddump for a particular dependency on all gears that host it.
735 736 737 738 739 740 741 742 743 744 745 746 747 748 |
# File 'app/models/application.rb', line 735

# Invokes threaddump for a particular dependency on all gears that host it.
#
# @param dependency [String, nil] cartridge name; nil invokes on every component
# @return [ResultIO] aggregated output from all gears
# @raise the first gear failure, when any gear fails
def threaddump(dependency=nil)
  reply = ResultIO.new
  self.comp_instance_map.each do |name, instance|
    next if dependency && instance.parent_cart_name != dependency
    gears = self.group_instance_map[instance.group_instance_name].gears
    _successes, failures = run_on_gears(gears, reply, false) do |gear, r|
      r.append gear.threaddump(instance)
    end
    raise failures.first[:exception] unless failures.empty?
  end
  reply
end
#tidy(dependency = nil) ⇒ Object
Invokes tidy for a particular dependency on all gears that host it.
717 718 719 720 721 722 723 724 725 726 727 728 729 730 731 |
# File 'app/models/application.rb', line 717

# Invokes tidy for a particular dependency on all gears that host it.
# Defaults to the application's framework cartridge when none is given.
#
# @param dependency [String, nil] cartridge name; defaults to self.framework
# @return [ResultIO] aggregated output from all gears
# @raise the first gear failure, when any gear fails
def tidy(dependency=nil)
  dependency ||= self.framework
  reply = ResultIO.new
  self.comp_instance_map.each do |name, instance|
    next if dependency && instance.parent_cart_name != dependency
    gears = self.group_instance_map[instance.group_instance_name].gears
    _successes, failures = run_on_gears(gears, reply, false) do |gear, r|
      r.append gear.tidy(instance)
    end
    raise failures.first[:exception] unless failures.empty?
  end
  reply
end
#track_usage(gear, event, usage_type = UsageRecord::USAGE_TYPES[:gear_usage]) ⇒ Object
1563 1564 1565 1566 1567 1568 1569 1570 1571 1572 1573 1574 1575 1576 1577 1578 1579 1580 1581 1582 1583 1584 1585 1586 1587 1588 1589 1590 1591 1592 1593 1594 1595 1596 1597 |
# File 'app/models/application.rb', line 1563

# Records a usage event for a gear, to the datastore and/or syslog depending
# on Rails.configuration.usage_tracking flags.
#
# @param gear [Gear] the gear the event applies to
# @param event [Object] usage event identifier
# @param usage_type usage category; one of UsageRecord::USAGE_TYPES
def track_usage(gear, event, usage_type=UsageRecord::USAGE_TYPES[:gear_usage])
  if Rails.configuration.usage_tracking[:datastore_enabled]
    now = Time.now.utc
    uuid = OpenShift::Model.gen_uuid
    self.usage_records = [] unless usage_records
    usage_record = UsageRecord.new(event, user, now, uuid, usage_type)
    case usage_type
    when UsageRecord::USAGE_TYPES[:gear_usage]
      usage_record.gear_uuid = gear.uuid
      usage_record.gear_size = gear.node_profile
    when UsageRecord::USAGE_TYPES[:addtl_fs_gb]
      usage_record.gear_uuid = gear.uuid
      usage_record.addtl_fs_gb = gear.group_instance.addtl_fs_gb
    end
    self.usage_records << usage_record

    self.class.notify_observers(:track_usage, {:gear_uuid => gear.uuid, :login => gear.app.user.login, :event => event, :time => now, :uuid => uuid, :usage_type => usage_type, :gear_size => gear.node_profile, :addtl_fs_gb => gear.group_instance.addtl_fs_gb})
  end
  if Rails.configuration.usage_tracking[:syslog_enabled]
    usage_string = "User: #{user.login} Event: #{event}"
    case usage_type
    when UsageRecord::USAGE_TYPES[:gear_usage]
      usage_string += " Gear: #{gear.uuid} Gear Size: #{gear.node_profile}"
    when UsageRecord::USAGE_TYPES[:addtl_fs_gb]
      usage_string += " Gear: #{gear.uuid} Addtl File System GB: #{gear.group_instance.addtl_fs_gb}"
    end
    begin
      Syslog.open('openshift_usage', Syslog::LOG_PID) { |s| s.notice usage_string }
    rescue Exception => e
      # Can't fail because of a secondary logging error
      # (extraction had dropped the identifier after `e.`; restored as e.message)
      Rails.logger.error e.message
      Rails.logger.error e.backtrace
    end
  end
end
#web_cart ⇒ Object
379 380 381 |
# File 'app/models/application.rb', line 379

# The application's web cartridge is its framework cartridge.
#
# @return [Object] whatever #framework returns
def web_cart
  framework
end