Class: XRBP::Model::Node
- Extended by:
- Base::ClassMethods
- Defined in:
- lib/xrbp/model/node.rb
Constant Summary
DEFAULT_CRAWL_PORT = 51235
Instance Attribute Summary
-
#addr ⇒ Object
Returns the value of attribute addr.
-
#ip ⇒ Object
Returns the value of attribute ip.
-
#ledgers ⇒ Object
Returns the value of attribute ledgers.
-
#port ⇒ Object
Returns the value of attribute port.
-
#type ⇒ Object
Returns the value of attribute type.
-
#uptime ⇒ Object
Returns the value of attribute uptime.
-
#version ⇒ Object
Returns the value of attribute version.
Attributes included from Base::ClassMethods
Attributes inherited from Base
Class Method Summary
-
.crawl(start, opts = {}) ⇒ Object
Crawl nodes via WebClient::Connection.
-
.from_peer(p) ⇒ Node
Return new node from the specified peer object.
-
.parse_url(url) ⇒ Node
Return new node from the specified url.
Instance Method Summary
- #==(o) ⇒ Object
-
#complete_ledgers(opts = {}) ⇒ Object
Retrieve ledgers which this server has.
-
#id ⇒ Object
Return unique node id.
-
#server_info(opts = {}, &bl) ⇒ Object
Retrieve server info via WebSocket::Connection.
-
#url ⇒ Object
Return node url.
-
#valid? ⇒ Boolean
Return bool indicating if this node is valid for crawling.
Methods included from Base::ClassMethods
Methods inherited from Base
#full_opts, #initialize, #set_opts
Constructor Details
This class inherits a constructor from XRBP::Model::Base
Instance Attribute Details
#addr ⇒ Object
Returns the value of attribute addr.
# File 'lib/xrbp/model/node.rb', line 14

def addr
  @addr
end
#ip ⇒ Object
Returns the value of attribute ip.
# File 'lib/xrbp/model/node.rb', line 13

def ip
  @ip
end
#ledgers ⇒ Object
Returns the value of attribute ledgers.
# File 'lib/xrbp/model/node.rb', line 14

def ledgers
  @ledgers
end
#port ⇒ Object
Returns the value of attribute port.
# File 'lib/xrbp/model/node.rb', line 13

def port
  @port
end
#type ⇒ Object
Returns the value of attribute type.
# File 'lib/xrbp/model/node.rb', line 14

def type
  @type
end
#uptime ⇒ Object
Returns the value of attribute uptime.
# File 'lib/xrbp/model/node.rb', line 14

def uptime
  @uptime
end
#version ⇒ Object
Returns the value of attribute version.
# File 'lib/xrbp/model/node.rb', line 14

def version
  @version
end
Class Method Details
.crawl(start, opts = {}) ⇒ Object
Crawl nodes via WebClient::Connection
# File 'lib/xrbp/model/node.rb', line 86

def self.crawl(start, opts={})
  set_opts(opts)

  delay = opts[:delay] || 1

  queue = Array.new
  queue << start

  connection.add_plugin :result_parser     unless connection.plugin?(:result_parser)
  connection.add_plugin Parsers::NodePeers unless connection.plugin?(Parsers::NodePeers)

  connection.ssl_verify_peer = false
  connection.ssl_verify_host = false

  until connection.force_quit?
    node = queue.shift
    node = parse_url node unless node.is_a?(Node)
    connection.emit :precrawl, node

    connection.url = node.url
    peers = connection.perform
    if peers.nil? || peers.empty?
      queue << node
      connection.emit :crawlerr, node
      connection.rsleep(delay) unless connection.force_quit?
      next
    end

    connection.emit :peers, node, peers

    peers.each { |peer|
      break if connection.force_quit?

      peer = Node.from_peer peer
      next unless peer.valid? # skip unless valid

      connection.emit :peer, node, peer
      queue << peer unless queue.include?(peer)
    }

    queue << node
    connection.emit :postcrawl, node
    connection.rsleep(delay) unless connection.force_quit?
  end
end
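For example, a crawl can be seeded with any rippled peer's crawl endpoint; the loop then walks the peer graph until the connection is told to force-quit, emitting :precrawl, :peers, :peer, :crawlerr, and :postcrawl events on the underlying connection along the way. A minimal sketch (the seed host below is only an illustrative public rippled server):

require 'xrbp'

# Start from one node's /crawl endpoint and throttle requests
# between nodes with the :delay option (seconds).
XRBP::Model::Node.crawl("https://s1.ripple.com:51235/crawl", :delay => 2)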
.from_peer(p) ⇒ Node
Return new node from the specified peer object
# File 'lib/xrbp/model/node.rb', line 65

def self.from_peer(p)
  n = new

  n.addr    = p["public_key"]
  n.ip      = p["ip"]&.gsub("::ffff:", "")
  n.port    = p["port"] || DEFAULT_CRAWL_PORT
  n.version = p["version"].split("-").last
  n.uptime  = p["uptime"]
  n.type    = p["type"]
  n.ledgers = p["complete_ledgers"]

  n
end
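The peer hash is expected to look like one entry of a rippled /crawl response; the keys read above are "public_key", "ip", "port", "version", "uptime", "type", and "complete_ledgers". A sketch with illustrative values:

peer = {
  "public_key"       => "n9K...",            # placeholder public key
  "ip"               => "::ffff:203.0.113.7",
  "port"             => 51235,
  "version"          => "rippled-1.9.4",
  "uptime"           => 12345,
  "type"             => "out",
  "complete_ledgers" => "32570-75443678"
}

node = XRBP::Model::Node.from_peer(peer)
node.ip      # => "203.0.113.7" (IPv4-mapped "::ffff:" prefix stripped)
node.version # => "1.9.4"       (text before the last "-" dropped)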
.parse_url(url) ⇒ Node
Return new node from the specified url
# File 'lib/xrbp/model/node.rb', line 51

def self.parse_url(url)
  n = new

  uri = URI.parse(url)
  n.ip   = Resolv.getaddress(uri.host)
  n.port = uri.port

  n
end
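For example, the seed URL passed to .crawl is converted this way: the host is resolved to an address via Resolv and the port is taken from the URL (hostname below is illustrative):

node = XRBP::Model::Node.parse_url("https://s1.ripple.com:51235/crawl")
node.port # => 51235
node.ip   # => resolved address of s1.ripple.com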
Instance Method Details
#==(o) ⇒ Object
# File 'lib/xrbp/model/node.rb', line 43

def ==(o)
  ip == o.ip && port == o.port
end
#complete_ledgers(opts = {}) ⇒ Object
Retrieve ledgers which this server has
# File 'lib/xrbp/model/node.rb', line 138

def complete_ledgers(opts={})
  server_info(opts)["result"]["info"]["complete_ledgers"].split("-").collect { |l| l.to_i }
end
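This assumes the rippled "complete_ledgers" field is a single contiguous "first-last" range string; for example:

# Given a server_info payload whose info section contains
# "complete_ledgers" => "32570-75443678":
node.complete_ledgers # => [32570, 75443678]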
#id ⇒ Object
Return unique node id
# File 'lib/xrbp/model/node.rb', line 17

def id
  "#{ip}:#{port}"
end
#server_info(opts = {}, &bl) ⇒ Object
Retrieve server info via WebSocket::Connection
# File 'lib/xrbp/model/node.rb', line 132

def server_info(opts={}, &bl)
  set_opts(opts)
  connection.cmd(WebSocket::Cmds::ServerInfo.new, &bl)
end
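A short sketch, assuming the options (or an earlier set_opts call) already point the model's WebSocket connection at a reachable rippled endpoint; the optional block is simply forwarded to connection.cmd:

info = node.server_info
info["result"]["info"]["complete_ledgers"] # e.g. "32570-75443678"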
#url ⇒ Object
Return node url
# File 'lib/xrbp/model/node.rb', line 22

def url
  "https://#{ip}:#{port}/crawl"
end
#valid? ⇒ Boolean
Return bool indicating if this node is valid for crawling
# File 'lib/xrbp/model/node.rb', line 27

def valid?
  return false unless ip && port

  # ensure no parsing errs
  begin
    # FIXME URI.parse is limiting our ability to traverse entire node-set,
    #       some nodes are represented as IPv6 addresses which is throwing
    #       things off.
    URI.parse(url)
  rescue
    false
  end

  true
end
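For example (address illustrative):

n = XRBP::Model::Node.new
n.valid?   # => false, no ip/port set yet

n.ip   = "203.0.113.7"
n.port = 51235
n.valid?   # => true, crawl URL parses cleanly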