Class: LogStash::Outputs::ElasticSearch::HttpClient::Pool
Inherits: Object
Defined in: lib/logstash/outputs/elasticsearch/http_client/pool.rb
Defined Under Namespace
Classes: BadResponseCodeError, HostUnreachableError, NoConnectionAvailableError
Constant Summary
- ROOT_URI_PATH = '/'.freeze
- LICENSE_PATH = '/_license'.freeze
- VERSION_7_TO_7_14 = ::Gem::Requirement.new([">= 7.0.0", "< 7.14.0"])
- DEFAULT_OPTIONS =
    {
      :healthcheck_path => ROOT_URI_PATH,
      :sniffing_path => "/_nodes/http",
      :bulk_path => "/_bulk",
      :scheme => 'http',
      :resurrect_delay => 5,
      :sniffing => false,
      :sniffer_delay => 10,
    }.freeze
- BUILD_FLAVOR_SERVERLESS = 'serverless'.freeze
- ELASTIC_API_VERSION = "Elastic-Api-Version".freeze
- DEFAULT_EAV_HEADER = { ELASTIC_API_VERSION => "2023-10-31" }.freeze
- ES1_SNIFF_RE_URL = /\[([^\/]*)?\/?([^:]*):([0-9]+)\]/
- ES2_AND_ABOVE_SNIFF_RE_URL = /([^\/]*)?\/?([^:]*):([0-9]+)/
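These defaults are merged with caller-supplied options in #initialize, with caller values winning. A minimal sketch of that merge using plain Ruby hashes (the user_options hash here is hypothetical):

user_options = { :sniffing => true, :resurrect_delay => 1 }

defaults = {
  :healthcheck_path => '/', :sniffing_path => "/_nodes/http", :bulk_path => "/_bulk",
  :scheme => 'http', :resurrect_delay => 5, :sniffing => false, :sniffer_delay => 10,
}.freeze

effective = defaults.merge(user_options)
effective[:sniffing]       # => true (overridden by the caller)
effective[:sniffer_delay]  # => 10   (default preserved)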
Instance Attribute Summary
- #adapter ⇒ Object (readonly): Returns the value of attribute adapter.
- #bulk_path ⇒ Object (readonly): Returns the value of attribute bulk_path.
- #healthcheck_path ⇒ Object (readonly): Returns the value of attribute healthcheck_path.
- #license_checker ⇒ Object (readonly): license_checker is used by the pool specs.
- #logger ⇒ Object (readonly): Returns the value of attribute logger.
- #resurrect_delay ⇒ Object (readonly): Returns the value of attribute resurrect_delay.
- #sniffer_delay ⇒ Object (readonly): Returns the value of attribute sniffer_delay.
- #sniffing ⇒ Object (readonly): Returns the value of attribute sniffing.
- #sniffing_path ⇒ Object (readonly): Returns the value of attribute sniffing_path.
Instance Method Summary
- #add_url(url) ⇒ Object
- #address_str_to_uri(addr_str) ⇒ Object
- #alive_urls_count ⇒ Object
- #check_sniff ⇒ Object: Sniffs and returns the results.
- #close ⇒ Object
- #empty_url_meta ⇒ Object
- #get_connection ⇒ Object
- #get_license(url) ⇒ Hash: Retrieve ES node license information.
- #get_root_path(url, params = {}) ⇒ Object
- #health_check_request(url) ⇒ Object
- #healthcheck!(register_phase = true) ⇒ Object
- #in_use_connections ⇒ Object
- #initialize(logger, adapter, initial_urls = [], options = {}) ⇒ Pool (constructor): A new instance of Pool.
- #last_es_version ⇒ Object
- #major_version(version_string) ⇒ Object
- #mark_dead(url, error) ⇒ Object
- #maximum_seen_major_version ⇒ Object
- #normalize_url(uri) ⇒ Object
- #perform_request(method, path, params = {}, body = nil) ⇒ Object
- #perform_request_to_url(url, method, path, params = {}, body = nil) ⇒ Object
- #remove_url(url) ⇒ Object
- #resurrectionist_alive? ⇒ Boolean
- #return_connection(url) ⇒ Object
- #serverless? ⇒ Boolean
- #size ⇒ Object
- #sniff(nodes) ⇒ Object
- #sniff! ⇒ Object: Sniffs the cluster then updates the internal URLs.
- #sniffer_alive? ⇒ Boolean
- #start ⇒ Object
- #start_resurrectionist ⇒ Object
- #start_sniffer ⇒ Object
- #stop_resurrectionist ⇒ Object
- #stop_sniffer ⇒ Object
- #test_serverless_connection(url, root_response) ⇒ Object
- #until_stopped(task_name, delay) ⇒ Object
- #update_initial_urls ⇒ Object
- #update_urls(new_urls) ⇒ Object
- #url_info ⇒ Object
- #url_meta(url) ⇒ Object
- #urls ⇒ Object
- #wait_for_in_use_connections ⇒ Object
- #with_connection ⇒ Object
Constructor Details
#initialize(logger, adapter, initial_urls = [], options = {}) ⇒ Pool
Returns a new instance of Pool.
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 71

def initialize(logger, adapter, initial_urls=[], options={})
  @logger = logger
  @adapter = adapter
  @metric = options[:metric]
  @initial_urls = initial_urls

  raise ArgumentError, "No URL Normalizer specified!" unless options[:url_normalizer]
  @url_normalizer = options[:url_normalizer]
  DEFAULT_OPTIONS.merge(options).tap do |merged|
    @bulk_path = merged[:bulk_path]
    @sniffing_path = merged[:sniffing_path]
    @healthcheck_path = merged[:healthcheck_path]
    @resurrect_delay = merged[:resurrect_delay]
    @sniffing = merged[:sniffing]
    @sniffer_delay = merged[:sniffer_delay]
  end

  # Used for all concurrent operations in this class
  @state_mutex = Mutex.new

  # Holds metadata about all URLs
  @url_info = {}
  @stopping = false

  @license_checker = options[:license_checker] || LogStash::PluginMixins::ElasticSearch::NoopLicenseChecker::INSTANCE

  @last_es_version = Concurrent::AtomicReference.new
  @build_flavor = Concurrent::AtomicReference.new
end
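For orientation, a hedged construction sketch. The adapter, logger, and URL normalizer below are stand-ins invented for illustration; in the plugin, the HttpClient supplies its Manticore-backed adapter, a structured logger, and its own normalizer.

require "logstash/outputs/elasticsearch/http_client/pool"

adapter = Object.new   # stand-in; a real adapter responds to #perform_request and #close
logger  = Object.new   # stand-in; a real logger accepts calls like logger.warn(msg, url: ...)

pool = LogStash::Outputs::ElasticSearch::HttpClient::Pool.new(
  logger,
  adapter,
  [LogStash::Util::SafeURI.new("http://localhost:9200")],
  :url_normalizer => ->(uri) { uri },  # required; omitting it raises ArgumentError
  :sniffing       => false             # everything else falls back to DEFAULT_OPTIONS
)

pool.size  # => 0 until #start registers the initial URLs and health-checks them

Calling #start would then run #update_initial_urls and spawn the resurrectionist thread (and the sniffer thread when :sniffing is true).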
Instance Attribute Details
#adapter ⇒ Object (readonly)
Returns the value of attribute adapter.
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 49

def adapter
  @adapter
end
#bulk_path ⇒ Object (readonly)
Returns the value of attribute bulk_path.
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 49

def bulk_path
  @bulk_path
end
#healthcheck_path ⇒ Object (readonly)
Returns the value of attribute healthcheck_path.
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 49

def healthcheck_path
  @healthcheck_path
end
#license_checker ⇒ Object (readonly)
license_checker is used by the pool specs.
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 50

def license_checker
  @license_checker
end
#logger ⇒ Object (readonly)
Returns the value of attribute logger.
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 49

def logger
  @logger
end
#resurrect_delay ⇒ Object (readonly)
Returns the value of attribute resurrect_delay.
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 49

def resurrect_delay
  @resurrect_delay
end
#sniffer_delay ⇒ Object (readonly)
Returns the value of attribute sniffer_delay.
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 49

def sniffer_delay
  @sniffer_delay
end
#sniffing ⇒ Object (readonly)
Returns the value of attribute sniffing.
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 49

def sniffing
  @sniffing
end
#sniffing_path ⇒ Object (readonly)
Returns the value of attribute sniffing_path.
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 49

def sniffing_path
  @sniffing_path
end
Instance Method Details
#add_url(url) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 407

def add_url(url)
  @url_info[url] ||= empty_url_meta
end
#address_str_to_uri(addr_str) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 217

def address_str_to_uri(addr_str)
  matches = addr_str.match(ES1_SNIFF_RE_URL) || addr_str.match(ES2_AND_ABOVE_SNIFF_RE_URL)
  if matches
    host = matches[1].empty? ? matches[2] : matches[1]
    ::LogStash::Util::SafeURI.new("#{host}:#{matches[3]}")
  end
end
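For context, ES1_SNIFF_RE_URL covers the old Elasticsearch 1.x publish-address form ("inet[host/ip:port]"), while ES2_AND_ABOVE_SNIFF_RE_URL handles the bare "host:port" (or "host/ip:port") form. A standalone illustration with made-up addresses:

es1 = "inet[myhost/127.0.0.1:9200]".match(/\[([^\/]*)?\/?([^:]*):([0-9]+)\]/)
[es1[1], es1[2], es1[3]]  # => ["myhost", "127.0.0.1", "9200"]  -> the hostname wins

es2 = "127.0.0.1:9200".match(/([^\/]*)?\/?([^:]*):([0-9]+)/)
[es2[1], es2[2], es2[3]]  # => ["127.0.0.1", "", "9200"]

The method prefers matches[1] when it is non-empty and otherwise falls back to matches[2], then builds "host:port" into a SafeURI.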
#alive_urls_count ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 138

def alive_urls_count
  @state_mutex.synchronize { @url_info.values.select {|v| v[:state] == :alive }.count }
end
#check_sniff ⇒ Object
Sniffs and returns the results. Does not update internal URLs!
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 192

def check_sniff
  _, url_meta, resp = perform_request(:get, @sniffing_path)
  @metric.increment(:sniff_requests)
  parsed = LogStash::Json.load(resp.body)
  nodes = parsed['nodes']
  if !nodes || nodes.empty?
    @logger.warn("Sniff returned no nodes! Will not update hosts.")
    return nil
  else
    sniff(nodes)
  end
end
#close ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 111

def close
  @state_mutex.synchronize { @stopping = true }

  logger.debug "Stopping sniffer"
  stop_sniffer

  logger.debug "Stopping resurrectionist"
  stop_resurrectionist

  logger.debug "Waiting for in use manticore connections"
  wait_for_in_use_connections

  logger.debug("Closing adapter #{@adapter}")
  @adapter.close
end
#empty_url_meta ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 415

def empty_url_meta
  {
    :in_use => 0,
    :state => :unknown
  }
end
#get_connection ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 461

def get_connection
  @state_mutex.synchronize do
    # The goal here is to pick a random connection from the least-in-use connections
    # We want some randomness so that we don't hit the same node over and over, but
    # we also want more 'fair' behavior in the event of high concurrency
    eligible_set = nil
    lowest_value_seen = nil
    @url_info.each do |url, meta|
      meta_in_use = meta[:in_use]
      next if meta[:state] == :dead

      if lowest_value_seen.nil? || meta_in_use < lowest_value_seen
        lowest_value_seen = meta_in_use
        eligible_set = [[url, meta]]
      elsif lowest_value_seen == meta_in_use
        eligible_set << [url, meta]
      end
    end

    return nil if eligible_set.nil?

    pick, pick_meta = eligible_set.sample
    pick_meta[:in_use] += 1

    [pick, pick_meta]
  end
end
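The selection rule is: among non-dead URLs, keep those whose :in_use count equals the minimum seen, then sample one at random and bump its counter. A standalone sketch of the same idea over a plain hash (hypothetical data, no mutex):

url_info = {
  "http://es1:9200" => { :state => :alive, :in_use => 2 },
  "http://es2:9200" => { :state => :alive, :in_use => 0 },
  "http://es3:9200" => { :state => :dead,  :in_use => 0 },
  "http://es4:9200" => { :state => :alive, :in_use => 0 },
}

live     = url_info.reject { |_url, meta| meta[:state] == :dead }
lowest   = live.values.map { |meta| meta[:in_use] }.min
eligible = live.select { |_url, meta| meta[:in_use] == lowest }

url, meta = eligible.to_a.sample  # es2 or es4; never the busier es1 or the dead es3
meta[:in_use] += 1                # checkout; #return_connection later decrements this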
#get_license(url) ⇒ Hash
Retrieve ES node license information
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 244

def get_license(url)
  response = perform_request_to_url(url, :get, LICENSE_PATH)
  LogStash::Json.load(response.body)
rescue => e
  logger.error("Unable to get license information", url: url.sanitized.to_s, exception: e.class, message: e.message)
  {}
end
#get_root_path(url, params = {}) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 315

def get_root_path(url, params={})
  resp = perform_request_to_url(url, :get, ROOT_URI_PATH, params)
  return resp, nil
rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
  logger.warn("Elasticsearch main endpoint returns #{e.response_code}", message: e.message, body: e.response_body)
  return nil, e
end
#health_check_request(url) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 252

def health_check_request(url)
  logger.debug("Running health check to see if an Elasticsearch connection is working",
               :healthcheck_url => url.sanitized.to_s, :path => @healthcheck_path)
  response = perform_request_to_url(url, :head, @healthcheck_path)
  return response, nil
rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
  logger.warn("Health check failed", code: e.response_code, url: e.url, message: e.message)
  return nil, e
end
#healthcheck!(register_phase = true) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 262

def healthcheck!(register_phase = true)
  # Try to keep locking granularity low such that we don't affect IO...
  @state_mutex.synchronize { @url_info.select {|url, meta| meta[:state] != :alive } }.each do |url, meta|
    begin
      _, health_bad_code_err = health_check_request(url)
      root_response, root_bad_code_err = get_root_path(url) if health_bad_code_err.nil? || register_phase

      # when called from resurrectionist skip the product check done during register phase
      if register_phase
        raise LogStash::ConfigurationError, "Could not read Elasticsearch. Please check the credentials" if root_bad_code_err&.invalid_credentials?
        raise LogStash::ConfigurationError, "Could not read Elasticsearch. Please check the privileges" if root_bad_code_err&.forbidden?
        # when custom_headers is invalid
        raise LogStash::ConfigurationError, "The Elastic-Api-Version header is not valid" if root_bad_code_err&.invalid_eav_header?
        # when it is not Elasticsearch
        raise LogStash::ConfigurationError, "Could not connect to a compatible version of Elasticsearch" if root_bad_code_err.nil? && !elasticsearch?(root_response)

        test_serverless_connection(url, root_response)
      end

      raise health_bad_code_err if health_bad_code_err
      raise root_bad_code_err if root_bad_code_err

      # If no exception was raised it must have succeeded!
      if register_phase
        logger.info("Connected to ES instance", url: url.sanitized.to_s)
      else
        logger.warn("Restored connection to ES instance", url: url.sanitized.to_s)
      end

      # We check its ES version
      es_version, build_flavor = parse_es_version(root_response)
      logger.warn("Failed to retrieve Elasticsearch build flavor") if build_flavor.nil?
      logger.warn("Failed to retrieve Elasticsearch version data from connected endpoint, connection aborted", :url => url.sanitized.to_s) if es_version.nil?
      next if es_version.nil?

      @state_mutex.synchronize do
        meta[:version] = es_version
        set_last_es_version(es_version, url)
        set_build_flavor(build_flavor)

        alive = @license_checker.appropriate_license?(self, url)
        meta[:state] = alive ? :alive : :dead
      end
    rescue HostUnreachableError, BadResponseCodeError => e
      logger.warn("Attempted to resurrect connection to dead ES instance, but got an error", url: url.sanitized.to_s, exception: e.class, message: e.message)
    end
  end
end
#in_use_connections ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 134

def in_use_connections
  @state_mutex.synchronize { @url_info.values.select {|v| v[:in_use] > 0 } }
end
#last_es_version ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 496

def last_es_version
  @last_es_version.get
end
#major_version(version_string) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 205

def major_version(version_string)
  version_string.split('.').first.to_i
end
#mark_dead(url, error) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 441

def mark_dead(url, error)
  @state_mutex.synchronize do
    meta = @url_info[url]
    # In case a sniff happened removing the metadata just before there's nothing to mark
    # This is an extreme edge case, but it can happen!
    return unless meta

    logger.warn("Marking url as dead. Last error: [#{error.class}] #{error.message}",
                :url => url, :error_message => error.message, :error_class => error.class.name)

    meta[:state] = :dead
    meta[:last_error] = error
    meta[:last_errored_at] = Time.now
  end
end
#maximum_seen_major_version ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 500

def maximum_seen_major_version
  @state_mutex.synchronize { @maximum_seen_major_version }
end
#normalize_url(uri) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 357

def normalize_url(uri)
  u = @url_normalizer.call(uri)
  if !u.is_a?(::LogStash::Util::SafeURI)
    raise "URL Normalizer returned a '#{u.class}' rather than a SafeURI! This shouldn't happen!"
  end
  u
end
#perform_request(method, path, params = {}, body = nil) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 338

def perform_request(method, path, params={}, body=nil)
  with_connection do |url, url_meta|
    resp = perform_request_to_url(url, method, path, params, body)
    [url, url_meta, resp]
  end
end
#perform_request_to_url(url, method, path, params = {}, body = nil) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 352

def perform_request_to_url(url, method, path, params={}, body=nil)
  params[:headers] = DEFAULT_EAV_HEADER.merge(params[:headers] || {}) if serverless?
  @adapter.perform_request(url, method, path, params, body)
end
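On serverless, the versioned-API header is merged underneath any caller-supplied headers, so an explicit caller value is never clobbered. The merge in isolation (the X-Opaque-Id header is a hypothetical example):

default_eav = { "Elastic-Api-Version" => "2023-10-31" }
params      = { :headers => { "X-Opaque-Id" => "my-pipeline" } }

params[:headers] = default_eav.merge(params[:headers] || {})
# => {"Elastic-Api-Version"=>"2023-10-31", "X-Opaque-Id"=>"my-pipeline"}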
#remove_url(url) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 411

def remove_url(url)
  @url_info.delete(url)
end
#resurrectionist_alive? ⇒ Boolean
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 334

def resurrectionist_alive?
  @resurrectionist ? @resurrectionist.alive? : nil
end
#return_connection(url) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 489

def return_connection(url)
  @state_mutex.synchronize do
    info = @url_info[url]
    info[:in_use] -= 1 if info # Guard against the condition where the connection has already been deleted
  end
end
#serverless? ⇒ Boolean
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 504

def serverless?
  @build_flavor.get == BUILD_FLAVOR_SERVERLESS
end
#size ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 403

def size
  @state_mutex.synchronize { @url_info.size }
end
#sniff(nodes) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 209

def sniff(nodes)
  nodes.map do |id, info|
    # Skip master-only nodes
    next if info["roles"] && info["roles"] == ["master"]
    address_str_to_uri(info["http"]["publish_address"]) if info["http"]
  end.compact
end
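The nodes hash comes from the _nodes/http API response parsed in #check_sniff. A standalone sketch with a hypothetical response shape, showing why a dedicated master is skipped and why a node without an "http" section yields nothing:

nodes = {
  "node-1" => { "roles" => ["master"],
                "http"  => { "publish_address" => "10.0.0.1:9200" } },
  "node-2" => { "roles" => ["data", "ingest"],
                "http"  => { "publish_address" => "10.0.0.2:9200" } },
  "node-3" => { "roles" => ["data"] },  # no "http" section, nothing to connect to
}

addresses = nodes.map do |_id, info|
  next if info["roles"] && info["roles"] == ["master"]  # master-only nodes are skipped
  info["http"]["publish_address"] if info["http"]
end.compact
# => ["10.0.0.2:9200"]  (each entry is then converted by #address_str_to_uri)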
#sniff! ⇒ Object
Sniffs the cluster then updates the internal URLs
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 185

def sniff!
  update_urls(check_sniff)
end
#sniffer_alive? ⇒ Boolean
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 229

def sniffer_alive?
  @sniffer ? @sniffer.alive? : nil
end
#start ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 101

def start
  update_initial_urls
  start_resurrectionist
  start_sniffer if @sniffing
end
#start_resurrectionist ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 233

def start_resurrectionist
  @resurrectionist = Thread.new do
    until_stopped("resurrection", @resurrect_delay) do
      healthcheck!(false)
    end
  end
end
#start_sniffer ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 171

def start_sniffer
  @sniffer = Thread.new do
    until_stopped("sniffing", sniffer_delay) do
      begin
        sniff!
      rescue NoConnectionAvailableError => e
        @state_mutex.synchronize { # Synchronize around @url_info
          logger.warn("Elasticsearch output attempted to sniff for new connections but cannot. No living connections are detected. Pool contains the following current URLs", :url_info => @url_info)
        }
      end
    end
  end
end
#stop_resurrectionist ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 330

def stop_resurrectionist
  @resurrectionist.join if @resurrectionist
end
#stop_sniffer ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 225

def stop_sniffer
  @sniffer.join if @sniffer
end
#test_serverless_connection(url, root_response) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 323

def test_serverless_connection(url, root_response)
  _, build_flavor = parse_es_version(root_response)
  params = { :headers => DEFAULT_EAV_HEADER }
  _, bad_code_err = get_root_path(url, params) if build_flavor == BUILD_FLAVOR_SERVERLESS
  raise LogStash::ConfigurationError, "The Elastic-Api-Version header is not valid" if bad_code_err&.invalid_eav_header?
end
#until_stopped(task_name, delay) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 150

def until_stopped(task_name, delay)
  last_done = Time.now
  until @state_mutex.synchronize { @stopping }
    begin
      now = Time.now
      if (now - last_done) >= delay
        last_done = now
        yield
      end
      sleep 1
    rescue => e
      logger.warn(
        "Error while performing #{task_name}",
        :error_message => e.message,
        :class => e.class.name,
        :backtrace => e.backtrace
      )
    end
  end
end
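The loop wakes once per second and yields only when the configured delay has elapsed, so even a long delay does not slow shutdown: #close only has to wait out the current one-second sleep. A reduced standalone sketch of the same pattern (names are hypothetical):

stopping  = false
mutex     = Mutex.new
last_done = Time.now
delay     = 5  # seconds between task runs

worker = Thread.new do
  until mutex.synchronize { stopping }
    if (Time.now - last_done) >= delay
      last_done = Time.now
      puts "periodic task ran"
    end
    sleep 1  # short sleep keeps shutdown responsive regardless of delay
  end
end

sleep 2
mutex.synchronize { stopping = true }  # analogous to #close flipping @stopping
worker.join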
#update_initial_urls ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 107

def update_initial_urls
  update_urls(@initial_urls)
end
#update_urls(new_urls) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 365

def update_urls(new_urls)
  return if new_urls.nil?

  # Normalize URLs
  new_urls = new_urls.map(&method(:normalize_url))

  # Used for logging nicely
  state_changes = {:removed => [], :added => []}
  @state_mutex.synchronize do
    # Add new connections
    new_urls.each do |url|
      # URI objects don't have real hash equality! So, since this isn't perf sensitive we do a linear scan
      unless @url_info.keys.include?(url)
        state_changes[:added] << url
        add_url(url)
      end
    end

    # Delete connections not in the new list
    @url_info.each do |url,_|
      unless new_urls.include?(url)
        state_changes[:removed] << url
        remove_url(url)
      end
    end
  end

  if state_changes[:removed].size > 0 || state_changes[:added].size > 0
    logger.info? && logger.info("Elasticsearch pool URLs updated", :changes => state_changes)
  end

  # Run an inline healthcheck anytime URLs are updated
  # This guarantees that during startup / post-startup
  # sniffing we don't have idle periods waiting for the
  # periodic sniffer to allow new hosts to come online
  healthcheck!
end
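The add/remove diff the method computes can be seen in isolation; the real implementation uses a linear scan because SafeURI objects lack reliable hash equality, but the effect is an ordinary set difference (hypothetical URL strings below):

current  = ["http://es1:9200", "http://es2:9200"]
new_urls = ["http://es2:9200", "http://es3:9200"]

state_changes = {
  :added   => new_urls - current,  # => ["http://es3:9200"]
  :removed => current - new_urls,  # => ["http://es1:9200"]
}
# Any non-empty diff is logged and then followed by an inline healthcheck!.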
#url_info ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 142

def url_info
  @state_mutex.synchronize { @url_info }
end
#url_meta(url) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 455

def url_meta(url)
  @state_mutex.synchronize do
    @url_info[url]
  end
end
#urls ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 146

def urls
  url_info.keys
end
#wait_for_in_use_connections ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 127

def wait_for_in_use_connections
  until in_use_connections.empty?
    logger.info "Blocked on shutdown to in use connections #{@state_mutex.synchronize {@url_info}}"
    sleep 1
  end
end
#with_connection ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 422

def with_connection
  url, url_meta = get_connection

  # Custom error class used here so that users may retry attempts if they receive this error
  # should they choose to
  raise NoConnectionAvailableError, "No Available connections" unless url
  yield url, url_meta
rescue HostUnreachableError => e
  # Mark the connection as dead here since this is likely not transient
  mark_dead(url, e)
  raise e
rescue BadResponseCodeError => e
  # These aren't discarded from the pool because these are often very transient
  # errors
  raise e
ensure
  return_connection(url)
end
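A hedged usage sketch, continuing the construction example under #initialize: most callers go through #perform_request, but the block form makes the checkout/return lifecycle visible.

# Sketch only: `pool` is a started Pool instance as constructed under #initialize.
pool.with_connection do |url, url_meta|
  # url is a LogStash::Util::SafeURI; url_meta is the pool's bookkeeping hash for it.
  pool.perform_request_to_url(url, :get, "/")
end
# HostUnreachableError marks the URL dead before re-raising; the ensure block always
# calls #return_connection, so url_meta[:in_use] is decremented even when the block raises.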