Compare commits

...

10 Commits

Author SHA1 Message Date
Nogweii
4b074a6d8a fix squid crashing on my Arch laptop 2023-11-04 16:55:11 +00:00
HoneyryderChuck
791a94322f resolver: fix for when nested lookup call returns nil 2023-11-04 16:54:59 +00:00
HoneyryderChuck
3cd063b153 Merge branch 'issue-gh-18' into 'master'
Fix close callback leak

See merge request os85/httpx!289
2023-11-04 16:35:01 +00:00
HoneyryderChuck
9a64fadb56 updating example scripts 2023-11-04 16:22:53 +00:00
HoneyryderChuck
e178bc9f20 remove duplicated conn close handler, it's set already in init_connection 2023-11-04 16:22:53 +00:00
HoneyryderChuck
4ef2d9c3ce do not remove ivars anymore 2023-11-04 16:22:53 +00:00
HoneyryderChuck
39d0356340 no consumer of connection reset event, so no emission required 2023-11-04 02:22:32 +00:00
HoneyryderChuck
1e05cdbe62 http/1.1 fix: close connection even if the server does not respect connection: close in request 2023-11-04 02:21:03 +00:00
HoneyryderChuck
e27301013d patching the setup of the on close callback instead
the previous patch allowed the callback to be called only once, whereas this one will be long-lived for the duration of the connection
2023-11-03 22:48:55 +00:00
Thomas Hurst
ec7b845c67 Fix close callback leak
Per Github issue #18, this causes a linear performance decrease, with
each connection slightly slowing the next.
2023-11-02 02:00:01 +00:00
12 changed files with 67 additions and 33 deletions
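
The close-callback leak fixed here (ec7b845c67, e27301013d) comes down to where the handler is registered: adding a :close handler per request keeps appending to the connection's callback list, so each request leaves the next one a little slower. A standalone sketch of that failure mode, using a generic emitter rather than httpx's internals:

# Generic emitter, not httpx's internals: registering a :close handler per
# request grows the handler list linearly; registering once per connection
# (the fix) keeps it at a single handler.
class Emitter
  def initialize
    @callbacks = Hash.new { |hash, key| hash[key] = [] }
  end

  def on(event, &blk)
    @callbacks[event] << blk
  end

  def emit(event)
    @callbacks[event].each(&:call)
  end

  def handler_count(event)
    @callbacks[event].size
  end
end

leaky = Emitter.new
100.times { leaky.on(:close) { } }   # one more handler per "request"
puts leaky.handler_count(:close)     # => 100

fixed = Emitter.new
fixed.on(:close) { }                 # registered once, for the connection's lifetime
puts fixed.handler_count(:close)     # => 1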

View File

@@ -19,21 +19,18 @@ Signal.trap("INFO") { print_status } unless ENV.key?("CI")
Thread.start do
frontpage = HTTPX.get("https://news.ycombinator.com").to_s
html = Oga.parse_html(frontpage)
links = html.css('.itemlist a.storylink').map{|link| link.get('href') }
links = html.css('.athing .title a').map{|link| link.get('href') }.select { |link| URI(link).absolute? }
links = links.select {|l| l.start_with?("https") }
puts links
responses = HTTPX.get(*links)
links.each_with_index do |l, i|
puts "#{responses[i].status}: #{l}"
end
end.join
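
The updated selector in this example keeps only absolute links because Hacker News title anchors can be relative ("item?id=...") for text posts; URI#absolute? separates the two. A quick illustration:

# Quick illustration of the absolute-URI filter used above.
require "uri"

links = ["https://example.com/some-article", "item?id=38123456"]
p links.select { |link| URI(link).absolute? } # => ["https://example.com/some-article"]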

View File

@@ -1,7 +1,7 @@
require "httpx"
require "oga"
http = HTTPX.plugin(:persistent).with(timeout: { operation_timeut: 5, connect_timeout: 5})
http = HTTPX.plugin(:persistent).with(timeout: { request_timeout: 5 })
PAGES = (ARGV.first || 10).to_i
pages = PAGES.times.map do |page|
@@ -16,10 +16,11 @@ Array(http.get(*pages)).each_with_index.map do |response, i|
end
html = Oga.parse_html(response.to_s)
# binding.irb
page_links = html.css('.itemlist a.titlelink').map{|link| link.get('href') }
page_links = html.css('.athing .title a').map{|link| link.get('href') }.select { |link| URI(link).absolute? }
puts "page(#{i+1}): #{page_links.size}"
if page_links.size == 0
puts "error(#{response.status}) on page #{i+1}"
next
end
# page_links.each do |link|
# puts "link: #{link}"
@@ -31,6 +32,11 @@ end
links = links.each_with_index do |pages, i|
puts "Page: #{i+1}\t Links: #{pages.size}"
pages.each do |page|
puts "URL: #{page.uri} (#{page.status})"
case page
in status:
puts "URL: #{page.uri} (#{status})"
in error:
puts "URL: #{page.uri} (#{error.message})"
end
end
end
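
The updated script no longer calls #status on every result; it pattern-matches each page object, so error results (which carry an error rather than a status) get their own branch. A self-contained sketch of that dispatch with stand-in classes, not httpx's actual response types:

# Stand-in page objects showing how the `in status:` / `in error:` dispatch
# works: the case matches on whichever keys deconstruct_keys exposes.
SuccessPage = Struct.new(:uri, :status, keyword_init: true) do
  def deconstruct_keys(_keys)
    { uri: uri, status: status }
  end
end

FailedPage = Struct.new(:uri, :error, keyword_init: true) do
  def deconstruct_keys(_keys)
    { uri: uri, error: error }
  end
end

pages = [
  SuccessPage.new(uri: "https://example.com", status: 200),
  FailedPage.new(uri: "https://example.org", error: RuntimeError.new("timed out")),
]

pages.each do |page|
  case page
  in status:
    puts "URL: #{page.uri} (#{status})"
  in error:
    puts "URL: #{page.uri} (#{error.message})"
  end
end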

View File

@@ -6,11 +6,9 @@ include HTTPX
URLS = %w[http://nghttp2.org https://nghttp2.org/blog/]# * 3
client = HTTPX.plugin(:proxy)
client = client.with_proxy(uri: "http://61.7.174.110:54132")
responses = client.get(URLS)
client = client.with_proxy(uri: "http://134.209.29.120:8080")
responses = client.get(*URLS)
puts responses.map(&:status)
# response = client.get(URLS.first)
# puts response.status

View File

@@ -273,7 +273,7 @@ module HTTPX
end
def timeout
return @timeout if defined?(@timeout)
return @timeout if @timeout
return @options.timeout[:connect_timeout] if @state == :idle
@@ -518,7 +518,6 @@ module HTTPX
else
transition(:closing)
transition(:closed)
emit(:reset)
@parser.reset if @parser
transition(:idle)
@@ -617,7 +616,7 @@ module HTTPX
def purge_after_closed
@io.close if @io
@read_buffer.clear
remove_instance_variable(:@timeout) if defined?(@timeout)
@timeout = nil
end
def build_socket(addrs = nil)
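
The connection.rb hunks above ("do not remove ivars anymore") swap the defined?/remove_instance_variable dance for a plain nil reset: #timeout now guards on truthiness, and purge_after_closed just sets @timeout = nil instead of removing the ivar. A reduced sketch of the pattern, with an assumed default-timeout constant:

# Reduced sketch (assumed constant, not httpx's) of the nil-reset pattern.
class ConnectionTimeouts
  DEFAULT_TIMEOUT = 5

  def timeout
    return @timeout if @timeout # nil after a purge, so this falls through

    @timeout = DEFAULT_TIMEOUT
  end

  def purge_after_closed
    @timeout = nil # was: remove_instance_variable(:@timeout) if defined?(@timeout)
  end
end

conn = ConnectionTimeouts.new
puts conn.timeout        # => 5
conn.purge_after_closed
puts conn.timeout        # => 5, recomputed without the defined? edge case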

View File

@@ -181,7 +181,7 @@ module HTTPX
if response.is_a?(ErrorResponse)
disable
else
manage_connection(response)
manage_connection(request, response)
end
if exhausted?
@@ -224,7 +224,7 @@ module HTTPX
private
def manage_connection(response)
def manage_connection(request, response)
connection = response.headers["connection"]
case connection
when /keep-alive/i
@@ -254,7 +254,7 @@ module HTTPX
disable
when nil
# In HTTP/1.1, it's keep alive by default
return if response.version == "1.1"
return if response.version == "1.1" && request.headers["connection"] != "close"
disable
end
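
Commit 1e05cdbe62 targets the `when nil` branch above: with no Connection header in the response, HTTP/1.1 defaults to keep-alive, but the connection should still be torn down when the request itself sent "connection: close" and the server ignored it. A compact sketch of that decision, with an assumed helper name rather than httpx's API:

# Assumed helper, not httpx's API: keep-alive decision for a response with or
# without a Connection header, taking the request's own header into account.
def keep_alive?(request_connection:, response_connection:, version:)
  case response_connection
  when /keep-alive/i then true
  when /close/i      then false
  when nil           then version == "1.1" && request_connection != "close"
  end
end

puts keep_alive?(request_connection: nil,     response_connection: nil, version: "1.1") # => true
puts keep_alive?(request_connection: "close", response_connection: nil, version: "1.1") # => false
puts keep_alive?(request_connection: nil,     response_connection: nil, version: "1.0") # => false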

View File

@@ -76,7 +76,6 @@ module HTTPX
else
transition(:closing)
transition(:closed)
emit(:reset)
parser.reset if @parser
transition(:idle)

View File

@@ -223,9 +223,6 @@ module HTTPX
@connected_connections += 1
end
select_connection(connection)
connection.on(:close) do
unregister_connection(connection)
end
end
def unregister_connection(connection)

View File

@@ -87,16 +87,18 @@ module HTTPX
def lookup(hostname, ttl)
return unless @lookups.key?(hostname)
@lookups[hostname] = @lookups[hostname].select do |address|
entries = @lookups[hostname] = @lookups[hostname].select do |address|
address["TTL"] > ttl
end
ips = @lookups[hostname].flat_map do |address|
ips = entries.flat_map do |address|
if address.key?("alias")
lookup(address["alias"], ttl)
else
IPAddr.new(address["data"])
end
end
end.compact
ips unless ips.empty?
end
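
The resolver hunk ("fix for when nested lookup call returns nil") compacts the flat_mapped addresses and returns nil for an empty result: a recursive lookup through an "alias" entry can come back nil once that entry's records have expired, and without the compact those nils would leak into the returned list. A standalone sketch of the edge case, with an assumed cache shape:

# Assumed cache shape, not httpx's resolver: an alias whose target records have
# expired makes the nested lookup return nil, which compact drops.
require "ipaddr"

LOOKUPS = {
  "example.test" => [{ "alias" => "cname.test", "TTL" => 100 }],
  "cname.test"   => [{ "data" => "93.184.216.34", "TTL" => 0 }], # already expired
}

def lookup(hostname, ttl)
  entries = LOOKUPS[hostname]&.select { |address| address["TTL"] > ttl }
  return if entries.nil? || entries.empty?

  ips = entries.flat_map do |address|
    address.key?("alias") ? lookup(address["alias"], ttl) : IPAddr.new(address["data"])
  end.compact

  ips unless ips.empty?
end

p lookup("example.test", 10) # => nil, instead of an array containing nil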

View File

@@ -260,7 +260,9 @@ module HTTPX
connection.on(:open) do
emit(:connection_opened, connection.origin, connection.io.socket)
# only run close callback if it opened
connection.on(:close) { emit(:connection_closed, connection.origin, connection.io.socket) }
end
connection.on(:close) do
emit(:connection_closed, connection.origin, connection.io.socket) if connection.used?
end
catch(:coalesced) do
pool.init_connection(connection, options)

View File

@@ -0,0 +1,30 @@
# frozen_string_literal: true
require "test_helper"
require "support/http_helpers"
class Bug_1_1_1_Test < Minitest::Test
include HTTPHelpers
def test_connection_callbacks_fire_setup_once
uri = build_uri("/get")
connected = 0
HTTPX.on_connection_opened { |*| connected += 1 }
.on_connection_closed { |*| connected -= 1 }
.wrap do |session|
3.times.each do
response = session.get(uri)
verify_status(response, 200)
assert connected.zero?
end
end
end
private
def scheme
"http://"
end
end

View File

@@ -59,13 +59,13 @@ module HTTPX
def initialize: (Buffer, options) -> untyped
def manage_connection: (Response) -> void
def manage_connection: (Request request, Response response) -> void
def disable: () -> void
def disable_pipelining: () -> void
def set_protocol_headers: (Request) -> _Each[[String, String]]
def set_protocol_headers: (Request request) -> _Each[[String, String]]
def handle: (Request request) -> void

View File

@@ -22,3 +22,7 @@ acl localnet src fe80::/10 # RFC 4291 link-local (directly plugged) machine
http_port 3128
https_port 3128
# limit the number of file descriptors so that Squid doesn't try allocating
# hundreds of gigabytes of RAM on systems with large NOFILE ulimits
max_filedescriptors 1024