Mirror of https://github.com/HoneyryderChuck/httpx.git (synced 2025-07-15 00:00:55 -04:00)
Compare commits
11 Commits
613e95d5f4 ... 9582e17370

9582e17370
6937f3fbe4
cfac38dc62
bd233c5303
56743923f6
75867115b2
4eee045b02
5f079f8fc0
ce6c1d2ce5
899b2df94f
bbf257477b
@ -131,7 +131,7 @@ coverage:
|
||||
paths:
|
||||
- "coverage/"
|
||||
|
||||
docs:
|
||||
pages:
|
||||
stage: deploy
|
||||
needs:
|
||||
- coverage
|
||||
@ -140,12 +140,15 @@ docs:
|
||||
- gem install hanna-nouveau
|
||||
script:
|
||||
- rake prepare_website
|
||||
- mkdir -p public/
|
||||
- cp -r coverage/ public/
|
||||
artifacts:
|
||||
paths:
|
||||
- rdoc/
|
||||
- wiki/
|
||||
- data/
|
||||
- coverage/
|
||||
- public/
|
||||
only:
|
||||
- master
|
||||
|
||||
|
8
Gemfile
@ -20,6 +20,8 @@ group :test do
|
||||
gem "spy"
|
||||
if RUBY_VERSION < "2.3.0"
|
||||
gem "webmock", "< 3.15.0"
|
||||
elsif RUBY_VERSION < "2.4.0"
|
||||
gem "webmock", "< 3.17.0"
|
||||
else
|
||||
gem "webmock"
|
||||
end
|
||||
@ -28,11 +30,7 @@ group :test do
|
||||
|
||||
gem "net-ssh", "~> 4.2.0" if RUBY_VERSION < "2.2.0"
|
||||
|
||||
if RUBY_VERSION >= "2.3.0"
|
||||
gem "ddtrace"
|
||||
else
|
||||
gem "ddtrace", "< 1.0.0"
|
||||
end
|
||||
gem "ddtrace"
|
||||
|
||||
platform :mri do
|
||||
if RUBY_VERSION >= "2.3.0"
|
||||
|
42
doc/release_notes/0_23_0.md
Normal file
@ -0,0 +1,42 @@
# 0.23.0

## Features

### `:retries` plugin: resumable requests

The `:retries` plugin now supports scenarios where, if the request being retried supports the `range` header and a partial response has already been buffered, the retry will resume from there and only download the missing data.

#### HTTPX::ErrorResponse#response

As a result, `HTTPX::ErrorResponse#response` has also been introduced; error responses may now carry an actual response. This happens in cases where the request failed **after** a partial response was initiated.
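As a minimal usage sketch (the download URL is hypothetical; the resumption itself, i.e. re-issuing the request with a `range` header for the missing bytes, happens inside the plugin):

```ruby
require "httpx"

# a session with the :retries plugin enabled
http = HTTPX.plugin(:retries, max_retries: 3)

# if the transfer fails after a partial response has been buffered, the plugin
# retries with a "range" header and only downloads the remaining bytes
response = http.get("https://example.com/large-file.bin")

if response.is_a?(HTTPX::ErrorResponse)
  # new in 0.23.0: the partial response, when one was started, is exposed here
  puts "failed: #{response.error} (partial response: #{response.response.inspect})"
else
  puts "downloaded #{response.to_s.bytesize} bytes"
end
```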
#### `:buffer_size` option

A new option, `:buffer_size`, can be used to tweak the buffers used by the read/write socket routines (16k by default; you can lower it in memory-constrained environments).
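For example (a sketch; the 4KB value is only an illustration for a memory-constrained environment):

```ruby
require "httpx"

# shrink the socket read/write buffers from the 16KB default down to 4KB
http = HTTPX.with(buffer_size: 4 * 1024)
puts http.get("https://example.com").status
```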
## Improvements

### `:native` resolver falls back to TCP for truncated messages

The `:native` resolver will now repeat DNS queries to a nameserver via TCP when the first attempt is marked as truncated. This behaviour is aligned with both `getaddrinfo` and the `resolv` standard library.

This introduces a new `resolver_options` sub-option, `:socket_type`, which can be set to `:tcp` if TCP is to be used by default.
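A sketch of how that might be set (assuming, as described above, that `:socket_type` is passed under `resolver_options`):

```ruby
require "httpx"

# make the :native resolver talk to nameservers over TCP from the start,
# instead of starting over UDP and retrying on truncation
http = HTTPX.with(
  resolver_class: :native,
  resolver_options: { socket_type: :tcp }
)
puts http.get("https://example.com").status
```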
## Chore

### HTTPX.build_request should receive upcased string (i.e. "GET")

Functions which receive an HTTP verb should now be given the verb as an upcased string (e.g. "GET"). Using symbols is still possible, but a deprecation warning will be emitted, and support will be removed in v1.0.0.
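For example (with a hypothetical URL):

```ruby
require "httpx"

# preferred from 0.23.0 onwards: the verb as an upcased string
request = HTTPX.build_request("GET", "https://example.com")

# still accepted, but emits a deprecation warning; support is removed in v1.0.0
legacy_request = HTTPX.build_request(:get, "https://example.com")
```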
### Remove HTTPX::Registry

These internal registries were a bit magical to use, difficult to debug, not thread-safe, and overall a nuisance when it came to type checking. While there is the possibility that someone was relying on them, nothing had ever been publicly documented.
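The diff above replaces registry lookups with plain `Hash`-backed options (e.g. `options.encodings`, `options.upgrade_handlers`). A standalone sketch of the resulting pattern, using hypothetical handler names:

```ruby
# plugins now extend a plain Hash option via merge, e.g.
#   options.merge(encodings: options.encodings.merge("br" => self))
handlers = { "gzip" => :gzip_codec }           # hypothetical handler objects
handlers = handlers.merge("br" => :brotli_codec)

# lookups become guarded Hash accesses instead of registry(tag) calls
encoding = "br"
handler = handlers[encoding] if handlers.key?(encoding)
puts handler.inspect #=> :brotli_codec
```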
## Bugfixes

* fixed proxy discovery via proxy env vars (`HTTPS_PROXY`, `NO_PROXY`...) being enabled/disabled based on the first host used in the session;
* fixed `:no_proxy` option usage in the `:proxy` plugin;
* fixed the `webmock` adapter to correctly disable itself when `WebMock.disable!` is called;
* fixed a bug in the `:digest_authentication` plugin when it was enabled and no credentials were passed;
* fixed several bugs in the `sentry` adapter around breadcrumb handling;
* fixed `:native` resolver candidate calculation by putting the absolute domain at the bottom of the list.
@ -25,6 +25,7 @@ services:
|
||||
- AWS_SECRET_ACCESS_KEY=test
|
||||
- AMZ_HOST=aws:4566
|
||||
- WEBDAV_HOST=webdav
|
||||
- DD_INSTRUMENTATION_TELEMETRY_ENABLED=false
|
||||
image: ruby:alpine
|
||||
privileged: true
|
||||
depends_on:
|
||||
|
@ -11,7 +11,6 @@ require "httpx/domain_name"
|
||||
require "httpx/altsvc"
|
||||
require "httpx/callbacks"
|
||||
require "httpx/loggable"
|
||||
require "httpx/registry"
|
||||
require "httpx/transcoder"
|
||||
require "httpx/timers"
|
||||
require "httpx/pool"
|
||||
|
@ -29,7 +29,6 @@ module HTTPX
|
||||
#
|
||||
class Connection
|
||||
extend Forwardable
|
||||
include Registry
|
||||
include Loggable
|
||||
include Callbacks
|
||||
|
||||
@ -63,7 +62,7 @@ module HTTPX
|
||||
# if there's an already open IO, get its
|
||||
# peer address, and force-initiate the parser
|
||||
transition(:already_open)
|
||||
@io = IO.registry(@type).new(@origin, nil, @options)
|
||||
@io = build_socket
|
||||
parser
|
||||
else
|
||||
transition(:idle)
|
||||
@ -82,7 +81,7 @@ module HTTPX
|
||||
if @io
|
||||
@io.add_addresses(addrs)
|
||||
else
|
||||
@io = IO.registry(@type).new(@origin, addrs, @options)
|
||||
@io = build_socket(addrs)
|
||||
end
|
||||
end
|
||||
|
||||
@ -102,7 +101,7 @@ module HTTPX
|
||||
# was the result of coalescing. To prevent blind trust in the case where the
|
||||
# origin came from an ORIGIN frame, we're going to verify the hostname with the
|
||||
# SSL certificate
|
||||
(@origins.size == 1 || @origin == uri.origin || (@io && @io.verify_hostname(uri.host)))
|
||||
(@origins.size == 1 || @origin == uri.origin || (@io.is_a?(SSL) && @io.verify_hostname(uri.host)))
|
||||
) && @options == options
|
||||
) || (match_altsvcs?(uri) && match_altsvc_options?(uri, options))
|
||||
end
|
||||
@ -116,7 +115,7 @@ module HTTPX
|
||||
|
||||
(
|
||||
(open? && @origin == connection.origin) ||
|
||||
!(@io.addresses & connection.addresses).empty?
|
||||
!(@io.addresses & (connection.addresses || [])).empty?
|
||||
) && @options == connection.options
|
||||
end
|
||||
|
||||
@ -451,7 +450,7 @@ module HTTPX
|
||||
end
|
||||
|
||||
def build_parser(protocol = @io.protocol)
|
||||
parser = registry(protocol).new(@write_buffer, @options)
|
||||
parser = self.class.parser_type(protocol).new(@write_buffer, @options)
|
||||
set_parser_callbacks(parser)
|
||||
parser
|
||||
end
|
||||
@ -594,6 +593,17 @@ module HTTPX
|
||||
remove_instance_variable(:@timeout) if defined?(@timeout)
|
||||
end
|
||||
|
||||
def build_socket(addrs = nil)
|
||||
transport_type = case @type
|
||||
when "tcp" then TCP
|
||||
when "ssl" then SSL
|
||||
when "unix" then UNIX
|
||||
else
|
||||
raise Error, "unsupported transport (#{@type})"
|
||||
end
|
||||
transport_type.new(@origin, addrs, @options)
|
||||
end
|
||||
|
||||
def on_error(error)
|
||||
if error.instance_of?(TimeoutError)
|
||||
|
||||
@ -662,5 +672,16 @@ module HTTPX
|
||||
error = error_type.new(request, request.response, read_timeout)
|
||||
on_error(error)
|
||||
end
|
||||
|
||||
class << self
|
||||
def parser_type(protocol)
|
||||
case protocol
|
||||
when "h2" then HTTP2
|
||||
when "http/1.1" then HTTP1
|
||||
else
|
||||
raise Error, "unsupported protocol (##{protocol})"
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
@ -368,5 +368,4 @@ module HTTPX
|
||||
UPCASED[field] || field.split("-").map(&:capitalize).join("-")
|
||||
end
|
||||
end
|
||||
Connection.register "http/1.1", Connection::HTTP1
|
||||
end
|
||||
|
@ -412,5 +412,4 @@ module HTTPX
|
||||
end
|
||||
end
|
||||
end
|
||||
Connection.register "h2", Connection::HTTP2
|
||||
end
|
||||
|
@ -5,13 +5,3 @@ require "httpx/io/udp"
|
||||
require "httpx/io/tcp"
|
||||
require "httpx/io/unix"
|
||||
require "httpx/io/ssl"
|
||||
|
||||
module HTTPX
|
||||
module IO
|
||||
extend Registry
|
||||
register "udp", UDP
|
||||
register "unix", HTTPX::UNIX
|
||||
register "tcp", TCP
|
||||
register "ssl", SSL
|
||||
end
|
||||
end
|
||||
|
@ -201,7 +201,7 @@ module HTTPX
|
||||
|
||||
def option_transport(value)
|
||||
transport = value.to_s
|
||||
raise TypeError, "\#{transport} is an unsupported transport type" unless IO.registry.key?(transport)
|
||||
raise TypeError, "#{transport} is an unsupported transport type" unless %w[unix].include?(transport)
|
||||
|
||||
transport
|
||||
end
|
||||
|
@ -20,10 +20,7 @@ module HTTPX
|
||||
end
|
||||
|
||||
def extra_options(options)
|
||||
encodings = Module.new do
|
||||
extend Registry
|
||||
end
|
||||
options.merge(encodings: encodings)
|
||||
options.merge(encodings: {})
|
||||
end
|
||||
end
|
||||
|
||||
@ -36,7 +33,7 @@ module HTTPX
|
||||
end
|
||||
|
||||
def option_encodings(value)
|
||||
raise TypeError, ":encodings must be a registry" unless value.respond_to?(:registry)
|
||||
raise TypeError, ":encodings must be an Hash" unless value.is_a?(Hash)
|
||||
|
||||
value
|
||||
end
|
||||
@ -49,7 +46,7 @@ module HTTPX
|
||||
if @headers.key?("range")
|
||||
@headers.delete("accept-encoding")
|
||||
else
|
||||
@headers["accept-encoding"] ||= @options.encodings.registry.keys
|
||||
@headers["accept-encoding"] ||= @options.encodings.keys
|
||||
end
|
||||
end
|
||||
end
|
||||
@ -65,7 +62,9 @@ module HTTPX
|
||||
@headers.get("content-encoding").each do |encoding|
|
||||
next if encoding == "identity"
|
||||
|
||||
@body = Encoder.new(@body, options.encodings.registry(encoding).deflater)
|
||||
next unless options.encodings.key?(encoding)
|
||||
|
||||
@body = Encoder.new(@body, options.encodings[encoding].deflater)
|
||||
end
|
||||
@headers["content-length"] = @body.bytesize unless unbounded_body?
|
||||
end
|
||||
@ -95,7 +94,9 @@ module HTTPX
|
||||
@_inflaters = @headers.get("content-encoding").filter_map do |encoding|
|
||||
next if encoding == "identity"
|
||||
|
||||
inflater = @options.encodings.registry(encoding).inflater(compressed_length)
|
||||
next unless @options.encodings.key?(encoding)
|
||||
|
||||
inflater = @options.encodings[encoding].inflater(compressed_length)
|
||||
# do not uncompress if there is no decoder available. In fact, we can't reliably
|
||||
# continue decompressing beyond that, so ignore.
|
||||
break unless inflater
|
||||
|
@ -5,13 +5,13 @@ module HTTPX
|
||||
module Compression
|
||||
module Brotli
|
||||
class << self
|
||||
def load_dependencies(_klass)
|
||||
def load_dependencies(klass)
|
||||
require "brotli"
|
||||
klass.plugin(:compression)
|
||||
end
|
||||
|
||||
def configure(klass)
|
||||
klass.plugin(:compression)
|
||||
klass.default_options.encodings.register "br", self
|
||||
def extra_options(options)
|
||||
options.merge(encodings: options.encodings.merge("br" => self))
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -4,14 +4,19 @@ module HTTPX
|
||||
module Plugins
|
||||
module Compression
|
||||
module Deflate
|
||||
def self.load_dependencies(_klass)
|
||||
require "stringio"
|
||||
require "zlib"
|
||||
end
|
||||
class << self
|
||||
def load_dependencies(_klass)
|
||||
require "stringio"
|
||||
require "zlib"
|
||||
end
|
||||
|
||||
def self.configure(klass)
|
||||
klass.plugin(:"compression/gzip")
|
||||
klass.default_options.encodings.register "deflate", self
|
||||
def configure(klass)
|
||||
klass.plugin(:"compression/gzip")
|
||||
end
|
||||
|
||||
def extra_options(options)
|
||||
options.merge(encodings: options.encodings.merge("deflate" => self))
|
||||
end
|
||||
end
|
||||
|
||||
module Deflater
|
||||
|
@ -6,12 +6,14 @@ module HTTPX
|
||||
module Plugins
|
||||
module Compression
|
||||
module GZIP
|
||||
def self.load_dependencies(*)
|
||||
require "zlib"
|
||||
end
|
||||
class << self
|
||||
def load_dependencies(*)
|
||||
require "zlib"
|
||||
end
|
||||
|
||||
def self.configure(klass)
|
||||
klass.default_options.encodings.register "gzip", self
|
||||
def extra_options(options)
|
||||
options.merge(encodings: options.encodings.merge("gzip" => self))
|
||||
end
|
||||
end
|
||||
|
||||
class Deflater
|
||||
|
@ -233,7 +233,7 @@ module HTTPX
|
||||
uri.path = rpc_method
|
||||
|
||||
headers = HEADERS.merge(
|
||||
"grpc-accept-encoding" => ["identity", *@options.encodings.registry.keys]
|
||||
"grpc-accept-encoding" => ["identity", *@options.encodings.keys]
|
||||
)
|
||||
unless deadline == Float::INFINITY
|
||||
# convert to milliseconds
|
||||
@ -249,7 +249,7 @@ module HTTPX
|
||||
|
||||
if compression
|
||||
headers["grpc-encoding"] = compression
|
||||
deflater = @options.encodings.registry(compression).deflater
|
||||
deflater = @options.encodings[compression].deflater if @options.encodings.key?(compression)
|
||||
end
|
||||
|
||||
headers.merge!(@options.call_credentials.call) if @options.call_credentials
|
||||
|
@ -47,7 +47,9 @@ module HTTPX
|
||||
data = message.byteslice(5..size + 5 - 1)
|
||||
if compressed == 1
|
||||
encodings.reverse_each do |algo|
|
||||
inflater = encoders.registry(algo).inflater(size)
|
||||
next unless encoders.key?(algo)
|
||||
|
||||
inflater = encoders[algo].inflater(size)
|
||||
data = inflater.inflate(data)
|
||||
size = data.bytesize
|
||||
end
|
||||
|
@ -12,13 +12,9 @@ module HTTPX
|
||||
VALID_H2C_VERBS = %w[GET OPTIONS HEAD].freeze
|
||||
|
||||
class << self
|
||||
def load_dependencies(*)
|
||||
def load_dependencies(klass)
|
||||
require "base64"
|
||||
end
|
||||
|
||||
def configure(klass)
|
||||
klass.plugin(:upgrade)
|
||||
klass.default_options.upgrade_handlers.register "h2c", self
|
||||
end
|
||||
|
||||
def call(connection, request, response)
|
||||
@ -26,7 +22,7 @@ module HTTPX
|
||||
end
|
||||
|
||||
def extra_options(options)
|
||||
options.merge(max_concurrent_requests: 1)
|
||||
options.merge(max_concurrent_requests: 1, upgrade_handlers: options.upgrade_handlers.merge("h2c" => self))
|
||||
end
|
||||
end
|
||||
|
||||
@ -38,7 +34,7 @@ module HTTPX
|
||||
|
||||
connection = pool.find_connection(upgrade_request.uri, upgrade_request.options)
|
||||
|
||||
return super if connection && connection.upgrade_protocol == :h2c
|
||||
return super if connection && connection.upgrade_protocol == "h2c"
|
||||
|
||||
# build upgrade request
|
||||
upgrade_request.headers.add("connection", "upgrade")
|
||||
@ -83,7 +79,7 @@ module HTTPX
|
||||
set_parser_callbacks(@parser)
|
||||
@inflight += 1
|
||||
@parser.upgrade(request, response)
|
||||
@upgrade_protocol = :h2c
|
||||
@upgrade_protocol = "h2c"
|
||||
|
||||
if request.options.max_concurrent_requests != @options.max_concurrent_requests
|
||||
@options = @options.merge(max_concurrent_requests: nil)
|
||||
|
@ -52,7 +52,7 @@ module HTTPX
|
||||
super
|
||||
meter_elapsed_time("Session: initialized!!!")
|
||||
resolver_type = @options.resolver_class
|
||||
resolver_type = Resolver.registry(resolver_type) if resolver_type.is_a?(Symbol)
|
||||
resolver_type = Resolver.resolver_for(resolver_type)
|
||||
return unless resolver_type <= Resolver::Native
|
||||
|
||||
resolver_type.prepend TrackTimeMethods
|
||||
|
@ -40,9 +40,21 @@ module HTTPX
|
||||
require "httpx/plugins/multipart/part"
|
||||
require "httpx/plugins/multipart/mime_type_detector"
|
||||
end
|
||||
end
|
||||
|
||||
def configure(*)
|
||||
Transcoder.register("form", FormTranscoder)
|
||||
module RequestBodyMethods
|
||||
private
|
||||
|
||||
def initialize_body(options)
|
||||
return FormTranscoder.encode(options.form) if options.form
|
||||
|
||||
super
|
||||
end
|
||||
end
|
||||
|
||||
module ResponseMethods
|
||||
def form
|
||||
decode(FormTranscoder)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -318,7 +318,7 @@ module HTTPX
|
||||
register_plugin :proxy, Proxy
|
||||
end
|
||||
|
||||
class ProxySSL < IO.registry["ssl"]
|
||||
class ProxySSL < SSL
|
||||
def initialize(tcp, request_uri, options)
|
||||
@io = tcp.to_io
|
||||
super(request_uri, tcp.addresses, options)
|
||||
|
@ -61,7 +61,7 @@ module HTTPX
|
||||
return unless @io.connected?
|
||||
|
||||
@parser || begin
|
||||
@parser = registry(@io.protocol).new(@write_buffer, @options.merge(max_concurrent_requests: 1))
|
||||
@parser = self.class.parser_type(@io.protocol).new(@write_buffer, @options.merge(max_concurrent_requests: 1))
|
||||
parser = @parser
|
||||
parser.extend(ProxyParser)
|
||||
parser.on(:response, &method(:__http_on_connect))
|
||||
|
@ -15,16 +15,13 @@ module HTTPX
|
||||
end
|
||||
|
||||
def extra_options(options)
|
||||
upgrade_handlers = Module.new do
|
||||
extend Registry
|
||||
end
|
||||
options.merge(upgrade_handlers: upgrade_handlers)
|
||||
options.merge(upgrade_handlers: {})
|
||||
end
|
||||
end
|
||||
|
||||
module OptionsMethods
|
||||
def option_upgrade_handlers(value)
|
||||
raise TypeError, ":upgrade_handlers must be a registry" unless value.respond_to?(:registry)
|
||||
raise TypeError, ":upgrade_handlers must be a Hash" unless value.is_a?(Hash)
|
||||
|
||||
value
|
||||
end
|
||||
@ -41,9 +38,9 @@ module HTTPX
|
||||
|
||||
upgrade_protocol = response.headers["upgrade"].split(/ *, */).first
|
||||
|
||||
return response unless upgrade_protocol && options.upgrade_handlers.registry.key?(upgrade_protocol)
|
||||
return response unless upgrade_protocol && options.upgrade_handlers.key?(upgrade_protocol)
|
||||
|
||||
protocol_handler = options.upgrade_handlers.registry(upgrade_protocol)
|
||||
protocol_handler = options.upgrade_handlers[upgrade_protocol]
|
||||
|
||||
return response unless protocol_handler
|
||||
|
||||
|
@ -10,8 +10,8 @@ module HTTPX
|
||||
#
|
||||
module H2
|
||||
class << self
|
||||
def configure(klass)
|
||||
klass.default_options.upgrade_handlers.register "h2", self
|
||||
def extra_options(options)
|
||||
options.merge(upgrade_handlers: options.upgrade_handlers.merge("h2" => self))
|
||||
end
|
||||
|
||||
def call(connection, _request, _response)
|
||||
@ -32,7 +32,7 @@ module HTTPX
|
||||
|
||||
@parser = Connection::HTTP2.new(@write_buffer, @options)
|
||||
set_parser_callbacks(@parser)
|
||||
@upgrade_protocol = :h2
|
||||
@upgrade_protocol = "h2"
|
||||
|
||||
# what's happening here:
|
||||
# a deviation from the state machine is done to perform the actions when a
|
||||
|
@ -244,7 +244,7 @@ module HTTPX
|
||||
def find_resolver_for(connection)
|
||||
connection_options = connection.options
|
||||
resolver_type = connection_options.resolver_class
|
||||
resolver_type = Resolver.registry(resolver_type) if resolver_type.is_a?(Symbol)
|
||||
resolver_type = Resolver.resolver_for(resolver_type)
|
||||
|
||||
@resolvers[resolver_type] ||= begin
|
||||
resolver_manager = if resolver_type.multi?
|
||||
|
@ -1,85 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
module HTTPX
|
||||
# Adds a general-purpose registry API to a class. It is designed to be a
|
||||
# configuration-level API, i.e. the registry is global to the class and
|
||||
# should be set on **boot time**.
|
||||
#
|
||||
# It is used internally to associate tags with handlers.
|
||||
#
|
||||
# ## Register/Fetch
|
||||
#
|
||||
# One is strongly advised to register handlers when creating the class.
|
||||
#
|
||||
# There is an instance-level method to retrieve from the registry based
|
||||
# on the tag:
|
||||
#
|
||||
# class Server
|
||||
# include HTTPX::Registry
|
||||
#
|
||||
# register "tcp", TCPHandler
|
||||
# register "ssl", SSLHandlers
|
||||
# ...
|
||||
#
|
||||
#
|
||||
# def handle(uri)
|
||||
# scheme = uri.scheme
|
||||
# handler = registry(scheme) #=> TCPHandler
|
||||
# handler.handle
|
||||
# end
|
||||
# end
|
||||
#
|
||||
module Registry
|
||||
# Base Registry Error
|
||||
class Error < Error; end
|
||||
|
||||
def self.extended(klass)
|
||||
super
|
||||
klass.extend(ClassMethods)
|
||||
end
|
||||
|
||||
def self.included(klass)
|
||||
super
|
||||
klass.extend(ClassMethods)
|
||||
klass.__send__(:include, InstanceMethods)
|
||||
end
|
||||
|
||||
# Class Methods
|
||||
module ClassMethods
|
||||
def inherited(klass)
|
||||
super
|
||||
klass.instance_variable_set(:@registry, @registry.dup)
|
||||
end
|
||||
|
||||
# @param [Object] tag the handler identifier in the registry
|
||||
# @return [Symbol, String, Object] the corresponding handler (if Symbol or String,
|
||||
# will assume it refers to an autoloaded module, and will load-and-return it).
|
||||
#
|
||||
def registry(tag = nil)
|
||||
@registry ||= {}
|
||||
return @registry if tag.nil?
|
||||
|
||||
handler = @registry[tag]
|
||||
raise(Error, "#{tag} is not registered in #{self}") unless handler
|
||||
|
||||
handler
|
||||
end
|
||||
|
||||
# @param [Object] tag the identifier for the handler in the registry
|
||||
# @return [Symbol, String, Object] the handler (if Symbol or String, it is
|
||||
# assumed to be an autoloaded module, to be loaded later)
|
||||
#
|
||||
def register(tag, handler)
|
||||
registry[tag] = handler
|
||||
end
|
||||
end
|
||||
|
||||
# Instance Methods
|
||||
module InstanceMethods
|
||||
# delegates to HTTPX::Registry#registry
|
||||
def registry(tag)
|
||||
self.class.registry(tag)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
@ -120,7 +120,7 @@ module HTTPX
|
||||
|
||||
query = []
|
||||
if (q = @options.params)
|
||||
query << Transcoder.registry("form").encode(q)
|
||||
query << Transcoder::Form.encode(q)
|
||||
end
|
||||
query << @uri.query if @uri.query
|
||||
@query = query.join("&")
|
||||
@ -160,15 +160,7 @@ module HTTPX
|
||||
|
||||
def initialize(headers, options)
|
||||
@headers = headers
|
||||
@body = if options.body
|
||||
Transcoder.registry("body").encode(options.body)
|
||||
elsif options.form
|
||||
Transcoder.registry("form").encode(options.form)
|
||||
elsif options.json
|
||||
Transcoder.registry("json").encode(options.json)
|
||||
elsif options.xml
|
||||
Transcoder.registry("xml").encode(options.xml)
|
||||
end
|
||||
@body = initialize_body(options)
|
||||
return if @body.nil?
|
||||
|
||||
@headers["content-type"] ||= @body.content_type
|
||||
@ -211,7 +203,7 @@ module HTTPX
|
||||
|
||||
def stream(body)
|
||||
encoded = body
|
||||
encoded = Transcoder.registry("chunker").encode(body.enum_for(:each)) if chunked?
|
||||
encoded = Transcoder::Chunker.encode(body.enum_for(:each)) if chunked?
|
||||
encoded
|
||||
end
|
||||
|
||||
@ -235,6 +227,20 @@ module HTTPX
|
||||
"#{unbounded_body? ? "stream" : "@bytesize=#{bytesize}"}>"
|
||||
end
|
||||
# :nocov:
|
||||
|
||||
private
|
||||
|
||||
def initialize_body(options)
|
||||
if options.body
|
||||
Transcoder::Body.encode(options.body)
|
||||
elsif options.form
|
||||
Transcoder::Form.encode(options.form)
|
||||
elsif options.json
|
||||
Transcoder::JSON.encode(options.json)
|
||||
elsif options.xml
|
||||
Transcoder::Xml.encode(options.xml)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def transition(nextstate)
|
||||
|
@ -5,8 +5,6 @@ require "ipaddr"
|
||||
|
||||
module HTTPX
|
||||
module Resolver
|
||||
extend Registry
|
||||
|
||||
RESOLVE_TIMEOUT = 5
|
||||
|
||||
require "httpx/resolver/resolver"
|
||||
@ -15,10 +13,6 @@ module HTTPX
|
||||
require "httpx/resolver/https"
|
||||
require "httpx/resolver/multi"
|
||||
|
||||
register :system, System
|
||||
register :native, Native
|
||||
register :https, HTTPS
|
||||
|
||||
@lookup_mutex = Mutex.new
|
||||
@lookups = Hash.new { |h, k| h[k] = [] }
|
||||
|
||||
@ -28,6 +22,18 @@ module HTTPX
|
||||
|
||||
module_function
|
||||
|
||||
def resolver_for(resolver_type)
|
||||
case resolver_type
|
||||
when :native then Native
|
||||
when :system then System
|
||||
when :https then HTTPS
|
||||
else
|
||||
return resolver_type if resolver_type.is_a?(Class) && resolver_type < Resolver
|
||||
|
||||
raise Error, "unsupported resolver type (#{resolver_type})"
|
||||
end
|
||||
end
|
||||
|
||||
def nolookup_resolve(hostname)
|
||||
ip_resolve(hostname) || cached_lookup(hostname) || system_resolve(hostname)
|
||||
end
|
||||
|
@ -87,32 +87,27 @@ module HTTPX
|
||||
end
|
||||
|
||||
def json(*args)
|
||||
decode("json", *args)
|
||||
decode(Transcoder::JSON, *args)
|
||||
end
|
||||
|
||||
def form
|
||||
decode("form")
|
||||
decode(Transcoder::Form)
|
||||
end
|
||||
|
||||
def xml
|
||||
decode("xml")
|
||||
decode(Transcoder::Xml)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def decode(format, *args)
|
||||
def decode(transcoder, *args)
|
||||
# TODO: check if content-type is a valid format, i.e. "application/json" for json parsing
|
||||
transcoder = Transcoder.registry(format)
|
||||
|
||||
raise Error, "no decoder available for \"#{format}\"" unless transcoder.respond_to?(:decode)
|
||||
|
||||
decoder = transcoder.decode(self)
|
||||
|
||||
raise Error, "no decoder available for \"#{format}\"" unless decoder
|
||||
raise Error, "no decoder available for \"#{transcoder}\"" unless decoder
|
||||
|
||||
decoder.call(self, *args)
|
||||
rescue Registry::Error
|
||||
raise Error, "no decoder available for \"#{format}\""
|
||||
end
|
||||
|
||||
def no_data?
|
||||
@ -203,10 +198,8 @@ module HTTPX
|
||||
rescue ArgumentError # ex: unknown encoding name - utf
|
||||
content
|
||||
end
|
||||
when nil
|
||||
"".b
|
||||
else
|
||||
@buffer
|
||||
"".b
|
||||
end
|
||||
end
|
||||
alias_method :to_str, :to_s
|
||||
|
@ -203,6 +203,7 @@ module HTTPX
|
||||
end
|
||||
|
||||
def receive_requests(requests, connections)
|
||||
# @type var responses: Array[response]
|
||||
responses = []
|
||||
|
||||
begin
|
||||
|
@ -2,8 +2,6 @@
|
||||
|
||||
module HTTPX
|
||||
module Transcoder
|
||||
extend Registry
|
||||
|
||||
using RegexpExtensions unless Regexp.method_defined?(:match?)
|
||||
|
||||
module_function
|
||||
|
@ -55,5 +55,4 @@ module HTTPX::Transcoder
|
||||
Encoder.new(body)
|
||||
end
|
||||
end
|
||||
register "body", Body
|
||||
end
|
||||
|
@ -112,5 +112,4 @@ module HTTPX::Transcoder
|
||||
Encoder.new(chunks)
|
||||
end
|
||||
end
|
||||
register "chunker", Chunker
|
||||
end
|
||||
|
@ -55,5 +55,4 @@ module HTTPX::Transcoder
|
||||
Decoder
|
||||
end
|
||||
end
|
||||
register "form", Form
|
||||
end
|
||||
|
@ -56,5 +56,4 @@ module HTTPX::Transcoder
|
||||
end
|
||||
# rubocop:enable Style/SingleLineMethods
|
||||
end
|
||||
register "json", JSON
|
||||
end
|
||||
|
@ -51,5 +51,4 @@ module HTTPX::Transcoder
|
||||
end
|
||||
end
|
||||
end
|
||||
register "xml", Xml
|
||||
end
|
||||
|
@ -1,5 +1,5 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
module HTTPX
|
||||
VERSION = "0.22.5"
|
||||
VERSION = "0.23.0"
|
||||
end
|
||||
|
@ -17,7 +17,6 @@ module HTTPX
|
||||
extend Forwardable
|
||||
include Loggable
|
||||
include Callbacks
|
||||
include HTTPX::Registry[String, Class]
|
||||
|
||||
|
||||
attr_reader type: io_type
|
||||
@ -35,7 +34,13 @@ module HTTPX
|
||||
@write_buffer: Buffer
|
||||
@inflight: Integer
|
||||
@keep_alive_timeout: Numeric?
|
||||
@timeout: Numeric?
|
||||
@current_timeout: Numeric?
|
||||
@total_timeout: Numeric?
|
||||
@io: TCP | SSL | UNIX
|
||||
@parser: HTTP1 | HTTP2 | _Parser
|
||||
@connected_at: Float
|
||||
@response_received_at: Float
|
||||
|
||||
def addresses: () -> Array[ipaddr]?
|
||||
|
||||
@ -76,6 +81,8 @@ module HTTPX
|
||||
|
||||
def deactivate: () -> void
|
||||
|
||||
def open?: () -> bool
|
||||
|
||||
def raise_timeout_error: (Numeric interval) -> void
|
||||
|
||||
private
|
||||
@ -90,17 +97,20 @@ module HTTPX
|
||||
|
||||
def send_pending: () -> void
|
||||
|
||||
def parser: () -> _Parser
|
||||
def parser: () -> (HTTP1 | HTTP2 | _Parser)
|
||||
|
||||
def send_request_to_parser: (Request request) -> void
|
||||
|
||||
def build_parser: () -> _Parser
|
||||
| (String) -> _Parser
|
||||
def build_parser: (?String protocol) -> (HTTP1 | HTTP2)
|
||||
|
||||
def set_parser_callbacks: (_Parser) -> void
|
||||
def set_parser_callbacks: (HTTP1 | HTTP2 parser) -> void
|
||||
|
||||
def transition: (Symbol) -> void
|
||||
|
||||
def handle_transition: (Symbol) -> void
|
||||
|
||||
def build_socket: (?Array[ipaddr]? addrs) -> (TCP | SSL | UNIX)
|
||||
|
||||
def on_error: (HTTPX::TimeoutError | Error | StandardError) -> void
|
||||
|
||||
def handle_error: (StandardError) -> void
|
||||
@ -112,5 +122,7 @@ module HTTPX
|
||||
def write_timeout_callback: (Request request, Numeric write_timeout) -> void
|
||||
|
||||
def read_timeout_callback: (Request request, Numeric read_timeout, ?singleton(RequestTimeoutError) error_type) -> void
|
||||
|
||||
def self.parser_type: (String protocol) -> (singleton(HTTP1) | singleton(HTTP2))
|
||||
end
|
||||
end
|
@ -2,6 +2,12 @@ module HTTPX
|
||||
class Error < StandardError
|
||||
end
|
||||
|
||||
class UnsupportedSchemeError < Error
|
||||
end
|
||||
|
||||
class ConnectionError < Error
|
||||
end
|
||||
|
||||
class TimeoutError < Error
|
||||
attr_reader timeout: Numeric
|
||||
|
||||
@ -55,4 +61,7 @@ module HTTPX
|
||||
|
||||
def initialize: (Connection connection, String hostname, ?String message) -> untyped
|
||||
end
|
||||
|
||||
class MisdirectedRequestError < HTTPError
|
||||
end
|
||||
end
|
@ -1,6 +1,9 @@
|
||||
module HTTPX
|
||||
IPRegex: Regexp
|
||||
|
||||
class TLSError < OpenSSL::SSL::SSLError
|
||||
end
|
||||
|
||||
class SSL < TCP
|
||||
TLS_OPTIONS: Hash[Symbol, untyped]
|
||||
|
||||
|
@ -10,7 +10,7 @@ module HTTPX
|
||||
|
||||
attr_reader state: Symbol
|
||||
|
||||
attr_reader interests: Symbol
|
||||
attr_reader interests: io_interests
|
||||
|
||||
alias host ip
|
||||
|
||||
|
@ -46,7 +46,7 @@ module HTTPX
|
||||
attr_reader body_threshold_size: Integer
|
||||
|
||||
# transport
|
||||
attr_reader transport: String?
|
||||
attr_reader transport: "unix" | nil
|
||||
|
||||
# transport_options
|
||||
attr_reader transport_options: Hash[untyped, untyped]?
|
||||
|
@ -1,7 +1,6 @@
|
||||
module HTTPX
|
||||
module Plugins
|
||||
module Compression
|
||||
type encodings_registry = Registry[Symbol, Class]
|
||||
|
||||
type deflatable = _Reader | _ToS
|
||||
|
||||
@ -16,12 +15,17 @@ module HTTPX
|
||||
def initialize: (Integer | Float bytesize) -> untyped
|
||||
end
|
||||
|
||||
interface _Compressor
|
||||
def deflater: () -> _Deflater
|
||||
def inflater: (Integer | Float bytesize) -> _Inflater
|
||||
end
|
||||
|
||||
def self.configure: (singleton(Session)) -> void
|
||||
|
||||
interface _CompressionOptions
|
||||
def compression_threshold_size: () -> Integer?
|
||||
|
||||
def encodings: () -> encodings_registry?
|
||||
def encodings: () -> Hash[String, _Compressor]
|
||||
end
|
||||
|
||||
def self.extra_options: (Options) -> (Options & _CompressionOptions)
|
||||
|
@ -13,8 +13,8 @@ module HTTPX
|
||||
|
||||
def []: (uri) -> Array[Cookie]
|
||||
|
||||
def each: (?uri) { (Cookie) -> void } -> void
|
||||
| (?uri) -> Enumerable[Cookie]
|
||||
def each: (?uri?) { (Cookie) -> void } -> void
|
||||
| (?uri?) -> Enumerable[Cookie]
|
||||
|
||||
def merge: (_Each[cookie] cookies) -> instance
|
||||
|
||||
|
@ -39,8 +39,8 @@ module HTTPX
|
||||
|
||||
def self?.encode: (String bytes, ?deflater: Compression::_Deflater?) -> String
|
||||
|
||||
def self?.decode: (String message, encodings: Array[String], encoders: Compression::encodings_registry) -> String
|
||||
| (String message, encodings: Array[String], encoders: Compression::encodings_registry) { (String) -> void } -> void
|
||||
def self?.decode: (String message, encodings: Array[String], encoders: Hash[String, Compression::_Compressor]) -> String
|
||||
| (String message, encodings: Array[String], encoders: Hash[String, Compression::_Compressor]) { (String) -> void } -> void
|
||||
|
||||
def self?.cancel: (Request) -> void
|
||||
|
||||
@ -65,7 +65,7 @@ module HTTPX
|
||||
module ResponseMethods
|
||||
def merge_headers: (headers_input trailers) -> void
|
||||
|
||||
def encoders: () -> Compression::encodings_registry
|
||||
def encoders: () -> Hash[String, Compression::_Compressor]
|
||||
end
|
||||
|
||||
module InstanceMethods
|
||||
|
@ -2,7 +2,7 @@ module HTTPX
|
||||
module Plugins
|
||||
module Retries
|
||||
MAX_RETRIES: Integer
|
||||
IDEMPOTENT_METHODS: Array[verb]
|
||||
IDEMPOTENT_METHODS: Array[String]
|
||||
RETRYABLE_ERRORS: Array[singleton(StandardError)]
|
||||
DEFAULT_JITTER: ^(Numeric) -> Numeric
|
||||
|
||||
|
@ -1,18 +1,20 @@
|
||||
module HTTPX
|
||||
module Plugins
|
||||
module Upgrade
|
||||
type handlers_registry = Registry[Symbol, Class]
|
||||
interface _Upgrader
|
||||
def call: (Connection connection, Request request, Response response) -> void
|
||||
end
|
||||
|
||||
def self.configure: (singleton(Session)) -> void
|
||||
|
||||
interface _UpgradeOptions
|
||||
def upgrade_handlers: () -> handlers_registry?
|
||||
def upgrade_handlers: () -> Hash[String, _Upgrader]
|
||||
end
|
||||
|
||||
def self.extra_options: (Options) -> (Options & _UpgradeOptions)
|
||||
|
||||
module ConnectionMethods
|
||||
attr_reader upgrade_protocol: Symbol?
|
||||
attr_reader upgrade_protocol: String?
|
||||
attr_reader hijacked: boolish
|
||||
|
||||
def hijack_io: () -> void
|
||||
|
@ -1,13 +0,0 @@
|
||||
module HTTPX::Registry[unchecked out T, unchecked out V]
|
||||
class Error < HTTPX::Error
|
||||
end
|
||||
|
||||
# type registrable = Symbol | String | Class
|
||||
|
||||
def self.registry: [T, V] (T) -> Class
|
||||
| [T, V] () -> Hash[T, V]
|
||||
|
||||
def self.register: [T, V] (T tag, V handler) -> void
|
||||
|
||||
def registry: (?T tag) -> V
|
||||
end
|
@ -7,7 +7,7 @@ module HTTPX
|
||||
USER_AGENT: String
|
||||
|
||||
attr_reader verb: verb
|
||||
attr_reader uri: URI::Generic
|
||||
attr_reader uri: URI::HTTP | URI::HTTPS
|
||||
attr_reader headers: Headers
|
||||
attr_reader body: Body
|
||||
attr_reader state: Symbol
|
||||
@ -56,6 +56,10 @@ module HTTPX
|
||||
|
||||
def request_timeout: () -> Numeric
|
||||
|
||||
private
|
||||
|
||||
def initialize_body: (Options options) -> Transcoder::_Encoder?
|
||||
|
||||
class Body
|
||||
@headers: Headers
|
||||
@body: body_encoder?
|
||||
|
@ -2,8 +2,6 @@ module HTTPX
|
||||
type ipaddr = IPAddr | String
|
||||
|
||||
module Resolver
|
||||
extend Registry[Symbol, Class]
|
||||
|
||||
RESOLVE_TIMEOUT: Integer | Float
|
||||
|
||||
@lookup_mutex: Thread::Mutex
|
||||
@ -21,6 +19,11 @@ module HTTPX
|
||||
|
||||
def system_resolve: (String hostname) -> Array[IPAddr]?
|
||||
|
||||
def self?.resolver_for: (:native resolver_type) -> singleton(Native) |
|
||||
(:system resolver_type) -> singleton(System) |
|
||||
(:https resolver_type) -> singleton(HTTPS) |
|
||||
[U] (U resolver_type) -> U
|
||||
|
||||
def self?.cached_lookup: (String hostname) -> Array[IPAddr]?
|
||||
|
||||
def self?.cached_lookup_set: (String hostname, ip_family family, Array[dns_result] addresses) -> void
|
||||
|
@ -41,14 +41,14 @@ module HTTPX
|
||||
def initialize: (Request request, String | Integer status, String version, headers?) -> untyped
|
||||
def no_data?: () -> bool
|
||||
|
||||
def decode:(String format, ?untyped options) -> untyped
|
||||
def decode:(Transcoder::_Decode transcoder, ?untyped options) -> untyped
|
||||
|
||||
class Body
|
||||
include _Reader
|
||||
include _ToS
|
||||
include _ToStr
|
||||
|
||||
attr_reader encoding: String
|
||||
attr_reader encoding: Encoding | String
|
||||
|
||||
@response: Response
|
||||
@headers: Headers
|
||||
|
@ -11,8 +11,6 @@ module HTTPX
|
||||
|
||||
def self.plugin: (Symbol | Module plugin, ?options? options) ?{ (Class) -> void } -> singleton(Session)
|
||||
|
||||
def self.default_options: -> Options
|
||||
|
||||
def wrap: () { (instance) -> void } -> void
|
||||
|
||||
def close: (*untyped) -> void
|
||||
@ -41,12 +39,14 @@ module HTTPX
|
||||
| (verb, _Each[[uri, options]], Options) -> Array[Request]
|
||||
| (verb, _Each[uri], options) -> Array[Request]
|
||||
|
||||
def build_connection: (URI::Generic, Options) -> Connection
|
||||
def build_connection: (URI::HTTP | URI::HTTPS uri, Options options) -> Connection
|
||||
|
||||
def send_requests: (*Request) -> Array[response]
|
||||
|
||||
def _send_requests: (Array[Request]) -> Array[Connection]
|
||||
|
||||
def receive_requests: (Array[Request], Array[Connection]) -> Array[response]
|
||||
|
||||
attr_reader self.default_options: Options
|
||||
end
|
||||
end
|
@ -4,11 +4,6 @@ module HTTPX
|
||||
type body_encoder = Transcoder::_Encoder | _Each[String]
|
||||
|
||||
module Transcoder
|
||||
def self?.registry: (String tag) -> _Encode
|
||||
| () -> Hash[String, _Encode]
|
||||
|
||||
def self?.register: (String tag, _Encode handler) -> void
|
||||
|
||||
def self?.normalize_keys: [U] (_ToS key, _ToAry[untyped] | _ToHash[_ToS, untyped] | untyped value, ?(^(untyped value) -> bool | nil) cond) { (String, ?untyped) -> U } -> U
|
||||
|
||||
def self?.normalize_query: (Hash[String, untyped] params, String name, String v, Integer depth) -> void
|
||||
@ -17,6 +12,10 @@ module HTTPX
|
||||
def encode: (untyped payload) -> body_encoder
|
||||
end
|
||||
|
||||
interface _Decode
|
||||
def decode: (HTTPX::Response response) -> _Decoder
|
||||
end
|
||||
|
||||
interface _Encoder
|
||||
def bytesize: () -> (Integer | Float)
|
||||
end
|
||||
|
@ -1,5 +1,5 @@
|
||||
module HTTPX::Transcoder
|
||||
module XML
|
||||
module Xml
|
||||
|
||||
def self?.encode: (untyped xml) -> Encoder
|
||||
def self?.decode: (HTTPX::Response response) -> _Decoder
|
||||
|
16
test/compression_test.rb
Normal file
@ -0,0 +1,16 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require_relative "test_helper"
|
||||
require "httpx/plugins/compression"
|
||||
|
||||
class CompressionTest < Minitest::Test
|
||||
include HTTPX
|
||||
|
||||
def test_ignore_encoding_on_range
|
||||
session = HTTPX.plugin(:compression)
|
||||
request = session.build_request("GET", "http://example.com")
|
||||
assert request.headers.key?("accept-encoding")
|
||||
range_request = session.build_request("GET", "http://example.com", headers: { "range" => "bytes=100-200" })
|
||||
assert !range_request.headers.key?("accept-encoding")
|
||||
end
|
||||
end
|
115
test/cookie_jar_test.rb
Normal file
@ -0,0 +1,115 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require_relative "test_helper"
|
||||
|
||||
class CookieJarTest < Minitest::Test
|
||||
def test_plugin_cookies_jar
|
||||
HTTPX.plugin(:cookies) # force loading the modules
|
||||
|
||||
# Test special cases
|
||||
special_jar = HTTPX::Plugins::Cookies::Jar.new
|
||||
special_jar.parse(%(a="b"; Path=/, c=d; Path=/, e="f\\"; \\"g"))
|
||||
cookies = special_jar[jar_cookies_uri]
|
||||
assert(cookies.one? { |cookie| cookie.name == "a" && cookie.value == "b" })
|
||||
assert(cookies.one? { |cookie| cookie.name == "c" && cookie.value == "d" })
|
||||
assert(cookies.one? { |cookie| cookie.name == "e" && cookie.value == "f\"; \"g" })
|
||||
|
||||
# Test secure parameter
|
||||
secure_jar = HTTPX::Plugins::Cookies::Jar.new
|
||||
secure_jar.parse(%(a=b; Path=/; Secure))
|
||||
assert !secure_jar[jar_cookies_uri(scheme: "https")].empty?, "cookie jar should contain the secure cookie"
|
||||
assert secure_jar[jar_cookies_uri(scheme: "http")].empty?, "cookie jar should not contain the secure cookie"
|
||||
|
||||
# Test path parameter
|
||||
path_jar = HTTPX::Plugins::Cookies::Jar.new
|
||||
path_jar.parse(%(a=b; Path=/cookies))
|
||||
assert path_jar[jar_cookies_uri("/")].empty?
|
||||
assert !path_jar[jar_cookies_uri("/cookies")].empty?
|
||||
assert !path_jar[jar_cookies_uri("/cookies/set")].empty?
|
||||
|
||||
# Test expires
|
||||
maxage_jar = HTTPX::Plugins::Cookies::Jar.new
|
||||
maxage_jar.parse(%(a=b; Path=/; Max-Age=2))
|
||||
assert !maxage_jar[jar_cookies_uri].empty?
|
||||
sleep 3
|
||||
assert maxage_jar[jar_cookies_uri].empty?
|
||||
|
||||
expires_jar = HTTPX::Plugins::Cookies::Jar.new
|
||||
expires_jar.parse(%(a=b; Path=/; Expires=Sat, 02 Nov 2019 15:24:00 GMT))
|
||||
assert expires_jar[jar_cookies_uri].empty?
|
||||
|
||||
# regression test
|
||||
rfc2616_expires_jar = HTTPX::Plugins::Cookies::Jar.new
|
||||
rfc2616_expires_jar.parse(%(a=b; Path=/; Expires=Fri, 17-Feb-2033 12:43:41 GMT))
|
||||
assert !rfc2616_expires_jar[jar_cookies_uri].empty?
|
||||
|
||||
# Test domain
|
||||
domain_jar = HTTPX::Plugins::Cookies::Jar.new
|
||||
domain_jar.parse(%(a=b; Path=/; Domain=.google.com))
|
||||
assert domain_jar[jar_cookies_uri].empty?
|
||||
assert !domain_jar["http://www.google.com/"].empty?
|
||||
|
||||
ipv4_domain_jar = HTTPX::Plugins::Cookies::Jar.new
|
||||
ipv4_domain_jar.parse(%(a=b; Path=/; Domain=137.1.0.12))
|
||||
assert ipv4_domain_jar["http://www.google.com/"].empty?
|
||||
assert !ipv4_domain_jar["http://137.1.0.12/"].empty?
|
||||
|
||||
ipv6_domain_jar = HTTPX::Plugins::Cookies::Jar.new
|
||||
ipv6_domain_jar.parse(%(a=b; Path=/; Domain=[fe80::1]))
|
||||
assert ipv6_domain_jar["http://www.google.com/"].empty?
|
||||
assert !ipv6_domain_jar["http://[fe80::1]/"].empty?
|
||||
|
||||
# Test duplicate
|
||||
dup_jar = HTTPX::Plugins::Cookies::Jar.new
|
||||
dup_jar.parse(%(a=c, a=a, a=b))
|
||||
cookies = dup_jar[jar_cookies_uri]
|
||||
assert cookies.size == 1, "should only have kept one of the received \"a\" cookies"
|
||||
cookie = cookies.first
|
||||
assert cookie.name == "a", "unexpected name"
|
||||
assert cookie.value == "b", "unexpected value, should have been \"b\", instead it's \"#{cookie.value}\""
|
||||
end
|
||||
|
||||
def test_plugin_cookies_jar_merge
|
||||
HTTPX.plugin(:cookies) # force loading the modules
|
||||
|
||||
jar = HTTPX::Plugins::Cookies::Jar.new
|
||||
assert jar.each.to_a == []
|
||||
assert jar.merge("a" => "b").each.map { |c| [c.name, c.value] } == [%w[a b]]
|
||||
assert jar.merge([HTTPX::Plugins::Cookies::Cookie.new("a", "b")]).each.map { |c| [c.name, c.value] } == [%w[a b]]
|
||||
assert jar.merge([{ name: "a", value: "b" }]).each.map { |c| [c.name, c.value] } == [%w[a b]]
|
||||
end
|
||||
|
||||
def test_plugins_cookies_cookie
|
||||
HTTPX.plugin(:cookies) # force loading the modules
|
||||
|
||||
# match against uris
|
||||
acc_c1 = HTTPX::Plugins::Cookies::Cookie.new("a", "b")
|
||||
assert acc_c1.send(:acceptable_from_uri?, "https://www.google.com")
|
||||
acc_c2 = HTTPX::Plugins::Cookies::Cookie.new("a", "b", domain: ".google.com")
|
||||
assert acc_c2.send(:acceptable_from_uri?, "https://www.google.com")
|
||||
assert !acc_c2.send(:acceptable_from_uri?, "https://nghttp2.org")
|
||||
acc_c3 = HTTPX::Plugins::Cookies::Cookie.new("a", "b", domain: "google.com")
|
||||
assert !acc_c3.send(:acceptable_from_uri?, "https://www.google.com")
|
||||
|
||||
# quoting funny characters
|
||||
sch_cookie = HTTPX::Plugins::Cookies::Cookie.new("Bar", "value\"4")
|
||||
assert sch_cookie.cookie_value == %(Bar="value\\"4")
|
||||
|
||||
# sorting
|
||||
c1 = HTTPX::Plugins::Cookies::Cookie.new("a", "b")
|
||||
c2 = HTTPX::Plugins::Cookies::Cookie.new("a", "bc")
|
||||
assert [c2, c1].sort == [c1, c2]
|
||||
|
||||
c3 = HTTPX::Plugins::Cookies::Cookie.new("a", "b", path: "/cookies")
|
||||
assert [c3, c2, c1].sort == [c3, c1, c2]
|
||||
|
||||
c4 = HTTPX::Plugins::Cookies::Cookie.new("a", "b", created_at: (Time.now - (60 * 60 * 24)))
|
||||
assert [c4, c3, c2, c1].sort == [c3, c4, c1, c2]
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def jar_cookies_uri(path = "/cookies", scheme: "http")
|
||||
"#{scheme}://example.com#{path}"
|
||||
end
|
||||
end
|
@ -10,6 +10,11 @@ class ErrorResponseTest < Minitest::Test
|
||||
assert r1.status == "wow"
|
||||
end
|
||||
|
||||
def test_error_response_finished?
|
||||
r1 = ErrorResponse.new(request_mock, RuntimeError.new("wow"), {})
|
||||
assert r1.finished?
|
||||
end
|
||||
|
||||
def test_error_response_error
|
||||
error = RuntimeError.new("wow")
|
||||
r1 = ErrorResponse.new(request_mock, error, {})
|
||||
@ -28,9 +33,18 @@ class ErrorResponseTest < Minitest::Test
|
||||
assert str.match(/wow \(.*RuntimeError.*\)/), "expected \"wow (RuntimeError)\" in \"#{str}\""
|
||||
end
|
||||
|
||||
def test_error_response_close
|
||||
response = Response.new(request_mock, 200, "1.1", {})
|
||||
request_mock.response = response
|
||||
r = ErrorResponse.new(request_mock, RuntimeError.new("wow"), {})
|
||||
assert !response.body.closed?
|
||||
r.close
|
||||
assert response.body.closed?
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def request_mock
|
||||
Request.new("GET", "http://example.com/")
|
||||
@request_mock ||= Request.new("GET", "http://example.com/")
|
||||
end
|
||||
end
|
||||
|
@ -17,6 +17,17 @@ class ProxyTest < Minitest::Test
|
||||
assert params != 1
|
||||
end
|
||||
|
||||
%w[basic digest ntlm].each do |auth_method|
|
||||
define_method :"test_proxy_factory_#{auth_method}" do
|
||||
basic_proxy_opts = HTTPX.plugin(:proxy).__send__(:"with_proxy_#{auth_method}_auth", username: "user",
|
||||
password: "pass").instance_variable_get(:@options)
|
||||
proxy = basic_proxy_opts.proxy
|
||||
assert proxy[:username] == "user"
|
||||
assert proxy[:password] == "pass"
|
||||
assert proxy[:scheme] == auth_method
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def parameters(uri: "http://proxy", **args)
|
||||
|
@ -29,4 +29,13 @@ class ResolverTest < Minitest::Test
|
||||
ips = Resolver.cached_lookup("test.com")
|
||||
assert ips == %w[127.0.0.2 ::2 ::3]
|
||||
end
|
||||
|
||||
def test_resolver_for
|
||||
assert Resolver.resolver_for(:native) == Resolver::Native
|
||||
assert Resolver.resolver_for(:system) == Resolver::System
|
||||
assert Resolver.resolver_for(:https) == Resolver::HTTPS
|
||||
assert Resolver.resolver_for(Resolver::HTTPS) == Resolver::HTTPS
|
||||
ex = assert_raises(Error) { Resolver.resolver_for(Object) }
|
||||
assert(ex.message.include?("unsupported resolver type"))
|
||||
end
|
||||
end
|
||||
|
@ -82,6 +82,23 @@ class ResponseTest < Minitest::Test
|
||||
body5.write(payload)
|
||||
assert body5 == "a" * 2048, "body messed up with file"
|
||||
assert body5 == StringIO.new("a" * 2048), "body messed up with file"
|
||||
|
||||
text = ("ã" * 2048).b
|
||||
body6 = Response::Body.new(Response.new(request, 200, "2.0", { "content-type" => "text/html; charset=utf" }),
|
||||
Options.new(body_threshold_size: 1024))
|
||||
body6.write(text)
|
||||
req_text = body6.to_s
|
||||
assert text == req_text, "request body must be in original encoding (#{req_text})"
|
||||
end
|
||||
|
||||
def test_response_body_close
|
||||
payload = "a" * 512
|
||||
body = Response::Body.new(Response.new(request, 200, "2.0", {}), Options.new(body_threshold_size: 1024))
|
||||
assert !body.closed?
|
||||
body.write(payload)
|
||||
assert !body.closed?
|
||||
body.close
|
||||
assert body.closed?
|
||||
end
|
||||
|
||||
def test_response_body_copy_to_memory
|
||||
@ -205,9 +222,6 @@ class ResponseTest < Minitest::Test
|
||||
form4_response = Response.new(request, 200, "2.0", { "content-type" => "application/x-www-form-urlencoded" })
|
||||
form4_response << "[]"
|
||||
assert form4_response.form == {}
|
||||
|
||||
error = assert_raises(HTTPX::Error) { form2_response.__send__(:decode, "bla") }
|
||||
assert error.message.include?("no decoder available for"), "failed with unexpected error"
|
||||
end
|
||||
|
||||
private
|
||||
|
@ -50,6 +50,8 @@ module Requests
|
||||
verify_status(response, 401)
|
||||
response = session.get(build_uri("/get"))
|
||||
verify_status(response, 200)
|
||||
response = session.digest_auth(user, pass).get(build_uri("/get"))
|
||||
verify_status(response, 200)
|
||||
end
|
||||
|
||||
# NTLM
|
||||
@ -68,6 +70,11 @@ module Requests
|
||||
response = http.ntlm_auth("user", "password").get(uri)
|
||||
verify_status(response, 200)
|
||||
|
||||
# bypass
|
||||
response = http.get(build_uri("/get"))
|
||||
verify_status(response, 200)
|
||||
response = http.ntlm_auth("user", "password").get(build_uri("/get"))
|
||||
verify_status(response, 200)
|
||||
# invalid_response = http.ntlm_authentication("user", "fake").get(uri)
|
||||
# verify_status(invalid_response, 401)
|
||||
end
|
||||
|
@ -103,104 +103,6 @@ module Requests
|
||||
verify_cookies(body["cookies"], session_cookies)
|
||||
end
|
||||
|
||||
def test_plugin_cookies_jar
|
||||
HTTPX.plugin(:cookies) # force loading the modules
|
||||
|
||||
# Test special cases
|
||||
special_jar = HTTPX::Plugins::Cookies::Jar.new
|
||||
special_jar.parse(%(a="b"; Path=/, c=d; Path=/, e="f\\"; \\"g"))
|
||||
cookies = special_jar[jar_cookies_uri]
|
||||
assert(cookies.one? { |cookie| cookie.name == "a" && cookie.value == "b" })
|
||||
assert(cookies.one? { |cookie| cookie.name == "c" && cookie.value == "d" })
|
||||
assert(cookies.one? { |cookie| cookie.name == "e" && cookie.value == "f\"; \"g" })
|
||||
|
||||
# Test secure parameter
|
||||
secure_jar = HTTPX::Plugins::Cookies::Jar.new
|
||||
secure_jar.parse(%(a=b; Path=/; Secure))
|
||||
cookies = secure_jar[jar_cookies_uri]
|
||||
if URI(cookies_uri).scheme == "https"
|
||||
assert !cookies.empty?, "cookie jar should contain the secure cookie"
|
||||
else
|
||||
assert cookies.empty?, "cookie jar should not contain the secure cookie"
|
||||
end
|
||||
|
||||
# Test path parameter
|
||||
path_jar = HTTPX::Plugins::Cookies::Jar.new
|
||||
path_jar.parse(%(a=b; Path=/cookies))
|
||||
assert path_jar[jar_cookies_uri("/")].empty?
|
||||
assert !path_jar[jar_cookies_uri("/cookies")].empty?
|
||||
assert !path_jar[jar_cookies_uri("/cookies/set")].empty?
|
||||
|
||||
# Test expires
|
||||
maxage_jar = HTTPX::Plugins::Cookies::Jar.new
|
||||
maxage_jar.parse(%(a=b; Path=/; Max-Age=2))
|
||||
assert !maxage_jar[jar_cookies_uri].empty?
|
||||
sleep 3
|
||||
assert maxage_jar[jar_cookies_uri].empty?
|
||||
|
||||
expires_jar = HTTPX::Plugins::Cookies::Jar.new
|
||||
expires_jar.parse(%(a=b; Path=/; Expires=Sat, 02 Nov 2019 15:24:00 GMT))
|
||||
assert expires_jar[jar_cookies_uri].empty?
|
||||
|
||||
# regression test
|
||||
rfc2616_expires_jar = HTTPX::Plugins::Cookies::Jar.new
|
||||
rfc2616_expires_jar.parse(%(a=b; Path=/; Expires=Fri, 17-Feb-2033 12:43:41 GMT))
|
||||
assert !rfc2616_expires_jar[jar_cookies_uri].empty?
|
||||
|
||||
# Test domain
|
||||
domain_jar = HTTPX::Plugins::Cookies::Jar.new
|
||||
domain_jar.parse(%(a=b; Path=/; Domain=.google.com))
|
||||
assert domain_jar[jar_cookies_uri].empty?
|
||||
assert !domain_jar["http://www.google.com/"].empty?
|
||||
|
||||
ipv4_domain_jar = HTTPX::Plugins::Cookies::Jar.new
|
||||
ipv4_domain_jar.parse(%(a=b; Path=/; Domain=137.1.0.12))
|
||||
assert ipv4_domain_jar["http://www.google.com/"].empty?
|
||||
assert !ipv4_domain_jar["http://137.1.0.12/"].empty?
|
||||
|
||||
ipv6_domain_jar = HTTPX::Plugins::Cookies::Jar.new
|
||||
ipv6_domain_jar.parse(%(a=b; Path=/; Domain=[fe80::1]))
|
||||
assert ipv6_domain_jar["http://www.google.com/"].empty?
|
||||
assert !ipv6_domain_jar["http://[fe80::1]/"].empty?
|
||||
|
||||
# Test duplicate
|
||||
dup_jar = HTTPX::Plugins::Cookies::Jar.new
|
||||
dup_jar.parse(%(a=c, a=a, a=b))
|
||||
cookies = dup_jar[jar_cookies_uri]
|
||||
assert cookies.size == 1, "should only have kept one of the received \"a\" cookies"
|
||||
cookie = cookies.first
|
||||
assert cookie.name == "a", "unexpected name"
|
||||
assert cookie.value == "b", "unexpected value, should have been \"b\", instead it's \"#{cookie.value}\""
|
||||
end
|
||||
|
||||
def test_plugins_cookies_cookie
|
||||
HTTPX.plugin(:cookies) # force loading the modules
|
||||
|
||||
# match against uris
|
||||
acc_c1 = HTTPX::Plugins::Cookies::Cookie.new("a", "b")
|
||||
assert acc_c1.send(:acceptable_from_uri?, "https://www.google.com")
|
||||
acc_c2 = HTTPX::Plugins::Cookies::Cookie.new("a", "b", domain: ".google.com")
|
||||
assert acc_c2.send(:acceptable_from_uri?, "https://www.google.com")
|
||||
assert !acc_c2.send(:acceptable_from_uri?, "https://nghttp2.org")
|
||||
acc_c3 = HTTPX::Plugins::Cookies::Cookie.new("a", "b", domain: "google.com")
|
||||
assert !acc_c3.send(:acceptable_from_uri?, "https://www.google.com")
|
||||
|
||||
# quoting funny characters
|
||||
sch_cookie = HTTPX::Plugins::Cookies::Cookie.new("Bar", "value\"4")
|
||||
assert sch_cookie.cookie_value == %(Bar="value\\"4")
|
||||
|
||||
# sorting
|
||||
c1 = HTTPX::Plugins::Cookies::Cookie.new("a", "b")
|
||||
c2 = HTTPX::Plugins::Cookies::Cookie.new("a", "bc")
|
||||
assert [c2, c1].sort == [c1, c2]
|
||||
|
||||
c3 = HTTPX::Plugins::Cookies::Cookie.new("a", "b", path: "/cookies")
|
||||
assert [c3, c2, c1].sort == [c3, c1, c2]
|
||||
|
||||
c4 = HTTPX::Plugins::Cookies::Cookie.new("a", "b", created_at: (Time.now - (60 * 60 * 24)))
|
||||
assert [c4, c3, c2, c1].sort == [c3, c4, c1, c2]
|
||||
end
|
||||
|
||||
def test_plugin_cookies_jar_management
|
||||
cookie_header = lambda do |response|
|
||||
JSON.parse(response.to_s)["headers"]
|
||||
@ -223,11 +125,6 @@ module Requests
|
||||
|
||||
private
|
||||
|
||||
def jar_cookies_uri(path = "/cookies")
|
||||
jar_origin = URI(origin).origin
|
||||
build_uri(path, jar_origin)
|
||||
end
|
||||
|
||||
def cookies_uri
|
||||
build_uri("/cookies")
|
||||
end
|
||||
|
@ -112,9 +112,8 @@ end
|
||||
|
||||
module WSTestPlugin
|
||||
class << self
|
||||
def configure(klass)
|
||||
def load_dependencies(klass)
|
||||
klass.plugin(:upgrade)
|
||||
klass.default_options.upgrade_handlers.register("websocket", self)
|
||||
end
|
||||
|
||||
def call(connection, request, response)
|
||||
@ -128,7 +127,7 @@ module WSTestPlugin
|
||||
end
|
||||
|
||||
def extra_options(options)
|
||||
options.merge(max_concurrent_requests: 1)
|
||||
options.merge(max_concurrent_requests: 1, upgrade_handlers: options.upgrade_handlers.merge("websocket" => self))
|
||||
end
|
||||
end
|
||||
|
||||
|