em-http-request-0.3.0/0000755000175000017500000000000011772656255014102 5ustar tfheentfheenem-http-request-0.3.0/em-http-request.gemspec0000644000175000017500000000224411772656255020515 0ustar tfheentfheen# -*- encoding: utf-8 -*- $:.push File.expand_path("../lib", __FILE__) require "em-http/version" Gem::Specification.new do |s| s.name = "em-http-request" s.version = EventMachine::HttpRequest::VERSION s.platform = Gem::Platform::RUBY s.authors = ["Ilya Grigorik"] s.email = ["ilya@igvita.com"] s.homepage = "http://github.com/igrigorik/em-http-request" s.summary = "EventMachine based, async HTTP Request client" s.description = s.summary s.rubyforge_project = "em-http-request" s.add_dependency "eventmachine", ">= 0.12.9" s.add_dependency "addressable", ">= 2.0.0" s.add_dependency "escape_utils" s.add_development_dependency "rspec" s.add_development_dependency "rake" s.add_development_dependency "em-websocket" s.add_development_dependency "rack" s.add_development_dependency "mongrel", "~> 1.2.0.pre2" s.extensions = ["ext/buffer/extconf.rb", "ext/http11_client/extconf.rb"] s.files = `git ls-files`.split("\n") s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n") s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) } s.require_paths = ["lib"] endem-http-request-0.3.0/spec/0000755000175000017500000000000011772656255015034 5ustar tfheentfheenem-http-request-0.3.0/spec/stub_server.rb0000644000175000017500000000062711772656255017731 0ustar tfheentfheenclass StubServer module Server def receive_data(data) send_data @response close_connection_after_writing end def response=(response) @response = response end end def initialize(response, port=8081) @sig = EventMachine::start_server("127.0.0.1", port, Server) { |s| s.response = response } end def stop EventMachine.stop_server @sig end end em-http-request-0.3.0/spec/request_spec.rb0000644000175000017500000007420411772656255020072 0ustar tfheentfheenrequire 'helper' require 'stallion' require 'stub_server' describe EventMachine::HttpRequest do def failed(http=nil) EventMachine.stop http ? 
fail(http.error) : fail end it "should fail GET on DNS timeout" do EventMachine.run { EventMachine.heartbeat_interval = 0.1 http = EventMachine::HttpRequest.new('http://127.1.1.1/').get :timeout => 1 http.callback { failed(http) } http.errback { http.response_header.status.should == 0 EventMachine.stop } } end it "should fail GET on invalid host" do EventMachine.run { EventMachine.heartbeat_interval = 0.1 http = EventMachine::HttpRequest.new('http://somethinglocal/').get :timeout => 1 http.callback { failed(http) } http.errback { http.response_header.status.should == 0 http.error.should match(/unable to resolve server address/) http.uri.to_s.should match('http://somethinglocal:80/') EventMachine.stop } } end it "should raise error on invalid URL" do EventMachine.run { lambda { EventMachine::HttpRequest.new('random?text').get }.should raise_error EM.stop } end it "should succeed GET on missing path" do EventMachine.run { lambda { EventMachine::HttpRequest.new('http://127.0.0.1:8080').get }.should_not raise_error(ArgumentError) EventMachine.stop } end it "should perform successfull GET" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/').get http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response.should match(/Hello/) EventMachine.stop } } end context "host override" do it "should accept optional host" do EventMachine.run { http = EventMachine::HttpRequest.new('http://google.com:8080/').get :host => '127.0.0.1' http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response.should match(/Hello/) EventMachine.stop } } end it "should reset host on redirect" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/redirect').get :redirects => 1, :host => '127.0.0.1' http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response_header["CONTENT_ENCODING"].should == "gzip" http.response.should == "compressed" http.last_effective_url.to_s.should == 'http://127.0.0.1:8080/gzip' http.redirects.should == 1 EM.stop } } end it "should redirect with missing content-length" do EventMachine.run { @s = StubServer.new("HTTP/1.0 301 MOVED PERMANENTLY\r\nlocation: http://127.0.0.1:8080/redirect\r\n\r\n") http = EventMachine::HttpRequest.new('http://127.0.0.1:8081/').get :redirects => 2 http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response_header["CONTENT_ENCODING"].should == "gzip" http.response.should == "compressed" http.last_effective_url.to_s.should == 'http://127.0.0.1:8080/gzip' http.redirects.should == 2 @s.stop EM.stop } } end it "should follow redirects on HEAD method" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/redirect/head').head :redirects => 1 http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.last_effective_url.to_s.should == 'http://127.0.0.1:8080/' EM.stop } } end it "should follow redirects on HEAD method (external)" do EventMachine.run { http = EventMachine::HttpRequest.new('http://www.google.com/').head :redirects => 1 http.errback { failed(http) } http.callback { http.response_header.status.should == 200 EM.stop } } end end it "should perform successfull GET with a URI passed as argument" do EventMachine.run { uri = URI.parse('http://127.0.0.1:8080/') http = EventMachine::HttpRequest.new(uri).get http.errback { failed(http) } http.callback { http.response_header.status.should == 200 
http.response.should match(/Hello/) EventMachine.stop } } end it "should perform successfull HEAD with a URI passed as argument" do EventMachine.run { uri = URI.parse('http://127.0.0.1:8080/') http = EventMachine::HttpRequest.new(uri).head http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response.should == "" EventMachine.stop } } end # should be no different than a GET it "should perform successfull DELETE with a URI passed as argument" do EventMachine.run { uri = URI.parse('http://127.0.0.1:8080/') http = EventMachine::HttpRequest.new(uri).delete http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response.should == "" EventMachine.stop } } end it "should return 404 on invalid path" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/fail').get http.errback { failed(http) } http.callback { http.response_header.status.should == 404 EventMachine.stop } } end it "should build query parameters from Hash" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/').get :query => {:q => 'test'} http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response.should match(/test/) EventMachine.stop } } end it "should pass query parameters string" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/').get :query => "q=test" http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response.should match(/test/) EventMachine.stop } } end it "should encode an array of query parameters" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/echo_query').get :query => {:hash => ['value1', 'value2']} http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response.should match(/hash\[\]=value1&hash\[\]=value2/) EventMachine.stop } } end # should be no different than a POST it "should perform successfull PUT" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/').put :body => "data" http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response.should match(/data/) EventMachine.stop } } end it "should perform successfull POST" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/').post :body => "data" http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response.should match(/data/) EventMachine.stop } } end it "should escape body on POST" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/').post :body => {:stuff => 'string&string'} http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response.should == "stuff=string%26string" EventMachine.stop } } end it "should perform successfull POST with Ruby Hash/Array as params" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/').post :body => {"key1" => 1, "key2" => [2,3]} http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response.should match(/key1=1&key2\[0\]=2&key2\[1\]=3/) EventMachine.stop } } end it "should perform successfull POST with Ruby Hash/Array as params and with the correct content length" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/echo_content_length').post :body => {"key1" => "data1"} http.errback { failed(http) } 
http.callback { http.response_header.status.should == 200 http.response.to_i.should == 10 EventMachine.stop } } end it "should perform successfull GET with custom header" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/').get :head => {'if-none-match' => 'evar!'} http.errback { failed(http) } http.callback { http.response_header.status.should == 304 EventMachine.stop } } end it "should perform a streaming GET" do EventMachine.run { # digg.com uses chunked encoding http = EventMachine::HttpRequest.new('http://digg.com/news').get http.errback { failed(http) } http.callback { http.response_header.status.should == 200 EventMachine.stop } } end it "should perform basic auth" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/').get :head => {'authorization' => ['user', 'pass']} http.errback { failed(http) } http.callback { http.response_header.status.should == 200 EventMachine.stop } } end it "should send proper OAuth auth header" do EventMachine.run { oauth_header = 'OAuth oauth_nonce="oqwgSYFUD87MHmJJDv7bQqOF2EPnVus7Wkqj5duNByU", b=c, d=e' http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/oauth_auth').get :head => {'authorization' => oauth_header} http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response.should == oauth_header EventMachine.stop } } end context "keepalive" do it "should default to non-keepalive" do EventMachine.run { headers = {'If-Modified-Since' => 'Thu, 05 Aug 2010 22:54:44 GMT'} http = EventMachine::HttpRequest.new('http://www.google.com/images/logos/ps_logo2.png').get :head => headers http.errback { fail } start = Time.now.to_i http.callback { (start - Time.now.to_i).should be_within(1).of(0) EventMachine.stop } } end it "should work with keep-alive servers" do EventMachine.run { http = EventMachine::HttpRequest.new('http://mexicodiario.com/touch.public.json.php').get :keepalive => true http.errback { failed(http) } http.callback { http.response_header.status.should == 200 EventMachine.stop } } end end it "should return ETag and Last-Modified headers" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/echo_query').get http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response_header.etag.should match('abcdefg') http.response_header.last_modified.should match('Fri, 13 Aug 2010 17:31:21 GMT') EventMachine.stop } } end it "should detect deflate encoding" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/deflate').get :head => {"accept-encoding" => "deflate"} http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response_header["CONTENT_ENCODING"].should == "deflate" http.response.should == "compressed" EventMachine.stop } } end it "should detect gzip encoding" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/gzip').get :head => {"accept-encoding" => "gzip, compressed"} http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response_header["CONTENT_ENCODING"].should == "gzip" http.response.should == "compressed" EventMachine.stop } } end it "should timeout after 1 second" do EventMachine.run { t = Time.now.to_i EventMachine.heartbeat_interval = 0.1 http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/timeout').get :timeout => 1 http.errback { (Time.now.to_i - t).should <= 5 EventMachine.stop } http.callback { failed(http) } } end context 
"redirect" do it "should report last_effective_url" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/').get http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.last_effective_url.to_s.should == 'http://127.0.0.1:8080/' EM.stop } } end it "should follow location redirects" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/redirect').get :redirects => 1 http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response_header["CONTENT_ENCODING"].should == "gzip" http.response.should == "compressed" http.last_effective_url.to_s.should == 'http://127.0.0.1:8080/gzip' http.redirects.should == 1 EM.stop } } end it "should default to 0 redirects" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/redirect').get http.errback { failed(http) } http.callback { http.response_header.status.should == 301 http.last_effective_url.to_s.should == 'http://127.0.0.1:8080/gzip' http.redirects.should == 0 EM.stop } } end it "should not invoke redirect logic on failed(http) connections" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8081/').get :timeout => 0.1, :redirects => 5 http.callback { failed(http) } http.errback { http.redirects.should == 0 EM.stop } } end it "should normalize redirect urls" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/redirect/bad').get :redirects => 1 http.errback { failed(http) } http.callback { http.last_effective_url.to_s.should match('http://127.0.0.1:8080/') http.response.should match('Hello, World!') EM.stop } } end it "should fail gracefully on a missing host in absolute Location header" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/redirect/nohost').get :redirects => 1 http.callback { failed(http) } http.errback { http.error.should == 'Location header format error' EM.stop } } end it "should fail gracefully on an invalid host in Location header" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/redirect/badhost').get :redirects => 1 http.callback { failed(http) } http.errback { http.error.should == 'unable to resolve server address' EM.stop } } end end it "should optionally pass the response body progressively" do EventMachine.run { body = '' http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/').get http.errback { failed(http) } http.stream { |chunk| body += chunk } http.callback { http.response_header.status.should == 200 http.response.should == '' body.should match(/Hello/) EventMachine.stop } } end context "optional header callback" do it "should optionally pass the response headers" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/').get http.errback { failed(http) } http.headers { |hash| hash.should be_an_kind_of Hash hash.should include 'CONNECTION' hash.should include 'CONTENT_LENGTH' } http.callback { http.response_header.status.should == 200 http.response.should match(/Hello/) EventMachine.stop } } end it "should allow to terminate current connection from header callback" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/').get http.callback { failed(http) } http.headers { |hash| hash.should be_an_kind_of Hash hash.should include 'CONNECTION' hash.should include 'CONTENT_LENGTH' http.close('header callback terminated connection') } http.errback { |e| http.response_header.status.should == 200 
http.error.should == 'header callback terminated connection' http.response.should == '' EventMachine.stop } } end end it "should optionally pass the deflate-encoded response body progressively" do EventMachine.run { body = '' http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/deflate').get :head => {"accept-encoding" => "deflate, compressed"} http.errback { failed(http) } http.stream { |chunk| body += chunk } http.callback { http.response_header.status.should == 200 http.response_header["CONTENT_ENCODING"].should == "deflate" http.response.should == '' body.should == "compressed" EventMachine.stop } } end it "should initiate SSL/TLS on HTTPS connections" do EventMachine.run { http = EventMachine::HttpRequest.new('https://mail.google.com:443/mail/').get http.errback { failed(http) } http.callback { http.response_header.status.should == 302 EventMachine.stop } } end it "should accept & return cookie header to user" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/set_cookie').get http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response_header.cookie.should == "id=1; expires=Tue, 09-Aug-2011 17:53:39 GMT; path=/;" EventMachine.stop } } end it "should pass cookie header to server from string" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/echo_cookie').get :head => {'cookie' => 'id=2;'} http.errback { failed(http) } http.callback { http.response.should == "id=2;" EventMachine.stop } } end it "should pass cookie header to server from Hash" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/echo_cookie').get :head => {'cookie' => {'id' => 2}} http.errback { failed(http) } http.callback { http.response.should == "id=2;" EventMachine.stop } } end context "when talking to a stub HTTP/1.0 server" do it "should get the body without Content-Length" do EventMachine.run { @s = StubServer.new("HTTP/1.0 200 OK\r\nConnection: close\r\n\r\nFoo") http = EventMachine::HttpRequest.new('http://127.0.0.1:8081/').get http.errback { failed(http) } http.callback { http.response.should match(/Foo/) http.response_header['CONTENT_LENGTH'].should_not == 0 @s.stop EventMachine.stop } } end it "should work with \\n instead of \\r\\n" do EventMachine.run { @s = StubServer.new("HTTP/1.0 200 OK\nContent-Type: text/plain\nContent-Length: 3\nConnection: close\n\nFoo") http = EventMachine::HttpRequest.new('http://127.0.0.1:8081/').get http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response_header['CONTENT_TYPE'].should == 'text/plain' http.response.should match(/Foo/) @s.stop EventMachine.stop } } end end context "body content-type encoding" do it "should not set content type on string in body" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/echo_content_type').post :body => "data" http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response.should be_empty EventMachine.stop } } end it "should set content-type automatically when passed a ruby hash/array for body" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/echo_content_type').post :body => {:a => :b} http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response.should match("application/x-www-form-urlencoded") EventMachine.stop } } end it "should not override content-type when passing in ruby hash/array for body" do 
EventMachine.run { ct = 'text; charset=utf-8' http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/echo_content_type').post({ :body => {:a => :b}, :head => {'content-type' => ct}}) http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.content_charset.should == Encoding.find('utf-8') http.response_header["CONTENT_TYPE"].should == ct EventMachine.stop } } end it "should default to external encoding on invalid encoding" do EventMachine.run { ct = 'text/html; charset=utf-8lias' http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/echo_content_type').post({ :body => {:a => :b}, :head => {'content-type' => ct}}) http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.content_charset.should == Encoding.find('utf-8') http.response_header["CONTENT_TYPE"].should == ct EventMachine.stop } } end it "should processed escaped content-type" do EventMachine.run { ct = "text/html; charset=\"ISO-8859-4\"" http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/echo_content_type').post({ :body => {:a => :b}, :head => {'content-type' => ct}}) http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.content_charset.should == Encoding.find('ISO-8859-4') http.response_header["CONTENT_TYPE"].should == ct EventMachine.stop } } end end it "should complete a Location: with a relative path" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/relative-location').get http.errback { failed(http) } http.callback { http.response_header['LOCATION'].should == 'http://127.0.0.1:8080/forwarded' EventMachine.stop } } end it "should stream a file off disk" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/').post :file => 'spec/fixtures/google.ca' http.errback { failed(http) } http.callback { http.response.should match('google') EventMachine.stop } } end it 'should let you pass a block to be called once the client is created' do client = nil EventMachine.run { request = EventMachine::HttpRequest.new('http://127.0.0.1:8080/') http = request.post { |c| c.options[:body] = {:callback_run => 'yes'} client = c } http.errback { failed(http) } http.callback { client.should be_kind_of(EventMachine::HttpClient) http.response_header.status.should == 200 http.response.should match(/callback_run=yes/) EventMachine.stop } } end it "should retrieve multiple cookies" do EventMachine::MockHttpRequest.register_file('http://www.google.ca:80/', :get, {}, File.join(File.dirname(__FILE__), 'fixtures', 'google.ca')) EventMachine.run { http = EventMachine::MockHttpRequest.new('http://www.google.ca/').get http.errback { fail } http.callback { c1 = "PREF=ID=11955ae9690fd292:TM=1281823106:LM=1281823106:S=wHdloFqGQ_OLSE92; expires=Mon, 13-Aug-2012 21:58:26 GMT; path=/; domain=.google.ca" c2 = "NID=37=USTdOsxOSMbLjphkJ3S5Ueua3Yc23COXuK_pbztcHx7JoyhomwQySrvebCf3_u8eyrBiLWssVzaZcEOiKGEJbNdy8lRhnq_mfrdz693LaMjNPh__ccW4sgn1ZO6nQltE; expires=Sun, 13-Feb-2011 21:58:26 GMT; path=/; domain=.google.ca; HttpOnly" http.response_header.cookie.should == [c1, c2] EventMachine.stop } } EventMachine::MockHttpRequest.count('http://www.google.ca:80/', :get, {}).should == 1 end context "connections via" do context "direct proxy" do it "should default to skip CONNECT" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/?q=test').get :proxy => { :host => '127.0.0.1', :port => 8083 } http.errback { failed(http) } http.callback { 
http.response_header.status.should == 200 http.response.should match('test') EventMachine.stop } } end it "should send absolute URIs to the proxy server" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/?q=test').get :proxy => { :host => '127.0.0.1', :port => 8083 } http.errback { failed(http) } http.callback { http.response_header.status.should == 200 # The test proxy server gives the requested uri back in this header http.response_header['X_THE_REQUESTED_URI'].should == 'http://127.0.0.1:8080/?q=test' http.response_header['X_THE_REQUESTED_URI'].should_not == '/?q=test' http.response.should match('test') EventMachine.stop } } end it "should include query parameters specified in the options" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/').get( :proxy => { :host => '127.0.0.1', :port => 8083 }, :query => { 'q' => 'test' } ) http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response.should match('test') EventMachine.stop } } end end context "CONNECT proxy" do it "should work with CONNECT proxy servers" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/').get({ :proxy => {:host => '127.0.0.1', :port => 8082, :use_connect => true} }) http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response.should == 'Hello, World!' EventMachine.stop } } end it "should proxy POST data" do EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1:8080/').post({ :body => "data", :proxy => {:host => '127.0.0.1', :port => 8082, :use_connect => true} }) http.errback { failed(http) } http.callback { http.response_header.status.should == 200 http.response.should match(/data/) EventMachine.stop } } end end end context "websocket connection" do # Spec: http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-55 # # ws.onopen = http.callback # ws.onmessage = http.stream { |msg| } # ws.errback = no connection # it "should invoke errback on failed upgrade" do EventMachine.run { http = EventMachine::HttpRequest.new('ws://127.0.0.1:8080/').get :timeout => 0 http.callback { failed(http) } http.errback { http.response_header.status.should == 200 EventMachine.stop } } end it "should complete websocket handshake and transfer data from client to server and back" do EventMachine.run { MSG = "hello bi-directional data exchange" EventMachine::WebSocket.start(:host => "0.0.0.0", :port => 8085) do |ws| ws.onmessage {|msg| ws.send msg} end http = EventMachine::HttpRequest.new('ws://127.0.0.1:8085/').get :timeout => 1 http.errback { failed(http) } http.callback { http.response_header.status.should == 101 http.response_header['CONNECTION'].should match(/Upgrade/) http.response_header['UPGRADE'].should match(/WebSocket/) # push should only be invoked after handshake is complete http.send(MSG) } http.stream { |chunk| chunk.should == MSG EventMachine.stop } } end it "should split multiple messages from websocket server into separate stream callbacks" do EM.run do messages = %w[1 2] recieved = [] EventMachine::WebSocket.start(:host => "0.0.0.0", :port => 8085) do |ws| ws.onopen { ws.send messages[0] ws.send messages[1] } end EventMachine.add_timer(0.1) do http = EventMachine::HttpRequest.new('ws://127.0.0.1:8085/').get :timeout => 0 http.errback { failed(http) } http.callback { http.response_header.status.should == 101 } http.stream {|msg| msg.should == messages[recieved.size] recieved.push msg EventMachine.stop if recieved.size == 
messages.size } end end end end end em-http-request-0.3.0/spec/encoding_spec.rb0000644000175000017500000000260611772656255020165 0ustar tfheentfheenrequire 'helper' describe EventMachine::HttpEncoding do include EventMachine::HttpEncoding it "should transform a basic hash into HTTP POST Params" do form_encode_body({:a => "alpha", :b => "beta"}).should == "a=alpha&b=beta" end it "should transform a more complex hash into HTTP POST Params" do form_encode_body({:a => "a", :b => ["c", "d", "e"]}).should == "a=a&b[0]=c&b[1]=d&b[2]=e" end it "should transform a very complex hash into HTTP POST Params" do params = form_encode_body({:a => "a", :b => [{:c => "c", :d => "d"}, {:e => "e", :f => "f"}]}) params.should == "a=a&b[0][c]=c&b[0][d]=d&b[1][e]=e&b[1][f]=f" end it "should escape values" do params = form_encode_body({:stuff => 'string&string'}) params.should == "stuff=string%26string" end it "should escape keys" do params = form_encode_body({'bad&str'=> {'key&key' => [:a, :b]}}) params.should == 'bad%26str[key%26key][0]=a&bad%26str[key%26key][1]=b' end it "should escape keys and values" do params = form_encode_body({'bad&str'=> {'key&key' => ['bad+&stuff', '[test]']}}) params.should == "bad%26str[key%26key][0]=bad%2B%26stuff&bad%26str[key%26key][1]=%5Btest%5D" end it "should be fast on long string escapes" do s = Time.now 5000.times { |n| form_encode_body({:a => "{a:'b', d:'f', g:['a','b']}"*50}) } (Time.now - s).should satisfy { |t| t < 1.5 } end end em-http-request-0.3.0/spec/stallion.rb0000644000175000017500000001642111772656255017212 0ustar tfheentfheen# #-- # Includes portion originally Copyright (C)2008 Michael Fellinger # license See file LICENSE for details # #-- require 'rack' module Stallion class Mount def initialize(name, *methods, &block) @name, @methods, @block = name, methods, block end def ride @block.call end def match?(request) method = request['REQUEST_METHOD'] right_method = @methods.empty? or @methods.include?(method) end end class Stable attr_reader :request, :response def initialize @boxes = {} end def in(path, *methods, &block) mount = Mount.new(path, *methods, &block) @boxes[[path, methods]] = mount mount end def call(request, response) @request, @response = request, response @boxes.each do |(path, methods), mount| if mount.match?(request) mount.ride end end end end STABLES = {} def self.saddle(name = nil) STABLES[name] = stable = Stable.new yield stable end def self.run(options = {}) options = {:Host => "127.0.0.1", :Port => 8080}.merge(options) Rack::Handler::Mongrel.run(Rack::Lint.new(self), options) end def self.call(env) request = Rack::Request.new(env) response = Rack::Response.new STABLES.each do |name, stable| stable.call(request, response) end response.finish end end Stallion.saddle :spec do |stable| stable.in '/' do if stable.request.path_info == '/fail' stable.response.status = 404 elsif stable.request.query_string == 'q=test' stable.response.write 'test' elsif stable.request.path_info == '/echo_query' stable.response["ETag"] = "abcdefg" stable.response["Last-Modified"] = "Fri, 13 Aug 2010 17:31:21 GMT" stable.response.write stable.request.query_string elsif stable.request.path_info == '/echo_content_length' stable.response.write stable.request.content_length elsif stable.request.head? && stable.request.path_info == '/' stable.response.status = 200 elsif stable.request.delete? stable.response.status = 200 elsif stable.request.put? stable.response.write stable.request.body.read elsif stable.request.post? 
if stable.request.path_info == '/echo_content_type' stable.response["Content-Type"] = stable.request.env["CONTENT_TYPE"] || 'text/html' stable.response.write stable.request.env["CONTENT_TYPE"] else stable.response.write stable.request.body.read end elsif stable.request.path_info == '/set_cookie' stable.response["Set-Cookie"] = "id=1; expires=Tue, 09-Aug-2011 17:53:39 GMT; path=/;" stable.response.write "cookie set" elsif stable.request.path_info == '/echo_cookie' stable.response.write stable.request.env["HTTP_COOKIE"] elsif stable.request.path_info == '/timeout' sleep(10) stable.response.write 'timeout' elsif stable.request.path_info == '/redirect' stable.response.status = 301 stable.response["Location"] = "/gzip" stable.response.write 'redirect' elsif stable.request.path_info == '/redirect/bad' stable.response.status = 301 stable.response["Location"] = "http://127.0.0.1:8080" elsif stable.request.path_info == '/redirect/head' stable.response.status = 301 stable.response["Location"] = "/" elsif stable.request.path_info == '/redirect/nohost' stable.response.status = 301 stable.response["Location"] = "http:/" elsif stable.request.path_info == '/redirect/badhost' stable.response.status = 301 stable.response["Location"] = "http://$$$@$!%&^" elsif stable.request.path_info == '/gzip' io = StringIO.new gzip = Zlib::GzipWriter.new(io) gzip << "compressed" gzip.close stable.response.write io.string stable.response["Content-Encoding"] = "gzip" elsif stable.request.path_info == '/deflate' stable.response.write Zlib::Deflate.deflate("compressed") stable.response["Content-Encoding"] = "deflate" elsif stable.request.env["HTTP_IF_NONE_MATCH"] stable.response.status = 304 elsif stable.request.env["HTTP_AUTHORIZATION"] if stable.request.path_info == '/oauth_auth' stable.response.status = 200 stable.response.write stable.request.env["HTTP_AUTHORIZATION"] else auth = "Basic %s" % Base64.encode64(['user', 'pass'].join(':')).chomp if auth == stable.request.env["HTTP_AUTHORIZATION"] stable.response.status = 200 stable.response.write 'success' else stable.response.status = 401 end end elsif stable.request.path_info == '/relative-location' stable.response.status = 301 stable.response["Location"] = '/forwarded' elsif stable.response.write 'Hello, World!' 
end end end Thread.new do begin Stallion.run :Host => '127.0.0.1', :Port => 8080 rescue Exception => e print e end end # # Tunneling HTTP Proxy server # Thread.new do server = TCPServer.new('127.0.0.1', 8082) loop do session = server.accept request = "" while (data = session.gets) != "\r\n" request << data end parts = request.split("\r\n") method, destination, http_version = parts.first.split(' ') if method == 'CONNECT' target_host, target_port = destination.split(':') client = TCPSocket.open(target_host, target_port) session.write "HTTP/1.1 200 Connection established\r\nProxy-agent: Whatever\r\n\r\n" session.flush content_length = -1 verb = "" req = "" while data = session.gets if request = data.match(/(\w+).*HTTP\/1\.1/) verb = request[1] end if post = data.match(/Content-Length: (\d+)/) content_length = post[1].to_i end req += data # read POST data if data == "\r\n" and verb == "POST" req += session.read(content_length) end if data == "\r\n" client.write req client.flush client.close_write break end end while data = client.gets session.write data end session.flush client.close end session.close end end # # CONNECT-less HTTP Proxy server # Thread.new do server = TCPServer.new('127.0.0.1', 8083) loop do session = server.accept request = "" while (data = session.gets) != "\r\n" request << data end parts = request.split("\r\n") method, destination, http_version = parts.first.split(' ') if destination =~ /^http:/ uri = Addressable::URI.parse(destination) absolute_path = uri.path + (uri.query ? "?#{uri.query}" : "") client = TCPSocket.open(uri.host, uri.port || 80) client.write "#{method} #{absolute_path} #{http_version}\r\n" parts[1..-1].each do |part| client.write "#{part}\r\n" end client.write "\r\n" client.flush client.close_write # Take the initial line from the upstream response session.write client.gets # What (absolute) uri was requested? 
Send it back in a header session.write "X-The-Requested-URI: #{destination}\r\n" while data = client.gets session.write data end session.flush client.close end session.close end end sleep(1) em-http-request-0.3.0/spec/multi_spec.rb0000644000175000017500000000466711772656255017542 0ustar tfheentfheenrequire 'helper' require 'stallion' describe EventMachine::MultiRequest do it "should submit multiple requests in parallel and return once all of them are complete" do EventMachine.run { # create an instance of multi-request handler, and the requests themselves multi = EventMachine::MultiRequest.new # add multiple requests to the multi-handler multi.add(EventMachine::HttpRequest.new('http://127.0.0.1:8080/').get(:query => {:q => 'test'})) multi.add(EventMachine::HttpRequest.new('http://0.0.0.0:8083/').get(:timeout => 1)) multi.callback { # verify successful request multi.responses[:succeeded].size.should == 1 multi.responses[:succeeded].first.response.should match(/test/) # verify invalid requests multi.responses[:failed].size.should == 1 multi.responses[:failed].first.response_header.status.should == 0 EventMachine.stop } } end it "should accept multiple open connections and return once all of them are complete" do EventMachine.run { http1 = EventMachine::HttpRequest.new('http://127.0.0.1:8080/').get(:query => {:q => 'test'}) http2 = EventMachine::HttpRequest.new('http://0.0.0.0:8083/').get(:timeout => 1) multi = EventMachine::MultiRequest.new([http1, http2]) do multi.responses[:succeeded].size.should == 1 multi.responses[:succeeded].first.response.should match(/test/) multi.responses[:failed].size.should == 1 multi.responses[:failed].first.response_header.status.should == 0 EventMachine.stop end } end it "should handle multiple mock requests" do EventMachine::MockHttpRequest.register_file('http://127.0.0.1:8080/', :get, {}, File.join(File.dirname(__FILE__), 'fixtures', 'google.ca')) EventMachine::MockHttpRequest.register_file('http://0.0.0.0:8083/', :get, {}, File.join(File.dirname(__FILE__), 'fixtures', 'google.ca')) EventMachine.run { # create an instance of multi-request handler, and the requests themselves multi = EventMachine::MultiRequest.new # add multiple requests to the multi-handler multi.add(EventMachine::MockHttpRequest.new('http://127.0.0.1:8080/').get) multi.add(EventMachine::MockHttpRequest.new('http://0.0.0.0:8083/').get) multi.callback { # verify successful request multi.responses[:succeeded].size.should == 2 EventMachine.stop } } end end em-http-request-0.3.0/spec/mock_spec.rb0000644000175000017500000002723211772656255017332 0ustar tfheentfheenrequire 'helper' describe 'em-http mock' do before(:all) do EventMachine::MockHttpRequest.activate! end after(:all) do EventMachine::MockHttpRequest.deactivate! end before(:each) do EventMachine::MockHttpRequest.reset_registry! EventMachine::MockHttpRequest.reset_counts! end it "should serve a fake http request from a proc" do EventMachine::HttpRequest.register('http://www.google.ca:80/', :get) { |req| req.response_header.http_status = 200 req.response_header['SOME_WACKY_HEADER'] = 'WACKY_HEADER_VALUE' req.response = "Well, now this is fun." } EM.run { http = EventMachine::HttpRequest.new('http://www.google.ca/').get http.errback { fail } http.callback { http.response_header.status.should == 200 http.response_header['SOME_WACKY_HEADER'].should == 'WACKY_HEADER_VALUE' http.response.should == "Well, now this is fun." 
EventMachine.stop } } EventMachine::HttpRequest.count('http://www.google.ca:80/', :get, {}).should == 1 end it "should serve a fake http request from a proc with raw data" do EventMachine::HttpRequest.register('http://www.google.ca:80/', :get) { |req| req.receive_data(File.read(File.join(File.dirname(__FILE__), 'fixtures', 'google.ca'))) } EM.run { http = EventMachine::HttpRequest.new('http://www.google.ca/').get http.errback { fail } http.callback { http.response_header.status.should == 200 http.response.should == File.read(File.join(File.dirname(__FILE__), 'fixtures', 'google.ca'), :encoding => 'ISO-8859-1').split("\r\n\r\n", 2).last http.response.encoding.to_s.should == 'ISO-8859-1' EventMachine::HttpRequest.count('http://www.google.ca:80/', :get, {}).should == 1 EventMachine.stop } } end it "should serve a fake http request from a file" do EventMachine::HttpRequest.register_file('http://www.google.ca:80/', :get, {}, File.join(File.dirname(__FILE__), 'fixtures', 'google.ca')) EM.run { http = EventMachine::HttpRequest.new('http://www.google.ca/').get http.errback { fail } http.callback { http.response_header.status.should == 200 http.response.should == File.read(File.join(File.dirname(__FILE__), 'fixtures', 'google.ca'), :encoding => 'ISO-8859-1').split("\r\n\r\n", 2).last http.response.encoding.to_s.should == 'ISO-8859-1' EventMachine::HttpRequest.count('http://www.google.ca:80/', :get, {}).should == 1 EventMachine.stop } } end it "should serve a fake http request from a string" do data = <<-HEREDOC HTTP/1.0 200 OK Date: Mon, 16 Nov 2009 20:39:15 GMT Expires: -1 Cache-Control: private, max-age=0 Content-Type: text/html; charset=ISO-8859-1 Set-Cookie: PREF=ID=9454187d21c4a6a6:TM=1258403955:LM=1258403955:S=2-mf1n5oV5yAeT9-; expires=Wed, 16-Nov-2011 20:39:15 GMT; path=/; domain=.google.ca Set-Cookie: NID=28=lvxxVdiBQkCetu_WFaUxLyB7qPlHXS5OdAGYTqge_laVlCKVN8VYYeVBh4bNZiK_Oan2gm8oP9GA-FrZfMPC3ZMHeNq37MG2JH8AIW9LYucU8brOeuggMEbLNNXuiWg4; expires=Tue, 18-May-2010 20:39:15 GMT; path=/; domain=.google.ca; HttpOnly Server: gws X-XSS-Protection: 0 X-Cache: MISS from . Via: 1.0 .:80 (squid) Connection: close Google
[google.ca homepage fixture body: the HTML markup was lost in extraction, leaving only stray navigation and footer text such as "Web Images Videos Maps News Groups Gmail more »", "Advanced Search", "Language Tools", "Google.ca offered in: français" and "©2009 - Privacy" / "© 2010 - Privacy"]
  HEREDOC
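# A minimal usage sketch of the mock API the examples above exercise — not one
# of the original specs. It assumes that requiring 'em-http' (as spec/helper.rb
# does) makes EventMachine::MockHttpRequest available; the URL and response
# body below are placeholders chosen for illustration.
EventMachine::MockHttpRequest.activate!

# Register a canned response for GET http://example.org:80/
EventMachine::HttpRequest.register('http://example.org:80/', :get) { |req|
  req.response_header.http_status = 200
  req.response = "canned body"
}

EM.run {
  http = EventMachine::HttpRequest.new('http://example.org/').get
  http.callback {
    puts http.response   # => "canned body", served from the registry, no network I/O
    EventMachine.stop
  }
}

# Each registered (url, method, headers) triple keeps a hit counter:
EventMachine::HttpRequest.count('http://example.org:80/', :get, {})  # => 1

EventMachine::MockHttpRequest.deactivate!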

em-http-request-0.3.0/spec/helper.rb0000644000175000017500000000026211772656255016640 0ustar tfheentfheenrequire 'rubygems' require 'rspec' require 'pp' $LOAD_PATH << File.expand_path(File.join(File.dirname(__FILE__), '..', 'lib')) require 'em-http' require 'em-websocket' em-http-request-0.3.0/metadata.yml0000644000175000017500000001147511772656255016415 0ustar tfheentfheen--- !ruby/object:Gem::Specification name: em-http-request version: !ruby/object:Gem::Version prerelease: false segments: - 0 - 3 - 0 version: 0.3.0 platform: ruby authors: - Ilya Grigorik autorequire: bindir: bin cert_chain: [] date: 2011-01-15 00:00:00 -05:00 default_executable: dependencies: - !ruby/object:Gem::Dependency name: eventmachine prerelease: false requirement: &id001 !ruby/object:Gem::Requirement none: false requirements: - - ">=" - !ruby/object:Gem::Version segments: - 0 - 12 - 9 version: 0.12.9 type: :runtime version_requirements: *id001 - !ruby/object:Gem::Dependency name: addressable prerelease: false requirement: &id002 !ruby/object:Gem::Requirement none: false requirements: - - ">=" - !ruby/object:Gem::Version segments: - 2 - 0 - 0 version: 2.0.0 type: :runtime version_requirements: *id002 - !ruby/object:Gem::Dependency name: escape_utils prerelease: false requirement: &id003 !ruby/object:Gem::Requirement none: false requirements: - - ">=" - !ruby/object:Gem::Version segments: - 0 version: "0" type: :runtime version_requirements: *id003 - !ruby/object:Gem::Dependency name: rspec prerelease: false requirement: &id004 !ruby/object:Gem::Requirement none: false requirements: - - ">=" - !ruby/object:Gem::Version segments: - 0 version: "0" type: :development version_requirements: *id004 - !ruby/object:Gem::Dependency name: rake prerelease: false requirement: &id005 !ruby/object:Gem::Requirement none: false requirements: - - ">=" - !ruby/object:Gem::Version segments: - 0 version: "0" type: :development version_requirements: *id005 - !ruby/object:Gem::Dependency name: em-websocket prerelease: false requirement: &id006 !ruby/object:Gem::Requirement none: false requirements: - - ">=" - !ruby/object:Gem::Version segments: - 0 version: "0" type: :development version_requirements: *id006 - !ruby/object:Gem::Dependency name: rack prerelease: false requirement: &id007 !ruby/object:Gem::Requirement none: false requirements: - - ">=" - !ruby/object:Gem::Version segments: - 0 version: "0" type: :development version_requirements: *id007 - !ruby/object:Gem::Dependency name: mongrel prerelease: false requirement: &id008 !ruby/object:Gem::Requirement none: false requirements: - - ~> - !ruby/object:Gem::Version segments: - 1 - 2 - 0 - pre2 version: 1.2.0.pre2 type: :development version_requirements: *id008 description: EventMachine based, async HTTP Request client email: - ilya@igvita.com executables: [] extensions: - ext/buffer/extconf.rb - ext/http11_client/extconf.rb extra_rdoc_files: [] files: - .gitignore - .rspec - Changelog.md - Gemfile - README.md - Rakefile - em-http-request.gemspec - examples/fetch.rb - examples/fibered-http.rb - examples/oauth-tweet.rb - examples/socks5.rb - examples/websocket-handler.rb - examples/websocket-server.rb - ext/buffer/em_buffer.c - ext/buffer/extconf.rb - ext/http11_client/ext_help.h - ext/http11_client/extconf.rb - ext/http11_client/http11_client.c - ext/http11_client/http11_parser.c - ext/http11_client/http11_parser.h - ext/http11_client/http11_parser.rl - lib/em-http-request.rb - lib/em-http.rb - lib/em-http/client.rb - lib/em-http/core_ext/bytesize.rb - lib/em-http/decoders.rb - 
lib/em-http/http_encoding.rb - lib/em-http/http_header.rb - lib/em-http/http_options.rb - lib/em-http/mock.rb - lib/em-http/multi.rb - lib/em-http/request.rb - lib/em-http/version.rb - spec/encoding_spec.rb - spec/fixtures/google.ca - spec/helper.rb - spec/mock_spec.rb - spec/multi_spec.rb - spec/request_spec.rb - spec/stallion.rb - spec/stub_server.rb has_rdoc: true homepage: http://github.com/igrigorik/em-http-request licenses: [] post_install_message: rdoc_options: [] require_paths: - lib required_ruby_version: !ruby/object:Gem::Requirement none: false requirements: - - ">=" - !ruby/object:Gem::Version segments: - 0 version: "0" required_rubygems_version: !ruby/object:Gem::Requirement none: false requirements: - - ">=" - !ruby/object:Gem::Version segments: - 0 version: "0" requirements: [] rubyforge_project: em-http-request rubygems_version: 1.3.7 signing_key: specification_version: 3 summary: EventMachine based, async HTTP Request client test_files: - spec/encoding_spec.rb - spec/fixtures/google.ca - spec/helper.rb - spec/mock_spec.rb - spec/multi_spec.rb - spec/request_spec.rb - spec/stallion.rb - spec/stub_server.rb em-http-request-0.3.0/.rspec0000644000175000017500000000000011772656255015205 0ustar tfheentfheenem-http-request-0.3.0/README.md0000644000175000017500000001337311772656255015370 0ustar tfheentfheenEM-HTTP-Request =============== Asynchronous HTTP client for Ruby, based on EventMachine runtime. - Ragel HTTP parser for speed & performance - Simple interface for single & parallel requests via deferred callbacks - Automatic gzip & deflate decoding - Basic-Auth & OAuth support - Custom timeout support - Stream response processing - Proxy support (with SSL Tunneling): CONNECT, direct & SOCKS5 - Auto-follow 3xx redirects with custom max depth - Bi-directional communication with web-socket services - [Native mocking support](http://wiki.github.com/igrigorik/em-http-request/mocking-httprequest) and through [Webmock](http://github.com/bblimke/webmock) Getting started --------------- gem install em-http-request irb:0> require 'em-http' Or checkout [screencast / demo](http://everburning.com/news/eventmachine-screencast-em-http-request/) of using EM-HTTP-Request. Libraries & Applications using em-http -------------------------------------- - [chirpstream](http://github.com/joshbuddy/chirpstream) - EM client for Twitters Chirpstream API - [RDaneel](http://github.com/hasmanydevelopers/RDaneel) - Ruby crawler which respects robots.txt - [rsolr-async](http://github.com/mwmitchell/rsolr-async) - An asynchronus connection adapter for RSolr - [PubSubHubbub](http://github.com/igrigorik/PubSubHubbub) - Asynchronous PubSubHubbub ruby client - [Firering](http://github.com/EmmanuelOga/firering) - Eventmachine powered Campfire API - and many others.. drop me a link if you want yours included! Simple client example --------------------- EventMachine.run { http = EventMachine::HttpRequest.new('http://127.0.0.1/').get :query => {'keyname' => 'value'}, :timeout => 10 http.callback { p http.response_header.status p http.response_header p http.response EventMachine.stop } } Multi-request example --------------------- Fire and wait for multiple requests to complete via the MultiRequest interface. 
EventMachine.run { multi = EventMachine::MultiRequest.new # add multiple requests to the multi-handler multi.add(EventMachine::HttpRequest.new('http://www.google.com/').get) multi.add(EventMachine::HttpRequest.new('http://www.yahoo.com/').get) multi.callback { p multi.responses[:succeeded] p multi.responses[:failed] EventMachine.stop } } Basic-Auth example ------------------ Full basic author support. For OAuth, check examples/oauth-tweet.rb file. EventMachine.run { http = EventMachine::HttpRequest.new('http://www.website.com/').get :head => {'authorization' => ['user', 'pass']} http.errback { failed } http.callback { p http.response_header EventMachine.stop } } POSTing data example -------------------- For multi-part uploads, please see [this gist](https://gist.github.com/778639). EventMachine.run { http1 = EventMachine::HttpRequest.new('http://www.website.com/').post :body => {"key1" => 1, "key2" => [2,3]} http2 = EventMachine::HttpRequest.new('http://www.website.com/').post :body => "some data" # ... } Streaming body processing ------------------------- Allows you to consume an HTTP stream of content in real-time. Each time a new piece of content is pushed to the client, it is passed to the stream callback for you to operate on. EventMachine.run { http = EventMachine::HttpRequest.new('http://www.website.com/').get http.stream { |chunk| print chunk } } Streaming files from disk ------------------------- Allows you to efficiently stream a (large) file from disk via EventMachine's FileStream interface. EventMachine.run { http = EventMachine::HttpRequest.new('http://www.website.com/').post :file => 'largefile.txt' http.callback { |chunk| puts "Upload finished!" } } Proxy example ------------- Full transparent proxy support with support for SSL tunneling. EventMachine.run { http = EventMachine::HttpRequest.new('http://www.website.com/').get :proxy => { :host => 'www.myproxy.com', :port => 8080, :authorization => ['username', 'password'] # authorization is optional } SOCKS5 Proxy example ------------- Tunnel your requests via connect via SOCKS5 proxies (ssh -D port somehost). EventMachine.run { http = EventMachine::HttpRequest.new('http://www.website.com/').get :proxy => { :host => 'www.myproxy.com', :port => 8080, :type => :socks } Auto-follow 3xx redirects ------------------------- Specify the max depth of redirects to follow, default is 0. EventMachine.run { http = EventMachine::HttpRequest.new('http://www.google.com/').get :redirects => 1 http.callback { p http.last_effective_url } } WebSocket example ----------------- [Bi-directional communication with WebSockets](http://www.igvita.com/2009/12/22/ruby-websockets-tcp-for-the-browser/): simply pass in a ws:// resource and the client will negotiate the connection upgrade for you. On successful handshake the callback is invoked, and any incoming messages will be passed to the stream callback. The client can also send data to the server at will by calling the "send" method! EventMachine.run { http = EventMachine::HttpRequest.new("ws://yourservice.com/websocket").get :timeout => 0 http.errback { puts "oops" } http.callback { puts "WebSocket connected!" http.send("Hello client") } http.stream { |msg| puts "Recieved: #{msg}" http.send "Pong: #{msg}" } http.disconnect { puts "oops, dropped connection?" 
} } License ------- (MIT License) - Copyright (c) 2011 Ilya Grigorikem-http-request-0.3.0/Rakefile0000644000175000017500000000317311772656255015553 0ustar tfheentfheenrequire 'bundler' Bundler.setup Bundler.require :default, :development require 'rake' require 'rake/clean' require 'rake/gempackagetask' require 'rspec/core/rake_task' require 'fileutils' include FileUtils # copied from EventMachine. MAKE = ENV['MAKE'] || if RUBY_PLATFORM =~ /mswin/ # mingw uses make. 'nmake' else 'make' end # Default Rake task is compile task :default => :compile # Rebuild parser Ragel task :ragel do Dir.chdir "ext/http11_client" do target = "http11_parser.c" File.unlink target if File.exist? target sh "ragel http11_parser.rl | rlgen-cd -G2 -o #{target}" raise "Failed to build C source" unless File.exist? target end end desc "Run all RSpec tests" RSpec::Core::RakeTask.new(:spec) def make(makedir) Dir.chdir(makedir) { sh MAKE } end def extconf(dir) Dir.chdir(dir) { ruby "extconf.rb" } end def setup_extension(dir, extension) ext = "ext/#{dir}" ext_so = "#{ext}/#{extension}.#{Config::MAKEFILE_CONFIG['DLEXT']}" ext_files = FileList[ "#{ext}/*.c", "#{ext}/*.h", "#{ext}/extconf.rb", "#{ext}/Makefile", "lib" ] task "lib" do directory "lib" end desc "Builds just the #{extension} extension" mf = (extension + '_makefile').to_sym task mf do |t| extconf "#{ext}" end task extension.to_sym => [mf] do make "#{ext}" cp ext_so, "lib" end end setup_extension("buffer", "em_buffer") setup_extension("http11_client", "http11_client") task :compile => [:em_buffer, :http11_client] CLEAN.include ['build/*', '**/*.o', '**/*.so', '**/*.a', '**/*.log', 'pkg'] CLEAN.include ['ext/buffer/Makefile', 'lib/em_buffer.*', 'lib/http11_client.*']em-http-request-0.3.0/examples/0000755000175000017500000000000011772656255015720 5ustar tfheentfheenem-http-request-0.3.0/examples/websocket-server.rb0000644000175000017500000000042611772656255021541 0ustar tfheentfheenrequire 'rubygems' require 'em-websocket' EventMachine::WebSocket.start(:host => "0.0.0.0", :port => 8080) do |ws| ws.onopen { ws.send "Hello Client!"} ws.onmessage { |msg| p "got: #{msg}"; ws.send "Pong: #{msg}" } ws.onclose { puts "WebSocket closed" } end em-http-request-0.3.0/examples/fibered-http.rb0000644000175000017500000000160611772656255020625 0ustar tfheentfheenrequire 'eventmachine' require 'em-http' require 'fiber' # Using Fibers in Ruby 1.9 to simulate blocking IO / IO scheduling # while using the async EventMachine API's def async_fetch(url) f = Fiber.current http = EventMachine::HttpRequest.new(url).get :timeout => 10 http.callback { f.resume(http) } http.errback { f.resume(http) } return Fiber.yield end EventMachine.run do Fiber.new{ puts "Setting up HTTP request #1" data = async_fetch('http://www.google.com/') puts "Fetched page #1: #{data.response_header.status}" puts "Setting up HTTP request #2" data = async_fetch('http://www.yahoo.com/') puts "Fetched page #2: #{data.response_header.status}" EventMachine.stop }.resume end puts "Done" # Setting up HTTP request #1 # Fetched page #1: 302 # Setting up HTTP request #2 # Fetched page #2: 200 # Doneem-http-request-0.3.0/examples/socks5.rb0000644000175000017500000000112211772656255017450 0ustar tfheentfheenrequire 'rubygems' require 'eventmachine' require '../lib/em-http' EM.run do # Establish a SOCKS5 tunnel via SSH # ssh -D 8000 some_remote_machine # http = EM::HttpRequest.new('http://whatismyip.org/').get({ http = EM::HttpRequest.new('http://igvita.com/').get({ :proxy => {:host => '127.0.0.1', :port => 8000, :type => 
:socks}, :redirects => 2 }) http.callback { puts "#{http.response_header.status} - #{http.response.length} bytes\n" puts http.response EM.stop } http.errback { puts "Error: " + http.error puts http.inspect EM.stop } end em-http-request-0.3.0/examples/websocket-handler.rb0000644000175000017500000000104411772656255021645 0ustar tfheentfheenrequire 'rubygems' require 'lib/em-http' module KBHandler include EM::Protocols::LineText2 def receive_line(data) p "Want to send: #{data}" p "Error status: #{$http.error?}" $http.send(data) p "After send" end end EventMachine.run { $http = EventMachine::HttpRequest.new("ws://localhost:8080/").get :timeout => 0 $http.disconnect { puts 'oops' } $http.callback { puts "WebSocket connected!" } $http.stream { |msg| puts "Recieved: #{msg}" } EM.open_keyboard(KBHandler) } em-http-request-0.3.0/examples/oauth-tweet.rb0000644000175000017500000000277511772656255020526 0ustar tfheentfheen# Courtesy of Darcy Laycock: # http://gist.github.com/265261 # require 'rubygems' require 'em-http' require 'oauth' # At a minimum, require 'oauth/request_proxy/em_http_request' # for this example, we'll use Net::HTTP like support. require 'oauth/client/em_http' # You need two things: an oauth consumer and an access token. # You need to generate an access token, I suggest looking elsewhere how to do that or wait for a full tutorial. # For a consumer key / consumer secret, signup for an app at: # http://twitter.com/apps/new # Edit in your details. CONSUMER_KEY = "" CONSUMER_SECRET = "" ACCESS_TOKEN = "" ACCESS_TOKEN_SECRET = "" def twitter_oauth_consumer @twitter_oauth_consumer ||= OAuth::Consumer.new(CONSUMER_KEY, CONSUMER_SECRET, :site => "http://twitter.com") end def twitter_oauth_access_token @twitter_oauth_access_token ||= OAuth::AccessToken.new(twitter_oauth_consumer, ACCESS_TOKEN, ACCESS_TOKEN_SECRET) end EM.run do request = EventMachine::HttpRequest.new('http://twitter.com/statuses/update.json') http = request.post(:body => {'status' => 'Hello Twitter from em-http-request with OAuth'}, :head => {"Content-Type" => "application/x-www-form-urlencoded"}) do |client| twitter_oauth_consumer.sign!(client, twitter_oauth_access_token) end http.callback do puts "Response: #{http.response} (Code: #{http.response_header.status})" EM.stop_event_loop end http.errback do puts "Failed to post" EM.stop_event_loop end endem-http-request-0.3.0/examples/fetch.rb0000644000175000017500000000110011772656255017326 0ustar tfheentfheenrequire 'rubygems' require 'eventmachine' require '../lib/em-http' urls = ARGV if urls.size < 1 puts "Usage: #{$0} <...>" exit end pending = urls.size EM.run do urls.each do |url| http = EM::HttpRequest.new(url).get http.callback { puts "#{url}\n#{http.response_header.status} - #{http.response.length} bytes\n" puts http.response pending -= 1 EM.stop if pending < 1 } http.errback { puts "#{url}\n" + http.error pending -= 1 EM.stop if pending < 1 } end end em-http-request-0.3.0/.gitignore0000644000175000017500000000005411772656255016071 0ustar tfheentfheen*.bundle *.o Makefile mkmf.log Gemfile.lock em-http-request-0.3.0/ext/0000755000175000017500000000000011772656255014702 5ustar tfheentfheenem-http-request-0.3.0/ext/http11_client/0000755000175000017500000000000011772656255017361 5ustar tfheentfheenem-http-request-0.3.0/ext/http11_client/extconf.rb0000644000175000017500000000015011772656255021350 0ustar tfheentfheenrequire 'mkmf' dir_config("http11_client") have_library("c", "main") create_makefile("http11_client") 
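# Manual build sketch for this extension — the same steps the Rakefile's
# :http11_client task automates (run extconf.rb, run make, copy the shared
# object into lib/). Run from the gem root; on mswin the Rakefile substitutes
# `nmake` for `make`. This is an illustration, not part of the gem's build files.
require 'fileutils'
require 'rbconfig'

Dir.chdir("ext/http11_client") do
  system("ruby extconf.rb") or abort "extconf.rb failed"
  system("make")            or abort "make failed"
end

dlext = RbConfig::CONFIG['DLEXT']   # "so", "bundle", etc., depending on platform
FileUtils.cp "ext/http11_client/http11_client.#{dlext}", "lib/"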
em-http-request-0.3.0/ext/http11_client/http11_parser.rl0000644000175000017500000001060311772656255022415 0ustar tfheentfheen/** * Copyright (c) 2005 Zed A. Shaw * You can redistribute it and/or modify it under the same terms as Ruby. */ #include "http11_parser.h" #include #include #include #include #include #define LEN(AT, FPC) (FPC - buffer - parser->AT) #define MARK(M,FPC) (parser->M = (FPC) - buffer) #define PTR_TO(F) (buffer + parser->F) #define L(M) fprintf(stderr, "" # M "\n"); /** machine **/ %%{ machine httpclient_parser; action mark {MARK(mark, fpc); } action start_field { MARK(field_start, fpc); } action write_field { parser->field_len = LEN(field_start, fpc); } action start_value { MARK(mark, fpc); } action write_value { parser->http_field(parser->data, PTR_TO(field_start), parser->field_len, PTR_TO(mark), LEN(mark, fpc)); } action reason_phrase { parser->reason_phrase(parser->data, PTR_TO(mark), LEN(mark, fpc)); } action status_code { parser->status_code(parser->data, PTR_TO(mark), LEN(mark, fpc)); } action http_version { parser->http_version(parser->data, PTR_TO(mark), LEN(mark, fpc)); } action chunk_size { parser->chunk_size(parser->data, PTR_TO(mark), LEN(mark, fpc)); } action last_chunk { parser->last_chunk(parser->data, NULL, 0); } action done { parser->body_start = fpc - buffer + 1; if(parser->header_done != NULL) parser->header_done(parser->data, fpc + 1, pe - fpc - 1); fbreak; } # line endings CRLF = ("\r\n" | "\n"); # character types CTL = (cntrl | 127); tspecials = ("(" | ")" | "<" | ">" | "@" | "," | ";" | ":" | "\\" | "\"" | "/" | "[" | "]" | "?" | "=" | "{" | "}" | " " | "\t"); # elements token = (ascii -- (CTL | tspecials)); Reason_Phrase = (any -- CRLF)* >mark %reason_phrase; Status_Code = digit{3} >mark %status_code; http_number = (digit+ "." digit+) ; HTTP_Version = ("HTTP/" http_number) >mark %http_version ; Status_Line = HTTP_Version " " Status_Code " "? Reason_Phrase :> CRLF; field_name = token+ >start_field %write_field; field_value = any* >start_value %write_value; message_header = field_name ":" " "* field_value :> CRLF; Response = Status_Line (message_header)* (CRLF @done); chunk_ext_val = token+; chunk_ext_name = token+; chunk_extension = (";" chunk_ext_name >start_field %write_field %start_value ("=" chunk_ext_val >start_value)? %write_value )*; last_chunk = "0"? 
chunk_extension :> (CRLF @last_chunk @done); chunk_size = xdigit+; chunk = chunk_size >mark %chunk_size chunk_extension space* :> (CRLF @done); Chunked_Header = (chunk | last_chunk); main := Response | Chunked_Header; }%% /** Data **/ %% write data; int httpclient_parser_init(httpclient_parser *parser) { int cs = 0; %% write init; parser->cs = cs; parser->body_start = 0; parser->content_len = 0; parser->mark = 0; parser->nread = 0; parser->field_len = 0; parser->field_start = 0; return(1); } /** exec **/ size_t httpclient_parser_execute(httpclient_parser *parser, const char *buffer, size_t len, size_t off) { const char *p, *pe; int cs = parser->cs; assert(off <= len && "offset past end of buffer"); p = buffer+off; pe = buffer+len; assert(*pe == '\0' && "pointer does not end on NUL"); assert(pe - p == len - off && "pointers aren't same distance"); %% write exec; parser->cs = cs; parser->nread += p - (buffer + off); assert(p <= pe && "buffer overflow after parsing execute"); assert(parser->nread <= len && "nread longer than length"); assert(parser->body_start <= len && "body starts after buffer end"); assert(parser->mark < len && "mark is after buffer end"); assert(parser->field_len <= len && "field has length longer than whole buffer"); assert(parser->field_start < len && "field starts after buffer end"); if(parser->body_start) { /* final \r\n combo encountered so stop right here */ parser->nread = parser->body_start; } return(parser->nread); } int httpclient_parser_finish(httpclient_parser *parser) { int cs = parser->cs; parser->cs = cs; if (httpclient_parser_has_error(parser) ) { return -1; } else if (httpclient_parser_is_finished(parser) ) { return 1; } else { return 0; } } int httpclient_parser_has_error(httpclient_parser *parser) { return parser->cs == httpclient_parser_error; } int httpclient_parser_is_finished(httpclient_parser *parser) { return parser->cs == httpclient_parser_first_final; } em-http-request-0.3.0/ext/http11_client/ext_help.h0000644000175000017500000000075111772656255021345 0ustar tfheentfheen#ifndef ext_help_h #define ext_help_h #define RAISE_NOT_NULL(T) if(T == NULL) rb_raise(rb_eArgError, "NULL found for " # T " when shouldn't be."); #define DATA_GET(from,type,name) Data_Get_Struct(from,type,name); RAISE_NOT_NULL(name); #define REQUIRE_TYPE(V, T) if(TYPE(V) != T) rb_raise(rb_eTypeError, "Wrong argument type for " # V " required " # T); #ifdef DEBUG #define TRACE() fprintf(stderr, "> %s:%d:%s\n", __FILE__, __LINE__, __FUNCTION__) #else #define TRACE() #endif #endif em-http-request-0.3.0/ext/http11_client/http11_client.c0000644000175000017500000001765211772656255022217 0ustar tfheentfheen/** * Copyright (c) 2005 Zed A. Shaw * You can redistribute it and/or modify it under the same terms as Ruby. 
*/ #include "ruby.h" #include "ext_help.h" #include #include #include "http11_parser.h" #include static VALUE mEm; static VALUE cHttpClientParser; static VALUE eHttpClientParserError; #define id_reason rb_intern("@http_reason") #define id_status rb_intern("@http_status") #define id_version rb_intern("@http_version") #define id_body rb_intern("@http_body") #define id_chunk_size rb_intern("@http_chunk_size") #define id_last_chunk rb_intern("@last_chunk") #ifndef RHASH_TBL /* rb_hash_lookup() is only in Ruby 1.8.7 */ static VALUE rb_hash_lookup(VALUE hash, VALUE key) { VALUE val; if (!st_lookup(RHASH(hash)->tbl, key, &val)) { return Qnil; /* without Hash#default */ } return val; } #endif void client_http_field(void *data, const char *field, size_t flen, const char *value, size_t vlen) { char *ch, *end; VALUE req = (VALUE)data; VALUE v = Qnil; VALUE f = Qnil; VALUE el = Qnil; v = rb_str_new(value, vlen); f = rb_str_new(field, flen); /* Yes Children, rb_str_upcase_bang isn't even available as an intern.h function. * how incredibly handy to not have that. Nope, I have to do it by hand.*/ for(ch = RSTRING_PTR(f), end = ch + RSTRING_LEN(f); ch < end; ch++) { if(*ch == '-') { *ch = '_'; } else { *ch = toupper(*ch); } } el = rb_hash_lookup(req, f); switch(TYPE(el)) { case T_ARRAY: rb_ary_push(el, v); break; case T_STRING: rb_hash_aset(req, f, rb_ary_new3(2, el, v)); break; default: rb_hash_aset(req, f, v); break; } } void client_reason_phrase(void *data, const char *at, size_t length) { VALUE req = (VALUE)data; VALUE v = Qnil; v = rb_str_new(at, length); rb_ivar_set(req, id_reason, v); } void client_status_code(void *data, const char *at, size_t length) { VALUE req = (VALUE)data; VALUE v = Qnil; v = rb_str_new(at, length); rb_ivar_set(req, id_status, v); } void client_http_version(void *data, const char *at, size_t length) { VALUE req = (VALUE)data; VALUE v = Qnil; v = rb_str_new(at, length); rb_ivar_set(req, id_version, v); } /** Finalizes the request header to have a bunch of stuff that's needed. */ void client_header_done(void *data, const char *at, size_t length) { VALUE req = (VALUE)data; VALUE v = Qnil; v = rb_str_new(at, length); rb_ivar_set(req, id_body, v); } void client_chunk_size(void *data, const char *at, size_t length) { VALUE req = (VALUE)data; VALUE v = Qnil; if(length <= 0) { rb_raise(eHttpClientParserError, "Chunked Encoding gave <= 0 chunk size."); } v = rb_str_new(at, length); rb_ivar_set(req, id_chunk_size, v); } void client_last_chunk(void *data, const char *at, size_t length) { VALUE req = (VALUE)data; rb_ivar_set(req, id_last_chunk,Qtrue); } void HttpClientParser_free(void *data) { TRACE(); if(data) { free(data); } } VALUE HttpClientParser_alloc(VALUE klass) { VALUE obj; httpclient_parser *hp = ALLOC_N(httpclient_parser, 1); TRACE(); hp->http_field = client_http_field; hp->status_code = client_status_code; hp->reason_phrase = client_reason_phrase; hp->http_version = client_http_version; hp->header_done = client_header_done; hp->chunk_size = client_chunk_size; hp->last_chunk = client_last_chunk; httpclient_parser_init(hp); obj = Data_Wrap_Struct(klass, NULL, HttpClientParser_free, hp); return obj; } /** * call-seq: * parser.new -> parser * * Creates a new parser. */ VALUE HttpClientParser_init(VALUE self) { httpclient_parser *http = NULL; DATA_GET(self, httpclient_parser, http); httpclient_parser_init(http); return self; } /** * call-seq: * parser.reset -> nil * * Resets the parser to it's initial state so that you can reuse it * rather than making new ones. 
*/ VALUE HttpClientParser_reset(VALUE self) { httpclient_parser *http = NULL; DATA_GET(self, httpclient_parser, http); httpclient_parser_init(http); return Qnil; } /** * call-seq: * parser.finish -> true/false * * Finishes a parser early which could put in a "good" or bad state. * You should call reset after finish it or bad things will happen. */ VALUE HttpClientParser_finish(VALUE self) { httpclient_parser *http = NULL; DATA_GET(self, httpclient_parser, http); httpclient_parser_finish(http); return httpclient_parser_is_finished(http) ? Qtrue : Qfalse; } /** * call-seq: * parser.execute(req_hash, data, start) -> Integer * * Takes a Hash and a String of data, parses the String of data filling in the Hash * returning an Integer to indicate how much of the data has been read. No matter * what the return value, you should call HttpClientParser#finished? and HttpClientParser#error? * to figure out if it's done parsing or there was an error. * * This function now throws an exception when there is a parsing error. This makes * the logic for working with the parser much easier. You can still test for an * error, but now you need to wrap the parser with an exception handling block. * * The third argument allows for parsing a partial request and then continuing * the parsing from that position. It needs all of the original data as well * so you have to append to the data buffer as you read. */ VALUE HttpClientParser_execute(VALUE self, VALUE req_hash, VALUE data, VALUE start) { httpclient_parser *http = NULL; int from = 0; char *dptr = NULL; long dlen = 0; REQUIRE_TYPE(req_hash, T_HASH); REQUIRE_TYPE(data, T_STRING); REQUIRE_TYPE(start, T_FIXNUM); DATA_GET(self, httpclient_parser, http); from = FIX2INT(start); dptr = RSTRING_PTR(data); dlen = RSTRING_LEN(data); if(from >= dlen) { rb_raise(eHttpClientParserError, "Requested start is after data buffer end."); } else { http->data = (void *)req_hash; httpclient_parser_execute(http, dptr, dlen, from); if(httpclient_parser_has_error(http)) { rb_raise(eHttpClientParserError, "Invalid HTTP format, parsing fails."); } else { return INT2FIX(httpclient_parser_nread(http)); } } } /** * call-seq: * parser.error? -> true/false * * Tells you whether the parser is in an error state. */ VALUE HttpClientParser_has_error(VALUE self) { httpclient_parser *http = NULL; DATA_GET(self, httpclient_parser, http); return httpclient_parser_has_error(http) ? Qtrue : Qfalse; } /** * call-seq: * parser.finished? -> true/false * * Tells you whether the parser is finished or not and in a good state. */ VALUE HttpClientParser_is_finished(VALUE self) { httpclient_parser *http = NULL; DATA_GET(self, httpclient_parser, http); return httpclient_parser_is_finished(http) ? Qtrue : Qfalse; } /** * call-seq: * parser.nread -> Integer * * Returns the amount of data processed so far during this processing cycle. It is * set to 0 on initialize or reset calls and is incremented each time execute is called. 
*/ VALUE HttpClientParser_nread(VALUE self) { httpclient_parser *http = NULL; DATA_GET(self, httpclient_parser, http); return INT2FIX(http->nread); } void Init_http11_client() { mEm = rb_define_module("EventMachine"); eHttpClientParserError = rb_define_class_under(mEm, "HttpClientParserError", rb_eIOError); cHttpClientParser = rb_define_class_under(mEm, "HttpClientParser", rb_cObject); rb_define_alloc_func(cHttpClientParser, HttpClientParser_alloc); rb_define_method(cHttpClientParser, "initialize", HttpClientParser_init,0); rb_define_method(cHttpClientParser, "reset", HttpClientParser_reset,0); rb_define_method(cHttpClientParser, "finish", HttpClientParser_finish,0); rb_define_method(cHttpClientParser, "execute", HttpClientParser_execute,3); rb_define_method(cHttpClientParser, "error?", HttpClientParser_has_error,0); rb_define_method(cHttpClientParser, "finished?", HttpClientParser_is_finished,0); rb_define_method(cHttpClientParser, "nread", HttpClientParser_nread,0); } em-http-request-0.3.0/ext/http11_client/http11_parser.c0000644000175000017500000002370211772656255022226 0ustar tfheentfheen #line 1 "http11_parser.rl" /** * Copyright (c) 2005 Zed A. Shaw * You can redistribute it and/or modify it under the same terms as Ruby. */ #include "http11_parser.h" #include #include #include #include #include #define LEN(AT, FPC) (FPC - buffer - parser->AT) #define MARK(M,FPC) (parser->M = (FPC) - buffer) #define PTR_TO(F) (buffer + parser->F) #define L(M) fprintf(stderr, "" # M "\n"); /** machine **/ #line 95 "http11_parser.rl" /** Data **/ #line 29 "http11_parser.c" static const char _httpclient_parser_actions[] = { 0, 1, 0, 1, 1, 1, 2, 1, 3, 1, 4, 1, 5, 1, 7, 1, 8, 1, 10, 2, 0, 5, 2, 2, 3, 2, 3, 4, 2, 4, 10, 2, 6, 0, 2, 8, 10, 2, 9, 10, 2, 10, 9, 3, 2, 3, 4, 3, 4, 9, 10, 3, 4, 10, 9, 3, 6, 0, 5, 3, 8, 10, 9, 4, 2, 3, 4, 10, 5, 2, 3, 4, 9, 10, 5, 2, 3, 4, 10, 9 }; static const short _httpclient_parser_key_offsets[] = { 0, 0, 11, 12, 24, 29, 30, 31, 43, 58, 80, 95, 116, 131, 153, 168, 189, 204, 223, 238, 256, 257, 258, 259, 260, 262, 265, 267, 270, 272, 274, 276, 279, 281, 298, 314, 317, 319, 320, 322 }; static const char _httpclient_parser_trans_keys[] = { 10, 13, 48, 59, 72, 49, 57, 65, 70, 97, 102, 10, 10, 13, 32, 59, 9, 12, 48, 57, 65, 70, 97, 102, 10, 13, 32, 9, 12, 10, 10, 10, 13, 32, 59, 9, 12, 48, 57, 65, 70, 97, 102, 33, 124, 126, 35, 39, 42, 43, 45, 46, 48, 57, 65, 90, 94, 122, 10, 13, 32, 33, 59, 61, 124, 126, 9, 12, 35, 39, 42, 43, 45, 46, 48, 57, 65, 90, 94, 122, 33, 124, 126, 35, 39, 42, 43, 45, 46, 48, 57, 65, 90, 94, 122, 10, 13, 32, 33, 59, 124, 126, 9, 12, 35, 39, 42, 43, 45, 46, 48, 57, 65, 90, 94, 122, 33, 124, 126, 35, 39, 42, 43, 45, 46, 48, 57, 65, 90, 94, 122, 10, 13, 32, 33, 59, 61, 124, 126, 9, 12, 35, 39, 42, 43, 45, 46, 48, 57, 65, 90, 94, 122, 33, 124, 126, 35, 39, 42, 43, 45, 46, 48, 57, 65, 90, 94, 122, 10, 13, 32, 33, 59, 124, 126, 9, 12, 35, 39, 42, 43, 45, 46, 48, 57, 65, 90, 94, 122, 33, 124, 126, 35, 39, 42, 43, 45, 46, 48, 57, 65, 90, 94, 122, 10, 13, 33, 59, 61, 124, 126, 35, 39, 42, 43, 45, 46, 48, 57, 65, 90, 94, 122, 33, 124, 126, 35, 39, 42, 43, 45, 46, 48, 57, 65, 90, 94, 122, 10, 13, 33, 59, 124, 126, 35, 39, 42, 43, 45, 46, 48, 57, 65, 90, 94, 122, 84, 84, 80, 47, 48, 57, 46, 48, 57, 48, 57, 32, 48, 57, 48, 57, 48, 57, 48, 57, 10, 13, 32, 10, 13, 10, 13, 33, 124, 126, 35, 39, 42, 43, 45, 46, 48, 57, 65, 90, 94, 122, 33, 58, 124, 126, 35, 39, 42, 43, 45, 46, 48, 57, 65, 90, 94, 122, 10, 13, 32, 10, 13, 10, 10, 13, 0 }; static const char 
_httpclient_parser_single_lengths[] = { 0, 5, 1, 4, 3, 1, 1, 4, 3, 8, 3, 7, 3, 8, 3, 7, 3, 7, 3, 6, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 3, 2, 5, 4, 3, 2, 1, 2, 0 }; static const char _httpclient_parser_range_lengths[] = { 0, 3, 0, 4, 1, 0, 0, 4, 6, 7, 6, 7, 6, 7, 6, 7, 6, 6, 6, 6, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 6, 6, 0, 0, 0, 0, 0 }; static const short _httpclient_parser_index_offsets[] = { 0, 0, 9, 11, 20, 25, 27, 29, 38, 48, 64, 74, 89, 99, 115, 125, 140, 150, 164, 174, 187, 189, 191, 193, 195, 197, 200, 202, 205, 207, 209, 211, 215, 218, 230, 241, 245, 248, 250, 253 }; static const char _httpclient_parser_indicies[] = { 0, 2, 3, 5, 6, 4, 4, 4, 1, 0, 1, 8, 9, 7, 11, 7, 10, 10, 10, 1, 13, 14, 12, 12, 1, 13, 1, 15, 1, 16, 17, 7, 18, 7, 10, 10, 10, 1, 19, 19, 19, 19, 19, 19, 19, 19, 19, 1, 21, 22, 20, 23, 24, 25, 23, 23, 20, 23, 23, 23, 23, 23, 23, 1, 26, 26, 26, 26, 26, 26, 26, 26, 26, 1, 28, 29, 27, 30, 31, 30, 30, 27, 30, 30, 30, 30, 30, 30, 1, 32, 32, 32, 32, 32, 32, 32, 32, 32, 1, 33, 34, 20, 35, 36, 37, 35, 35, 20, 35, 35, 35, 35, 35, 35, 1, 38, 38, 38, 38, 38, 38, 38, 38, 38, 1, 39, 40, 27, 41, 42, 41, 41, 27, 41, 41, 41, 41, 41, 41, 1, 43, 43, 43, 43, 43, 43, 43, 43, 43, 1, 44, 45, 46, 47, 48, 46, 46, 46, 46, 46, 46, 46, 46, 1, 49, 49, 49, 49, 49, 49, 49, 49, 49, 1, 50, 51, 52, 53, 52, 52, 52, 52, 52, 52, 52, 52, 1, 54, 1, 55, 1, 56, 1, 57, 1, 58, 1, 59, 58, 1, 60, 1, 61, 60, 1, 62, 1, 63, 1, 64, 1, 66, 67, 68, 65, 70, 71, 69, 13, 14, 72, 72, 72, 72, 72, 72, 72, 72, 72, 1, 73, 74, 73, 73, 73, 73, 73, 73, 73, 73, 1, 76, 77, 78, 75, 80, 81, 79, 82, 1, 84, 85, 83, 1, 0 }; static const char _httpclient_parser_trans_targs[] = { 39, 0, 2, 3, 7, 16, 20, 4, 39, 6, 7, 12, 4, 39, 5, 39, 39, 5, 8, 9, 4, 39, 5, 9, 8, 10, 11, 4, 39, 5, 11, 8, 13, 39, 6, 13, 12, 14, 15, 39, 6, 15, 12, 17, 39, 2, 17, 16, 18, 19, 39, 2, 19, 16, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 37, 38, 32, 33, 37, 34, 34, 35, 36, 33, 37, 35, 36, 33, 37, 33, 32, 33, 37 }; static const char _httpclient_parser_trans_actions[] = { 37, 0, 0, 1, 1, 0, 1, 15, 59, 15, 0, 15, 0, 17, 0, 40, 34, 15, 15, 3, 43, 63, 43, 0, 43, 22, 7, 9, 28, 9, 0, 9, 3, 74, 43, 0, 43, 22, 7, 51, 9, 0, 9, 3, 68, 43, 0, 43, 22, 7, 47, 9, 0, 9, 0, 0, 0, 0, 0, 0, 0, 13, 1, 0, 0, 31, 55, 55, 31, 0, 11, 11, 3, 0, 5, 7, 25, 25, 7, 0, 9, 9, 0, 1, 19, 19 }; static const int httpclient_parser_start = 1; static const int httpclient_parser_first_final = 39; static const int httpclient_parser_error = 0; static const int httpclient_parser_en_main = 1; #line 99 "http11_parser.rl" int httpclient_parser_init(httpclient_parser *parser) { int cs = 0; #line 195 "http11_parser.c" { cs = httpclient_parser_start; } #line 103 "http11_parser.rl" parser->cs = cs; parser->body_start = 0; parser->content_len = 0; parser->mark = 0; parser->nread = 0; parser->field_len = 0; parser->field_start = 0; return(1); } /** exec **/ size_t httpclient_parser_execute(httpclient_parser *parser, const char *buffer, size_t len, size_t off) { const char *p, *pe; int cs = parser->cs; assert(off <= len && "offset past end of buffer"); p = buffer+off; pe = buffer+len; assert(*pe == '\0' && "pointer does not end on NUL"); assert(pe - p == len - off && "pointers aren't same distance"); #line 228 "http11_parser.c" { int _klen; unsigned int _trans; const char *_acts; unsigned int _nacts; const char *_keys; if ( p == pe ) goto _test_eof; if ( cs == 0 ) goto _out; _resume: _keys = _httpclient_parser_trans_keys + _httpclient_parser_key_offsets[cs]; _trans = 
_httpclient_parser_index_offsets[cs]; _klen = _httpclient_parser_single_lengths[cs]; if ( _klen > 0 ) { const char *_lower = _keys; const char *_mid; const char *_upper = _keys + _klen - 1; while (1) { if ( _upper < _lower ) break; _mid = _lower + ((_upper-_lower) >> 1); if ( (*p) < *_mid ) _upper = _mid - 1; else if ( (*p) > *_mid ) _lower = _mid + 1; else { _trans += (_mid - _keys); goto _match; } } _keys += _klen; _trans += _klen; } _klen = _httpclient_parser_range_lengths[cs]; if ( _klen > 0 ) { const char *_lower = _keys; const char *_mid; const char *_upper = _keys + (_klen<<1) - 2; while (1) { if ( _upper < _lower ) break; _mid = _lower + (((_upper-_lower) >> 1) & ~1); if ( (*p) < _mid[0] ) _upper = _mid - 2; else if ( (*p) > _mid[1] ) _lower = _mid + 2; else { _trans += ((_mid - _keys)>>1); goto _match; } } _trans += _klen; } _match: _trans = _httpclient_parser_indicies[_trans]; cs = _httpclient_parser_trans_targs[_trans]; if ( _httpclient_parser_trans_actions[_trans] == 0 ) goto _again; _acts = _httpclient_parser_actions + _httpclient_parser_trans_actions[_trans]; _nacts = (unsigned int) *_acts++; while ( _nacts-- > 0 ) { switch ( *_acts++ ) { case 0: #line 23 "http11_parser.rl" {MARK(mark, p); } break; case 1: #line 25 "http11_parser.rl" { MARK(field_start, p); } break; case 2: #line 27 "http11_parser.rl" { parser->field_len = LEN(field_start, p); } break; case 3: #line 31 "http11_parser.rl" { MARK(mark, p); } break; case 4: #line 33 "http11_parser.rl" { parser->http_field(parser->data, PTR_TO(field_start), parser->field_len, PTR_TO(mark), LEN(mark, p)); } break; case 5: #line 37 "http11_parser.rl" { parser->reason_phrase(parser->data, PTR_TO(mark), LEN(mark, p)); } break; case 6: #line 41 "http11_parser.rl" { parser->status_code(parser->data, PTR_TO(mark), LEN(mark, p)); } break; case 7: #line 45 "http11_parser.rl" { parser->http_version(parser->data, PTR_TO(mark), LEN(mark, p)); } break; case 8: #line 49 "http11_parser.rl" { parser->chunk_size(parser->data, PTR_TO(mark), LEN(mark, p)); } break; case 9: #line 53 "http11_parser.rl" { parser->last_chunk(parser->data, NULL, 0); } break; case 10: #line 57 "http11_parser.rl" { parser->body_start = p - buffer + 1; if(parser->header_done != NULL) parser->header_done(parser->data, p + 1, pe - p - 1); {p++; goto _out; } } break; #line 365 "http11_parser.c" } } _again: if ( cs == 0 ) goto _out; if ( ++p != pe ) goto _resume; _test_eof: {} _out: {} } #line 130 "http11_parser.rl" parser->cs = cs; parser->nread += p - (buffer + off); assert(p <= pe && "buffer overflow after parsing execute"); assert(parser->nread <= len && "nread longer than length"); assert(parser->body_start <= len && "body starts after buffer end"); assert(parser->mark < len && "mark is after buffer end"); assert(parser->field_len <= len && "field has length longer than whole buffer"); assert(parser->field_start < len && "field starts after buffer end"); if(parser->body_start) { /* final \r\n combo encountered so stop right here */ parser->nread = parser->body_start; } return(parser->nread); } int httpclient_parser_finish(httpclient_parser *parser) { int cs = parser->cs; parser->cs = cs; if (httpclient_parser_has_error(parser) ) { return -1; } else if (httpclient_parser_is_finished(parser) ) { return 1; } else { return 0; } } int httpclient_parser_has_error(httpclient_parser *parser) { return parser->cs == httpclient_parser_error; } int httpclient_parser_is_finished(httpclient_parser *parser) { return parser->cs == httpclient_parser_first_final; } 
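# ----------------------------------------------------------------------------
# Illustrative sketch (not from the gem itself): driving the parser extension
# directly from Ruby, based on the call-seq documentation in
# ext/http11_client/http11_client.c above. The sample response string is an
# assumption; in the gem itself, EventMachine::HttpClient feeds data in
# incrementally and tracks the parse offset across packets.

require 'http11_client'

parser  = EventMachine::HttpClientParser.new
headers = {}   # field names arrive upcased, with '-' mapped to '_'
data    = "HTTP/1.1 200 OK\r\nContent-Length: 5\r\n\r\nhello"

nread = parser.execute(headers, data, 0)   # bytes consumed; raises EventMachine::HttpClientParserError on bad input
if parser.finished?
  headers["CONTENT_LENGTH"]   # => "5"
  data[nread..-1]             # => "hello" -- the body starts where the parser stopped
end
parser.reset                  # reset state so the same parser can be reused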
em-http-request-0.3.0/ext/http11_client/http11_parser.h0000644000175000017500000000225611772656255022234 0ustar tfheentfheen/** * Copyright (c) 2005 Zed A. Shaw * You can redistribute it and/or modify it under the same terms as Ruby. */ #ifndef http11_parser_h #define http11_parser_h #include #if defined(_WIN32) #include #endif typedef void (*element_cb)(void *data, const char *at, size_t length); typedef void (*field_cb)(void *data, const char *field, size_t flen, const char *value, size_t vlen); typedef struct httpclient_parser { int cs; size_t body_start; int content_len; size_t nread; size_t mark; size_t field_start; size_t field_len; void *data; field_cb http_field; element_cb reason_phrase; element_cb status_code; element_cb chunk_size; element_cb http_version; element_cb header_done; element_cb last_chunk; } httpclient_parser; int httpclient_parser_init(httpclient_parser *parser); int httpclient_parser_finish(httpclient_parser *parser); size_t httpclient_parser_execute(httpclient_parser *parser, const char *data, size_t len, size_t off); int httpclient_parser_has_error(httpclient_parser *parser); int httpclient_parser_is_finished(httpclient_parser *parser); #define httpclient_parser_nread(parser) (parser)->nread #endif em-http-request-0.3.0/ext/buffer/0000755000175000017500000000000011772656255016153 5ustar tfheentfheenem-http-request-0.3.0/ext/buffer/extconf.rb0000644000175000017500000000172511772656255020153 0ustar tfheentfheenrequire 'mkmf' libs = [] $defs << "-DRUBY_VERSION_CODE=#{RUBY_VERSION.gsub(/\D/, '')}" if have_func('rb_thread_blocking_region') $defs << '-DHAVE_RB_THREAD_BLOCKING_REGION' end if have_func('rb_str_set_len') $defs << '-DHAVE_RB_STR_SET_LEN' end if have_header('sys/select.h') $defs << '-DEV_USE_SELECT' end if have_header('poll.h') $defs << '-DEV_USE_POLL' end if have_header('sys/epoll.h') $defs << '-DEV_USE_EPOLL' end if have_header('sys/event.h') and have_header('sys/queue.h') $defs << '-DEV_USE_KQUEUE' end if have_header('port.h') $defs << '-DEV_USE_PORT' end if have_header('openssl/ssl.h') $defs << '-DHAVE_OPENSSL_SSL_H' libs << '-lssl -lcrypto' end # ncpu detection specifics case RUBY_PLATFORM when /linux/ $defs << '-DHAVE_LINUX_PROCFS' else if have_func('sysctlbyname', ['sys/param.h', 'sys/sysctl.h']) $defs << '-DHAVE_SYSCTLBYNAME' end end $LIBS << ' ' << libs.join(' ') dir_config('em_buffer') create_makefile('em_buffer') em-http-request-0.3.0/ext/buffer/em_buffer.c0000644000175000017500000004000511772656255020250 0ustar tfheentfheen/* * Copyright (C) 2007 Tony Arcieri * You may redistribute this under the terms of the Ruby license. 
* See LICENSE for details */ #include "ruby.h" #include "rubyio.h" #include #include #include #include #ifndef GetReadFile #define FPTR_TO_FD(fptr) (fptr->fd) #else #define FPTR_TO_FD(fptr) (fileno(GetReadFile(fptr))) #endif /* Default number of bytes in each node's buffer */ #define DEFAULT_NODE_SIZE 16384 /* Maximum age of a buffer node in a memory pool, in seconds */ #define MAX_AGE 60 /* How often to scan the pool for old nodes */ #define PURGE_INTERVAL 10 struct buffer { time_t last_purged_at; unsigned size, node_size; struct buffer_node *head, *tail; struct buffer_node *pool_head, *pool_tail; }; struct buffer_node { time_t last_used_at; unsigned start, end; struct buffer_node *next; unsigned char data[0]; }; static VALUE mEm = Qnil; static VALUE cEm_Buffer = Qnil; static VALUE Em_Buffer_allocate(VALUE klass); static void Em_Buffer_mark(struct buffer *); static void Em_Buffer_free(struct buffer *); static VALUE Em_Buffer_initialize(int argc, VALUE *argv, VALUE self); static VALUE Em_Buffer_clear(VALUE self); static VALUE Em_Buffer_size(VALUE self); static VALUE Em_Buffer_empty(VALUE self); static VALUE Em_Buffer_append(VALUE self, VALUE data); static VALUE Em_Buffer_prepend(VALUE self, VALUE data); static VALUE Em_Buffer_read(int argc, VALUE *argv, VALUE self); static VALUE Em_Buffer_to_str(VALUE self); static VALUE Em_Buffer_read_from(VALUE self, VALUE io); static VALUE Em_Buffer_write_to(VALUE self, VALUE io); static struct buffer *buffer_new(void); static void buffer_clear(struct buffer *buf); static void buffer_free(struct buffer *buf); static void buffer_gc(struct buffer *buf); static void buffer_prepend(struct buffer *buf, char *str, unsigned len); static void buffer_append(struct buffer *buf, char *str, unsigned len); static void buffer_read(struct buffer *buf, char *str, unsigned len); static void buffer_copy(struct buffer *buf, char *str, unsigned len); static int buffer_read_from(struct buffer *buf, int fd); static int buffer_write_to(struct buffer *buf, int fd); /* * High speed buffering geared towards non-blocking I/O. * * Data is stored in a byte queue implemented as a linked list of equal size * chunks. Since every node in the list is the same size they are easily * memory pooled. Routines are provided for high speed non-blocking reads * and writes from Ruby IO objects. 
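 *
 * Illustrative Ruby usage (a sketch, not from the original source), based on
 * the methods bound in Init_em_buffer() below; `io' stands for any IO object:
 *
 *   buf = EventMachine::Buffer.new        # default 16384-byte nodes
 *   buf << "GET / HTTP/1.1\r\n\r\n"       # append at the tail
 *   buf.prepend("preamble")               # push data onto the head
 *   first = buf.read(4)                   # consume the first 4 bytes
 *   copy  = buf.to_str                    # copy what's left without clearing it
 *   buf.write_to(io)                      # nonblocking write of the rest to io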
*/ void Init_em_buffer() { mEm = rb_define_module("EventMachine"); cEm_Buffer = rb_define_class_under(mEm, "Buffer", rb_cObject); rb_define_alloc_func(cEm_Buffer, Em_Buffer_allocate); rb_define_method(cEm_Buffer, "initialize", Em_Buffer_initialize, -1); rb_define_method(cEm_Buffer, "clear", Em_Buffer_clear, 0); rb_define_method(cEm_Buffer, "size", Em_Buffer_size, 0); rb_define_method(cEm_Buffer, "empty?", Em_Buffer_empty, 0); rb_define_method(cEm_Buffer, "<<", Em_Buffer_append, 1); rb_define_method(cEm_Buffer, "append", Em_Buffer_append, 1); rb_define_method(cEm_Buffer, "prepend", Em_Buffer_prepend, 1); rb_define_method(cEm_Buffer, "read", Em_Buffer_read, -1); rb_define_method(cEm_Buffer, "to_str", Em_Buffer_to_str, 0); rb_define_method(cEm_Buffer, "read_from", Em_Buffer_read_from, 1); rb_define_method(cEm_Buffer, "write_to", Em_Buffer_write_to, 1); } static VALUE Em_Buffer_allocate(VALUE klass) { return Data_Wrap_Struct(klass, Em_Buffer_mark, Em_Buffer_free, buffer_new()); } static void Em_Buffer_mark(struct buffer *buf) { /* Walks the pool of unused chunks and frees any that are beyond a certain age */ buffer_gc(buf); } static void Em_Buffer_free(struct buffer *buf) { buffer_free(buf); } /** * call-seq: * EventMachine::Buffer.new(size = DEFAULT_NODE_SIZE) -> EventMachine::Buffer * * Create a new EventMachine::Buffer with linked segments of the given size */ static VALUE Em_Buffer_initialize(int argc, VALUE *argv, VALUE self) { VALUE node_size_obj; int node_size; struct buffer *buf; if(rb_scan_args(argc, argv, "01", &node_size_obj) == 1) { node_size = NUM2INT(node_size_obj); if(node_size < 1) rb_raise(rb_eArgError, "invalid buffer size"); Data_Get_Struct(self, struct buffer, buf); /* Make sure we're not changing the buffer size after data has been allocated */ assert(!buf->head); assert(!buf->pool_head); buf->node_size = node_size; } return Qnil; } /** * call-seq: * EventMachine::Buffer#clear -> nil * * Clear all data from the EventMachine::Buffer */ static VALUE Em_Buffer_clear(VALUE self) { struct buffer *buf; Data_Get_Struct(self, struct buffer, buf); buffer_clear(buf); return Qnil; } /** * call-seq: * EventMachine::Buffer#size -> Integer * * Return the size of the buffer in bytes */ static VALUE Em_Buffer_size(VALUE self) { struct buffer *buf; Data_Get_Struct(self, struct buffer, buf); return INT2NUM(buf->size); } /** * call-seq: * EventMachine::Buffer#empty? -> Boolean * * Is the buffer empty? */ static VALUE Em_Buffer_empty(VALUE self) { struct buffer *buf; Data_Get_Struct(self, struct buffer, buf); return buf->size > 0 ? Qfalse : Qtrue; } /** * call-seq: * EventMachine::Buffer#append(data) -> String * * Append the given data to the end of the buffer */ static VALUE Em_Buffer_append(VALUE self, VALUE data) { struct buffer *buf; Data_Get_Struct(self, struct buffer, buf); /* Is this needed? Never seen anyone else do it... */ data = rb_convert_type(data, T_STRING, "String", "to_str"); buffer_append(buf, RSTRING_PTR(data), RSTRING_LEN(data)); return data; } /** * call-seq: * EventMachine::Buffer#prepend(data) -> String * * Prepend the given data to the beginning of the buffer */ static VALUE Em_Buffer_prepend(VALUE self, VALUE data) { struct buffer *buf; Data_Get_Struct(self, struct buffer, buf); data = rb_convert_type(data, T_STRING, "String", "to_str"); buffer_prepend(buf, RSTRING_PTR(data), RSTRING_LEN(data)); return data; } /** * call-seq: * EventMachine::Buffer#read(length = nil) -> String * * Read the specified abount of data from the buffer. 
If no value * is given the entire contents of the buffer are returned. Any data * read from the buffer is cleared. */ static VALUE Em_Buffer_read(int argc, VALUE *argv, VALUE self) { VALUE length_obj, str; int length; struct buffer *buf; Data_Get_Struct(self, struct buffer, buf); if(rb_scan_args(argc, argv, "01", &length_obj) == 1) { length = NUM2INT(length_obj); } else { if(buf->size == 0) return rb_str_new2(""); length = buf->size; } if(length > buf->size) length = buf->size; if(length < 1) rb_raise(rb_eArgError, "length must be greater than zero"); str = rb_str_new(0, length); buffer_read(buf, RSTRING_PTR(str), length); return str; } /** * call-seq: * EventMachine::Buffer#to_str -> String * * Convert the Buffer to a String. The original buffer is unmodified. */ static VALUE Em_Buffer_to_str(VALUE self) { VALUE str; struct buffer *buf; Data_Get_Struct(self, struct buffer, buf); str = rb_str_new(0, buf->size); buffer_copy(buf, RSTRING_PTR(str), buf->size); return str; } /** * call-seq: * EventMachine::Buffer#read_from(io) -> Integer * * Perform a nonblocking read of the the given IO object and fill * the buffer with any data received. The call will read as much * data as it can until the read would block. */ static VALUE Em_Buffer_read_from(VALUE self, VALUE io) { struct buffer *buf; #if HAVE_RB_IO_T rb_io_t *fptr; #else OpenFile *fptr; #endif Data_Get_Struct(self, struct buffer, buf); GetOpenFile(rb_convert_type(io, T_FILE, "IO", "to_io"), fptr); rb_io_set_nonblock(fptr); #ifdef HAVE_RB_IO_FD return INT2NUM(buffer_read_from(buf, rb_io_fd(io))); #else return INT2NUM(buffer_read_from(buf, FPTR_TO_FD(fptr))); #endif } /** * call-seq: * EventMachine::Buffer#write_to(io) -> Integer * * Perform a nonblocking write of the buffer to the given IO object. * As much data as possible is written until the call would block. * Any data which is written is removed from the buffer. */ static VALUE Em_Buffer_write_to(VALUE self, VALUE io) { struct buffer *buf; #if HAVE_RB_IO_T rb_io_t *fptr; #else OpenFile *fptr; #endif Data_Get_Struct(self, struct buffer, buf); GetOpenFile(rb_convert_type(io, T_FILE, "IO", "to_io"), fptr); rb_io_set_nonblock(fptr); #ifdef HAVE_RB_IO_FD return INT2NUM(buffer_read_from(buf, rb_io_fd(io))); #else return INT2NUM(buffer_read_from(buf, FPTR_TO_FD(fptr))); #endif } /* * Ruby bindings end here. Below is the actual implementation of * the underlying data structures. 
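 *
 * Spent nodes are recycled rather than freed immediately: buffer_node_free()
 * pushes them onto a pool list, and buffer_gc() -- run from the GC mark hook,
 * at most once every PURGE_INTERVAL seconds -- releases pool entries that have
 * gone unused for more than MAX_AGE seconds.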
*/ /* Create a new buffer */ static struct buffer *buffer_new(void) { struct buffer *buf; buf = (struct buffer *)xmalloc(sizeof(struct buffer)); buf->head = buf->tail = buf->pool_head = buf->pool_tail = 0; buf->size = 0; buf->node_size = DEFAULT_NODE_SIZE; time(&buf->last_purged_at); return buf; } /* Clear all data from a buffer */ static void buffer_clear(struct buffer *buf) { struct buffer_node *tmp; /* Move everything into the buffer pool */ if(!buf->pool_tail) buf->pool_head = buf->pool_tail = buf->head; else buf->pool_tail->next = buf->head; buf->head = buf->tail = 0; buf->size = 0; } /* Free a buffer */ static void buffer_free(struct buffer *buf) { struct buffer_node *tmp; buffer_clear(buf); while(buf->pool_head) { tmp = buf->pool_head; buf->pool_head = tmp->next; free(tmp); } free(buf); } /* Run through the pool and find elements that haven't been used for awhile */ static void buffer_gc(struct buffer *buf) { struct buffer_node *cur, *tmp; time_t now; time(&now); /* Only purge if we've passed the purge interval */ if(now - buf->last_purged_at < PURGE_INTERVAL) return; buf->last_purged_at = now; while(buf->pool_head && now - buf->pool_head->last_used_at >= MAX_AGE) { tmp = buf->pool_head; buf->pool_head = buf->pool_head->next; free(tmp); } if(!buf->pool_head) buf->pool_tail = 0; } /* Create a new buffer_node (or pull one from the memory pool) */ static struct buffer_node *buffer_node_new(struct buffer *buf) { struct buffer_node *node; /* Pull from the memory pool if available */ if(buf->pool_head) { node = buf->pool_head; buf->pool_head = node->next; if(node->next) node->next = 0; else buf->pool_tail = 0; } else { node = (struct buffer_node *)xmalloc(sizeof(struct buffer_node) + buf->node_size); node->next = 0; } node->start = node->end = 0; return node; } /* Free a buffer node (i.e. 
return it to the memory pool) */ static void buffer_node_free(struct buffer *buf, struct buffer_node *node) { /* Store when the node was freed */ time(&node->last_used_at); node->next = buf->pool_head; buf->pool_head = node; if(!buf->pool_tail) buf->pool_tail = node; } /* Prepend data to the front of the buffer */ static void buffer_prepend(struct buffer *buf, char *str, unsigned len) { struct buffer_node *node, *tmp; buf->size += len; /* If it fits in the beginning of the head */ if(buf->head && buf->head->start >= len) { buf->head->start -= len; memcpy(buf->head->data + buf->head->start, str, len); } else { node = buffer_node_new(buf); node->next = buf->head; buf->head = node; if(!buf->tail) buf->tail = node; while(len > buf->node_size) { memcpy(node->data, str, buf->node_size); node->end = buf->node_size; tmp = buffer_node_new(buf); tmp->next = node->next; node->next = tmp; if(buf->tail == node) buf->tail = tmp; node = tmp; str += buf->node_size; len -= buf->node_size; } if(len > 0) { memcpy(node->data, str, len); node->end = len; } } } /* Append data to the front of the buffer */ static void buffer_append(struct buffer *buf, char *str, unsigned len) { unsigned nbytes; buf->size += len; /* If it fits in the remaining space in the tail */ if(buf->tail && len <= buf->node_size - buf->tail->end) { memcpy(buf->tail->data + buf->tail->end, str, len); buf->tail->end += len; return; } /* Empty list needs initialized */ if(!buf->head) { buf->head = buffer_node_new(buf); buf->tail = buf->head; } /* Build links out of the data */ while(len > 0) { nbytes = buf->node_size - buf->tail->end; if(len < nbytes) nbytes = len; memcpy(buf->tail->data + buf->tail->end, str, nbytes); str += nbytes; len -= nbytes; buf->tail->end += nbytes; if(len > 0) { buf->tail->next = buffer_node_new(buf); buf->tail = buf->tail->next; } } } /* Read data from the buffer (and clear what we've read) */ static void buffer_read(struct buffer *buf, char *str, unsigned len) { unsigned nbytes; struct buffer_node *tmp; while(buf->size > 0 && len > 0) { nbytes = buf->head->end - buf->head->start; if(len < nbytes) nbytes = len; memcpy(str, buf->head->data + buf->head->start, nbytes); str += nbytes; len -= nbytes; buf->head->start += nbytes; buf->size -= nbytes; if(buf->head->start == buf->head->end) { tmp = buf->head; buf->head = tmp->next; buffer_node_free(buf, tmp); if(!buf->head) buf->tail = 0; } } } /* Copy data from the buffer without clearing it */ static void buffer_copy(struct buffer *buf, char *str, unsigned len) { unsigned nbytes; struct buffer_node *node; node = buf->head; while(node && len > 0) { nbytes = node->end - node->start; if(len < nbytes) nbytes = len; memcpy(str, node->data + node->start, nbytes); str += nbytes; len -= nbytes; if(node->start + nbytes == node->end) node = node->next; } } /* Write data from the buffer to a file descriptor */ static int buffer_write_to(struct buffer *buf, int fd) { int bytes_written, total_bytes_written = 0; struct buffer_node *tmp; while(buf->head) { bytes_written = write(fd, buf->head->data + buf->head->start, buf->head->end - buf->head->start); /* If the write failed... */ if(bytes_written < 0) { if(errno != EAGAIN) rb_sys_fail("write"); return total_bytes_written; } total_bytes_written += bytes_written; buf->size -= bytes_written; /* If the write blocked... 
*/ if(bytes_written < buf->head->end - buf->head->start) { buf->head->start += bytes_written; return total_bytes_written; } /* Otherwise we wrote the whole buffer */ tmp = buf->head; buf->head = tmp->next; buffer_node_free(buf, tmp); if(!buf->head) buf->tail = 0; } return total_bytes_written; } /* Read data from a file descriptor to a buffer */ /* Append data to the front of the buffer */ static int buffer_read_from(struct buffer *buf, int fd) { int bytes_read, total_bytes_read = 0; unsigned nbytes; /* Empty list needs initialized */ if(!buf->head) { buf->head = buffer_node_new(buf); buf->tail = buf->head; } do { nbytes = buf->node_size - buf->tail->end; bytes_read = read(fd, buf->tail->data + buf->tail->end, nbytes); if(bytes_read < 1) { if(errno != EAGAIN) rb_sys_fail("read"); return total_bytes_read; } total_bytes_read += bytes_read; buf->tail->end += nbytes; buf->size += nbytes; if(buf->tail->end == buf->node_size) { buf->tail->next = buffer_node_new(buf); buf->tail = buf->tail->next; } } while(bytes_read == nbytes); return total_bytes_read; } em-http-request-0.3.0/Gemfile0000644000175000017500000000004511772656255015374 0ustar tfheentfheensource "http://rubygems.org" gemspecem-http-request-0.3.0/Changelog.md0000644000175000017500000000344611772656255016322 0ustar tfheentfheen# Changelog ## 0.3.0 / 2011-01-15 - IMPORTANT: default to non-persistent connections (timeout => 0 now requires :keepalive => true) - see: https://github.com/igrigorik/em-http-request/commit/1ca5b608e876c18fa6cfa318d0685dcf5b974e09 - added escape_utils dependency to fix slow encode on long string escapes - bugfix: proxy authorization headers - bugfix: default to Encoding.default_external on invalid encoding in response - bugfix: do not normalize URI's internally - bugfix: more robust Encoding detection ## 0.2.15 / 2010-11-18 - bugfix: follow redirects on missing content-length - bugfix: fixed undefined warnings when running in strict mode ## 0.2.14 / 2010-10-06 - bugfix: form-encode keys/values of ruby objects passed in as body ## 0.2.13 / 2010-09-25 - added SOCKS5 proxy support - bugfix: follow redirects on HEAD requests ## 0.2.12 / 2010-09-12 - added headers callback (http.headers {|h| p h}) - added .close method on client obj to terminate session (accepts message) - bugfix: report 0 for response status on 1.9 on timeouts - bugfix: handle bad Location host redirects - bugfix: reset host override on connect ## 0.2.11 / 2010-08-16 - all URIs are now normalized prior to dispatch (and on redirect) - default to direct proxy (instead of CONNECT handshake) - better performance - specify :proxy => {:tunnel => true} if you need to force CONNECT route - MultiRequest accepts block syntax for dispatching parallel requests (see specs) - MockHttpRequest accepts block syntax (see Mock wiki page) - bugfix: nullbyte frame for websockets - bugfix: set @uri on DNS resolve failure - bugfix: handle bad hosts in absolute redirects - bugfix: reset seen content on redirects (doh!) 
- bugfix: invalid multibyte escape in websocket regex (1.9.2+) - bugfix: report etag and last_modified headers correctly em-http-request-0.3.0/lib/0000755000175000017500000000000011772656255014650 5ustar tfheentfheenem-http-request-0.3.0/lib/em-http-request.rb0000644000175000017500000000002211772656255020233 0ustar tfheentfheenrequire 'em-http' em-http-request-0.3.0/lib/em-http.rb0000644000175000017500000000062611772656255016557 0ustar tfheentfheenrequire 'eventmachine' require 'escape_utils' require 'addressable/uri' require 'base64' require 'socket' require 'http11_client' require 'em_buffer' require 'em-http/core_ext/bytesize' require 'em-http/http_header' require 'em-http/http_encoding' require 'em-http/http_options' require 'em-http/client' require 'em-http/multi' require 'em-http/request' require 'em-http/decoders' require 'em-http/mock'em-http-request-0.3.0/lib/em-http/0000755000175000017500000000000011772656255016226 5ustar tfheentfheenem-http-request-0.3.0/lib/em-http/http_options.rb0000644000175000017500000000172511772656255021312 0ustar tfheentfheenclass HttpOptions attr_reader :uri, :method, :host, :port, :options def initialize(method, uri, options) uri.path = '/' if uri.path.empty? @options = options @method = method.to_s.upcase @uri = uri if proxy = options[:proxy] @host = proxy[:host] @port = proxy[:port] else # optional host for cases where you may have # pre-resolved the host, or you need an override @host = options.delete(:host) || uri.host @port = uri.port end @options[:timeout] ||= 10 # default connect & inactivity timeouts @options[:redirects] ||= 0 # default number of redirects to follow @options[:keepalive] ||= false # default to single request per connection # Make sure the ports are set as Addressable::URI doesn't # set the port if it isn't there if uri.scheme == "https" @uri.port ||= 443 @port ||= 443 else @uri.port ||= 80 @port ||= 80 end end endem-http-request-0.3.0/lib/em-http/decoders.rb0000644000175000017500000000455711772656255020356 0ustar tfheentfheenrequire 'zlib' require 'stringio' ## # Provides a unified callback interface to decompression libraries. module EventMachine::HttpDecoders class DecoderError < StandardError end class << self def accepted_encodings DECODERS.inject([]) { |r,d| r + d.encoding_names } end def decoder_for_encoding(encoding) DECODERS.each { |d| return d if d.encoding_names.include? encoding } nil end end class Base def self.encoding_names name = to_s.split('::').last.downcase [name] end ## # chunk_callback:: [Block] To handle a decompressed chunk def initialize(&chunk_callback) @chunk_callback = chunk_callback end def <<(compressed) return unless compressed && compressed.size > 0 decompressed = decompress(compressed) receive_decompressed decompressed end def finalize! decompressed = finalize receive_decompressed decompressed end private def receive_decompressed(decompressed) if decompressed && decompressed.size > 0 @chunk_callback.call(decompressed) end end protected ## # Must return a part of decompressed def decompress(compressed) nil end ## # May return last part def finalize nil end end class Deflate < Base def decompress(compressed) begin @zstream ||= Zlib::Inflate.new(nil) @zstream.inflate(compressed) rescue Zlib::Error raise DecoderError end end def finalize return nil unless @zstream begin r = @zstream.inflate(nil) @zstream.close r rescue Zlib::Error raise DecoderError end end end ## # Oneshot decompressor, due to lack of a streaming Gzip reader # implementation. We may steal code from Zliby to improve this. 
# # For now, do not put `gzip' or `compressed' in your accept-encoding # header if you expect much data through the :on_response interface. class GZip < Base def self.encoding_names %w(gzip compressed) end def decompress(compressed) @buf ||= '' @buf += compressed nil end def finalize begin Zlib::GzipReader.new(StringIO.new(@buf.to_s)).read rescue Zlib::Error raise DecoderError end end end DECODERS = [Deflate, GZip] end em-http-request-0.3.0/lib/em-http/http_encoding.rb0000644000175000017500000000702211772656255021401 0ustar tfheentfheenmodule EventMachine module HttpEncoding HTTP_REQUEST_HEADER="%s %s HTTP/1.1\r\n" FIELD_ENCODING = "%s: %s\r\n" # Escapes a URI. def escape(s) EscapeUtils.escape_url(s.to_s) end # Unescapes a URI escaped string. def unescape(s) EscapeUtils.unescape_url(s.to_s) end if ''.respond_to?(:bytesize) def bytesize(string) string.bytesize end else def bytesize(string) string.size end end # Map all header keys to a downcased string version def munge_header_keys(head) head.inject({}) { |h, (k, v)| h[k.to_s.downcase] = v; h } end # HTTP is kind of retarded that you have to specify a Host header, but if # you include port 80 then further redirects will tack on the :80 which is # annoying. def encode_host if @uri.port == 80 || @uri.port == 443 return @uri.host else @uri.host + ":#{@uri.port}" end end def encode_request(method, uri, query, proxy) query = encode_query(uri, query) # Non CONNECT proxies require that you provide the full request # uri in request header, as opposed to a relative path. query = uri.join(query) if proxy && proxy[:type] != :socks && !proxy[:use_connect] HTTP_REQUEST_HEADER % [method.to_s.upcase, query] end def encode_query(uri, query) encoded_query = if query.kind_of?(Hash) query.map { |k, v| encode_param(k, v) }.join('&') else query.to_s end if !uri.query.to_s.empty? encoded_query = [encoded_query, uri.query].reject {|part| part.empty?}.join("&") end encoded_query.to_s.empty? ? uri.path : "#{uri.path}?#{encoded_query}" end # URL encodes query parameters: # single k=v, or a URL encoded array, if v is an array of values def encode_param(k, v) if v.is_a?(Array) v.map { |e| escape(k) + "[]=" + escape(e) }.join("&") else escape(k) + "=" + escape(v) end end def form_encode_body(obj) pairs = [] recursive = Proc.new do |h, prefix| h.each do |k,v| key = prefix == '' ? escape(k) : "#{prefix}[#{escape(k)}]" if v.is_a? Array nh = Hash.new v.size.times { |t| nh[t] = v[t] } recursive.call(nh, key) elsif v.is_a? Hash recursive.call(v, key) else pairs << "#{key}=#{escape(v)}" end end end recursive.call(obj, '') return pairs.join('&') end # Encode a field in an HTTP header def encode_field(k, v) FIELD_ENCODING % [k, v] end # Encode basic auth in an HTTP header # In: Array ([user, pass]) - for basic auth # String - custom auth string (OAuth, etc) def encode_auth(k,v) if v.is_a? Array FIELD_ENCODING % [k, ["Basic", Base64.encode64(v.join(":")).chomp].join(" ")] else encode_field(k,v) end end def encode_headers(head) head.inject('') do |result, (key, value)| # Munge keys from foo-bar-baz to Foo-Bar-Baz key = key.split('-').map { |k| k.to_s.capitalize }.join('-') result << case key when 'Authorization', 'Proxy-Authorization' encode_auth(key, value) else encode_field(key, value) end end end def encode_cookie(cookie) if cookie.is_a? 
Hash cookie.inject('') { |result, (k, v)| result << encode_param(k, v) + ";" } else cookie end end end end em-http-request-0.3.0/lib/em-http/multi.rb0000644000175000017500000000264111772656255017710 0ustar tfheentfheenmodule EventMachine # EventMachine based Multi request client, based on a streaming HTTPRequest class, # which allows you to open multiple parallel connections and return only when all # of them finish. (i.e. ideal for parallelizing workloads) # # == Example # # EventMachine.run { # # multi = EventMachine::MultiRequest.new # # # add multiple requests to the multi-handler # multi.add(EventMachine::HttpRequest.new('http://www.google.com/').get) # multi.add(EventMachine::HttpRequest.new('http://www.yahoo.com/').get) # # multi.callback { # p multi.responses[:succeeded] # p multi.responses[:failed] # # EventMachine.stop # } # } # class MultiRequest include EventMachine::Deferrable attr_reader :requests, :responses def initialize(conns=[], &block) @requests = [] @responses = {:succeeded => [], :failed => []} conns.each {|conn| add(conn)} callback(&block) if block_given? end def add(conn) @requests.push(conn) conn.callback { @responses[:succeeded].push(conn); check_progress } conn.errback { @responses[:failed].push(conn); check_progress } end protected # invoke callback if all requests have completed def check_progress succeed(self) if (@responses[:succeeded].size + @responses[:failed].size) == @requests.size end end end em-http-request-0.3.0/lib/em-http/version.rb0000644000175000017500000000010711772656255020236 0ustar tfheentfheenmodule EventMachine class HttpRequest VERSION = "0.3.0" end endem-http-request-0.3.0/lib/em-http/client.rb0000644000175000017500000004464311772656255020044 0ustar tfheentfheen# #-- # Copyright (C)2008 Ilya Grigorik # # Includes portion originally Copyright (C)2007 Tony Arcieri # Includes portion originally Copyright (C)2005 Zed Shaw # You can redistribute this under the terms of the Ruby # license See file LICENSE for details # #-- module EventMachine class HttpClient < Connection include EventMachine::Deferrable include EventMachine::HttpEncoding TRANSFER_ENCODING="TRANSFER_ENCODING" CONTENT_ENCODING="CONTENT_ENCODING" CONTENT_LENGTH="CONTENT_LENGTH" CONTENT_TYPE="CONTENT_TYPE" LAST_MODIFIED="LAST_MODIFIED" KEEP_ALIVE="CONNECTION" SET_COOKIE="SET_COOKIE" LOCATION="LOCATION" HOST="HOST" ETAG="ETAG" CRLF="\r\n" attr_accessor :method, :options, :uri attr_reader :response, :response_header, :error, :redirects, :last_effective_url, :content_charset def post_init @parser = HttpClientParser.new @data = EventMachine::Buffer.new @chunk_header = HttpChunkHeader.new @response_header = HttpResponseHeader.new @parser_nbytes = 0 @redirects = 0 @response = '' @error = '' @headers = nil @last_effective_url = nil @content_decoder = nil @content_charset = nil @stream = nil @disconnect = nil @state = :response_header @socks_state = nil end # start HTTP request once we establish connection to host def connection_completed # if a socks proxy is specified, then a connection request # has to be made to the socks server and we need to wait # for a response code if socks_proxy? and @state == :response_header @state = :connect_socks_proxy send_socks_handshake # if we need to negotiate the proxy connection first, then # issue a CONNECT query and wait for 200 response elsif connect_proxy? and @state == :response_header @state = :connect_http_proxy send_request_header # if connecting via proxy, then state will be :proxy_connected, # indicating successful tunnel. 
from here, initiate normal http # exchange else @state = :response_header ssl = @options[:tls] || @options[:ssl] || {} start_tls(ssl) if @uri.scheme == "https" or @uri.port == 443 send_request_header send_request_body end end # request is done, invoke the callback def on_request_complete begin @content_decoder.finalize! if @content_decoder rescue HttpDecoders::DecoderError on_error "Content-decoder error" end close_connection end # request failed, invoke errback def on_error(msg, dns_error = false) @error = msg # no connection signature on DNS failures # fail the connection directly dns_error == true ? fail(self) : unbind end alias :close :on_error # assign a stream processing block def stream(&blk) @stream = blk end # assign disconnect callback for websocket def disconnect(&blk) @disconnect = blk end # assign a headers parse callback def headers(&blk) @headers = blk end # raw data push from the client (WebSocket) should # only be invoked after handshake, otherwise it will # inject data into the header exchange # # frames need to start with 0x00-0x7f byte and end with # an 0xFF byte. Per spec, we can also set the first # byte to a value betweent 0x80 and 0xFF, followed by # a leading length indicator def send(data) if @state == :websocket send_data("\x00#{data}\xff") end end def normalize_body @normalized_body ||= begin if @options[:body].is_a? Hash form_encode_body(@options[:body]) else @options[:body] end end end # determines if there is enough data in the buffer def has_bytes?(num) @data.size >= num end def websocket?; @uri.scheme == 'ws'; end def proxy?; !@options[:proxy].nil?; end # determines if a proxy should be used that uses # http-headers as proxy-mechanism # # this is the default proxy type if none is specified def http_proxy?; proxy? && [nil, :http].include?(@options[:proxy][:type]); end # determines if a http-proxy should be used with # the CONNECT verb def connect_proxy?; http_proxy? && (@options[:proxy][:use_connect] == true); end # determines if a SOCKS5 proxy should be used def socks_proxy?; proxy? && (@options[:proxy][:type] == :socks); end def socks_methods methods = [] methods << 2 if !options[:proxy][:authorization].nil? # 2 => Username/Password Authentication methods << 0 # 0 => No Authentication Required methods end def send_socks_handshake # Method Negotiation as described on # http://www.faqs.org/rfcs/rfc1928.html Section 3 @socks_state = :method_negotiation methods = socks_methods send_data [5, methods.size].pack('CC') + methods.pack('C*') end def send_request_header query = @options[:query] head = @options[:head] ? munge_header_keys(@options[:head]) : {} file = @options[:file] proxy = @options[:proxy] body = normalize_body request_header = nil if http_proxy? # initialize headers for the http proxy head = proxy[:head] ? munge_header_keys(proxy[:head]) : {} head['proxy-authorization'] = proxy[:authorization] if proxy[:authorization] # if we need to negotiate the tunnel connection first, then # issue a CONNECT query to the proxy first. This is an optional # flag, by default we will provide full URIs to the proxy if @state == :connect_http_proxy request_header = HTTP_REQUEST_HEADER % ['CONNECT', "#{@uri.host}:#{@uri.port}"] end end if websocket? 
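      # WebSocket handshake: ask the server to upgrade this connection. A 101
      # response flips the client into :websocket state (see parse_response_header
      # below), after which frames are exchanged via #send and the #stream callback.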
head['upgrade'] = 'WebSocket' head['connection'] = 'Upgrade' head['origin'] = @options[:origin] || @uri.host else # Set the Content-Length if file is given head['content-length'] = File.size(file) if file # Set the Content-Length if body is given head['content-length'] = body.bytesize if body # Set the cookie header if provided if cookie = head.delete('cookie') head['cookie'] = encode_cookie(cookie) end # Set content-type header if missing and body is a Ruby hash if not head['content-type'] and options[:body].is_a? Hash head['content-type'] = 'application/x-www-form-urlencoded' end # Set connection close unless keepalive unless options[:keepalive] head['connection'] = 'close' end end # Set the Host header if it hasn't been specified already head['host'] ||= encode_host # Set the User-Agent if it hasn't been specified head['user-agent'] ||= "EventMachine HttpClient" # Record last seen URL @last_effective_url = @uri # Build the request headers request_header ||= encode_request(@method, @uri, query, proxy) request_header << encode_headers(head) request_header << CRLF send_data request_header end def send_request_body if @options[:body] body = normalize_body send_data body return elsif @options[:file] stream_file_data @options[:file], :http_chunks => false end end def receive_data(data) @data << data dispatch end # Called when part of the body has been read def on_body_data(data) if @content_decoder begin @content_decoder << data rescue HttpDecoders::DecoderError on_error "Content-decoder error" end else on_decoded_body_data(data) end end def on_decoded_body_data(data) data.force_encoding @content_charset if @content_charset if @stream @stream.call(data) else @response << data end end def finished? @state == :finished || (@state == :body && @bytes_remaining.nil?) end def unbind if finished? && (@last_effective_url != @uri) && (@redirects < @options[:redirects]) begin # update uri to redirect location if we're allowed to traverse deeper @uri = @last_effective_url # keep track of the depth of requests we made in this session @redirects += 1 # swap current connection and reassign current handler req = HttpOptions.new(@method, @uri, @options) reconnect(req.host, req.port) @response_header = HttpResponseHeader.new @state = :response_header @response = '' @data.clear rescue EventMachine::ConnectionError => e on_error(e.message, true) end else if finished? succeed(self) else @disconnect.call(self) if @state == :websocket and @disconnect fail(self) end end end # # Response processing # def dispatch while case @state when :connect_socks_proxy parse_socks_response when :connect_http_proxy parse_response_header when :response_header parse_response_header when :chunk_header parse_chunk_header when :chunk_body process_chunk_body when :chunk_footer process_chunk_footer when :response_footer process_response_footer when :body process_body when :websocket process_websocket when :finished, :invalid break else raise RuntimeError, "invalid state: #{@state}" end end end def parse_header(header) return false if @data.empty? begin @parser_nbytes = @parser.execute(header, @data.to_str, @parser_nbytes) rescue EventMachine::HttpClientParserError @state = :invalid on_error "invalid HTTP format, parsing fails" end return false unless @parser.finished? 
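    # The offset argument to #execute (@parser_nbytes) lets parsing resume where
    # it left off when a header block arrives split across several packets.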
# Clear parsed data from the buffer
      @data.read(@parser_nbytes)
      @parser.reset
      @parser_nbytes = 0

      true
    end

    def parse_response_header
      return false unless parse_header(@response_header)

      # invoke headers callback after full parse if one
      # is specified by the user
      @headers.call(@response_header) if @headers

      unless @response_header.http_status and @response_header.http_reason
        @state = :invalid
        on_error "no HTTP response"
        return false
      end

      if @state == :connect_http_proxy
        # when a successful tunnel is established, the proxy responds with a
        # 200 response code. from here, the tunnel is transparent.
        if @response_header.http_status.to_i == 200
          @response_header = HttpResponseHeader.new
          connection_completed
          return true
        else
          @state = :invalid
          on_error "proxy not accessible"
          return false
        end
      end

      # correct location header - some servers will incorrectly give a relative URI
      if @response_header.location
        begin
          location = Addressable::URI.parse(@response_header.location)
          if location.relative?
            location = @uri.join(location)
            @response_header[LOCATION] = location.to_s
          else
            # if redirect is to an absolute url, check for correct URI structure
            raise if location.host.nil?
          end

          # store last url on any sign of redirect
          @last_effective_url = location
        rescue
          on_error "Location header format error"
          return false
        end
      end

      # Fire callbacks immediately after receiving the response headers
      # if the request method is HEAD. In case of a redirect, terminate
      # current connection and reinitialize the process.
      if @method == "HEAD"
        @state = :finished
        close_connection
        return false
      end

      if websocket?
        if @response_header.status == 101
          @state = :websocket
          succeed
        else
          fail "websocket handshake failed"
        end
      elsif @response_header.chunked_encoding?
        @state = :chunk_header
      elsif @response_header.content_length
        @state = :body
        @bytes_remaining = @response_header.content_length
      else
        @state = :body
        @bytes_remaining = nil
      end

      if decoder_class = HttpDecoders.decoder_for_encoding(response_header[CONTENT_ENCODING])
        begin
          @content_decoder = decoder_class.new do |s|
            on_decoded_body_data(s)
          end
        rescue HttpDecoders::DecoderError
          on_error "Content-decoder error"
        end
      end

      if ''.respond_to?(:force_encoding) && /;\s*charset=\s*(.+?)\s*(;|$)/.match(response_header[CONTENT_TYPE])
        @content_charset = Encoding.find($1.gsub(/^\"|\"$/, '')) rescue Encoding.default_external
      end

      true
    end

    def send_socks_connect_request
      # TO-DO: Implement address types for IPv6 and Domain
      begin
        ip_address = Socket.gethostbyname(@uri.host).last
        send_data [5, 1, 0, 1, ip_address, @uri.port].flatten.pack('CCCCA4n')
      rescue
        @state = :invalid
        on_error "could not resolve host", true
        return false
      end

      true
    end

    # parses socks 5 server responses as specified
    # on http://www.faqs.org/rfcs/rfc1928.html
    def parse_socks_response
      if @socks_state == :method_negotiation
        return false unless has_bytes? 2

        _, method = @data.read(2).unpack('CC')

        if socks_methods.include?(method)
          if method == 0
            @socks_state = :connecting
            return send_socks_connect_request
          elsif method == 2
            @socks_state = :authenticating

            credentials = @options[:proxy][:authorization]
            if credentials.size < 2
              @state = :invalid
              on_error "username and password are not supplied"
              return false
            end

            username, password = credentials
            send_data [5, username.length, username, password.length, password].pack('CCA*CA*')
          end
        else
          @state = :invalid
          on_error "proxy did not accept method"
          return false
        end
      elsif @socks_state == :authenticating
        return false unless has_bytes?
2 _, status_code = @data.read(2).unpack('CC') if status_code == 0 # success @socks_state = :connecting return send_socks_connect_request else # error @state = :invalid on_error "access denied by proxy" return false end elsif @socks_state == :connecting return false unless has_bytes? 10 _, response_code, _, address_type, _, _ = @data.read(10).unpack('CCCCNn') if response_code == 0 # success @socks_state = :connected @state = :proxy_connected @response_header = HttpResponseHeader.new # connection_completed will invoke actions to # start sending all http data transparently # over the socks connection connection_completed else # error @state = :invalid error_messages = { 1 => "general socks server failure", 2 => "connection not allowed by ruleset", 3 => "network unreachable", 4 => "host unreachable", 5 => "connection refused", 6 => "TTL expired", 7 => "command not supported", 8 => "address type not supported" } error_message = error_messages[response_code] || "unknown error (code: #{response_code})" on_error "socks5 connect error: #{error_message}" return false end end true end def parse_chunk_header return false unless parse_header(@chunk_header) @bytes_remaining = @chunk_header.chunk_size @chunk_header = HttpChunkHeader.new @state = @bytes_remaining > 0 ? :chunk_body : :response_footer true end def process_chunk_body if @data.size < @bytes_remaining @bytes_remaining -= @data.size on_body_data @data.read return false end on_body_data @data.read(@bytes_remaining) @bytes_remaining = 0 @state = :chunk_footer true end def process_chunk_footer return false if @data.size < 2 if @data.read(2) == CRLF @state = :chunk_header else @state = :invalid on_error "non-CRLF chunk footer" end true end def process_response_footer return false if @data.size < 2 if @data.read(2) == CRLF if @data.empty? @state = :finished on_request_complete else @state = :invalid on_error "garbage at end of chunked response" end else @state = :invalid on_error "non-CRLF response footer" end false end def process_body if @bytes_remaining.nil? on_body_data @data.read return false end if @bytes_remaining.zero? @state = :finished on_request_complete return false end if @data.size < @bytes_remaining @bytes_remaining -= @data.size on_body_data @data.read return false end on_body_data @data.read(@bytes_remaining) @bytes_remaining = 0 # If Keep-Alive is enabled, the server may be pushing more data to us # after the first request is complete. Hence, finish first request, and # reset state. if @response_header.keep_alive? @data.clear # hard reset, TODO: add support for keep-alive connections! @state = :finished on_request_complete else @data.clear @state = :finished on_request_complete end false end def process_websocket return false if @data.empty? # slice the message out of the buffer and pass in # for processing, and buffer data otherwise buffer = @data.read while msg = buffer.slice!(/\000([^\377]*)\377/n) msg.gsub!(/\A\x00|\xff\z/n, '') @stream.call(msg) end # store remainder if message boundary has not yet # been received @data << buffer if not buffer.empty? 
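      # e.g. a server payload of "\x00hello\xff\x00world\xff" yields two
      # @stream callbacks ("hello", "world"); a trailing partial frame stays
      # buffered until its closing 0xFF byte arrives.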
false
    end
  end
end
em-http-request-0.3.0/lib/em-http/mock.rb0000644000175000017500000000736211772656255017514 0ustar tfheentfheenmodule EventMachine
  OriginalHttpRequest = HttpRequest unless const_defined?(:OriginalHttpRequest)

  class MockHttpRequest < EventMachine::HttpRequest
    include HttpEncoding

    class RegisteredRequest < Struct.new(:uri, :method, :headers)
      def self.build(uri, method, headers)
        new(uri, method.to_s.upcase, headers || {})
      end
    end

    class FakeHttpClient < EventMachine::HttpClient
      attr_writer :response
      attr_reader :data

      def setup(response, uri)
        @uri = uri
        if response == :fail
          fail(self)
        else
          if response.respond_to?(:call)
            response.call(self)
            @state = :body
          else
            receive_data(response)
          end
          @state == :body ? succeed(self) : fail(self)
        end
      end

      def unbind
      end
    end

    @@registry = Hash.new
    @@registry_count = Hash.new{|h,k| h[k] = 0}

    def self.use
      activate!
      yield
    ensure
      deactivate!
    end

    def self.activate!
      EventMachine.send(:remove_const, :HttpRequest)
      EventMachine.send(:const_set, :HttpRequest, MockHttpRequest)
    end

    def self.deactivate!
      EventMachine.send(:remove_const, :HttpRequest)
      EventMachine.send(:const_set, :HttpRequest, OriginalHttpRequest)
    end

    def self.reset_counts!
      @@registry_count.clear
    end

    def self.reset_registry!
      @@registry.clear
    end

    @@pass_through_requests = true

    def self.pass_through_requests=(pass_through_requests)
      @@pass_through_requests = pass_through_requests
    end

    def self.pass_through_requests
      @@pass_through_requests
    end

    def self.parse_register_args(args, &proc)
      args << proc{|client| proc.call(client); ''} if proc
      headers, data = case args.size
      when 3
        args[2].is_a?(Hash) ? [args[2][:headers], args[2][:data]] : [{}, args[2]]
      when 4
        [args[2], args[3]]
      else
        raise
      end
      url = args[0]
      method = args[1]

      [headers, url, method, data]
    end

    def self.register(*args, &proc)
      headers, url, method, data = parse_register_args(args, &proc)
      @@registry[RegisteredRequest.build(url, method, headers)] = data
    end

    def self.register_file(*args)
      headers, url, method, data = parse_register_args(args)
      @@registry[RegisteredRequest.build(url, method, headers)] = File.read(data)
    end

    def self.count(url, method, headers = {})
      @@registry_count[RegisteredRequest.build(url, method, headers)]
    end

    def self.registered?(url, method, headers = {})
      @@registry.key?(RegisteredRequest.build(url, method, headers))
    end

    def self.registered_content(url, method, headers = {})
      @@registry[RegisteredRequest.build(url, method, headers)]
    end

    def self.increment_access(url, method, headers = {})
      @@registry_count[RegisteredRequest.build(url, method, headers)] += 1
    end

    alias_method :real_send_request, :send_request

    protected

    def send_request(&blk)
      query = "#{@req.uri.scheme}://#{@req.uri.host}:#{@req.uri.port}#{encode_query(@req.uri, @req.options[:query])}"
      headers = @req.options[:head]
      if self.class.registered?(query, @req.method, headers)
        self.class.increment_access(query, @req.method, headers)
        client = FakeHttpClient.new(nil)
        content = self.class.registered_content(query, @req.method, headers)
        client.setup(content, @req.uri)
        client
      elsif @@pass_through_requests
        real_send_request
      else
        raise "this request #{query} for method #{@req.method} with the headers #{@req.options[:head].inspect} isn't registered, and pass_through_requests is currently set to false"
      end
    end
  end
end
em-http-request-0.3.0/lib/em-http/request.rb0000644000175000017500000000444011772656255020245 0ustar tfheentfheenmodule EventMachine
  # EventMachine based HTTP request class with support for streaming consumption
  # of the response.
Response is parsed with a Ragel-generated whitelist parser # which supports chunked HTTP encoding. # # == Example # # EventMachine.run { # http = EventMachine::HttpRequest.new('http://127.0.0.1/').get :query => {'keyname' => 'value'} # # http.callback { # p http.response_header.status # p http.response_header # p http.response # # EventMachine.stop # } # } # class HttpRequest attr_reader :options, :method def initialize(host) @uri = host.kind_of?(Addressable::URI) ? host : Addressable::URI::parse(host.to_s) end # Send an HTTP request and consume the response. Supported options: # # head: {Key: Value} # Specify an HTTP header, e.g. {'Connection': 'close'} # # query: {Key: Value} # Specify query string parameters (auto-escaped) # # body: String # Specify the request body (you must encode it for now) # # on_response: Proc # Called for each response body chunk (you may assume HTTP 200 # OK then) # def get options = {}, &blk; setup_request(:get, options, &blk); end def head options = {}, &blk; setup_request(:head, options, &blk); end def delete options = {}, &blk; setup_request(:delete,options, &blk); end def put options = {}, &blk; setup_request(:put, options, &blk); end def post options = {}, &blk; setup_request(:post, options, &blk); end protected def setup_request(method, options, &blk) @req = HttpOptions.new(method, @uri, options) send_request(&blk) end def send_request(&blk) begin EventMachine.connect(@req.host, @req.port, EventMachine::HttpClient) { |c| c.uri = @req.uri c.method = @req.method c.options = @req.options c.comm_inactivity_timeout = @req.options[:timeout] c.pending_connect_timeout = @req.options[:timeout] blk.call(c) unless blk.nil? } rescue EventMachine::ConnectionError => e conn = EventMachine::HttpClient.new("") conn.on_error(e.message, true) conn.uri = @req.uri conn end end end endem-http-request-0.3.0/lib/em-http/core_ext/0000755000175000017500000000000011772656255020036 5ustar tfheentfheenem-http-request-0.3.0/lib/em-http/core_ext/bytesize.rb0000644000175000017500000000017011772656255022217 0ustar tfheentfheen# bytesize was introduced in 1.8.7+ if RUBY_VERSION <= "1.8.6" class String def bytesize; self.size; end end endem-http-request-0.3.0/lib/em-http/http_header.rb0000644000175000017500000000320311772656255021040 0ustar tfheentfheenmodule EventMachine # A simple hash is returned for each request made by HttpClient with the # headers that were given by the server for that request. class HttpResponseHeader < Hash # The reason returned in the http response ("OK","File not found",etc.) attr_accessor :http_reason # The HTTP version returned. attr_accessor :http_version # The status code (as a string!) attr_accessor :http_status # E-Tag def etag self[HttpClient::ETAG] end def last_modified self[HttpClient::LAST_MODIFIED] end # HTTP response status as an integer def status Integer(http_status) rescue 0 end # Length of content as an integer, or nil if chunked/unspecified def content_length @content_length ||= ((s = self[HttpClient::CONTENT_LENGTH]) && (s =~ /^(\d+)$/)) ? $1.to_i : nil end # Cookie header from the server def cookie self[HttpClient::SET_COOKIE] end # Is the transfer encoding chunked? def chunked_encoding? /chunked/i === self[HttpClient::TRANSFER_ENCODING] end def keep_alive? /keep-alive/i === self[HttpClient::KEEP_ALIVE] end def compressed? 
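      # true when the server declared a Content-Encoding that the client's
      # bundled decoders can handle (e.g. gzip or deflate)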
/gzip|compressed|deflate/i === self[HttpClient::CONTENT_ENCODING]
    end

    def location
      self[HttpClient::LOCATION]
    end
  end

  class HttpChunkHeader < Hash
    # When parsing chunked encodings this is set
    attr_accessor :http_chunk_size

    def initialize
      super
      @http_chunk_size = '0'
    end

    # Size of the chunk as an integer (the header value is hex-encoded)
    def chunk_size
      @http_chunk_size.to_i(16)
    end
  end
end
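# A rough usage sketch for the header helpers above (illustrative only: the
# URL is a placeholder and a running EventMachine reactor is assumed, as in
# the specs):
#
#   http = EventMachine::HttpRequest.new('http://example.com/').get
#   http.callback {
#     p http.response_header.status            # => 200 (0 if unparseable)
#     p http.response_header.chunked_encoding? # => true / false
#     p http.response_header.content_length    # => nil for chunked bodies
#     p http.response_header.cookie            # => raw Set-Cookie value
#     EventMachine.stop
#   }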