diff --git a/.coveragerc b/.coveragerc index 3d57cf76..f7570d40 100644 --- a/.coveragerc +++ b/.coveragerc @@ -4,7 +4,6 @@ omit = hyper/compat.py hyper/httplib_compat.py hyper/ssl_compat.py - hyper/packages/* [report] fail_under = 100 diff --git a/.travis.yml b/.travis.yml index e6eb45d4..642c7d41 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,6 +4,7 @@ python: - "2.7" - "3.4" - "3.5" + - "3.6" - "pypy-5.3.1" env: @@ -11,29 +12,15 @@ env: - TEST_RELEASE=false HYPER_FAST_PARSE=true - TEST_RELEASE=true HYPER_FAST_PARSE=false - TEST_RELEASE=true HYPER_FAST_PARSE=true - - NGHTTP2=true matrix: allow_failures: - env: TEST_RELEASE=true HYPER_FAST_PARSE=true - env: TEST_RELEASE=true HYPER_FAST_PARSE=false - exclude: - - env: NGHTTP2=true - python: "pypy-5.3.1" install: - ".travis/install.sh" -before_script: "flake8 --max-complexity 15 --exclude 'hyper/packages/*' hyper test" +before_script: "flake8 hyper test" script: - - > - if [[ "$TEST_RELEASE" == true ]]; then - py.test test_release.py - else - if [[ $TRAVIS_PYTHON_VERSION == pypy ]]; then - py.test test/ - else - coverage run -m py.test test/ - coverage report - fi - fi + - ".travis/run.sh" diff --git a/.travis/install.sh b/.travis/install.sh index d446b38a..d7423ce7 100755 --- a/.travis/install.sh +++ b/.travis/install.sh @@ -3,45 +3,6 @@ set -e set -x -if [[ "$NGHTTP2" = true ]]; then - # GCC 4.6 seems to cause problems, so go straight to 4.8. - sudo add-apt-repository --yes ppa:ubuntu-toolchain-r/test - sudo apt-get update - sudo apt-get install g++-4.8 libstdc++-4.8-dev - export CXX="g++-4.8" CC="gcc-4.8" - $CC --version - - # Install nghttp2. Right now I haven't built a PPA for this so we have to - # do it from source, which kinda sucks. First, install a ton of - # prerequisite packages. 
- sudo apt-get install autoconf automake autotools-dev libtool pkg-config \ - zlib1g-dev libcunit1-dev libssl-dev libxml2-dev \ - libevent-dev libjansson-dev libjemalloc-dev - pip install cython - - # Now, download and install nghttp2's latest version. - git clone https://github.com/tatsuhiro-t/nghttp2.git - cd nghttp2 - DIR=`pwd` - export PYTHONPATH="$DIR/lib/python${TRAVIS_PYTHON_VERSION}/site-packages" - mkdir -p $PYTHONPATH - autoreconf -i - automake - autoconf - ./configure --disable-threads --prefix=`pwd` - make - make install - - # The makefile doesn't install into the active virtualenv. Install again. - cd python - python setup.py install - cd ../.. - - # Let's try ldconfig. - sudo sh -c 'echo "/usr/local/lib" > /etc/ld.so.conf.d/libnghttp2.conf' - sudo ldconfig -fi - if [[ "$HYPER_FAST_PARSE" = true ]]; then pip install pycohttpparser~=1.0 fi diff --git a/.travis/run.sh b/.travis/run.sh new file mode 100755 index 00000000..321835de --- /dev/null +++ b/.travis/run.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +set -e +set -x + +if [[ "$TEST_RELEASE" == true ]]; then + py.test test_release.py +else + if [[ $TRAVIS_PYTHON_VERSION == pypy* ]]; then + py.test test/ + else + coverage run -m py.test test/ + coverage report + fi +fi diff --git a/HISTORY.rst b/HISTORY.rst index 60be8b7d..dab8eed7 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -1,6 +1,13 @@ Release History =============== +dev +--- + +*Bugfixes* + +- Stream end flag when length of last chunk equal to MAX_CHUNK + v0.7.0 (2016-09-27) ------------------- diff --git a/README.rst b/README.rst index 0e01b7f7..99dce29d 100644 --- a/README.rst +++ b/README.rst @@ -2,10 +2,23 @@ Hyper: HTTP/2 Client for Python =============================== -.. image:: https://raw.github.com/Lukasa/hyper/development/docs/source/images/hyper.png +**This project is no longer maintained!** + +Please use an alternative, such as `HTTPX`_ or others. + +.. 
_HTTPX: https://www.python-httpx.org/ + +We will not publish further updates for ``hyper``. + +Potential security issues will not be addressed. -.. image:: https://travis-ci.org/Lukasa/hyper.png?branch=master - :target: https://travis-ci.org/Lukasa/hyper +---- + +**So long, and thanks for all the fish!** + +---- + +.. image:: https://raw.github.com/Lukasa/hyper/development/docs/source/images/hyper.png HTTP is changing under our feet. HTTP/1.1, our old friend, is being supplemented by the brand new HTTP/2 standard. HTTP/2 provides many benefits: @@ -15,8 +28,8 @@ improved speed, lower bandwidth usage, better connection management, and more. from hyper import HTTPConnection - conn = HTTPConnection('http2bin.org:443') - conn.request('GET', '/get') + conn = HTTPConnection('nghttp2.org:443') + conn.request('GET', '/httpbin/get') resp = conn.get_response() print(resp.read()) diff --git a/docs/source/advanced.rst b/docs/source/advanced.rst index 1c42068b..a1d53085 100644 --- a/docs/source/advanced.rst +++ b/docs/source/advanced.rst @@ -71,7 +71,17 @@ SSL/TLS Certificate Verification By default, all HTTP/2 connections are made over TLS, and ``hyper`` bundles certificate authorities that it uses to verify the offered TLS certificates. -Currently certificate verification cannot be disabled. + +You can change how certificates are verified by getting a new SSL context +from :func:`hyper.tls.init_context`, tweaking its options, and passing it +to the :class:`HTTPConnection `. 
For example, this will +disable verification altogether:: + + import ssl + context = hyper.tls.init_context() + context.check_hostname = False + context.verify_mode = ssl.CERT_NONE + conn = HTTPConnection('http2bin.org:443', ssl_context=context) Streaming Uploads ----------------- diff --git a/docs/source/quickstart.rst b/docs/source/quickstart.rst index 8f9502ee..6ad44ec1 100644 --- a/docs/source/quickstart.rst +++ b/docs/source/quickstart.rst @@ -99,7 +99,7 @@ the response from any of them, and switch between them using their stream IDs. For example:: >>> from hyper import HTTPConnection - >>> c = HTTPConnection('http2bin.org') + >>> c = HTTPConnection('http2bin.org', port=443) >>> first = c.request('GET', '/get', headers={'key': 'value'}) >>> second = c.request('POST', '/post', body=b'hello') >>> third = c.request('GET', '/ip') diff --git a/hyper/__init__.py b/hyper/__init__.py index afa88035..99044881 100644 --- a/hyper/__init__.py +++ b/hyper/__init__.py @@ -33,4 +33,4 @@ # Set default logging handler. 
logging.getLogger(__name__).addHandler(logging.NullHandler()) -__version__ = '0.7.0' +__version__ = '0.8.0dev0' diff --git a/hyper/certs.pem b/hyper/certs.pem index 72a750f9..6b7bccfb 100644 --- a/hyper/certs.pem +++ b/hyper/certs.pem @@ -1703,38 +1703,6 @@ fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= -----END CERTIFICATE----- -# Issuer: CN=IGC/A O=PM/SGDN OU=DCSSI -# Subject: CN=IGC/A O=PM/SGDN OU=DCSSI -# Label: "IGC/A" -# Serial: 245102874772 -# MD5 Fingerprint: 0c:7f:dd:6a:f4:2a:b9:c8:9b:bd:20:7e:a9:db:5c:37 -# SHA1 Fingerprint: 60:d6:89:74:b5:c2:65:9e:8a:0f:c1:88:7c:88:d2:46:69:1b:18:2c -# SHA256 Fingerprint: b9:be:a7:86:0a:96:2e:a3:61:1d:ab:97:ab:6d:a3:e2:1c:10:68:b9:7d:55:57:5e:d0:e1:12:79:c1:1c:89:32 ------BEGIN CERTIFICATE----- -MIIEAjCCAuqgAwIBAgIFORFFEJQwDQYJKoZIhvcNAQEFBQAwgYUxCzAJBgNVBAYT -AkZSMQ8wDQYDVQQIEwZGcmFuY2UxDjAMBgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQ -TS9TR0ROMQ4wDAYDVQQLEwVEQ1NTSTEOMAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG -9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2LmZyMB4XDTAyMTIxMzE0MjkyM1oXDTIw -MTAxNzE0MjkyMlowgYUxCzAJBgNVBAYTAkZSMQ8wDQYDVQQIEwZGcmFuY2UxDjAM -BgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQTS9TR0ROMQ4wDAYDVQQLEwVEQ1NTSTEO -MAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2 -LmZyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsh/R0GLFMzvABIaI -s9z4iPf930Pfeo2aSVz2TqrMHLmh6yeJ8kbpO0px1R2OLc/mratjUMdUC24SyZA2 -xtgv2pGqaMVy/hcKshd+ebUyiHDKcMCWSo7kVc0dJ5S/znIq7Fz5cyD+vfcuiWe4 -u0dzEvfRNWk68gq5rv9GQkaiv6GFGvm/5P9JhfejcIYyHF2fYPepraX/z9E0+X1b -F8bc1g4oa8Ld8fUzaJ1O/Id8NhLWo4DoQw1VYZTqZDdH6nfK0LJYBcNdfrGoRpAx -Vs5wKpayMLh35nnAvSk7/ZR3TL0gzUEl4C7HG7vupARB0l2tEmqKm0f7yd1GQOGd -PDPQtQIDAQABo3cwdTAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBRjAVBgNV -HSAEDjAMMAoGCCqBegF5AQEBMB0GA1UdDgQWBBSjBS8YYFDCiQrdKyFP/45OqDAx -NjAfBgNVHSMEGDAWgBSjBS8YYFDCiQrdKyFP/45OqDAxNjANBgkqhkiG9w0BAQUF -AAOCAQEABdwm2Pp3FURo/C9mOnTgXeQp/wYHE4RKq89toB9RlPhJy3Q2FLwV3duJ -L92PoF189RLrn544pEfMs5bZvpwlqwN+Mw+VgQ39FuCIvjfwbF3QMZsyK10XZZOY 
-YLxuj7GoPB7ZHPOpJkL5ZB3C55L29B5aqhlSXa/oovdgoPaN8In1buAKBQGVyYsg -Crpa/JosPL3Dt8ldeCUFP1YUmwza+zpI/pdpXsoQhvdOlgQITeywvl3cO45Pwf2a -NjSaTFR+FwNIlQgRHAdvhQh+XU3Endv7rs6y0bO4g2wdsrN58dhwmX7wEwLOXt1R -0982gaEbeC9xs/FZTEYYKKuF0mBWWg== ------END CERTIFICATE----- - # Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication EV RootCA1 # Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication EV RootCA1 # Label: "Security Communication EV RootCA1" @@ -2044,48 +2012,6 @@ h7U/2k3ZIQAw3pDaDtMaSKk+hQsUi4y8QZ5q9w5wwDX3OaJdZtB7WZ+oRxKaJyOk LY4ng5IgodcVf/EuGO70SH8vf/GhGLWhC5SgYiAynB321O+/TIho -----END CERTIFICATE----- -# Issuer: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. -# Subject: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. -# Label: "EBG Elektronik Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1" -# Serial: 5525761995591021570 -# MD5 Fingerprint: 2c:20:26:9d:cb:1a:4a:00:85:b5:b7:5a:ae:c2:01:37 -# SHA1 Fingerprint: 8c:96:ba:eb:dd:2b:07:07:48:ee:30:32:66:a0:f3:98:6e:7c:ae:58 -# SHA256 Fingerprint: 35:ae:5b:dd:d8:f7:ae:63:5c:ff:ba:56:82:a8:f0:0b:95:f4:84:62:c7:10:8e:e9:a0:e5:29:2b:07:4a:af:b2 ------BEGIN CERTIFICATE----- -MIIF5zCCA8+gAwIBAgIITK9zQhyOdAIwDQYJKoZIhvcNAQEFBQAwgYAxODA2BgNV -BAMML0VCRyBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx -c8SxMTcwNQYDVQQKDC5FQkcgQmlsacWfaW0gVGVrbm9sb2ppbGVyaSB2ZSBIaXpt -ZXRsZXJpIEEuxZ4uMQswCQYDVQQGEwJUUjAeFw0wNjA4MTcwMDIxMDlaFw0xNjA4 -MTQwMDMxMDlaMIGAMTgwNgYDVQQDDC9FQkcgRWxla3Ryb25payBTZXJ0aWZpa2Eg -SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsTE3MDUGA1UECgwuRUJHIEJpbGnFn2ltIFRl -a25vbG9qaWxlcmkgdmUgSGl6bWV0bGVyaSBBLsWeLjELMAkGA1UEBhMCVFIwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDuoIRh0DpqZhAy2DE4f6en5f2h -4fuXd7hxlugTlkaDT7byX3JWbhNgpQGR4lvFzVcfd2NR/y8927k/qqk153nQ9dAk -tiHq6yOU/im/+4mRDGSaBUorzAzu8T2bgmmkTPiab+ci2hC6X5L8GCcKqKpE+i4s -tPtGmggDg3KriORqcsnlZR9uKg+ds+g75AxuetpX/dfreYteIAbTdgtsApWjluTL 
-dlHRKJ2hGvxEok3MenaoDT2/F08iiFD9rrbskFBKW5+VQarKD7JK/oCZTqNGFav4 -c0JqwmZ2sQomFd2TkuzbqV9UIlKRcF0T6kjsbgNs2d1s/OsNA/+mgxKb8amTD8Um -TDGyY5lhcucqZJnSuOl14nypqZoaqsNW2xCaPINStnuWt6yHd6i58mcLlEOzrz5z -+kI2sSXFCjEmN1ZnuqMLfdb3ic1nobc6HmZP9qBVFCVMLDMNpkGMvQQxahByCp0O -Lna9XvNRiYuoP1Vzv9s6xiQFlpJIqkuNKgPlV5EQ9GooFW5Hd4RcUXSfGenmHmMW -OeMRFeNYGkS9y8RsZteEBt8w9DeiQyJ50hBs37vmExH8nYQKE3vwO9D8owrXieqW -fo1IhR5kX9tUoqzVegJ5a9KK8GfaZXINFHDk6Y54jzJ0fFfy1tb0Nokb+Clsi7n2 -l9GkLqq+CxnCRelwXQIDAJ3Zo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB -/wQEAwIBBjAdBgNVHQ4EFgQU587GT/wWZ5b6SqMHwQSny2re2kcwHwYDVR0jBBgw -FoAU587GT/wWZ5b6SqMHwQSny2re2kcwDQYJKoZIhvcNAQEFBQADggIBAJuYml2+ -8ygjdsZs93/mQJ7ANtyVDR2tFcU22NU57/IeIl6zgrRdu0waypIN30ckHrMk2pGI -6YNw3ZPX6bqz3xZaPt7gyPvT/Wwp+BVGoGgmzJNSroIBk5DKd8pNSe/iWtkqvTDO -TLKBtjDOWU/aWR1qeqRFsIImgYZ29fUQALjuswnoT4cCB64kXPBfrAowzIpAoHME -wfuJJPaaHFy3PApnNgUIMbOv2AFoKuB4j3TeuFGkjGwgPaL7s9QJ/XvCgKqTbCmY -Iai7FvOpEl90tYeY8pUm3zTvilORiF0alKM/fCL414i6poyWqD1SNGKfAB5UVUJn -xk1Gj7sURT0KlhaOEKGXmdXTMIXM3rRyt7yKPBgpaP3ccQfuJDlq+u2lrDgv+R4Q -DgZxGhBM/nV+/x5XOULK1+EVoVZVWRvRo68R2E7DpSvvkL/A7IITW43WciyTTo9q -Kd+FPNMN4KIYEsxVL0e3p5sC/kH2iExt2qkBR4NkJ2IQgtYSe14DHzSpyZH+r11t -hie3I6p1GMog57AP14kOpmciY/SDQSsGS7tY1dHXt7kQY9iJSrSq3RZj9W6+YKH4 -7ejWkE8axsWgKdOnIaj1Wjz3x0miIZpKlVIglnKaZsv30oZDfCK+lvm9AahH3eU7 -QPl1K5srRmSGjR70j/sHd9DqSaIcjVIUpgqT ------END CERTIFICATE----- - # Issuer: O=certSIGN OU=certSIGN ROOT CA # Subject: O=certSIGN OU=certSIGN ROOT CA # Label: "certSIGN ROOT CA" @@ -2424,43 +2350,6 @@ Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ== -----END CERTIFICATE----- -# Issuer: CN=Juur-SK O=AS Sertifitseerimiskeskus -# Subject: CN=Juur-SK O=AS Sertifitseerimiskeskus -# Label: "Juur-SK" -# Serial: 999181308 -# MD5 Fingerprint: aa:8e:5d:d9:f8:db:0a:58:b7:8d:26:87:6c:82:35:55 -# SHA1 Fingerprint: 40:9d:4b:d9:17:b5:5c:27:b6:9b:64:cb:98:22:44:0d:cd:09:b8:89 -# SHA256 Fingerprint: 
ec:c3:e9:c3:40:75:03:be:e0:91:aa:95:2f:41:34:8f:f8:8b:aa:86:3b:22:64:be:fa:c8:07:90:15:74:e9:39 ------BEGIN CERTIFICATE----- -MIIE5jCCA86gAwIBAgIEO45L/DANBgkqhkiG9w0BAQUFADBdMRgwFgYJKoZIhvcN -AQkBFglwa2lAc2suZWUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKExlBUyBTZXJ0aWZp -dHNlZXJpbWlza2Vza3VzMRAwDgYDVQQDEwdKdXVyLVNLMB4XDTAxMDgzMDE0MjMw -MVoXDTE2MDgyNjE0MjMwMVowXTEYMBYGCSqGSIb3DQEJARYJcGtpQHNrLmVlMQsw -CQYDVQQGEwJFRTEiMCAGA1UEChMZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1czEQ -MA4GA1UEAxMHSnV1ci1TSzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB -AIFxNj4zB9bjMI0TfncyRsvPGbJgMUaXhvSYRqTCZUXP00B841oiqBB4M8yIsdOB -SvZiF3tfTQou0M+LI+5PAk676w7KvRhj6IAcjeEcjT3g/1tf6mTll+g/mX8MCgkz -ABpTpyHhOEvWgxutr2TC+Rx6jGZITWYfGAriPrsfB2WThbkasLnE+w0R9vXW+RvH -LCu3GFH+4Hv2qEivbDtPL+/40UceJlfwUR0zlv/vWT3aTdEVNMfqPxZIe5EcgEMP -PbgFPtGzlc3Yyg/CQ2fbt5PgIoIuvvVoKIO5wTtpeyDaTpxt4brNj3pssAki14sL -2xzVWiZbDcDq5WDQn/413z8CAwEAAaOCAawwggGoMA8GA1UdEwEB/wQFMAMBAf8w -ggEWBgNVHSAEggENMIIBCTCCAQUGCisGAQQBzh8BAQEwgfYwgdAGCCsGAQUFBwIC -MIHDHoHAAFMAZQBlACAAcwBlAHIAdABpAGYAaQBrAGEAYQB0ACAAbwBuACAAdgDk -AGwAagBhAHMAdABhAHQAdQBkACAAQQBTAC0AaQBzACAAUwBlAHIAdABpAGYAaQB0 -AHMAZQBlAHIAaQBtAGkAcwBrAGUAcwBrAHUAcwAgAGEAbABhAG0ALQBTAEsAIABz -AGUAcgB0AGkAZgBpAGsAYQBhAHQAaQBkAGUAIABrAGkAbgBuAGkAdABhAG0AaQBz -AGUAawBzMCEGCCsGAQUFBwIBFhVodHRwOi8vd3d3LnNrLmVlL2Nwcy8wKwYDVR0f -BCQwIjAgoB6gHIYaaHR0cDovL3d3dy5zay5lZS9qdXVyL2NybC8wHQYDVR0OBBYE -FASqekej5ImvGs8KQKcYP2/v6X2+MB8GA1UdIwQYMBaAFASqekej5ImvGs8KQKcY -P2/v6X2+MA4GA1UdDwEB/wQEAwIB5jANBgkqhkiG9w0BAQUFAAOCAQEAe8EYlFOi -CfP+JmeaUOTDBS8rNXiRTHyoERF5TElZrMj3hWVcRrs7EKACr81Ptcw2Kuxd/u+g -kcm2k298gFTsxwhwDY77guwqYHhpNjbRxZyLabVAyJRld/JXIWY7zoVAtjNjGr95 -HvxcHdMdkxuLDF2FvZkwMhgJkVLpfKG6/2SSmuz+Ne6ML678IIbsSt4beDI3poHS -na9aEhbKmVv8b20OxaAehsmR0FyYgl9jDIpaq9iVpszLita/ZEuOyoqysOkhMp6q -qIWYNIE5ITuoOlIyPfZrN4YGWhWY3PARZv40ILcD9EEQfTmEeZZyY7aWAuVrua0Z -TbvGRNs2yyqcjg== ------END CERTIFICATE----- - # Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post # Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post # 
Label: "Hongkong Post Root CA 1" @@ -5114,3 +5003,273 @@ XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P 5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi DrW5viSP -----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq 
+NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=Certplus Root CA G1 O=Certplus +# Subject: CN=Certplus Root CA G1 O=Certplus +# Label: "Certplus Root CA G1" +# Serial: 1491911565779898356709731176965615564637713 +# MD5 Fingerprint: 7f:09:9c:f7:d9:b9:5c:69:69:56:d5:37:3e:14:0d:42 +# SHA1 Fingerprint: 22:fd:d0:b7:fd:a2:4e:0d:ac:49:2c:a0:ac:a6:7b:6a:1f:e3:f7:66 +# SHA256 Fingerprint: 15:2a:40:2b:fc:df:2c:d5:48:05:4d:22:75:b3:9c:7f:ca:3e:c0:97:80:78:b0:f0:ea:76:e5:61:a6:c7:43:3e +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgISESBVg+QtPlRWhS2DN7cs3EYRMA0GCSqGSIb3DQEBDQUA 
+MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy +dHBsdXMgUm9vdCBDQSBHMTAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBa +MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy +dHBsdXMgUm9vdCBDQSBHMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ANpQh7bauKk+nWT6VjOaVj0W5QOVsjQcmm1iBdTYj+eJZJ+622SLZOZ5KmHNr49a +iZFluVj8tANfkT8tEBXgfs+8/H9DZ6itXjYj2JizTfNDnjl8KvzsiNWI7nC9hRYt +6kuJPKNxQv4c/dMcLRC4hlTqQ7jbxofaqK6AJc96Jh2qkbBIb6613p7Y1/oA/caP +0FG7Yn2ksYyy/yARujVjBYZHYEMzkPZHogNPlk2dT8Hq6pyi/jQu3rfKG3akt62f +6ajUeD94/vI4CTYd0hYCyOwqaK/1jpTvLRN6HkJKHRUxrgwEV/xhc/MxVoYxgKDE +EW4wduOU8F8ExKyHcomYxZ3MVwia9Az8fXoFOvpHgDm2z4QTd28n6v+WZxcIbekN +1iNQMLAVdBM+5S//Ds3EC0pd8NgAM0lm66EYfFkuPSi5YXHLtaW6uOrc4nBvCGrc +h2c0798wct3zyT8j/zXhviEpIDCB5BmlIOklynMxdCm+4kLV87ImZsdo/Rmz5yCT +mehd4F6H50boJZwKKSTUzViGUkAksnsPmBIgJPaQbEfIDbsYIC7Z/fyL8inqh3SV +4EJQeIQEQWGw9CEjjy3LKCHyamz0GqbFFLQ3ZU+V/YDI+HLlJWvEYLF7bY5KinPO +WftwenMGE9nTdDckQQoRb5fc5+R+ob0V8rqHDz1oihYHAgMBAAGjYzBhMA4GA1Ud +DwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSowcCbkahDFXxd +Bie0KlHYlwuBsTAfBgNVHSMEGDAWgBSowcCbkahDFXxdBie0KlHYlwuBsTANBgkq +hkiG9w0BAQ0FAAOCAgEAnFZvAX7RvUz1isbwJh/k4DgYzDLDKTudQSk0YcbX8ACh +66Ryj5QXvBMsdbRX7gp8CXrc1cqh0DQT+Hern+X+2B50ioUHj3/MeXrKls3N/U/7 +/SMNkPX0XtPGYX2eEeAC7gkE2Qfdpoq3DIMku4NQkv5gdRE+2J2winq14J2by5BS +S7CTKtQ+FjPlnsZlFT5kOwQ/2wyPX1wdaR+v8+khjPPvl/aatxm2hHSco1S1cE5j +2FddUyGbQJJD+tZ3VTNPZNX70Cxqjm0lpu+F6ALEUz65noe8zDUa3qHpimOHZR4R +Kttjd5cUvpoUmRGywO6wT/gUITJDT5+rosuoD6o7BlXGEilXCNQ314cnrUlZp5Gr +RHpejXDbl85IULFzk/bwg2D5zfHhMf1bfHEhYxQUqq/F3pN+aLHsIqKqkHWetUNy +6mSjhEv9DKgma3GX7lZjZuhCVPnHHd/Qj1vfyDBviP4NxDMcU6ij/UgQ8uQKTuEV +V/xuZDDCVRHc6qnNSlSsKWNEz0pAoNZoWRsz+e86i9sgktxChL8Bq4fA1SCC28a5 +g4VCXA9DO2pJNdWY9BW/+mGBDAkgGNLQFwzLSABQ6XaCjGTXOqAHVcweMcDvOrRl +++O/QmueD6i9a5jc2NvLi6Td11n0bt3+qsOR0C5CB8AMTVPNJLFMWx5R9N/pkvo= +-----END CERTIFICATE----- + +# Issuer: CN=Certplus Root CA G2 O=Certplus +# Subject: CN=Certplus Root CA G2 O=Certplus +# Label: "Certplus Root CA G2" 
+# Serial: 1492087096131536844209563509228951875861589 +# MD5 Fingerprint: a7:ee:c4:78:2d:1b:ee:2d:b9:29:ce:d6:a7:96:32:31 +# SHA1 Fingerprint: 4f:65:8e:1f:e9:06:d8:28:02:e9:54:47:41:c9:54:25:5d:69:cc:1a +# SHA256 Fingerprint: 6c:c0:50:41:e6:44:5e:74:69:6c:4c:fb:c9:f8:0f:54:3b:7e:ab:bb:44:b4:ce:6f:78:7c:6a:99:71:c4:2f:17 +-----BEGIN CERTIFICATE----- +MIICHDCCAaKgAwIBAgISESDZkc6uo+jF5//pAq/Pc7xVMAoGCCqGSM49BAMDMD4x +CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs +dXMgUm9vdCBDQSBHMjAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBaMD4x +CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs +dXMgUm9vdCBDQSBHMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABM0PW1aC3/BFGtat +93nwHcmsltaeTpwftEIRyoa/bfuFo8XlGVzX7qY/aWfYeOKmycTbLXku54uNAm8x +Ik0G42ByRZ0OQneezs/lf4WbGOT8zC5y0xaTTsqZY1yhBSpsBqNjMGEwDgYDVR0P +AQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNqDYwJ5jtpMxjwj +FNiPwyCrKGBZMB8GA1UdIwQYMBaAFNqDYwJ5jtpMxjwjFNiPwyCrKGBZMAoGCCqG +SM49BAMDA2gAMGUCMHD+sAvZ94OX7PNVHdTcswYO/jOYnYs5kGuUIe22113WTNch +p+e/IQ8rzfcq3IUHnQIxAIYUFuXcsGXCwI4Un78kFmjlvPl5adytRSv3tjFzzAal +U5ORGpOucGpnutee5WEaXw== +-----END CERTIFICATE----- + +# Issuer: CN=OpenTrust Root CA G1 O=OpenTrust +# Subject: CN=OpenTrust Root CA G1 O=OpenTrust +# Label: "OpenTrust Root CA G1" +# Serial: 1492036577811947013770400127034825178844775 +# MD5 Fingerprint: 76:00:cc:81:29:cd:55:5e:88:6a:7a:2e:f7:4d:39:da +# SHA1 Fingerprint: 79:91:e8:34:f7:e2:ee:dd:08:95:01:52:e9:55:2d:14:e9:58:d5:7e +# SHA256 Fingerprint: 56:c7:71:28:d9:8c:18:d9:1b:4c:fd:ff:bc:25:ee:91:03:d4:75:8e:a2:ab:ad:82:6a:90:f3:45:7d:46:0e:b4 +-----BEGIN CERTIFICATE----- +MIIFbzCCA1egAwIBAgISESCzkFU5fX82bWTCp59rY45nMA0GCSqGSIb3DQEBCwUA +MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w +ZW5UcnVzdCBSb290IENBIEcxMB4XDTE0MDUyNjA4NDU1MFoXDTM4MDExNTAwMDAw +MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU +T3BlblRydXN0IFJvb3QgQ0EgRzEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK 
+AoICAQD4eUbalsUwXopxAy1wpLuwxQjczeY1wICkES3d5oeuXT2R0odsN7faYp6b +wiTXj/HbpqbfRm9RpnHLPhsxZ2L3EVs0J9V5ToybWL0iEA1cJwzdMOWo010hOHQX +/uMftk87ay3bfWAfjH1MBcLrARYVmBSO0ZB3Ij/swjm4eTrwSSTilZHcYTSSjFR0 +77F9jAHiOH3BX2pfJLKOYheteSCtqx234LSWSE9mQxAGFiQD4eCcjsZGT44ameGP +uY4zbGneWK2gDqdkVBFpRGZPTBKnjix9xNRbxQA0MMHZmf4yzgeEtE7NCv82TWLx +p2NX5Ntqp66/K7nJ5rInieV+mhxNaMbBGN4zK1FGSxyO9z0M+Yo0FMT7MzUj8czx +Kselu7Cizv5Ta01BG2Yospb6p64KTrk5M0ScdMGTHPjgniQlQ/GbI4Kq3ywgsNw2 +TgOzfALU5nsaqocTvz6hdLubDuHAk5/XpGbKuxs74zD0M1mKB3IDVedzagMxbm+W +G+Oin6+Sx+31QrclTDsTBM8clq8cIqPQqwWyTBIjUtz9GVsnnB47ev1CI9sjgBPw +vFEVVJSmdz7QdFG9URQIOTfLHzSpMJ1ShC5VkLG631UAC9hWLbFJSXKAqWLXwPYY +EQRVzXR7z2FwefR7LFxckvzluFqrTJOVoSfupb7PcSNCupt2LQIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUl0YhVyE1 +2jZVx/PxN3DlCPaTKbYwHwYDVR0jBBgwFoAUl0YhVyE12jZVx/PxN3DlCPaTKbYw +DQYJKoZIhvcNAQELBQADggIBAB3dAmB84DWn5ph76kTOZ0BP8pNuZtQ5iSas000E +PLuHIT839HEl2ku6q5aCgZG27dmxpGWX4m9kWaSW7mDKHyP7Rbr/jyTwyqkxf3kf +gLMtMrpkZ2CvuVnN35pJ06iCsfmYlIrM4LvgBBuZYLFGZdwIorJGnkSI6pN+VxbS +FXJfLkur1J1juONI5f6ELlgKn0Md/rcYkoZDSw6cMoYsYPXpSOqV7XAp8dUv/TW0 +V8/bhUiZucJvbI/NeJWsZCj9VrDDb8O+WVLhX4SPgPL0DTatdrOjteFkdjpY3H1P +XlZs5VVZV6Xf8YpmMIzUUmI4d7S+KNfKNsSbBfD4Fdvb8e80nR14SohWZ25g/4/I +i+GOvUKpMwpZQhISKvqxnUOOBZuZ2mKtVzazHbYNeS2WuOvyDEsMpZTGMKcmGS3t +TAZQMPH9WD25SxdfGbRqhFS0OE85og2WaMMolP3tLR9Ka0OWLpABEPs4poEL0L91 +09S5zvE/bw4cHjdx5RiHdRk/ULlepEU0rbDK5uUTdg8xFKmOLZTW1YVNcxVPS/Ky +Pu1svf0OnWZzsD2097+o4BGkxK51CUpjAEggpsadCwmKtODmzj7HPiY46SvepghJ +AwSQiumPv+i2tCqjI40cHLI5kqiPAlxAOXXUc0ECd97N4EOH1uS6SsNsEn/+KuYj +1oxx +-----END CERTIFICATE----- + +# Issuer: CN=OpenTrust Root CA G2 O=OpenTrust +# Subject: CN=OpenTrust Root CA G2 O=OpenTrust +# Label: "OpenTrust Root CA G2" +# Serial: 1492012448042702096986875987676935573415441 +# MD5 Fingerprint: 57:24:b6:59:24:6b:ae:c8:fe:1c:0c:20:f2:c0:4e:eb +# SHA1 Fingerprint: 79:5f:88:60:c5:ab:7c:3d:92:e6:cb:f4:8d:e1:45:cd:11:ef:60:0b +# SHA256 Fingerprint: 
27:99:58:29:fe:6a:75:15:c1:bf:e8:48:f9:c4:76:1d:b1:6c:22:59:29:25:7b:f4:0d:08:94:f2:9e:a8:ba:f2 +-----BEGIN CERTIFICATE----- +MIIFbzCCA1egAwIBAgISESChaRu/vbm9UpaPI+hIvyYRMA0GCSqGSIb3DQEBDQUA +MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w +ZW5UcnVzdCBSb290IENBIEcyMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAw +MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU +T3BlblRydXN0IFJvb3QgQ0EgRzIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQDMtlelM5QQgTJT32F+D3Y5z1zCU3UdSXqWON2ic2rxb95eolq5cSG+Ntmh +/LzubKh8NBpxGuga2F8ORAbtp+Dz0mEL4DKiltE48MLaARf85KxP6O6JHnSrT78e +CbY2albz4e6WiWYkBuTNQjpK3eCasMSCRbP+yatcfD7J6xcvDH1urqWPyKwlCm/6 +1UWY0jUJ9gNDlP7ZvyCVeYCYitmJNbtRG6Q3ffyZO6v/v6wNj0OxmXsWEH4db0fE +FY8ElggGQgT4hNYdvJGmQr5J1WqIP7wtUdGejeBSzFfdNTVY27SPJIjki9/ca1TS +gSuyzpJLHB9G+h3Ykst2Z7UJmQnlrBcUVXDGPKBWCgOz3GIZ38i1MH/1PCZ1Eb3X +G7OHngevZXHloM8apwkQHZOJZlvoPGIytbU6bumFAYueQ4xncyhZW+vj3CzMpSZy +YhK05pyDRPZRpOLAeiRXyg6lPzq1O4vldu5w5pLeFlwoW5cZJ5L+epJUzpM5ChaH +vGOz9bGTXOBut9Dq+WIyiET7vycotjCVXRIouZW+j1MY5aIYFuJWpLIsEPUdN6b4 +t/bQWVyJ98LVtZR00dX+G7bw5tYee9I8y6jj9RjzIR9u701oBnstXW5DiabA+aC/ +gh7PU3+06yzbXfZqfUAkBXKJOAGTy3HCOV0GEfZvePg3DTmEJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUajn6QiL3 +5okATV59M4PLuG53hq8wHwYDVR0jBBgwFoAUajn6QiL35okATV59M4PLuG53hq8w +DQYJKoZIhvcNAQENBQADggIBAJjLq0A85TMCl38th6aP1F5Kr7ge57tx+4BkJamz +Gj5oXScmp7oq4fBXgwpkTx4idBvpkF/wrM//T2h6OKQQbA2xx6R3gBi2oihEdqc0 +nXGEL8pZ0keImUEiyTCYYW49qKgFbdEfwFFEVn8nNQLdXpgKQuswv42hm1GqO+qT +RmTFAHneIWv2V6CG1wZy7HBGS4tz3aAhdT7cHcCP009zHIXZ/n9iyJVvttN7jLpT +wm+bREx50B1ws9efAvSyB7DH5fitIw6mVskpEndI2S9G/Tvw/HRwkqWOOAgfZDC2 +t0v7NqwQjqBSM2OdAzVWxWm9xiNaJ5T2pBL4LTM8oValX9YZ6e18CL13zSdkzJTa +TkZQh+D5wVOAHrut+0dSixv9ovneDiK3PTNZbNTe9ZUGMg1RGUFcPk8G97krgCf2 +o6p6fAbhQ8MTOWIaNr3gKC6UAuQpLmBVrkA9sHSSXvAgZJY/X0VdiLWK2gKgW0VU +3jg9CcCoSmVGFvyqv1ROTVu+OEO3KMqLM6oaJbolXCkvW0pujOotnCr2BXbgd5eA +iN1nE28daCSLT7d0geX0YJ96Vdc+N9oWaz53rK4YcJUIeSkDiv7BO7M/Gg+kO14f 
+WKGVyasvc0rQLW6aWQ9VGHgtPFGml4vmu7JwqkwR3v98KzfUetF3NI/n+UL3PIEM +S1IK +-----END CERTIFICATE----- + +# Issuer: CN=OpenTrust Root CA G3 O=OpenTrust +# Subject: CN=OpenTrust Root CA G3 O=OpenTrust +# Label: "OpenTrust Root CA G3" +# Serial: 1492104908271485653071219941864171170455615 +# MD5 Fingerprint: 21:37:b4:17:16:92:7b:67:46:70:a9:96:d7:a8:13:24 +# SHA1 Fingerprint: 6e:26:64:f3:56:bf:34:55:bf:d1:93:3f:7c:01:de:d8:13:da:8a:a6 +# SHA256 Fingerprint: b7:c3:62:31:70:6e:81:07:8c:36:7c:b8:96:19:8f:1e:32:08:dd:92:69:49:dd:8f:57:09:a4:10:f7:5b:62:92 +-----BEGIN CERTIFICATE----- +MIICITCCAaagAwIBAgISESDm+Ez8JLC+BUCs2oMbNGA/MAoGCCqGSM49BAMDMEAx +CzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9wZW5U +cnVzdCBSb290IENBIEczMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAwMFow +QDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwUT3Bl +blRydXN0IFJvb3QgQ0EgRzMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARK7liuTcpm +3gY6oxH84Bjwbhy6LTAMidnW7ptzg6kjFYwvWYpa3RTqnVkrQ7cG7DK2uu5Bta1d +oYXM6h0UZqNnfkbilPPntlahFVmhTzeXuSIevRHr9LIfXsMUmuXZl5mjYzBhMA4G +A1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRHd8MUi2I5 +DMlv4VBN0BBY3JWIbTAfBgNVHSMEGDAWgBRHd8MUi2I5DMlv4VBN0BBY3JWIbTAK +BggqhkjOPQQDAwNpADBmAjEAj6jcnboMBBf6Fek9LykBl7+BFjNAk2z8+e2AcG+q +j9uEwov1NcoG3GRvaBbhj5G5AjEA2Euly8LQCGzpGPta3U1fJAuwACEl74+nBCZx +4nxp5V2a+EEfOzmTk51V6s2N8fvB +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh 
+cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc +oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- diff --git a/hyper/common/bufsocket.py b/hyper/common/bufsocket.py index b35393af..854681ca 100644 --- a/hyper/common/bufsocket.py +++ b/hyper/common/bufsocket.py @@ -138,7 +138,7 @@ def 
recv(self, amt): else: should_read = True - if (self._remaining_capacity > self._bytes_in_buffer and should_read): + if should_read: count = self._sck.recv_into(self._buffer_view[self._buffer_end:]) # The socket just got closed. We should throw an exception if we diff --git a/hyper/common/connection.py b/hyper/common/connection.py index dee18d68..855994f8 100644 --- a/hyper/common/connection.py +++ b/hyper/common/connection.py @@ -44,8 +44,9 @@ class HTTPConnection(object): :param proxy_host: (optional) The proxy to connect to. This can be an IP address or a host name and may include a port. :param proxy_port: (optional) The proxy port to connect to. If not provided - and one also isn't provided in the ``proxy`` parameter, defaults to - 8080. + and one also isn't provided in the ``proxy_host`` parameter, defaults + to 8080. + :param proxy_headers: (optional) The headers to send to a proxy. """ def __init__(self, host, @@ -56,18 +57,24 @@ def __init__(self, ssl_context=None, proxy_host=None, proxy_port=None, + proxy_headers=None, + timeout=None, **kwargs): self._host = host self._port = port self._h1_kwargs = { 'secure': secure, 'ssl_context': ssl_context, - 'proxy_host': proxy_host, 'proxy_port': proxy_port + 'proxy_host': proxy_host, 'proxy_port': proxy_port, + 'proxy_headers': proxy_headers, 'enable_push': enable_push, + 'timeout': timeout } self._h2_kwargs = { 'window_manager': window_manager, 'enable_push': enable_push, 'secure': secure, 'ssl_context': ssl_context, - 'proxy_host': proxy_host, 'proxy_port': proxy_port + 'proxy_host': proxy_host, 'proxy_port': proxy_port, + 'proxy_headers': proxy_headers, + 'timeout': timeout } # Add any unexpected kwargs to both dictionaries. diff --git a/hyper/common/exceptions.py b/hyper/common/exceptions.py index 268431ab..78dfefc9 100644 --- a/hyper/common/exceptions.py +++ b/hyper/common/exceptions.py @@ -71,3 +71,22 @@ class MissingCertFile(Exception): The certificate file could not be found. 
""" pass + + +# Create our own ConnectionError. +try: # pragma: no cover + ConnectionError = ConnectionError +except NameError: # pragma: no cover + class ConnectionError(Exception): + """ + An error occurred during connection to a host. + """ + + +class ProxyError(ConnectionError): + """ + An error occurred during connection to a proxy. + """ + def __init__(self, message, response): + self.response = response + super(ProxyError, self).__init__(message) diff --git a/hyper/common/util.py b/hyper/common/util.py index 6d199a0c..2f286e10 100644 --- a/hyper/common/util.py +++ b/hyper/common/util.py @@ -5,8 +5,10 @@ General utility functions for use with hyper. """ +from enum import Enum + from hyper.compat import unicode, bytes, imap -from ..packages.rfc3986.uri import URIReference +from rfc3986 import URIReference from ..compat import is_py3 @@ -57,3 +59,11 @@ def to_native_string(string, encoding='utf-8'): return string return string.decode(encoding) if is_py3 else string.encode(encoding) + + +class HTTPVersion(Enum): + """ + Collection of all HTTP versions used in hyper. + """ + http11 = "HTTP/1.1" + http20 = "HTTP/2" diff --git a/hyper/contrib.py b/hyper/contrib.py index dccec518..79aa7d12 100644 --- a/hyper/contrib.py +++ b/hyper/contrib.py @@ -9,14 +9,17 @@ from requests.adapters import HTTPAdapter from requests.models import Response from requests.structures import CaseInsensitiveDict - from requests.utils import get_encoding_from_headers + from requests.utils import ( + get_encoding_from_headers, select_proxy, prepend_scheme_if_needed + ) from requests.cookies import extract_cookies_to_jar except ImportError: # pragma: no cover HTTPAdapter = object from hyper.common.connection import HTTPConnection -from hyper.compat import urlparse +from hyper.compat import urlparse, ssl from hyper.tls import init_context +from hyper.common.util import to_native_string class HTTP20Adapter(HTTPAdapter): @@ -25,11 +28,13 @@ class HTTP20Adapter(HTTPAdapter): HTTP/2. 
This implements some degree of connection pooling to maximise the HTTP/2 gain. """ - def __init__(self, *args, **kwargs): + def __init__(self, window_manager=None, *args, **kwargs): #: A mapping between HTTP netlocs and ``HTTP20Connection`` objects. self.connections = {} + self.window_manager = window_manager - def get_connection(self, host, port, scheme, cert=None): + def get_connection(self, host, port, scheme, cert=None, verify=True, + proxy=None, timeout=None): """ Gets an appropriate HTTP/2 connection object based on host/port/scheme/cert tuples. @@ -40,31 +45,64 @@ def get_connection(self, host, port, scheme, cert=None): port = 80 if not secure else 443 ssl_context = None - if cert is not None: + if not verify: + verify = False ssl_context = init_context(cert=cert) - + ssl_context.check_hostname = False + ssl_context.verify_mode = ssl.CERT_NONE + elif verify is True and cert is not None: + ssl_context = init_context(cert=cert) + elif verify is not True: + ssl_context = init_context(cert_path=verify, cert=cert) + + if proxy: + proxy_headers = self.proxy_headers(proxy) + proxy_netloc = urlparse(proxy).netloc + else: + proxy_headers = None + proxy_netloc = None + + # We put proxy headers in the connection_key, because + # ``proxy_headers`` method might be overridden, so we can't + # rely on proxy headers being the same for the same proxies. 
+ proxy_headers_key = (frozenset(proxy_headers.items()) + if proxy_headers else None) + connection_key = (host, port, scheme, cert, verify, + proxy_netloc, proxy_headers_key) try: - conn = self.connections[(host, port, scheme, cert)] + conn = self.connections[connection_key] except KeyError: conn = HTTPConnection( host, port, secure=secure, - ssl_context=ssl_context) - self.connections[(host, port, scheme, cert)] = conn + window_manager=self.window_manager, + ssl_context=ssl_context, + proxy_host=proxy_netloc, + proxy_headers=proxy_headers, + timeout=timeout) + self.connections[connection_key] = conn return conn - def send(self, request, stream=False, cert=None, **kwargs): + def send(self, request, stream=False, cert=None, verify=True, proxies=None, + timeout=None, **kwargs): """ Sends a HTTP message to the server. """ + proxy = select_proxy(request.url, proxies) + if proxy: + proxy = prepend_scheme_if_needed(proxy, 'http') + parsed = urlparse(request.url) conn = self.get_connection( parsed.hostname, parsed.port, parsed.scheme, - cert=cert) + cert=cert, + verify=verify, + proxy=proxy, + timeout=timeout) # Build the selector. selector = parsed.path @@ -89,7 +127,7 @@ def send(self, request, stream=False, cert=None, **kwargs): def build_response(self, request, resp): """ Builds a Requests' response object. This emulates most of the logic of - the standard fuction but deals with the lack of the ``.headers`` + the standard function but deals with the lack of the ``.headers`` property on the HTTP20Response object. 
Additionally, this function builds in a number of features that are @@ -99,7 +137,10 @@ def build_response(self, request, resp): response = Response() response.status_code = resp.status - response.headers = CaseInsensitiveDict(resp.headers.iter_raw()) + response.headers = CaseInsensitiveDict(( + map(to_native_string, h) + for h in resp.headers.iter_raw() + )) response.raw = resp response.reason = resp.reason response.encoding = get_encoding_from_headers(response.headers) @@ -157,3 +198,8 @@ def getheaders(self, name): orig.msg = FakeOriginalResponse(resp.headers.iter_raw()) return response + + def close(self): + for connection in self.connections.values(): + connection.close() + self.connections.clear() diff --git a/hyper/http11/connection.py b/hyper/http11/connection.py index 61361c35..4311d307 100644 --- a/hyper/http11/connection.py +++ b/hyper/http11/connection.py @@ -18,9 +18,11 @@ from .response import HTTP11Response from ..tls import wrap_socket, H2C_PROTOCOL from ..common.bufsocket import BufferedSocket -from ..common.exceptions import TLSUpgrade, HTTPUpgrade +from ..common.exceptions import TLSUpgrade, HTTPUpgrade, ProxyError from ..common.headers import HTTPHeaderMap -from ..common.util import to_bytestring, to_host_port_tuple +from ..common.util import ( + to_bytestring, to_host_port_tuple, to_native_string, HTTPVersion +) from ..compat import bytes # We prefer pycohttpparser to the pure-Python interpretation @@ -36,6 +38,43 @@ BODY_FLAT = 2 +def _create_tunnel(proxy_host, proxy_port, target_host, target_port, + proxy_headers=None, timeout=None): + """ + Sends CONNECT method to a proxy and returns a socket with established + connection to the target. 
+ + :returns: socket + """ + conn = HTTP11Connection(proxy_host, proxy_port, timeout=timeout) + conn.request('CONNECT', '%s:%d' % (target_host, target_port), + headers=proxy_headers) + + resp = conn.get_response() + if resp.status != 200: + raise ProxyError( + "Tunnel connection failed: %d %s" % + (resp.status, to_native_string(resp.reason)), + response=resp + ) + return conn._sock + + +def _headers_to_http_header_map(headers): + # TODO turn this to a classmethod of HTTPHeaderMap + headers = headers or {} + if not isinstance(headers, HTTPHeaderMap): + if isinstance(headers, Mapping): + headers = HTTPHeaderMap(headers.items()) + elif isinstance(headers, Iterable): + headers = HTTPHeaderMap(headers) + else: + raise ValueError( + 'Header argument must be a dictionary or an iterable' + ) + return headers + + class HTTP11Connection(object): """ An object representing a single HTTP/1.1 connection to a server. @@ -53,11 +92,16 @@ class HTTP11Connection(object): :param proxy_host: (optional) The proxy to connect to. This can be an IP address or a host name and may include a port. :param proxy_port: (optional) The proxy port to connect to. If not provided - and one also isn't provided in the ``proxy`` parameter, + and one also isn't provided in the ``proxy_host`` parameter, defaults to 8080. + :param proxy_headers: (optional) The headers to send to a proxy. 
""" + + version = HTTPVersion.http11 + def __init__(self, host, port=None, secure=None, ssl_context=None, - proxy_host=None, proxy_port=None, **kwargs): + proxy_host=None, proxy_port=None, proxy_headers=None, + timeout=None, **kwargs): if port is None: self.host, self.port = to_host_port_tuple(host, default_port=80) else: @@ -75,21 +119,26 @@ def __init__(self, host, port=None, secure=None, ssl_context=None, # only send http upgrade headers for non-secure connection self._send_http_upgrade = not self.secure + self._enable_push = kwargs.get('enable_push') self.ssl_context = ssl_context self._sock = None + # Keep the current request method in order to be able to know + # in get_response() what was the request verb. + self._current_request_method = None + # Setup proxy details if applicable. - if proxy_host: - if proxy_port is None: - self.proxy_host, self.proxy_port = to_host_port_tuple( - proxy_host, default_port=8080 - ) - else: - self.proxy_host, self.proxy_port = proxy_host, proxy_port + if proxy_host and proxy_port is None: + self.proxy_host, self.proxy_port = to_host_port_tuple( + proxy_host, default_port=8080 + ) + elif proxy_host: + self.proxy_host, self.proxy_port = proxy_host, proxy_port else: self.proxy_host = None self.proxy_port = None + self.proxy_headers = proxy_headers #: The size of the in-memory buffer used to store data from the #: network. This is used as a performance optimisation. Increase buffer @@ -101,6 +150,9 @@ def __init__(self, host, port=None, secure=None, ssl_context=None, #: the standard hyper parsing interface. self.parser = Parser() + # timeout + self._timeout = timeout + def connect(self): """ Connect to the server specified when the object was created. This is a @@ -109,23 +161,44 @@ def connect(self): :returns: Nothing. 
""" if self._sock is None: - if not self.proxy_host: - host = self.host - port = self.port - else: - host = self.proxy_host - port = self.proxy_port - sock = socket.create_connection((host, port), 5) + if isinstance(self._timeout, tuple): + connect_timeout = self._timeout[0] + read_timeout = self._timeout[1] + else: + connect_timeout = self._timeout + read_timeout = self._timeout + + if self.proxy_host and self.secure: + # Send http CONNECT method to a proxy and acquire the socket + sock = _create_tunnel( + self.proxy_host, + self.proxy_port, + self.host, + self.port, + proxy_headers=self.proxy_headers, + timeout=self._timeout + ) + elif self.proxy_host: + # Simple http proxy + sock = socket.create_connection( + (self.proxy_host, self.proxy_port), + timeout=connect_timeout + ) + else: + sock = socket.create_connection((self.host, self.port), + timeout=connect_timeout) proto = None if self.secure: - assert not self.proxy_host, "Proxy with HTTPS not supported." - sock, proto = wrap_socket(sock, host, self.ssl_context) + sock, proto = wrap_socket(sock, self.host, self.ssl_context) log.debug("Selected protocol: %s", proto) sock = BufferedSocket(sock, self.network_buffer_size) + # Set read timeout + sock.settimeout(read_timeout) + if proto not in ('http/1.1', None): raise TLSUpgrade(proto, sock) @@ -150,25 +223,29 @@ def request(self, method, url, body=None, headers=None): :returns: Nothing. """ - headers = headers or {} - method = to_bytestring(method) + is_connect_method = b'CONNECT' == method.upper() + self._current_request_method = method + + if self.proxy_host and not self.secure: + # As per https://tools.ietf.org/html/rfc2068#section-5.1.2: + # The absoluteURI form is required when the request is being made + # to a proxy. 
+ url = self._absolute_http_url(url) url = to_bytestring(url) - if not isinstance(headers, HTTPHeaderMap): - if isinstance(headers, Mapping): - headers = HTTPHeaderMap(headers.items()) - elif isinstance(headers, Iterable): - headers = HTTPHeaderMap(headers) - else: - raise ValueError( - 'Header argument must be a dictionary or an iterable' - ) + headers = _headers_to_http_header_map(headers) + + # Append proxy headers. + if self.proxy_host and not self.secure: + headers.update( + _headers_to_http_header_map(self.proxy_headers).items() + ) if self._sock is None: self.connect() - if self._send_http_upgrade: + if not is_connect_method and self._send_http_upgrade: self._add_upgrade_headers(headers) self._send_http_upgrade = False @@ -176,7 +253,7 @@ def request(self, method, url, body=None, headers=None): if body: body_type = self._add_body_headers(headers, body) - if b'host' not in headers: + if not is_connect_method and b'host' not in headers: headers[b'host'] = self.host # Begin by emitting the header block. @@ -188,6 +265,10 @@ def request(self, method, url, body=None, headers=None): return + def _absolute_http_url(self, url): + port_part = ':%d' % self.port if self.port != 80 else '' + return 'http://%s%s%s' % (self.host, port_part, url) + def get_response(self): """ Returns a response object. @@ -195,6 +276,9 @@ def get_response(self): This is an early beta, so the response object is pretty stupid. That's ok, we'll fix it later. """ + method = self._current_request_method + self._current_request_method = None + headers = HTTPHeaderMap() response = None @@ -208,8 +292,14 @@ def get_response(self): self._sock.advance_buffer(response.consumed) + # Check for a successful "switching protocols to h2c" response. + # "Connection: upgrade" is not strictly necessary on the receiving end, + # but we want to fail fast on broken servers or intermediaries: + # https://github.com/Lukasa/hyper/issues/312. 
+ # Connection options are case-insensitive, while upgrade tokens are + # case-sensitive: https://github.com/httpwg/http11bis/issues/8. if (response.status == 101 and - b'upgrade' in headers['connection'] and + b'upgrade' in map(bytes.lower, headers['connection']) and H2C_PROTOCOL.encode('utf-8') in headers['upgrade']): raise HTTPUpgrade(H2C_PROTOCOL, self._sock) @@ -218,7 +308,8 @@ def get_response(self): response.msg.tobytes(), headers, self._sock, - self + self, + method ) def _send_headers(self, method, url, headers): @@ -273,6 +364,10 @@ def _add_upgrade_headers(self, headers): # Settings header. http2_settings = SettingsFrame(0) http2_settings.settings[SettingsFrame.INITIAL_WINDOW_SIZE] = 65535 + if self._enable_push is not None: + http2_settings.settings[SettingsFrame.ENABLE_PUSH] = ( + int(self._enable_push) + ) encoded_settings = base64.urlsafe_b64encode( http2_settings.serialize_body() ) @@ -371,7 +466,8 @@ def close(self): .. warning:: This method should absolutely only be called when you are certain the connection object is no longer needed. 
""" - self._sock.close() + if self._sock is not None: + self._sock.close() self._sock = None # The following two methods are the implementation of the context manager diff --git a/hyper/http11/parser.py b/hyper/http11/parser.py index ee391210..acc204f4 100644 --- a/hyper/http11/parser.py +++ b/hyper/http11/parser.py @@ -49,7 +49,8 @@ def parse_response(self, buffer): if index == -1: return None - version, status, reason = temp_buffer[0:index].split(None, 2) + version, status, reason = ( + temp_buffer[0:index].split(None, 2) + [b''])[:3] if not version.startswith(b'HTTP/1.'): raise ParseError("Not HTTP/1.X!") diff --git a/hyper/http11/response.py b/hyper/http11/response.py index ee23be08..7ff7a523 100644 --- a/hyper/http11/response.py +++ b/hyper/http11/response.py @@ -9,10 +9,12 @@ import logging import weakref import zlib +import brotli from ..common.decoder import DeflateDecoder from ..common.exceptions import ChunkedDecodeError, InvalidResponseError from ..common.exceptions import ConnectionResetError +from ..common.util import HTTPVersion log = logging.getLogger(__name__) @@ -23,7 +25,11 @@ class HTTP11Response(object): provides access to the response headers and the entity body. The response is an iterable object and can be used in a with statement. """ - def __init__(self, code, reason, headers, sock, connection=None): + + version = HTTPVersion.http11 + + def __init__(self, code, reason, headers, sock, connection=None, + request_method=None): #: The reason phrase returned by the server. self.reason = reason @@ -48,21 +54,32 @@ def __init__(self, code, reason, headers, sock, connection=None): self._expect_close = True # The expected length of the body. - try: - self._length = int(self.headers[b'content-length'][0]) - except KeyError: - self._length = None + if request_method != b'HEAD': + try: + self._length = int(self.headers[b'content-length'][0]) + except KeyError: + self._length = None + else: + self._length = 0 # Whether we expect a chunked response. 
self._chunked = ( b'chunked' in self.headers.get(b'transfer-encoding', []) ) - # One of the following must be true: we must expect that the connection - # will be closed following the body, or that a content-length was sent, - # or that we're getting a chunked response. - # FIXME: Remove naked assert, replace with something better. - assert self._expect_close or self._length is not None or self._chunked + # When content-length is absent and response is not chunked, + # body length is determined by connection closure. + # https://tools.ietf.org/html/rfc7230#section-3.3.3 + if self._length is None and not self._chunked: + # 200 response to a CONNECT request means that proxy has connected + # to the target host and it will start forwarding everything sent + # from the either side. Thus we must not try to read body of this + # response. Socket of current connection will be taken over by + # the code that has sent a CONNECT request. + if not (request_method is not None and + b'CONNECT' == request_method.upper() and + code == 200): + self._expect_close = True # This object is used for decompressing gzipped request bodies. Right # now we only support gzip because that's all the RFC mandates of us. 
@@ -72,6 +89,8 @@ def __init__(self, code, reason, headers, sock, connection=None): # http://stackoverflow.com/a/2695466/1401686 if b'gzip' in self.headers.get(b'content-encoding', []): self._decompressobj = zlib.decompressobj(16 + zlib.MAX_WBITS) + elif b'br' in self.headers.get(b'content-encoding', []): + self._decompressobj = brotli.Decompressor() elif b'deflate' in self.headers.get(b'content-encoding', []): self._decompressobj = DeflateDecoder() else: diff --git a/hyper/http20/connection.py b/hyper/http20/connection.py index 31dc7a47..b8be292b 100644 --- a/hyper/http20/connection.py +++ b/hyper/http20/connection.py @@ -14,8 +14,11 @@ from ..common.exceptions import ConnectionResetError from ..common.bufsocket import BufferedSocket from ..common.headers import HTTPHeaderMap -from ..common.util import to_host_port_tuple, to_native_string, to_bytestring +from ..common.util import ( + to_host_port_tuple, to_native_string, to_bytestring, HTTPVersion +) from ..compat import unicode, bytes +from ..http11.connection import _create_tunnel from .stream import Stream from .response import HTTP20Response, HTTP20Push from .window import FlowControlManager @@ -27,6 +30,7 @@ import socket import time import threading +import itertools log = logging.getLogger(__name__) @@ -88,12 +92,17 @@ class HTTP20Connection(object): :param proxy_host: (optional) The proxy to connect to. This can be an IP address or a host name and may include a port. :param proxy_port: (optional) The proxy port to connect to. If not provided - and one also isn't provided in the ``proxy`` parameter, defaults to - 8080. + and one also isn't provided in the ``proxy_host`` parameter, defaults + to 8080. + :param proxy_headers: (optional) The headers to send to a proxy. 
""" + + version = HTTPVersion.http20 + def __init__(self, host, port=None, secure=None, window_manager=None, enable_push=False, ssl_context=None, proxy_host=None, - proxy_port=None, force_proto=None, **kwargs): + proxy_port=None, force_proto=None, proxy_headers=None, + timeout=None, **kwargs): """ Creates an HTTP/2 connection to a specific server. """ @@ -113,16 +122,16 @@ def __init__(self, host, port=None, secure=None, window_manager=None, self.ssl_context = ssl_context # Setup proxy details if applicable. - if proxy_host: - if proxy_port is None: - self.proxy_host, self.proxy_port = to_host_port_tuple( - proxy_host, default_port=8080 - ) - else: - self.proxy_host, self.proxy_port = proxy_host, proxy_port + if proxy_host and proxy_port is None: + self.proxy_host, self.proxy_port = to_host_port_tuple( + proxy_host, default_port=8080 + ) + elif proxy_host: + self.proxy_host, self.proxy_port = proxy_host, proxy_port else: self.proxy_host = None self.proxy_port = None + self.proxy_headers = proxy_headers #: The size of the in-memory buffer used to store data from the #: network. This is used as a performance optimisation. Increase buffer @@ -134,41 +143,17 @@ def __init__(self, host, port=None, secure=None, window_manager=None, # Concurrency # - # Use one lock (_lock) to synchronize any interaction with global - # connection state, e.g. stream creation/deletion. - # - # It's ok to use the same in lock all these cases as they occur at - # different/linked points in the connection's lifecycle. - # - # Use another 2 locks (_write_lock, _read_lock) to synchronize - # - _send_cb - # - _recv_cb - # respectively. - # - # I.e, send/recieve on the connection and its streams are serialized - # separately across the threads accessing the connection. This is a - # simple way of providing thread-safety. - # - # _write_lock and _read_lock synchronize all interactions between - # streams and the connnection. 
There is a third I/O callback, - # _close_stream, passed to a stream's constructor. It does not need to - # be synchronized, it uses _send_cb internally (which is serialized); - # its other activity (safe deletion of the stream from self.streams) - # does not require synchronization. - # - # _read_lock may be acquired when already holding the _write_lock, - # when they both held it is always by acquiring _write_lock first. - # - # Either _read_lock or _write_lock may be acquired whilst holding _lock - # which should always be acquired before either of the other two. + # Use one universal lock (_lock) to synchronize all interaction + # with global connection state, _send_cb and _recv_cb. self._lock = threading.RLock() - self._write_lock = threading.RLock() - self._read_lock = threading.RLock() # Create the mutable state. self.__wm_class = window_manager or FlowControlManager self.__init_state() + # timeout + self._timeout = timeout + return def __init_state(self): @@ -227,7 +212,7 @@ def ping(self, opaque_data): :returns: Nothing """ self.connect() - with self._write_lock: + with self._lock: with self._conn as conn: conn.ping(to_bytestring(opaque_data)) self._send_outstanding_data() @@ -266,11 +251,19 @@ def request(self, method, url, body=None, headers=None): # If threads interleave these operations, it could result in messages # being sent in the wrong order, which can lead to the out-of-order # messages with lower stream IDs being closed prematurely. - with self._write_lock: + with self._lock: + # Unlike HTTP/1.1, HTTP/2 (according to RFC 7540) doesn't require + # to use absolute URI when proxying. 
+ stream_id = self.putrequest(method, url) default_headers = (':method', ':scheme', ':authority', ':path') - for name, value in headers.items(): + all_headers = headers.items() + if self.proxy_host and not self.secure: + proxy_headers = self.proxy_headers or {} + all_headers = itertools.chain(all_headers, + proxy_headers.items()) + for name, value in all_headers: is_default = to_native_string(name) in default_headers self.putheader(name, value, stream_id, replace=is_default) @@ -353,27 +346,50 @@ def connect(self): if self._sock is not None: return - if not self.proxy_host: - host = self.host - port = self.port + if isinstance(self._timeout, tuple): + connect_timeout = self._timeout[0] + read_timeout = self._timeout[1] else: - host = self.proxy_host - port = self.proxy_port - - sock = socket.create_connection((host, port)) + connect_timeout = self._timeout + read_timeout = self._timeout + + if self.proxy_host and self.secure: + # Send http CONNECT method to a proxy and acquire the socket + sock = _create_tunnel( + self.proxy_host, + self.proxy_port, + self.host, + self.port, + proxy_headers=self.proxy_headers, + timeout=self._timeout + ) + elif self.proxy_host: + # Simple http proxy + sock = socket.create_connection( + (self.proxy_host, self.proxy_port), + timeout=connect_timeout + ) + else: + sock = socket.create_connection((self.host, self.port), + timeout=connect_timeout) if self.secure: - assert not self.proxy_host, "Proxy with HTTPS not supported." - sock, proto = wrap_socket(sock, host, self.ssl_context, + sock, proto = wrap_socket(sock, self.host, self.ssl_context, force_proto=self.force_proto) else: proto = H2C_PROTOCOL log.debug("Selected NPN protocol: %s", proto) - assert proto in H2_NPN_PROTOCOLS or proto == H2C_PROTOCOL + assert proto in H2_NPN_PROTOCOLS or proto == H2C_PROTOCOL, ( + "No suitable protocol found. Supported protocols: %s. " + "Check your OpenSSL version." 
+ ) % ','.join(H2_NPN_PROTOCOLS + [H2C_PROTOCOL]) self._sock = BufferedSocket(sock, self.network_buffer_size) + # Set read timeout + self._sock.settimeout(read_timeout) + self._send_preamble() def _connect_upgrade(self, sock): @@ -456,10 +472,10 @@ def _send_outstanding_data(self, tolerate_peer_gone=False, send_empty=True): # Concurrency # - # Hold _write_lock; getting and writing data from _conn is synchronized + # Hold _lock; getting and writing data from _conn is synchronized # # I/O occurs while the lock is held; waiting threads will see a delay. - with self._write_lock: + with self._lock: with self._conn as conn: data = conn.data_to_send() if data or send_empty: @@ -549,9 +565,9 @@ def endheaders(self, message_body=None, final=False, stream_id=None): # Concurrency: # - # Hold _write_lock: synchronize access to the connection's HPACK + # Hold _lock: synchronize access to the connection's HPACK # encoder and decoder and the subsquent write to the connection - with self._write_lock: + with self._lock: stream.send_headers(headers_only) # Send whatever data we have. @@ -614,10 +630,10 @@ def _send_cb(self, data, tolerate_peer_gone=False): """ # Concurrency # - # Hold _write_lock: ensures only writer at a time + # Hold _lock: ensures only writer at a time # # I/O occurs while the lock is held; waiting threads will see a delay. - with self._write_lock: + with self._lock: try: self._sock.sendall(data) except socket.error as e: @@ -632,12 +648,12 @@ def _adjust_receive_window(self, frame_len): """ # Concurrency # - # Hold _write_lock; synchronize the window manager update and the + # Hold _lock; synchronize the window manager update and the # subsequent potential write to the connection # # I/O may occur while the lock is held; waiting threads may see a # delay. 
- with self._write_lock: + with self._lock: increment = self.window_manager._handle_frame(frame_len) if increment: @@ -659,7 +675,7 @@ def _single_read(self): # Synchronizes reading the data # # I/O occurs while the lock is held; waiting threads will see a delay. - with self._read_lock: + with self._lock: if self._sock is None: raise ConnectionError('tried to read after connection close') self._sock.fill() @@ -753,7 +769,7 @@ def _recv_cb(self, stream_id=0): # re-acquired in the calls to self._single_read. # # I/O occurs while the lock is held; waiting threads will see a delay. - with self._read_lock: + with self._lock: log.debug('recv for stream %d with %s already present', stream_id, self.recent_recv_streams) @@ -804,11 +820,11 @@ def _send_rst_frame(self, stream_id, error_code): """ # Concurrency # - # Hold _write_lock; synchronize generating the reset frame and writing + # Hold _lock; synchronize generating the reset frame and writing # it # # I/O occurs while the lock is held; waiting threads will see a delay. - with self._write_lock: + with self._lock: with self._conn as conn: conn.reset_stream(stream_id, error_code=error_code) self._send_outstanding_data() diff --git a/hyper/http20/exceptions.py b/hyper/http20/exceptions.py index 69e25816..634ef284 100644 --- a/hyper/http20/exceptions.py +++ b/hyper/http20/exceptions.py @@ -5,6 +5,7 @@ This defines exceptions used in the HTTP/2 portion of hyper. """ +from ..common.exceptions import ConnectionError as CommonConnectionError class HTTP20Error(Exception): @@ -28,7 +29,7 @@ class HPACKDecodingError(HTTP20Error): pass -class ConnectionError(HTTP20Error): +class ConnectionError(CommonConnectionError, HTTP20Error): """ The remote party signalled an error affecting the entire HTTP/2 connection, and the connection has been closed. 
diff --git a/hyper/http20/response.py b/hyper/http20/response.py index bb339b2f..bed10475 100644 --- a/hyper/http20/response.py +++ b/hyper/http20/response.py @@ -8,9 +8,11 @@ """ import logging import zlib +import brotli from ..common.decoder import DeflateDecoder from ..common.headers import HTTPHeaderMap +from ..common.util import HTTPVersion log = logging.getLogger(__name__) @@ -28,6 +30,13 @@ def strip_headers(headers): del headers[name] +decompressors = { + b'gzip': lambda: zlib.decompressobj(16 + zlib.MAX_WBITS), + b'br': brotli.Decompressor, + b'deflate': DeflateDecoder +} + + class HTTP20Response(object): """ An ``HTTP20Response`` wraps the HTTP/2 response from the server. It @@ -36,6 +45,10 @@ class HTTP20Response(object): the persistent connections used in HTTP/2 this has no effect, and is done soley for compatibility). """ + + version = HTTPVersion.http20 + _decompressobj = None + def __init__(self, headers, stream): #: The reason phrase returned by the server. This is not used in #: HTTP/2, and so is always the empty string. @@ -67,12 +80,10 @@ def __init__(self, headers, stream): # This 16 + MAX_WBITS nonsense is to force gzip. 
See this # Stack Overflow answer for more: # http://stackoverflow.com/a/2695466/1401686 - if b'gzip' in self.headers.get(b'content-encoding', []): - self._decompressobj = zlib.decompressobj(16 + zlib.MAX_WBITS) - elif b'deflate' in self.headers.get(b'content-encoding', []): - self._decompressobj = DeflateDecoder() - else: - self._decompressobj = None + for c in self.headers.get(b'content-encoding', []): + if c in decompressors: + self._decompressobj = decompressors.get(c)() + break @property def trailers(self): diff --git a/hyper/http20/stream.py b/hyper/http20/stream.py index 598a1490..3c064783 100644 --- a/hyper/http20/stream.py +++ b/hyper/http20/stream.py @@ -122,8 +122,18 @@ def file_iterator(fobj): chunks = (data[i:i+MAX_CHUNK] for i in range(0, len(data), MAX_CHUNK)) - for chunk in chunks: - self._send_chunk(chunk, final) + # since we need to know when we have a last package we need to know + # if there is another package in advance + cur_chunk = None + try: + cur_chunk = next(chunks) + while True: + next_chunk = next(chunks) + self._send_chunk(cur_chunk, False) + cur_chunk = next_chunk + except StopIteration: + if cur_chunk is not None: # cur_chunk none when no chunks to send + self._send_chunk(cur_chunk, final) def _read(self, amt=None): """ @@ -323,19 +333,12 @@ def _send_chunk(self, data, final): while len(data) > self._out_flow_control_window: self._recv_cb() - # If the length of the data is less than MAX_CHUNK, we're probably - # at the end of the file. If this is the end of the data, mark it - # as END_STREAM. - end_stream = False - if len(data) < MAX_CHUNK and final: - end_stream = True - # Send the frame and decrement the flow control window. 
with self._conn as conn: conn.send_data( - stream_id=self.stream_id, data=data, end_stream=end_stream + stream_id=self.stream_id, data=data, end_stream=final ) self._send_outstanding_data() - if end_stream: + if final: self.local_closed = True diff --git a/hyper/http20/util.py b/hyper/http20/util.py index b116d095..ca5547d0 100644 --- a/hyper/http20/util.py +++ b/hyper/http20/util.py @@ -52,9 +52,9 @@ def h2_safe_headers(headers): """ stripped = { i.lower().strip() - for k, v in headers if k == 'connection' - for i in v.split(',') + for k, v in headers if k == b'connection' + for i in v.split(b',') } - stripped.add('connection') + stripped.add(b'connection') return [header for header in headers if header[0] not in stripped] diff --git a/hyper/packages/__init__.py b/hyper/packages/__init__.py deleted file mode 100644 index 4cf1e653..00000000 --- a/hyper/packages/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -""" -hyper/packages -~~~~~~~~~~~~~~ - -This module contains external packages that are vendored into hyper. -""" diff --git a/hyper/packages/rfc3986/LICENSE b/hyper/packages/rfc3986/LICENSE deleted file mode 100644 index 72ce24cf..00000000 --- a/hyper/packages/rfc3986/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ -Copyright 2014 Ian Cordasco, Rackspace - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
diff --git a/hyper/packages/rfc3986/__init__.py b/hyper/packages/rfc3986/__init__.py deleted file mode 100644 index a3aea4c4..00000000 --- a/hyper/packages/rfc3986/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2014 Rackspace -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -rfc3986 -======= - -An implementation of semantics and validations described in RFC 3986. See -http://rfc3986.rtfd.org/ for documentation. - -:copyright: (c) 2014 Rackspace -:license: Apache v2.0, see LICENSE for details -""" - -__title__ = 'rfc3986' -__author__ = 'Ian Cordasco' -__author_email__ = 'ian.cordasco@rackspace.com' -__license__ = 'Apache v2.0' -__copyright__ = 'Copyright 2014 Rackspace' -__version__ = '0.3.0' - -from .api import (URIReference, uri_reference, is_valid_uri, normalize_uri, - urlparse) -from .parseresult import ParseResult - -__all__ = ( - 'ParseResult', - 'URIReference', - 'is_valid_uri', - 'normalize_uri', - 'uri_reference', - 'urlparse', -) diff --git a/hyper/packages/rfc3986/api.py b/hyper/packages/rfc3986/api.py deleted file mode 100644 index 3e9e401a..00000000 --- a/hyper/packages/rfc3986/api.py +++ /dev/null @@ -1,92 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2014 Rackspace -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -rfc3986.api -~~~~~~~~~~~ - -This defines the simple API to rfc3986. This module defines 3 functions and -provides access to the class ``URIReference``. -""" - -from .uri import URIReference -from .parseresult import ParseResult - - -def uri_reference(uri, encoding='utf-8'): - """Parse a URI string into a URIReference. - - This is a convenience function. You could achieve the same end by using - ``URIReference.from_string(uri)``. - - :param str uri: The URI which needs to be parsed into a reference. - :param str encoding: The encoding of the string provided - :returns: A parsed URI - :rtype: :class:`URIReference` - """ - return URIReference.from_string(uri, encoding) - - -def is_valid_uri(uri, encoding='utf-8', **kwargs): - """Determine if the URI given is valid. - - This is a convenience function. You could use either - ``uri_reference(uri).is_valid()`` or - ``URIReference.from_string(uri).is_valid()`` to achieve the same result. - - :param str uri: The URI to be validated. - :param str encoding: The encoding of the string provided - :param bool require_scheme: Set to ``True`` if you wish to require the - presence of the scheme component. - :param bool require_authority: Set to ``True`` if you wish to require the - presence of the authority component. - :param bool require_path: Set to ``True`` if you wish to require the - presence of the path component. - :param bool require_query: Set to ``True`` if you wish to require the - presence of the query component. 
- :param bool require_fragment: Set to ``True`` if you wish to require the - presence of the fragment component. - :returns: ``True`` if the URI is valid, ``False`` otherwise. - :rtype: bool - """ - return URIReference.from_string(uri, encoding).is_valid(**kwargs) - - -def normalize_uri(uri, encoding='utf-8'): - """Normalize the given URI. - - This is a convenience function. You could use either - ``uri_reference(uri).normalize().unsplit()`` or - ``URIReference.from_string(uri).normalize().unsplit()`` instead. - - :param str uri: The URI to be normalized. - :param str encoding: The encoding of the string provided - :returns: The normalized URI. - :rtype: str - """ - normalized_reference = URIReference.from_string(uri, encoding).normalize() - return normalized_reference.unsplit() - - -def urlparse(uri, encoding='utf-8'): - """Parse a given URI and return a ParseResult. - - This is a partial replacement of the standard library's urlparse function. - - :param str uri: The URI to be parsed. - :param str encoding: The encoding of the string provided. - :returns: A parsed URI - :rtype: :class:`~rfc3986.parseresult.ParseResult` - """ - return ParseResult.from_string(uri, encoding, strict=False) diff --git a/hyper/packages/rfc3986/compat.py b/hyper/packages/rfc3986/compat.py deleted file mode 100644 index 6fc7f6d8..00000000 --- a/hyper/packages/rfc3986/compat.py +++ /dev/null @@ -1,31 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2014 Rackspace -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -import sys - - -if sys.version_info >= (3, 0): - unicode = str # Python 3.x - - -def to_str(b, encoding): - if hasattr(b, 'decode') and not isinstance(b, unicode): - b = b.decode('utf-8') - return b - - -def to_bytes(s, encoding): - if hasattr(s, 'encode') and not isinstance(s, bytes): - s = s.encode('utf-8') - return s diff --git a/hyper/packages/rfc3986/exceptions.py b/hyper/packages/rfc3986/exceptions.py deleted file mode 100644 index f9adbde7..00000000 --- a/hyper/packages/rfc3986/exceptions.py +++ /dev/null @@ -1,21 +0,0 @@ -# -*- coding: utf-8 -*- -class RFC3986Exception(Exception): - pass - - -class InvalidAuthority(RFC3986Exception): - def __init__(self, authority): - super(InvalidAuthority, self).__init__( - "The authority ({0}) is not valid.".format(authority)) - - -class InvalidPort(RFC3986Exception): - def __init__(self, port): - super(InvalidPort, self).__init__( - 'The port ("{0}") is not valid.'.format(port)) - - -class ResolutionError(RFC3986Exception): - def __init__(self, uri): - super(ResolutionError, self).__init__( - "{0} is not an absolute URI.".format(uri.unsplit())) diff --git a/hyper/packages/rfc3986/misc.py b/hyper/packages/rfc3986/misc.py deleted file mode 100644 index c599434c..00000000 --- a/hyper/packages/rfc3986/misc.py +++ /dev/null @@ -1,214 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2014 Rackspace -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -""" -rfc3986.misc -~~~~~~~~~~~~ - -This module contains important constants, patterns, and compiled regular -expressions for parsing and validating URIs and their components. -""" - -import re - -# These are enumerated for the named tuple used as a superclass of -# URIReference -URI_COMPONENTS = ['scheme', 'authority', 'path', 'query', 'fragment'] - -important_characters = { - 'generic_delimiters': ":/?#[]@", - 'sub_delimiters': "!$&'()*+,;=", - # We need to escape the '*' in this case - 're_sub_delimiters': "!$&'()\*+,;=", - 'unreserved_chars': ('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz' - '0123456789._~-'), - # We need to escape the '-' in this case: - 're_unreserved': 'A-Za-z0-9._~\-', - } -# For details about delimiters and reserved characters, see: -# http://tools.ietf.org/html/rfc3986#section-2.2 -GENERIC_DELIMITERS = set(important_characters['generic_delimiters']) -SUB_DELIMITERS = set(important_characters['sub_delimiters']) -RESERVED_CHARS = GENERIC_DELIMITERS.union(SUB_DELIMITERS) -# For details about unreserved characters, see: -# http://tools.ietf.org/html/rfc3986#section-2.3 -UNRESERVED_CHARS = set(important_characters['unreserved_chars']) -NON_PCT_ENCODED = RESERVED_CHARS.union(UNRESERVED_CHARS).union('%') - -# Extracted from http://tools.ietf.org/html/rfc3986#appendix-B -component_pattern_dict = { - 'scheme': '[^:/?#]+', - 'authority': '[^/?#]*', - 'path': '[^?#]*', - 'query': '[^#]*', - 'fragment': '.*', - } - -# See http://tools.ietf.org/html/rfc3986#appendix-B -# In this case, we name each of the important matches so we can use -# SRE_Match#groupdict to parse the values out if we so choose. This is also -# modified to ignore other matches that are not important to the parsing of -# the reference so we can also simply use SRE_Match#groups. -expression = ('(?:(?P{scheme}):)?(?://(?P{authority}))?' - '(?P{path})(?:\?(?P{query}))?' 
- '(?:#(?P{fragment}))?' - ).format(**component_pattern_dict) - -URI_MATCHER = re.compile(expression) - -# ######################### -# Authority Matcher Section -# ######################### - -# Host patterns, see: http://tools.ietf.org/html/rfc3986#section-3.2.2 -# The pattern for a regular name, e.g., www.google.com, api.github.com -reg_name = '(({0})*|[{1}]*)'.format( - '%[0-9A-Fa-f]{2}', - important_characters['re_sub_delimiters'] + - important_characters['re_unreserved'] - ) -# The pattern for an IPv4 address, e.g., 192.168.255.255, 127.0.0.1, -ipv4 = '(\d{1,3}.){3}\d{1,3}' -# Hexadecimal characters used in each piece of an IPv6 address -hexdig = '[0-9A-Fa-f]{1,4}' -# Least-significant 32 bits of an IPv6 address -ls32 = '({hex}:{hex}|{ipv4})'.format(hex=hexdig, ipv4=ipv4) -# Substitutions into the following patterns for IPv6 patterns defined -# http://tools.ietf.org/html/rfc3986#page-20 -subs = {'hex': hexdig, 'ls32': ls32} - -# Below: h16 = hexdig, see: https://tools.ietf.org/html/rfc5234 for details -# about ABNF (Augmented Backus-Naur Form) use in the comments -variations = [ - # 6( h16 ":" ) ls32 - '(%(hex)s:){6}%(ls32)s' % subs, - # "::" 5( h16 ":" ) ls32 - '::(%(hex)s:){5}%(ls32)s' % subs, - # [ h16 ] "::" 4( h16 ":" ) ls32 - '(%(hex)s)?::(%(hex)s:){4}%(ls32)s' % subs, - # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32 - '((%(hex)s:)?%(hex)s)?::(%(hex)s:){3}%(ls32)s' % subs, - # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32 - '((%(hex)s:){0,2}%(hex)s)?::(%(hex)s:){2}%(ls32)s' % subs, - # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32 - '((%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s' % subs, - # [ *4( h16 ":" ) h16 ] "::" ls32 - '((%(hex)s:){0,4}%(hex)s)?::%(ls32)s' % subs, - # [ *5( h16 ":" ) h16 ] "::" h16 - '((%(hex)s:){0,5}%(hex)s)?::%(hex)s' % subs, - # [ *6( h16 ":" ) h16 ] "::" - '((%(hex)s:){0,6}%(hex)s)?::' % subs, - ] - -ipv6 = '(({0})|({1})|({2})|({3})|({4})|({5})|({6})|({7}))'.format(*variations) - -ipv_future = 'v[0-9A-Fa-f]+.[%s]+' % ( - 
important_characters['re_unreserved'] + - important_characters['re_sub_delimiters'] + - ':') - -ip_literal = '\[({0}|{1})\]'.format(ipv6, ipv_future) - -# Pattern for matching the host piece of the authority -HOST_PATTERN = '({0}|{1}|{2})'.format(reg_name, ipv4, ip_literal) - -SUBAUTHORITY_MATCHER = re.compile(( - '^(?:(?P[A-Za-z0-9_.~\-%:]+)@)?' # userinfo - '(?P{0}?)' # host - ':?(?P\d+)?$' # port - ).format(HOST_PATTERN)) - -IPv4_MATCHER = re.compile('^' + ipv4 + '$') - - -# #################### -# Path Matcher Section -# #################### - -# See http://tools.ietf.org/html/rfc3986#section-3.3 for more information -# about the path patterns defined below. - -# Percent encoded character values -pct_encoded = '%[A-Fa-f0-9]{2}' -pchar = ('([' + important_characters['re_unreserved'] - + important_characters['re_sub_delimiters'] - + ':@]|%s)' % pct_encoded) -segments = { - 'segment': pchar + '*', - # Non-zero length segment - 'segment-nz': pchar + '+', - # Non-zero length segment without ":" - 'segment-nz-nc': pchar.replace(':', '') + '+' - } - -# Path types taken from Section 3.3 (linked above) -path_empty = '^$' -path_rootless = '%(segment-nz)s(/%(segment)s)*' % segments -path_noscheme = '%(segment-nz-nc)s(/%(segment)s)*' % segments -path_absolute = '/(%s)?' 
% path_rootless -path_abempty = '(/%(segment)s)*' % segments - -# Matcher used to validate path components -PATH_MATCHER = re.compile('^(%s|%s|%s|%s|%s)$' % ( - path_abempty, path_absolute, path_noscheme, path_rootless, path_empty - )) - - -# ################################## -# Query and Fragment Matcher Section -# ################################## - -QUERY_MATCHER = re.compile( - '^([/?:@' + important_characters['re_unreserved'] - + important_characters['re_sub_delimiters'] - + ']|%s)*$' % pct_encoded) - -FRAGMENT_MATCHER = QUERY_MATCHER - -# Scheme validation, see: http://tools.ietf.org/html/rfc3986#section-3.1 -SCHEME_MATCHER = re.compile('^[A-Za-z][A-Za-z0-9+.\-]*$') - -# Relative reference matcher - -# See http://tools.ietf.org/html/rfc3986#section-4.2 for details -relative_part = '(//%s%s|%s|%s|%s)' % ( - component_pattern_dict['authority'], path_abempty, path_absolute, - path_noscheme, path_empty - ) - -RELATIVE_REF_MATCHER = re.compile('^%s(\?%s)?(#%s)?$' % ( - relative_part, QUERY_MATCHER.pattern, FRAGMENT_MATCHER.pattern - )) - -# See http://tools.ietf.org/html/rfc3986#section-3 for definition -hier_part = '(//%s%s|%s|%s|%s)' % ( - component_pattern_dict['authority'], path_abempty, path_absolute, - path_rootless, path_empty - ) - -# See http://tools.ietf.org/html/rfc3986#section-4.3 -ABSOLUTE_URI_MATCHER = re.compile('^%s:%s(\?%s)?$' % ( - component_pattern_dict['scheme'], hier_part, QUERY_MATCHER.pattern[1:-1] - )) - - -# Path merger as defined in http://tools.ietf.org/html/rfc3986#section-5.2.3 -def merge_paths(base_uri, relative_path): - """Merge a base URI's path with a relative URI's path.""" - if base_uri.path is None and base_uri.authority is not None: - return '/' + relative_path - else: - path = base_uri.path or '' - index = path.rfind('/') - return path[:index] + '/' + relative_path diff --git a/hyper/packages/rfc3986/normalizers.py b/hyper/packages/rfc3986/normalizers.py deleted file mode 100644 index bb0630cb..00000000 --- 
a/hyper/packages/rfc3986/normalizers.py +++ /dev/null @@ -1,115 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2014 Rackspace -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import re - -from .compat import to_bytes -from .misc import NON_PCT_ENCODED - - -def normalize_scheme(scheme): - return scheme.lower() - - -def normalize_authority(authority): - userinfo, host, port = authority - result = '' - if userinfo: - result += normalize_percent_characters(userinfo) + '@' - if host: - result += host.lower() - if port: - result += ':' + port - return result - - -def normalize_path(path): - if not path: - return path - - path = normalize_percent_characters(path) - return remove_dot_segments(path) - - -def normalize_query(query): - return normalize_percent_characters(query) - - -def normalize_fragment(fragment): - return normalize_percent_characters(fragment) - - -PERCENT_MATCHER = re.compile('%[A-Fa-f0-9]{2}') - - -def normalize_percent_characters(s): - """All percent characters should be upper-cased. - - For example, ``"%3afoo%DF%ab"`` should be turned into ``"%3Afoo%DF%AB"``. 
- """ - matches = set(PERCENT_MATCHER.findall(s)) - for m in matches: - if not m.isupper(): - s = s.replace(m, m.upper()) - return s - - -def remove_dot_segments(s): - # See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code - segments = s.split('/') # Turn the path into a list of segments - output = [] # Initialize the variable to use to store output - - for segment in segments: - # '.' is the current directory, so ignore it, it is superfluous - if segment == '.': - continue - # Anything other than '..', should be appended to the output - elif segment != '..': - output.append(segment) - # In this case segment == '..', if we can, we should pop the last - # element - elif output: - output.pop() - - # If the path starts with '/' and the output is empty or the first string - # is non-empty - if s.startswith('/') and (not output or output[0]): - output.insert(0, '') - - # If the path starts with '/.' or '/..' ensure we add one more empty - # string to add a trailing '/' - if s.endswith(('/.', '/..')): - output.append('') - - return '/'.join(output) - - -def encode_component(uri_component, encoding): - if uri_component is None: - return uri_component - - uri_bytes = to_bytes(uri_component, encoding) - - encoded_uri = bytearray() - - for i in range(0, len(uri_bytes)): - # Will return a single character bytestring on both Python 2 & 3 - byte = uri_bytes[i:i+1] - byte_ord = ord(byte) - if byte_ord < 128 and byte.decode() in NON_PCT_ENCODED: - encoded_uri.extend(byte) - continue - encoded_uri.extend('%{0:02x}'.format(byte_ord).encode()) - - return encoded_uri.decode(encoding) diff --git a/hyper/packages/rfc3986/parseresult.py b/hyper/packages/rfc3986/parseresult.py deleted file mode 100644 index 2def55b6..00000000 --- a/hyper/packages/rfc3986/parseresult.py +++ /dev/null @@ -1,303 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2015 Ian Cordasco -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in 
compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from collections import namedtuple - -from . import compat -from . import exceptions -from . import normalizers -from . import uri - -__all__ = ('ParseResult', 'ParseResultBytes') - -PARSED_COMPONENTS = ('scheme', 'userinfo', 'host', 'port', 'path', 'query', - 'fragment') - - -class ParseResultMixin(object): - def _generate_authority(self, attributes): - # I swear I did not align the comparisons below. That's just how they - # happened to align based on pep8 and attribute lengths. - userinfo, host, port = (attributes[p] - for p in ('userinfo', 'host', 'port')) - if (self.userinfo != userinfo or - self.host != host or - self.port != port): - if port: - port = '{0}'.format(port) - return normalizers.normalize_authority( - (compat.to_str(userinfo, self.encoding), - compat.to_str(host, self.encoding), - port) - ) - return self.authority - - def geturl(self): - """Standard library shim to the unsplit method.""" - return self.unsplit() - - @property - def hostname(self): - """Standard library shim for the host portion of the URI.""" - return self.host - - @property - def netloc(self): - """Standard library shim for the authority portion of the URI.""" - return self.authority - - @property - def params(self): - """Standard library shim for the query portion of the URI.""" - return self.query - - -class ParseResult(namedtuple('ParseResult', PARSED_COMPONENTS), - ParseResultMixin): - slots = () - - def __new__(cls, scheme, userinfo, host, port, path, query, fragment, - uri_ref, encoding='utf-8'): - parse_result = super(ParseResult, 
cls).__new__( - cls, - scheme or None, - userinfo or None, - host, - port or None, - path or None, - query or None, - fragment or None) - parse_result.encoding = encoding - parse_result.reference = uri_ref - return parse_result - - @classmethod - def from_string(cls, uri_string, encoding='utf-8', strict=True): - """Parse a URI from the given unicode URI string. - - :param str uri_string: Unicode URI to be parsed into a reference. - :param str encoding: The encoding of the string provided - :param bool strict: Parse strictly according to :rfc:`3986` if True. - If False, parse similarly to the standard library's urlparse - function. - :returns: :class:`ParseResult` or subclass thereof - """ - reference = uri.URIReference.from_string(uri_string, encoding) - try: - subauthority = reference.authority_info() - except exceptions.InvalidAuthority: - if strict: - raise - userinfo, host, port = split_authority(reference.authority) - else: - # Thanks to Richard Barrell for this idea: - # https://twitter.com/0x2ba22e11/status/617338811975139328 - userinfo, host, port = (subauthority.get(p) - for p in ('userinfo', 'host', 'port')) - - if port: - try: - port = int(port) - except ValueError: - raise exceptions.InvalidPort(port) - - return cls(scheme=reference.scheme, - userinfo=userinfo, - host=host, - port=port, - path=reference.path, - query=reference.query, - fragment=reference.fragment, - uri_ref=reference, - encoding=encoding) - - @property - def authority(self): - """Normalized authority generated from the subauthority parts.""" - return self.reference.authority - - def copy_with(self, scheme=None, userinfo=None, host=None, port=None, - path=None, query=None, fragment=None): - attributes = zip(PARSED_COMPONENTS, - (scheme, userinfo, host, port, path, query, fragment)) - attrs_dict = {} - for name, value in attributes: - if value is None: - value = getattr(self, name) - attrs_dict[name] = value - authority = self._generate_authority(attrs_dict) - ref = 
self.reference.copy_with(scheme=attrs_dict['scheme'], - authority=authority, - path=attrs_dict['path'], - query=attrs_dict['query'], - fragment=attrs_dict['fragment']) - return ParseResult(uri_ref=ref, encoding=self.encoding, **attrs_dict) - - def encode(self, encoding=None): - encoding = encoding or self.encoding - attrs = dict( - zip(PARSED_COMPONENTS, - (attr.encode(encoding) if hasattr(attr, 'encode') else attr - for attr in self))) - return ParseResultBytes( - uri_ref=self.reference, - encoding=encoding, - **attrs - ) - - def unsplit(self, use_idna=False): - """Create a URI string from the components. - - :returns: The parsed URI reconstituted as a string. - :rtype: str - """ - parse_result = self - if use_idna and self.host: - hostbytes = self.host.encode('idna') - host = hostbytes.decode(self.encoding) - parse_result = self.copy_with(host=host) - return parse_result.reference.unsplit() - - -class ParseResultBytes(namedtuple('ParseResultBytes', PARSED_COMPONENTS), - ParseResultMixin): - def __new__(cls, scheme, userinfo, host, port, path, query, fragment, - uri_ref, encoding='utf-8'): - parse_result = super(ParseResultBytes, cls).__new__( - cls, - scheme or None, - userinfo or None, - host, - port or None, - path or None, - query or None, - fragment or None) - parse_result.encoding = encoding - parse_result.reference = uri_ref - return parse_result - - @classmethod - def from_string(cls, uri_string, encoding='utf-8', strict=True): - """Parse a URI from the given unicode URI string. - - :param str uri_string: Unicode URI to be parsed into a reference. - :param str encoding: The encoding of the string provided - :param bool strict: Parse strictly according to :rfc:`3986` if True. - If False, parse similarly to the standard library's urlparse - function. 
- :returns: :class:`ParseResultBytes` or subclass thereof - """ - reference = uri.URIReference.from_string(uri_string, encoding) - try: - subauthority = reference.authority_info() - except exceptions.InvalidAuthority: - if strict: - raise - userinfo, host, port = split_authority(reference.authority) - else: - # Thanks to Richard Barrell for this idea: - # https://twitter.com/0x2ba22e11/status/617338811975139328 - userinfo, host, port = (subauthority.get(p) - for p in ('userinfo', 'host', 'port')) - - if port: - try: - port = int(port) - except ValueError: - raise exceptions.InvalidPort(port) - - to_bytes = compat.to_bytes - return cls(scheme=to_bytes(reference.scheme, encoding), - userinfo=to_bytes(userinfo, encoding), - host=to_bytes(host, encoding), - port=port, - path=to_bytes(reference.path, encoding), - query=to_bytes(reference.query, encoding), - fragment=to_bytes(reference.fragment, encoding), - uri_ref=reference, - encoding=encoding) - - @property - def authority(self): - """Normalized authority generated from the subauthority parts.""" - return self.reference.authority.encode(self.encoding) - - def copy_with(self, scheme=None, userinfo=None, host=None, port=None, - path=None, query=None, fragment=None): - attributes = zip(PARSED_COMPONENTS, - (scheme, userinfo, host, port, path, query, fragment)) - attrs_dict = {} - for name, value in attributes: - if value is None: - value = getattr(self, name) - if not isinstance(value, bytes) and hasattr(value, 'encode'): - value = value.encode(self.encoding) - attrs_dict[name] = value - authority = self._generate_authority(attrs_dict) - to_str = compat.to_str - ref = self.reference.copy_with( - scheme=to_str(attrs_dict['scheme'], self.encoding), - authority=authority, - path=to_str(attrs_dict['path'], self.encoding), - query=to_str(attrs_dict['query'], self.encoding), - fragment=to_str(attrs_dict['fragment'], self.encoding) - ) - return ParseResultBytes( - uri_ref=ref, - encoding=self.encoding, - **attrs_dict - ) - - 
def unsplit(self, use_idna=False): - """Create a URI bytes object from the components. - - :returns: The parsed URI reconstituted as a string. - :rtype: bytes - """ - parse_result = self - if use_idna and self.host: - # self.host is bytes, to encode to idna, we need to decode it - # first - host = self.host.decode(self.encoding) - hostbytes = host.encode('idna') - parse_result = self.copy_with(host=hostbytes) - uri = parse_result.reference.unsplit() - return uri.encode(self.encoding) - - -def split_authority(authority): - # Initialize our expected return values - userinfo = host = port = None - # Initialize an extra var we may need to use - extra_host = None - # Set-up rest in case there is no userinfo portion - rest = authority - - if '@' in authority: - userinfo, rest = authority.rsplit('@', 1) - - # Handle IPv6 host addresses - if rest.startswith('['): - host, rest = rest.split(']', 1) - host += ']' - - if ':' in rest: - extra_host, port = rest.split(':', 1) - elif not host and rest: - host = rest - - if extra_host and not host: - host = extra_host - - return userinfo, host, port diff --git a/hyper/packages/rfc3986/uri.py b/hyper/packages/rfc3986/uri.py deleted file mode 100644 index b7f5ccb7..00000000 --- a/hyper/packages/rfc3986/uri.py +++ /dev/null @@ -1,385 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2014 Rackspace -# Copyright (c) 2015 Ian Cordasco -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from collections import namedtuple - -from .compat import to_str -from .exceptions import InvalidAuthority, ResolutionError -from .misc import ( - ABSOLUTE_URI_MATCHER, FRAGMENT_MATCHER, IPv4_MATCHER, PATH_MATCHER, - QUERY_MATCHER, SCHEME_MATCHER, SUBAUTHORITY_MATCHER, URI_MATCHER, - URI_COMPONENTS, merge_paths - ) -from .normalizers import ( - encode_component, normalize_scheme, normalize_authority, normalize_path, - normalize_query, normalize_fragment - ) - - -class URIReference(namedtuple('URIReference', URI_COMPONENTS)): - slots = () - - def __new__(cls, scheme, authority, path, query, fragment, - encoding='utf-8'): - ref = super(URIReference, cls).__new__( - cls, - scheme or None, - authority or None, - path or None, - query or None, - fragment or None) - ref.encoding = encoding - return ref - - def __eq__(self, other): - other_ref = other - if isinstance(other, tuple): - other_ref = URIReference(*other) - elif not isinstance(other, URIReference): - try: - other_ref = URIReference.from_string(other) - except TypeError: - raise TypeError( - 'Unable to compare URIReference() to {0}()'.format( - type(other).__name__)) - - # See http://tools.ietf.org/html/rfc3986#section-6.2 - naive_equality = tuple(self) == tuple(other_ref) - return naive_equality or self.normalized_equality(other_ref) - - @classmethod - def from_string(cls, uri_string, encoding='utf-8'): - """Parse a URI reference from the given unicode URI string. - - :param str uri_string: Unicode URI to be parsed into a reference. 
- :param str encoding: The encoding of the string provided - :returns: :class:`URIReference` or subclass thereof - """ - uri_string = to_str(uri_string, encoding) - - split_uri = URI_MATCHER.match(uri_string).groupdict() - return cls(split_uri['scheme'], split_uri['authority'], - encode_component(split_uri['path'], encoding), - encode_component(split_uri['query'], encoding), - encode_component(split_uri['fragment'], encoding), encoding) - - def authority_info(self): - """Returns a dictionary with the ``userinfo``, ``host``, and ``port``. - - If the authority is not valid, it will raise a ``InvalidAuthority`` - Exception. - - :returns: - ``{'userinfo': 'username:password', 'host': 'www.example.com', - 'port': '80'}`` - :rtype: dict - :raises InvalidAuthority: If the authority is not ``None`` and can not - be parsed. - """ - if not self.authority: - return {'userinfo': None, 'host': None, 'port': None} - - match = SUBAUTHORITY_MATCHER.match(self.authority) - - if match is None: - # In this case, we have an authority that was parsed from the URI - # Reference, but it cannot be further parsed by our - # SUBAUTHORITY_MATCHER. In this case it must not be a valid - # authority. - raise InvalidAuthority(self.authority.encode(self.encoding)) - - # We had a match, now let's ensure that it is actually a valid host - # address if it is IPv4 - matches = match.groupdict() - host = matches.get('host') - - if (host and IPv4_MATCHER.match(host) and not - valid_ipv4_host_address(host)): - # If we have a host, it appears to be IPv4 and it does not have - # valid bytes, it is an InvalidAuthority. 
- raise InvalidAuthority(self.authority.encode(self.encoding)) - - return matches - - @property - def host(self): - """If present, a string representing the host.""" - try: - authority = self.authority_info() - except InvalidAuthority: - return None - return authority['host'] - - @property - def port(self): - """If present, the port (as a string) extracted from the authority.""" - try: - authority = self.authority_info() - except InvalidAuthority: - return None - return authority['port'] - - @property - def userinfo(self): - """If present, the userinfo extracted from the authority.""" - try: - authority = self.authority_info() - except InvalidAuthority: - return None - return authority['userinfo'] - - def is_absolute(self): - """Determine if this URI Reference is an absolute URI. - - See http://tools.ietf.org/html/rfc3986#section-4.3 for explanation. - - :returns: ``True`` if it is an absolute URI, ``False`` otherwise. - :rtype: bool - """ - return bool(ABSOLUTE_URI_MATCHER.match(self.unsplit())) - - def is_valid(self, **kwargs): - """Determines if the URI is valid. - - :param bool require_scheme: Set to ``True`` if you wish to require the - presence of the scheme component. - :param bool require_authority: Set to ``True`` if you wish to require - the presence of the authority component. - :param bool require_path: Set to ``True`` if you wish to require the - presence of the path component. - :param bool require_query: Set to ``True`` if you wish to require the - presence of the query component. - :param bool require_fragment: Set to ``True`` if you wish to require - the presence of the fragment component. - :returns: ``True`` if the URI is valid. ``False`` otherwise. 
- :rtype: bool - """ - validators = [ - (self.scheme_is_valid, kwargs.get('require_scheme', False)), - (self.authority_is_valid, kwargs.get('require_authority', False)), - (self.path_is_valid, kwargs.get('require_path', False)), - (self.query_is_valid, kwargs.get('require_query', False)), - (self.fragment_is_valid, kwargs.get('require_fragment', False)), - ] - return all(v(r) for v, r in validators) - - def _is_valid(self, value, matcher, require): - if require: - return (value is not None - and matcher.match(value)) - - # require is False and value is not None - return value is None or matcher.match(value) - - def authority_is_valid(self, require=False): - """Determines if the authority component is valid. - - :param str require: Set to ``True`` to require the presence of this - component. - :returns: ``True`` if the authority is valid. ``False`` otherwise. - :rtype: bool - """ - try: - self.authority_info() - except InvalidAuthority: - return False - - is_valid = self._is_valid(self.authority, - SUBAUTHORITY_MATCHER, - require) - - # Ensure that IPv4 addresses have valid bytes - if is_valid and self.host and IPv4_MATCHER.match(self.host): - return valid_ipv4_host_address(self.host) - - # Perhaps the host didn't exist or if it did, it wasn't an IPv4-like - # address. In either case, we want to rely on the `_is_valid` check, - # so let's return that. - return is_valid - - def scheme_is_valid(self, require=False): - """Determines if the scheme component is valid. - - :param str require: Set to ``True`` to require the presence of this - component. - :returns: ``True`` if the scheme is valid. ``False`` otherwise. - :rtype: bool - """ - return self._is_valid(self.scheme, SCHEME_MATCHER, require) - - def path_is_valid(self, require=False): - """Determines if the path component is valid. - - :param str require: Set to ``True`` to require the presence of this - component. - :returns: ``True`` if the path is valid. ``False`` otherwise. 
- :rtype: bool - """ - return self._is_valid(self.path, PATH_MATCHER, require) - - def query_is_valid(self, require=False): - """Determines if the query component is valid. - - :param str require: Set to ``True`` to require the presence of this - component. - :returns: ``True`` if the query is valid. ``False`` otherwise. - :rtype: bool - """ - return self._is_valid(self.query, QUERY_MATCHER, require) - - def fragment_is_valid(self, require=False): - """Determines if the fragment component is valid. - - :param str require: Set to ``True`` to require the presence of this - component. - :returns: ``True`` if the fragment is valid. ``False`` otherwise. - :rtype: bool - """ - return self._is_valid(self.fragment, FRAGMENT_MATCHER, require) - - def normalize(self): - """Normalize this reference as described in Section 6.2.2 - - This is not an in-place normalization. Instead this creates a new - URIReference. - - :returns: A new reference object with normalized components. - :rtype: URIReference - """ - # See http://tools.ietf.org/html/rfc3986#section-6.2.2 for logic in - # this method. - return URIReference(normalize_scheme(self.scheme or ''), - normalize_authority( - (self.userinfo, self.host, self.port)), - normalize_path(self.path or ''), - normalize_query(self.query or ''), - normalize_fragment(self.fragment or '')) - - def normalized_equality(self, other_ref): - """Compare this URIReference to another URIReference. - - :param URIReference other_ref: (required), The reference with which - we're comparing. - :returns: ``True`` if the references are equal, ``False`` otherwise. - :rtype: bool - """ - return tuple(self.normalize()) == tuple(other_ref.normalize()) - - def resolve_with(self, base_uri, strict=False): - """Use an absolute URI Reference to resolve this relative reference. - - Assuming this is a relative reference that you would like to resolve, - use the provided base URI to resolve it. - - See http://tools.ietf.org/html/rfc3986#section-5 for more information. 
- - :param base_uri: Either a string or URIReference. It must be an - absolute URI or it will raise an exception. - :returns: A new URIReference which is the result of resolving this - reference using ``base_uri``. - :rtype: :class:`URIReference` - :raises ResolutionError: If the ``base_uri`` is not an absolute URI. - """ - if not isinstance(base_uri, URIReference): - base_uri = URIReference.from_string(base_uri) - - if not base_uri.is_absolute(): - raise ResolutionError(base_uri) - - # This is optional per - # http://tools.ietf.org/html/rfc3986#section-5.2.1 - base_uri = base_uri.normalize() - - # The reference we're resolving - resolving = self - - if not strict and resolving.scheme == base_uri.scheme: - resolving = resolving.copy_with(scheme=None) - - # http://tools.ietf.org/html/rfc3986#page-32 - if resolving.scheme is not None: - target = resolving.copy_with(path=normalize_path(resolving.path)) - else: - if resolving.authority is not None: - target = resolving.copy_with( - scheme=base_uri.scheme, - path=normalize_path(resolving.path) - ) - else: - if resolving.path is None: - if resolving.query is not None: - query = resolving.query - else: - query = base_uri.query - target = resolving.copy_with( - scheme=base_uri.scheme, - authority=base_uri.authority, - path=base_uri.path, - query=query - ) - else: - if resolving.path.startswith('/'): - path = normalize_path(resolving.path) - else: - path = normalize_path( - merge_paths(base_uri, resolving.path) - ) - target = resolving.copy_with( - scheme=base_uri.scheme, - authority=base_uri.authority, - path=path, - query=resolving.query - ) - return target - - def unsplit(self): - """Create a URI string from the components. - - :returns: The URI Reference reconstituted as a string. 
- :rtype: str - """ - # See http://tools.ietf.org/html/rfc3986#section-5.3 - result_list = [] - if self.scheme: - result_list.extend([self.scheme, ':']) - if self.authority: - result_list.extend(['//', self.authority]) - if self.path: - result_list.append(self.path) - if self.query: - result_list.extend(['?', self.query]) - if self.fragment: - result_list.extend(['#', self.fragment]) - return ''.join(result_list) - - def copy_with(self, scheme=None, authority=None, path=None, query=None, - fragment=None): - attributes = { - 'scheme': scheme, - 'authority': authority, - 'path': path, - 'query': query, - 'fragment': fragment, - } - for key, value in list(attributes.items()): - if value is None: - del attributes[key] - return self._replace(**attributes) - - -def valid_ipv4_host_address(host): - # If the host exists, and it might be IPv4, check each byte in the - # address. - return all([0 <= int(byte, base=10) <= 255 for byte in host.split('.')]) diff --git a/hyper/ssl_compat.py b/hyper/ssl_compat.py index 976b6235..97e6fb2e 100644 --- a/hyper/ssl_compat.py +++ b/hyper/ssl_compat.py @@ -48,6 +48,7 @@ def inner(self, *args, **kwargs): return getattr(self._conn, method)(*args, **kwargs) return inner + # Referenced in hyper/http20/connection.py. 
These values come # from the python ssl package, and must be defined in this file # for hyper to work in python versions <2.7.9 @@ -187,7 +188,7 @@ def resolve_alias(alias): C='countryName', ST='stateOrProvinceName', L='localityName', - O='organizationName', + O='organizationName', # noqa: E741 OU='organizationalUnitName', CN='commonName', ).get(alias, alias) diff --git a/setup.cfg b/setup.cfg index 5e409001..53d397a8 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,2 +1,5 @@ [wheel] universal = 1 + +[flake8] +max-complexity = 15 diff --git a/setup.py b/setup.py index 0d36afaa..94cd8d21 100644 --- a/setup.py +++ b/setup.py @@ -49,8 +49,6 @@ def run_tests(self): 'hyper.http20', 'hyper.common', 'hyper.http11', - 'hyper.packages', - 'hyper.packages.rfc3986' ] setup( @@ -78,7 +76,9 @@ def run_tests(self): 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: Implementation :: CPython', ], - install_requires=['h2>=2.4,<3.0', 'hyperframe>=3.2,<4.0'], + install_requires=[ + 'h2>=2.4,<3.0,!=2.5.0', 'hyperframe>=3.2,<4.0', 'rfc3986>=1.1.0,<2.0', 'brotlipy>=0.7.0,<1.0' + ], tests_require=['pytest', 'requests', 'mock'], cmdclass={'test': PyTest}, entry_points={ @@ -96,6 +96,7 @@ def run_tests(self): # module at lower than 1.0, because it doesn't support CFFI v1.0 yet. 
':platform_python_implementation == "PyPy" and python_full_version < "2.7.9"': [ 'cryptography<1.0' - ] + ], + ':python_version == "2.7" or python_version == "3.3"': ['enum34>=1.0.4, <2'] } ) diff --git a/test/server.py b/test/server.py index 3f6ded4a..edc28755 100644 --- a/test/server.py +++ b/test/server.py @@ -15,6 +15,7 @@ import threading import socket import sys +from enum import Enum from hyper import HTTP20Connection from hyper.compat import ssl @@ -27,6 +28,23 @@ from hyper.tls import NPN_PROTOCOL +class SocketSecuritySetting(Enum): + """ + Server socket TLS wrapping strategy: + + SECURE - automatically wrap socket + INSECURE - never wrap + SECURE_NO_AUTO_WRAP - init context, but socket must be wrapped manually + + The values are needed to be able to convert ``secure`` boolean flag of + a client to a member of this enum: + ``socket_security = SocketSecuritySetting(secure)`` + """ + SECURE = True + INSECURE = False + SECURE_NO_AUTO_WRAP = 'NO_AUTO_WRAP' + + class SocketServerThread(threading.Thread): """ This method stolen wholesale from shazow/urllib3 under license. 
See @@ -42,16 +60,17 @@ def __init__(self, host='localhost', ready_event=None, h2=True, - secure=True): + socket_security=SocketSecuritySetting.SECURE): threading.Thread.__init__(self) self.socket_handler = socket_handler self.host = host - self.secure = secure + self.socket_security = socket_security self.ready_event = ready_event self.daemon = True - if self.secure: + if self.socket_security in (SocketSecuritySetting.SECURE, + SocketSecuritySetting.SECURE_NO_AUTO_WRAP): self.cxt = ssl.SSLContext(ssl.PROTOCOL_SSLv23) if ssl.HAS_NPN and h2: self.cxt.set_npn_protocols([NPN_PROTOCOL]) @@ -63,8 +82,8 @@ def _start_server(self): if sys.platform != 'win32': sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - if self.secure: - sock = self.cxt.wrap_socket(sock, server_side=True) + if self.socket_security == SocketSecuritySetting.SECURE: + sock = self.wrap_socket(sock) sock.bind((self.host, 0)) self.port = sock.getsockname()[1] @@ -77,8 +96,8 @@ def _start_server(self): self.socket_handler(sock) sock.close() - def _wrap_socket(self, sock): - raise NotImplementedError() + def wrap_socket(self, sock): + return self.cxt.wrap_socket(sock, server_side=True) def run(self): self.server = self._start_server() @@ -89,12 +108,13 @@ class SocketLevelTest(object): A test-class that defines a few helper methods for running socket-level tests. 
""" - def set_up(self, secure=True, proxy=False): + def set_up(self, secure=True, proxy=False, timeout=None): self.host = None self.port = None - self.secure = secure if not proxy else False + self.socket_security = SocketSecuritySetting(secure) self.proxy = proxy self.server_thread = None + self.timeout = timeout def _start_server(self, socket_handler): """ @@ -105,30 +125,44 @@ def _start_server(self, socket_handler): socket_handler=socket_handler, ready_event=ready_event, h2=self.h2, - secure=self.secure + socket_security=self.socket_security ) self.server_thread.start() ready_event.wait() self.host = self.server_thread.host self.port = self.server_thread.port - self.secure = self.server_thread.secure + self.socket_security = self.server_thread.socket_security + + @property + def secure(self): + return self.socket_security in \ + (SocketSecuritySetting.SECURE, + SocketSecuritySetting.SECURE_NO_AUTO_WRAP) + + @secure.setter + def secure(self, value): + self.socket_security = SocketSecuritySetting(value) def get_connection(self): if self.h2: if not self.proxy: - return HTTP20Connection(self.host, self.port, self.secure) + return HTTP20Connection(self.host, self.port, self.secure, + timeout=self.timeout) else: return HTTP20Connection('http2bin.org', secure=self.secure, proxy_host=self.host, - proxy_port=self.port) + proxy_port=self.port, + timeout=self.timeout) else: if not self.proxy: - return HTTP11Connection(self.host, self.port, self.secure) + return HTTP11Connection(self.host, self.port, self.secure, + timeout=self.timeout) else: return HTTP11Connection('httpbin.org', secure=self.secure, proxy_host=self.host, - proxy_port=self.port) + proxy_port=self.port, + timeout=self.timeout) def get_encoder(self): """ diff --git a/test/test_SSLContext.py b/test/test_SSLContext.py index 4add16f3..e6051af7 100644 --- a/test/test_SSLContext.py +++ b/test/test_SSLContext.py @@ -40,7 +40,6 @@ def test_custom_context(self): assert not hyper.tls._context.check_hostname assert 
hyper.tls._context.verify_mode == ssl.CERT_NONE - assert hyper.tls._context.options & ssl.OP_NO_COMPRESSION == 0 def test_HTTPConnection_with_custom_context(self): context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) @@ -70,7 +69,7 @@ def test_missing_certs(self): succeeded = True except hyper.common.exceptions.MissingCertFile: threw_expected_exception = True - except: + except Exception: pass assert not succeeded diff --git a/test/test_abstraction.py b/test/test_abstraction.py index cd0e0645..00ee16ec 100644 --- a/test/test_abstraction.py +++ b/test/test_abstraction.py @@ -10,7 +10,7 @@ def test_h1_kwargs(self): c = HTTPConnection( 'test', 443, secure=False, window_manager=True, enable_push=True, ssl_context=False, proxy_host=False, proxy_port=False, - other_kwarg=True + proxy_headers=False, other_kwarg=True, timeout=5 ) assert c._h1_kwargs == { @@ -18,14 +18,17 @@ def test_h1_kwargs(self): 'ssl_context': False, 'proxy_host': False, 'proxy_port': False, + 'proxy_headers': False, 'other_kwarg': True, + 'enable_push': True, + 'timeout': 5, } def test_h2_kwargs(self): c = HTTPConnection( 'test', 443, secure=False, window_manager=True, enable_push=True, ssl_context=True, proxy_host=False, proxy_port=False, - other_kwarg=True + proxy_headers=False, other_kwarg=True, timeout=(10, 30) ) assert c._h2_kwargs == { @@ -35,7 +38,9 @@ def test_h2_kwargs(self): 'ssl_context': True, 'proxy_host': False, 'proxy_port': False, + 'proxy_headers': False, 'other_kwarg': True, + 'timeout': (10, 30), } def test_tls_upgrade(self, monkeypatch): diff --git a/test/test_http11.py b/test/test_http11.py index d4039588..9f3fd3d0 100644 --- a/test/test_http11.py +++ b/test/test_http11.py @@ -7,6 +7,7 @@ """ import os import zlib +import brotli from collections import namedtuple from io import BytesIO, StringIO @@ -19,6 +20,7 @@ from hyper.http11.response import HTTP11Response from hyper.common.headers import HTTPHeaderMap from hyper.common.exceptions import ChunkedDecodeError, ConnectionResetError 
+from hyper.common.util import HTTPVersion from hyper.compat import bytes, zlib_compressobj @@ -109,6 +111,16 @@ def test_initialization_with_ipv6_addresses_proxy_inline_port(self): assert c.proxy_host == 'ffff:aaaa::1' assert c.proxy_port == 8443 + def test_initialization_timeout(self): + c = HTTP11Connection('httpbin.org', timeout=30) + + assert c._timeout == 30 + + def test_initialization_tuple_timeout(self): + c = HTTP11Connection('httpbin.org', timeout=(5, 60)) + + assert c._timeout == (5, 60) + def test_basic_request(self): c = HTTP11Connection('httpbin.org') c._sock = sock = DummySocket() @@ -169,7 +181,26 @@ def test_proxy_request(self): c.request('GET', '/get', headers={'User-Agent': 'hyper'}) expected = ( - b"GET /get HTTP/1.1\r\n" + b"GET http://httpbin.org/get HTTP/1.1\r\n" + b"User-Agent: hyper\r\n" + b"connection: Upgrade, HTTP2-Settings\r\n" + b"upgrade: h2c\r\n" + b"HTTP2-Settings: AAQAAP__\r\n" + b"host: httpbin.org\r\n" + b"\r\n" + ) + received = b''.join(sock.queue) + + assert received == expected + + def test_proxy_request_with_non_standard_port(self): + c = HTTP11Connection('httpbin.org:8080', proxy_host='localhost') + c._sock = sock = DummySocket() + + c.request('GET', '/get', headers={'User-Agent': 'hyper'}) + + expected = ( + b"GET http://httpbin.org:8080/get HTTP/1.1\r\n" b"User-Agent: hyper\r\n" b"connection: Upgrade, HTTP2-Settings\r\n" b"upgrade: h2c\r\n" @@ -181,6 +212,46 @@ def test_proxy_request(self): assert received == expected + def test_proxy_headers_presence_for_insecure_request(self): + c = HTTP11Connection( + 'httpbin.org', secure=False, proxy_host='localhost', + proxy_headers={'Proxy-Authorization': 'Basic ==='}) + c._sock = sock = DummySocket() + + c.request('GET', '/get', headers={'User-Agent': 'hyper'}) + + expected = ( + b"GET http://httpbin.org/get HTTP/1.1\r\n" + b"User-Agent: hyper\r\n" + b"proxy-authorization: Basic ===\r\n" + b"connection: Upgrade, HTTP2-Settings\r\n" + b"upgrade: h2c\r\n" + b"HTTP2-Settings: 
AAQAAP__\r\n" + b"host: httpbin.org\r\n" + b"\r\n" + ) + received = b''.join(sock.queue) + + assert received == expected + + def test_proxy_headers_absence_for_secure_request(self): + c = HTTP11Connection( + 'httpbin.org', secure=True, proxy_host='localhost', + proxy_headers={'Proxy-Authorization': 'Basic ==='}) + c._sock = sock = DummySocket() + + c.request('GET', '/get', headers={'User-Agent': 'hyper'}) + + expected = ( + b"GET /get HTTP/1.1\r\n" + b"User-Agent: hyper\r\n" + b"host: httpbin.org\r\n" + b"\r\n" + ) + received = b''.join(sock.queue) + + assert received == expected + def test_request_with_bytestring_body(self): c = HTTP11Connection('httpbin.org') c._sock = sock = DummySocket() @@ -328,6 +399,23 @@ def test_chunked_overrides_body(self): assert received == expected + def test_response_with_empty_reason(self): + c = HTTP11Connection('httpbin.org') + c._sock = sock = DummySocket() + + sock._buffer = BytesIO( + b"HTTP/1.1 201 \r\n" + b"Connection: close\r\n" + b"Server: Socket\r\n" + b"Content-Length: 0\r\n" + b"\r\n" + ) + + r = c.get_response() + + assert r.status == 201 + assert r.reason == b'' + def test_get_response(self): c = HTTP11Connection('httpbin.org') c._sock = sock = DummySocket() @@ -511,6 +599,10 @@ def read(self, size): assert 'File-like bodies must return bytestrings. 
' \ 'Got: {}'.format(int) in str(exc_info) + def test_close_with_uninitialized_socket(self): + c = HTTP11Connection('httpbin.org') + c.close() + class TestHTTP11Response(object): def test_short_circuit_read(self): @@ -546,13 +638,23 @@ def test_response_transparently_decrypts_gzip(self): headers = {b'content-encoding': [b'gzip'], b'connection': [b'close']} r = HTTP11Response(200, 'OK', headers, d, None) - c = zlib_compressobj(wbits=24) + c = zlib_compressobj(wbits=25) body = c.compress(b'this is test data') body += c.flush() d._buffer = BytesIO(body) assert r.read() == b'this is test data' + def test_response_transparently_decrypts_brotli(self): + d = DummySocket() + headers = {b'content-encoding': [b'br'], b'connection': [b'close']} + r = HTTP11Response(200, 'OK', headers, d, None) + + body = brotli.compress(b'this is test data') + d._buffer = BytesIO(body) + + assert r.read() == b'this is test data' + def test_response_transparently_decrypts_real_deflate(self): d = DummySocket() headers = { @@ -628,7 +730,7 @@ def test_response_transparently_decrypts_chunked_gzip(self): } r = HTTP11Response(200, 'OK', headers, d, None) - c = zlib_compressobj(wbits=24) + c = zlib_compressobj(wbits=25) body = c.compress(b'this is test data') body += c.flush() @@ -713,7 +815,7 @@ def test_bounded_read_expect_close_with_content_length(self): def test_compressed_bounded_read_expect_close(self): headers = {b'connection': [b'close'], b'content-encoding': [b'gzip']} - c = zlib_compressobj(wbits=24) + c = zlib_compressobj(wbits=25) body = c.compress(b'hello there sir') body += c.flush() @@ -838,6 +940,30 @@ def test_closing_chunked_reads_dont_call_close_callback(self): assert r._sock is None assert connection.close.call_count == 1 + def test_connection_version(self): + c = HTTP11Connection('httpbin.org') + assert c.version is HTTPVersion.http11 + + def test_response_version(self): + d = DummySocket() + headers = { + b'transfer-encoding': [b'chunked'], b'connection': [b'close'] + } + r = 
HTTP11Response(200, 'OK', headers, d) + assert r.version is HTTPVersion.http11 + + def test_response_body_length(self): + methods = [b'HEAD', b'GET'] + headers = {b'content-length': [b'15']} + d = DummySocket() + for method in methods: + d.queue = [] + r = HTTP11Response(200, 'OK', headers, d, request_method=method) + if method == b'HEAD': + assert r._length == 0 + else: + assert r._length == int(r.headers[b'content-length'][0]) + class DummySocket(object): def __init__(self): diff --git a/test/test_http20.py b/test/test_http20.py new file mode 100644 index 00000000..d187c315 --- /dev/null +++ b/test/test_http20.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +""" +test_http20.py +~~~~~~~~~~~~~~ + +Unit tests for hyper's HTTP/2.0 implementation. +""" +import pytest +from mock import patch + +from server import SocketLevelTest + + +class TestHTTP20Connection(SocketLevelTest): + h2 = True + + def test_useful_error_with_no_protocol(self): + self.set_up() + + def socket_handler(listener): + sock = listener.accept()[0] + sock.close() + + self._start_server(socket_handler) + conn = self.get_connection() + + with patch('hyper.http20.connection.wrap_socket') as mock: + mock.return_value = (None, None) + with pytest.raises(AssertionError) as exc_info: + conn.connect() + assert ( + "No suitable protocol found." + in + str(exc_info) + ) + assert ( + "Check your OpenSSL version." 
+ in + str(exc_info) + ) + + self.tear_down() diff --git a/test/test_hyper.py b/test/test_hyper.py index da0aad57..b826c63c 100644 --- a/test/test_hyper.py +++ b/test/test_hyper.py @@ -9,6 +9,7 @@ PingFrame, FRAME_MAX_ALLOWED_LEN ) from hpack.hpack_compat import Encoder +from hyper.common.connection import HTTPConnection from hyper.http20.connection import HTTP20Connection from hyper.http20.response import HTTP20Response, HTTP20Push from hyper.http20.exceptions import ConnectionError, StreamResetError @@ -16,8 +17,8 @@ combine_repeated_headers, split_repeated_headers, h2_safe_headers ) from hyper.common.headers import HTTPHeaderMap -from hyper.common.util import to_bytestring -from hyper.compat import zlib_compressobj, is_py2 +from hyper.common.util import to_bytestring, HTTPVersion +from hyper.compat import zlib_compressobj, is_py2, ssl from hyper.contrib import HTTP20Adapter import hyper.http20.errors as errors import errno @@ -25,11 +26,13 @@ import pytest import socket import zlib +import brotli from io import BytesIO TEST_DIR = os.path.abspath(os.path.dirname(__file__)) TEST_CERTS_DIR = os.path.join(TEST_DIR, 'certs') CLIENT_PEM_FILE = os.path.join(TEST_CERTS_DIR, 'nopassword.pem') +SERVER_CERT_FILE = os.path.join(TEST_CERTS_DIR, 'server.crt') def decode_frame(frame_data): @@ -65,6 +68,16 @@ def test_connections_accept_proxy_hosts_and_ports(self): assert c.proxy_host == 'localhost' assert c.proxy_port == 8443 + def test_connections_can_parse_proxy_hosts_with_userinfo(self): + c = HTTP20Connection('www.google.com', + proxy_host='azAz09!==:fakepaswd@localhost:8443') + # Note that the userinfo part is getting stripped out, + # it's not automatically added as Basic Auth header to + # the proxy_headers! It should be done manually. 
+ assert c.host == 'www.google.com' + assert c.proxy_host == 'localhost' + assert c.proxy_port == 8443 + def test_connections_can_parse_proxy_hosts_and_ports(self): c = HTTP20Connection('www.google.com', proxy_host='localhost', @@ -82,6 +95,20 @@ def test_connections_can_parse_ipv6_hosts_and_ports(self): assert c.proxy_host == 'ffff:aaaa::1' assert c.proxy_port == 8443 + def test_connection_version(self): + c = HTTP20Connection('www.google.com') + assert c.version is HTTPVersion.http20 + + def test_connection_timeout(self): + c = HTTP20Connection('httpbin.org', timeout=30) + + assert c._timeout == 30 + + def test_connection_tuple_timeout(self): + c = HTTP20Connection('httpbin.org', timeout=(5, 60)) + + assert c._timeout == (5, 60) + def test_ping(self, frame_buffer): def data_callback(chunk, **kwargs): frame_buffer.add_data(chunk) @@ -185,6 +212,61 @@ def data_callback(chunk, **kwargs): assert frames[1].data == b'hello there' assert frames[1].flags == set(['END_STREAM']) + def test_request_correctly_sent_max_chunk(self, frame_buffer): + """ + Test that request correctly sent when data length multiple + max chunk. We check last chunk has a end flag and correct number + of chunks. 
+ """ + def data_callback(chunk, **kwargs): + frame_buffer.add_data(chunk) + + # one chunk + c = HTTP20Connection('www.google.com') + c._sock = DummySocket() + c._send_cb = data_callback + c.putrequest('GET', '/') + c.endheaders(message_body=b'1'*1024, final=True) + + frames = list(frame_buffer) + assert len(frames) == 2 + assert isinstance(frames[1], DataFrame) + assert frames[1].flags == set(['END_STREAM']) + + # two chunks + c = HTTP20Connection('www.google.com') + c._sock = DummySocket() + c._send_cb = data_callback + c.putrequest('GET', '/') + c.endheaders(message_body=b'1' * 2024, final=True) + + frames = list(frame_buffer) + assert len(frames) == 3 + assert isinstance(frames[1], DataFrame) + assert frames[2].flags == set(['END_STREAM']) + + # two chunks with last chunk < 1024 + c = HTTP20Connection('www.google.com') + c._sock = DummySocket() + c._send_cb = data_callback + c.putrequest('GET', '/') + c.endheaders(message_body=b'1' * 2000, final=True) + + frames = list(frame_buffer) + assert len(frames) == 3 + assert isinstance(frames[1], DataFrame) + assert frames[2].flags == set(['END_STREAM']) + + # no chunks + c = HTTP20Connection('www.google.com') + c._sock = DummySocket() + c._send_cb = data_callback + c.putrequest('GET', '/') + c.endheaders(message_body=b'', final=True) + + frames = list(frame_buffer) + assert len(frames) == 1 + def test_that_we_correctly_send_over_the_socket(self): sock = DummySocket() c = HTTP20Connection('www.google.com') @@ -579,6 +661,41 @@ def test_that_using_proxy_keeps_http_headers_intact(self): (b':path', b'/'), ] + def test_proxy_headers_presence_for_insecure_request(self): + sock = DummySocket() + c = HTTP20Connection( + 'www.google.com', secure=False, proxy_host='localhost', + proxy_headers={'Proxy-Authorization': 'Basic ==='} + ) + c._sock = sock + c.request('GET', '/') + s = c.recent_stream + + assert list(s.headers.items()) == [ + (b':method', b'GET'), + (b':scheme', b'http'), + (b':authority', b'www.google.com'), + 
(b':path', b'/'), + (b'proxy-authorization', b'Basic ==='), + ] + + def test_proxy_headers_absence_for_secure_request(self): + sock = DummySocket() + c = HTTP20Connection( + 'www.google.com', secure=True, proxy_host='localhost', + proxy_headers={'Proxy-Authorization': 'Basic ==='} + ) + c._sock = sock + c.request('GET', '/') + s = c.recent_stream + + assert list(s.headers.items()) == [ + (b':method', b'GET'), + (b':scheme', b'https'), + (b':authority', b'www.google.com'), + (b':path', b'/'), + ] + def test_recv_cb_n_times(self): sock = DummySocket() sock.can_read = True @@ -695,7 +812,7 @@ def test_incrementing_window_after_close(self): assert len(originally_sent_data) + 1 == len(c._sock.queue) -class TestServerPush(object): +class FrameEncoderMixin(object): def setup_method(self, method): self.frames = [] self.encoder = Encoder() @@ -727,8 +844,10 @@ def add_data_frame(self, stream_id, data, end_stream=False): frame.flags.add('END_STREAM') self.frames.append(frame) - def request(self): - self.conn = HTTP20Connection('www.google.com', enable_push=True) + +class TestServerPush(FrameEncoderMixin): + def request(self, enable_push=True): + self.conn = HTTP20Connection('www.google.com', enable_push=enable_push) self.conn._sock = DummySocket() self.conn._sock.buffer = BytesIO( b''.join([frame.serialize() for frame in self.frames]) @@ -930,8 +1049,7 @@ def test_reset_pushed_streams_when_push_disabled(self): 1, [(':status', '200'), ('content-type', 'text/html')] ) - self.request() - self.conn._enable_push = False + self.request(False) self.conn.get_response() f = RstStreamFrame(2) @@ -964,13 +1082,22 @@ def test_response_transparently_decrypts_gzip(self): headers = HTTPHeaderMap( [(':status', '200'), ('content-encoding', 'gzip')] ) - c = zlib_compressobj(wbits=24) + c = zlib_compressobj(wbits=25) body = c.compress(b'this is test data') body += c.flush() resp = HTTP20Response(headers, DummyStream(body)) assert resp.read() == b'this is test data' + def 
test_response_transparently_decrypts_brotli(self): + headers = HTTPHeaderMap( + [(':status', '200'), ('content-encoding', 'br')] + ) + body = brotli.compress(b'this is test data') + resp = HTTP20Response(headers, DummyStream(body)) + + assert resp.read() == b'this is test data' + def test_response_transparently_decrypts_real_deflate(self): headers = HTTPHeaderMap( [(':status', '200'), ('content-encoding', 'deflate')] @@ -993,6 +1120,15 @@ def test_response_transparently_decrypts_wrong_deflate(self): assert resp.read() == b'this is test data' + def test_response_ignored_unsupported_compression(self): + headers = HTTPHeaderMap( + [(':status', '200'), ('content-encoding', 'invalid')] + ) + body = b'this is test data' + resp = HTTP20Response(headers, DummyStream(body)) + + assert resp.read() == b'this is test data' + def test_response_calls_stream_close(self): headers = HTTPHeaderMap([(':status', '200')]) stream = DummyStream('') @@ -1082,7 +1218,7 @@ def test_read_compressed_frames(self): headers = HTTPHeaderMap( [(':status', '200'), ('content-encoding', 'gzip')] ) - c = zlib_compressobj(wbits=24) + c = zlib_compressobj(wbits=25) body = c.compress(b'this is test data') body += c.flush() @@ -1097,6 +1233,10 @@ def test_read_compressed_frames(self): assert received == b'this is test data' + def test_response_version(self): + r = HTTP20Response(HTTPHeaderMap([(':status', '200')]), None) + assert r.version is HTTPVersion.http20 + class TestHTTP20Adapter(object): def test_adapter_reuses_connections(self): @@ -1119,6 +1259,29 @@ def test_adapter_accept_client_certificate(self): 'http', cert=CLIENT_PEM_FILE) assert conn1 is conn2 + assert conn1._conn.ssl_context.check_hostname + assert conn1._conn.ssl_context.verify_mode == ssl.CERT_REQUIRED + + def test_adapter_respects_disabled_ca_verification(self): + a = HTTP20Adapter() + conn = a.get_connection( + 'http2bin.org', + 80, + 'http', + verify=False, + cert=CLIENT_PEM_FILE) + assert not conn._conn.ssl_context.check_hostname + 
assert conn._conn.ssl_context.verify_mode == ssl.CERT_NONE + + def test_adapter_respects_custom_ca_verification(self): + a = HTTP20Adapter() + conn = a.get_connection( + 'http2bin.org', + 80, + 'http', + verify=SERVER_CERT_FILE) + assert conn._conn.ssl_context.check_hostname + assert conn._conn.ssl_context.verify_mode == ssl.CERT_REQUIRED class TestUtilities(object): @@ -1168,27 +1331,27 @@ def test_nghttp2_installs_correctly(self): assert True def test_stripping_connection_header(self): - headers = [('one', 'two'), ('connection', 'close')] - stripped = [('one', 'two')] + headers = [(b'one', b'two'), (b'connection', b'close')] + stripped = [(b'one', b'two')] assert h2_safe_headers(headers) == stripped def test_stripping_related_headers(self): headers = [ - ('one', 'two'), ('three', 'four'), ('five', 'six'), - ('connection', 'close, three, five') + (b'one', b'two'), (b'three', b'four'), (b'five', b'six'), + (b'connection', b'close, three, five') ] - stripped = [('one', 'two')] + stripped = [(b'one', b'two')] assert h2_safe_headers(headers) == stripped def test_stripping_multiple_connection_headers(self): headers = [ - ('one', 'two'), ('three', 'four'), ('five', 'six'), - ('connection', 'close'), - ('connection', 'three, five') + (b'one', b'two'), (b'three', b'four'), (b'five', b'six'), + (b'connection', b'close'), + (b'connection', b'three, five') ] - stripped = [('one', 'two')] + stripped = [(b'one', b'two')] assert h2_safe_headers(headers) == stripped @@ -1295,6 +1458,158 @@ def test_resetting_streams_after_close(self): c._single_read() +class TestUpgradingPush(FrameEncoderMixin): + http101 = (b"HTTP/1.1 101 Switching Protocols\r\n" + b"Connection: upgrade\r\n" + b"Upgrade: h2c\r\n" + b"\r\n") + + def request(self, enable_push=True): + self.frames = [SettingsFrame(0)] + self.frames # Server side preface + self.conn = HTTPConnection('www.google.com', enable_push=enable_push) + self.conn._conn._sock = DummySocket() + self.conn._conn._sock.buffer = BytesIO( + 
self.http101 + b''.join([frame.serialize() + for frame in self.frames]) + ) + self.conn.request('GET', '/') + + def assert_response(self): + self.response = self.conn.get_response() + assert self.response.status == 200 + assert dict(self.response.headers) == {b'content-type': [b'text/html']} + + def assert_pushes(self): + self.pushes = list(self.conn.get_pushes()) + assert len(self.pushes) == 1 + assert self.pushes[0].method == b'GET' + assert self.pushes[0].scheme == b'http' + assert self.pushes[0].authority == b'www.google.com' + assert self.pushes[0].path == b'/' + expected_headers = {b'accept-encoding': [b'gzip']} + assert dict(self.pushes[0].request_headers) == expected_headers + + def assert_push_response(self): + push_response = self.pushes[0].get_response() + assert push_response.status == 200 + assert dict(push_response.headers) == { + b'content-type': [b'application/javascript'] + } + assert push_response.read() == b'bar' + + def test_promise_before_headers(self): + # Current implementation only support get_pushes call + # after get_response + pass + + def test_promise_after_headers(self): + self.add_headers_frame( + 1, [(':status', '200'), ('content-type', 'text/html')] + ) + self.add_push_frame( + 1, + 2, + [ + (':method', 'GET'), + (':path', '/'), + (':authority', 'www.google.com'), + (':scheme', 'http'), + ('accept-encoding', 'gzip') + ] + ) + self.add_data_frame(1, b'foo', end_stream=True) + self.add_headers_frame( + 2, [(':status', '200'), ('content-type', 'application/javascript')] + ) + self.add_data_frame(2, b'bar', end_stream=True) + + self.request() + self.assert_response() + assert self.response.read() == b'foo' + self.assert_pushes() + self.assert_push_response() + + def test_promise_after_data(self): + self.add_headers_frame( + 1, [(':status', '200'), ('content-type', 'text/html')] + ) + self.add_data_frame(1, b'fo') + self.add_push_frame( + 1, + 2, + [ + (':method', 'GET'), + (':path', '/'), + (':authority', 'www.google.com'), + (':scheme', 
'http'), + ('accept-encoding', 'gzip') + ] + ) + self.add_data_frame(1, b'o', end_stream=True) + self.add_headers_frame( + 2, [(':status', '200'), ('content-type', 'application/javascript')] + ) + self.add_data_frame(2, b'bar', end_stream=True) + + self.request() + self.assert_response() + assert self.response.read() == b'foo' + self.assert_pushes() + self.assert_push_response() + + def test_capture_all_promises(self): + # Current implementation does not support capture_all + # for h2c upgrading connection. + pass + + def test_cancel_push(self): + self.add_push_frame( + 1, + 2, + [ + (':method', 'GET'), + (':path', '/'), + (':authority', 'www.google.com'), + (':scheme', 'http'), + ('accept-encoding', 'gzip') + ] + ) + self.add_headers_frame( + 1, [(':status', '200'), ('content-type', 'text/html')] + ) + + self.request() + self.conn.get_response() + list(self.conn.get_pushes())[0].cancel() + + f = RstStreamFrame(2) + f.error_code = 8 + assert self.conn._sock.queue[-1] == f.serialize() + + def test_reset_pushed_streams_when_push_disabled(self): + self.add_push_frame( + 1, + 2, + [ + (':method', 'GET'), + (':path', '/'), + (':authority', 'www.google.com'), + (':scheme', 'http'), + ('accept-encoding', 'gzip') + ] + ) + self.add_headers_frame( + 1, [(':status', '200'), ('content-type', 'text/html')] + ) + + self.request(False) + self.conn.get_response() + + f = RstStreamFrame(2) + f.error_code = 7 + assert self.conn._sock.queue[-1].endswith(f.serialize()) + + # Some utility classes for the tests. 
class NullEncoder(object): @staticmethod diff --git a/test/test_import.py b/test/test_import.py index b8e3a1f2..9da32bd5 100644 --- a/test/test_import.py +++ b/test/test_import.py @@ -5,6 +5,7 @@ class TestImportPython2(object): + @pytest.mark.skipif(sys.version_info[0] == 3, reason="Python 2 only") def test_cannot_import_python_2(self, monkeypatch): monkeypatch.setattr(sys, 'version_info', (2, 6, 5, 'final', 0)) with pytest.raises(ImportError): @@ -12,6 +13,7 @@ def test_cannot_import_python_2(self, monkeypatch): class TestImportPython3(object): + @pytest.mark.skipif(sys.version_info[0] == 2, reason="Python 3 only") def test_cannot_import_python_32(self, monkeypatch): monkeypatch.setattr(sys, 'version_info', (3, 2, 3, 'final', 0)) with pytest.raises(ImportError): diff --git a/test/test_integration.py b/test/test_integration.py index 6a6c2600..6a9ece42 100644 --- a/test/test_integration.py +++ b/test/test_integration.py @@ -6,15 +6,22 @@ This file defines integration-type tests for hyper. These are still not fully hitting the network, so that's alright. """ +import base64 import requests import threading import time import hyper import hyper.http11.connection import pytest +from socket import timeout as SocketTimeout +from contextlib import contextmanager +from mock import patch +from concurrent.futures import ThreadPoolExecutor, TimeoutError from h2.frame_buffer import FrameBuffer from hyper.compat import ssl from hyper.contrib import HTTP20Adapter +from hyper.common.exceptions import ProxyError +from hyper.common.util import HTTPVersion, to_bytestring from hyperframe.frame import ( Frame, SettingsFrame, WindowUpdateFrame, DataFrame, HeadersFrame, GoAwayFrame, RstStreamFrame @@ -25,7 +32,7 @@ REQUEST_CODES, REQUEST_CODES_LENGTH ) from hyper.http20.exceptions import ConnectionError, StreamResetError -from server import SocketLevelTest +from server import SocketLevelTest, SocketSecuritySetting # Turn off certificate verification for the tests. 
if ssl is not None: @@ -33,8 +40,8 @@ hyper.tls._context.check_hostname = False hyper.tls._context.verify_mode = ssl.CERT_NONE - # Cover our bases because NPN doesn't yet work on all our test platforms. - hyper.http20.connection.H2_NPN_PROTOCOLS += ['', None] +# Cover our bases because NPN doesn't yet work on all our test platforms. +PROTOCOLS = hyper.http20.connection.H2_NPN_PROTOCOLS + ['', None] def decode_frame(frame_data): @@ -62,19 +69,34 @@ def frame_buffer(): return buffer +@contextmanager +def reusable_frame_buffer(buffer): + # FrameBuffer does not return new iterator for iteration. + data = buffer.data + yield buffer + buffer.data = data + + def receive_preamble(sock): # Receive the HTTP/2 'preamble'. - first = sock.recv(65535) + client_preface = b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n' + got = b'' + while len(got) < len(client_preface): + tmp = sock.recv(len(client_preface) - len(got)) + assert len(tmp) > 0, "unexpected EOF" + got += tmp - # Work around some bugs: if the first message received was only the PRI - # string, aim to receive a settings frame as well. - if len(first) <= len(b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n'): - sock.recv(65535) + assert got == client_preface, "client preface mismatch" + + # Send server side HTTP/2 preface sock.send(SettingsFrame(0).serialize()) - sock.recv(65535) - return + # Drain to let the client proceed. + # Note that in the lower socket level, this method is not + # just doing "receive". + return sock.recv(65535) +@patch('hyper.http20.connection.H2_NPN_PROTOCOLS', PROTOCOLS) class TestHyperIntegration(SocketLevelTest): # These are HTTP/2 tests. h2 = True @@ -135,7 +157,7 @@ def socket_handler(listener): self._start_server(socket_handler) conn = self.get_connection() conn.connect() - send_event.wait() + send_event.wait(5) # Get the chunk of data after the preamble and decode it into frames. # We actually expect two, but only the second one contains ENABLE_PUSH. 
@@ -238,9 +260,9 @@ def socket_handler(listener): # We need to send back a SettingsFrame. f = SettingsFrame(0) sock.send(f.serialize()) - - send_event.wait() sock.recv(65535) + + send_event.wait(5) sock.close() self._start_server(socket_handler) @@ -257,6 +279,7 @@ def socket_handler(listener): def test_closed_responses_remove_their_streams_from_conn(self): self.set_up() + req_event = threading.Event() recv_event = threading.Event() def socket_handler(listener): @@ -267,24 +290,27 @@ def socket_handler(listener): receive_preamble(sock) sock.recv(65535) + # Wait for request + req_event.wait(5) # Now, send the headers for the response. f = build_headers_frame([(':status', '200')]) f.stream_id = 1 sock.send(f.serialize()) # Wait for the message from the main thread. - recv_event.set() + recv_event.wait(5) sock.close() self._start_server(socket_handler) conn = self.get_connection() conn.request('GET', '/') + req_event.set() resp = conn.get_response() # Close the response. resp.close() - recv_event.wait(5) + recv_event.set() assert not conn.streams @@ -293,6 +319,7 @@ def socket_handler(listener): def test_receiving_responses_with_no_body(self): self.set_up() + req_event = threading.Event() recv_event = threading.Event() def socket_handler(listener): @@ -303,6 +330,8 @@ def socket_handler(listener): receive_preamble(sock) sock.recv(65535) + # Wait for request + req_event.wait(5) # Now, send the headers for the response. This response has no body f = build_headers_frame( [(':status', '204'), ('content-length', '0')] @@ -312,12 +341,13 @@ def socket_handler(listener): sock.send(f.serialize()) # Wait for the message from the main thread. - recv_event.set() + recv_event.wait(5) sock.close() self._start_server(socket_handler) conn = self.get_connection() conn.request('GET', '/') + req_event.set() resp = conn.get_response() # Confirm the status code. 
@@ -328,13 +358,13 @@ def socket_handler(listener): assert resp._stream._in_window_manager.document_size == 0 # Awesome, we're done now. - recv_event.wait(5) - + recv_event.set() self.tear_down() def test_receiving_trailers(self): self.set_up() + req_event = threading.Event() recv_event = threading.Event() def socket_handler(listener): @@ -347,6 +377,8 @@ def socket_handler(listener): receive_preamble(sock) sock.recv(65535) + # Wait for request + req_event.wait(5) # Now, send the headers for the response. f = build_headers_frame( [(':status', '200'), ('content-length', '14')], @@ -369,12 +401,13 @@ def socket_handler(listener): sock.send(f.serialize()) # Wait for the message from the main thread. - recv_event.set() + recv_event.wait(5) sock.close() self._start_server(socket_handler) conn = self.get_connection() conn.request('GET', '/') + req_event.set() resp = conn.get_response() # Confirm the status code. @@ -393,15 +426,15 @@ def socket_handler(listener): assert len(resp.trailers) == 2 # Awesome, we're done now. - recv_event.wait(5) - + recv_event.set() self.tear_down() def test_receiving_trailers_before_reading(self): self.set_up() - recv_event = threading.Event() + req_event = threading.Event() wait_event = threading.Event() + recv_event = threading.Event() def socket_handler(listener): sock = listener.accept()[0] @@ -413,6 +446,8 @@ def socket_handler(listener): receive_preamble(sock) sock.recv(65535) + # Wait for request + req_event.wait(5) # Now, send the headers for the response. f = build_headers_frame( [(':status', '200'), ('content-length', '14')], @@ -440,12 +475,13 @@ def socket_handler(listener): sock.send(f.serialize()) # Wait for the message from the main thread. - recv_event.set() + recv_event.wait(5) sock.close() self._start_server(socket_handler) conn = self.get_connection() conn.request('GET', '/') + req_event.set() resp = conn.get_response() # Confirm the status code. 
@@ -467,8 +503,7 @@ def socket_handler(listener): assert resp._stream._in_window_manager.document_size == 14 # Awesome, we're done now. - recv_event.wait(5) - + recv_event.set() self.tear_down() def test_clean_shut_down(self): @@ -489,7 +524,7 @@ def socket_handler(listener): sock.send(f.serialize()) # Wait for the message from the main thread. - recv_event.set() + recv_event.wait(5) sock.close() self._start_server(socket_handler) @@ -500,8 +535,7 @@ def socket_handler(listener): assert conn._sock is None # Awesome, we're done now. - recv_event.wait(5) - + recv_event.set() self.tear_down() def test_unexpected_shut_down(self): @@ -522,8 +556,8 @@ def socket_handler(listener): sock.send(f.serialize()) # Wait for the message from the main thread. + recv_event.wait(5) sock.close() - recv_event.set() self._start_server(socket_handler) conn = self.get_connection() @@ -535,15 +569,15 @@ def socket_handler(listener): assert conn._sock is None # Awesome, we're done now. - recv_event.wait(5) - + recv_event.set() self.tear_down() def test_insecure_connection(self): self.set_up(secure=False) data = [] - send_event = threading.Event() + req_event = threading.Event() + recv_event = threading.Event() def socket_handler(listener): sock = listener.accept()[0] @@ -551,7 +585,7 @@ def socket_handler(listener): receive_preamble(sock) data.append(sock.recv(65535)) - send_event.wait(5) + req_event.wait(5) h = HeadersFrame(1) h.data = self.get_encoder().encode( @@ -570,12 +604,13 @@ def socket_handler(listener): d.flags.add('END_STREAM') sock.send(d.serialize()) + recv_event.wait(5) sock.close() self._start_server(socket_handler) c = self.get_connection() c.request('GET', '/') - send_event.set() + req_event.set() r = c.get_response() assert r.status == 200 @@ -586,13 +621,15 @@ def socket_handler(listener): assert r.read() == b'nsaislistening' + recv_event.set() self.tear_down() - def test_proxy_connection(self): - self.set_up(proxy=True) + def test_insecure_proxy_connection(self): + 
self.set_up(secure=False, proxy=True) data = [] - send_event = threading.Event() + req_event = threading.Event() + recv_event = threading.Event() def socket_handler(listener): sock = listener.accept()[0] @@ -600,7 +637,7 @@ def socket_handler(listener): receive_preamble(sock) data.append(sock.recv(65535)) - send_event.wait(5) + req_event.wait(5) h = HeadersFrame(1) h.data = self.get_encoder().encode( @@ -619,12 +656,75 @@ def socket_handler(listener): d.flags.add('END_STREAM') sock.send(d.serialize()) + recv_event.wait(5) sock.close() self._start_server(socket_handler) c = self.get_connection() c.request('GET', '/') - send_event.set() + req_event.set() + r = c.get_response() + + assert r.status == 200 + assert len(r.headers) == 3 + assert r.headers[b'server'] == [b'socket-level-server'] + assert r.headers[b'content-length'] == [b'12'] + assert r.headers[b'content-type'] == [b'not/real'] + + assert r.read() == b'thisisaproxy' + + recv_event.set() + self.tear_down() + + def test_secure_proxy_connection(self): + self.set_up(secure=SocketSecuritySetting.SECURE_NO_AUTO_WRAP, + proxy=True) + + data = [] + connect_request_headers = [] + req_event = threading.Event() + recv_event = threading.Event() + + def socket_handler(listener): + sock = listener.accept()[0] + + # Read the CONNECT request + while not b''.join(connect_request_headers).endswith(b'\r\n\r\n'): + connect_request_headers.append(sock.recv(65535)) + + sock.send(b'HTTP/1.0 200 Connection established\r\n\r\n') + + sock = self.server_thread.wrap_socket(sock) + + receive_preamble(sock) + + data.append(sock.recv(65535)) + req_event.wait(5) + + h = HeadersFrame(1) + h.data = self.get_encoder().encode( + [ + (':status', 200), + ('content-type', 'not/real'), + ('content-length', 12), + ('server', 'socket-level-server') + ] + ) + h.flags.add('END_HEADERS') + sock.send(h.serialize()) + + d = DataFrame(1) + d.data = b'thisisaproxy' + d.flags.add('END_STREAM') + sock.send(d.serialize()) + + recv_event.wait(5) + 
sock.close() + + self._start_server(socket_handler) + c = self.get_connection() + c.request('GET', '/') + req_event.set() r = c.get_response() assert r.status == 200 @@ -635,6 +735,46 @@ def socket_handler(listener): assert r.read() == b'thisisaproxy' + assert (to_bytestring( + 'CONNECT %s:%d HTTP/1.1\r\n\r\n' % (c.host, c.port)) == + b''.join(connect_request_headers)) + + recv_event.set() + self.tear_down() + + def test_failing_proxy_tunnel(self): + self.set_up(secure=SocketSecuritySetting.SECURE_NO_AUTO_WRAP, + proxy=True) + + recv_event = threading.Event() + + def socket_handler(listener): + sock = listener.accept()[0] + + # Read the CONNECT request + connect_data = b'' + while not connect_data.endswith(b'\r\n\r\n'): + connect_data += sock.recv(65535) + + sock.send(b'HTTP/1.0 407 Proxy Authentication Required\r\n\r\n') + + recv_event.wait(5) + sock.close() + + self._start_server(socket_handler) + conn = self.get_connection() + + try: + conn.connect() + assert False, "Exception should have been thrown" + except ProxyError as e: + assert e.response.status == 407 + assert e.response.reason == b'Proxy Authentication Required' + + # Confirm the connection is closed. + assert conn._sock is None + + recv_event.set() self.tear_down() def test_resetting_stream_with_frames_in_flight(self): @@ -644,6 +784,7 @@ def test_resetting_stream_with_frames_in_flight(self): """ self.set_up() + req_event = threading.Event() recv_event = threading.Event() def socket_handler(listener): @@ -654,6 +795,8 @@ def socket_handler(listener): receive_preamble(sock) sock.recv(65535) + # Wait for request + req_event.wait(5) # Now, send the headers for the response. This response has no # body. f = build_headers_frame( @@ -670,6 +813,7 @@ def socket_handler(listener): self._start_server(socket_handler) conn = self.get_connection() stream_id = conn.request('GET', '/') + req_event.set() # Now, trigger the RST_STREAM frame by closing the stream. 
conn._send_rst_frame(stream_id, 0) @@ -683,7 +827,6 @@ def socket_handler(listener): # Awesome, we're done now. recv_event.set() - self.tear_down() def test_stream_can_be_reset_multiple_times(self): @@ -693,6 +836,7 @@ def test_stream_can_be_reset_multiple_times(self): """ self.set_up() + req_event = threading.Event() recv_event = threading.Event() def socket_handler(listener): @@ -703,6 +847,8 @@ def socket_handler(listener): receive_preamble(sock) sock.recv(65535) + # Wait for request + req_event.wait(5) # Now, send two RST_STREAM frames. for _ in range(0, 2): f = RstStreamFrame(1) @@ -715,6 +861,7 @@ def socket_handler(listener): self._start_server(socket_handler) conn = self.get_connection() conn.request('GET', '/') + req_event.set() # Now, eat the Rst frames. These should not cause an exception. conn._single_read() @@ -734,6 +881,7 @@ def socket_handler(listener): def test_read_chunked_http2(self): self.set_up() + req_event = threading.Event() recv_event = threading.Event() wait_event = threading.Event() @@ -745,6 +893,8 @@ def socket_handler(listener): receive_preamble(sock) sock.recv(65535) + # Wait for request + req_event.wait(5) # Now, send the headers for the response. This response has a body. f = build_headers_frame([(':status', '200')]) f.stream_id = 1 @@ -768,12 +918,13 @@ def socket_handler(listener): sock.sendall(f.serialize()) # Wait for the message from the main thread. - recv_event.set() + recv_event.wait(5) sock.close() self._start_server(socket_handler) conn = self.get_connection() conn.request('GET', '/') + req_event.set() resp = conn.get_response() # Confirm the status code. @@ -795,15 +946,16 @@ def socket_handler(listener): assert third_chunk == b'world' # Awesome, we're done now. 
- recv_event.wait(5) + recv_event.set() self.tear_down() def test_read_delayed(self): self.set_up() - recv_event = threading.Event() + req_event = threading.Event() wait_event = threading.Event() + recv_event = threading.Event() def socket_handler(listener): sock = listener.accept()[0] @@ -813,6 +965,8 @@ def socket_handler(listener): receive_preamble(sock) sock.recv(65535) + # Wait for request + req_event.wait(5) # Now, send the headers for the response. This response has a body. f = build_headers_frame([(':status', '200')]) f.stream_id = 1 @@ -836,12 +990,13 @@ def socket_handler(listener): sock.sendall(f.serialize()) # Wait for the message from the main thread. - recv_event.set() + recv_event.wait(5) sock.close() self._start_server(socket_handler) conn = self.get_connection() conn.request('GET', '/') + req_event.set() resp = conn.get_response() # Confirm the status code. @@ -855,15 +1010,14 @@ def socket_handler(listener): assert second_chunk == b'world' # Awesome, we're done now. - recv_event.wait(5) - + recv_event.set() self.tear_down() def test_upgrade(self): self.set_up(secure=False) - recv_event = threading.Event() wait_event = threading.Event() + recv_event = threading.Event() def socket_handler(listener): sock = listener.accept()[0] @@ -912,7 +1066,7 @@ def socket_handler(listener): sock.sendall(f.serialize()) # Wait for the message from the main thread. - recv_event.set() + recv_event.wait(5) sock.close() self._start_server(socket_handler) @@ -931,16 +1085,263 @@ def socket_handler(listener): assert second_chunk == b'world' # Awesome, we're done now. - recv_event.wait(5) + recv_event.set() + self.tear_down() + + def test_version_after_tls_upgrade(self, monkeypatch): + self.set_up() + + # We need to patch the ssl_wrap_socket method to ensure that we + # forcefully upgrade. 
+ old_wrap_socket = hyper.http11.connection.wrap_socket + + def wrap(*args): + sock, _ = old_wrap_socket(*args) + return sock, 'h2' + + monkeypatch.setattr(hyper.http11.connection, 'wrap_socket', wrap) + + req_event = threading.Event() + recv_event = threading.Event() + + def socket_handler(listener): + sock = listener.accept()[0] + + receive_preamble(sock) + # Wait for the request + req_event.wait(5) + # Send the headers for the response. This response has no body. + f = build_headers_frame( + [(':status', '200'), ('content-length', '0')] + ) + f.flags.add('END_STREAM') + f.stream_id = 1 + sock.sendall(f.serialize()) + + # wait for the message from the main thread + recv_event.wait(5) + sock.close() + + self._start_server(socket_handler) + c = hyper.HTTPConnection(self.host, self.port, secure=True) + + assert c.version is HTTPVersion.http11 + assert c.version is not HTTPVersion.http20 + c.request('GET', '/') + req_event.set() + assert c.version is HTTPVersion.http20 + + recv_event.set() self.tear_down() + def test_version_after_http_upgrade(self): + self.set_up() + self.secure = False + + req_event = threading.Event() + recv_event = threading.Event() + def socket_handler(listener): + sock = listener.accept()[0] + # We should get the initial request. + data = b'' + while not data.endswith(b'\r\n\r\n'): + data += sock.recv(65535) + assert b'upgrade: h2c\r\n' in data + + req_event.wait(5) + + # We need to send back a response. + resp = ( + b'HTTP/1.1 101 Upgrade\r\n' + b'Server: socket-level-server\r\n' + b'Content-Length: 0\r\n' + b'Connection: upgrade\r\n' + b'Upgrade: h2c\r\n' + b'\r\n' + ) + sock.sendall(resp) + + # We get a message for connection open, specifically the preamble. + receive_preamble(sock) + + # Send the headers for the response. This response has a body. 
+ f = build_headers_frame( + [(':status', '200'), ('content-length', '0')] + ) + f.stream_id = 1 + f.flags.add('END_STREAM') + sock.sendall(f.serialize()) + + # keep the socket open for clean shutdown + recv_event.wait(5) + sock.close() + + self._start_server(socket_handler) + + c = hyper.HTTPConnection(self.host, self.port) + assert c.version is HTTPVersion.http11 + + c.request('GET', '/') + req_event.set() + + resp = c.get_response() + assert c.version is HTTPVersion.http20 + assert resp.version is HTTPVersion.http20 + recv_event.set() + + self.tear_down() + + def test_connection_and_send_simultaneously(self): + # Since deadlock occurs probabilistic, + # It still has deadlock probability + # even the testcase is passed. + self.set_up() + + recv_event = threading.Event() + + def socket_handler(listener): + sock = listener.accept()[0] + + receive_preamble(sock) + sock.recv(65535) + + recv_event.set() + sock.close() + + def do_req(conn): + conn.request('GET', '/') + recv_event.wait() + + def do_connect(conn): + conn.connect() + + self._start_server(socket_handler) + conn = self.get_connection() + + pool = ThreadPoolExecutor(max_workers=2) + pool.submit(do_connect, conn) + f = pool.submit(do_req, conn) + + try: + f.result(timeout=10) + except TimeoutError: + assert False + + self.tear_down() + + def test_connection_timeout(self): + self.set_up(timeout=0.5) + + def socket_handler(listener): + time.sleep(1) + + self._start_server(socket_handler) + conn = self.get_connection() + + with pytest.raises((SocketTimeout, ssl.SSLError)): + # Py2 raises this as a BaseSSLError, + # Py3 raises it as socket timeout. 
+ conn.connect() + + self.tear_down() + + def test_hyper_connection_timeout(self): + self.set_up(timeout=0.5) + + def socket_handler(listener): + time.sleep(1) + + self._start_server(socket_handler) + conn = hyper.HTTPConnection(self.host, self.port, self.secure, + timeout=self.timeout) + + with pytest.raises((SocketTimeout, ssl.SSLError)): + # Py2 raises this as a BaseSSLError, + # Py3 raises it as socket timeout. + conn.request('GET', '/') + + self.tear_down() + + def test_read_timeout(self): + self.set_up(timeout=(10, 0.5)) + + req_event = threading.Event() + + def socket_handler(listener): + sock = listener.accept()[0] + + # We get two messages for the connection open and then a HEADERS + # frame. + receive_preamble(sock) + sock.recv(65535) + + # Wait for request + req_event.wait(5) + + # Sleep wait for read timeout + time.sleep(1) + + sock.close() + + self._start_server(socket_handler) + conn = self.get_connection() + conn.request('GET', '/') + req_event.set() + + with pytest.raises((SocketTimeout, ssl.SSLError)): + # Py2 raises this as a BaseSSLError, + # Py3 raises it as socket timeout. + conn.get_response() + + self.tear_down() + + def test_default_connection_timeout(self): + self.set_up(timeout=None) + + # Confirm that we send the connection upgrade string and the initial + # SettingsFrame. + data = [] + send_event = threading.Event() + + def socket_handler(listener): + time.sleep(1) + sock = listener.accept()[0] + + # We should get one big chunk. + first = sock.recv(65535) + data.append(first) + + # We need to send back a SettingsFrame. + f = SettingsFrame(0) + sock.send(f.serialize()) + + send_event.set() + sock.close() + + self._start_server(socket_handler) + conn = self.get_connection() + try: + conn.connect() + except (SocketTimeout, ssl.SSLError): + # Py2 raises this as a BaseSSLError, + # Py3 raises it as socket timeout. 
+ pytest.fail() + + send_event.wait(5) + + assert data[0].startswith(b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n') + + self.tear_down() + + +@patch('hyper.http20.connection.H2_NPN_PROTOCOLS', PROTOCOLS) class TestRequestsAdapter(SocketLevelTest): # This uses HTTP/2. h2 = True - def test_adapter_received_values(self, monkeypatch): + def test_adapter_received_values(self, monkeypatch, frame_buffer): self.set_up() # We need to patch the ssl_wrap_socket method to ensure that we @@ -953,17 +1354,22 @@ def wrap(*args): monkeypatch.setattr(hyper.http11.connection, 'wrap_socket', wrap) - data = [] - send_event = threading.Event() + recv_event = threading.Event() def socket_handler(listener): sock = listener.accept()[0] # Do the handshake: conn header, settings, send settings, recv ack. - receive_preamble(sock) + frame_buffer.add_data(receive_preamble(sock)) # Now expect some data. One headers frame. - data.append(sock.recv(65535)) + req_wait = True + while req_wait: + frame_buffer.add_data(sock.recv(65535)) + with reusable_frame_buffer(frame_buffer) as fr: + for f in fr: + if isinstance(f, HeadersFrame): + req_wait = False # Respond! h = HeadersFrame(1) @@ -981,7 +1387,8 @@ def socket_handler(listener): d.flags.add('END_STREAM') sock.send(d.serialize()) - send_event.wait(5) + # keep the socket open for clean shutdown + recv_event.wait(5) sock.close() self._start_server(socket_handler) @@ -992,14 +1399,13 @@ def socket_handler(listener): # Assert about the received values. 
assert r.status_code == 200 - assert r.headers[b'Content-Type'] == b'not/real' + assert r.headers['Content-Type'] == 'not/real' assert r.content == b'1234567890' * 2 - send_event.set() - + recv_event.set() self.tear_down() - def test_adapter_sending_values(self, monkeypatch): + def test_adapter_sending_values(self, monkeypatch, frame_buffer): self.set_up() # We need to patch the ssl_wrap_socket method to ensure that we @@ -1012,17 +1418,22 @@ def wrap(*args): monkeypatch.setattr(hyper.http11.connection, 'wrap_socket', wrap) - data = [] + recv_event = threading.Event() def socket_handler(listener): sock = listener.accept()[0] # Do the handshake: conn header, settings, send settings, recv ack. - receive_preamble(sock) + frame_buffer.add_data(receive_preamble(sock)) # Now expect some data. One headers frame and one data frame. - data.append(sock.recv(65535)) - data.append(sock.recv(65535)) + req_wait = True + while req_wait: + frame_buffer.add_data(sock.recv(65535)) + with reusable_frame_buffer(frame_buffer) as fr: + for f in fr: + if isinstance(f, DataFrame): + req_wait = False # Respond! h = HeadersFrame(1) @@ -1040,6 +1451,8 @@ def socket_handler(listener): d.flags.add('END_STREAM') sock.send(d.serialize()) + # keep the socket open for clean shutdown + recv_event.wait(5) sock.close() self._start_server(socket_handler) @@ -1054,11 +1467,353 @@ def socket_handler(listener): # Assert about the sent values. 
assert r.status_code == 200 - f = decode_frame(data[0]) - assert isinstance(f, HeadersFrame) + frames = list(frame_buffer) + assert isinstance(frames[-2], HeadersFrame) + + assert isinstance(frames[-1], DataFrame) + assert frames[-1].data == b'hi there' + + recv_event.set() + self.tear_down() + + def test_adapter_uses_proxies(self): + self.set_up(secure=SocketSecuritySetting.SECURE_NO_AUTO_WRAP, + proxy=True) + + send_event = threading.Event() + + def socket_handler(listener): + sock = listener.accept()[0] + + # Read the CONNECT request + connect_data = b'' + while not connect_data.endswith(b'\r\n\r\n'): + connect_data += sock.recv(65535) + + sock.send(b'HTTP/1.0 200 Connection established\r\n\r\n') + + sock = self.server_thread.wrap_socket(sock) + + # We should get the initial request. + data = b'' + while not data.endswith(b'\r\n\r\n'): + data += sock.recv(65535) + + send_event.wait() + + # We need to send back a response. + resp = ( + b'HTTP/1.1 201 No Content\r\n' + b'Server: socket-level-server\r\n' + b'Content-Length: 0\r\n' + b'Connection: close\r\n' + b'\r\n' + ) + sock.send(resp) + + sock.close() + + self._start_server(socket_handler) + s = requests.Session() + s.proxies = {'all': 'http://%s:%s' % (self.host, self.port)} + s.mount('https://', HTTP20Adapter()) + send_event.set() + r = s.get('https://foobar/') + + assert r.status_code == 201 + assert len(r.headers) == 3 + assert r.headers['server'] == 'socket-level-server' + assert r.headers['content-length'] == '0' + assert r.headers['connection'] == 'close' + + assert r.content == b'' + + self.tear_down() + + def test_adapter_uses_proxy_auth_for_secure(self): + self.set_up(secure=SocketSecuritySetting.SECURE_NO_AUTO_WRAP, + proxy=True) + + send_event = threading.Event() + + def socket_handler(listener): + sock = listener.accept()[0] + + # Read the CONNECT request + connect_data = b'' + while not connect_data.endswith(b'\r\n\r\n'): + connect_data += sock.recv(65535) + + # Ensure that request contains the 
proper Proxy-Authorization + # header + assert (b'CONNECT foobar:443 HTTP/1.1\r\n' + b'Proxy-Authorization: Basic ' + + base64.b64encode(b'foo:bar') + b'\r\n' + b'\r\n') == connect_data + + sock.send(b'HTTP/1.0 200 Connection established\r\n\r\n') + + sock = self.server_thread.wrap_socket(sock) + + # We should get the initial request. + data = b'' + while not data.endswith(b'\r\n\r\n'): + data += sock.recv(65535) + # Ensure that proxy headers are not passed via tunnelled connection + assert b'Proxy-Authorization:' not in data + + send_event.wait() + + # We need to send back a response. + resp = ( + b'HTTP/1.1 201 No Content\r\n' + b'Server: socket-level-server\r\n' + b'Content-Length: 0\r\n' + b'Connection: close\r\n' + b'\r\n' + ) + sock.send(resp) + + sock.close() + + self._start_server(socket_handler) + s = requests.Session() + s.proxies = {'all': 'http://foo:bar@%s:%s' % (self.host, self.port)} + s.mount('https://', HTTP20Adapter()) + send_event.set() + r = s.get('https://foobar/') + + assert r.status_code == 201 + assert len(r.headers) == 3 + assert r.headers['server'] == 'socket-level-server' + assert r.headers['content-length'] == '0' + assert r.headers['connection'] == 'close' + + assert r.content == b'' + + self.tear_down() + + def test_adapter_uses_proxy_auth_for_insecure(self): + self.set_up(secure=False, proxy=True) + + send_event = threading.Event() + + def socket_handler(listener): + sock = listener.accept()[0] + + # We should get the initial request. + connect_data = b'' + while not connect_data.endswith(b'\r\n\r\n'): + connect_data += sock.recv(65535) + + # Ensure that request contains the proper Proxy-Authorization + # header + assert (b'Proxy-Authorization: Basic ' + + base64.b64encode(b'foo:bar') + b'\r\n' + ).lower() in connect_data.lower() + + send_event.wait() + + # We need to send back a response. 
+ resp = ( + b'HTTP/1.1 201 No Content\r\n' + b'Server: socket-level-server\r\n' + b'Content-Length: 0\r\n' + b'Connection: close\r\n' + b'\r\n' + ) + sock.send(resp) + + sock.close() + + self._start_server(socket_handler) + s = requests.Session() + s.proxies = {'all': 'http://foo:bar@%s:%s' % (self.host, self.port)} + s.mount('http://', HTTP20Adapter()) + send_event.set() + r = s.get('http://foobar/') + + assert r.status_code == 201 + assert len(r.headers) == 3 + assert r.headers['server'] == 'socket-level-server' + assert r.headers['content-length'] == '0' + assert r.headers['connection'] == 'close' + + assert r.content == b'' + + self.tear_down() + + def test_adapter_connection_timeout(self, monkeypatch, frame_buffer): + self.set_up() + + # We need to patch the ssl_wrap_socket method to ensure that we + # forcefully upgrade. + old_wrap_socket = hyper.http11.connection.wrap_socket + + def wrap(*args): + sock, _ = old_wrap_socket(*args) + return sock, 'h2' + + monkeypatch.setattr(hyper.http11.connection, 'wrap_socket', wrap) + + def socket_handler(listener): + time.sleep(1) + + self._start_server(socket_handler) + + s = requests.Session() + s.mount('https://%s' % self.host, HTTP20Adapter()) + + with pytest.raises((SocketTimeout, ssl.SSLError)): + # Py2 raises this as a BaseSSLError, + # Py3 raises it as socket timeout. + s.get('https://%s:%s/some/path' % (self.host, self.port), + timeout=0.5) + + self.tear_down() + + def test_adapter_read_timeout(self, monkeypatch, frame_buffer): + self.set_up() + + # We need to patch the ssl_wrap_socket method to ensure that we + # forcefully upgrade. + old_wrap_socket = hyper.http11.connection.wrap_socket + + def wrap(*args): + sock, _ = old_wrap_socket(*args) + return sock, 'h2' + + monkeypatch.setattr(hyper.http11.connection, 'wrap_socket', wrap) + + def socket_handler(listener): + sock = listener.accept()[0] + + # Do the handshake: conn header, settings, send settings, recv ack. 
+ frame_buffer.add_data(receive_preamble(sock)) + + # Now expect some data. One headers frame. + req_wait = True + while req_wait: + frame_buffer.add_data(sock.recv(65535)) + with reusable_frame_buffer(frame_buffer) as fr: + for f in fr: + if isinstance(f, HeadersFrame): + req_wait = False + + # Sleep wait for read timeout + time.sleep(1) + + sock.close() + + self._start_server(socket_handler) + + s = requests.Session() + s.mount('https://%s' % self.host, HTTP20Adapter()) + + with pytest.raises((SocketTimeout, ssl.SSLError)): + # Py2 raises this as a BaseSSLError, + # Py3 raises it as socket timeout. + s.get('https://%s:%s/some/path' % (self.host, self.port), + timeout=(10, 0.5)) + + self.tear_down() + + def test_adapter_close(self): + self.set_up(secure=False) + + def socket_handler(listener): + sock = listener.accept()[0] + + # We should get the initial request. + data = b'' + while not data.endswith(b'\r\n\r\n'): + data += sock.recv(65535) + + # We need to send back a response. + resp = ( + b'HTTP/1.1 201 No Content\r\n' + b'Server: socket-level-server\r\n' + b'Content-Length: 0\r\n' + b'Connection: close\r\n' + b'\r\n' + ) + sock.send(resp) + sock.close() + + self._start_server(socket_handler) + + a = HTTP20Adapter() + s = requests.Session() + s.mount('http://', a) + r = s.get('http://%s:%s' % (self.host, self.port)) + connections_before_close = list(a.connections.values()) + + # ensure that we have at least 1 connection + assert connections_before_close + + s.close() + + # check that connections cache is empty + assert not a.connections + + # check that all connections are actually closed + assert all(conn._sock is None for conn in connections_before_close) + + assert r.status_code == 201 + assert len(r.headers) == 3 + assert r.headers['server'] == 'socket-level-server' + assert r.headers['content-length'] == '0' + assert r.headers['connection'] == 'close' + + assert r.content == b'' + + self.tear_down() + + def test_adapter_close_context_manager(self): + 
self.set_up(secure=False) + + def socket_handler(listener): + sock = listener.accept()[0] + + # We should get the initial request. + data = b'' + while not data.endswith(b'\r\n\r\n'): + data += sock.recv(65535) + + # We need to send back a response. + resp = ( + b'HTTP/1.1 201 No Content\r\n' + b'Server: socket-level-server\r\n' + b'Content-Length: 0\r\n' + b'Connection: close\r\n' + b'\r\n' + ) + sock.send(resp) + sock.close() + + self._start_server(socket_handler) + + with requests.Session() as s: + a = HTTP20Adapter() + s.mount('http://', a) + r = s.get('http://%s:%s' % (self.host, self.port)) + connections_before_close = list(a.connections.values()) + + # ensure that we have at least 1 connection + assert connections_before_close + + # check that connections cache is empty + assert not a.connections + + # check that all connections are actually closed + assert all(conn._sock is None for conn in connections_before_close) + + assert r.status_code == 201 + assert len(r.headers) == 3 + assert r.headers['server'] == 'socket-level-server' + assert r.headers['content-length'] == '0' + assert r.headers['connection'] == 'close' - f = decode_frame(data[1]) - assert isinstance(f, DataFrame) - assert f.data == b'hi there' + assert r.content == b'' self.tear_down() diff --git a/test/test_integration_http11.py b/test/test_integration_http11.py index 53cd83a6..7ec3846a 100644 --- a/test/test_integration_http11.py +++ b/test/test_integration_http11.py @@ -9,10 +9,13 @@ import hyper import threading import pytest +import time +from socket import timeout as SocketTimeout from hyper.compat import ssl -from server import SocketLevelTest +from server import SocketLevelTest, SocketSecuritySetting from hyper.common.exceptions import HTTPUpgrade +from hyper.common.util import to_bytestring # Turn off certificate verification for the tests. 
if ssl is not None: @@ -119,8 +122,50 @@ def socket_handler(listener): assert r.read() == b'hellotheresirfinalfantasy' - def test_proxy_request_response(self): - self.set_up(proxy=True) + def test_closing_response_without_headers(self): + self.set_up() + + send_event = threading.Event() + + def socket_handler(listener): + sock = listener.accept()[0] + + # We should get the initial request. + data = b'' + while not data.endswith(b'\r\n\r\n'): + data += sock.recv(65535) + + send_event.wait() + + # We need to send back a response. + resp = ( + b'HTTP/1.1 200 OK\r\n' + b'Server: socket-level-server\r\n' + b'\r\n' + ) + sock.send(resp) + + sock.send(b'hi') + + sock.close() + + self._start_server(socket_handler) + c = self.get_connection() + c.request('GET', '/') + send_event.set() + r = c.get_response() + + assert r.status == 200 + assert r.reason == b'OK' + assert len(r.headers) == 1 + assert r.headers[b'server'] == [b'socket-level-server'] + + assert r.read() == b'hi' + + assert c._sock is None + + def test_insecure_proxy_request_response(self): + self.set_up(secure=False, proxy=True) send_event = threading.Event() @@ -163,6 +208,108 @@ def socket_handler(listener): assert c._sock is None + def test_secure_proxy_request_response(self): + self.set_up(secure=SocketSecuritySetting.SECURE_NO_AUTO_WRAP, + proxy=True) + + connect_request_headers = [] + send_event = threading.Event() + + def socket_handler(listener): + sock = listener.accept()[0] + + # Read the CONNECT request + while not b''.join(connect_request_headers).endswith(b'\r\n\r\n'): + connect_request_headers.append(sock.recv(65535)) + + sock.send(b'HTTP/1.0 200 Connection established\r\n\r\n') + + sock = self.server_thread.wrap_socket(sock) + + # We should get the initial request. + data = b'' + while not data.endswith(b'\r\n\r\n'): + data += sock.recv(65535) + + send_event.wait() + + # We need to send back a response. 
+ resp = ( + b'HTTP/1.1 201 No Content\r\n' + b'Server: socket-level-server\r\n' + b'Content-Length: 0\r\n' + b'Connection: close\r\n' + b'\r\n' + ) + sock.send(resp) + + sock.close() + + self._start_server(socket_handler) + c = self.get_connection() + c.request('GET', '/') + send_event.set() + r = c.get_response() + + assert r.status == 201 + assert r.reason == b'No Content' + assert len(r.headers) == 3 + assert r.headers[b'server'] == [b'socket-level-server'] + assert r.headers[b'content-length'] == [b'0'] + assert r.headers[b'connection'] == [b'close'] + + assert r.read() == b'' + + assert (to_bytestring( + 'CONNECT %s:%d HTTP/1.1\r\n\r\n' % (c.host, c.port)) == + b''.join(connect_request_headers)) + + assert c._sock is None + + def test_proxy_connection_close_is_respected(self): + self.set_up(secure=False, proxy=True) + + send_event = threading.Event() + + def socket_handler(listener): + sock = listener.accept()[0] + + # We should get the initial request. + data = b'' + while not data.endswith(b'\r\n\r\n'): + data += sock.recv(65535) + + send_event.wait() + + # We need to send back a response. + resp = ( + b'HTTP/1.0 407 Proxy Authentication Required\r\n' + b'Proxy-Authenticate: Basic realm="proxy"\r\n' + b'Proxy-Connection: close\r\n' + b'\r\n' + ) + sock.send(resp) + + sock.close() + + self._start_server(socket_handler) + conn = self.get_connection() + conn.request('GET', '/') + send_event.set() + + r = conn.get_response() + + assert r.status == 407 + assert r.reason == b'Proxy Authentication Required' + assert len(r.headers) == 2 + assert r.headers[b'proxy-authenticate'] == [b'Basic realm="proxy"'] + assert r.headers[b'proxy-connection'] == [b'close'] + + assert r.read() == b'' + + # Confirm the connection is closed. 
+ assert conn._sock is None + def test_response_with_body(self): self.set_up() @@ -282,7 +429,7 @@ def socket_handler(listener): b'HTTP/1.1 101 Upgrade\r\n' b'Server: socket-level-server\r\n' b'Content-Length: 0\r\n' - b'Connection: upgrade\r\n' + b'Connection: Upgrade\r\n' b'Upgrade: h2c\r\n' b'\r\n' ) @@ -297,3 +444,68 @@ def socket_handler(listener): with pytest.raises(HTTPUpgrade): c.get_response() + + def test_connection_timeout(self): + self.set_up(timeout=0.5) + + def socket_handler(listener): + time.sleep(1) + + self._start_server(socket_handler) + conn = self.get_connection() + + with pytest.raises((SocketTimeout, ssl.SSLError)): + # Py2 raises this as a BaseSSLError, + # Py3 raises it as socket timeout. + conn.connect() + + self.tear_down() + + def test_hyper_connection_timeout(self): + self.set_up(timeout=0.5) + + def socket_handler(listener): + time.sleep(1) + + self._start_server(socket_handler) + conn = hyper.HTTPConnection(self.host, self.port, self.secure, + timeout=self.timeout) + + with pytest.raises((SocketTimeout, ssl.SSLError)): + # Py2 raises this as a BaseSSLError, + # Py3 raises it as socket timeout. + conn.request('GET', '/') + + self.tear_down() + + def test_read_timeout(self): + self.set_up(timeout=(10, 0.5)) + + send_event = threading.Event() + + def socket_handler(listener): + sock = listener.accept()[0] + + # We should get the initial request. + data = b'' + while not data.endswith(b'\r\n\r\n'): + data += sock.recv(65535) + + send_event.wait() + + # Sleep wait for read timeout + time.sleep(1) + + sock.close() + + self._start_server(socket_handler) + conn = self.get_connection() + conn.request('GET', '/') + send_event.set() + + with pytest.raises((SocketTimeout, ssl.SSLError)): + # Py2 raises this as a BaseSSLError, + # Py3 raises it as socket timeout. 
+ conn.get_response() + + self.tear_down() diff --git a/test_release.py b/test_release.py index 498fae71..38138657 100644 --- a/test_release.py +++ b/test_release.py @@ -10,16 +10,19 @@ capable of achieving basic tasks. """ -from concurrent.futures import as_completed, ThreadPoolExecutor import logging import random +from concurrent.futures import as_completed, ThreadPoolExecutor + import requests -import threading -from hyper import HTTP20Connection, HTTP11Connection + +from hyper import HTTP20Connection, HTTP11Connection, HTTPConnection +from hyper.common.util import HTTPVersion from hyper.contrib import HTTP20Adapter logging.basicConfig(level=logging.INFO) + class TestHyperActuallyWorks(object): def test_abusing_nghttp2_org(self): """ @@ -92,32 +95,32 @@ def do_one_page(path): assert text_data max_workers = len(paths) - with ThreadPoolExecutor(max_workers=len(paths)) as ex: + with ThreadPoolExecutor(max_workers=max_workers) as ex: futures = [ex.submit(do_one_page, p) for p in paths] for f in as_completed(futures): f.result() - def test_hitting_http2bin_org(self): + def test_hitting_nghttp2_org(self): """ - This test function uses the requests adapter and requests to talk to http2bin. + This test function uses the requests adapter and requests to talk to nghttp2.org/httpbin. """ s = requests.Session() a = HTTP20Adapter() - s.mount('https://http2bin', a) - s.mount('https://www.http2bin', a) + s.mount('https://nghttp2', a) + s.mount('https://www.nghttp2', a) # Here are some nice URLs. 
         urls = [
-            'https://www.http2bin.org/',
-            'https://www.http2bin.org/ip',
-            'https://www.http2bin.org/user-agent',
-            'https://www.http2bin.org/headers',
-            'https://www.http2bin.org/get',
-            'https://http2bin.org/',
-            'https://http2bin.org/ip',
-            'https://http2bin.org/user-agent',
-            'https://http2bin.org/headers',
-            'https://http2bin.org/get',
+            'https://www.nghttp2.org/httpbin/',
+            'https://www.nghttp2.org/httpbin/ip',
+            'https://www.nghttp2.org/httpbin/user-agent',
+            'https://www.nghttp2.org/httpbin/headers',
+            'https://www.nghttp2.org/httpbin/get',
+            'https://nghttp2.org/httpbin/',
+            'https://nghttp2.org/httpbin/ip',
+            'https://nghttp2.org/httpbin/user-agent',
+            'https://nghttp2.org/httpbin/headers',
+            'https://nghttp2.org/httpbin/get',
         ]
 
         # Go get everything.
@@ -131,7 +134,7 @@ def test_hitting_httpbin_org_http11(self):
         """
         This test function uses hyper's HTTP/1.1 support to talk to httpbin
         """
-        c = HTTP11Connection('httpbin.org')
+        c = HTTP11Connection('httpbin.org:443')
 
         # Here are some nice URLs.
         urls = [
@@ -149,3 +152,43 @@ def test_hitting_httpbin_org_http11(self):
 
         assert resp.status == 200
         assert resp.read()
+
+    def test_hitting_nghttp2_org_via_h2c_upgrade(self):
+        """
+        This tests our support for cleartext HTTP/1.1 -> HTTP/2 upgrade
+        against the most common open source HTTP/2 server implementation.
+        """
+        c = HTTPConnection('nghttp2.org:80')
+
+        # Make the request.
+        c.request('GET', '/')
+        response = c.get_response()
+
+        # Check that the response is OK and that we did upgrade to HTTP/2.
+        assert response.status == 200
+        assert response.read()
+        assert response.version == HTTPVersion.http20
+
+    def test_http11_response_body_length(self):
+        """
+        This test function checks the expected length of the HTTP/1.1-response-body.
+        """
+        c = HTTP11Connection('httpbin.org:443')
+
+        # Make some HTTP/1.1 requests.
+        methods = ['GET', 'HEAD']
+        for method in methods:
+            c.request(method, '/')
+            resp = c.get_response()
+
+            # Check the expected length of the body.
+ if method == 'HEAD': + assert resp._length == 0 + assert resp.read() == b'' + else: + try: + content_length = int(resp.headers[b'Content-Length'][0]) + except KeyError: + continue + assert resp._length == content_length + assert resp.read() diff --git a/test_requirements.txt b/test_requirements.txt index e7deae72..cae2fbc6 100644 --- a/test_requirements.txt +++ b/test_requirements.txt @@ -1,4 +1,4 @@ -pytest +pytest>=3.0 pytest-xdist pytest-cov requests diff --git a/tox.ini b/tox.ini index a35f850f..046619e9 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py27, py34, py35, pypy, lint +envlist = py{27,34,35,36}, pypy, lint [testenv] deps= -r{toxinidir}/test_requirements.txt @@ -12,6 +12,6 @@ commands= commands= py.test {toxinidir}/test/ [testenv:lint] -basepython=python3.5 +basepython=python3 deps = flake8==2.5.4 -commands = flake8 --max-complexity 15 --exclude "hyper/packages/*" hyper test +commands = flake8 hyper test