File: test_proxy.py

package info (click to toggle)
python-moto 5.1.18-3
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid
  • size: 116,520 kB
  • sloc: python: 636,725; javascript: 181; makefile: 39; sh: 3
file content (130 lines) | stat: -rw-r--r-- 4,555 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
from http.server import BaseHTTPRequestHandler
from typing import Any
from unittest import SkipTest

import pytest
import requests

from moto import settings
from tests.test_core.utilities import SimpleServer

# Moto-API control endpoint used to configure/query which URLs the proxy
# passes through to the real backend (GET/POST/DELETE in the tests below).
url = "http://motoapi.amazonaws.com/moto-api/proxy/passthrough"


class WebRequestHandler(BaseHTTPRequestHandler):
    """Minimal HTTP handler: every GET is answered with a fixed body.

    Used as the "real" backend so the tests can verify that the proxy
    passes requests through instead of intercepting them.
    """

    def do_GET(self) -> None:
        """Respond 200 with a JSON-typed payload of ``b"real response"``."""
        status, content_type = 200, "application/json"
        self.send_response(status)
        self.send_header("Content-Type", content_type)
        self.end_headers()
        self.wfile.write(b"real response")


@pytest.fixture
def server() -> Any:  # type: ignore[misc]
    """Yield a running SimpleServer backed by WebRequestHandler; stop it on teardown."""
    backend = SimpleServer(WebRequestHandler)
    backend.start()
    yield backend
    backend.stop()


def test_real_request_errors() -> None:
    """A request for an unregistered host gets moto's 404 error, not the real site."""
    if not settings.is_test_proxy_mode():
        raise SkipTest("Can only be tested in ProxyMode")

    proxies = {
        "http": settings.test_proxy_mode_endpoint(),
        "https": settings.test_proxy_mode_endpoint(),
    }

    # Delete all to ensure we're starting with a clean slate
    requests.delete(url, proxies=proxies)

    # Nothing is configured for passthrough, so moto rejects the request itself.
    response = requests.get("http://httpbin.org/robots.txt", proxies=proxies)
    assert response.status_code == 404
    assert response.content == b"AWS Service not recognized or supported"


def test_configure_passedthrough_urls() -> None:
    """Exercise the CRUD lifecycle of the proxy's passthrough configuration."""
    if not settings.is_test_proxy_mode():
        raise SkipTest("Can only be tested in ProxyMode")

    proxies = {
        "http": settings.test_proxy_mode_endpoint(),
        "https": settings.test_proxy_mode_endpoint(),
    }

    # Delete all to ensure we're starting with a clean slate
    requests.delete(url, proxies=proxies)

    first_url = "http://httpbin.org/robots.txt"
    second_url = "http://othersite.org/"
    https_host = "https://othersite.org/"

    # A single URL can be registered, and is echoed back on creation.
    created = requests.post(url, json={"http_urls": [first_url]}, proxies=proxies)
    assert created.status_code == 201
    assert created.json() == {"http_urls": [first_url], "https_hosts": []}

    # Registering another URL accumulates rather than replaces.
    second_post = requests.post(
        url, json={"http_urls": [second_url]}, proxies=proxies
    ).json()
    assert first_url in second_post["http_urls"]
    assert second_url in second_post["http_urls"]

    # Posting an already-known URL is a no-op (duplicates are ignored).
    requests.post(url, json={"http_urls": [first_url]}, proxies=proxies)

    # The full configuration is retrievable via GET.
    current = requests.get(url, proxies=proxies).json()
    assert first_url in current["http_urls"]
    assert second_url in current["http_urls"]
    assert current["https_hosts"] == []

    # HTTPS hosts are tracked in a separate list.
    with_https = requests.post(
        url, json={"https_hosts": [https_host]}, proxies=proxies
    ).json()
    assert first_url in with_https["http_urls"]
    assert second_url in with_https["http_urls"]
    assert with_https["https_hosts"] == [https_host]

    # A single DELETE clears everything.
    requests.delete(url, proxies=proxies)
    assert requests.get(url, proxies=proxies).json() == {
        "http_urls": [],
        "https_hosts": [],
    }


def test_http_get_request_can_be_passed_through(server: Any) -> None:
    """A URL registered for passthrough is forwarded to the real local backend.

    Uses the ``server`` fixture (a local HTTP server returning
    ``b"real response"``) as the passthrough target.
    """
    if not settings.is_test_proxy_mode():
        raise SkipTest("Can only be tested in ProxyMode")

    http_proxy = settings.test_proxy_mode_endpoint()
    https_proxy = settings.test_proxy_mode_endpoint()
    proxies = {"http": http_proxy, "https": https_proxy}

    # Delete all to ensure we're starting with a clean slate
    requests.delete(url, proxies=proxies)

    # Configure our URL as the one to passthrough.
    # Only the port matters here — the target is addressed via 127.0.0.1.
    _, port = server.get_host_and_port()
    target_url = f"http://127.0.0.1:{port}/robots.txt"
    requests.post(url, json={"http_urls": [target_url]}, proxies=proxies)

    # The proxy should now forward the request instead of intercepting it.
    resp = requests.get(target_url, proxies=proxies)
    assert resp.status_code == 200
    assert resp.content == b"real response"


@pytest.mark.skip(reason="Times out regularly")
def test_https_request_can_be_passed_through() -> None:
    """An HTTPS host registered for passthrough is forwarded to the real site.

    Skipped via the pytest marker (rather than an unconditional ``raise
    SkipTest`` as the first statement, which made the rest of the body
    unreachable dead code) so the test body stays lint-checkable and the
    skip reason shows up in pytest's collection report.
    """
    if not settings.is_test_proxy_mode():
        raise SkipTest("Can only be tested in ProxyMode")

    http_proxy = settings.test_proxy_mode_endpoint()
    https_proxy = settings.test_proxy_mode_endpoint()
    proxies = {"http": http_proxy, "https": https_proxy}

    # Delete all to ensure we're starting with a clean slate
    requests.delete(url, proxies=proxies)

    # Configure our URL as the one to passthrough
    target_url = "https://httpbin.org/ip"
    requests.post(url, json={"https_hosts": ["httpbin.org"]}, proxies=proxies)

    resp = requests.get(target_url, proxies=proxies)
    assert resp.status_code == 200
    assert "origin" in resp.json()