diff --git a/ipinfo/__init__.py b/ipinfo/__init__.py index eef2046..a456705 100644 --- a/ipinfo/__init__.py +++ b/ipinfo/__init__.py @@ -6,6 +6,8 @@ from .handler_core_async import AsyncHandlerCore from .handler_plus import HandlerPlus from .handler_plus_async import AsyncHandlerPlus +from .handler_resproxy import HandlerResProxy +from .handler_resproxy_async import AsyncHandlerResProxy def getHandler(access_token=None, **kwargs): @@ -28,6 +30,11 @@ def getHandlerPlus(access_token=None, **kwargs): return HandlerPlus(access_token, **kwargs) +def getHandlerResProxy(access_token=None, **kwargs): + """Create and return HandlerResProxy object.""" + return HandlerResProxy(access_token, **kwargs) + + def getHandlerAsync(access_token=None, **kwargs): """Create an return an asynchronous Handler object.""" return AsyncHandler(access_token, **kwargs) @@ -46,3 +53,8 @@ def getHandlerAsyncCore(access_token=None, **kwargs): def getHandlerAsyncPlus(access_token=None, **kwargs): """Create and return asynchronous HandlerPlus object.""" return AsyncHandlerPlus(access_token, **kwargs) + + +def getHandlerAsyncResProxy(access_token=None, **kwargs): + """Create and return asynchronous HandlerResProxy object.""" + return AsyncHandlerResProxy(access_token, **kwargs) diff --git a/ipinfo/handler_resproxy.py b/ipinfo/handler_resproxy.py new file mode 100644 index 0000000..b32ec8e --- /dev/null +++ b/ipinfo/handler_resproxy.py @@ -0,0 +1,225 @@ +""" +Residential Proxy API client handler for fetching data from the IPinfo Residential Proxy service. +""" + +import re +import time +from ipaddress import IPv4Address, IPv6Address + +import requests + +from . import handler_utils +from .bogon import is_bogon +from .cache.default import DefaultCache +from .details import Details +from .error import APIError +from .exceptions import RequestQuotaExceededError, TimeoutExceededError +from .handler_utils import ( + BATCH_MAX_SIZE, + BATCH_REQ_TIMEOUT_DEFAULT, + CACHE_MAXSIZE, + CACHE_TTL, + RESPROXY_API_URL, + REQUEST_TIMEOUT_DEFAULT, + cache_key, +) + + +class HandlerResProxy: + """ + Allows client to request data for specified IP address using the ResProxy API. + ResProxy API provides contextual data around Residential, Mobile, and + Datacenter proxies. + Instantiates and maintains access to cache. + """ + + def __init__(self, access_token=None, **kwargs): + """ + Initialize the HandlerResProxy object with the cache initialized. + """ + self.access_token = access_token + + # setup request options + self.request_options = kwargs.get("request_options", {}) + if "timeout" not in self.request_options: + self.request_options["timeout"] = REQUEST_TIMEOUT_DEFAULT + + # setup cache + if "cache" in kwargs: + self.cache = kwargs["cache"] + else: + cache_options = kwargs.get("cache_options", {}) + if "maxsize" not in cache_options: + cache_options["maxsize"] = CACHE_MAXSIZE + if "ttl" not in cache_options: + cache_options["ttl"] = CACHE_TTL + self.cache = DefaultCache(**cache_options) + + # set up custom headers + self.headers = kwargs.get("headers", None) + + def getDetails(self, ip_address=None, timeout=None): + """ + Get ResProxy details for the specified IP as a Details object. + + If `timeout` is not `None`, it will override the client-level timeout + just for this operation. + """ + # If the supplied IP address uses the object defined in the built-in + # module ipaddress extract the appropriate string notation before + # formatting the URL. 
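+        # (e.g. IPv4Address("139.5.0.122") yields the string "139.5.0.122",
+        # and an IPv6Address is expanded to its full exploded form).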
+        if isinstance(ip_address, (IPv4Address, IPv6Address)):
+            ip_address = ip_address.exploded
+
+        # check if bogon.
+        if ip_address and is_bogon(ip_address):
+            details = {}
+            details["ip"] = ip_address
+            details["bogon"] = True
+            return Details(details)
+
+        # check cache first.
+        try:
+            cached_data = self.cache[cache_key(ip_address)]
+            return Details(cached_data)
+        except KeyError:
+            pass
+
+        # prepare request http options
+        req_opts = {**self.request_options}
+        if timeout is not None:
+            req_opts["timeout"] = timeout
+
+        # Build URL
+        url = RESPROXY_API_URL
+        if ip_address:
+            url += "/" + ip_address
+
+        headers = handler_utils.get_headers(self.access_token, self.headers)
+        response = requests.get(url, headers=headers, **req_opts)
+
+        if response.status_code == 429:
+            raise RequestQuotaExceededError()
+        if response.status_code >= 400:
+            error_code = response.status_code
+            content_type = response.headers.get("Content-Type")
+            if content_type == "application/json":
+                error_response = response.json()
+            else:
+                error_response = {"error": response.text}
+            raise APIError(error_code, error_response)
+
+        details = response.json()
+
+        # add cache
+        self.cache[cache_key(ip_address)] = details
+
+        return Details(details)
+
+    def getBatchDetails(
+        self,
+        ip_addresses,
+        batch_size=None,
+        timeout_per_batch=BATCH_REQ_TIMEOUT_DEFAULT,
+        timeout_total=None,
+        raise_on_fail=True,
+    ):
+        """
+        Get ResProxy details for a batch of IP addresses at once.
+
+        There is no specified limit to the number of IPs this function can
+        accept; it can handle as many as the user can fit in RAM (along with
+        all of the response data, which is at least an order of magnitude
+        larger than the input list).
+
+        The input list is broken up into batches to abide by API requirements.
+        The batch size can be adjusted with `batch_size` but is clipped to
+        `BATCH_MAX_SIZE`.
+        Defaults to `BATCH_MAX_SIZE`.
+
+        For each batch, `timeout_per_batch` indicates the maximum seconds to
+        spend waiting for the HTTP request to complete. If any batch fails with
+        this timeout, the whole operation fails.
+        Defaults to `BATCH_REQ_TIMEOUT_DEFAULT` seconds.
+
+        `timeout_total` is a seconds-denominated hard-timeout for the time
+        spent in HTTP operations; regardless of whether all batches have
+        succeeded so far, if `timeout_total` is reached, the whole operation
+        will fail by raising `TimeoutExceededError`.
+        Defaults to being turned off.
+
+        `raise_on_fail`, if turned off, will return any result retrieved so far
+        rather than raise an exception when errors occur, including timeout and
+        quota errors.
+        Defaults to on.
+        """
+        if batch_size is None or batch_size > BATCH_MAX_SIZE:
+            batch_size = BATCH_MAX_SIZE
+
+        result = {}
+        lookup_addresses = []
+
+        # pre-populate with anything we've got in the cache, and keep around
+        # the IPs not in the cache.
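+        # Bogons are answered locally and are never sent to the API.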
+ for ip_address in ip_addresses: + if isinstance(ip_address, (IPv4Address, IPv6Address)): + ip_address = ip_address.exploded + + if ip_address and is_bogon(ip_address): + details = {} + details["ip"] = ip_address + details["bogon"] = True + result[ip_address] = Details(details) + else: + try: + cached_data = self.cache[cache_key(ip_address)] + result[ip_address] = Details(cached_data) + except KeyError: + # prepend the url pattern for bulk lookup operations + lookup_addresses.append(f"resproxy/{ip_address}") + + if len(lookup_addresses) == 0: + return result + + if timeout_total is not None: + start_time = time.time() + + # prepare req options + req_opts = {**self.request_options, "timeout": timeout_per_batch} + + # loop over batch chunks and do lookup for each. + url = "https://api.ipinfo.io/batch" + headers = handler_utils.get_headers(self.access_token, self.headers) + headers["content-type"] = "application/json" + + for i in range(0, len(lookup_addresses), batch_size): + # quit if total timeout is reached + if timeout_total is not None and time.time() - start_time > timeout_total: + return handler_utils.return_or_fail( + raise_on_fail, TimeoutExceededError(), result + ) + + chunk = lookup_addresses[i : i + batch_size] + + # lookup + try: + response = requests.post(url, json=chunk, headers=headers, **req_opts) + except Exception as e: + return handler_utils.return_or_fail(raise_on_fail, e, result) + + try: + if response.status_code == 429: + raise RequestQuotaExceededError() + response.raise_for_status() + except Exception as e: + return handler_utils.return_or_fail(raise_on_fail, e, result) + + json_response = response.json() + + for ip_address, data in json_response.items(): + unwound_ip_address = re.sub(r"^resproxy\/", "", ip_address) + # Cache and format the data + self.cache[cache_key(unwound_ip_address)] = data + result[unwound_ip_address] = Details(data) + + return result diff --git a/ipinfo/handler_resproxy_async.py b/ipinfo/handler_resproxy_async.py new file mode 100644 index 0000000..b1bfc67 --- /dev/null +++ b/ipinfo/handler_resproxy_async.py @@ -0,0 +1,293 @@ +""" +Residential Proxy API client asynchronous handler for fetching data from the +IPinfo Residential Proxy service. +""" + +import re +import json +import asyncio +from ipaddress import IPv4Address, IPv6Address + +import aiohttp + +from . import handler_utils +from .bogon import is_bogon +from .cache.default import DefaultCache +from .details import Details +from .error import APIError +from .exceptions import RequestQuotaExceededError, TimeoutExceededError +from .handler_utils import ( + BATCH_MAX_SIZE, + BATCH_REQ_TIMEOUT_DEFAULT, + CACHE_MAXSIZE, + CACHE_TTL, + RESPROXY_API_URL, + REQUEST_TIMEOUT_DEFAULT, + cache_key, +) + +class AsyncHandlerResProxy: + """ + Allows client to request data asynchronously for specified IP address using + the ResProxy API. ResProxy API provides contextual data around Residential, + Mobile, and Datacenter proxies. + Instantiates and maintains access to cache. + """ + def __init__(self, access_token=None, **kwargs): + """ + Initialize the AsyncHandlerResProxy object with the cache initialized. 
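+
+        The underlying aiohttp session is created lazily on first use; await
+        init() to create it eagerly.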
+ """ + self.access_token = access_token + + # setup request options + self.request_options = kwargs.get("request_options", {}) + if "timeout" not in self.request_options: + self.request_options["timeout"] = REQUEST_TIMEOUT_DEFAULT + + # setup aiohttp + self.httpsess = None + + # setup cache + if "cache" in kwargs: + self.cache = kwargs["cache"] + else: + cache_options = kwargs.get("cache_options", {}) + if "maxsize" not in cache_options: + cache_options["maxsize"] = CACHE_MAXSIZE + if "ttl" not in cache_options: + cache_options["ttl"] = CACHE_TTL + self.cache = DefaultCache(**cache_options) + + # set up custom headers + self.headers = kwargs.get("headers", None) + + async def init(self): + """ + Initializes internal aiohttp connection pool. + + This isn't _required_, as the pool is initialized lazily when needed. + But in case you require non-lazy initialization, you may await this. + + This is idempotent. + """ + await self._ensure_aiohttp_ready() + + async def deinit(self): + """ + Deinitialize the async handler. + + This is required in case you need to let go of the memory/state + associated with the async handler in a long-running process. + + This is idempotent. + """ + if self.httpsess: + await self.httpsess.close() + self.httpsess = None + + async def getDetails(self, ip_address=None, timeout=None): + """ + Get ResProxy details for the specified IP as a Details object. + + If `timeout` is not `None`, it will override the client-level timeout + just for this operation. + """ + self._ensure_aiohttp_ready() + + # If the supplied IP address uses the object defined in the built-in + # module ipaddress extract the appropriate string notation before + # formatting the URL. + if isinstance(ip_address, (IPv4Address, IPv6Address)): + ip_address = ip_address.exploded + + # check if bogon. + if ip_address and is_bogon(ip_address): + details = {} + details["ip"] = ip_address + details["bogon"] = True + return Details(details) + + # check cache first. + try: + cached_data = self.cache[cache_key(ip_address)] + return Details(cached_data) + except KeyError: + pass + + # not in cache, do http request + url = RESPROXY_API_URL + if ip_address: + url += "/" + ip_address + + headers = handler_utils.get_headers(self.access_token, self.headers) + req_opts = {} + if timeout is not None: + req_opts["timeout"] = timeout + + async with self.httpsess.get(url, headers=headers, **req_opts) as resp: + if resp.status == 429: + raise RequestQuotaExceededError() + if resp.status >= 400: + error_code = resp.status + content_type = resp.headers.get("Content-Type") + if content_type == "application/json": + error_response = resp.json() + else: + error_response = {"error": resp.text()} + raise APIError(error_code, error_response) + details = await resp.json() + + self.cache[cache_key(ip_address)] = details + return Details(details) + + async def getBatchDetails( + self, + ip_addresses, + batch_size=None, + timeout_per_batch=BATCH_REQ_TIMEOUT_DEFAULT, + timeout_total=None, + raise_on_fail=True, + ): + """ + Get ResProxy details for a batch of IP addresses at once. + + There is no specified limit to the number of IPs this function can + accept; it can handle as much as the user can fit in RAM (along with + all of the response data, which is at least a magnitude larger than the + input list). + + The input list is broken up into batches to abide by API requirements. + The batch size can be adjusted with `batch_size` but is clipped to + `BATCH_MAX_SIZE`. + Defaults to `BATCH_MAX_SIZE`. 
+
+        For each batch, `timeout_per_batch` indicates the maximum seconds to
+        spend waiting for the HTTP request to complete. If any batch fails with
+        this timeout, the whole operation fails.
+        Defaults to `BATCH_REQ_TIMEOUT_DEFAULT` seconds.
+
+        `timeout_total` is a seconds-denominated hard-timeout for the time
+        spent in HTTP operations; regardless of whether all batches have
+        succeeded so far, if `timeout_total` is reached, the whole operation
+        will fail by raising `TimeoutExceededError`.
+        Defaults to being turned off.
+
+        `raise_on_fail`, if turned off, will return any result retrieved so far
+        rather than raise an exception when errors occur, including timeout and
+        quota errors.
+        Defaults to on.
+        """
+        self._ensure_aiohttp_ready()
+
+        if batch_size is None or batch_size > BATCH_MAX_SIZE:
+            batch_size = BATCH_MAX_SIZE
+
+        result = {}
+        lookup_addresses = []
+
+        # pre-populate with anything we've got in the cache, and keep around
+        # the IPs not in the cache.
+        for ip_address in ip_addresses:
+            if isinstance(ip_address, (IPv4Address, IPv6Address)):
+                ip_address = ip_address.exploded
+
+            if ip_address and is_bogon(ip_address):
+                details = {}
+                details["ip"] = ip_address
+                details["bogon"] = True
+                result[ip_address] = Details(details)
+            else:
+                try:
+                    cached_data = self.cache[cache_key(ip_address)]
+                    result[ip_address] = Details(cached_data)
+                except KeyError:
+                    # prepend the url pattern for bulk lookup operations
+                    lookup_addresses.append(f"resproxy/{ip_address}")
+
+        if len(lookup_addresses) == 0:
+            return result
+
+        # Loop over batch chunks and prepare coroutines for each.
+        url = "https://api.ipinfo.io/batch"
+        headers = handler_utils.get_headers(self.access_token, self.headers)
+        headers["content-type"] = "application/json"
+
+        tasks = [
+            asyncio.create_task(
+                self._do_batch_req(
+                    lookup_addresses[i : i + batch_size],
+                    url,
+                    headers,
+                    timeout_per_batch,
+                    raise_on_fail,
+                    result,
+                )
+            )
+            for i in range(0, len(lookup_addresses), batch_size)
+        ]
+
+        try:
+            done, pending = await asyncio.wait(
+                tasks,
+                timeout=timeout_total,
+                return_when=asyncio.FIRST_EXCEPTION,
+            )
+
+            # re-raise any exception raised inside a batch coroutine (with
+            # `raise_on_fail` on, `return_or_fail` raises inside the task and
+            # the error would otherwise be silently dropped here).
+            for task in done:
+                task.result()
+
+            # if all done, return result.
+            if not pending:
+                return result
+
+            # if some had a timeout, first cancel timed out stuff and wait for
+            # cleanup. then exit with return_or_fail.
+            for co in pending:
+                try:
+                    co.cancel()
+                    await co
+                except asyncio.CancelledError:
+                    pass
+
+            return handler_utils.return_or_fail(
+                raise_on_fail, TimeoutExceededError(), result
+            )
+        except Exception as e:
+            return handler_utils.return_or_fail(raise_on_fail, e, result)
+
+    async def _do_batch_req(
+        self, chunk, url, headers, timeout_per_batch, raise_on_fail, result
+    ):
+        """
+        Coroutine which will do the actual POST request for getBatchDetails.
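+
+        Successful lookups are cached and written into the shared `result`
+        dict; failures are handed off to `handler_utils.return_or_fail`.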
+ """ + try: + resp = await self.httpsess.post( + url, + data=json.dumps(chunk), + headers=headers, + timeout=timeout_per_batch, + ) + except Exception as e: + return handler_utils.return_or_fail(raise_on_fail, e, None) + + # gather data + try: + if resp.status == 429: + raise RequestQuotaExceededError() + resp.raise_for_status() + except Exception as e: + return handler_utils.return_or_fail(raise_on_fail, e, None) + + json_resp = await resp.json() + + # format & fill up cache + for ip_address, data in json_resp.items(): + unwound_ip_address = re.sub(r"^resproxy\/", "", ip_address) + self.cache[cache_key(unwound_ip_address)] = data + result[unwound_ip_address] = Details(data) + + def _ensure_aiohttp_ready(self): + """Ensures aiohttp internal state is initialized.""" + if self.httpsess: + return + + timeout = aiohttp.ClientTimeout(total=self.request_options["timeout"]) + self.httpsess = aiohttp.ClientSession(timeout=timeout) diff --git a/ipinfo/handler_utils.py b/ipinfo/handler_utils.py index 9beb833..f37bbfb 100644 --- a/ipinfo/handler_utils.py +++ b/ipinfo/handler_utils.py @@ -21,6 +21,9 @@ # Base URL for the IPinfo Plus API (same as Core) PLUS_API_URL = "https://api.ipinfo.io/lookup" +# Base URL for the IPinfo Residential Proxy API +RESPROXY_API_URL = "https://ipinfo.io/resproxy" + # Base URL to get country flag image link. # "PK" -> "https://cdn.ipinfo.io/static/images/countries-flags/PK.svg" COUNTRY_FLAGS_URL = "https://cdn.ipinfo.io/static/images/countries-flags/" diff --git a/tests/handler_resproxy_async_test.py b/tests/handler_resproxy_async_test.py new file mode 100644 index 0000000..58baf21 --- /dev/null +++ b/tests/handler_resproxy_async_test.py @@ -0,0 +1,187 @@ +import os + +import pytest + +from ipinfo import handler_utils +from ipinfo.cache.default import DefaultCache +from ipinfo.details import Details +from ipinfo.handler_resproxy_async import AsyncHandlerResProxy + + +@pytest.mark.asyncio +async def test_init(): + token = "mytesttoken" + handler = AsyncHandlerResProxy(token) + assert handler.access_token == token + assert isinstance(handler.cache, DefaultCache) + await handler.deinit() + + +@pytest.mark.asyncio +async def test_headers(): + token = "mytesttoken" + handler = AsyncHandlerResProxy(token, headers={"custom_field": "yes"}) + headers = handler_utils.get_headers(token, handler.headers) + await handler.deinit() + + assert "user-agent" in headers + assert "accept" in headers + assert "authorization" in headers + assert "custom_field" in headers + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call ResProxy API without token", +) +@pytest.mark.asyncio +async def test_get_details(): + """Test basic ResProxy API lookup""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = AsyncHandlerResProxy(token) + details = await handler.getDetails("139.5.0.122") + + # Should return Details object + assert isinstance(details, Details) + assert details.ip == "139.5.0.122" + + # Check ResProxy-specific fields + assert hasattr(details, "last_seen") + assert hasattr(details, "percent_days_seen") + assert hasattr(details, "service") + + await handler.deinit() + + +############# +# BOGON TESTS +############# + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call ResProxy API without token", +) +@pytest.mark.asyncio +async def test_bogon_details(): + token = os.environ.get("IPINFO_TOKEN", "") + handler = AsyncHandlerResProxy(token) + details = await handler.getDetails("127.0.0.1") + assert isinstance(details, Details) + assert 
details.all == {"bogon": True, "ip": "127.0.0.1"} + await handler.deinit() + + +##################### +# BATCH TESTS +##################### + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call ResProxy API without token", +) +@pytest.mark.asyncio +async def test_batch_ips(): + """Test batch request with IPs""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = AsyncHandlerResProxy(token) + results = await handler.getBatchDetails(["139.5.0.122", "45.95.168.1"]) + + assert len(results) == 2 + assert "139.5.0.122" in results + assert "45.95.168.1" in results + + # Both should be Details objects + assert isinstance(results["139.5.0.122"], Details) + assert isinstance(results["45.95.168.1"], Details) + + # Check ResProxy-specific fields + assert hasattr(results["139.5.0.122"], "last_seen") + assert hasattr(results["139.5.0.122"], "percent_days_seen") + assert hasattr(results["139.5.0.122"], "service") + + await handler.deinit() + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call ResProxy API without token", +) +@pytest.mark.asyncio +async def test_batch_with_bogon(): + """Test batch including bogon IPs""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = AsyncHandlerResProxy(token) + results = await handler.getBatchDetails( + [ + "139.5.0.122", + "127.0.0.1", # Bogon + "45.95.168.1", + ] + ) + + assert len(results) == 3 + + # Normal IPs should be Details + assert isinstance(results["139.5.0.122"], Details) + assert isinstance(results["45.95.168.1"], Details) + + # Bogon should also be Details with bogon flag + assert isinstance(results["127.0.0.1"], Details) + assert results["127.0.0.1"].bogon == True + + await handler.deinit() + + +##################### +# CACHING TESTS +##################### + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call ResProxy API without token", +) +@pytest.mark.asyncio +async def test_caching(): + """Test that results are properly cached""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = AsyncHandlerResProxy(token) + + # First request - should hit API + details1 = await handler.getDetails("139.5.0.122") + assert isinstance(details1, Details) + + # Second request - should come from cache + details2 = await handler.getDetails("139.5.0.122") + assert isinstance(details2, Details) + assert details2.ip == details1.ip + + # Verify cache key exists + cache_key_val = handler_utils.cache_key("139.5.0.122") + assert cache_key_val in handler.cache + + await handler.deinit() + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call ResProxy API without token", +) +@pytest.mark.asyncio +async def test_batch_caching(): + """Test that batch results are properly cached""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = AsyncHandlerResProxy(token) + + # First batch request + results1 = await handler.getBatchDetails(["139.5.0.122", "45.95.168.1"]) + assert len(results1) == 2 + + # Second batch with same IPs (should come from cache) + results2 = await handler.getBatchDetails(["139.5.0.122", "45.95.168.1"]) + assert len(results2) == 2 + assert results2["139.5.0.122"].ip == results1["139.5.0.122"].ip + + await handler.deinit() diff --git a/tests/handler_resproxy_test.py b/tests/handler_resproxy_test.py new file mode 100644 index 0000000..d924d01 --- /dev/null +++ b/tests/handler_resproxy_test.py @@ -0,0 +1,166 @@ +import os + +import pytest + +from ipinfo import handler_utils +from ipinfo.cache.default import DefaultCache +from 
ipinfo.details import Details +from ipinfo.handler_resproxy import HandlerResProxy + + +def test_init(): + token = "mytesttoken" + handler = HandlerResProxy(token) + assert handler.access_token == token + assert isinstance(handler.cache, DefaultCache) + + +def test_headers(): + token = "mytesttoken" + handler = HandlerResProxy(token, headers={"custom_field": "yes"}) + headers = handler_utils.get_headers(token, handler.headers) + + assert "user-agent" in headers + assert "accept" in headers + assert "authorization" in headers + assert "custom_field" in headers + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call ResProxy API without token", +) +def test_get_details(): + """Test basic ResProxy API lookup""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = HandlerResProxy(token) + details = handler.getDetails("139.5.0.122") + + # Should return Details object + assert isinstance(details, Details) + assert details.ip == "139.5.0.122" + + # Check ResProxy-specific fields + assert hasattr(details, "last_seen") + assert hasattr(details, "percent_days_seen") + assert hasattr(details, "service") + + +############# +# BOGON TESTS +############# + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call ResProxy API without token", +) +def test_bogon_details(): + token = os.environ.get("IPINFO_TOKEN", "") + handler = HandlerResProxy(token) + details = handler.getDetails("127.0.0.1") + assert isinstance(details, Details) + assert details.all == {"bogon": True, "ip": "127.0.0.1"} + + +##################### +# BATCH TESTS +##################### + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call ResProxy API without token", +) +def test_batch_ips(): + """Test batch request with IPs""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = HandlerResProxy(token) + results = handler.getBatchDetails(["139.5.0.122", "45.95.168.1"]) + + assert len(results) == 2 + assert "139.5.0.122" in results + assert "45.95.168.1" in results + + # Both should be Details objects + assert isinstance(results["139.5.0.122"], Details) + assert isinstance(results["45.95.168.1"], Details) + + # Check ResProxy-specific fields + assert hasattr(results["139.5.0.122"], "last_seen") + assert hasattr(results["139.5.0.122"], "percent_days_seen") + assert hasattr(results["139.5.0.122"], "service") + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call ResProxy API without token", +) +def test_batch_with_bogon(): + """Test batch including bogon IPs""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = HandlerResProxy(token) + results = handler.getBatchDetails( + [ + "139.5.0.122", + "127.0.0.1", # Bogon + "45.95.168.1", + ] + ) + + assert len(results) == 3 + + # Normal IPs should be Details + assert isinstance(results["139.5.0.122"], Details) + assert isinstance(results["45.95.168.1"], Details) + + # Bogon should also be Details with bogon flag + assert isinstance(results["127.0.0.1"], Details) + assert results["127.0.0.1"].bogon == True + + +##################### +# CACHING TESTS +##################### + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call ResProxy API without token", +) +def test_caching(): + """Test that results are properly cached""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = HandlerResProxy(token) + + # First request - should hit API + details1 = handler.getDetails("139.5.0.122") + assert isinstance(details1, Details) + + # Second request - should 
come from cache + details2 = handler.getDetails("139.5.0.122") + assert isinstance(details2, Details) + assert details2.ip == details1.ip + + # Verify cache key exists + cache_key_val = handler_utils.cache_key("139.5.0.122") + assert cache_key_val in handler.cache + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call ResProxy API without token", +) +def test_batch_caching(): + """Test that batch results are properly cached""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = HandlerResProxy(token) + + # First batch request + results1 = handler.getBatchDetails(["139.5.0.122", "45.95.168.1"]) + assert len(results1) == 2 + + # Second batch with same IPs (should come from cache) + results2 = handler.getBatchDetails(["139.5.0.122", "45.95.168.1"]) + assert len(results2) == 2 + assert results2["139.5.0.122"].ip == results1["139.5.0.122"].ip
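
A minimal usage sketch for the new handlers, using only the API added in this diff (the token string is a placeholder, and the example IP and the `service`/`bogon` fields follow the tests above):

```python
import asyncio

import ipinfo

# Synchronous lookup via the ResProxy API.
handler = ipinfo.getHandlerResProxy("mytoken")
details = handler.getDetails("139.5.0.122")
print(details.ip, getattr(details, "service", None))

# Batch lookup; bogons are resolved locally without an API call.
results = handler.getBatchDetails(["139.5.0.122", "127.0.0.1"])
print(results["127.0.0.1"].bogon)  # True


# Asynchronous variant; deinit() releases the aiohttp session.
async def main():
    async_handler = ipinfo.getHandlerAsyncResProxy("mytoken")
    details = await async_handler.getDetails("139.5.0.122")
    print(details.ip)
    await async_handler.deinit()


asyncio.run(main())
```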