diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml new file mode 100644 index 0000000..9d443c8 --- /dev/null +++ b/.github/workflows/test.yaml @@ -0,0 +1,31 @@ +name: Test + +on: + push: + branches: + - master + pull_request: + branches: + - master + +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + - name: Run tests + run: pytest + env: + IPINFO_TOKEN: ${{ secrets.IPINFO_TOKEN }} diff --git a/CHANGELOG.md b/CHANGELOG.md index e38de1a..0d0db96 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,12 +1,20 @@ # IPInfo Changelog +## 5.2.1 + +- Fix Lite API `Detail` object not setting certain fields correctly + +## 5.2.0 + +- Add support for IPinfo Lite API + ## 5.1.1 - No-op release (testing Github Publishing) ## 5.1.0 -- fix getDetails JSON error on 5xx status code +- Fix getDetails JSON error on 5xx status code ## 5.0.1 diff --git a/README.md b/README.md index 5c3cbe7..8914435 100644 --- a/README.md +++ b/README.md @@ -2,10 +2,10 @@ This is the official Python client library for the IPinfo.io IP address API, allowing you to look up your own IP address, or get any of the following details for an IP: - - [IP geolocation](https://ipinfo.io/ip-geolocation-api) (city, region, country, postal code, latitude, and longitude) - - [ASN details](https://ipinfo.io/asn-api) (ISP or network operator, associated domain name, and type, such as business, hosting, or company) - - [Firmographics data](https://ipinfo.io/ip-company-api) (the name and domain of the business that uses the IP address) - - [Carrier information](https://ipinfo.io/ip-carrier-api) (the name of the mobile carrier and MNC and MCC for 
that carrier if the IP is used exclusively for mobile traffic) +- [IP geolocation](https://ipinfo.io/ip-geolocation-api) (city, region, country, postal code, latitude, and longitude) +- [ASN details](https://ipinfo.io/asn-api) (ISP or network operator, associated domain name, and type, such as business, hosting, or company) +- [Firmographics data](https://ipinfo.io/ip-company-api) (the name and domain of the business that uses the IP address) +- [Carrier information](https://ipinfo.io/ip-carrier-api) (the name of the mobile carrier and MNC and MCC for that carrier if the IP is used exclusively for mobile traffic) ## Getting Started @@ -13,6 +13,8 @@ You'll need an IPinfo API access token, which you can get by signing up for a fr The free plan is limited to 50,000 requests per month, and doesn't include some of the data fields such as IP type and company data. To enable all the data fields and additional request volumes see [https://ipinfo.io/pricing](https://ipinfo.io/pricing) +The library also supports the Lite API, see the [Lite API section](#lite-api) for more info. + ### Installation This package works with Python 3.5 or greater. However, we only officially @@ -67,6 +69,8 @@ New York City Internally the library uses `aiohttp`, but as long as you provide an event loop (as in this example via `asyncio`), it shouldn't matter. +NOTE: due to API changes in the `asyncio` library, the asynchronous implementation only supports Python version 3.9 and 3.10. + ### Usage The `Handler.getDetails()` method accepts an IP address as an optional, positional argument. If no IP address is specified, the API will return data for the IP address from which it receives the request. @@ -160,6 +164,22 @@ The IPinfo library can be authenticated with your IPinfo API token, which is pas 'timezone': 'America/Los_Angeles'} ``` +### Lite API + +The library gives the possibility to use the [Lite API](https://ipinfo.io/developers/lite-api) too, authentication with your token is still required. 
+ +The returned details are slightly different from the Core API. + +```python +>>> import ipinfo +>>> handler = ipinfo.getHandlerLite(access_token='123456789abc') +>>> details = handler.getDetails("8.8.8.8") +>>> details.country_code +'US' +>>> details.country +'United States' +``` + ### Caching In-memory caching of `details` data is provided by default via the [cachetools](https://cachetools.readthedocs.io/en/latest/) library. This uses an LRU (least recently used) cache with a TTL (time to live) by default. This means that values will be cached for the specified duration; if the cache's max size is reached, cache values will be invalidated as necessary, starting with the oldest cached value. @@ -295,6 +315,7 @@ When looking up an IP address, the response object includes `details.country_nam continents=continents ) ``` + ### Batch Operations Looking up a single IP at a time can be slow. It could be done concurrently diff --git a/ipinfo/__init__.py b/ipinfo/__init__.py index a7c8114..eef2046 100644 --- a/ipinfo/__init__.py +++ b/ipinfo/__init__.py @@ -1,5 +1,11 @@ +from .handler_lite import HandlerLite +from .handler_lite_async import AsyncHandlerLite from .handler import Handler from .handler_async import AsyncHandler +from .handler_core import HandlerCore +from .handler_core_async import AsyncHandlerCore +from .handler_plus import HandlerPlus +from .handler_plus_async import AsyncHandlerPlus def getHandler(access_token=None, **kwargs): @@ -7,6 +13,36 @@ def getHandler(access_token=None, **kwargs): return Handler(access_token, **kwargs) +def getHandlerLite(access_token=None, **kwargs): + """Create and return HandlerLite object.""" + return HandlerLite(access_token, **kwargs) + + +def getHandlerCore(access_token=None, **kwargs): + """Create and return HandlerCore object.""" + return HandlerCore(access_token, **kwargs) + + +def getHandlerPlus(access_token=None, **kwargs): + """Create and return HandlerPlus object.""" + return HandlerPlus(access_token, **kwargs) + + 
def getHandlerAsync(access_token=None, **kwargs): """Create an return an asynchronous Handler object.""" return AsyncHandler(access_token, **kwargs) + + +def getHandlerAsyncLite(access_token=None, **kwargs): + """Create and return asynchronous HandlerLite object.""" + return AsyncHandlerLite(access_token, **kwargs) + + +def getHandlerAsyncCore(access_token=None, **kwargs): + """Create and return asynchronous HandlerCore object.""" + return AsyncHandlerCore(access_token, **kwargs) + + +def getHandlerAsyncPlus(access_token=None, **kwargs): + """Create and return asynchronous HandlerPlus object.""" + return AsyncHandlerPlus(access_token, **kwargs) diff --git a/ipinfo/handler_core.py b/ipinfo/handler_core.py new file mode 100644 index 0000000..196b2e3 --- /dev/null +++ b/ipinfo/handler_core.py @@ -0,0 +1,300 @@ +""" +Core API client handler for fetching data from the IPinfo Core service. +""" + +import time +from ipaddress import IPv4Address, IPv6Address + +import requests + +from . import handler_utils +from .bogon import is_bogon +from .cache.default import DefaultCache +from .data import ( + continents, + countries, + countries_currencies, + countries_flags, + eu_countries, +) +from .details import Details +from .error import APIError +from .exceptions import RequestQuotaExceededError, TimeoutExceededError +from .handler_utils import ( + BATCH_MAX_SIZE, + BATCH_REQ_TIMEOUT_DEFAULT, + CACHE_MAXSIZE, + CACHE_TTL, + CORE_API_URL, + REQUEST_TIMEOUT_DEFAULT, + cache_key, +) + + +class HandlerCore: + """ + Allows client to request data for specified IP address using the Core API. + Core API provides city-level geolocation with nested geo and AS objects. + Instantiates and maintains access to cache. + """ + + def __init__(self, access_token=None, **kwargs): + """ + Initialize the HandlerCore object with country name list and the + cache initialized. 
+ """ + self.access_token = access_token + + # load countries file + self.countries = kwargs.get("countries") or countries + + # load eu countries file + self.eu_countries = kwargs.get("eu_countries") or eu_countries + + # load countries flags file + self.countries_flags = kwargs.get("countries_flags") or countries_flags + + # load countries currency file + self.countries_currencies = ( + kwargs.get("countries_currencies") or countries_currencies + ) + + # load continent file + self.continents = kwargs.get("continent") or continents + + # setup req opts + self.request_options = kwargs.get("request_options", {}) + if "timeout" not in self.request_options: + self.request_options["timeout"] = REQUEST_TIMEOUT_DEFAULT + + # setup cache + if "cache" in kwargs: + self.cache = kwargs["cache"] + else: + cache_options = kwargs.get("cache_options", {}) + if "maxsize" not in cache_options: + cache_options["maxsize"] = CACHE_MAXSIZE + if "ttl" not in cache_options: + cache_options["ttl"] = CACHE_TTL + self.cache = DefaultCache(**cache_options) + + # setup custom headers + self.headers = kwargs.get("headers", None) + + def getDetails(self, ip_address=None, timeout=None): + """ + Get Core details for specified IP address as a Details object. + + If `timeout` is not `None`, it will override the client-level timeout + just for this operation. + """ + # If the supplied IP address uses the objects defined in the built-in + # module ipaddress extract the appropriate string notation before + # formatting the URL. + if isinstance(ip_address, IPv4Address) or isinstance( + ip_address, IPv6Address + ): + ip_address = ip_address.exploded + + # check if bogon. + if ip_address and is_bogon(ip_address): + details = {} + details["ip"] = ip_address + details["bogon"] = True + return Details(details) + + # check cache first. 
+ try: + cached_data = self.cache[cache_key(ip_address)] + return Details(cached_data) + except KeyError: + pass + + # prepare req http opts + req_opts = {**self.request_options} + if timeout is not None: + req_opts["timeout"] = timeout + + # Build URL + url = CORE_API_URL + if ip_address: + url += "/" + ip_address + + headers = handler_utils.get_headers(self.access_token, self.headers) + response = requests.get(url, headers=headers, **req_opts) + + if response.status_code == 429: + raise RequestQuotaExceededError() + if response.status_code >= 400: + error_code = response.status_code + content_type = response.headers.get("Content-Type") + if content_type == "application/json": + error_response = response.json() + else: + error_response = {"error": response.text} + raise APIError(error_code, error_response) + + details = response.json() + + # Format and cache + self._format_core_details(details) + self.cache[cache_key(ip_address)] = details + + return Details(details) + + def _format_core_details(self, details): + """ + Format Core response details. + Core has nested geo and as objects that need special formatting. 
+ """ + # Format geo object if present + if "geo" in details and details["geo"]: + geo = details["geo"] + if "country_code" in geo: + country_code = geo["country_code"] + geo["country_name"] = self.countries.get(country_code) + geo["isEU"] = country_code in self.eu_countries + geo["country_flag"] = self.countries_flags.get(country_code) + geo["country_currency"] = self.countries_currencies.get( + country_code + ) + geo["continent"] = self.continents.get(country_code) + geo["country_flag_url"] = ( + f"{handler_utils.COUNTRY_FLAGS_URL}{country_code}.svg" + ) + + # Top-level country_code might also exist in some responses + if "country_code" in details: + country_code = details["country_code"] + details["country_name"] = self.countries.get(country_code) + details["isEU"] = country_code in self.eu_countries + details["country_flag"] = self.countries_flags.get(country_code) + details["country_currency"] = self.countries_currencies.get( + country_code + ) + details["continent"] = self.continents.get(country_code) + details["country_flag_url"] = ( + f"{handler_utils.COUNTRY_FLAGS_URL}{country_code}.svg" + ) + + def getBatchDetails( + self, + ip_addresses, + batch_size=None, + timeout_per_batch=BATCH_REQ_TIMEOUT_DEFAULT, + timeout_total=None, + raise_on_fail=True, + ): + """ + Get Core details for a batch of IP addresses at once. + + There is no specified limit to the number of IPs this function can + accept; it can handle as much as the user can fit in RAM (along with + all of the response data, which is at least a magnitude larger than the + input list). + + The input list is broken up into batches to abide by API requirements. + The batch size can be adjusted with `batch_size` but is clipped to + `BATCH_MAX_SIZE`. + Defaults to `BATCH_MAX_SIZE`. + + For each batch, `timeout_per_batch` indicates the maximum seconds to + spend waiting for the HTTP request to complete. If any batch fails with + this timeout, the whole operation fails. 
+ Defaults to `BATCH_REQ_TIMEOUT_DEFAULT` seconds. + + `timeout_total` is a seconds-denominated hard-timeout for the time + spent in HTTP operations; regardless of whether all batches have + succeeded so far, if `timeout_total` is reached, the whole operation + will fail by raising `TimeoutExceededError`. + Defaults to being turned off. + + `raise_on_fail`, if turned off, will return any result retrieved so far + rather than raise an exception when errors occur, including timeout and + quota errors. + Defaults to on. + """ + if batch_size == None: + batch_size = BATCH_MAX_SIZE + + result = {} + lookup_addresses = [] + + # pre-populate with anything we've got in the cache, and keep around + # the IPs not in the cache. + for ip_address in ip_addresses: + # if the supplied IP address uses the objects defined in the + # built-in module ipaddress extract the appropriate string notation + # before formatting the URL. + if isinstance(ip_address, IPv4Address) or isinstance( + ip_address, IPv6Address + ): + ip_address = ip_address.exploded + + if ip_address and is_bogon(ip_address): + details = {} + details["ip"] = ip_address + details["bogon"] = True + result[ip_address] = Details(details) + else: + try: + cached_data = self.cache[cache_key(ip_address)] + result[ip_address] = Details(cached_data) + except KeyError: + lookup_addresses.append(ip_address) + + # all in cache - return early. + if len(lookup_addresses) == 0: + return result + + # do start timer if necessary + if timeout_total is not None: + start_time = time.time() + + # prepare req http options + req_opts = {**self.request_options, "timeout": timeout_per_batch} + + # loop over batch chunks and do lookup for each. + url = "https://api.ipinfo.io/batch" + headers = handler_utils.get_headers(self.access_token, self.headers) + headers["content-type"] = "application/json" + + for i in range(0, len(lookup_addresses), batch_size): + # quit if total timeout is reached. 
+ if ( + timeout_total is not None + and time.time() - start_time > timeout_total + ): + return handler_utils.return_or_fail( + raise_on_fail, TimeoutExceededError(), result + ) + + chunk = lookup_addresses[i : i + batch_size] + + # lookup + try: + response = requests.post( + url, json=chunk, headers=headers, **req_opts + ) + except Exception as e: + return handler_utils.return_or_fail(raise_on_fail, e, result) + + # fail on bad status codes + try: + if response.status_code == 429: + raise RequestQuotaExceededError() + response.raise_for_status() + except Exception as e: + return handler_utils.return_or_fail(raise_on_fail, e, result) + + # Process batch response + json_response = response.json() + + for ip_address, data in json_response.items(): + # Cache and format the data + if isinstance(data, dict) and not data.get("bogon"): + self._format_core_details(data) + self.cache[cache_key(ip_address)] = data + result[ip_address] = Details(data) + + return result diff --git a/ipinfo/handler_core_async.py b/ipinfo/handler_core_async.py new file mode 100644 index 0000000..9b77943 --- /dev/null +++ b/ipinfo/handler_core_async.py @@ -0,0 +1,359 @@ +""" +Core API client asynchronous handler for fetching data from the IPinfo Core service. +""" + +import asyncio +import json +from ipaddress import IPv4Address, IPv6Address + +import aiohttp + +from . import handler_utils +from .bogon import is_bogon +from .cache.default import DefaultCache +from .data import ( + continents, + countries, + countries_currencies, + countries_flags, + eu_countries, +) +from .details import Details +from .error import APIError +from .exceptions import RequestQuotaExceededError, TimeoutExceededError +from .handler_utils import ( + BATCH_MAX_SIZE, + BATCH_REQ_TIMEOUT_DEFAULT, + CACHE_MAXSIZE, + CACHE_TTL, + CORE_API_URL, + REQUEST_TIMEOUT_DEFAULT, + cache_key, +) + + +class AsyncHandlerCore: + """ + Allows client to request data for specified IP address asynchronously using the Core API. 
+ Core API provides city-level geolocation with nested geo and AS objects. + Instantiates and maintains access to cache. + """ + + def __init__(self, access_token=None, **kwargs): + """ + Initialize the AsyncHandlerCore object with country name list and the + cache initialized. + """ + self.access_token = access_token + + # load countries file + self.countries = kwargs.get("countries") or countries + + # load eu countries file + self.eu_countries = kwargs.get("eu_countries") or eu_countries + + # load countries flags file + self.countries_flags = kwargs.get("countries_flags") or countries_flags + + # load countries currency file + self.countries_currencies = ( + kwargs.get("countries_currencies") or countries_currencies + ) + + # load continent file + self.continents = kwargs.get("continent") or continents + + # setup req opts + self.request_options = kwargs.get("request_options", {}) + if "timeout" not in self.request_options: + self.request_options["timeout"] = REQUEST_TIMEOUT_DEFAULT + + # setup aiohttp + self.httpsess = None + + # setup cache + if "cache" in kwargs: + self.cache = kwargs["cache"] + else: + cache_options = kwargs.get("cache_options", {}) + if "maxsize" not in cache_options: + cache_options["maxsize"] = CACHE_MAXSIZE + if "ttl" not in cache_options: + cache_options["ttl"] = CACHE_TTL + self.cache = DefaultCache(**cache_options) + + # setup custom headers + self.headers = kwargs.get("headers", None) + + async def init(self): + """ + Initializes internal aiohttp connection pool. + + This isn't _required_, as the pool is initialized lazily when needed. + But in case you require non-lazy initialization, you may await this. + + This is idempotent. + """ + await self._ensure_aiohttp_ready() + + async def deinit(self): + """ + Deinitialize the async handler. + + This is required in case you need to let go of the memory/state + associated with the async handler in a long-running process. + + This is idempotent. 
+ """ + if self.httpsess: + await self.httpsess.close() + self.httpsess = None + + async def getDetails(self, ip_address=None, timeout=None): + """ + Get Core details for specified IP address as a Details object. + + If `timeout` is not `None`, it will override the client-level timeout + just for this operation. + """ + self._ensure_aiohttp_ready() + + # If the supplied IP address uses the objects defined in the built-in + # module ipaddress, extract the appropriate string notation before + # formatting the URL. + if isinstance(ip_address, IPv4Address) or isinstance(ip_address, IPv6Address): + ip_address = ip_address.exploded + + # check if bogon. + if ip_address and is_bogon(ip_address): + details = {"ip": ip_address, "bogon": True} + return Details(details) + + # check cache first. + try: + cached_data = self.cache[cache_key(ip_address)] + return Details(cached_data) + except KeyError: + pass + + # not in cache; do http req + url = CORE_API_URL + if ip_address: + url += "/" + ip_address + headers = handler_utils.get_headers(self.access_token, self.headers) + req_opts = {} + if timeout is not None: + req_opts["timeout"] = timeout + async with self.httpsess.get(url, headers=headers, **req_opts) as resp: + if resp.status == 429: + raise RequestQuotaExceededError() + if resp.status >= 400: + error_code = resp.status + content_type = resp.headers.get("Content-Type") + if content_type == "application/json": + error_response = await resp.json() + else: + error_response = {"error": await resp.text()} + raise APIError(error_code, error_response) + details = await resp.json() + + # format & cache + self._format_core_details(details) + self.cache[cache_key(ip_address)] = details + + return Details(details) + + def _format_core_details(self, details): + """ + Format Core response details. + Core has nested geo and as objects that need special formatting. 
+ """ + # Format geo object if present + if "geo" in details and details["geo"]: + geo = details["geo"] + if "country_code" in geo: + country_code = geo["country_code"] + geo["country_name"] = self.countries.get(country_code) + geo["isEU"] = country_code in self.eu_countries + geo["country_flag"] = self.countries_flags.get(country_code) + geo["country_currency"] = self.countries_currencies.get(country_code) + geo["continent"] = self.continents.get(country_code) + geo["country_flag_url"] = ( + f"{handler_utils.COUNTRY_FLAGS_URL}{country_code}.svg" + ) + + # Top-level country_code might also exist in some responses + if "country_code" in details: + country_code = details["country_code"] + details["country_name"] = self.countries.get(country_code) + details["isEU"] = country_code in self.eu_countries + details["country_flag"] = self.countries_flags.get(country_code) + details["country_currency"] = self.countries_currencies.get(country_code) + details["continent"] = self.continents.get(country_code) + details["country_flag_url"] = ( + f"{handler_utils.COUNTRY_FLAGS_URL}{country_code}.svg" + ) + + async def getBatchDetails( + self, + ip_addresses, + batch_size=None, + timeout_per_batch=BATCH_REQ_TIMEOUT_DEFAULT, + timeout_total=None, + raise_on_fail=True, + ): + """ + Get Core details for a batch of IP addresses at once. + + There is no specified limit to the number of IPs this function can + accept; it can handle as much as the user can fit in RAM (along with + all of the response data, which is at least a magnitude larger than the + input list). + + The input list is broken up into batches to abide by API requirements. + The batch size can be adjusted with `batch_size` but is clipped to + `BATCH_MAX_SIZE`. + Defaults to `BATCH_MAX_SIZE`. + + For each batch, `timeout_per_batch` indicates the maximum seconds to + spend waiting for the HTTP request to complete. If any batch fails with + this timeout, the whole operation fails. 
+ Defaults to `BATCH_REQ_TIMEOUT_DEFAULT` seconds. + + `timeout_total` is a seconds-denominated hard-timeout for the time + spent in HTTP operations; regardless of whether all batches have + succeeded so far, if `timeout_total` is reached, the whole operation + will fail by raising `TimeoutExceededError`. + Defaults to being turned off. + + `raise_on_fail`, if turned off, will return any result retrieved so far + rather than raise an exception when errors occur, including timeout and + quota errors. + Defaults to on. + + The concurrency level is currently unadjustable; coroutines will be + created and consumed for all batches at once. + """ + self._ensure_aiohttp_ready() + + if batch_size is None: + batch_size = BATCH_MAX_SIZE + + result = {} + + # Pre-populate with anything we've got in the cache, and keep around + # the IPs not in the cache. + lookup_addresses = [] + for ip_address in ip_addresses: + # If the supplied IP address uses the objects defined in the + # built-in module ipaddress extract the appropriate string notation + # before formatting the URL. + if isinstance(ip_address, IPv4Address) or isinstance( + ip_address, IPv6Address + ): + ip_address = ip_address.exploded + + if ip_address and is_bogon(ip_address): + details = {} + details["ip"] = ip_address + details["bogon"] = True + result[ip_address] = Details(details) + else: + try: + cached_data = self.cache[cache_key(ip_address)] + result[ip_address] = Details(cached_data) + except KeyError: + lookup_addresses.append(ip_address) + + # all in cache - return early. + if not lookup_addresses: + return result + + # loop over batch chunks and prepare coroutines for each. + url = "https://api.ipinfo.io/batch" + headers = handler_utils.get_headers(self.access_token, self.headers) + headers["content-type"] = "application/json" + + # prepare tasks that will make reqs and update results. 
+ tasks = [ + asyncio.create_task( + self._do_batch_req( + lookup_addresses[i : i + batch_size], + url, + headers, + timeout_per_batch, + raise_on_fail, + result, + ) + ) + for i in range(0, len(lookup_addresses), batch_size) + ] + + try: + _, pending = await asyncio.wait( + tasks, + timeout=timeout_total, + return_when=asyncio.FIRST_EXCEPTION, + ) + + # if all done, return result. + if not pending: + return result + + # if some had a timeout, first cancel timed out stuff and wait for + # cleanup. then exit with return_or_fail. + for co in pending: + try: + co.cancel() + await co + except asyncio.CancelledError: + pass + + return handler_utils.return_or_fail( + raise_on_fail, TimeoutExceededError(), result + ) + except Exception as e: + return handler_utils.return_or_fail(raise_on_fail, e, result) + + return result + + async def _do_batch_req( + self, chunk, url, headers, timeout_per_batch, raise_on_fail, result + ): + """ + Coroutine which will do the actual POST request for getBatchDetails. 
+ """ + try: + resp = await self.httpsess.post( + url, + data=json.dumps(chunk), + headers=headers, + timeout=timeout_per_batch, + ) + except Exception as e: + return handler_utils.return_or_fail(raise_on_fail, e, None) + + # gather data + try: + if resp.status == 429: + raise RequestQuotaExceededError() + resp.raise_for_status() + except Exception as e: + return handler_utils.return_or_fail(raise_on_fail, e, None) + + json_resp = await resp.json() + + # format & fill up cache + for ip_address, data in json_resp.items(): + if isinstance(data, dict) and not data.get("bogon"): + self._format_core_details(data) + self.cache[cache_key(ip_address)] = data + result[ip_address] = Details(data) + + def _ensure_aiohttp_ready(self): + """Ensures aiohttp internal state is initialized.""" + if self.httpsess: + return + + timeout = aiohttp.ClientTimeout(total=self.request_options["timeout"]) + self.httpsess = aiohttp.ClientSession(timeout=timeout) diff --git a/ipinfo/handler_lite.py b/ipinfo/handler_lite.py new file mode 100644 index 0000000..c58c5c0 --- /dev/null +++ b/ipinfo/handler_lite.py @@ -0,0 +1,139 @@ +""" +Main API client handler for fetching data from the IPinfo service. +""" + +from ipaddress import IPv4Address, IPv6Address + +import requests + +from .error import APIError +from .cache.default import DefaultCache +from .details import Details +from .exceptions import RequestQuotaExceededError +from .handler_utils import ( + LITE_API_URL, + CACHE_MAXSIZE, + CACHE_TTL, + REQUEST_TIMEOUT_DEFAULT, + cache_key, +) +from . import handler_utils +from .bogon import is_bogon +from .data import ( + continents, + countries, + countries_currencies, + eu_countries, + countries_flags, +) + + +class HandlerLite: + """ + Allows client to request data for specified IP address using the Lite API. + Instantiates and maintains access to cache. 
+ """ + + def __init__(self, access_token=None, **kwargs): + """ + Initialize the Handler object with country name list and the + cache initialized. + """ + self.access_token = access_token + + # load countries file + self.countries = kwargs.get("countries") or countries + + # load eu countries file + self.eu_countries = kwargs.get("eu_countries") or eu_countries + + # load countries flags file + self.countries_flags = kwargs.get("countries_flags") or countries_flags + + # load countries currency file + self.countries_currencies = ( + kwargs.get("countries_currencies") or countries_currencies + ) + + # load continent file + self.continents = kwargs.get("continent") or continents + + # setup req opts + self.request_options = kwargs.get("request_options", {}) + if "timeout" not in self.request_options: + self.request_options["timeout"] = REQUEST_TIMEOUT_DEFAULT + + # setup cache + if "cache" in kwargs: + self.cache = kwargs["cache"] + else: + cache_options = kwargs.get("cache_options", {}) + if "maxsize" not in cache_options: + cache_options["maxsize"] = CACHE_MAXSIZE + if "ttl" not in cache_options: + cache_options["ttl"] = CACHE_TTL + self.cache = DefaultCache(**cache_options) + + # setup custom headers + self.headers = kwargs.get("headers", None) + + def getDetails(self, ip_address=None, timeout=None): + """ + Get details for specified IP address as a Details object. + + If `timeout` is not `None`, it will override the client-level timeout + just for this operation. + """ + # If the supplied IP address uses the objects defined in the built-in + # module ipaddress extract the appropriate string notation before + # formatting the URL. + if isinstance(ip_address, IPv4Address) or isinstance(ip_address, IPv6Address): + ip_address = ip_address.exploded + + # check if bogon. + if ip_address and is_bogon(ip_address): + details = {} + details["ip"] = ip_address + details["bogon"] = True + return Details(details) + + # check cache first. 
+ try: + cached_ipaddr = self.cache[cache_key(ip_address)] + return Details(cached_ipaddr) + except KeyError: + pass + + # prepare req http opts + req_opts = {**self.request_options} + if timeout is not None: + req_opts["timeout"] = timeout + + # not in cache; do http req + url = f"{LITE_API_URL}/{ip_address}" if ip_address else f"{LITE_API_URL}/me" + headers = handler_utils.get_headers(self.access_token, self.headers) + response = requests.get(url, headers=headers, **req_opts) + if response.status_code == 429: + raise RequestQuotaExceededError() + if response.status_code >= 400: + error_code = response.status_code + content_type = response.headers.get("Content-Type") + if content_type == "application/json": + error_response = response.json() + else: + error_response = {"error": response.text} + raise APIError(error_code, error_response) + details = response.json() + + # format & cache + handler_utils.format_details( + details, + self.countries, + self.eu_countries, + self.countries_flags, + self.countries_currencies, + self.continents, + ) + self.cache[cache_key(ip_address)] = details + + return Details(details) diff --git a/ipinfo/handler_lite_async.py b/ipinfo/handler_lite_async.py new file mode 100644 index 0000000..106b8d2 --- /dev/null +++ b/ipinfo/handler_lite_async.py @@ -0,0 +1,167 @@ +""" +Main API client asynchronous handler for fetching data from the IPinfo service. +""" + +from ipaddress import IPv4Address, IPv6Address + +import aiohttp + +from .error import APIError +from .cache.default import DefaultCache +from .details import Details +from .exceptions import RequestQuotaExceededError +from .handler_utils import ( + CACHE_MAXSIZE, + CACHE_TTL, + LITE_API_URL, + REQUEST_TIMEOUT_DEFAULT, + cache_key, +) +from . 
import handler_utils +from .bogon import is_bogon +from .data import ( + continents, + countries, + countries_currencies, + eu_countries, + countries_flags, +) + + +class AsyncHandlerLite: + """ + Allows client to request data for specified IP address asynchronously using the Lite API. + Instantiates and maintains access to cache. + """ + + def __init__(self, access_token=None, **kwargs): + """ + Initialize the Handler object with country name list and the + cache initialized. + """ + self.access_token = access_token + + # load countries file + self.countries = kwargs.get("countries") or countries + + # load eu countries file + self.eu_countries = kwargs.get("eu_countries") or eu_countries + + # load countries flags file + self.countries_flags = kwargs.get("countries_flags") or countries_flags + + # load countries currency file + self.countries_currencies = ( + kwargs.get("countries_currencies") or countries_currencies + ) + + # load continent file + self.continents = kwargs.get("continent") or continents + + # setup req opts + self.request_options = kwargs.get("request_options", {}) + if "timeout" not in self.request_options: + self.request_options["timeout"] = REQUEST_TIMEOUT_DEFAULT + + # setup aiohttp + self.httpsess = None + + # setup cache + if "cache" in kwargs: + self.cache = kwargs["cache"] + else: + cache_options = kwargs.get("cache_options", {}) + if "maxsize" not in cache_options: + cache_options["maxsize"] = CACHE_MAXSIZE + if "ttl" not in cache_options: + cache_options["ttl"] = CACHE_TTL + self.cache = DefaultCache(**cache_options) + + # setup custom headers + self.headers = kwargs.get("headers", None) + + async def init(self): + """ + Initializes internal aiohttp connection pool. + + This isn't _required_, as the pool is initialized lazily when needed. + But in case you require non-lazy initialization, you may await this. + + This is idempotent. 
+ """ + await self._ensure_aiohttp_ready() + + async def deinit(self): + """ + Deinitialize the async handler. + + This is required in case you need to let go of the memory/state + associated with the async handler in a long-running process. + + This is idempotent. + """ + if self.httpsess: + await self.httpsess.close() + self.httpsess = None + + async def getDetails(self, ip_address=None, timeout=None): + """Get details for specified IP address as a Details object.""" + self._ensure_aiohttp_ready() + + # If the supplied IP address uses the objects defined in the built-in + # module ipaddress, extract the appropriate string notation before + # formatting the URL. + if isinstance(ip_address, IPv4Address) or isinstance(ip_address, IPv6Address): + ip_address = ip_address.exploded + + # check if bogon. + if ip_address and is_bogon(ip_address): + details = {"ip": ip_address, "bogon": True} + return Details(details) + + # check cache first. + try: + cached_ipaddr = self.cache[cache_key(ip_address)] + return Details(cached_ipaddr) + except KeyError: + pass + + # not in cache; do http req + url = f"{LITE_API_URL}/{ip_address}" if ip_address else f"{LITE_API_URL}/me" + headers = handler_utils.get_headers(self.access_token, self.headers) + req_opts = {} + if timeout is not None: + req_opts["timeout"] = timeout + async with self.httpsess.get(url, headers=headers, **req_opts) as resp: + if resp.status == 429: + raise RequestQuotaExceededError() + if resp.status >= 400: + error_code = resp.status + content_type = resp.headers.get("Content-Type") + if content_type == "application/json": + error_response = await resp.json() + else: + error_response = {"error": resp.text()} + raise APIError(error_code, error_response) + details = await resp.json() + + # format & cache + handler_utils.format_details( + details, + self.countries, + self.eu_countries, + self.countries_flags, + self.countries_currencies, + self.continents, + ) + self.cache[cache_key(ip_address)] = details + + return 
Details(details) + + def _ensure_aiohttp_ready(self): + """Ensures aiohttp internal state is initialized.""" + if self.httpsess: + return + + timeout = aiohttp.ClientTimeout(total=self.request_options["timeout"]) + self.httpsess = aiohttp.ClientSession(timeout=timeout) diff --git a/ipinfo/handler_plus.py b/ipinfo/handler_plus.py new file mode 100644 index 0000000..f6a25f6 --- /dev/null +++ b/ipinfo/handler_plus.py @@ -0,0 +1,289 @@ +""" +Plus API client handler for fetching data from the IPinfo Plus service. +""" + +import time +from ipaddress import IPv4Address, IPv6Address + +import requests + +from . import handler_utils +from .bogon import is_bogon +from .cache.default import DefaultCache +from .data import ( + continents, + countries, + countries_currencies, + countries_flags, + eu_countries, +) +from .details import Details +from .error import APIError +from .exceptions import RequestQuotaExceededError, TimeoutExceededError +from .handler_utils import ( + BATCH_MAX_SIZE, + BATCH_REQ_TIMEOUT_DEFAULT, + CACHE_MAXSIZE, + CACHE_TTL, + PLUS_API_URL, + REQUEST_TIMEOUT_DEFAULT, + cache_key, +) + + +class HandlerPlus: + """ + Allows client to request data for specified IP address using the Plus API. + Plus API provides enhanced data including mobile carrier info and privacy detection. + Instantiates and maintains access to cache. + """ + + def __init__(self, access_token=None, **kwargs): + """ + Initialize the HandlerPlus object with country name list and the + cache initialized. 
+ """ + self.access_token = access_token + + # load countries file + self.countries = kwargs.get("countries") or countries + + # load eu countries file + self.eu_countries = kwargs.get("eu_countries") or eu_countries + + # load countries flags file + self.countries_flags = kwargs.get("countries_flags") or countries_flags + + # load countries currency file + self.countries_currencies = ( + kwargs.get("countries_currencies") or countries_currencies + ) + + # load continent file + self.continents = kwargs.get("continent") or continents + + # setup req opts + self.request_options = kwargs.get("request_options", {}) + if "timeout" not in self.request_options: + self.request_options["timeout"] = REQUEST_TIMEOUT_DEFAULT + + # setup cache + if "cache" in kwargs: + self.cache = kwargs["cache"] + else: + cache_options = kwargs.get("cache_options", {}) + if "maxsize" not in cache_options: + cache_options["maxsize"] = CACHE_MAXSIZE + if "ttl" not in cache_options: + cache_options["ttl"] = CACHE_TTL + self.cache = DefaultCache(**cache_options) + + # setup custom headers + self.headers = kwargs.get("headers", None) + + def getDetails(self, ip_address=None, timeout=None): + """ + Get Plus details for specified IP address as a Details object. + + If `timeout` is not `None`, it will override the client-level timeout + just for this operation. + """ + # If the supplied IP address uses the objects defined in the built-in + # module ipaddress extract the appropriate string notation before + # formatting the URL. + if isinstance(ip_address, IPv4Address) or isinstance(ip_address, IPv6Address): + ip_address = ip_address.exploded + + # check if bogon. + if ip_address and is_bogon(ip_address): + details = {} + details["ip"] = ip_address + details["bogon"] = True + return Details(details) + + # check cache first. 
+ try: + cached_data = self.cache[cache_key(ip_address)] + return Details(cached_data) + except KeyError: + pass + + # prepare req http opts + req_opts = {**self.request_options} + if timeout is not None: + req_opts["timeout"] = timeout + + # Build URL + url = PLUS_API_URL + if ip_address: + url += "/" + ip_address + + headers = handler_utils.get_headers(self.access_token, self.headers) + response = requests.get(url, headers=headers, **req_opts) + + if response.status_code == 429: + raise RequestQuotaExceededError() + if response.status_code >= 400: + error_code = response.status_code + content_type = response.headers.get("Content-Type") + if content_type == "application/json": + error_response = response.json() + else: + error_response = {"error": response.text} + raise APIError(error_code, error_response) + + details = response.json() + + # Format and cache + self._format_plus_details(details) + self.cache[cache_key(ip_address)] = details + + return Details(details) + + def _format_plus_details(self, details): + """ + Format Plus response details. + Plus has nested geo and as objects that need special formatting. 
+ """ + # Format geo object if present + if "geo" in details and details["geo"]: + geo = details["geo"] + if "country_code" in geo: + country_code = geo["country_code"] + geo["country_name"] = self.countries.get(country_code) + geo["isEU"] = country_code in self.eu_countries + geo["country_flag"] = self.countries_flags.get(country_code) + geo["country_currency"] = self.countries_currencies.get(country_code) + geo["continent"] = self.continents.get(country_code) + geo["country_flag_url"] = ( + f"{handler_utils.COUNTRY_FLAGS_URL}{country_code}.svg" + ) + + # Top-level country_code might also exist in some responses + if "country_code" in details: + country_code = details["country_code"] + details["country_name"] = self.countries.get(country_code) + details["isEU"] = country_code in self.eu_countries + details["country_flag"] = self.countries_flags.get(country_code) + details["country_currency"] = self.countries_currencies.get(country_code) + details["continent"] = self.continents.get(country_code) + details["country_flag_url"] = ( + f"{handler_utils.COUNTRY_FLAGS_URL}{country_code}.svg" + ) + + def getBatchDetails( + self, + ip_addresses, + batch_size=None, + timeout_per_batch=BATCH_REQ_TIMEOUT_DEFAULT, + timeout_total=None, + raise_on_fail=True, + ): + """ + Get Plus details for a batch of IP addresses at once. + + There is no specified limit to the number of IPs this function can + accept; it can handle as much as the user can fit in RAM (along with + all of the response data, which is at least a magnitude larger than the + input list). + + The input list is broken up into batches to abide by API requirements. + The batch size can be adjusted with `batch_size` but is clipped to + `BATCH_MAX_SIZE`. + Defaults to `BATCH_MAX_SIZE`. + + For each batch, `timeout_per_batch` indicates the maximum seconds to + spend waiting for the HTTP request to complete. If any batch fails with + this timeout, the whole operation fails. + Defaults to `BATCH_REQ_TIMEOUT_DEFAULT` seconds. 
+ + `timeout_total` is a seconds-denominated hard-timeout for the time + spent in HTTP operations; regardless of whether all batches have + succeeded so far, if `timeout_total` is reached, the whole operation + will fail by raising `TimeoutExceededError`. + Defaults to being turned off. + + `raise_on_fail`, if turned off, will return any result retrieved so far + rather than raise an exception when errors occur, including timeout and + quota errors. + Defaults to on. + """ + if batch_size == None: + batch_size = BATCH_MAX_SIZE + + result = {} + lookup_addresses = [] + + # pre-populate with anything we've got in the cache, and keep around + # the IPs not in the cache. + for ip_address in ip_addresses: + # if the supplied IP address uses the objects defined in the + # built-in module ipaddress extract the appropriate string notation + # before formatting the URL. + if isinstance(ip_address, IPv4Address) or isinstance( + ip_address, IPv6Address + ): + ip_address = ip_address.exploded + + if ip_address and is_bogon(ip_address): + details = {} + details["ip"] = ip_address + details["bogon"] = True + result[ip_address] = Details(details) + else: + try: + cached_data = self.cache[cache_key(ip_address)] + result[ip_address] = Details(cached_data) + except KeyError: + lookup_addresses.append(ip_address) + + # all in cache - return early. + if len(lookup_addresses) == 0: + return result + + # do start timer if necessary + if timeout_total is not None: + start_time = time.time() + + # prepare req http options + req_opts = {**self.request_options, "timeout": timeout_per_batch} + + # loop over batch chunks and do lookup for each. + url = "https://api.ipinfo.io/batch" + headers = handler_utils.get_headers(self.access_token, self.headers) + headers["content-type"] = "application/json" + + for i in range(0, len(lookup_addresses), batch_size): + # quit if total timeout is reached. 
+ if timeout_total is not None and time.time() - start_time > timeout_total: + return handler_utils.return_or_fail( + raise_on_fail, TimeoutExceededError(), result + ) + + chunk = lookup_addresses[i : i + batch_size] + + # lookup + try: + response = requests.post(url, json=chunk, headers=headers, **req_opts) + except Exception as e: + return handler_utils.return_or_fail(raise_on_fail, e, result) + + # fail on bad status codes + try: + if response.status_code == 429: + raise RequestQuotaExceededError() + response.raise_for_status() + except Exception as e: + return handler_utils.return_or_fail(raise_on_fail, e, result) + + # Process batch response + json_response = response.json() + + for ip_address, data in json_response.items(): + # Cache and format the data + if isinstance(data, dict) and not data.get("bogon"): + self._format_plus_details(data) + self.cache[cache_key(ip_address)] = data + result[ip_address] = Details(data) + + return result diff --git a/ipinfo/handler_plus_async.py b/ipinfo/handler_plus_async.py new file mode 100644 index 0000000..34adf98 --- /dev/null +++ b/ipinfo/handler_plus_async.py @@ -0,0 +1,359 @@ +""" +Plus API client asynchronous handler for fetching data from the IPinfo Plus service. +""" + +import asyncio +import json +from ipaddress import IPv4Address, IPv6Address + +import aiohttp + +from . import handler_utils +from .bogon import is_bogon +from .cache.default import DefaultCache +from .data import ( + continents, + countries, + countries_currencies, + countries_flags, + eu_countries, +) +from .details import Details +from .error import APIError +from .exceptions import RequestQuotaExceededError, TimeoutExceededError +from .handler_utils import ( + BATCH_MAX_SIZE, + BATCH_REQ_TIMEOUT_DEFAULT, + CACHE_MAXSIZE, + CACHE_TTL, + PLUS_API_URL, + REQUEST_TIMEOUT_DEFAULT, + cache_key, +) + + +class AsyncHandlerPlus: + """ + Allows client to request data for specified IP address asynchronously using the Plus API. 
+    Plus API provides city-level geolocation with nested geo and AS objects.
+    Instantiates and maintains access to cache.
+    """
+
+    def __init__(self, access_token=None, **kwargs):
+        """
+        Initialize the AsyncHandlerPlus object with country name list and the
+        cache initialized.
+        """
+        self.access_token = access_token
+
+        # load countries file
+        self.countries = kwargs.get("countries") or countries
+
+        # load eu countries file
+        self.eu_countries = kwargs.get("eu_countries") or eu_countries
+
+        # load countries flags file
+        self.countries_flags = kwargs.get("countries_flags") or countries_flags
+
+        # load countries currency file
+        self.countries_currencies = (
+            kwargs.get("countries_currencies") or countries_currencies
+        )
+
+        # load continent file
+        self.continents = kwargs.get("continent") or continents
+
+        # setup req opts
+        self.request_options = kwargs.get("request_options", {})
+        if "timeout" not in self.request_options:
+            self.request_options["timeout"] = REQUEST_TIMEOUT_DEFAULT
+
+        # setup aiohttp
+        self.httpsess = None
+
+        # setup cache
+        if "cache" in kwargs:
+            self.cache = kwargs["cache"]
+        else:
+            cache_options = kwargs.get("cache_options", {})
+            if "maxsize" not in cache_options:
+                cache_options["maxsize"] = CACHE_MAXSIZE
+            if "ttl" not in cache_options:
+                cache_options["ttl"] = CACHE_TTL
+            self.cache = DefaultCache(**cache_options)
+
+        # setup custom headers
+        self.headers = kwargs.get("headers", None)
+
+    async def init(self):
+        """
+        Initializes internal aiohttp connection pool.
+
+        This isn't _required_, as the pool is initialized lazily when needed.
+        But in case you require non-lazy initialization, you may await this.
+
+        This is idempotent.
+        """
+        self._ensure_aiohttp_ready()
+
+    async def deinit(self):
+        """
+        Deinitialize the async handler.
+
+        This is required in case you need to let go of the memory/state
+        associated with the async handler in a long-running process.
+
+        This is idempotent.
+ """ + if self.httpsess: + await self.httpsess.close() + self.httpsess = None + + async def getDetails(self, ip_address=None, timeout=None): + """ + Get Plus details for specified IP address as a Details object. + + If `timeout` is not `None`, it will override the client-level timeout + just for this operation. + """ + self._ensure_aiohttp_ready() + + # If the supplied IP address uses the objects defined in the built-in + # module ipaddress, extract the appropriate string notation before + # formatting the URL. + if isinstance(ip_address, IPv4Address) or isinstance(ip_address, IPv6Address): + ip_address = ip_address.exploded + + # check if bogon. + if ip_address and is_bogon(ip_address): + details = {"ip": ip_address, "bogon": True} + return Details(details) + + # check cache first. + try: + cached_data = self.cache[cache_key(ip_address)] + return Details(cached_data) + except KeyError: + pass + + # not in cache; do http req + url = PLUS_API_URL + if ip_address: + url += "/" + ip_address + headers = handler_utils.get_headers(self.access_token, self.headers) + req_opts = {} + if timeout is not None: + req_opts["timeout"] = timeout + async with self.httpsess.get(url, headers=headers, **req_opts) as resp: + if resp.status == 429: + raise RequestQuotaExceededError() + if resp.status >= 400: + error_code = resp.status + content_type = resp.headers.get("Content-Type") + if content_type == "application/json": + error_response = await resp.json() + else: + error_response = {"error": resp.text()} + raise APIError(error_code, error_response) + details = await resp.json() + + # format & cache + self._format_plus_details(details) + self.cache[cache_key(ip_address)] = details + + return Details(details) + + def _format_plus_details(self, details): + """ + Format Plus response details. + Plus has nested geo and as objects that need special formatting. 
+ """ + # Format geo object if present + if "geo" in details and details["geo"]: + geo = details["geo"] + if "country_code" in geo: + country_code = geo["country_code"] + geo["country_name"] = self.countries.get(country_code) + geo["isEU"] = country_code in self.eu_countries + geo["country_flag"] = self.countries_flags.get(country_code) + geo["country_currency"] = self.countries_currencies.get(country_code) + geo["continent"] = self.continents.get(country_code) + geo["country_flag_url"] = ( + f"{handler_utils.COUNTRY_FLAGS_URL}{country_code}.svg" + ) + + # Top-level country_code might also exist in some responses + if "country_code" in details: + country_code = details["country_code"] + details["country_name"] = self.countries.get(country_code) + details["isEU"] = country_code in self.eu_countries + details["country_flag"] = self.countries_flags.get(country_code) + details["country_currency"] = self.countries_currencies.get(country_code) + details["continent"] = self.continents.get(country_code) + details["country_flag_url"] = ( + f"{handler_utils.COUNTRY_FLAGS_URL}{country_code}.svg" + ) + + async def getBatchDetails( + self, + ip_addresses, + batch_size=None, + timeout_per_batch=BATCH_REQ_TIMEOUT_DEFAULT, + timeout_total=None, + raise_on_fail=True, + ): + """ + Get Plus details for a batch of IP addresses at once. + + There is no specified limit to the number of IPs this function can + accept; it can handle as much as the user can fit in RAM (along with + all of the response data, which is at least a magnitude larger than the + input list). + + The input list is broken up into batches to abide by API requirements. + The batch size can be adjusted with `batch_size` but is clipped to + `BATCH_MAX_SIZE`. + Defaults to `BATCH_MAX_SIZE`. + + For each batch, `timeout_per_batch` indicates the maximum seconds to + spend waiting for the HTTP request to complete. If any batch fails with + this timeout, the whole operation fails. 
+ Defaults to `BATCH_REQ_TIMEOUT_DEFAULT` seconds. + + `timeout_total` is a seconds-denominated hard-timeout for the time + spent in HTTP operations; regardless of whether all batches have + succeeded so far, if `timeout_total` is reached, the whole operation + will fail by raising `TimeoutExceededError`. + Defaults to being turned off. + + `raise_on_fail`, if turned off, will return any result retrieved so far + rather than raise an exception when errors occur, including timeout and + quota errors. + Defaults to on. + + The concurrency level is currently unadjustable; coroutines will be + created and consumed for all batches at once. + """ + self._ensure_aiohttp_ready() + + if batch_size is None: + batch_size = BATCH_MAX_SIZE + + result = {} + + # Pre-populate with anything we've got in the cache, and keep around + # the IPs not in the cache. + lookup_addresses = [] + for ip_address in ip_addresses: + # If the supplied IP address uses the objects defined in the + # built-in module ipaddress extract the appropriate string notation + # before formatting the URL. + if isinstance(ip_address, IPv4Address) or isinstance( + ip_address, IPv6Address + ): + ip_address = ip_address.exploded + + if ip_address and is_bogon(ip_address): + details = {} + details["ip"] = ip_address + details["bogon"] = True + result[ip_address] = Details(details) + else: + try: + cached_data = self.cache[cache_key(ip_address)] + result[ip_address] = Details(cached_data) + except KeyError: + lookup_addresses.append(ip_address) + + # all in cache - return early. + if not lookup_addresses: + return result + + # loop over batch chunks and prepare coroutines for each. + url = "https://api.ipinfo.io/batch" + headers = handler_utils.get_headers(self.access_token, self.headers) + headers["content-type"] = "application/json" + + # prepare tasks that will make reqs and update results. 
+ tasks = [ + asyncio.create_task( + self._do_batch_req( + lookup_addresses[i : i + batch_size], + url, + headers, + timeout_per_batch, + raise_on_fail, + result, + ) + ) + for i in range(0, len(lookup_addresses), batch_size) + ] + + try: + _, pending = await asyncio.wait( + tasks, + timeout=timeout_total, + return_when=asyncio.FIRST_EXCEPTION, + ) + + # if all done, return result. + if not pending: + return result + + # if some had a timeout, first cancel timed out stuff and wait for + # cleanup. then exit with return_or_fail. + for co in pending: + try: + co.cancel() + await co + except asyncio.CancelledError: + pass + + return handler_utils.return_or_fail( + raise_on_fail, TimeoutExceededError(), result + ) + except Exception as e: + return handler_utils.return_or_fail(raise_on_fail, e, result) + + return result + + async def _do_batch_req( + self, chunk, url, headers, timeout_per_batch, raise_on_fail, result + ): + """ + Coroutine which will do the actual POST request for getBatchDetails. 
+ """ + try: + resp = await self.httpsess.post( + url, + data=json.dumps(chunk), + headers=headers, + timeout=timeout_per_batch, + ) + except Exception as e: + return handler_utils.return_or_fail(raise_on_fail, e, None) + + # gather data + try: + if resp.status == 429: + raise RequestQuotaExceededError() + resp.raise_for_status() + except Exception as e: + return handler_utils.return_or_fail(raise_on_fail, e, None) + + json_resp = await resp.json() + + # format & fill up cache + for ip_address, data in json_resp.items(): + if isinstance(data, dict) and not data.get("bogon"): + self._format_plus_details(data) + self.cache[cache_key(ip_address)] = data + result[ip_address] = Details(data) + + def _ensure_aiohttp_ready(self): + """Ensures aiohttp internal state is initialized.""" + if self.httpsess: + return + + timeout = aiohttp.ClientTimeout(total=self.request_options["timeout"]) + self.httpsess = aiohttp.ClientSession(timeout=timeout) diff --git a/ipinfo/handler_utils.py b/ipinfo/handler_utils.py index 782dfa2..9beb833 100644 --- a/ipinfo/handler_utils.py +++ b/ipinfo/handler_utils.py @@ -2,16 +2,25 @@ Utilities used in handlers. """ +import copy import json import os import sys -import copy from .version import SDK_VERSION # Base URL to make requests against. API_URL = "https://ipinfo.io" +# Base URL for the IPinfo Lite API +LITE_API_URL = "https://api.ipinfo.io/lite" + +# Base URL for the IPinfo Core API +CORE_API_URL = "https://api.ipinfo.io/lookup" + +# Base URL for the IPinfo Plus API (same as Core) +PLUS_API_URL = "https://api.ipinfo.io/lookup" + # Base URL to get country flag image link. # "PK" -> "https://cdn.ipinfo.io/static/images/countries-flags/PK.svg" COUNTRY_FLAGS_URL = "https://cdn.ipinfo.io/static/images/countries-flags/" @@ -65,21 +74,26 @@ def format_details( """ Format details given a countries object. 
""" - details["country_name"] = countries.get(details.get("country")) - details["isEU"] = details.get("country") in eu_countries - details["country_flag_url"] = ( - COUNTRY_FLAGS_URL + (details.get("country") or "") + ".svg" - ) - details["country_flag"] = copy.deepcopy( - countries_flags.get(details.get("country")) - ) - details["country_currency"] = copy.deepcopy( - countries_currencies.get(details.get("country")) - ) - details["continent"] = copy.deepcopy( - continents.get(details.get("country")) - ) - details["latitude"], details["longitude"] = read_coords(details.get("loc")) + country_code = "" + # Core and Lite API return the country_code in differently named fields + if "country_code" in details: + country_code = details.get("country_code") + elif "country" in details: + country_code = details.get("country") + + # country_code = details.get("country") + if country_name := countries.get(country_code): + details["country_name"] = country_name + details["isEU"] = country_code in eu_countries + details["country_flag_url"] = COUNTRY_FLAGS_URL + country_code + ".svg" + if flag := countries_flags.get(country_code): + details["country_flag"] = copy.deepcopy(flag) + if currency := countries_currencies.get(country_code): + details["country_currency"] = copy.deepcopy(currency) + if continent := continents.get(country_code): + details["continent"] = copy.deepcopy(continent) + if location := details.get("loc"): + details["latitude"], details["longitude"] = read_coords(location) def read_coords(location): diff --git a/ipinfo/version.py b/ipinfo/version.py index 0fa737d..8a784ca 100644 --- a/ipinfo/version.py +++ b/ipinfo/version.py @@ -1 +1 @@ -SDK_VERSION = "5.1.1" +SDK_VERSION = "5.2.1" diff --git a/requirements.in b/requirements.in index aece7f9..32be55c 100644 --- a/requirements.in +++ b/requirements.in @@ -1,10 +1,11 @@ # base requests>=2.18.4 cachetools==4.2.0 -aiohttp>=3.0.0,<=4 +aiohttp>=3.12.14,<=4 +frozenlist>=1.7.0 # dev -pytest==7.1.2 -pytest-asyncio==0.19.0 
+pytest==8.4.1 +pytest-asyncio==1.1.0 pip-tools==6.8.0 black==22.6.0 diff --git a/requirements.txt b/requirements.txt index 391a352..2cfb734 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,19 +1,21 @@ # -# This file is autogenerated by pip-compile with python 3.10 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: # # pip-compile --no-emit-index-url --no-emit-trusted-host # -aiohttp==3.8.1 +aiohappyeyeballs==2.6.1 + # via aiohttp +aiohttp==3.12.14 # via -r requirements.in -aiosignal==1.2.0 +aiosignal==1.4.0 # via aiohttp async-timeout==4.0.2 # via aiohttp attrs==22.1.0 - # via - # aiohttp - # pytest + # via aiohttp +backports-asyncio-runner==1.2.0; python_version < "3.11" + # via pytest-asyncio black==22.6.0 # via -r requirements.in build==0.8.0 @@ -23,15 +25,16 @@ cachetools==4.2.0 certifi==2022.6.15 # via requests charset-normalizer==2.1.1 - # via - # aiohttp - # requests + # via requests click==8.1.3 # via # black # pip-tools -frozenlist==1.3.1 +exceptiongroup==1.3.0 + # via pytest +frozenlist==1.7.0 # via + # -r requirements.in # aiohttp # aiosignal idna==3.3 @@ -58,17 +61,21 @@ pip-tools==6.8.0 # via -r requirements.in platformdirs==2.5.2 # via black -pluggy==1.0.0 +pluggy==1.6.0 # via pytest -py==1.11.0 +propcache==0.3.2 + # via + # aiohttp + # yarl +pygments==2.19.2 # via pytest pyparsing==3.0.9 # via packaging -pytest==7.1.2 +pytest==8.4.1 # via # -r requirements.in # pytest-asyncio -pytest-asyncio==0.19.0 +pytest-asyncio==1.1.0 # via -r requirements.in requests==2.28.1 # via -r requirements.in @@ -76,12 +83,17 @@ tomli==2.0.1 # via # black # build + # pep517 # pytest +typing-extensions==4.14.1 + # via + # aiosignal + # exceptiongroup urllib3==1.26.11 # via requests wheel==0.37.1 # via pip-tools -yarl==1.8.1 +yarl==1.20.1 # via aiohttp # The following packages are considered to be unsafe in a requirements file: diff --git a/tests/handler_async_test.py b/tests/handler_async_test.py index 
0ab46a7..6cc1011 100644 --- a/tests/handler_async_test.py +++ b/tests/handler_async_test.py @@ -1,5 +1,6 @@ import json import os +import sys from ipinfo.cache.default import DefaultCache from ipinfo.details import Details @@ -11,6 +12,8 @@ import pytest import aiohttp +skip_if_python_3_11_or_later = sys.version_info >= (3, 11) + class MockResponse: def __init__(self, text, status, headers): @@ -84,9 +87,9 @@ async def test_get_details(): continent = details.continent assert continent["code"] == "NA" assert continent["name"] == "North America" - assert details.loc == "37.4056,-122.0775" - assert details.latitude == "37.4056" - assert details.longitude == "-122.0775" + assert details.loc is not None + assert details.latitude is not None + assert details.longitude is not None assert details.postal == "94043" assert details.timezone == "America/Los_Angeles" if token: @@ -195,6 +198,7 @@ def _check_batch_details(ips, details, token): assert "domains" in d +@pytest.mark.skipif(skip_if_python_3_11_or_later, reason="Requires Python 3.10 or earlier") @pytest.mark.parametrize("batch_size", [None, 1, 2, 3]) @pytest.mark.asyncio async def test_get_batch_details(batch_size): @@ -225,6 +229,7 @@ async def test_get_iterative_batch_details(batch_size): _check_iterative_batch_details(ips, details, token) +@pytest.mark.skipif(skip_if_python_3_11_or_later, reason="Requires Python 3.10 or earlier") @pytest.mark.parametrize("batch_size", [None, 1, 2, 3]) @pytest.mark.asyncio async def test_get_batch_details_total_timeout(batch_size): diff --git a/tests/handler_core_async_test.py b/tests/handler_core_async_test.py new file mode 100644 index 0000000..b1f8e83 --- /dev/null +++ b/tests/handler_core_async_test.py @@ -0,0 +1,233 @@ +import os + +import pytest + +from ipinfo import handler_utils +from ipinfo.cache.default import DefaultCache +from ipinfo.details import Details +from ipinfo.handler_core_async import AsyncHandlerCore + + +@pytest.mark.asyncio +async def test_init(): + token = 
"mytesttoken" + handler = AsyncHandlerCore(token) + assert handler.access_token == token + assert isinstance(handler.cache, DefaultCache) + assert "US" in handler.countries + await handler.deinit() + + +@pytest.mark.asyncio +async def test_headers(): + token = "mytesttoken" + handler = AsyncHandlerCore(token, headers={"custom_field": "yes"}) + headers = handler_utils.get_headers(token, handler.headers) + await handler.deinit() + + assert "user-agent" in headers + assert "accept" in headers + assert "authorization" in headers + assert "custom_field" in headers + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Core API without token", +) +@pytest.mark.asyncio +async def test_get_details(): + """Test basic Core API lookup""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = AsyncHandlerCore(token) + details = await handler.getDetails("8.8.8.8") + + # Should return Details object + assert isinstance(details, Details) + assert details.ip == "8.8.8.8" + assert hasattr(details, "hostname") + + # Check nested geo object with all fields + assert hasattr(details, "geo") + assert isinstance(details.geo, dict) + assert "city" in details.geo + assert "region" in details.geo + assert "region_code" in details.geo + assert "country" in details.geo + assert "country_code" in details.geo + assert "continent" in details.geo + assert "continent_code" in details.geo + assert "latitude" in details.geo + assert "longitude" in details.geo + assert "timezone" in details.geo + assert "postal_code" in details.geo + assert "dma_code" in details.geo + assert "geoname_id" in details.geo + assert "radius" in details.geo + + # Check nested as object with all fields + assert "as" in details.all + as_obj = details.all["as"] + assert isinstance(as_obj, dict) + assert "asn" in as_obj + assert "name" in as_obj + assert "domain" in as_obj + assert "type" in as_obj + assert "last_changed" in as_obj + + # Check mobile and anonymous objects + assert hasattr(details, 
"mobile") + assert isinstance(details.mobile, dict) + assert hasattr(details, "anonymous") + assert isinstance(details.anonymous, dict) + assert "is_proxy" in details.anonymous + assert "is_relay" in details.anonymous + assert "is_tor" in details.anonymous + assert "is_vpn" in details.anonymous + + # Check all network/type flags + assert hasattr(details, "is_anonymous") + assert hasattr(details, "is_anycast") + assert hasattr(details, "is_hosting") + assert hasattr(details, "is_mobile") + assert hasattr(details, "is_satellite") + + # Check geo formatting was applied + assert "country_name" in details.geo + assert "isEU" in details.geo + assert "country_flag_url" in details.geo + + await handler.deinit() + + +############# +# BOGON TESTS +############# + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Core API without token", +) +@pytest.mark.asyncio +async def test_bogon_details(): + token = os.environ.get("IPINFO_TOKEN", "") + handler = AsyncHandlerCore(token) + details = await handler.getDetails("127.0.0.1") + assert isinstance(details, Details) + assert details.all == {"bogon": True, "ip": "127.0.0.1"} + await handler.deinit() + + +##################### +# BATCH TESTS +##################### + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Core API without token", +) +@pytest.mark.asyncio +async def test_batch_ips(): + """Test batch request with IPs""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = AsyncHandlerCore(token) + results = await handler.getBatchDetails(["8.8.8.8", "1.1.1.1"]) + + assert len(results) == 2 + assert "8.8.8.8" in results + assert "1.1.1.1" in results + + # Both should be Details objects + assert isinstance(results["8.8.8.8"], Details) + assert isinstance(results["1.1.1.1"], Details) + + # Check structure - Core API returns nested geo and as objects + assert hasattr(results["8.8.8.8"], "geo") + assert "as" in results["8.8.8.8"].all + + await handler.deinit() + + 
+@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Core API without token", +) +@pytest.mark.asyncio +async def test_batch_with_bogon(): + """Test batch including bogon IPs""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = AsyncHandlerCore(token) + results = await handler.getBatchDetails( + [ + "8.8.8.8", + "127.0.0.1", # Bogon + "1.1.1.1", + ] + ) + + assert len(results) == 3 + + # Normal IPs should be Details + assert isinstance(results["8.8.8.8"], Details) + assert isinstance(results["1.1.1.1"], Details) + + # Bogon should also be Details with bogon flag + assert isinstance(results["127.0.0.1"], Details) + assert results["127.0.0.1"].bogon == True + + await handler.deinit() + + +##################### +# CACHING TESTS +##################### + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Core API without token", +) +@pytest.mark.asyncio +async def test_caching(): + """Test that results are properly cached""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = AsyncHandlerCore(token) + + # First request - should hit API + details1 = await handler.getDetails("8.8.8.8") + assert isinstance(details1, Details) + + # Second request - should come from cache + details2 = await handler.getDetails("8.8.8.8") + assert isinstance(details2, Details) + assert details2.ip == details1.ip + + # Verify cache key exists + cache_key_val = handler_utils.cache_key("8.8.8.8") + assert cache_key_val in handler.cache + + await handler.deinit() + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Core API without token", +) +@pytest.mark.asyncio +async def test_batch_caching(): + """Test that batch results are properly cached""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = AsyncHandlerCore(token) + + # First batch request + results1 = await handler.getBatchDetails(["8.8.8.8", "1.1.1.1"]) + assert len(results1) == 2 + + # Second batch with same IPs (should come from 
cache) + results2 = await handler.getBatchDetails(["8.8.8.8", "1.1.1.1"]) + assert len(results2) == 2 + assert results2["8.8.8.8"].ip == results1["8.8.8.8"].ip + + await handler.deinit() diff --git a/tests/handler_core_test.py b/tests/handler_core_test.py new file mode 100644 index 0000000..ded97d4 --- /dev/null +++ b/tests/handler_core_test.py @@ -0,0 +1,212 @@ +import os + +import pytest + +from ipinfo import handler_utils +from ipinfo.cache.default import DefaultCache +from ipinfo.details import Details +from ipinfo.handler_core import HandlerCore + + +def test_init(): + token = "mytesttoken" + handler = HandlerCore(token) + assert handler.access_token == token + assert isinstance(handler.cache, DefaultCache) + assert "US" in handler.countries + + +def test_headers(): + token = "mytesttoken" + handler = HandlerCore(token, headers={"custom_field": "yes"}) + headers = handler_utils.get_headers(token, handler.headers) + + assert "user-agent" in headers + assert "accept" in headers + assert "authorization" in headers + assert "custom_field" in headers + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Core API without token", +) +def test_get_details(): + """Test basic Core API lookup""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = HandlerCore(token) + details = handler.getDetails("8.8.8.8") + + # Should return Details object + assert isinstance(details, Details) + assert details.ip == "8.8.8.8" + assert hasattr(details, "hostname") + + # Check nested geo object with all fields + assert hasattr(details, "geo") + assert isinstance(details.geo, dict) + assert "city" in details.geo + assert "region" in details.geo + assert "region_code" in details.geo + assert "country" in details.geo + assert "country_code" in details.geo + assert "continent" in details.geo + assert "continent_code" in details.geo + assert "latitude" in details.geo + assert "longitude" in details.geo + assert "timezone" in details.geo + assert "postal_code" 
in details.geo + assert "dma_code" in details.geo + assert "geoname_id" in details.geo + assert "radius" in details.geo + + # Check nested as object with all fields + assert "as" in details.all + as_obj = details.all["as"] + assert isinstance(as_obj, dict) + assert "asn" in as_obj + assert "name" in as_obj + assert "domain" in as_obj + assert "type" in as_obj + assert "last_changed" in as_obj + + # Check mobile and anonymous objects + assert hasattr(details, "mobile") + assert isinstance(details.mobile, dict) + assert hasattr(details, "anonymous") + assert isinstance(details.anonymous, dict) + assert "is_proxy" in details.anonymous + assert "is_relay" in details.anonymous + assert "is_tor" in details.anonymous + assert "is_vpn" in details.anonymous + + # Check all network/type flags + assert hasattr(details, "is_anonymous") + assert hasattr(details, "is_anycast") + assert hasattr(details, "is_hosting") + assert hasattr(details, "is_mobile") + assert hasattr(details, "is_satellite") + + # Check geo formatting was applied + assert "country_name" in details.geo + assert "isEU" in details.geo + assert "country_flag_url" in details.geo + + +############# +# BOGON TESTS +############# + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Core API without token", +) +def test_bogon_details(): + token = os.environ.get("IPINFO_TOKEN", "") + handler = HandlerCore(token) + details = handler.getDetails("127.0.0.1") + assert isinstance(details, Details) + assert details.all == {"bogon": True, "ip": "127.0.0.1"} + + +##################### +# BATCH TESTS +##################### + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Core API without token", +) +def test_batch_ips(): + """Test batch request with IPs""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = HandlerCore(token) + results = handler.getBatchDetails(["8.8.8.8", "1.1.1.1"]) + + assert len(results) == 2 + assert "8.8.8.8" in results + assert 
"1.1.1.1" in results + + # Both should be Details objects + assert isinstance(results["8.8.8.8"], Details) + assert isinstance(results["1.1.1.1"], Details) + + # Check structure - Core API returns nested geo and as objects + assert hasattr(results["8.8.8.8"], "geo") + assert "as" in results["8.8.8.8"].all + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Core API without token", +) +def test_batch_with_bogon(): + """Test batch including bogon IPs""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = HandlerCore(token) + results = handler.getBatchDetails( + [ + "8.8.8.8", + "127.0.0.1", # Bogon + "1.1.1.1", + ] + ) + + assert len(results) == 3 + + # Normal IPs should be Details + assert isinstance(results["8.8.8.8"], Details) + assert isinstance(results["1.1.1.1"], Details) + + # Bogon should also be Details with bogon flag + assert isinstance(results["127.0.0.1"], Details) + assert results["127.0.0.1"].bogon == True + + +##################### +# CACHING TESTS +##################### + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Core API without token", +) +def test_caching(): + """Test that results are properly cached""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = HandlerCore(token) + + # First request - should hit API + details1 = handler.getDetails("8.8.8.8") + assert isinstance(details1, Details) + + # Second request - should come from cache + details2 = handler.getDetails("8.8.8.8") + assert isinstance(details2, Details) + assert details2.ip == details1.ip + + # Verify cache key exists + cache_key_val = handler_utils.cache_key("8.8.8.8") + assert cache_key_val in handler.cache + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Core API without token", +) +def test_batch_caching(): + """Test that batch results are properly cached""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = HandlerCore(token) + + # First batch request + results1 
= handler.getBatchDetails(["8.8.8.8", "1.1.1.1"]) + assert len(results1) == 2 + + # Second batch with same IPs (should come from cache) + results2 = handler.getBatchDetails(["8.8.8.8", "1.1.1.1"]) + assert len(results2) == 2 + assert results2["8.8.8.8"].ip == results1["8.8.8.8"].ip diff --git a/tests/handler_lite_async_test.py b/tests/handler_lite_async_test.py new file mode 100644 index 0000000..5a5cc25 --- /dev/null +++ b/tests/handler_lite_async_test.py @@ -0,0 +1,169 @@ +import json +import os + +import aiohttp +import pytest + +from ipinfo import handler_utils +from ipinfo.cache.default import DefaultCache +from ipinfo.details import Details +from ipinfo.error import APIError +from ipinfo.handler_lite_async import AsyncHandlerLite + + +class MockResponse: + def __init__(self, text, status, headers): + self._text = text + self.status = status + self.headers = headers + + def text(self): + return self._text + + async def json(self): + return json.loads(self._text) + + async def __aexit__(self, exc_type, exc_val, exc_tb): + pass + + async def __aenter__(self): + return self + + async def release(self): + pass + + +@pytest.mark.asyncio +async def test_init(): + token = "mytesttoken" + handler = AsyncHandlerLite(token) + assert handler.access_token == token + assert isinstance(handler.cache, DefaultCache) + assert "PK" in handler.countries + await handler.deinit() + + +@pytest.mark.asyncio +async def test_headers(): + token = "mytesttoken" + handler = AsyncHandlerLite(token, headers={"custom_field": "yes"}) + headers = handler_utils.get_headers(token, handler.headers) + await handler.deinit() + + assert "user-agent" in headers + assert "accept" in headers + assert "authorization" in headers + assert "custom_field" in headers + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Lite API without token", +) +@pytest.mark.asyncio +async def test_get_details(): + token = os.environ.get("IPINFO_TOKEN", "") + handler = AsyncHandlerLite(token) 
+ details = await handler.getDetails("8.8.8.8") + assert isinstance(details, Details) + assert details.ip == "8.8.8.8" + assert details.asn == "AS15169" + assert details.as_name == "Google LLC" + assert details.as_domain == "google.com" + assert details.country_code == "US" + assert details.country == "United States" + assert details.continent_code == "NA" + assert details.continent == {"code": "NA", "name": "North America"} + assert details.country_name == "United States" + assert not details.isEU + assert ( + details.country_flag_url + == "https://cdn.ipinfo.io/static/images/countries-flags/US.svg" + ) + assert details.country_flag == {"emoji": "🇺🇸", "unicode": "U+1F1FA U+1F1F8"} + assert details.country_currency == {"code": "USD", "symbol": "$"} + assert not hasattr(details, "latitude") + assert not hasattr(details, "longitude") + + await handler.deinit() + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Lite API without token", +) +@pytest.mark.parametrize( + ( + "mock_resp_status_code", + "mock_resp_headers", + "mock_resp_error_msg", + "expected_error_json", + ), + [ + pytest.param( + 503, + {"Content-Type": "text/plain"}, + "Service Unavailable", + {"error": "Service Unavailable"}, + id="5xx_not_json", + ), + pytest.param( + 403, + {"Content-Type": "application/json"}, + '{"message": "missing token"}', + {"message": "missing token"}, + id="4xx_json", + ), + pytest.param( + 400, + {"Content-Type": "application/json"}, + '{"message": "missing field"}', + {"message": "missing field"}, + id="400", + ), + ], +) +@pytest.mark.asyncio +async def test_get_details_error( + monkeypatch, + mock_resp_status_code, + mock_resp_headers, + mock_resp_error_msg, + expected_error_json, +): + async def mock_get(*args, **kwargs): + response = MockResponse( + status=mock_resp_status_code, + text=mock_resp_error_msg, + headers=mock_resp_headers, + ) + return response + + monkeypatch.setattr( + aiohttp.ClientSession, + "get", + lambda *args, 
**kwargs: aiohttp.client._RequestContextManager(mock_get()), + ) + token = os.environ.get("IPINFO_TOKEN", "") + handler = AsyncHandlerLite(token) + with pytest.raises(APIError) as exc_info: + await handler.getDetails("8.8.8.8") + assert exc_info.value.error_code == mock_resp_status_code + assert exc_info.value.error_json == expected_error_json + + +############# +# BOGON TESTS +############# + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Lite API without token", +) +@pytest.mark.asyncio +async def test_bogon_details(): + token = os.environ.get("IPINFO_TOKEN", "") + handler = AsyncHandlerLite(token) + details = await handler.getDetails("127.0.0.1") + assert details.all == {"bogon": True, "ip": "127.0.0.1"} diff --git a/tests/handler_lite_test.py b/tests/handler_lite_test.py new file mode 100644 index 0000000..baa4c63 --- /dev/null +++ b/tests/handler_lite_test.py @@ -0,0 +1,73 @@ +import os + +import pytest + +from ipinfo import handler_utils +from ipinfo.cache.default import DefaultCache +from ipinfo.details import Details +from ipinfo.handler_lite import HandlerLite + + +def test_init(): + token = "mytesttoken" + handler = HandlerLite(token) + assert handler.access_token == token + assert isinstance(handler.cache, DefaultCache) + assert "US" in handler.countries + + +def test_headers(): + token = "mytesttoken" + handler = HandlerLite(token, headers={"custom_field": "yes"}) + headers = handler_utils.get_headers(token, handler.headers) + + assert "user-agent" in headers + assert "accept" in headers + assert "authorization" in headers + assert "custom_field" in headers + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Lite API without token", +) +def test_get_details(): + token = os.environ.get("IPINFO_TOKEN", "") + handler = HandlerLite(token) + details = handler.getDetails("8.8.8.8") + assert isinstance(details, Details) + assert details.ip == "8.8.8.8" + assert details.asn == "AS15169" + assert 
details.as_name == "Google LLC" + assert details.as_domain == "google.com" + assert details.country_code == "US" + assert details.country == "United States" + assert details.continent_code == "NA" + assert details.continent == {"code": "NA", "name": "North America"} + assert details.country_name == "United States" + assert not details.isEU + assert ( + details.country_flag_url + == "https://cdn.ipinfo.io/static/images/countries-flags/US.svg" + ) + assert details.country_flag == {"emoji": "🇺🇸", "unicode": "U+1F1FA U+1F1F8"} + assert details.country_currency == {"code": "USD", "symbol": "$"} + assert not hasattr(details, "latitude") + assert not hasattr(details, "longitude") + + +############# +# BOGON TESTS +############# + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Lite API without token", +) +def test_bogon_details(): + token = os.environ.get("IPINFO_TOKEN", "") + handler = HandlerLite(token) + details = handler.getDetails("127.0.0.1") + assert isinstance(details, Details) + assert details.all == {"bogon": True, "ip": "127.0.0.1"} diff --git a/tests/handler_plus_async_test.py b/tests/handler_plus_async_test.py new file mode 100644 index 0000000..49cb438 --- /dev/null +++ b/tests/handler_plus_async_test.py @@ -0,0 +1,233 @@ +import os + +import pytest + +from ipinfo import handler_utils +from ipinfo.cache.default import DefaultCache +from ipinfo.details import Details +from ipinfo.handler_plus_async import AsyncHandlerPlus + + +@pytest.mark.asyncio +async def test_init(): + token = "mytesttoken" + handler = AsyncHandlerPlus(token) + assert handler.access_token == token + assert isinstance(handler.cache, DefaultCache) + assert "US" in handler.countries + await handler.deinit() + + +@pytest.mark.asyncio +async def test_headers(): + token = "mytesttoken" + handler = AsyncHandlerPlus(token, headers={"custom_field": "yes"}) + headers = handler_utils.get_headers(token, handler.headers) + await handler.deinit() + + assert "user-agent" 
in headers + assert "accept" in headers + assert "authorization" in headers + assert "custom_field" in headers + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Plus API without token", +) +@pytest.mark.asyncio +async def test_get_details(): + """Test basic Plus API lookup""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = AsyncHandlerPlus(token) + details = await handler.getDetails("8.8.8.8") + + # Should return Details object + assert isinstance(details, Details) + assert details.ip == "8.8.8.8" + assert hasattr(details, "hostname") + + # Check nested geo object with all fields + assert hasattr(details, "geo") + assert isinstance(details.geo, dict) + assert "city" in details.geo + assert "region" in details.geo + assert "region_code" in details.geo + assert "country" in details.geo + assert "country_code" in details.geo + assert "continent" in details.geo + assert "continent_code" in details.geo + assert "latitude" in details.geo + assert "longitude" in details.geo + assert "timezone" in details.geo + assert "postal_code" in details.geo + assert "dma_code" in details.geo + assert "geoname_id" in details.geo + assert "radius" in details.geo + + # Check nested as object with all fields + assert "as" in details.all + as_obj = details.all["as"] + assert isinstance(as_obj, dict) + assert "asn" in as_obj + assert "name" in as_obj + assert "domain" in as_obj + assert "type" in as_obj + assert "last_changed" in as_obj + + # Check mobile and anonymous objects + assert hasattr(details, "mobile") + assert isinstance(details.mobile, dict) + assert hasattr(details, "anonymous") + assert isinstance(details.anonymous, dict) + assert "is_proxy" in details.anonymous + assert "is_relay" in details.anonymous + assert "is_tor" in details.anonymous + assert "is_vpn" in details.anonymous + + # Check all network/type flags + assert hasattr(details, "is_anonymous") + assert hasattr(details, "is_anycast") + assert hasattr(details, "is_hosting") + 
assert hasattr(details, "is_mobile") + assert hasattr(details, "is_satellite") + + # Check geo formatting was applied + assert "country_name" in details.geo + assert "isEU" in details.geo + assert "country_flag_url" in details.geo + + await handler.deinit() + + +############# +# BOGON TESTS +############# + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Plus API without token", +) +@pytest.mark.asyncio +async def test_bogon_details(): + token = os.environ.get("IPINFO_TOKEN", "") + handler = AsyncHandlerPlus(token) + details = await handler.getDetails("127.0.0.1") + assert isinstance(details, Details) + assert details.all == {"bogon": True, "ip": "127.0.0.1"} + await handler.deinit() + + +##################### +# BATCH TESTS +##################### + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Plus API without token", +) +@pytest.mark.asyncio +async def test_batch_ips(): + """Test batch request with IPs""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = AsyncHandlerPlus(token) + results = await handler.getBatchDetails(["8.8.8.8", "1.1.1.1"]) + + assert len(results) == 2 + assert "8.8.8.8" in results + assert "1.1.1.1" in results + + # Both should be Details objects + assert isinstance(results["8.8.8.8"], Details) + assert isinstance(results["1.1.1.1"], Details) + + # Check structure - Plus API returns nested geo and as objects + assert hasattr(results["8.8.8.8"], "geo") + assert "as" in results["8.8.8.8"].all + + await handler.deinit() + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Plus API without token", +) +@pytest.mark.asyncio +async def test_batch_with_bogon(): + """Test batch including bogon IPs""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = AsyncHandlerPlus(token) + results = await handler.getBatchDetails( + [ + "8.8.8.8", + "127.0.0.1", # Bogon + "1.1.1.1", + ] + ) + + assert len(results) == 3 + + # Normal IPs should be Details + 
assert isinstance(results["8.8.8.8"], Details) + assert isinstance(results["1.1.1.1"], Details) + + # Bogon should also be Details with bogon flag + assert isinstance(results["127.0.0.1"], Details) + assert results["127.0.0.1"].bogon == True + + await handler.deinit() + + +##################### +# CACHING TESTS +##################### + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Plus API without token", +) +@pytest.mark.asyncio +async def test_caching(): + """Test that results are properly cached""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = AsyncHandlerPlus(token) + + # First request - should hit API + details1 = await handler.getDetails("8.8.8.8") + assert isinstance(details1, Details) + + # Second request - should come from cache + details2 = await handler.getDetails("8.8.8.8") + assert isinstance(details2, Details) + assert details2.ip == details1.ip + + # Verify cache key exists + cache_key_val = handler_utils.cache_key("8.8.8.8") + assert cache_key_val in handler.cache + + await handler.deinit() + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Plus API without token", +) +@pytest.mark.asyncio +async def test_batch_caching(): + """Test that batch results are properly cached""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = AsyncHandlerPlus(token) + + # First batch request + results1 = await handler.getBatchDetails(["8.8.8.8", "1.1.1.1"]) + assert len(results1) == 2 + + # Second batch with same IPs (should come from cache) + results2 = await handler.getBatchDetails(["8.8.8.8", "1.1.1.1"]) + assert len(results2) == 2 + assert results2["8.8.8.8"].ip == results1["8.8.8.8"].ip + + await handler.deinit() diff --git a/tests/handler_plus_test.py b/tests/handler_plus_test.py new file mode 100644 index 0000000..056117f --- /dev/null +++ b/tests/handler_plus_test.py @@ -0,0 +1,212 @@ +import os + +import pytest + +from ipinfo import handler_utils +from ipinfo.cache.default 
import DefaultCache +from ipinfo.details import Details +from ipinfo.handler_plus import HandlerPlus + + +def test_init(): + token = "mytesttoken" + handler = HandlerPlus(token) + assert handler.access_token == token + assert isinstance(handler.cache, DefaultCache) + assert "US" in handler.countries + + +def test_headers(): + token = "mytesttoken" + handler = HandlerPlus(token, headers={"custom_field": "yes"}) + headers = handler_utils.get_headers(token, handler.headers) + + assert "user-agent" in headers + assert "accept" in headers + assert "authorization" in headers + assert "custom_field" in headers + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Plus API without token", +) +def test_get_details(): + """Test basic Plus API lookup""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = HandlerPlus(token) + details = handler.getDetails("8.8.8.8") + + # Should return Details object + assert isinstance(details, Details) + assert details.ip == "8.8.8.8" + assert hasattr(details, "hostname") + + # Check nested geo object with all fields + assert hasattr(details, "geo") + assert isinstance(details.geo, dict) + assert "city" in details.geo + assert "region" in details.geo + assert "region_code" in details.geo + assert "country" in details.geo + assert "country_code" in details.geo + assert "continent" in details.geo + assert "continent_code" in details.geo + assert "latitude" in details.geo + assert "longitude" in details.geo + assert "timezone" in details.geo + assert "postal_code" in details.geo + assert "dma_code" in details.geo + assert "geoname_id" in details.geo + assert "radius" in details.geo + + # Check nested as object with all fields + assert "as" in details.all + as_obj = details.all["as"] + assert isinstance(as_obj, dict) + assert "asn" in as_obj + assert "name" in as_obj + assert "domain" in as_obj + assert "type" in as_obj + assert "last_changed" in as_obj + + # Check mobile and anonymous objects + assert 
hasattr(details, "mobile") + assert isinstance(details.mobile, dict) + assert hasattr(details, "anonymous") + assert isinstance(details.anonymous, dict) + assert "is_proxy" in details.anonymous + assert "is_relay" in details.anonymous + assert "is_tor" in details.anonymous + assert "is_vpn" in details.anonymous + + # Check all network/type flags + assert hasattr(details, "is_anonymous") + assert hasattr(details, "is_anycast") + assert hasattr(details, "is_hosting") + assert hasattr(details, "is_mobile") + assert hasattr(details, "is_satellite") + + # Check geo formatting was applied + assert "country_name" in details.geo + assert "isEU" in details.geo + assert "country_flag_url" in details.geo + + +############# +# BOGON TESTS +############# + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Plus API without token", +) +def test_bogon_details(): + token = os.environ.get("IPINFO_TOKEN", "") + handler = HandlerPlus(token) + details = handler.getDetails("127.0.0.1") + assert isinstance(details, Details) + assert details.all == {"bogon": True, "ip": "127.0.0.1"} + + +##################### +# BATCH TESTS +##################### + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Plus API without token", +) +def test_batch_ips(): + """Test batch request with IPs""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = HandlerPlus(token) + results = handler.getBatchDetails(["8.8.8.8", "1.1.1.1"]) + + assert len(results) == 2 + assert "8.8.8.8" in results + assert "1.1.1.1" in results + + # Both should be Details objects + assert isinstance(results["8.8.8.8"], Details) + assert isinstance(results["1.1.1.1"], Details) + + # Check structure - Plus API returns nested geo and as objects + assert hasattr(results["8.8.8.8"], "geo") + assert "as" in results["8.8.8.8"].all + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Plus API without token", +) +def test_batch_with_bogon(): + 
"""Test batch including bogon IPs""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = HandlerPlus(token) + results = handler.getBatchDetails( + [ + "8.8.8.8", + "127.0.0.1", # Bogon + "1.1.1.1", + ] + ) + + assert len(results) == 3 + + # Normal IPs should be Details + assert isinstance(results["8.8.8.8"], Details) + assert isinstance(results["1.1.1.1"], Details) + + # Bogon should also be Details with bogon flag + assert isinstance(results["127.0.0.1"], Details) + assert results["127.0.0.1"].bogon == True + + +##################### +# CACHING TESTS +##################### + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Plus API without token", +) +def test_caching(): + """Test that results are properly cached""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = HandlerPlus(token) + + # First request - should hit API + details1 = handler.getDetails("8.8.8.8") + assert isinstance(details1, Details) + + # Second request - should come from cache + details2 = handler.getDetails("8.8.8.8") + assert isinstance(details2, Details) + assert details2.ip == details1.ip + + # Verify cache key exists + cache_key_val = handler_utils.cache_key("8.8.8.8") + assert cache_key_val in handler.cache + + +@pytest.mark.skipif( + "IPINFO_TOKEN" not in os.environ, + reason="Can't call Plus API without token", +) +def test_batch_caching(): + """Test that batch results are properly cached""" + token = os.environ.get("IPINFO_TOKEN", "") + handler = HandlerPlus(token) + + # First batch request + results1 = handler.getBatchDetails(["8.8.8.8", "1.1.1.1"]) + assert len(results1) == 2 + + # Second batch with same IPs (should come from cache) + results2 = handler.getBatchDetails(["8.8.8.8", "1.1.1.1"]) + assert len(results2) == 2 + assert results2["8.8.8.8"].ip == results1["8.8.8.8"].ip diff --git a/tests/handler_test.py b/tests/handler_test.py index 3767622..329753d 100644 --- a/tests/handler_test.py +++ b/tests/handler_test.py @@ -56,9 +56,9 @@ def 
test_get_details(): continent = details.continent assert continent["code"] == "NA" assert continent["name"] == "North America" - assert details.loc == "37.4056,-122.0775" - assert details.latitude == "37.4056" - assert details.longitude == "-122.0775" + assert details.loc is not None + assert details.latitude is not None + assert details.longitude is not None assert details.postal == "94043" assert details.timezone == "America/Los_Angeles" if token: @@ -210,11 +210,12 @@ def test_get_iterative_batch_details(batch_size): # MAP TESTS ############# - -def test_get_map(): - handler = Handler() - mapUrl = handler.getMap(open("tests/map-ips.txt").read().splitlines()) - print(f"got URL={mapUrl}") +# Disabled temporarily +# +# def test_get_map(): +# handler = Handler() +# mapUrl = handler.getMap(open("tests/map-ips.txt").read().splitlines()) +# print(f"got URL={mapUrl}") #############