diff --git a/censys/asm/__init__.py b/censys/asm/__init__.py index 634ebf59..df8fb3d1 100644 --- a/censys/asm/__init__.py +++ b/censys/asm/__init__.py @@ -1,4 +1,4 @@ -"""An easy-to-use and lightweight API wrapper for Censys ASM (censys.io).""" +"""An easy-to-use and lightweight API wrapper for Censys ASM (app.censys.io).""" from .assets import Assets, CertificatesAssets, DomainsAssets, HostsAssets from .client import AsmClient from .clouds import Clouds diff --git a/censys/cli/commands/search.py b/censys/cli/commands/search.py index 39c3d806..8b47473d 100644 --- a/censys/cli/commands/search.py +++ b/censys/cli/commands/search.py @@ -5,7 +5,7 @@ from typing import List from urllib.parse import urlencode -from censys.cli.utils import INDEXES, V1_INDEXES, V2_INDEXES, console, write_file +from censys.cli.utils import INDEXES, V1_INDEXES, V2_INDEXES, err_console, write_file from censys.common.exceptions import CensysCLIException from censys.search import SearchClient @@ -13,23 +13,6 @@ Results = List[dict] DEFAULT_FIELDS = { - "ipv4": [ - "updated_at", - "protocols", - "metadata.description", - "autonomous_system.name", - "23.telnet.banner.banner", - "80.http.get.title", - "80.http.get.metadata.description", - "8080.http.get.metadata.description", - "8888.http.get.metadata.description", - "443.https.get.metadata.description", - "443.https.get.title", - "443.https.tls.certificate.parsed.subject_dn", - "443.https.tls.certificate.parsed.names", - "443.https.tls.certificate.parsed.subject.common_name", - "443.https.tls.certificate.parsed.extensions.subject_alt_name.dns_names", - ], "certs": [ "metadata.updated_at", "parsed.issuer.common_name", @@ -44,15 +27,6 @@ "metadata.seen_in_scan", "tags", ], - "websites": [ - "443.https.tls.version", - "alexa_rank", - "domain", - "ports", - "protocols", - "tags", - "updated_at", - ], } @@ -69,17 +43,12 @@ def cli_search(args: argparse.Namespace): if args.open: url_query = {"q": args.query} - if index_type in V1_INDEXES: - if index_type == "certs": - index_type = "certificates" - # TODO: Remove when v1 is fully deprecated - webbrowser.open( - f"https://search.censys.io/{index_type}?{urlencode(url_query)}" - ) - sys.exit(0) - webbrowser.open(f"https://censys.io/{index_type}?{urlencode(url_query)}") + if index_type in {"certs", "certificates"}: + webbrowser.open( + f"https://search.censys.io/certificates?{urlencode(url_query)}" + ) sys.exit(0) - elif index_type in V2_INDEXES: + if index_type in V2_INDEXES: url_query.update({"resource": index_type}) webbrowser.open(f"https://search.censys.io/search?{urlencode(url_query)}") sys.exit(0) @@ -120,7 +89,7 @@ def cli_search(args: argparse.Namespace): search_args["fields"] = fields - with console.status("Searching"): + with err_console.status("Searching"): results = list(index.search(args.query, **search_args)) elif index_type in V2_INDEXES: if args.format == "csv": @@ -132,7 +101,7 @@ def cli_search(args: argparse.Namespace): if args.pages: search_args["pages"] = args.pages - with console.status("Searching"): + with err_console.status("Searching"): query = index.search(args.query, **search_args) results = [] @@ -142,7 +111,7 @@ def cli_search(args: argparse.Namespace): try: write_file(results, **write_args) except ValueError as error: # pragma: no cover - console.print(f"Error writing log file. Error: {error}") + err_console.print(f"Error writing log file. 
Error: {error}") def include(parent_parser: argparse._SubParsersAction, parents: dict): diff --git a/censys/cli/utils.py b/censys/cli/utils.py index 4af883e8..71a6df29 100644 --- a/censys/cli/utils.py +++ b/censys/cli/utils.py @@ -4,6 +4,7 @@ import datetime import json import os.path +import sys from typing import Any, Dict, List, Optional, Union from rich.console import Console @@ -14,13 +15,15 @@ Fields = List[str] Results = Union[List[dict], Dict[str, Any]] -V1_INDEXES = ["ipv4", "certs", "websites"] +V1_INDEXES = ["certs"] V2_INDEXES = ["hosts"] INDEXES = V1_INDEXES + V2_INDEXES config = get_config() color = config.get(DEFAULT, "color") -console = Console(color_system=("auto" if color else None)) +color_system = "auto" if color else None +console = Console(color_system=color_system) # type: ignore +err_console = Console(color_system=color_system, file=sys.stderr) # type: ignore def print_wrote_file(file_path: str): diff --git a/censys/search/__init__.py b/censys/search/__init__.py index 7f8bb481..287114c3 100644 --- a/censys/search/__init__.py +++ b/censys/search/__init__.py @@ -1,6 +1,6 @@ -"""An easy-to-use and lightweight API wrapper for Censys Search API (censys.io).""" +"""An easy-to-use and lightweight API wrapper for Censys Search API (search.censys.io).""" from .client import SearchClient -from .v1 import CensysCertificates, CensysData, CensysIPv4, CensysWebsites +from .v1 import CensysCertificates, CensysData from .v2 import CensysCerts, CensysHosts __copyright__ = "Copyright 2021 Censys, Inc." @@ -8,8 +8,6 @@ "SearchClient", "CensysCertificates", "CensysData", - "CensysIPv4", - "CensysWebsites", "CensysCerts", "CensysHosts", ] diff --git a/censys/search/client.py b/censys/search/client.py index ab2c5791..41321cb1 100644 --- a/censys/search/client.py +++ b/censys/search/client.py @@ -1,6 +1,6 @@ """Interact with all Search APIs.""" from ..common.deprecation import DeprecationDecorator -from .v1 import CensysCertificates, CensysData, CensysIPv4, CensysWebsites +from .v1 import CensysCertificates, CensysData from .v2 import CensysCerts, CensysHosts @@ -19,8 +19,6 @@ class SearchClient: >>> certs = c.v1.certificates # CensysCertificates() >>> data = c.v1.data # CensysData() - >>> ipv4 = c.v1.ipv4 # CensysIPv4() - >>> websites = c.v1.websites # CensysWebsites() >>> hosts = c.v2.hosts # CensysHosts() >>> certs = c.v2.certs # CensysCerts() """ @@ -42,8 +40,6 @@ def __init__(self, *args, **kwargs): # Alias certs to certificates self.certs = self.certificates self.data = CensysData(*args, **kwargs) - self.ipv4 = CensysIPv4(*args, **kwargs) - self.websites = CensysWebsites(*args, **kwargs) class _V2: """Class for v2 Search APIs.""" diff --git a/censys/search/v1/__init__.py b/censys/search/v1/__init__.py index 728543b4..9af13212 100644 --- a/censys/search/v1/__init__.py +++ b/censys/search/v1/__init__.py @@ -1,7 +1,5 @@ """Interact with the Censys Search v1 APIs.""" from .certificates import CensysCertificates from .data import CensysData -from .ipv4 import CensysIPv4 -from .websites import CensysWebsites -__all__ = ["CensysCertificates", "CensysData", "CensysIPv4", "CensysWebsites"] +__all__ = ["CensysCertificates", "CensysData"] diff --git a/censys/search/v1/api.py b/censys/search/v1/api.py index a1e93d34..10b4e705 100644 --- a/censys/search/v1/api.py +++ b/censys/search/v1/api.py @@ -18,7 +18,7 @@ class CensysSearchAPIv1(CensysAPIBase): """This class is the base class for all v1 API indexes.""" - DEFAULT_URL: str = "https://censys.io/api/v1" + DEFAULT_URL: str = 
"https://search.censys.io/api/v1" """Default Search API base URL.""" INDEX_NAME: Optional[str] = None """Name of Censys Index.""" @@ -112,7 +112,7 @@ def search( ) -> Iterator[dict]: """Searches the given index for all records that match the given query. - For more details, see our documentation: https://censys.io/api/v1/docs/search + For more details, see our documentation: https://search.censys.io/api Args: query (str): The query to be executed. @@ -152,7 +152,7 @@ def search( def view(self, document_id: str) -> dict: """View the current structured data we have on a specific document. - For more details, see our documentation: https://censys.io/api/v1/docs/view + For more details, see our documentation: https://search.censys.io/api Args: document_id (str): The ID of the document you are requesting. @@ -165,7 +165,7 @@ def view(self, document_id: str) -> dict: def report(self, query: str, field: str, buckets: int = 50) -> dict: """Creates a report on the breakdown of the values of a field in a result set. - For more details, see our documentation: https://censys.io/api/v1/docs/report + For more details, see our documentation: https://search.censys.io/api Args: query (str): The query to be executed. @@ -177,6 +177,3 @@ def report(self, query: str, field: str, buckets: int = 50) -> dict: """ data = {"query": query, "field": field, "buckets": int(buckets)} return self._post(self.report_path, data=data) - - -CensysSearchAPI = CensysSearchAPIv1 diff --git a/censys/search/v1/data.py b/censys/search/v1/data.py index cf324559..45ec1594 100644 --- a/censys/search/v1/data.py +++ b/censys/search/v1/data.py @@ -5,7 +5,7 @@ class CensysData(CensysSearchAPIv1): """Interacts with the Data index. - For more details, see our documentation: https://censys.io/api/v1/docs/data + For more details, see our documentation: https://search.censys.io/api """ _PREFIX = "/data" diff --git a/censys/search/v1/ipv4.py b/censys/search/v1/ipv4.py deleted file mode 100644 index 81d2c4ff..00000000 --- a/censys/search/v1/ipv4.py +++ /dev/null @@ -1,9 +0,0 @@ -"""Interact with the Censys Search IPv4 API.""" -from .api import CensysSearchAPIv1 - - -class CensysIPv4(CensysSearchAPIv1): - """Interacts with the IPv4 index.""" - - INDEX_NAME = "ipv4" - """Name of Censys Index.""" diff --git a/censys/search/v1/websites.py b/censys/search/v1/websites.py deleted file mode 100644 index 13483a77..00000000 --- a/censys/search/v1/websites.py +++ /dev/null @@ -1,9 +0,0 @@ -"""Interact with the Censys Search Website API.""" -from .api import CensysSearchAPIv1 - - -class CensysWebsites(CensysSearchAPIv1): - """Interacts with the Websites index.""" - - INDEX_NAME = "websites" - """Name of Censys Index.""" diff --git a/censys/search/v2/api.py b/censys/search/v2/api.py index 87a4fd09..f95b2fac 100644 --- a/censys/search/v2/api.py +++ b/censys/search/v2/api.py @@ -14,7 +14,6 @@ ) from censys.common.types import Datetime from censys.common.utils import format_rfc3339 -from censys.search.v1.api import CensysSearchAPIv1 Fields = Optional[List[str]] @@ -28,12 +27,10 @@ class CensysSearchAPIv2(CensysAPIBase): >>> c = CensysSearchAPIv2() """ - DEFAULT_URL: str = "https://search.censys.io/api/v2" + DEFAULT_URL: str = "https://search.censys.io/api" """Default Search API base URL.""" INDEX_NAME: str = "" """Name of Censys Index.""" - v1: CensysSearchAPIv1 - """Search V1 Endpoints on V2""" def __init__( self, api_id: Optional[str] = None, api_secret: Optional[str] = None, **kwargs @@ -70,22 +67,12 @@ def __init__( self._session.auth = (self._api_id, 
self._api_secret) # Generate concrete paths to be called - self.view_path = f"/{self.INDEX_NAME}/" - self.search_path = f"/{self.INDEX_NAME}/search" - self.aggregate_path = f"/{self.INDEX_NAME}/aggregate" - self.metadata_path = f"/metadata/{self.INDEX_NAME}" - self.tags_path = "/tags" - - # Set up the v1 API - v1_kwargs = kwargs.copy() - v1_kwargs.update( - { - "url": "https://search.censys.io/api/v1", - "api_id": self._api_id, - "api_secret": self._api_secret, - } - ) - self.v1 = CensysSearchAPIv1(**v1_kwargs) + self.view_path = f"/v2/{self.INDEX_NAME}/" + self.search_path = f"/v2/{self.INDEX_NAME}/search" + self.aggregate_path = f"/v2/{self.INDEX_NAME}/aggregate" + self.metadata_path = f"/v2/metadata/{self.INDEX_NAME}" + self.tags_path = "/v2/tags" + self.account_path = "/v1/account" def _get_exception_class( # type: ignore self, res: Response @@ -100,8 +87,7 @@ def account(self) -> dict: Returns: dict: Quota response. """ - # Make account call to v1 endpoint - return self.v1.account() + return self._get(self.account_path) def quota(self) -> dict: """Returns metadata of a given search query. @@ -190,11 +176,14 @@ def __iter__(self) -> Iterator[List[dict]]: """ return self - def view_all(self) -> Dict[str, dict]: + def view_all(self, max_workers: int = 20) -> Dict[str, dict]: """View each document returned from query. Please note that each result returned by the query will be looked up using the view method. + Args: + max_workers (int): The number of workers to use. Defaults to 20. + Returns: Dict[str, dict]: Dictionary mapping documents to that document's result set. """ @@ -203,7 +192,7 @@ def view_all(self) -> Dict[str, dict]: document_key = INDEX_TO_KEY.get(self.api.INDEX_NAME, "ip") - with ThreadPoolExecutor(max_workers=20) as executor: + with ThreadPoolExecutor(max_workers) as executor: for hit in self.__call__(): document_id = hit[document_key] threads.append(executor.submit(self.api.view, document_id)) @@ -261,6 +250,42 @@ def view( return self._get(self.view_path + document_id, args)["result"] + def bulk_view( + self, + document_ids: List[str], + at_time: Optional[Datetime] = None, + max_workers: int = 20, + ) -> Dict[str, dict]: + """Bulk view documents from current index. + + View the current structured data we have on a list of documents. + For more details, see our documentation: https://search.censys.io/api + + Args: + document_ids (List[str]): The IDs of the documents you are requesting. + at_time ([str, datetime.date, datetime.datetime]): + Optional; Fetches a document at a given point in time. + max_workers (int): The number of workers to use. Defaults to 20. + + Returns: + Dict[str, dict]: Dictionary mapping document IDs to that document's result set. 
+ """ + args = {} + if at_time: + args["at_time"] = format_rfc3339(at_time) + + threads = [] + documents = {} + with ThreadPoolExecutor(max_workers) as executor: + for document_id in document_ids: + threads.append(executor.submit(self.view, document_id, at_time)) + + for task in as_completed(threads): + result = task.result() + documents[result["ip"]] = result + + return documents + def aggregate( self, query: str, field: str, num_buckets: Optional[int] = None ) -> dict: diff --git a/censys/search/v2/hosts.py b/censys/search/v2/hosts.py index 63281138..f4c8cf9e 100644 --- a/censys/search/v2/hosts.py +++ b/censys/search/v2/hosts.py @@ -115,9 +115,9 @@ def view_host_events( if end_time: args["end_time"] = format_rfc3339(end_time) - return self._get(f"/experimental/{self.INDEX_NAME}/{ip_address}/events", args)[ - "result" - ]["events"] + return self._get( + f"/v2/experimental/{self.INDEX_NAME}/{ip_address}/events", args + )["result"]["events"] def list_hosts_with_tag(self, tag_id: str) -> List[str]: """Returns a list of hosts which are tagged with the specified tag. diff --git a/docs/advanced-usage.rst b/docs/advanced-usage.rst index 97ce9c31..fbc8abe5 100644 --- a/docs/advanced-usage.rst +++ b/docs/advanced-usage.rst @@ -8,18 +8,15 @@ If you need to use a proxy, you can configure resource indexes with the proxies .. code:: python - from censys.search import CensysIPv4 + from censys.search import CensysHosts proxies = { "https": "http://10.10.1.10:1080", } - c = CensysIPv4(proxies=proxies) + c = CensysHosts(proxies=proxies) - for page in c.search( - "443.https.get.headers.server: Apache AND location.country: Japan", max_records=10 - ): - print(page) + c.account() .. note:: diff --git a/docs/usage-v1.rst b/docs/usage-v1.rst index d21d0483..477c48c9 100644 --- a/docs/usage-v1.rst +++ b/docs/usage-v1.rst @@ -1,39 +1,37 @@ Usage v1 ======== -The Censys Search API provides functionality for interacting with Censys resources such as IPv4 addresses, Websites, and Certificates, and for viewing Account information such as query quota. +The Censys Search API provides functionality for interacting with Censys resources such as Certificates, and for viewing Account information such as query quota. There are six API options that this library provides access to: -- :attr:`search ` - Allows searches against the IPv4 addresses, Websites, and Certificates indexes using the same search syntax as the `web app `__. -- :attr:`view ` - Returns the structured data we have about a specific IPv4 address, Website, or Certificate, given the resource's natural ID. -- :attr:`report ` - Allows you to view resources as a spectrum based on attributes of the resource, similar to the `Report Builder page `__ on the web app. +- :attr:`search ` - Allows searches against the Certificates indexes using the same search syntax as the `web app `__. +- :attr:`view ` - Returns the structured data we have about a specific Certificate, given the resource's natural ID. +- :attr:`report ` - Allows you to view resources as a spectrum based on attributes of the resource, similar to the `Report Builder page `__ on the web app. - :attr:`data ` - Returns collections of scan series whose metadata includes a description of the data collected in the series and links to the individual scan results. - :attr:`account ` - Returns information about your Censys account, including your current query quota usage. This function is available for all index types. 
- :attr:`bulk ` - Returns the structured data for certificates in bulk, given the certificates' SHA-256 fingerprints. -More details about each option can be found in the `Censys API documentation `__. A list of index fields can be found in the `Censys API definitions page `__. +More details about each option can be found in the `Censys API documentation `__. A list of index fields can be found in the `Censys API definitions page `__. -Python class objects must be initialized for each resource index (IPv4 addresses, Websites, and Certificates). +Python class objects must be initialized for each resource index (Certificates). -- :attr:`CensysIPv4 ` -- :attr:`CensysWebsites ` - :attr:`CensysCertificates ` - :attr:`CensysData ` ``search`` ---------- -Below we show an example using the :attr:`CensysIPv4 ` index. +Below we show an example using the :attr:`CensysCertificates ` index. .. code:: python - from censys.search import CensysIPv4 + from censys.search import CensysCertificates - c = CensysIPv4() + c = CensysCertificates() for page in c.search( - "443.https.get.headers.server: Apache AND location.country: Japan", + "validation.nss.valid: true and validation.nss.type: intermediate", max_records=10 ): print(page) @@ -42,19 +40,16 @@ Below we show an example using the :attr:`CensysIPv4 ` index. +Below we show an example using the :attr:`CensysCertificates ` index. .. code:: python - from censys.search import CensysWebsites + from censys.search import CensysCertificates - c = CensysWebsites() + c = CensysCertificates() # The report method constructs a report using a query, an aggregation field, and the # number of buckets to bin. - websites = c.report( - """ "welcome to" AND tags.raw: "http" """, - field="80.http.get.headers.server.raw", + certificates = c.report( + """censys.io and tags: trusted""", + field="parsed.version", buckets=5, ) - print(websites) + print(certificates) ``data`` -------- @@ -113,13 +108,13 @@ Below we show an example using the :attr:`CensysData ` index. +Below we show an example using the :attr:`CensysCertificates ` index. .. 
code:: python - from censys.search import CensysIPv4 + from censys.search import CensysCertificates - c = CensysIPv4() + c = CensysCertificates() # Gets account data account = c.account() diff --git a/examples/README.md b/examples/README.md index d07589f3..efad0ea5 100644 --- a/examples/README.md +++ b/examples/README.md @@ -18,8 +18,21 @@ from censys.search import SearchClient from censys.asm import AsmClient ``` -## [ASM](asm) +## Available Examples -## [Search v2](v2) +### Search Examples -## [Search v1](v1) +[Using `SearchClient`](search/search_client.py) +[View Host](search/view_host.py) +[Search Hosts](search/search_hosts.py) +[Aggregate Hosts](search/aggregate_hosts.py) +[Bulk View Hosts](search/bulk_view_hosts.py) +[View Certificate](search/view_cert.py) +[Search Certificates](search/search_certs.py) +[Report Certificates](search/report_certs.py) + +### ASM Examples + +[Get Cloud Host Counts](asm/cloud_host_count.py) +[Get Host Risks](asm/get_host_risks.py) +[Get Domains and Subdomains](asm/get_subdomains.py) diff --git a/examples/v2/aggregate_hosts.py b/examples/search/aggregate_hosts.py similarity index 100% rename from examples/v2/aggregate_hosts.py rename to examples/search/aggregate_hosts.py diff --git a/examples/search/bulk_view_hosts.py b/examples/search/bulk_view_hosts.py new file mode 100644 index 00000000..2940bc17 --- /dev/null +++ b/examples/search/bulk_view_hosts.py @@ -0,0 +1,17 @@ +"""Bulk IP Lookup Example.""" +from censys.search import CensysHosts + +h = CensysHosts() + +IPS = [ + "1.1.1.1", + "1.1.1.2", + "1.1.1.3", +] + +hosts = h.bulk_view(IPS) +# { +# "1.1.1.1": {...}, +# "1.1.1.2": {...}, +# "1.1.1.3": {...}, +# } diff --git a/examples/search/report_certs.py b/examples/search/report_certs.py new file mode 100644 index 00000000..bc302ea5 --- /dev/null +++ b/examples/search/report_certs.py @@ -0,0 +1,9 @@ +"""Report certificates.""" +from censys.search import SearchClient + +c = SearchClient() + +res = c.v1.certificates.report( + "github.com and tags: trusted", "parsed.validity.start", 5 +) +print(res) diff --git a/examples/v1/search.py b/examples/search/search_certs.py similarity index 100% rename from examples/v1/search.py rename to examples/search/search_certs.py diff --git a/examples/search_client.py b/examples/search/search_client.py similarity index 66% rename from examples/search_client.py rename to examples/search/search_client.py index 2d2c69c2..96254cc9 100644 --- a/examples/search_client.py +++ b/examples/search/search_client.py @@ -4,13 +4,9 @@ c = SearchClient() # v1 -certs = c.v1.certificates +certs = c.v1.certificates # or c.v1.certs data = c.v1.data -ipv4 = c.v1.ipv4 - -websites = c.v1.websites - # v2 hosts = c.v2.hosts diff --git a/examples/v2/search_hosts.py b/examples/search/search_hosts.py similarity index 100% rename from examples/v2/search_hosts.py rename to examples/search/search_hosts.py diff --git a/examples/v1/view.py b/examples/search/view_cert.py similarity index 100% rename from examples/v1/view.py rename to examples/search/view_cert.py diff --git a/examples/v2/view_host.py b/examples/search/view_host.py similarity index 100% rename from examples/v2/view_host.py rename to examples/search/view_host.py diff --git a/examples/v1/report.py b/examples/v1/report.py deleted file mode 100644 index e863273d..00000000 --- a/examples/v1/report.py +++ /dev/null @@ -1,9 +0,0 @@ -"""View specific certificate.""" -from censys.search import SearchClient - -c = SearchClient() - -res = c.v1.websites.report( - ' "welcome to" AND tags.raw: "http" 
', "80.http.get.headers.server.raw", 5 -) -print(res) diff --git a/poetry.lock b/poetry.lock index 84da302e..57df8f3d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -46,7 +46,7 @@ python-versions = "*" [[package]] name = "black" -version = "21.11b1" +version = "21.12b0" description = "The uncompromising code formatter." category = "dev" optional = false @@ -58,7 +58,6 @@ dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} mypy-extensions = ">=0.4.3" pathspec = ">=0.9.0,<1" platformdirs = ">=2" -regex = ">=2021.4.4" tomli = ">=0.2.6,<2.0.0" typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} typing-extensions = [ @@ -75,7 +74,7 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2021.5.30" +version = "2021.10.8" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false @@ -83,7 +82,7 @@ python-versions = "*" [[package]] name = "charset-normalizer" -version = "2.0.4" +version = "2.0.8" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false @@ -94,7 +93,7 @@ unicode_backport = ["unicodedata2"] [[package]] name = "click" -version = "8.0.1" +version = "8.0.3" description = "Composable command line interface toolkit" category = "dev" optional = false @@ -265,7 +264,7 @@ importlib-metadata = {version = ">=0.9", markers = "python_version < \"3.8\""} [[package]] name = "idna" -version = "3.2" +version = "3.3" description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false @@ -346,14 +345,14 @@ python-versions = "*" [[package]] name = "packaging" -version = "21.0" +version = "21.3" description = "Core utilities for Python packages" category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] -pyparsing = ">=2.0.2" +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "parameterized" @@ -388,7 +387,7 @@ flake8-polyfill = ">=1.0.2,<2" [[package]] name = "platformdirs" -version = "2.2.0" +version = "2.4.0" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
category = "dev" optional = false @@ -400,25 +399,26 @@ test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock [[package]] name = "pluggy" -version = "0.13.1" +version = "1.0.0" description = "plugin and hook calling mechanisms for python" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" [package.dependencies] importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} [package.extras] dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] [[package]] name = "py" -version = "1.10.0" +version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pycodestyle" @@ -460,11 +460,14 @@ python-versions = ">=3.5" [[package]] name = "pyparsing" -version = "2.4.7" +version = "3.0.6" description = "Python parsing module" category = "dev" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=3.6" + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" @@ -503,14 +506,6 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] -[[package]] -name = "regex" -version = "2021.8.21" -description = "Alternative regular expression module, to replace re." -category = "dev" -optional = false -python-versions = "*" - [[package]] name = "requests" version = "2.26.0" @@ -547,7 +542,7 @@ tests = ["coverage (>=3.7.1,<6.0.0)", "pytest-cov", "pytest-localserver", "flake [[package]] name = "rich" -version = "10.15.0" +version = "10.15.2" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" category = "main" optional = false @@ -573,7 +568,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "snowballstemmer" -version = "2.1.0" +version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." category = "dev" optional = false @@ -581,7 +576,7 @@ python-versions = "*" [[package]] name = "testfixtures" -version = "6.18.1" +version = "6.18.3" description = "A collection of helpers and mock objects for unit tests and doc tests." category = "dev" optional = false @@ -602,7 +597,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "tomli" -version = "1.2.1" +version = "1.2.2" description = "A lil' TOML parser" category = "dev" optional = false @@ -626,15 +621,15 @@ python-versions = "*" [[package]] name = "typing-extensions" -version = "3.10.0.2" -description = "Backported and Experimental Type Hints for Python 3.5+" +version = "4.0.1" +description = "Backported and Experimental Type Hints for Python 3.6+" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [[package]] name = "urllib3" -version = "1.26.6" +version = "1.26.7" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false @@ -647,7 +642,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "zipp" -version = "3.5.0" +version = "3.6.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false @@ -660,7 +655,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytes [metadata] lock-version = "1.1" python-versions = ">=3.6.2,<4.0" -content-hash = "5f53214449f442ce6f8530ce077a07ddfd14d0e985aab5a1a97343c5ea9c3d21" +content-hash = "ad1cbd8077c416b614fcf6f8f9626f2d10f75e470c2eef1712aa5b981778d23a" [metadata.files] astor = [ @@ -683,20 +678,20 @@ backoff-stubs = [ {file = "backoff-stubs-1.10.0.tar.gz", hash = "sha256:03e995de0a70016c6fe758498e1ca811f1db517c00cbd06e3039c9e4f6ea2566"}, ] black = [ - {file = "black-21.11b1-py3-none-any.whl", hash = "sha256:802c6c30b637b28645b7fde282ed2569c0cd777dbe493a41b6a03c1d903f99ac"}, - {file = "black-21.11b1.tar.gz", hash = "sha256:a042adbb18b3262faad5aff4e834ff186bb893f95ba3a8013f09de1e5569def2"}, + {file = "black-21.12b0-py3-none-any.whl", hash = "sha256:a615e69ae185e08fdd73e4715e260e2479c861b5740057fde6e8b4e3b7dd589f"}, + {file = "black-21.12b0.tar.gz", hash = "sha256:77b80f693a569e2e527958459634f18df9b0ba2625ba4e0c2d5da5be42e6f2b3"}, ] certifi = [ - {file = "certifi-2021.5.30-py2.py3-none-any.whl", hash = "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8"}, - {file = "certifi-2021.5.30.tar.gz", hash = "sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee"}, + {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, + {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, ] charset-normalizer = [ - {file = "charset-normalizer-2.0.4.tar.gz", hash = "sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3"}, - {file = "charset_normalizer-2.0.4-py3-none-any.whl", hash = "sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b"}, + {file = "charset-normalizer-2.0.8.tar.gz", hash = "sha256:735e240d9a8506778cd7a453d97e817e536bb1fc29f4f6961ce297b9c7a917b0"}, + {file = "charset_normalizer-2.0.8-py3-none-any.whl", hash = "sha256:83fcdeb225499d6344c8f7f34684c2981270beacc32ede2e669e94f7fa544405"}, ] click = [ - {file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"}, - {file = "click-8.0.1.tar.gz", hash = "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a"}, + {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, + {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"}, ] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, @@ -800,8 +795,8 @@ flake8-simplify = [ {file = "flake8_simplify-0.14.2.tar.gz", hash = "sha256:4a8f103607195c3d0743a2fd8beeebe24926e19fb3e24521042cfc35771a8d4d"}, ] idna = [ - {file = "idna-3.2-py3-none-any.whl", hash = "sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a"}, - {file = "idna-3.2.tar.gz", hash = "sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"}, + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = 
"idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, ] importlib-metadata = [ {file = "importlib_metadata-4.8.2-py3-none-any.whl", hash = "sha256:53ccfd5c134223e497627b9815d5030edf77d2ed573922f7a0b8f8bb81a1c100"}, @@ -849,8 +844,8 @@ mypy-extensions = [ {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] packaging = [ - {file = "packaging-21.0-py3-none-any.whl", hash = "sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14"}, - {file = "packaging-21.0.tar.gz", hash = "sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7"}, + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] parameterized = [ {file = "parameterized-0.8.1-py2.py3-none-any.whl", hash = "sha256:9cbb0b69a03e8695d68b3399a8a5825200976536fe1cb79db60ed6a4c8c9efe9"}, @@ -865,16 +860,16 @@ pep8-naming = [ {file = "pep8_naming-0.12.1-py2.py3-none-any.whl", hash = "sha256:4a8daeaeb33cfcde779309fc0c9c0a68a3bbe2ad8a8308b763c5068f86eb9f37"}, ] platformdirs = [ - {file = "platformdirs-2.2.0-py3-none-any.whl", hash = "sha256:4666d822218db6a262bdfdc9c39d21f23b4cfdb08af331a81e92751daf6c866c"}, - {file = "platformdirs-2.2.0.tar.gz", hash = "sha256:632daad3ab546bd8e6af0537d09805cec458dce201bccfe23012df73332e181e"}, + {file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"}, + {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"}, ] pluggy = [ - {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, - {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] py = [ - {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, - {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] pycodestyle = [ {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, @@ -893,8 +888,8 @@ pygments = [ {file = "Pygments-2.10.0.tar.gz", hash = "sha256:f398865f7eb6874156579fdf36bc840a03cab64d1cde9e93d68f46a425ec52c6"}, ] pyparsing = [ - {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, - {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, + {file = "pyparsing-3.0.6-py3-none-any.whl", hash = "sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4"}, + {file = "pyparsing-3.0.6.tar.gz", hash = 
"sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81"}, ] pytest = [ {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, @@ -904,49 +899,6 @@ pytest-cov = [ {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, ] -regex = [ - {file = "regex-2021.8.21-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4b0c211c55d4aac4309c3209833c803fada3fc21cdf7b74abedda42a0c9dc3ce"}, - {file = "regex-2021.8.21-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d5209c3ba25864b1a57461526ebde31483db295fc6195fdfc4f8355e10f7376"}, - {file = "regex-2021.8.21-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c835c30f3af5c63a80917b72115e1defb83de99c73bc727bddd979a3b449e183"}, - {file = "regex-2021.8.21-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:615fb5a524cffc91ab4490b69e10ae76c1ccbfa3383ea2fad72e54a85c7d47dd"}, - {file = "regex-2021.8.21-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9966337353e436e6ba652814b0a957a517feb492a98b8f9d3b6ba76d22301dcc"}, - {file = "regex-2021.8.21-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a49f85f0a099a5755d0a2cc6fc337e3cb945ad6390ec892332c691ab0a045882"}, - {file = "regex-2021.8.21-cp310-cp310-win32.whl", hash = "sha256:f93a9d8804f4cec9da6c26c8cfae2c777028b4fdd9f49de0302e26e00bb86504"}, - {file = "regex-2021.8.21-cp310-cp310-win_amd64.whl", hash = "sha256:a795829dc522227265d72b25d6ee6f6d41eb2105c15912c230097c8f5bfdbcdc"}, - {file = "regex-2021.8.21-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:bca14dfcfd9aae06d7d8d7e105539bd77d39d06caaae57a1ce945670bae744e0"}, - {file = "regex-2021.8.21-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41acdd6d64cd56f857e271009966c2ffcbd07ec9149ca91f71088574eaa4278a"}, - {file = "regex-2021.8.21-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96f0c79a70642dfdf7e6a018ebcbea7ea5205e27d8e019cad442d2acfc9af267"}, - {file = "regex-2021.8.21-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:45f97ade892ace20252e5ccecdd7515c7df5feeb42c3d2a8b8c55920c3551c30"}, - {file = "regex-2021.8.21-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f9974826aeeda32a76648fc677e3125ade379869a84aa964b683984a2dea9f1"}, - {file = "regex-2021.8.21-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea9753d64cba6f226947c318a923dadaf1e21cd8db02f71652405263daa1f033"}, - {file = "regex-2021.8.21-cp36-cp36m-win32.whl", hash = "sha256:ef9326c64349e2d718373415814e754183057ebc092261387a2c2f732d9172b2"}, - {file = "regex-2021.8.21-cp36-cp36m-win_amd64.whl", hash = "sha256:6dbd51c3db300ce9d3171f4106da18fe49e7045232630fe3d4c6e37cb2b39ab9"}, - {file = "regex-2021.8.21-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a89ca4105f8099de349d139d1090bad387fe2b208b717b288699ca26f179acbe"}, - {file = "regex-2021.8.21-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d6c2b1d78ceceb6741d703508cd0e9197b34f6bf6864dab30f940f8886e04ade"}, - {file = "regex-2021.8.21-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a34ba9e39f8269fd66ab4f7a802794ffea6d6ac500568ec05b327a862c21ce23"}, - {file = "regex-2021.8.21-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ecb6e7c45f9cd199c10ec35262b53b2247fb9a408803ed00ee5bb2b54aa626f5"}, - {file = "regex-2021.8.21-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:330836ad89ff0be756b58758878409f591d4737b6a8cef26a162e2a4961c3321"}, - {file = "regex-2021.8.21-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:71a904da8c9c02aee581f4452a5a988c3003207cb8033db426f29e5b2c0b7aea"}, - {file = "regex-2021.8.21-cp37-cp37m-win32.whl", hash = "sha256:b511c6009d50d5c0dd0bab85ed25bc8ad6b6f5611de3a63a59786207e82824bb"}, - {file = "regex-2021.8.21-cp37-cp37m-win_amd64.whl", hash = "sha256:93f9f720081d97acee38a411e861d4ce84cbc8ea5319bc1f8e38c972c47af49f"}, - {file = "regex-2021.8.21-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a195e26df1fbb40ebee75865f9b64ba692a5824ecb91c078cc665b01f7a9a36"}, - {file = "regex-2021.8.21-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06ba444bbf7ede3890a912bd4904bb65bf0da8f0d8808b90545481362c978642"}, - {file = "regex-2021.8.21-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b8d551f1bd60b3e1c59ff55b9e8d74607a5308f66e2916948cafd13480b44a3"}, - {file = "regex-2021.8.21-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ebbceefbffae118ab954d3cd6bf718f5790db66152f95202ebc231d58ad4e2c2"}, - {file = "regex-2021.8.21-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccd721f1d4fc42b541b633d6e339018a08dd0290dc67269df79552843a06ca92"}, - {file = "regex-2021.8.21-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ae87ab669431f611c56e581679db33b9a467f87d7bf197ac384e71e4956b4456"}, - {file = "regex-2021.8.21-cp38-cp38-win32.whl", hash = "sha256:38600fd58c2996829480de7d034fb2d3a0307110e44dae80b6b4f9b3d2eea529"}, - {file = "regex-2021.8.21-cp38-cp38-win_amd64.whl", hash = "sha256:61e734c2bcb3742c3f454dfa930ea60ea08f56fd1a0eb52d8cb189a2f6be9586"}, - {file = "regex-2021.8.21-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b091dcfee169ad8de21b61eb2c3a75f9f0f859f851f64fdaf9320759a3244239"}, - {file = "regex-2021.8.21-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:640ccca4d0a6fcc6590f005ecd7b16c3d8f5d52174e4854f96b16f34c39d6cb7"}, - {file = "regex-2021.8.21-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac95101736239260189f426b1e361dc1b704513963357dc474beb0f39f5b7759"}, - {file = "regex-2021.8.21-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b79dc2b2e313565416c1e62807c7c25c67a6ff0a0f8d83a318df464555b65948"}, - {file = "regex-2021.8.21-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b623fc429a38a881ab2d9a56ef30e8ea20c72a891c193f5ebbddc016e083ee"}, - {file = "regex-2021.8.21-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:8021dee64899f993f4b5cca323aae65aabc01a546ed44356a0965e29d7893c94"}, - {file = "regex-2021.8.21-cp39-cp39-win32.whl", hash = "sha256:d6ec4ae13760ceda023b2e5ef1f9bc0b21e4b0830458db143794a117fdbdc044"}, - {file = "regex-2021.8.21-cp39-cp39-win_amd64.whl", hash = "sha256:03840a07a402576b8e3a6261f17eb88abd653ad4e18ec46ef10c9a63f8c99ebd"}, - {file = "regex-2021.8.21.tar.gz", hash = "sha256:faf08b0341828f6a29b8f7dd94d5cf8cc7c39bfc3e67b78514c54b494b66915a"}, -] requests = [ {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, @@ -956,28 +908,28 @@ responses = [ {file = "responses-0.16.0.tar.gz", hash = "sha256:a2e3aca2a8277e61257cd3b1c154b1dd0d782b1ae3d38b7fa37cbe3feb531791"}, ] rich = [ - {file = "rich-10.15.0-py3-none-any.whl", hash = "sha256:11516740a10dddead0c782dc11fdde552b19fd0614dbbba8f78ea7602d940720"}, - {file = "rich-10.15.0.tar.gz", hash = "sha256:3f7b0851e097ae90e43216375db413c2f910a0f310705614bce1a2ae43c8264e"}, + {file = "rich-10.15.2-py3-none-any.whl", hash = "sha256:43b2c6ad51f46f6c94992aee546f1c177719f4e05aff8f5ea4d2efae3ebdac89"}, + {file = "rich-10.15.2.tar.gz", hash = "sha256:1dded089b79dd042b3ab5cd63439a338e16652001f0c16e73acdcf4997ad772d"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] snowballstemmer = [ - {file = "snowballstemmer-2.1.0-py2.py3-none-any.whl", hash = "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2"}, - {file = "snowballstemmer-2.1.0.tar.gz", hash = "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914"}, + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] testfixtures = [ - {file = "testfixtures-6.18.1-py2.py3-none-any.whl", hash = "sha256:486be7b01eb71326029811878a3317b7e7994324621c0ec633c8e24499d8d5b3"}, - {file = "testfixtures-6.18.1.tar.gz", hash = "sha256:0a6422737f6d89b45cdef1e2df5576f52ad0f507956002ce1020daa9f44211d6"}, + {file = "testfixtures-6.18.3-py2.py3-none-any.whl", hash = "sha256:6ddb7f56a123e1a9339f130a200359092bd0a6455e31838d6c477e8729bb7763"}, + {file = "testfixtures-6.18.3.tar.gz", hash = "sha256:2600100ae96ffd082334b378e355550fef8b4a529a6fa4c34f47130905c7426d"}, ] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] tomli = [ - {file = "tomli-1.2.1-py3-none-any.whl", hash = "sha256:8dd0e9524d6f386271a36b41dbf6c57d8e32fd96fd22b6584679dc569d20899f"}, - {file = "tomli-1.2.1.tar.gz", hash = "sha256:a5b75cb6f3968abb47af1b40c1819dc519ea82bcc065776a866e8d74c5ca9442"}, + {file = "tomli-1.2.2-py3-none-any.whl", hash = "sha256:f04066f68f5554911363063a30b108d2b5a5b1a010aa8b6132af78489fe3aade"}, + {file = "tomli-1.2.2.tar.gz", hash = "sha256:c6ce0015eb38820eaf32b5db832dbc26deb3dd427bd5f6556cf0acac2c214fee"}, ] typed-ast = [ {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = 
"sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, @@ -1016,15 +968,14 @@ types-requests = [ {file = "types_requests-2.26.1-py3-none-any.whl", hash = "sha256:853571b3accc188976c0f4feffcaebf6cdfc170082b5e43f3358aa78de61f531"}, ] typing-extensions = [ - {file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"}, - {file = "typing_extensions-3.10.0.2-py3-none-any.whl", hash = "sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34"}, - {file = "typing_extensions-3.10.0.2.tar.gz", hash = "sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e"}, + {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, + {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, ] urllib3 = [ - {file = "urllib3-1.26.6-py2.py3-none-any.whl", hash = "sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4"}, - {file = "urllib3-1.26.6.tar.gz", hash = "sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f"}, + {file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"}, + {file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"}, ] zipp = [ - {file = "zipp-3.5.0-py3-none-any.whl", hash = "sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3"}, - {file = "zipp-3.5.0.tar.gz", hash = "sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4"}, + {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, + {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, ] diff --git a/pyproject.toml b/pyproject.toml index 7154ce81..96dca8b0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "censys" -version = "2.0.9" +version = "2.1.0" description = "An easy-to-use and lightweight API wrapper for Censys APIs (censys.io)." readme = "README.md" authors = ["Censys, Inc. 
"] @@ -48,7 +48,7 @@ censys = "censys.cli:main" python = ">=3.6.2,<4.0" requests = ">=2.26.0" backoff = "^1.11.1" -rich = "^10.6.0" +rich = "^10.15.2" importlib-metadata = { version = "^4.6.1", markers = "python_version < '3.8'" } [tool.poetry.dev-dependencies] @@ -62,7 +62,7 @@ flake8-isort = "^4.1.1" isort = "^5.10.1" pep8-naming = "^0.12.1" flake8-black = "^0.2.3" -black = "^21.11b1" +black = "^21.12b0" darglint = "^1.8.1" # Tests diff --git a/tests/cli/test_account.py b/tests/cli/test_account.py index 73bc0054..f03abb79 100644 --- a/tests/cli/test_account.py +++ b/tests/cli/test_account.py @@ -7,7 +7,7 @@ import responses from tests.search.v1.test_api import ACCOUNT_JSON -from tests.utils import V1_ENDPOINT_ON_V2_URL, CensysTestCase +from tests.utils import V1_URL, CensysTestCase from censys.cli import main as cli_main @@ -24,7 +24,7 @@ class CensysCliAccountTest(CensysTestCase): def test_table(self): self.responses.add( responses.GET, - V1_ENDPOINT_ON_V2_URL + "/account", + V1_URL + "/account", status=200, json=ACCOUNT_JSON, ) @@ -53,7 +53,7 @@ def test_table(self): def test_json(self): self.responses.add( responses.GET, - V1_ENDPOINT_ON_V2_URL + "/account", + V1_URL + "/account", status=200, json=ACCOUNT_JSON, ) diff --git a/tests/cli/test_config.py b/tests/cli/test_config.py index f06d208f..5aca1bf6 100644 --- a/tests/cli/test_config.py +++ b/tests/cli/test_config.py @@ -5,7 +5,7 @@ import responses from tests.search.v1.test_api import ACCOUNT_JSON -from tests.utils import V1_ENDPOINT_ON_V2_URL, CensysTestCase +from tests.utils import V1_URL, CensysTestCase from censys.cli import main as cli_main from censys.common.config import DEFAULT, censys_path, config_path, get_config @@ -64,7 +64,7 @@ class CensysConfigCliTest(CensysTestCase): def test_search_config(self, mock_write_config, mock_file): self.responses.add( responses.GET, - V1_ENDPOINT_ON_V2_URL + "/account", + V1_URL + "/account", status=200, json=ACCOUNT_JSON, ) @@ -91,7 +91,7 @@ def test_search_config(self, mock_write_config, mock_file): def test_search_config_failed(self, mock_file): self.responses.add( responses.GET, - V1_ENDPOINT_ON_V2_URL + "/account", + V1_URL + "/account", status=401, json={"error": "Unauthorized"}, ) diff --git a/tests/cli/test_hnri.py b/tests/cli/test_hnri.py index edc26cba..19900eae 100644 --- a/tests/cli/test_hnri.py +++ b/tests/cli/test_hnri.py @@ -6,7 +6,7 @@ import responses from tests.search.v2.test_hosts import VIEW_HOST_JSON -from tests.utils import CensysTestCase +from tests.utils import V2_URL, CensysTestCase from censys.cli import main as cli_main from censys.cli.commands.hnri import CensysHNRI @@ -20,7 +20,6 @@ class CensysCliHNRITest(CensysTestCase): def setUp(self): super().setUp() self.api = CensysHNRI(self.api_id, self.api_secret) - self.base_url = self.api.index._api_url @patch( "argparse._sys.argv", @@ -37,7 +36,7 @@ def test_hnri_medium(self, mock_ip): ] self.responses.add( responses.GET, - f"{self.base_url}/hosts/{self.IP_ADDRESS}", + f"{V2_URL}/hosts/{self.IP_ADDRESS}", status=200, json=response, ) @@ -66,7 +65,7 @@ def test_hnri_no_medium(self, mock_ip): response["result"]["services"] = [{"port": 23, "service_name": "VNC"}] self.responses.add( responses.GET, - f"{self.base_url}/hosts/{self.IP_ADDRESS}", + f"{V2_URL}/hosts/{self.IP_ADDRESS}", status=200, json=response, ) @@ -94,7 +93,7 @@ def test_hnri_not_found(self, mock_ip): response["result"]["services"] = [] self.responses.add( responses.GET, - f"{self.base_url}/hosts/{self.IP_ADDRESS}", + 
f"{V2_URL}/hosts/{self.IP_ADDRESS}", status=200, json=response, ) diff --git a/tests/cli/test_search.py b/tests/cli/test_search.py index db0b85ff..6d0c343a 100644 --- a/tests/cli/test_search.py +++ b/tests/cli/test_search.py @@ -10,7 +10,7 @@ import responses from tests.search.v2.test_hosts import SEARCH_HOSTS_JSON -from tests.utils import V1_ENDPOINT_ON_V2_URL, V1_URL, V2_URL, CensysTestCase +from tests.utils import V1_URL, V2_URL, CensysTestCase from censys.cli import main as cli_main from censys.common.exceptions import CensysCLIException, CensysException @@ -63,7 +63,7 @@ def test_no_creds(self, mock_file): def test_write_json(self): self.responses.add_callback( responses.POST, - V1_ENDPOINT_ON_V2_URL + "/search/certificates", + V1_URL + "/search/certificates", callback=search_callback, content_type="application/json", ) @@ -93,9 +93,9 @@ def test_write_json(self): [ "censys", "search", - "8.8.8.8", + "parsed.names: censys.io", "--index-type", - "ipv4", + "certs", "--fields", "protocols", "--format", @@ -106,7 +106,7 @@ def test_write_json(self): def test_write_csv(self): self.responses.add_callback( responses.POST, - V1_URL + "/search/ipv4", + V1_URL + "/search/certificates", callback=search_callback, content_type="application/json", ) @@ -151,7 +151,7 @@ def test_write_csv(self): def test_write_output_path(self): self.responses.add_callback( responses.POST, - V1_ENDPOINT_ON_V2_URL + "/search/certificates", + V1_URL + "/search/certificates", callback=search_callback, content_type="application/json", ) @@ -178,7 +178,7 @@ def test_write_output_path(self): "search", "domain: censys.io AND ports: 443", "--index-type", - "websites", + "certs", "--fields", "443.https.get.headers.server", "--format", @@ -189,7 +189,7 @@ def test_write_output_path(self): def test_write_screen(self): self.responses.add_callback( responses.POST, - V1_URL + "/search/websites", + V1_URL + "/search/certificates", callback=search_callback, content_type="application/json", ) @@ -208,9 +208,9 @@ def test_write_screen(self): [ "censys", "search", - "domain: censys.io AND ports: 443", + "parsed.names: censys.io", "--index-type", - "websites", + "certs", "--overwrite", "--fields", "domain", @@ -228,7 +228,7 @@ def test_write_screen(self): def test_overwrite(self): self.responses.add_callback( responses.POST, - V1_URL + "/search/websites", + V1_URL + "/search/certificates", callback=search_callback, content_type="application/json", ) @@ -297,9 +297,9 @@ def test_field_max(self): [ "censys", "search", - "domain: censys.io AND ports: 443", + "parsed.names: censys.io", "--index-type", - "websites", + "certs", "--format", "screen", "--max-records", @@ -310,7 +310,7 @@ def test_field_max(self): def test_max_records(self): self.responses.add_callback( responses.POST, - V1_URL + "/search/websites", + V1_URL + "/search/certificates", callback=search_callback, content_type="application/json", ) @@ -376,24 +376,6 @@ def test_write_csv_v2(self): ): cli_main() - @patch( - "argparse._sys.argv", - [ - "censys", - "search", - "domain: censys.io AND ports: 443", - "--index-type", - "ipv4", - "--open", - ], - ) - @patch("censys.cli.commands.search.webbrowser.open") - def test_open_v1(self, mock_open): - with pytest.raises(SystemExit, match="0"): - cli_main() - query_str = urlencode({"q": "domain: censys.io AND ports: 443"}) - mock_open.assert_called_with(f"https://censys.io/ipv4?{query_str}") - @patch( "argparse._sys.argv", [ diff --git a/tests/search/v1/test_api.py b/tests/search/v1/test_api.py index 6eca8a5a..9f60eed6 100644 --- 
a/tests/search/v1/test_api.py +++ b/tests/search/v1/test_api.py @@ -6,14 +6,14 @@ from parameterized import parameterized from requests.models import Response -from tests.utils import CensysTestCase +from tests.utils import V1_URL, CensysTestCase from censys.common.exceptions import ( CensysException, CensysExceptionMapper, CensysSearchException, ) -from censys.search.v1.api import CensysSearchAPI +from censys.search.v1.api import CensysSearchAPIv1 ACCOUNT_JSON = { "login": "test@censys.io", @@ -32,12 +32,12 @@ class CensysSearchAPITests(CensysTestCase): def setUp(self): super().setUp() - self.setUpApi(CensysSearchAPI(self.api_id, self.api_secret)) + self.setUpApi(CensysSearchAPIv1(self.api_id, self.api_secret)) def test_account(self): self.responses.add( responses.GET, - self.base_url + "/account", + V1_URL + "/account", status=200, json=ACCOUNT_JSON, ) @@ -48,7 +48,7 @@ def test_account(self): def test_quota(self): self.responses.add( responses.GET, - self.base_url + "/account", + V1_URL + "/account", status=200, json=ACCOUNT_JSON, ) @@ -76,4 +76,4 @@ def test_no_env(self, mock_file): with pytest.raises( CensysException, match="No API ID or API secret configured." ): - CensysSearchAPI() + CensysSearchAPIv1() diff --git a/tests/search/v1/test_certificates.py b/tests/search/v1/test_certificates.py index 1db40b9c..fda6166f 100644 --- a/tests/search/v1/test_certificates.py +++ b/tests/search/v1/test_certificates.py @@ -1,6 +1,6 @@ import responses -from tests.utils import CensysTestCase +from tests.utils import V1_URL, CensysTestCase from censys.search import SearchClient @@ -20,7 +20,7 @@ def setUp(self): def test_bulk(self): self.responses.add( responses.POST, - f"{self.base_url}/bulk/certificates", + f"{V1_URL}/bulk/certificates", status=200, json=BULK_JSON, ) diff --git a/tests/search/v1/test_data.py b/tests/search/v1/test_data.py index 76459cd9..6b75ed75 100644 --- a/tests/search/v1/test_data.py +++ b/tests/search/v1/test_data.py @@ -1,6 +1,6 @@ import responses -from tests.utils import CensysTestCase +from tests.utils import V1_URL, CensysTestCase from censys.search import SearchClient @@ -28,7 +28,7 @@ def setUp(self): def test_get_series(self): self.responses.add( responses.GET, - f"{self.base_url}/data", + f"{V1_URL}/data", status=200, json=SERIES_JSON, ) @@ -39,7 +39,7 @@ def test_get_series(self): def test_view_series(self): self.responses.add( responses.GET, - f"{self.base_url}/data/{SERIES}", + f"{V1_URL}/data/{SERIES}", status=200, json=VIEW_JSON, ) @@ -50,7 +50,7 @@ def test_view_series(self): def test_view_result(self): self.responses.add( responses.GET, - f"{self.base_url}/data/{SERIES}/{RESULT}", + f"{V1_URL}/data/{SERIES}/{RESULT}", status=200, json=RESULT_JSON, ) diff --git a/tests/search/v1/test_indexes.py b/tests/search/v1/test_indexes.py index 2be77d36..ce938c43 100644 --- a/tests/search/v1/test_indexes.py +++ b/tests/search/v1/test_indexes.py @@ -2,7 +2,7 @@ import responses from parameterized import parameterized_class -from tests.utils import CensysTestCase +from tests.utils import V1_URL, CensysTestCase from censys.common.exceptions import CensysException from censys.search import SearchClient @@ -55,8 +55,6 @@ "certificates", "fce621c0dc1c666d03d660472f636ce91e66e96460545f0da7eb1a24873e2f70", ), - ("ipv4", "8.8.8.8"), - ("websites", "google.com"), ], ) class CensysIndexTests(CensysTestCase): @@ -72,7 +70,7 @@ def setUp(self): def test_view(self): self.responses.add( responses.GET, - f"{self.base_url}/view/{self.index}/{self.document_id}", + 
f"{V1_URL}/view/{self.index}/{self.document_id}", status=200, json=VIEW_JSON, ) @@ -84,7 +82,7 @@ def test_view(self): def test_search(self): self.responses.add( responses.POST, - f"{self.base_url}/search/{self.index}", + f"{V1_URL}/search/{self.index}", status=200, json=SEARCH_JSON, ) @@ -96,7 +94,7 @@ def test_search(self): def test_report(self): self.responses.add( responses.POST, - f"{self.base_url}/report/{self.index}", + f"{V1_URL}/report/{self.index}", status=200, json=REPORT_JSON, ) @@ -108,7 +106,7 @@ def test_report(self): def test_metadata(self): self.responses.add( responses.POST, - f"{self.base_url}/search/{self.index}", + f"{V1_URL}/search/{self.index}", status=200, json=SEARCH_JSON, ) @@ -126,7 +124,7 @@ def test_max_records_search(self): temp_json["results"] = [{"sample": "results"} for _ in range(MAX_RECORDS + 5)] self.responses.add( responses.POST, - f"{self.base_url}/search/{self.index}", + f"{V1_URL}/search/{self.index}", status=200, json=temp_json, ) diff --git a/tests/search/v2/test_api.py b/tests/search/v2/test_api.py index 2197ba8d..7712ee05 100644 --- a/tests/search/v2/test_api.py +++ b/tests/search/v2/test_api.py @@ -8,7 +8,7 @@ from requests.models import Response from tests.search.v1.test_api import ACCOUNT_JSON -from tests.utils import V1_ENDPOINT_ON_V2_URL, CensysTestCase +from tests.utils import V1_URL, CensysTestCase from censys.common.exceptions import CensysException, CensysExceptionMapper from censys.search.v2.api import CensysSearchAPIv2 @@ -36,7 +36,7 @@ def test_get_exception_class(self, status_code, exception): def test_account_and_quota(self): self.responses.add( responses.GET, - f"{V1_ENDPOINT_ON_V2_URL}/account", + f"{V1_URL}/account", status=200, json=ACCOUNT_JSON, ) @@ -46,22 +46,6 @@ def test_account_and_quota(self): results = self.api.quota() assert results == ACCOUNT_JSON["quota"] - def test_v1_endpoint_on_v2_url(self): - # Asserts that the API URL was set correctly - assert self.api.v1._api_url == V1_ENDPOINT_ON_V2_URL - - # Asserts that proxies get set correctly - api_with_proxy = CensysSearchAPIv2( - self.api_id, self.api_secret, proxies={"https": "test.proxy.com"} - ) - assert list(api_with_proxy.v1._session.proxies.keys()) == ["https"] - - # Asserts that cookies get set correctly - api_with_cookies = CensysSearchAPIv2( - self.api_id, self.api_secret, cookies={"_ga": "GA"} - ) - assert list(api_with_cookies.v1._session.cookies.keys()) == ["_ga"] - @patch.dict("os.environ", {"CENSYS_API_ID": "", "CENSYS_API_SECRET": ""}) class CensysAPIBaseTestsNoSearchEnv(unittest.TestCase): diff --git a/tests/search/v2/test_certs.py b/tests/search/v2/test_certs.py index bef0b1e9..2ddffac3 100644 --- a/tests/search/v2/test_certs.py +++ b/tests/search/v2/test_certs.py @@ -4,7 +4,7 @@ import responses from parameterized import parameterized -from tests.utils import CensysTestCase +from tests.utils import V2_URL, CensysTestCase from censys.search import SearchClient @@ -43,7 +43,7 @@ def test_not_implemented_methods(self, function_name: str): def test_get_hosts_by_cert(self): self.responses.add( responses.GET, - f"{self.base_url}/certificates/{TEST_CERT}/hosts", + f"{V2_URL}/certificates/{TEST_CERT}/hosts", status=200, json=VIEW_HOSTS_BY_CERT_JSON, ) @@ -54,7 +54,7 @@ def test_get_hosts_by_cert(self): def test_get_hosts_by_cert_with_cursor(self): self.responses.add( responses.GET, - f"{self.base_url}/certificates/{TEST_CERT}/hosts?cursor=nextCursorToken", + f"{V2_URL}/certificates/{TEST_CERT}/hosts?cursor=nextCursorToken", status=200, 
json=VIEW_HOSTS_BY_CERT_JSON, ) diff --git a/tests/search/v2/test_comments.py b/tests/search/v2/test_comments.py index b3ae52d3..ec7f4d82 100644 --- a/tests/search/v2/test_comments.py +++ b/tests/search/v2/test_comments.py @@ -3,7 +3,7 @@ import responses from parameterized import parameterized_class -from tests.utils import CensysTestCase +from tests.utils import V2_URL, CensysTestCase from censys.search.v2 import CensysCerts, CensysHosts from censys.search.v2.api import CensysSearchAPIv2 @@ -61,7 +61,7 @@ def setUp(self): def test_get_comments(self): self.responses.add( responses.GET, - f"{self.base_url}/{self.index}/{self.document_id}/comments", + f"{V2_URL}/{self.index}/{self.document_id}/comments", status=200, json=GET_COMMENTS_RESPONSE, ) @@ -71,7 +71,7 @@ def test_get_comments(self): def test_add_comment(self): self.responses.add( responses.POST, - f"{self.base_url}/{self.index}/{self.document_id}/comments", + f"{V2_URL}/{self.index}/{self.document_id}/comments", status=200, json=ADD_COMMENTS_RESPONSE, match=[responses.json_params_matcher({"contents": TEST_COMMENT})], @@ -82,7 +82,7 @@ def test_add_comment(self): def test_delete_comment(self): self.responses.add( responses.DELETE, - f"{self.base_url}/{self.index}/{self.document_id}/comments/comment-id", + f"{V2_URL}/{self.index}/{self.document_id}/comments/comment-id", status=209, ) self.api.delete_comment(self.document_id, "comment-id") @@ -90,7 +90,7 @@ def test_delete_comment(self): def test_update_comment(self): self.responses.add( responses.PUT, - f"{self.base_url}/{self.index}/{self.document_id}/comments/comment-id", + f"{V2_URL}/{self.index}/{self.document_id}/comments/comment-id", status=200, json={"code": 200, "status": "OK"}, match=[responses.json_params_matcher({"contents": TEST_COMMENT})], diff --git a/tests/search/v2/test_hosts.py b/tests/search/v2/test_hosts.py index 4d98d3bd..7f302876 100644 --- a/tests/search/v2/test_hosts.py +++ b/tests/search/v2/test_hosts.py @@ -4,9 +4,9 @@ import responses from parameterized import parameterized -from tests.utils import CensysTestCase +from tests.utils import V2_URL, CensysTestCase -from censys.search import SearchClient +from censys.search import CensysHosts, SearchClient VIEW_HOST_JSON = { "code": 200, @@ -147,6 +147,8 @@ class TestHosts(CensysTestCase): + api: CensysHosts + def setUp(self): super().setUp() self.setUpApi(SearchClient(self.api_id, self.api_secret).v2.hosts) @@ -154,7 +156,7 @@ def setUp(self): def test_view(self): self.responses.add( responses.GET, - f"{self.base_url}/hosts/{TEST_HOST}", + f"{V2_URL}/hosts/{TEST_HOST}", status=200, json=VIEW_HOST_JSON, ) @@ -166,7 +168,7 @@ def test_view(self): def test_view_at_time(self): self.responses.add( responses.GET, - f"{self.base_url}/hosts/{TEST_HOST}?at_time=2021-03-01T00:00:00.000000Z", + f"{V2_URL}/hosts/{TEST_HOST}?at_time=2021-03-01T00:00:00.000000Z", status=200, json=VIEW_HOST_JSON, ) @@ -177,10 +179,46 @@ def test_view_at_time(self): assert res == VIEW_HOST_JSON["result"] + def test_bulk_view(self): + ips = ["1.1.1.1", "1.1.1.2", "1.1.1.3"] + expected = {} + for ip in ips: + host_json = VIEW_HOST_JSON.copy() + host_json["result"]["ip"] = ip + self.responses.add( + responses.GET, + f"{V2_URL}/hosts/{ip}", + status=200, + json=host_json, + ) + expected[ip] = host_json["result"].copy() + + results = self.api.bulk_view(ips) + assert results == expected + + def test_bulk_view_at_time(self): + ips = ["1.1.1.1", "1.1.1.2", "1.1.1.3"] + expected = {} + for ip in ips: + host_json = VIEW_HOST_JSON.copy() + 
host_json["result"]["ip"] = ip + self.responses.add( + responses.GET, + f"{V2_URL}/hosts/{ip}?at_time=2021-03-01T00:00:00.000000Z", + status=200, + json=host_json, + ) + expected[ip] = host_json["result"].copy() + + date = datetime.date(2021, 3, 1) + + results = self.api.bulk_view(ips, at_time=date) + assert results == expected + def test_search(self): self.responses.add( responses.GET, - self.base_url + "/hosts/search?q=service.service_name: HTTP&per_page=100", + V2_URL + "/hosts/search?q=service.service_name: HTTP&per_page=100", status=200, json=SEARCH_HOSTS_JSON, ) @@ -192,7 +230,7 @@ def test_search_per_page(self): test_per_page = 50 self.responses.add( responses.GET, - self.base_url + V2_URL + f"/hosts/search?q=service.service_name: HTTP&per_page={test_per_page}", status=200, json=SEARCH_HOSTS_JSON, @@ -209,7 +247,7 @@ def test_search_invalid_query(self): no_hosts_json["result"]["links"]["next"] = "" self.responses.add( responses.GET, - self.base_url + f"/hosts/search?q={invalid_query}&per_page=100", + V2_URL + f"/hosts/search?q={invalid_query}&per_page=100", status=200, json=no_hosts_json, ) @@ -223,7 +261,7 @@ def test_search_invalid_query(self): def test_search_pages(self): self.responses.add( responses.GET, - self.base_url + "/hosts/search?q=service.service_name: HTTP&per_page=100", + V2_URL + "/hosts/search?q=service.service_name: HTTP&per_page=100", status=200, json=SEARCH_HOSTS_JSON, ) @@ -243,7 +281,7 @@ def test_search_pages(self): page_2_json["result"]["links"]["next"] = None self.responses.add( responses.GET, - self.base_url + V2_URL + "/hosts/search?q=service.service_name: HTTP&per_page=100" + f"&cursor={next_cursor}", status=200, @@ -259,7 +297,7 @@ def test_search_pages(self): def test_aggregate(self): self.responses.add( responses.GET, - self.base_url + V2_URL + "/hosts/aggregate?field=services.port&q=service.service_name: HTTP&num_buckets=4", status=200, json=AGGREGATE_HOSTS_JSON, @@ -280,7 +318,7 @@ def test_search_view_all(self): search_json["result"]["links"]["next"] = "" self.responses.add( responses.GET, - f"{self.base_url}/hosts/search?q=service.service_name: HTTP&per_page={test_per_page}", + f"{V2_URL}/hosts/search?q=service.service_name: HTTP&per_page={test_per_page}", status=200, json=search_json, ) @@ -291,7 +329,7 @@ def test_search_view_all(self): view_json["result"]["ip"] = ip self.responses.add( responses.GET, - f"{self.base_url}/hosts/{ip}", + f"{V2_URL}/hosts/{ip}", status=200, json=view_json, ) @@ -304,7 +342,7 @@ def test_search_view_all(self): def test_view_host_names(self): self.responses.add( responses.GET, - f"{self.base_url}/hosts/{TEST_HOST}/names", + f"{V2_URL}/hosts/{TEST_HOST}/names", status=200, json=VIEW_HOST_NAMES_JSON, ) @@ -314,7 +352,7 @@ def test_view_host_names(self): def test_host_metadata(self): self.responses.add( responses.GET, - f"{self.base_url}/metadata/hosts", + f"{V2_URL}/metadata/hosts", status=200, json=HOST_METADATA_JSON, ) @@ -324,7 +362,7 @@ def test_host_metadata(self): def test_view_host_events(self): self.responses.add( responses.GET, - f"{self.base_url}/experimental/hosts/{TEST_HOST}/events", + f"{V2_URL}/experimental/hosts/{TEST_HOST}/events", status=200, json=VIEW_HOST_EVENTS_JSON, ) @@ -350,7 +388,7 @@ def test_view_host_events(self): def test_view_host_events_params(self, kwargs, query_params): self.responses.add( responses.GET, - f"{self.base_url}/experimental/hosts/{TEST_HOST}/events?{query_params}", + f"{V2_URL}/experimental/hosts/{TEST_HOST}/events?{query_params}", status=200, json=VIEW_HOST_EVENTS_JSON, ) 
diff --git a/tests/search/v2/test_tags.py b/tests/search/v2/test_tags.py
index 704b13c9..70bdb34c 100644
--- a/tests/search/v2/test_tags.py
+++ b/tests/search/v2/test_tags.py
@@ -4,7 +4,7 @@
 import responses
 from parameterized import parameterized_class
 
-from tests.utils import CensysTestCase
+from tests.utils import BASE_URL, CensysTestCase
 
 from censys.search.v2 import CensysCerts, CensysHosts
 from censys.search.v2.api import CensysSearchAPIv2
@@ -82,7 +82,7 @@ def setUp(self):
     def test_list_all_tags(self):
         self.responses.add(
             responses.GET,
-            self.base_url + self.api.tags_path,
+            BASE_URL + self.api.tags_path,
             status=200,
             json=LIST_TAGS_RESPONSE,
         )
@@ -92,7 +92,7 @@ def test_create_tag(self):
         self.responses.add(
             responses.POST,
-            self.base_url + self.api.tags_path,
+            BASE_URL + self.api.tags_path,
             status=200,
             json=CREATE_TAG_RESPONSE,
             match=[
@@ -107,7 +107,7 @@ def test_get_tag(self):
         self.responses.add(
             responses.GET,
-            self.base_url + self.api.tags_path + "/" + TEST_TAG_ID,
+            BASE_URL + self.api.tags_path + "/" + TEST_TAG_ID,
             status=200,
             json=CREATE_TAG_RESPONSE,
         )
 
@@ -117,7 +117,7 @@ def test_update_tag(self):
         self.responses.add(
             responses.PUT,
-            self.base_url + self.api.tags_path + "/" + TEST_TAG_ID,
+            BASE_URL + self.api.tags_path + "/" + TEST_TAG_ID,
             status=200,
             json=CREATE_TAG_RESPONSE,
             match=[
@@ -132,7 +132,7 @@ def test_delete_tag(self):
         self.responses.add(
             responses.DELETE,
-            self.base_url + self.api.tags_path + "/" + TEST_TAG_ID,
+            BASE_URL + self.api.tags_path + "/" + TEST_TAG_ID,
             status=204,
         )
         self.api.delete_tag(TEST_TAG_ID)
 
@@ -141,7 +141,7 @@ def test_list_tags_on_document(self):
         self.responses.add(
             responses.GET,
-            self.base_url + self.api.view_path + self.document_id + "/tags",
+            BASE_URL + self.api.view_path + self.document_id + "/tags",
             status=200,
             json=LIST_TAGS_RESPONSE,
         )
@@ -151,7 +151,7 @@ def test_add_tag_to_document(self):
         self.responses.add(
             responses.PUT,
-            f"{self.base_url}{self.api.view_path}{self.document_id}/tags/{TEST_TAG_ID}",
+            f"{BASE_URL}{self.api.view_path}{self.document_id}/tags/{TEST_TAG_ID}",
             status=200,
         )
         self.api.add_tag_to_document(self.document_id, TEST_TAG_ID)
 
@@ -160,7 +160,7 @@ def test_remove_tag_to_document(self):
         self.responses.add(
             responses.DELETE,
-            f"{self.base_url}{self.api.view_path}{self.document_id}/tags/{TEST_TAG_ID}",
+            f"{BASE_URL}{self.api.view_path}{self.document_id}/tags/{TEST_TAG_ID}",
             status=200,
         )
         self.api.remove_tag_from_document(self.document_id, TEST_TAG_ID)
 
@@ -171,7 +171,7 @@ def test_list_hosts_with_tag(self):
             pytest.skip("Only applicable to hosts assets")
         self.responses.add(
             responses.GET,
-            self.base_url + self.api.tags_path + "/" + TEST_TAG_ID + "/hosts",
+            BASE_URL + self.api.tags_path + "/" + TEST_TAG_ID + "/hosts",
             status=200,
             json=LIST_HOSTS_RESPONSE,
         )
@@ -185,7 +185,7 @@ def test_list_certs_with_tag(self):
             pytest.skip("Only applicable to certs assets")
         self.responses.add(
             responses.GET,
-            self.base_url + self.api.tags_path + "/" + TEST_TAG_ID + "/certificates",
+            BASE_URL + self.api.tags_path + "/" + TEST_TAG_ID + "/certificates",
             status=200,
             json=LIST_CERTS_RESPONSE,
         )
diff --git a/tests/test_client.py b/tests/test_client.py
index 53b836c7..15757849 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -2,7 +2,7 @@
 from censys.search import SearchClient
 
 ALL_INDEXES = {
-    "v1": ["certificates", "data", "ipv4", "websites"],
+    "v1": ["certificates", "data"],
     "v2": ["hosts"],
 }
 
diff --git a/tests/utils.py b/tests/utils.py
index 1e6e1cd6..9d6aed6a 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -5,9 +5,9 @@
 
 from censys.common.base import CensysAPIBase
 
-V1_URL = "https://censys.io/api/v1"
-V1_ENDPOINT_ON_V2_URL = "https://search.censys.io/api/v1"
-V2_URL = "https://search.censys.io/api/v2"
+BASE_URL = "https://search.censys.io/api"
+V1_URL = BASE_URL + "/v1"
+V2_URL = BASE_URL + "/v2"
 
 
 class CensysTestCase(unittest.TestCase):