Mirror of https://github.com/secynic/ipwhois.git (synced 2025-12-10 00:40:57 -06:00)

Commit b3d9c4f353: Merge branch 'dev' into secynic-gh-action-pypi
(coverage configuration)

@@ -1,4 +1,5 @@
 [report]
+show_missing = True
 omit =
     */python?.?/*
     */site-packages/nose/*
.gitignore (vendored): 5 changed lines
@@ -36,4 +36,7 @@ nosetests.xml
 .pydevproject

 MANIFEST
 .idea
+.history
+.vscode
+.venv
(Travis CI configuration)

@@ -1,5 +1,5 @@
 language: python
-sudo: required
+os: linux
 dist: xenial
 python:
 - 2.7
@@ -7,6 +7,7 @@ python:
 - 3.5
 - 3.6
 - 3.7
+- 3.8
 install:
 - pip install --upgrade setuptools
 - pip install --upgrade pip
ASN.rst: 31 changed lines
@@ -5,6 +5,25 @@ IP ASN Lookups
 This is new functionality as of v0.15.0. This functionality was migrated from
 net.Net and is still used by IPWhois.lookup*().

+.. note::
+
+    Cymru ASN data should not be considered a primary source for data points
+    like country code.
+
+    Message from the Cymru site::
+
+        The country code, registry, and allocation date are all based on data
+        obtained directly from the regional registries including: ARIN, RIPE,
+        AFRINIC, APNIC, LACNIC. The information returned relating to these
+        categories will only be as accurate as the data present in the RIR
+        databases.
+
+        IMPORTANT NOTE: Country codes are likely to vary significantly from
+        actual IP locations, and we must strongly advise that the IP to ASN
+        mapping tool not be used as an IP geolocation (GeoIP) service.
+
+    https://team-cymru.com/community-services/ip-asn-mapping/
+
 .. _ip-asn-input:

 IP ASN Input
@@ -24,12 +43,6 @@ Arguments supported by IPASN.lookup().
 | | | resets, etc. are encountered. |
 | | | Defaults to 3. |
 +------------------------+--------+-------------------------------------------+
-| asn_alts | list | Additional lookup types to attempt if the |
-| | | ASN dns lookup fails. Allow permutations |
-| | | must be enabled. If None, defaults to all |
-| | | ['whois', 'http']. *WARNING* deprecated |
-| | | in favor of new argument asn_methods. |
-+------------------------+--------+-------------------------------------------+
 | extra_org_map | dict | Dictionary mapping org handles to RIRs. |
 | | | This is for limited cases where ARIN |
 | | | REST (ASN fallback HTTP lookup) does not |
@@ -157,12 +170,6 @@ Arguments supported by ASNOrigin.lookup().
 | | | ['description', 'maintainer', 'updated', |
 | | | 'source']. If None, defaults to all. |
 +------------------------+--------+-------------------------------------------+
-| asn_alts | list | Additional lookup types to attempt if the |
-| | | ASN dns lookup fails. Allow permutations |
-| | | must be enabled. If None, defaults to all |
-| | | ['http']. *WARNING* deprecated |
-| | | in favor of new argument asn_methods. |
-+------------------------+--------+-------------------------------------------+
 | asn_methods | list | ASN lookup types to attempt, in order. If |
 | | | None, defaults to all ['whois', 'http']. |
 +------------------------+--------+-------------------------------------------+
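To illustrate the argument change in this file (asn_alts is removed; asn_methods now selects the lookup order), here is a minimal sketch using the Net and IPASN classes referenced above; the address and the printed keys are illustrative of the documented ASN output fields:

    from ipwhois.asn import IPASN
    from ipwhois.net import Net

    # Build the network object for the target address, then run the ASN lookup.
    # 'dns', 'whois', 'http' is also the default order when asn_methods is None.
    net = Net('74.125.225.229')
    ipasn = IPASN(net)
    results = ipasn.lookup(asn_methods=['dns', 'whois', 'http'])

    # Per the Cymru note above, treat asn_country_code as registration data,
    # not geolocation.
    print(results['asn'], results['asn_country_code'])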
CHANGES.rst: 34 changed lines
@@ -1,6 +1,38 @@
 Changelog
 =========

+1.2.0 (TBD)
+------------------
+
+- Removed deprecated functions: asn.IPASN._parse_fields_http,
+  asn.IPASN._parse_fields_dns, asn.IPASN._parse_fields_whois,
+  asn.ASNOrigin._parse_fields, asn.ASNOrigin._get_nets_radb,
+  net.Net.lookup_asn, whois.Whois._parse_fields, whois.Whois._get_nets_arin,
+  whois.Whois._get_nets_lacnic, whois.Whois._get_nets_other,
+  nir.NIRWhois._parse_fields, nir.NIRWhois._get_nets_jpnic,
+  nir.NIRWhois._get_nets_krnic, nir.NIRWhois._get_contact (#230)
+- Removed deprecated asn_alts parameter (#230)
+- Removed deprecated allow_permutations parameter (#230)
+- Fixed ASNOrigin lookups (#216)
+- Fixed bug in ASNOrigin lookups when multiple asn_methods provided (#216)
+- Fixed bug in KRNIC queries due to a change in their service (#243)
+- Fixed bug in experimental.bulk_lookup_rdap where only the last
+  result was returned (#262 - ameidatou)
+- Fixed deprecation warnings due to invalid escape sequences
+  (#272 - tirkarthi)
+- Fixed bug in root and sub-entities not getting queried/data (#247)
+- Fixed NIR datetime parsing issue if only date is returned (#284)
+- Added new argument root_ent_check to IPWhois.lookup_rdap and
+  RDAP.lookup. Set this to False to revert to old functionality - missing data,
+  but fewer queries (#247)
+- Added support for Python 3.8 (#267)
+- Fixed travis build warnings (#268)
+- Pinned requirements (#274)
+- Added ip_failed_total key to stats dictionary in
+  experimental.bulk_lookup_rdap (#235)
+- Added ipv4_generate_random and ipv6_generate_random to utils CLI (#236)
+- Added documentation note for ASN data (#278)
+
 1.1.0 (2019-02-01)
 ------------------

@@ -217,4 +249,4 @@ Changelog
 - Added support for IPv4Address or IPv6Address as the address arg in IPWhois.
 - Fixed file open encoding bug. Moved from open to io.open.
 - Fixed parameter in IPWhois ip defined checks.
 - Fixed TestIPWhois.test_ip_invalid() assertions.
CLI.rst: 40 changed lines
@@ -22,7 +22,7 @@ ipwhois_cli.py [-h] [--whois] [--exclude_nir] [--json] [--hr]
 [--proxy_http "PROXY_HTTP"]
 [--proxy_https "PROXY_HTTPS"]
 [--inc_raw] [--retry_count RETRY_COUNT]
-[--asn_alts "ASN_ALTS"] [--asn_methods "ASN_METHODS"]
+[--asn_methods "ASN_METHODS"]
 [--extra_org_map "EXTRA_ORG_MAP"]
 [--skip_asn_description] [--depth COLOR_DEPTH]
 [--excluded_entities "EXCLUDED_ENTITIES"] [--bootstrap]
@@ -66,12 +66,6 @@ Common settings (RDAP & Legacy Whois):
 --retry_count RETRY_COUNT
     The number of times to retry in case socket errors,
     timeouts, connection resets, etc. are encountered.
---asn_alts ASN_ALTS
-    A comma delimited list of additional lookup types to
-    attempt if the ASN dns lookup fails. Allow
-    permutations must be enabled. Defaults to all:
-    "whois,http". *WARNING* deprecated in favor of new
-    argument asn_methods.
 --asn_methods ASN_METHODS
     List of ASN lookup types to attempt, in order.
     Defaults to all ['dns', 'whois', 'http'].
@@ -174,6 +168,12 @@ optional arguments:
 --ipv6_is_defined IPADDRESS
     Check if an IPv6 address is defined (in a reserved
     address range).
+--ipv4_generate_random TOTAL
+    Generate random, unique IPv4 addresses that are not
+    defined (can be looked up using ipwhois).
+--ipv6_generate_random TOTAL
+    Generate random, unique IPv6 addresses that are not
+    defined (can be looked up using ipwhois).
 --unique_everseen ITERABLE
     List unique elements from input iterable, preserving
     the order.
@@ -267,6 +267,32 @@ ipv6_is_defined
     Name: Unique Local Unicast
     RFC: RFC 4193
+
+ipv4_generate_random
+^^^^^^^^^^^^^^^^^^^^
+
+::
+
+    >>>> ipwhois_utils_cli.py --ipv4_generate_random 5
+
+    119.224.47.74
+    128.106.183.195
+    54.97.0.158
+    52.206.105.37
+    126.180.201.81
+
+ipv6_generate_random
+^^^^^^^^^^^^^^^^^^^^
+
+::
+
+    >>>> ipwhois_utils_cli.py --ipv6_generate_random 5
+
+    3e8c:dc93:49c8:57fd:31dd:2963:6332:426e
+    2e3d:fd84:b57b:9282:91e6:5d4d:18d5:34f1
+    21d4:9d25:7dd6:e28b:77d7:7ce9:f85f:b34f
+    3659:2b9:12ed:1eac:fd40:5756:3753:6d2d
+    2e05:6d47:83fd:5de8:c6cb:85cb:912:fdb1
+
 unique_everseen
 ^^^^^^^^^^^^^^^
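The two new CLI flags can also be exercised from Python. A small sketch, assuming the flags wrap the same-named generators in ipwhois.utils (output is random, so it will differ from the CLI examples above):

    from ipwhois.utils import ipv4_generate_random, ipv6_generate_random

    # Yield five random, non-reserved addresses of each family, mirroring
    # --ipv4_generate_random / --ipv6_generate_random.
    for ip in ipv4_generate_random(5):
        print(ip)

    for ip in ipv6_generate_random(5):
        print(ip)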
(experimental lookup documentation)

@@ -68,21 +68,21 @@ Basic usage
 >>>> pprint(results.split('\n'))

 [
-"Bulk mode; whois.cymru.com [2017-07-30 23:02:21 +0000]",
+"Bulk mode; whois.cymru.com [2020-09-15 16:42:29 +0000]",
-"15169 | 74.125.225.229 | 74.125.225.0/24 | US | arin | 2007-03-13 | GOOGLE - Google Inc., US",
+"15169 | 74.125.225.229 | 74.125.225.0/24 | US | arin | 2007-03-13 | GOOGLE, US",
-"15169 | 2001:4860:4860::8888 | 2001:4860::/32 | US | arin | 2005-03-14 | GOOGLE - Google Inc., US",
+"15169 | 2001:4860:4860::8888 | 2001:4860::/32 | US | arin | 2005-03-14 | GOOGLE, US",
 "2856 | 62.239.237.1 | 62.239.0.0/16 | GB | ripencc | 2001-01-02 | BT-UK-AS BTnet UK Regional network, GB",
 "2856 | 2a00:2381:ffff::1 | 2a00:2380::/25 | GB | ripencc | 2007-08-29 | BT-UK-AS BTnet UK Regional network, GB",
-"3786 | 210.107.73.73 | 210.107.0.0/17 | KR | apnic | | LGDACOM LG DACOM Corporation, KR",
+"3786 | 210.107.73.73 | 210.107.0.0/17 | KR | apnic | 1997-08-29 | LGDACOM LG DACOM Corporation, KR",
 "2497 | 2001:240:10c:1::ca20:9d1d | 2001:240::/32 | JP | apnic | 2000-03-08 | IIJ Internet Initiative Japan Inc., JP",
 "19373 | 200.57.141.161 | 200.57.128.0/20 | MX | lacnic | 2000-12-04 | Triara.com, S.A. de C.V., MX",
 "NA | 2801:10:c000:: | NA | CO | lacnic | 2013-10-29 | NA",
-"12091 | 196.11.240.215 | 196.11.240.0/24 | ZA | afrinic | | MTNNS-1, ZA",
+"12091 | 196.11.240.215 | 196.11.240.0/24 | ZA | afrinic | 1994-07-21 | MTNNS-1, ZA",
 "37578 | 2001:43f8:7b0:: | 2001:43f8:7b0::/48 | KE | afrinic | 2013-03-22 | Tespok, KE",
-"4730 | 133.1.2.5 | 133.1.0.0/16 | JP | apnic | | ODINS Osaka University, JP",
+"4730 | 133.1.2.5 | 133.1.0.0/16 | JP | apnic | 1997-03-01 | ODINS Osaka University, JP",
-"4134 | 115.1.2.3 | 115.0.0.0/14 | KR | apnic | 2008-07-01 | CHINANET-BACKBONE No.31,Jin-rong Street, CN",
+"4766 | 115.1.2.3 | 115.0.0.0/12 | KR | apnic | 2008-07-01 | KIXS-AS-KR Korea Telecom, KR",
 ""
-}
+]

 .. GET_BULK_ASN_WHOIS_OUTPUT_BASIC END

@@ -175,11 +175,14 @@ The stats dictionary returned by ipwhois.experimental.bulk_lookup_rdap()
 'ip_lookup_total' (int) - The total number of addresses that
     lookups were attempted for, excluding any that failed ASN
     registry checks.
+'ip_failed_total' (int) - The total number of addresses that
+    lookups failed for. Excludes any that failed initially, but
+    succeeded after further retries.
 'lacnic' (dict) -
 {
     'failed' (list) - The addresses that failed to lookup.
         Excludes any that failed initially, but succeeded after
-        futher retries.
+        further retries.
     'rate_limited' (list) - The addresses that encountered
         rate-limiting. Unless an address is also in 'failed',
         it eventually succeeded.
@@ -222,9 +225,7 @@ Basic usage
 "total": 2
 },
 "apnic": {
-"failed": [
-"115.1.2.3"
-],
+"failed": [],
 "rate_limited": [],
 "total": 4
 },
@@ -233,6 +234,7 @@ Basic usage
 "rate_limited": [],
 "total": 2
 },
+"ip_failed_total": 0,
 "ip_input_total": 12,
 "ip_lookup_total": 12,
 "ip_unique_total": 12,
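A short sketch of reading the new counter from Python, assuming the documented (results, stats) return of bulk_lookup_rdap; the address list is only an illustration:

    from ipwhois.experimental import bulk_lookup_rdap

    addresses = ['74.125.225.229', '2001:4860:4860::8888', '196.11.240.215']

    # Failed addresses now show up both per-RIR (stats['apnic']['failed'], etc.)
    # and in the aggregate ip_failed_total counter added in this change set.
    results, stats = bulk_lookup_rdap(addresses=addresses, depth=0)

    print(stats['ip_failed_total'])
    print(stats['apnic']['failed'])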
RDAP.rst: 19 changed lines
@@ -42,12 +42,6 @@ Arguments supported by IPWhois.lookup_rdap().
 | | | when a rate limit notice is returned via |
 | | | rdap+json. Defaults to 120. |
 +--------------------+--------+-----------------------------------------------+
-| asn_alts | list | Additional lookup types to attempt if the ASN |
-| | | dns lookup fails. Allow permutations must be |
-| | | enabled. If None, defaults to all |
-| | | ['whois', 'http']. *WARNING* deprecated in |
-| | | favor of new argument asn_methods. |
-+--------------------+--------+-----------------------------------------------+
 | extra_org_map | dict | Dictionary mapping org handles to RIRs. |
 | | | This is for limited cases where ARIN REST |
 | | | (ASN fallback HTTP lookup) does not show an |
@@ -79,6 +73,10 @@ Arguments supported by IPWhois.lookup_rdap().
 | | | pulling ASN information via dns, in order to |
 | | | get the ASN description. Defaults to True. |
 +--------------------+--------+-----------------------------------------------+
+| root_ent_check | bool | If True, will perform additional RDAP HTTP |
+| | | queries for missing entity data at the root |
+| | | level. Defaults to True. |
++--------------------+--------+-----------------------------------------------+

 .. _rdap-output:

@@ -599,3 +597,12 @@ this very low for bulk queries, or disable completely by setting retry_count=0.

 Note that setting this result too low may cause a larger number of IP lookups
 to fail.
+
+root_ent_check
+^^^^^^^^^^^^^^
+
+When root level entities (depth=0) are missing vcard data, additional
+entity specific HTTP lookups are performed. In the past, you would expect
+depth=0 to mean a single lookup per IP. This was a bug and has been fixed as of
+v1.2.0. Set this to False to revert back to the old method, although you will be
+missing entity specific data.
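A minimal sketch of the new argument in use (the address and depth are illustrative):

    from ipwhois import IPWhois

    obj = IPWhois('74.125.225.229')

    # The default (root_ent_check=True) issues extra per-entity RDAP queries to
    # fill in missing root-level vcard data; disable it to trade completeness
    # for fewer HTTP requests, as described above.
    results = obj.lookup_rdap(depth=0, root_ent_check=False)
    print(results['entities'])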
(README.rst)

@@ -7,8 +7,10 @@ ipwhois
 .. image:: https://coveralls.io/repos/github/secynic/ipwhois/badge.svg?branch=
     master
     :target: https://coveralls.io/github/secynic/ipwhois?branch=master
+.. image:: https://img.shields.io/github/issues-raw/secynic/ipwhois
+    :target: https://github.com/secynic/ipwhois/issues
 .. image:: https://codeclimate.com/github/secynic/ipwhois/badges/issue_count.svg
     :target: https://codeclimate.com/github/secynic/ipwhois
 .. image:: https://img.shields.io/badge/license-BSD%202--Clause-blue.svg
     :target: https://github.com/secynic/ipwhois/tree/master/LICENSE.txt
 .. image:: https://img.shields.io/badge/python-2.7%2C%203.4+-blue.svg
@@ -170,11 +172,6 @@ Input
 | proxy_opener | object | The urllib.request.OpenerDirector request for |
 | | | proxy support or None. |
 +--------------------+--------+-----------------------------------------------+
-| allow_permutations | bool | Allow net.Net() to use additional methods if |
-| | | DNS lookups to Cymru fail. *WARNING* |
-| | | deprecated in favor of new argument |
-| | | asn_methods. Defaults to False. |
-+--------------------+--------+-----------------------------------------------+

 RDAP (HTTP)
 -----------
(UPGRADING.rst)

@@ -9,6 +9,26 @@ any changes that may affect user experience when upgrading to a new release.
 This page is new as of version 1.0.0. Any information on older versions is
 likely missing or incomplete.

+******
+v1.2.0
+******
+
+- Removed deprecated functions: asn.IPASN._parse_fields_http,
+  asn.IPASN._parse_fields_dns, asn.IPASN._parse_fields_whois,
+  asn.ASNOrigin._parse_fields, asn.ASNOrigin._get_nets_radb,
+  net.Net.lookup_asn, whois.Whois._parse_fields, whois.Whois._get_nets_arin,
+  whois.Whois._get_nets_lacnic, whois.Whois._get_nets_other,
+  nir.NIRWhois._parse_fields, nir.NIRWhois._get_nets_jpnic,
+  nir.NIRWhois._get_nets_krnic, nir.NIRWhois._get_contact
+- Removed deprecated asn_alts parameter
+- Removed deprecated allow_permutations parameter
+- Added new argument root_ent_check to IPWhois.lookup_rdap and
+  RDAP.lookup. Set this to False to revert to old functionality - missing data,
+  but fewer queries. If you leave this set to the default of True, you will
+  notice more queries and potentially more rate-limiting.
+- Added support for Python 3.8
+- Pinned requirements
+
 ******
 v1.1.0
 ******
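As a migration sketch for the removals listed above (the old keyword arguments are shown only as comments; they no longer exist in 1.2.0):

    from ipwhois import IPWhois

    # Pre-1.2.0 (no longer valid):
    #   obj = IPWhois('74.125.225.229', allow_permutations=True)
    #   results = obj.lookup_whois(asn_alts=['whois', 'http'])

    # 1.2.0 and later: pass the full lookup order via asn_methods instead.
    obj = IPWhois('74.125.225.229')
    results = obj.lookup_whois(asn_methods=['dns', 'whois', 'http'])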
(WHOIS.rst)

@@ -53,12 +53,6 @@ Arguments supported by IPWhois.lookup_whois().
 | | | 'postal_code', 'emails', 'created', |
 | | | 'updated']. If None, defaults to all. |
 +------------------------+--------+-------------------------------------------+
-| asn_alts | list | Additional lookup types to attempt if the |
-| | | ASN dns lookup fails. Allow permutations |
-| | | must be enabled. If None, defaults to all |
-| | | ['whois', 'http']. *WARNING* deprecated |
-| | | in favor of new argument asn_methods. |
-+------------------------+--------+-------------------------------------------+
 | extra_org_map | dict | Dictionary mapping org handles to RIRs. |
 | | | This is for limited cases where ARIN |
 | | | REST (ASN fallback HTTP lookup) does not |
ipwhois/asn.py: 127 changed lines
@@ -61,21 +61,21 @@ ASN_ORIGIN_WHOIS = {

 ASN_ORIGIN_HTTP = {
     'radb': {
-        'url': 'http://www.radb.net/query/',
+        'url': 'http://www.radb.net/query',
         'form_data_asn_field': 'keywords',
         'form_data': {
             'advanced_query': '1',
             'query': 'Query',
-            '-T option': 'inet-rtr',
+            # '-T option': 'inet-rtr',
             'ip_option': '',
             '-i': '1',
             '-i option': 'origin'
         },
         'fields': {
-            'description': r'(descr):[^\S\n]+(?P<val>.+?)\<br\>',
-            'maintainer': r'(mnt-by):[^\S\n]+(?P<val>.+?)\<br\>',
-            'updated': r'(changed):[^\S\n]+(?P<val>.+?)\<br\>',
-            'source': r'(source):[^\S\n]+(?P<val>.+?)\<br\>',
+            'description': r'(descr):[^\S\n]+(?P<val>.+?)\n',
+            'maintainer': r'(mnt-by):[^\S\n]+(?P<val>.+?)\n',
+            'updated': r'(changed):[^\S\n]+(?P<val>.+?)\n',
+            'source': r'(source):[^\S\n]+(?P<val>.+?)\<',
         }
     },
 }
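The field patterns now expect newline-terminated plain text rather than HTML '<br>' markup. A toy check of the updated 'description' pattern (the sample response text is made up for illustration):

    import re

    sample = 'descr:          Google LLC\nmnt-by:         MAINT-AS15169\n'

    # Same pattern as the updated 'description' entry above.
    pattern = r'(descr):[^\S\n]+(?P<val>.+?)\n'
    match = re.search(pattern, sample)
    print(match.group('val'))  # -> Google LLC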
@@ -169,16 +169,6 @@ class IPASN:

         return ret

-    def _parse_fields_dns(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('IPASN._parse_fields_dns() has been deprecated and will be '
-             'removed. You should now use IPASN.parse_fields_dns().')
-        return self.parse_fields_dns(*args, **kwargs)
-
     def parse_fields_verbose_dns(self, response):
         """
         The function for parsing ASN fields from a verbose dns response.
@@ -293,16 +283,6 @@ class IPASN:

         return ret

-    def _parse_fields_whois(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('IPASN._parse_fields_whois() has been deprecated and will be '
-             'removed. You should now use IPASN.parse_fields_whois().')
-        return self.parse_fields_whois(*args, **kwargs)
-
     def parse_fields_http(self, response, extra_org_map=None):
         """
         The function for parsing ASN fields from a http response.
@@ -403,19 +383,8 @@ class IPASN:

         return asn_data

-    def _parse_fields_http(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('IPASN._parse_fields_http() has been deprecated and will be '
-             'removed. You should now use IPASN.parse_fields_http().')
-        return self.parse_fields_http(*args, **kwargs)
-
-    def lookup(self, inc_raw=False, retry_count=3, asn_alts=None,
-               extra_org_map=None, asn_methods=None,
-               get_asn_description=True):
+    def lookup(self, inc_raw=False, retry_count=3, extra_org_map=None,
+               asn_methods=None, get_asn_description=True):
         """
         The wrapper function for retrieving and parsing ASN information for an
         IP address.
@@ -426,10 +395,6 @@ class IPASN:
             retry_count (:obj:`int`): The number of times to retry in case
                 socket errors, timeouts, connection resets, etc. are
                 encountered. Defaults to 3.
-            asn_alts (:obj:`list`): Additional lookup types to attempt if the
-                ASN dns lookup fails. Allow permutations must be enabled.
-                Defaults to all ['whois', 'http']. *WARNING* deprecated in
-                favor of new argument asn_methods. Defaults to None.
             extra_org_map (:obj:`dict`): Mapping org handles to RIRs. This is
                 for limited cases where ARIN REST (ASN fallback HTTP lookup)
                 does not show an RIR as the org handle e.g., DNIC (which is
@@ -466,17 +431,7 @@ class IPASN:

         if asn_methods is None:

-            if asn_alts is None:
-
-                lookups = ['dns', 'whois', 'http']
-
-            else:
-
-                from warnings import warn
-                warn('IPASN.lookup() asn_alts argument has been deprecated '
-                     'and will be removed. You should now use the asn_methods '
-                     'argument.')
-                lookups = ['dns'] + asn_alts
+            lookups = ['dns', 'whois', 'http']

         else:

@@ -492,12 +447,6 @@ class IPASN:
         dns_success = False
         for index, lookup_method in enumerate(lookups):

-            if index > 0 and not asn_methods and not (
-                    self._net.allow_permutations):
-
-                raise ASNRegistryError('ASN registry lookup failed. '
-                                       'Permutations not allowed.')
-
             if lookup_method == 'dns':

                 try:
@@ -706,16 +655,6 @@ class ASNOrigin:

         return ret

-    def _parse_fields(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('ASNOrigin._parse_fields() has been deprecated and will be '
-             'removed. You should now use ASNOrigin.parse_fields().')
-        return self.parse_fields(*args, **kwargs)
-
     def get_nets_radb(self, response, is_http=False):
         """
         The function for parsing network blocks from ASN origin data.
@@ -743,7 +682,7 @@ class ASNOrigin:
         nets = []

         if is_http:
-            regex = r'route(?:6)?:[^\S\n]+(?P<val>.+?)<br>'
+            regex = r'route(?:6)?:[^\S\n]+(?P<val>.+?)\n'
         else:
             regex = r'^route(?:6)?:[^\S\n]+(?P<val>.+|.+)$'

@@ -769,18 +708,8 @@ class ASNOrigin:

         return nets

-    def _get_nets_radb(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('ASNOrigin._get_nets_radb() has been deprecated and will be '
-             'removed. You should now use ASNOrigin.get_nets_radb().')
-        return self.get_nets_radb(*args, **kwargs)
-
     def lookup(self, asn=None, inc_raw=False, retry_count=3, response=None,
-               field_list=None, asn_alts=None, asn_methods=None):
+               field_list=None, asn_methods=None):
         """
         The function for retrieving and parsing ASN origin whois information
         via port 43/tcp (WHOIS).
@@ -797,9 +726,6 @@ class ASNOrigin:
             field_list (:obj:`list`): If provided, fields to parse:
                 ['description', 'maintainer', 'updated', 'source']
                 If None, defaults to all.
-            asn_alts (:obj:`list`): Additional lookup types to attempt if the
-                ASN whois lookup fails. If None, defaults to all ['http'].
-                *WARNING* deprecated in favor of new argument asn_methods.
             asn_methods (:obj:`list`): ASN lookup types to attempt, in order.
                 If None, defaults to all ['whois', 'http'].

@@ -828,17 +754,7 @@ class ASNOrigin:

         if asn_methods is None:

-            if asn_alts is None:
-
-                lookups = ['whois', 'http']
-
-            else:
-
-                from warnings import warn
-                warn('ASNOrigin.lookup() asn_alts argument has been deprecated'
-                     ' and will be removed. You should now use the asn_methods'
-                     ' argument.')
-                lookups = ['whois'] + asn_alts
+            lookups = ['whois', 'http']

         else:

@@ -875,6 +791,8 @@ class ASNOrigin:
                         asn=asn, retry_count=retry_count
                     )

+                    break
+
                 except (WhoisLookupError, WhoisRateLimitError) as e:

                     log.debug('ASN origin WHOIS lookup failed: {0}'
@@ -888,17 +806,22 @@ class ASNOrigin:
                         log.debug('Response not given, perform ASN origin '
                                   'HTTP lookup for: {0}'.format(asn))

-                        tmp = ASN_ORIGIN_HTTP['radb']['form_data']
-                        tmp[str(ASN_ORIGIN_HTTP['radb']['form_data_asn_field']
-                                )] = asn
+                        # tmp = ASN_ORIGIN_HTTP['radb']['form_data']
+                        # tmp[str(
+                        #     ASN_ORIGIN_HTTP['radb']['form_data_asn_field']
+                        # )] = asn
                         response = self._net.get_http_raw(
-                            url=ASN_ORIGIN_HTTP['radb']['url'],
+                            url=('{0}?advanced_query=1&keywords={1}&-T+option'
+                                 '=&ip_option=&-i=1&-i+option=origin'
+                                 ).format(ASN_ORIGIN_HTTP['radb']['url'], asn),
                             retry_count=retry_count,
-                            request_type='POST',
-                            form_data=tmp
+                            request_type='GET',
+                            # form_data=tmp
                         )
                         is_http = True  # pragma: no cover

+                        break
+
                     except HTTPLookupError as e:

                         log.debug('ASN origin HTTP lookup failed: {0}'
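A minimal sketch of the repaired ASN origin lookup path ('whois' is tried first, with the RADB HTTP GET endpoint above as fallback; the ASN value and printed field are illustrative):

    from ipwhois.asn import ASNOrigin
    from ipwhois.net import Net

    net = Net('2001:43f8:7b0::')
    asn_origin = ASNOrigin(net)

    # The deprecated asn_alts keyword is gone; order is controlled by asn_methods.
    results = asn_origin.lookup(asn='AS37578', asn_methods=['whois', 'http'])
    print(results['nets'][0]['description'])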
(docs requirements)

@@ -1,4 +1,3 @@
 sphinx
-sphinxcontrib-napoleon
 sphinx_rtd_theme
-dnspython
+dnspython<=2.0.0
(docs/conf.py)

@@ -40,7 +40,7 @@ extensions = [
     'sphinx.ext.doctest',
     'sphinx.ext.coverage',
     'sphinx.ext.viewcode',
-    'sphinxcontrib.napoleon'
+    'sphinx.ext.napoleon'
 ]

 napoleon_google_docstring = True
(ipwhois/experimental.py)

@@ -158,11 +158,14 @@ def bulk_lookup_rdap(addresses=None, inc_raw=False, retry_count=3, depth=0,
         'ip_lookup_total' (int) - The total number of addresses that
             lookups were attempted for, excluding any that failed ASN
             registry checks.
+        'ip_failed_total' (int) - The total number of addresses that
+            lookups failed for. Excludes any that failed initially, but
+            succeeded after further retries.
         'lacnic' (dict) -
         {
             'failed' (list) - The addresses that failed to lookup.
                 Excludes any that failed initially, but succeeded after
-                futher retries.
+                further retries.
             'rate_limited' (list) - The addresses that encountered
                 rate-limiting. Unless an address is also in 'failed',
                 it eventually succeeded.
@@ -196,6 +199,7 @@ def bulk_lookup_rdap(addresses=None, inc_raw=False, retry_count=3, depth=0,
         'ip_input_total': len(addresses),
         'ip_unique_total': 0,
         'ip_lookup_total': 0,
+        'ip_failed_total': 0,
         'lacnic': {'failed': [], 'rate_limited': [], 'total': 0},
         'ripencc': {'failed': [], 'rate_limited': [], 'total': 0},
         'apnic': {'failed': [], 'rate_limited': [], 'total': 0},
@@ -253,15 +257,15 @@ def bulk_lookup_rdap(addresses=None, inc_raw=False, retry_count=3, depth=0,

         try:

-            results = ipasn.parse_fields_whois(asn_result)
+            asn_parsed = ipasn.parse_fields_whois(asn_result)

         except ASNRegistryError:  # pragma: no cover

             continue

         # Add valid IP ASN result to asn_parsed_results for RDAP lookup
-        asn_parsed_results[ip] = results
-        stats[results['asn_registry']]['total'] += 1
+        asn_parsed_results[ip] = asn_parsed
+        stats[asn_parsed['asn_registry']]['total'] += 1

     # Set the list of IPs that are not allocated/failed ASN lookup
     stats['unallocated_addresses'] = list(k for k in addresses if k not in
@@ -362,7 +366,7 @@ def bulk_lookup_rdap(addresses=None, inc_raw=False, retry_count=3, depth=0,

                 # Perform the RDAP lookup. retry_count is set to 0
                 # here since we handle that in this function
-                results = rdap.lookup(
+                rdap_result = rdap.lookup(
                     inc_raw=inc_raw, retry_count=0, asn_data=asn_data,
                     depth=depth, excluded_entities=excluded_entities
                 )
@@ -373,7 +377,9 @@ def bulk_lookup_rdap(addresses=None, inc_raw=False, retry_count=3, depth=0,
                 # Lookup was successful, add to result. Set the nir
                 # key to None as this is not supported
                 # (yet - requires more queries)
-                results[ip] = results
+                results[ip] = asn_data
+                results[ip].update(rdap_result)

                 results[ip]['nir'] = None

                 # Remove the IP from the lookup queue
@@ -423,6 +429,7 @@ def bulk_lookup_rdap(addresses=None, inc_raw=False, retry_count=3, depth=0,

                 del asn_parsed_results[ip]
                 stats[rir]['failed'].append(ip)
+                stats['ip_failed_total'] += 1

                 if rir == 'lacnic':
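The fix for #262 is in the last two hunks: the per-address RDAP result is merged onto the ASN data instead of overwriting the shared results dict. A standalone sketch of that merge (dummy dictionaries, slightly simplified from the code above):

    asn_data = {'asn': '15169', 'asn_registry': 'arin'}
    rdap_result = {'network': {'cidr': '74.125.225.0/24'}, 'entities': ['GOGL']}

    results = {}
    ip = '74.125.225.229'

    # Start from the parsed ASN fields, then layer the per-IP RDAP result on top.
    results[ip] = dict(asn_data)
    results[ip].update(rdap_result)
    results[ip]['nir'] = None

    print(sorted(results[ip]))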
(ipwhois/ipwhois.py)

@@ -42,17 +42,12 @@ class IPWhois:
             seconds. Defaults to 5.
         proxy_opener (:obj:`urllib.request.OpenerDirector`): The request for
             proxy support. Defaults to None.
-        allow_permutations (:obj:`bool`): Allow net.Net() to use additional
-            methods if DNS lookups to Cymru fail. *WARNING* deprecated in
-            favor of new argument asn_methods. Defaults to False.
     """

-    def __init__(self, address, timeout=5, proxy_opener=None,
-                 allow_permutations=False):
+    def __init__(self, address, timeout=5, proxy_opener=None):

         self.net = Net(
-            address=address, timeout=timeout, proxy_opener=proxy_opener,
-            allow_permutations=allow_permutations
+            address=address, timeout=timeout, proxy_opener=proxy_opener
         )
         self.ipasn = IPASN(self.net)

@@ -71,7 +66,7 @@ class IPWhois:

     def lookup_whois(self, inc_raw=False, retry_count=3, get_referral=False,
                      extra_blacklist=None, ignore_referral_errors=False,
-                     field_list=None, asn_alts=None, extra_org_map=None,
+                     field_list=None, extra_org_map=None,
                      inc_nir=True, nir_field_list=None, asn_methods=None,
                      get_asn_description=True):
         """
@@ -95,10 +90,6 @@ class IPWhois:
                 ['name', 'handle', 'description', 'country', 'state', 'city',
                 'address', 'postal_code', 'emails', 'created', 'updated']
                 If None, defaults to all.
-            asn_alts (:obj:`list`): Additional lookup types to attempt if the
-                ASN dns lookup fails. Allow permutations must be enabled.
-                If None, defaults to all ['whois', 'http']. *WARNING*
-                deprecated in favor of new argument asn_methods.
             extra_org_map (:obj:`dict`): Dictionary mapping org handles to
                 RIRs. This is for limited cases where ARIN REST (ASN fallback
                 HTTP lookup) does not show an RIR as the org handle e.g., DNIC
@@ -161,7 +152,7 @@ class IPWhois:
         log.debug('ASN lookup for {0}'.format(self.address_str))

         asn_data = self.ipasn.lookup(
-            inc_raw=inc_raw, retry_count=retry_count, asn_alts=asn_alts,
+            inc_raw=inc_raw, retry_count=retry_count,
             extra_org_map=extra_org_map, asn_methods=asn_methods,
             get_asn_description=get_asn_description
         )
@@ -206,9 +197,9 @@ class IPWhois:

     def lookup_rdap(self, inc_raw=False, retry_count=3, depth=0,
                     excluded_entities=None, bootstrap=False,
-                    rate_limit_timeout=120, asn_alts=None, extra_org_map=None,
+                    rate_limit_timeout=120, extra_org_map=None,
                     inc_nir=True, nir_field_list=None, asn_methods=None,
-                    get_asn_description=True):
+                    get_asn_description=True, root_ent_check=True):
         """
         The function for retrieving and parsing whois information for an IP
         address via HTTP (RDAP).
@@ -233,10 +224,6 @@ class IPWhois:
             rate_limit_timeout (:obj:`int`): The number of seconds to wait
                 before retrying when a rate limit notice is returned via
                 rdap+json. Defaults to 120.
-            asn_alts (:obj:`list`): Additional lookup types to attempt if the
-                ASN dns lookup fails. Allow permutations must be enabled.
-                If None, defaults to all ['whois', 'http']. *WARNING*
-                deprecated in favor of new argument asn_methods.
             extra_org_map (:obj:`dict`): Dictionary mapping org handles to
                 RIRs. This is for limited cases where ARIN REST (ASN fallback
                 HTTP lookup) does not show an RIR as the org handle e.g., DNIC
@@ -260,6 +247,9 @@ class IPWhois:
             get_asn_description (:obj:`bool`): Whether to run an additional
                 query when pulling ASN information via dns, in order to get
                 the ASN description. Defaults to True.
+            root_ent_check (:obj:`bool`): If True, will perform
+                additional RDAP HTTP queries for missing entity data at the
+                root level. Defaults to True.

         Returns:
             dict: The IP RDAP lookup results
@@ -303,7 +293,7 @@ class IPWhois:
         # Retrieve the ASN information.
         log.debug('ASN lookup for {0}'.format(self.address_str))
         asn_data = self.ipasn.lookup(
-            inc_raw=inc_raw, retry_count=retry_count, asn_alts=asn_alts,
+            inc_raw=inc_raw, retry_count=retry_count,
             extra_org_map=extra_org_map, asn_methods=asn_methods,
             get_asn_description=get_asn_description
         )
@@ -318,7 +308,8 @@ class IPWhois:
             inc_raw=inc_raw, retry_count=retry_count, asn_data=asn_data,
             depth=depth, excluded_entities=excluded_entities,
             response=response, bootstrap=bootstrap,
-            rate_limit_timeout=rate_limit_timeout
+            rate_limit_timeout=rate_limit_timeout,
+            root_ent_check=root_ent_check
         )

         # Add the RDAP information to the return dictionary.
(ipwhois/net.py)

@@ -103,17 +103,13 @@ class Net:
             seconds. Defaults to 5.
         proxy_opener (:obj:`urllib.request.OpenerDirector`): The request for
             proxy support. Defaults to None.
-        allow_permutations (:obj:`bool`): Allow net.Net() to use additional
-            methods if DNS lookups to Cymru fail. *WARNING* deprecated in
-            favor of new argument asn_methods. Defaults to False.

     Raises:
         IPDefinedError: The address provided is defined (does not need to be
             resolved).
     """

-    def __init__(self, address, timeout=5, proxy_opener=None,
-                 allow_permutations=False):
+    def __init__(self, address, timeout=5, proxy_opener=None):

         # IPv4Address or IPv6Address
         if isinstance(address, IPv4Address) or isinstance(
@@ -129,16 +125,6 @@ class Net:
         # Default timeout for socket connections.
         self.timeout = timeout

-        # Allow other than DNS lookups for ASNs.
-        self.allow_permutations = allow_permutations
-
-        if self.allow_permutations:
-
-            from warnings import warn
-            warn('allow_permutations has been deprecated and will be removed. '
-                 'It is no longer needed, due to the deprecation of asn_alts, '
-                 'and the addition of the asn_methods argument.')
-
         self.dns_resolver = dns.resolver.Resolver()
         self.dns_resolver.timeout = timeout
         self.dns_resolver.lifetime = timeout
@@ -219,21 +205,6 @@ class Net:

         self.dns_zone = IPV6_DNS_ZONE.format(self.reversed)

-    def lookup_asn(self, *args, **kwargs):
-        """
-        Temporary wrapper for IP ASN lookups (moved to
-        asn.IPASN.lookup()). This will be removed in a future
-        release.
-        """
-
-        from warnings import warn
-        warn('Net.lookup_asn() has been deprecated and will be removed. '
-             'You should now use asn.IPASN.lookup() for IP ASN lookups.')
-        from .asn import IPASN
-        response = None
-        ipasn = IPASN(self)
-        return ipasn.lookup(*args, **kwargs), response
-
     def get_asn_dns(self):
         """
         The function for retrieving ASN information for an IP address from
(ipwhois/nir.py)

@@ -87,9 +87,9 @@ NIR_WHOIS = {
            'updated': r'(\[Last Update\])[^\S\n]+(?P<val>.*?)\n',
            'nameservers': r'(\[Nameserver\])[^\S\n]+(?P<val>.*?)\n',
            'contact_admin': r'(\[Administrative Contact\])[^\S\n]+.+?\>'
-                            '(?P<val>.+?)\<\/A\>\n',
+                            '(?P<val>.+?)\\<\\/A\\>\n',
            'contact_tech': r'(\[Technical Contact\])[^\S\n]+.+?\>'
-                           '(?P<val>.+?)\<\/A\>\n'
+                           '(?P<val>.+?)\\<\\/A\\>\n'
        },
        'contact_fields': {
            'name': r'(\[Last, First\])[^\S\n]+(?P<val>.*?)\n',
@@ -108,9 +108,14 @@ NIR_WHOIS = {
    },
    'krnic': {
        'country_code': 'KR',
-       'url': 'https://whois.kisa.or.kr/eng/whois.jsc',
+       'url': 'https://xn--c79as89aj0e29b77z.xn--3e0b707e/eng/whois.jsc',
        'request_type': 'POST',
-       'request_headers': {'Accept': 'text/html'},
+       'request_headers': {
+           'Accept': 'text/html',
+           'Referer': (
+               'https://xn--c79as89aj0e29b77z.xn--3e0b707e/eng/whois.jsp'
+           ),
+       },
        'form_data_ip_field': 'query',
        'fields': {
            'name': r'(Organization Name)[\s]+\:[^\S\n]+(?P<val>.+?)\n',
@@ -120,9 +125,9 @@ NIR_WHOIS = {
            'postal_code': r'(Zip Code)[\s]+\:[^\S\n]+(?P<val>.+?)\n',
            'created': r'(Registration Date)[\s]+\:[^\S\n]+(?P<val>.+?)\n',
            'contact_admin': r'(id="eng_isp_contact").+?\>(?P<val>.*?)\<'
-                            '\/div\>\n',
+                            '\\/div\\>\n',
            'contact_tech': r'(id="eng_user_contact").+?\>(?P<val>.*?)\<'
-                           '\/div\>\n'
+                           '\\/div\\>\n'
        },
        'contact_fields': {
            'name': r'(Name)[^\S\n]+?:[^\S\n]+?(?P<val>.*?)\n',
@@ -260,12 +265,20 @@ class NIRWhois:

                 if field in ['created', 'updated'] and dt_format:

-                    value = (
-                        datetime.strptime(
-                            values[0],
-                            str(dt_format)
-                        ) - timedelta(hours=hourdelta)
-                    ).isoformat('T')
+                    try:
+                        value = (
+                            datetime.strptime(
+                                values[0],
+                                str(dt_format)
+                            ) - timedelta(hours=hourdelta)
+                        ).isoformat('T')
+                    except ValueError:
+                        value = (
+                            datetime.strptime(
+                                values[0],
+                                '%Y/%m/%d'
+                            )
+                        ).isoformat('T')

                 elif field in ['nameservers']:
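The fallback above (for #284) retries with a date-only format when the NIR response omits the time. A standalone sketch of the same idea (the helper name and the full format string are illustrative, and the hour-offset handling is omitted):

    from datetime import datetime

    def parse_nir_timestamp(value, dt_format='%Y/%m/%d %H:%M'):
        # Try the NIR's full date/time format first, then fall back to
        # date-only values, as the patched block above does.
        try:
            return datetime.strptime(value, dt_format).isoformat('T')
        except ValueError:
            return datetime.strptime(value, '%Y/%m/%d').isoformat('T')

    print(parse_nir_timestamp('2020/09/15 16:42'))
    print(parse_nir_timestamp('2020/09/15'))  # previously raised ValueError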
@@ -286,16 +299,6 @@ class NIRWhois:

         return ret

-    def _parse_fields(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('NIRWhois._parse_fields() has been deprecated and will be '
-             'removed. You should now use NIRWhois.parse_fields().')
-        return self.parse_fields(*args, **kwargs)
-
     def get_nets_jpnic(self, response):
         """
         The function for parsing network blocks from jpnic whois data.
@@ -359,16 +362,6 @@ class NIRWhois:

         return nets

-    def _get_nets_jpnic(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('NIRWhois._get_nets_jpnic() has been deprecated and will be '
-             'removed. You should now use NIRWhois.get_nets_jpnic().')
-        return self.get_nets_jpnic(*args, **kwargs)
-
     def get_nets_krnic(self, response):
         """
         The function for parsing network blocks from krnic whois data.
@@ -394,7 +387,7 @@ class NIRWhois:
         # and the start and end positions.
         for match in re.finditer(
             r'^(IPv4 Address)[\s]+:[^\S\n]+((.+?)[^\S\n]-[^\S\n](.+?)'
-            '[^\S\n]\((.+?)\)|.+)$',
+            '[^\\S\n]\\((.+?)\\)|.+)$',
             response,
             re.MULTILINE
         ):
@@ -434,16 +427,6 @@ class NIRWhois:

         return nets

-    def _get_nets_krnic(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('NIRWhois._get_nets_krnic() has been deprecated and will be '
-             'removed. You should now use NIRWhois.get_nets_krnic().')
-        return self.get_nets_krnic(*args, **kwargs)
-
     def get_contact(self, response=None, nir=None, handle=None,
                     retry_count=3, dt_format=None):
         """
@@ -491,16 +474,6 @@ class NIRWhois:
             is_contact=True
         )

-    def _get_contact(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('NIRWhois._get_contact() has been deprecated and will be '
-             'removed. You should now use NIRWhois.get_contact().')
-        return self.get_contact(*args, **kwargs)
-
     def lookup(self, nir=None, inc_raw=False, retry_count=3, response=None,
                field_list=None, is_offline=False):
         """
ipwhois/rdap.py: 174 changed lines
@ -28,6 +28,7 @@ from .utils import ipv4_lstrip_zeros, calculate_cidr, unique_everseen
|
|||||||
from .net import ip_address
|
from .net import ip_address
|
||||||
import logging
|
import logging
|
||||||
import json
|
import json
|
||||||
|
from collections import namedtuple
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
@ -553,7 +554,7 @@ class _RDAPNetwork(_RDAPCommon):
|
|||||||
|
|
||||||
self.vars[v] = self.json[v].strip()
|
self.vars[v] = self.json[v].strip()
|
||||||
|
|
||||||
except (KeyError, ValueError):
|
except (KeyError, ValueError, AttributeError):
|
||||||
|
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@ -688,9 +689,95 @@ class RDAP:
|
|||||||
raise NetError('The provided net parameter is not an instance of '
|
raise NetError('The provided net parameter is not an instance of '
|
||||||
'ipwhois.net.Net')
|
'ipwhois.net.Net')
|
||||||
|
|
||||||
|
def _get_entity(self, entity=None, roles=None, inc_raw=False, retry_count=3,
|
||||||
|
asn_data=None, bootstrap=False, rate_limit_timeout=120):
|
||||||
|
"""
|
||||||
|
The function for retrieving and parsing information for an entity via
|
||||||
|
RDAP (HTTP).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
entity (:obj:`str`): The entity name to lookup.
|
||||||
|
roles (:obj:`dict`): The mapping of entity handles to roles.
|
||||||
|
inc_raw (:obj:`bool`, optional): Whether to include the raw
|
||||||
|
results in the returned dictionary. Defaults to False.
|
||||||
|
retry_count (:obj:`int`): The number of times to retry in case
|
||||||
|
socket errors, timeouts, connection resets, etc. are
|
||||||
|
encountered. Defaults to 3.
|
||||||
|
asn_data (:obj:`dict`): Result from
|
||||||
|
:obj:`ipwhois.asn.IPASN.lookup`. Optional if the bootstrap
|
||||||
|
parameter is True.
|
||||||
|
bootstrap (:obj:`bool`): If True, performs lookups via ARIN
|
||||||
|
bootstrap rather than lookups based on ASN data. Defaults to
|
||||||
|
False.
|
||||||
|
rate_limit_timeout (:obj:`int`): The number of seconds to wait
|
||||||
|
before retrying when a rate limit notice is returned via
|
||||||
|
rdap+json. Defaults to 120.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
namedtuple:
|
||||||
|
|
||||||
|
:result (dict): Consists of the fields listed in the
|
||||||
|
ipwhois.rdap._RDAPEntity dict. The raw result is included for
|
||||||
|
each object if the inc_raw parameter is True.
|
||||||
|
:roles (dict): The mapping of entity handles to roles.
|
||||||
|
"""
|
||||||
|
|
||||||
|
result = {}
|
||||||
|
|
||||||
|
if bootstrap:
|
||||||
|
entity_url = '{0}/entity/{1}'.format(
|
||||||
|
BOOTSTRAP_URL, entity)
|
||||||
|
else:
|
||||||
|
tmp_reg = asn_data['asn_registry']
|
||||||
|
entity_url = RIR_RDAP[tmp_reg]['entity_url']
|
||||||
|
entity_url = str(entity_url).format(entity)
|
||||||
|
|
||||||
|
try:
|
||||||
|
|
||||||
|
# RDAP entity query
|
||||||
|
response = self._net.get_http_json(
|
||||||
|
url=entity_url, retry_count=retry_count,
|
||||||
|
rate_limit_timeout=rate_limit_timeout
|
||||||
|
)
|
||||||
|
|
||||||
|
# Parse the entity
|
||||||
|
result_ent = _RDAPEntity(response)
|
||||||
|
result_ent.parse()
|
||||||
|
result = result_ent.vars
|
||||||
|
|
||||||
|
result['roles'] = None
|
||||||
|
try:
|
||||||
|
|
||||||
|
result['roles'] = roles[entity]
|
||||||
|
|
||||||
|
except KeyError: # pragma: no cover
|
||||||
|
|
||||||
|
pass
|
||||||
|
|
||||||
|
try:
|
||||||
|
|
||||||
|
for tmp in response['entities']:
|
||||||
|
|
||||||
|
if tmp['handle'] not in roles:
|
||||||
|
roles[tmp['handle']] = tmp['roles']
|
||||||
|
|
||||||
|
except (IndexError, KeyError):
|
||||||
|
|
||||||
|
pass
|
||||||
|
|
||||||
|
if inc_raw:
|
||||||
|
result['raw'] = response
|
||||||
|
|
||||||
|
except (HTTPLookupError, InvalidEntityObject):
|
||||||
|
|
||||||
|
pass
|
||||||
|
|
||||||
|
return_tuple = namedtuple('return_tuple', ['result', 'roles'])
|
||||||
|
return return_tuple(result, roles)
|
||||||
|
|
||||||
def lookup(self, inc_raw=False, retry_count=3, asn_data=None, depth=0,
|
def lookup(self, inc_raw=False, retry_count=3, asn_data=None, depth=0,
|
||||||
excluded_entities=None, response=None, bootstrap=False,
|
excluded_entities=None, response=None, bootstrap=False,
|
||||||
rate_limit_timeout=120):
|
rate_limit_timeout=120, root_ent_check=True):
|
||||||
"""
|
"""
|
||||||
The function for retrieving and parsing information for an IP
|
The function for retrieving and parsing information for an IP
|
||||||
address via RDAP (HTTP).
|
address via RDAP (HTTP).
|
||||||
@ -716,6 +803,9 @@ class RDAP:
|
|||||||
rate_limit_timeout (:obj:`int`): The number of seconds to wait
|
rate_limit_timeout (:obj:`int`): The number of seconds to wait
|
||||||
before retrying when a rate limit notice is returned via
|
before retrying when a rate limit notice is returned via
|
||||||
rdap+json. Defaults to 120.
|
rdap+json. Defaults to 120.
|
||||||
|
root_ent_check (:obj:`bool`): If True, will perform
|
||||||
|
additional RDAP HTTP queries for missing entity data at the
|
||||||
|
root level. Defaults to True.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
dict: The IP RDAP lookup results
|
dict: The IP RDAP lookup results
|
||||||
@ -792,10 +882,23 @@ class RDAP:
|
|||||||
if ent['handle'] not in [results['entities'],
|
if ent['handle'] not in [results['entities'],
|
||||||
excluded_entities]:
|
excluded_entities]:
|
||||||
|
|
||||||
result_ent = _RDAPEntity(ent)
|
if 'vcardArray' not in ent and root_ent_check:
|
||||||
result_ent.parse()
|
entity_object, roles = self._get_entity(
|
||||||
|
entity=ent['handle'],
|
||||||
|
roles=roles,
|
||||||
|
inc_raw=inc_raw,
|
||||||
|
retry_count=retry_count,
|
||||||
|
asn_data=asn_data,
|
||||||
|
bootstrap=bootstrap,
|
||||||
|
rate_limit_timeout=rate_limit_timeout
|
||||||
|
)
|
||||||
|
results['objects'][ent['handle']] = entity_object
|
||||||
|
|
||||||
results['objects'][ent['handle']] = result_ent.vars
|
else:
|
||||||
|
result_ent = _RDAPEntity(ent)
|
||||||
|
result_ent.parse()
|
||||||
|
|
||||||
|
results['objects'][ent['handle']] = result_ent.vars
|
||||||
|
|
||||||
results['entities'].append(ent['handle'])
|
results['entities'].append(ent['handle'])
|
||||||
|
|
||||||
@ -835,57 +938,18 @@ class RDAP:
|
|||||||
list(new_objects.keys()) +
|
list(new_objects.keys()) +
|
||||||
excluded_entities):
|
excluded_entities):
|
||||||
|
|
||||||
if bootstrap:
|
entity_object, roles = self._get_entity(
|
||||||
entity_url = '{0}/entity/{1}'.format(
|
entity=ent,
|
||||||
BOOTSTRAP_URL, ent)
|
roles=roles,
|
||||||
else:
|
inc_raw=inc_raw,
|
||||||
tmp_reg = asn_data['asn_registry']
|
retry_count=retry_count,
|
||||||
entity_url = RIR_RDAP[tmp_reg]['entity_url']
|
asn_data=asn_data,
|
||||||
entity_url = str(entity_url).format(ent)
|
bootstrap=bootstrap,
|
||||||
|
rate_limit_timeout=rate_limit_timeout
|
||||||
|
)
|
||||||
|
new_objects[ent] = entity_object
|
||||||
|
|
||||||
try:
|
except (KeyError, TypeError):
|
||||||
|
|
||||||
# RDAP entity query
|
|
||||||
response = self._net.get_http_json(
|
|
||||||
url=entity_url, retry_count=retry_count,
|
|
||||||
rate_limit_timeout=rate_limit_timeout
|
|
||||||
)
|
|
||||||
|
|
||||||
# Parse the entity
|
|
||||||
result_ent = _RDAPEntity(response)
|
|
||||||
result_ent.parse()
|
|
||||||
new_objects[ent] = result_ent.vars
|
|
||||||
|
|
||||||
new_objects[ent]['roles'] = None
|
|
||||||
try:
|
|
||||||
|
|
||||||
new_objects[ent]['roles'] = roles[ent]
|
|
||||||
|
|
||||||
except KeyError: # pragma: no cover
|
|
||||||
|
|
||||||
pass
|
|
||||||
|
|
||||||
try:
|
|
||||||
|
|
||||||
for tmp in response['entities']:
|
|
||||||
|
|
||||||
if tmp['handle'] not in roles:
|
|
||||||
|
|
||||||
roles[tmp['handle']] = tmp['roles']
|
|
||||||
|
|
||||||
except (IndexError, KeyError):
|
|
||||||
|
|
||||||
pass
|
|
||||||
|
|
||||||
if inc_raw:
|
|
||||||
|
|
||||||
new_objects[ent]['raw'] = response
|
|
||||||
|
|
||||||
except (HTTPLookupError, InvalidEntityObject):
|
|
||||||
|
|
||||||
pass
|
|
||||||
|
|
||||||
except TypeError:
|
|
||||||
|
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
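A hedged sketch of how the new root_ent_check switch is exercised, mirroring the test changes later in this commit. The address is illustrative, the calls go out to the network, and asn_data is assumed to come from a prior IPASN lookup.

    from ipwhois.net import Net
    from ipwhois.asn import IPASN
    from ipwhois.rdap import RDAP

    net = Net('74.125.225.229')
    asn_data = IPASN(net).lookup()

    # root_ent_check defaults to True, letting lookup() call the new
    # _get_entity() helper for root-level entities that are missing vcardArray
    # data; passing False skips those extra RDAP HTTP queries.
    results = RDAP(net).lookup(asn_data=asn_data, depth=0, root_ent_check=False)
    print(results['entities'])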
@@ -166,17 +166,6 @@ group.add_argument(
     help='The number of times to retry in case socket errors, timeouts, '
          'connection resets, etc. are encountered.'
 )
-group.add_argument(
-    '--asn_alts',
-    type=str,
-    nargs=1,
-    default='whois,http',
-    metavar='"ASN_ALTS"',
-    help='A comma delimited list of additional lookup types to attempt if the '
-         'ASN dns lookup fails. Allow permutations must be enabled. '
-         'Defaults to all: "whois,http" *WARNING* deprecated in '
-         'favor of new argument asn_methods.'
-)
 group.add_argument(
     '--asn_methods',
     type=str,

@@ -1456,9 +1445,6 @@ if script_args.addr:
         field_list=script_args.field_list.split(',') if (
             script_args.field_list and
             len(script_args.field_list) > 0) else None,
-        asn_alts=script_args.asn_alts.split(',') if (
-            script_args.asn_alts and not script_args.asn_methods and
-            len(script_args.asn_alts) > 0) else None,
         extra_org_map=script_args.extra_org_map,
         inc_nir=(not script_args.exclude_nir),
         nir_field_list=script_args.nir_field_list.split(',') if (

@@ -1484,9 +1470,6 @@ if script_args.addr:
             len(script_args.excluded_entities) > 0) else None,
         bootstrap=script_args.bootstrap,
         rate_limit_timeout=script_args.rate_limit_timeout,
-        asn_alts=script_args.asn_alts.split(',') if (
-            script_args.asn_alts and not script_args.asn_methods and
-            len(script_args.asn_alts) > 0) else None,
         extra_org_map=script_args.extra_org_map,
         inc_nir=(not script_args.exclude_nir),
         nir_field_list=script_args.nir_field_list.split(',') if (
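Since --asn_alts is dropped here in favor of --asn_methods, a minimal sketch of the equivalent library call. The address and the method order are only examples, and the lookup performs live DNS/whois/HTTP queries.

    from ipwhois.net import Net
    from ipwhois.asn import IPASN

    # asn_methods supersedes the deprecated asn_alts/allow_permutations pair;
    # the listed methods are attempted in order until one succeeds.
    ipasn = IPASN(Net('74.125.225.229'))
    results = ipasn.lookup(asn_methods=['dns', 'whois', 'http'])
    print(results['asn'], results['asn_registry'])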
@@ -28,8 +28,9 @@ import argparse
 from collections import OrderedDict
 import json
 from ipwhois.utils import (ipv4_lstrip_zeros, calculate_cidr, get_countries,
-                           ipv4_is_defined, ipv6_is_defined, unique_everseen,
-                           unique_addresses)
+                           ipv4_is_defined, ipv6_is_defined,
+                           ipv4_generate_random, ipv6_generate_random,
+                           unique_everseen, unique_addresses)

 # CLI ANSI rendering
 ANSI = {

@@ -86,6 +87,22 @@ parser.add_argument(
     metavar='"IP ADDRESS"',
     help='Check if an IPv6 address is defined (in a reserved address range).'
 )
+parser.add_argument(
+    '--ipv4_generate_random',
+    type=int,
+    nargs=1,
+    metavar='TOTAL',
+    help='Generate random, unique IPv4 addresses that are not defined (can be '
+         'looked up using ipwhois).'
+)
+parser.add_argument(
+    '--ipv6_generate_random',
+    type=int,
+    nargs=1,
+    metavar='TOTAL',
+    help='Generate random, unique IPv6 addresses that are not defined (can be '
+         'looked up using ipwhois).'
+)
 parser.add_argument(
     '--unique_everseen',
     type=json.loads,

@@ -224,6 +241,34 @@ elif script_args.ipv6_is_defined:

         print('{0}Error{1}: {2}'.format(ANSI['red'], ANSI['end'], str(e)))

+elif script_args.ipv4_generate_random:
+
+    try:
+
+        result = ipv4_generate_random(total=script_args.ipv4_generate_random[0])
+
+        for random_ip in result:
+
+            print(random_ip)
+
+    except Exception as e:
+
+        print('{0}Error{1}: {2}'.format(ANSI['red'], ANSI['end'], str(e)))
+
+elif script_args.ipv6_generate_random:
+
+    try:
+
+        result = ipv6_generate_random(total=script_args.ipv6_generate_random[0])
+
+        for random_ip in result:
+
+            print(random_ip)
+
+    except Exception as e:
+
+        print('{0}Error{1}: {2}'.format(ANSI['red'], ANSI['end'], str(e)))
+
 elif script_args.unique_everseen:

     try:
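The new CLI flags wrap the ipv4_generate_random()/ipv6_generate_random() helpers imported above; a minimal sketch of calling them directly (the totals are arbitrary):

    from ipwhois.utils import ipv4_generate_random, ipv6_generate_random

    # Both helpers are generators yielding unique, non-reserved addresses,
    # which is why the CLI branches above iterate and print one per line.
    for ip in ipv4_generate_random(total=3):
        print(ip)

    for ip in ipv6_generate_random(total=3):
        print(ip)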
@@ -96,4 +96,5 @@ class TestASNOrigin(TestCommon):

         net = Net(address='74.125.225.229')
         asnorigin = ASNOrigin(net)
-        asnorigin.lookup(asn='15169', asn_methods=['whois', 'http'])
+        asnorigin.lookup(asn='15169', asn_methods=['whois'])
+        asnorigin.lookup(asn='15169', asn_methods=['http'])

@@ -67,8 +67,23 @@ class TestExperimental(TestCommon):
             '115.1.2.3'  # KRNIC
         ]

+        expected_stats = {'ip_input_total': 12, 'ip_unique_total': 12,
+                          'ip_lookup_total': 12, 'ip_failed_total': 0,
+                          'lacnic': {'failed': [], 'rate_limited': [], 'total': 2},
+                          'ripencc': {'failed': [], 'rate_limited': [], 'total': 2},
+                          'apnic': {'failed': [], 'rate_limited': [], 'total': 4},
+                          'afrinic': {'failed': [], 'rate_limited': [], 'total': 2},
+                          'arin': {'failed': [], 'rate_limited': [], 'total': 2},
+                          'unallocated_addresses': []}
+
         try:
-            self.assertIsInstance(bulk_lookup_rdap(addresses=ips), tuple)
+            result = bulk_lookup_rdap(addresses=ips)
+            self.assertIsInstance(result, tuple)
+
+            results, stats = result
+            self.assertEqual(stats, expected_stats)
+            self.assertEqual(len(results), 12)

         except ASNLookupError:
             pass
         except AssertionError as e:

@@ -174,8 +174,3 @@ class TestIPWhois(TestCommon):
         result = IPWhois(address='74.125.225.229', timeout=0,
                          proxy_opener=opener)
         self.assertRaises(ASNRegistryError, result.lookup_rdap)
-
-        log.debug('Testing allow_permutations')
-        result = IPWhois(address='74.125.225.229', timeout=0,
-                         allow_permutations=False)
-        self.assertRaises(ASNRegistryError, result.lookup_rdap)
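The updated TestExperimental case unpacks the (results, stats) tuple; a hedged sketch of the same pattern outside the test harness (the addresses are placeholders and the call performs live RDAP lookups):

    from ipwhois.experimental import bulk_lookup_rdap

    ips = ['74.125.225.229', '2001:4860:4860::8888']

    # bulk_lookup_rdap returns a two-item tuple: per-address RDAP results plus
    # the stats dictionary compared against expected_stats in the test above.
    results, stats = bulk_lookup_rdap(addresses=ips)
    print(stats['ip_lookup_total'], stats['ip_failed_total'])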
@@ -136,8 +136,29 @@ class TestIPASN(TestCommon):
             self.fail('Unexpected exception raised: {0}'.format(e))

     def test_lookup(self):
-        # TODO: need to modify asn.json for this.
-        return NotImplemented
+        data_dir = path.dirname(__file__)
+
+        with io.open(str(data_dir) + '/asn.json', 'r') as \
+                data_file:
+            data = json.load(data_file)
+
+        for key, val in data.items():
+
+            log.debug('Testing: {0}'.format(key))
+            net = Net(key)
+            obj = IPASN(net)
+
+            try:
+
+                self.assertIsInstance(obj.lookup(), dict)
+
+            except AssertionError as e:
+
+                raise e
+
+            except Exception as e:
+
+                self.fail('Unexpected exception raised: {0}'.format(e))


 class TestASNOrigin(TestCommon):

@@ -243,7 +264,10 @@ class TestASNOrigin(TestCommon):
         obj.get_nets_radb(multi_net_response)

         self.assertEqual(obj.get_nets_radb(multi_net_response, is_http=True),
-                         [])
+                         [{'cidr': '66.249.64.0/20', 'description': None, 'maintainer': None, 'updated': None,
+                           'source': None, 'start': 2, 'end': 29},
+                          {'cidr': '66.249.80.0/20', 'description': None, 'maintainer': None, 'updated': None,
+                           'source': None, 'start': 175, 'end': 202}])

         net = Net('2001:43f8:7b0::')
         obj = ASNOrigin(net)
@@ -45,6 +45,11 @@ class TestNIRWhois(TestCommon):
                     inc_raw=True),
                     dict)

+                self.assertIsInstance(obj.lookup(
+                    nir=val['nir'],
+                    response=val['response']),
+                    dict)
+
             except AssertionError as e:

                 raise e
@@ -82,7 +82,8 @@ class TestRDAP(TestCommon):
                 'endAddress': '74.125.225.229'
             },
             asn_data=val['asn_data'],
-            depth=0), dict)
+            depth=0,
+            root_ent_check=False), dict)

         log.debug('Testing rdap.lookup entitiy checks')
         net = Net('74.125.225.229')

@@ -99,7 +100,8 @@ class TestRDAP(TestCommon):
                 'entities': entity
             },
             asn_data=val['asn_data'],
-            depth=1), dict)
+            depth=0,
+            root_ent_check=False), dict)

         self.assertIsInstance(obj.lookup(response={
             'handle': 'test',

@@ -109,9 +111,10 @@ class TestRDAP(TestCommon):
             'entities': entity
             },
             asn_data=val['asn_data'],
-            depth=1,
+            depth=0,
             bootstrap=True,
-            inc_raw=True), dict)
+            inc_raw=True,
+            root_ent_check=False), dict)

         # No sub entities. This is for coverage, but won't error out.
         entity = [{'handle': 'test', 'roles': [

@@ -125,7 +128,8 @@ class TestRDAP(TestCommon):
             'entities': entity
             },
             asn_data=val['asn_data'],
-            depth=1), dict)
+            depth=0,
+            root_ent_check=False), dict)


 class TestRDAPContact(TestCommon):
@@ -64,35 +64,35 @@ class TestFunctions(TestCommon):
         self.assertRaises(ValueError, ipv4_is_defined, '192.168.0.256')
         self.assertRaises(AddressValueError, ipv4_is_defined, 1234)

-        self.assertEquals(ipv4_is_defined('74.125.225.229'), (False, '', ''))
+        self.assertEqual(ipv4_is_defined('74.125.225.229'), (False, '', ''))

-        self.assertEquals(ipv4_is_defined('0.0.0.0'),
+        self.assertEqual(ipv4_is_defined('0.0.0.0'),
                          (True, 'This Network', 'RFC 1122, Section 3.2.1.3'))
-        self.assertEquals(ipv4_is_defined('127.0.0.0'),
+        self.assertEqual(ipv4_is_defined('127.0.0.0'),
                          (True, 'Loopback', 'RFC 1122, Section 3.2.1.3'))
-        self.assertEquals(ipv4_is_defined('169.254.0.0'),
+        self.assertEqual(ipv4_is_defined('169.254.0.0'),
                          (True, 'Link Local', 'RFC 3927'))
-        self.assertEquals(ipv4_is_defined('192.0.0.0'),
+        self.assertEqual(ipv4_is_defined('192.0.0.0'),
                          (True, 'IETF Protocol Assignments', 'RFC 5736'))
-        self.assertEquals(ipv4_is_defined('192.0.2.0'),
+        self.assertEqual(ipv4_is_defined('192.0.2.0'),
                          (True, 'TEST-NET-1', 'RFC 5737'))
-        self.assertEquals(ipv4_is_defined('192.88.99.0'),
+        self.assertEqual(ipv4_is_defined('192.88.99.0'),
                          (True, '6to4 Relay Anycast', 'RFC 3068'))
-        self.assertEquals(ipv4_is_defined('198.18.0.0'),
+        self.assertEqual(ipv4_is_defined('198.18.0.0'),
                          (True,
                           'Network Interconnect Device Benchmark Testing',
                           'RFC 2544'))
-        self.assertEquals(ipv4_is_defined('198.51.100.0'),
+        self.assertEqual(ipv4_is_defined('198.51.100.0'),
                          (True, 'TEST-NET-2', 'RFC 5737'))
-        self.assertEquals(ipv4_is_defined('203.0.113.0'),
+        self.assertEqual(ipv4_is_defined('203.0.113.0'),
                          (True, 'TEST-NET-3', 'RFC 5737'))
-        self.assertEquals(ipv4_is_defined('224.0.0.0'),
+        self.assertEqual(ipv4_is_defined('224.0.0.0'),
                          (True, 'Multicast', 'RFC 3171'))
-        self.assertEquals(ipv4_is_defined('255.255.255.255'),
+        self.assertEqual(ipv4_is_defined('255.255.255.255'),
                          (True, 'Limited Broadcast', 'RFC 919, Section 7'))
-        self.assertEquals(ipv4_is_defined('192.168.0.1'),
+        self.assertEqual(ipv4_is_defined('192.168.0.1'),
                          (True, 'Private-Use Networks', 'RFC 1918'))
-        self.assertEquals(ipv4_is_defined('198.97.38.0'),
+        self.assertEqual(ipv4_is_defined('198.97.38.0'),
                          (True, 'IANA Reserved', ''))

     def test_ipv6_is_defined(self):

@@ -105,31 +105,31 @@ class TestFunctions(TestCommon):
                           '2001:4860:4860::8888::1234')
         self.assertRaises(AddressValueError, ipv6_is_defined, 1234)

-        self.assertEquals(ipv6_is_defined('2001:4860:4860::8888'),
+        self.assertEqual(ipv6_is_defined('2001:4860:4860::8888'),
                          (False, '', ''))

-        self.assertEquals(ipv6_is_defined('ff00::'),
+        self.assertEqual(ipv6_is_defined('ff00::'),
                          (True, 'Multicast', 'RFC 4291, Section 2.7'))
-        self.assertEquals(ipv6_is_defined('0:0:0:0:0:0:0:0'),
+        self.assertEqual(ipv6_is_defined('0:0:0:0:0:0:0:0'),
                          (True, 'Unspecified', 'RFC 4291, Section 2.5.2'))
-        self.assertEquals(ipv6_is_defined('0:0:0:0:0:0:0:1'),
+        self.assertEqual(ipv6_is_defined('0:0:0:0:0:0:0:1'),
                          (True, 'Loopback', 'RFC 4291, Section 2.5.3'))
-        self.assertEquals(ipv6_is_defined('100::'),
+        self.assertEqual(ipv6_is_defined('100::'),
                          (True, 'Reserved', 'RFC 4291'))
-        self.assertEquals(ipv6_is_defined('fe80::'),
+        self.assertEqual(ipv6_is_defined('fe80::'),
                          (True, 'Link-Local', 'RFC 4291, Section 2.5.6'))
-        self.assertEquals(ipv6_is_defined('fec0::'),
+        self.assertEqual(ipv6_is_defined('fec0::'),
                          (True, 'Site-Local', 'RFC 4291, Section 2.5.7'))
-        self.assertEquals(ipv6_is_defined('fc00::'),
+        self.assertEqual(ipv6_is_defined('fc00::'),
                          (True, 'Unique Local Unicast', 'RFC 4193'))

     def test_unique_everseen(self):

         input_list = ['b', 'a', 'c', 'a', 'b', 'x', 'a']
-        self.assertEquals(list(unique_everseen(input_list)),
+        self.assertEqual(list(unique_everseen(input_list)),
                          ['b', 'a', 'c', 'x'])

-        self.assertEquals(list(unique_everseen(input_list, str.lower)),
+        self.assertEqual(list(unique_everseen(input_list, str.lower)),
                          ['b', 'a', 'c', 'x'])

     def test_unique_addresses(self):

@@ -150,14 +150,14 @@ class TestFunctions(TestCommon):
             '2001:4860:4860::8888': {'count': 2, 'ports': {'443': 1}}
         }

-        self.assertEquals(unique_addresses(input_data), expected_result)
+        self.assertEqual(unique_addresses(input_data), expected_result)

         data_dir = path.dirname(__file__)
         fp = str(data_dir) + '/rdap.json'

         # Expected result is different on 2.x vs 3.x, possible issues with
         # ipaddr vs ipaddress output. Investigation pending...
-        if sys.version_info >= (3, 3):
+        if (3, 3) <= sys.version_info < (3, 8):

             fp_expected_result = {
                 '74.125.225.0/24': {'count': 1, 'ports': {}},

@@ -203,8 +203,60 @@ class TestFunctions(TestCommon):
                 '210.0.0.0/8': {'count': 1, 'ports': {}}
             }

-            self.assertEquals(unique_addresses(file_path=fp),
+            self.assertEqual(unique_addresses(file_path=fp),
                              fp_expected_result)

+        elif sys.version_info >= (3, 8):
+
+            fp_expected_result = {
+                '196.0.0.0': {'count': 1, 'ports': {}},
+                '196.11.239.0': {'count': 2, 'ports': {}},
+                '196.11.240.0/23': {'count': 1, 'ports': {}},
+                '196.11.240.215': {'count': 2, 'ports': {}},
+                '196.11.246.255': {'count': 2, 'ports': {}},
+                '196.255.255.255': {'count': 1, 'ports': {}},
+                '200.57.128.0/20': {'count': 1, 'ports': {}},
+                '200.57.141.161': {'count': 7, 'ports': {}},
+                '2001:200::/23': {'count': 2, 'ports': {}},
+                '2001:240:10c:1::ca20:9d1d':
+                    {'count': 2, 'ports': {}},
+                '2001:240::': {'count': 1, 'ports': {}},
+                '2001:240::/32': {'count': 6, 'ports': {}},
+                '2001:240:ffff:ffff:ffff:ffff:ffff:ffff':
+                    {'count': 1, 'ports': {}},
+                '2001:4200::/23': {'count': 1, 'ports': {}},
+                '2001:43f8:7b0::': {'count': 3, 'ports': {}},
+                '2001:43f8:7b0:ffff:ffff:ffff:ffff:ffff':
+                    {'count': 1, 'ports': {}},
+                '2001:4860:4860::8888': {'count': 10, 'ports': {}},
+                '2001:4860::': {'count': 2, 'ports': {}},
+                '2001:4860::/32': {'count': 1, 'ports': {}},
+                '2001:4860:ffff:ffff:ffff:ffff:ffff:ffff':
+                    {'count': 1, 'ports': {}},
+                '210.0.0.0': {'count': 1, 'ports': {}},
+                '210.0.0.0/8': {'count': 1, 'ports': {}},
+                '210.107.0.0': {'count': 2, 'ports': {}},
+                '210.107.0.0/17': {'count': 6, 'ports': {}},
+                '210.107.127.255': {'count': 2, 'ports': {}},
+                '210.107.73.73': {'count': 2, 'ports': {}},
+                '210.255.255.255': {'count': 1, 'ports': {}},
+                '2801:10:c000::': {'count': 7, 'ports': {}},
+                '2a00:2380::/25': {'count': 1, 'ports': {}},
+                '2a00:2381:ffff::1': {'count': 4, 'ports': {}},
+                '62.239.0.0/16': {'count': 1, 'ports': {}},
+                '62.239.237.0': {'count': 1, 'ports': {}},
+                '62.239.237.0/32': {'count': 1, 'ports': {}},
+                '62.239.237.1': {'count': 4, 'ports': {}},
+                '62.239.237.255': {'count': 1, 'ports': {}},
+                '62.239.237.255/32': {'count': 1, 'ports': {}},
+                '74.125.0.0': {'count': 2, 'ports': {}},
+                '74.125.225.0/24': {'count': 1, 'ports': {}},
+                '74.125.225.229': {'count': 8, 'ports': {}},
+                '74.125.255.255': {'count': 1, 'ports': {}}
+            }
+
+            self.assertEqual(unique_addresses(file_path=fp),
+                             fp_expected_result)
+
         else:

@@ -261,8 +313,8 @@ class TestFunctions(TestCommon):

     def test_ipv4_generate_random(self):

-        self.assertEquals(len(list(ipv4_generate_random(1000))), 1000)
+        self.assertEqual(len(list(ipv4_generate_random(1000))), 1000)

     def test_ipv6_generate_random(self):

-        self.assertEquals(len(list(ipv6_generate_random(1000))), 1000)
+        self.assertEqual(len(list(ipv6_generate_random(1000))), 1000)
@@ -87,30 +87,30 @@ IETF_RFC_REFERENCES = {
 IP_REGEX = (
     r'(?P<ip>'
     # IPv4
-    '(((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.)){3}'
-    '(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)'
+    r'(((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.)){3}'
+    r'(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)'
     # IPv6
-    '|\[?(((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:)'
-    '{6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|'
-    '2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]'
-    '{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d'
-    '\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|'
-    '((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|'
-    '2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]'
-    '{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)'
-    '(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(('
-    '(:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1'
-    '\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(('
-    '[0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4})'
-    '{0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]'
-    '?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:(('
-    '25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})'
-    ')|:)))(%.+)?))\]?'
+    r'|\[?(((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:)'
+    r'{6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|'
+    r'2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]'
+    r'{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d'
+    r'\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|'
+    r'((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|'
+    r'2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]'
+    r'{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)'
+    r'(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(('
+    r'(:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1'
+    r'\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(('
+    r'[0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4})'
+    r'{0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]'
+    r'?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:(('
+    r'25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})'
+    r')|:)))(%.+)?))\]?'
     # Optional IPv4 Port
-    '((:(6553[0-5]|655[0-2]\d|65[0-4]\d{2}|6[0-4]\d{3}|[1-5]\d{4}|[1-9]\d{0,3}'
+    r'((:(6553[0-5]|655[0-2]\d|65[0-4]\d{2}|6[0-4]\d{3}|[1-5]\d{4}|[1-9]\d{0,3}'
     # Optional CIDR block
-    '))|(\/(?:[012]\d?|3[012]?|[4-9])))?'
-    ')'
+    r'))|(\/(?:[012]\d?|3[012]?|[4-9])))?'
+    r')'
 )

@@ -212,6 +212,7 @@ def get_countries(is_legacy_xml=False):

         # Read the file.
         data = f.read()
+        f.close()

         # Check if there is data.
         if not data:  # pragma: no cover

@@ -258,6 +259,8 @@ def get_countries(is_legacy_xml=False):
             # Add to the countries dictionary.
             countries[code] = name

+        f.close()
+
     return countries

@@ -506,6 +509,7 @@ def unique_addresses(data=None, file_path=None):

             # Read the file.
             file_data = f.read()
+            f.close()

         pattern = re.compile(
             str(IP_REGEX),
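The IP_REGEX conversion to raw strings (and the doubled backslashes elsewhere in this commit) avoids Python's invalid-escape-sequence warnings; a small illustration of the two equivalent spellings, with an arbitrary test string:

    import re

    # '\S' and '\(' are not valid Python string escapes, so plain literals
    # like '[^\S\n]\((.+?)\)' compile with an "invalid escape sequence"
    # warning on newer interpreters. Doubling the backslashes or using a raw
    # string keeps the same regular expression without the warning.
    escaped = re.compile('[^\\S\n]\\((.+?)\\)')   # doubled backslashes
    raw = re.compile(r'[^\S\n]\((.+?)\)')         # raw string literal
    print(escaped.search('range (10.0.0.0)'))
    print(raw.search('range (10.0.0.0)'))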
@@ -67,7 +67,7 @@ RIR_WHOIS = {
         'name': r'(NetName):[^\S\n]+(?P<val>.+?)\n',
         'handle': r'(NetHandle):[^\S\n]+(?P<val>.+?)\n',
         'description': r'(OrgName|CustName):[^\S\n]+(?P<val>.+?)'
-                       '(?=(\n\S):?)',
+                       '(?=(\n\\S):?)',
         'country': r'(Country):[^\S\n]+(?P<val>.+?)\n',
         'state': r'(StateProv):[^\S\n]+(?P<val>.+?)\n',
         'city': r'(City):[^\S\n]+(?P<val>.+?)\n',

@@ -75,7 +75,7 @@ RIR_WHOIS = {
         'postal_code': r'(PostalCode):[^\S\n]+(?P<val>.+?)\n',
         'emails': (
             r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
-            '[^\S\n]+.*?)*?\n'
+            '[^\\S\n]+.*?)*?\n'
         ),
         'created': r'(RegDate):[^\S\n]+(?P<val>.+?)\n',
         'updated': r'(Updated):[^\S\n]+(?P<val>.+?)\n',

@@ -92,7 +92,7 @@ RIR_WHOIS = {
         'address': r'(address):[^\S\n]+(?P<val>.+?)(?=(\n\S):?)',
         'emails': (
             r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
-            '[^\S\n]+.*?)*?\n'
+            '[^\\S\n]+.*?)*?\n'
         ),
         'created': (
             r'(created):[^\S\n]+(?P<val>[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]'

@@ -115,7 +115,7 @@ RIR_WHOIS = {
         'address': r'(address):[^\S\n]+(?P<val>.+?)(?=(\n\S):?)',
         'emails': (
             r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
-            '[^\S\n]+.*?)*?\n'
+            '[^\\S\n]+.*?)*?\n'
         ),
         'updated': r'(changed):[^\S\n]+.*(?P<val>[0-9]{8}).*?\n'
     },

@@ -129,7 +129,7 @@ RIR_WHOIS = {
         'country': r'(country):[^\S\n]+(?P<val>.+?)\n',
         'emails': (
             r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
-            '[^\S\n]+.*?)*?\n'
+            '[^\\S\n]+.*?)*?\n'
         ),
         'created': r'(created):[^\S\n]+(?P<val>[0-9]{8}).*?\n',
         'updated': r'(changed):[^\S\n]+(?P<val>[0-9]{8}).*?\n'

@@ -146,7 +146,7 @@ RIR_WHOIS = {
         'address': r'(address):[^\S\n]+(?P<val>.+?)(?=(\n\S):?)',
         'emails': (
             r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
-            '[^\S\n]+.*?)*?\n'
+            '[^\\S\n]+.*?)*?\n'
         ),
     }
 }

@@ -166,7 +166,7 @@ RWHOIS = {
         'postal_code': r'(network:Postal-Code):(?P<val>.+?)\n',
         'emails': (
             r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
-            '[^\S\n]+.*?)*?\n'
+            '[^\\S\n]+.*?)*?\n'
         ),
         'created': r'(network:Created):(?P<val>.+?)\n',
         'updated': r'(network:Updated):(?P<val>.+?)\n'

@@ -324,16 +324,6 @@ class Whois:

         return ret

-    def _parse_fields(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('Whois._parse_fields() has been deprecated and will be '
-             'removed. You should now use Whois.parse_fields().')
-        return self.parse_fields(*args, **kwargs)
-
     def get_nets_arin(self, response):
         """
         The function for parsing network blocks from ARIN whois data.

@@ -415,16 +405,6 @@ class Whois:

         return nets

-    def _get_nets_arin(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('Whois._get_nets_arin() has been deprecated and will be '
-             'removed. You should now use Whois.get_nets_arin().')
-        return self.get_nets_arin(*args, **kwargs)
-
     def get_nets_lacnic(self, response):
         """
         The function for parsing network blocks from LACNIC whois data.

@@ -474,7 +454,7 @@ class Whois:
             for addr in net_range.split(', '):

                 count = addr.count('.')
-                if count is not 0 and count < 4:
+                if count != 0 and count < 4:

                     addr_split = addr.strip().split('/')
                     for i in range(count + 1, 4):

@@ -495,16 +475,6 @@ class Whois:

         return nets

-    def _get_nets_lacnic(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('Whois._get_nets_lacnic() has been deprecated and will be '
-             'removed. You should now use Whois.get_nets_lacnic().')
-        return self.get_nets_lacnic(*args, **kwargs)
-
     def get_nets_other(self, response):
         """
         The function for parsing network blocks from generic whois data.

@@ -577,16 +547,6 @@ class Whois:

         return nets

-    def _get_nets_other(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('Whois._get_nets_other() has been deprecated and will be '
-             'removed. You should now use Whois.get_nets_other().')
-        return self.get_nets_other(*args, **kwargs)
-
     def lookup(self, inc_raw=False, retry_count=3, response=None,
                get_referral=False, extra_blacklist=None,
                ignore_referral_errors=False, asn_data=None,

@@ -667,7 +627,7 @@ class Whois:

         # Only fetch the response if we haven't already.
         if response is None or (not is_offline and
-                                asn_data['asn_registry'] is not 'arin'):
+                                asn_data['asn_registry'] != 'arin'):

             log.debug('Response not given, perform WHOIS lookup for {0}'
                       .format(self._net.address_str))
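The count is not 0 and is not 'arin' fixes above replace identity checks with value comparisons; a tiny illustration of why that matters:

    # CPython 3.8+ emits "SyntaxWarning: 'is not' with a literal" for code
    # like `count is not 0`, because `is` tests object identity rather than
    # equality and only works by accident for cached objects.
    registry = ''.join(['ar', 'in'])   # equal to 'arin', but a distinct object

    print(registry == 'arin')   # True: value comparison, what the lookup needs
    print(registry is 'arin')   # typically False, and itself warns on 3.8+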
@@ -1,2 +1,2 @@
-dnspython
-ipaddr
+dnspython<=2.0.0
+ipaddr==2.2.0

@@ -1 +1 @@
-dnspython
+dnspython<=2.0.0
setup.py (3)

@@ -58,6 +58,7 @@ CLASSIFIERS = [
     'Programming Language :: Python :: 3.5',
     'Programming Language :: Python :: 3.6',
     'Programming Language :: Python :: 3.7',
+    'Programming Language :: Python :: 3.8',
     'Topic :: Internet',
     'Topic :: Software Development',
 ]

@@ -66,7 +67,7 @@ PACKAGES = ['ipwhois']

 PACKAGE_DATA = {'ipwhois': ['data/*.xml', 'data/*.csv']}

-INSTALL_REQUIRES = ['dnspython', 'ipaddr;python_version<"3.3"']
+INSTALL_REQUIRES = ['dnspython<=2.0.0', 'ipaddr==2.2.0;python_version<"3.3"']

 setup(
     name=NAME,