Updated from upstream

Haseeb Majid 2019-05-17 14:04:21 +01:00
commit fb5a541e08
33 changed files with 248 additions and 266 deletions

View File

@ -1,19 +1,17 @@
language: python
sudo: required
dist: xenial
python:
- 2.6
- 2.7
- 3.3
- 3.4
- 3.5
- 3.6
- 3.7
install:
- pip install --upgrade setuptools
- pip install --upgrade pip
- if [[ $TRAVIS_PYTHON_VERSION == 2* ]]; then pip install -r requirements/python2.txt; fi
- if [[ $TRAVIS_PYTHON_VERSION == 3.3* ]]; then pip install -r requirements/python3.txt; fi
- if [[ $TRAVIS_PYTHON_VERSION == 3.4* ]]; then pip install -r requirements/python3.txt; fi
- if [[ $TRAVIS_PYTHON_VERSION == 3.5* ]]; then pip install -r requirements/python3.txt; fi
- if [[ $TRAVIS_PYTHON_VERSION == 3.6* ]]; then pip install -r requirements/python3.txt; fi
- if [[ $TRAVIS_PYTHON_VERSION == 3* ]]; then pip install -r requirements/python3.txt; fi
- pip install coveralls
- pip install codeclimate-test-reporter
- pip install -e .

View File

@ -1,6 +1,22 @@
Changelog
=========
1.1.0 (2019-02-01)
------------------
- Exceptions now inherit a new BaseIpwhoisException rather than Exception
(#205 - Darkheir)
- Fixed list output for generate_examples.py (#196)
- Fixed bug in ASN HTTP lookup where the ARIN results were reversed, and
parsing would fail on the first item (#220)
- Removed support for Python 2.6/3.3, added support for 3.7 (#221)
- Fixed deprecation warnings in core code (#203 - cstranex)
- Fixed bug in host argument for elastic_search.py example (#202)
- Set user agent in elastic_search.py example to avoid default user agent
- Updated elastic_search.py example for ES 6.6.0
- Readme update for RDAP vs Legacy Whois output (#204)
- Removed the disallow_permutations argument from ipwhois_cli.py (#226)
1.0.0 (2017-07-30)
------------------

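Editor's note: the first 1.1.0 changelog entry above means callers can now trap every ipwhois-specific error with a single handler. A minimal sketch, assuming ipwhois 1.1.0 is installed (the address is only an example)::

    from ipwhois import IPWhois
    from ipwhois.exceptions import BaseIpwhoisException

    try:
        # As of 1.1.0, any ASN/whois/RDAP failure raised by the library
        # derives from BaseIpwhoisException.
        results = IPWhois('74.125.225.229').lookup_rdap(depth=1)
    except BaseIpwhoisException as e:
        print('ipwhois lookup failed: {0}'.format(e))
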
View File

@ -20,7 +20,7 @@ Usage
ipwhois_cli.py [-h] [--whois] [--exclude_nir] [--json] [--hr]
[--show_name] [--colorize] [--timeout TIMEOUT]
[--proxy_http "PROXY_HTTP"]
[--proxy_https "PROXY_HTTPS"] [--disallow_permutations]
[--proxy_https "PROXY_HTTPS"]
[--inc_raw] [--retry_count RETRY_COUNT]
[--asn_alts "ASN_ALTS"] [--asn_methods "ASN_METHODS"]
[--extra_org_map "EXTRA_ORG_MAP"]
@ -60,12 +60,6 @@ IPWhois settings:
The proxy HTTPS address passed to
request.ProxyHandler. User auth can be passed like
"https://user:pass@192.168.0.1:443"
--disallow_permutations
Disable additional methods if DNS lookups to Cymru
fail. This is the opposite of the ipwhois
allow_permutations, in order to enable
allow_permutations by default in the CLI. *WARNING*
deprecated in favor of new argument asn_methods.
Common settings (RDAP & Legacy Whois):
--inc_raw Include the raw whois results in the output.

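Editor's note: with --disallow_permutations removed from the CLI, ASN fallback is steered through --asn_methods (shown in the usage synopsis above). A hedged Python equivalent, assuming lookup_rdap() in 1.1.0 accepts asn_methods as the CLI help suggests::

    from ipwhois import IPWhois

    obj = IPWhois('74.125.225.229')
    # Choose the ASN lookup order explicitly; this supersedes the old
    # allow_permutations/disallow_permutations toggle.
    results = obj.lookup_rdap(depth=1, asn_methods=['dns', 'whois', 'http'])
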
View File

@ -110,7 +110,7 @@ Guidelines
- Follow the `Google docstring style guide
<https://google.github.io/styleguide/pyguide.html#Comments>`_ for
comments
- Must be compatible with Python 2.6, 2.7, and 3.3+
- Must be compatible with Python 2.7 and 3.4+
- Break out reusable code to functions
- Make your code easy to read and comment where necessary
- Reference the GitHub issue number in the description (e.g., Issue #01)

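Editor's note: the guidelines above ask for Google-style docstrings. A minimal sketch of the expected shape, using a hypothetical helper that is not part of ipwhois::

    def has_private_prefix(address):
        """
        Report whether an address string starts with a common private prefix.

        Args:
            address (:obj:`str`): The IP address to check.

        Returns:
            bool: True if the address begins with '10.' or '192.168.'.

        Raises:
            ValueError: If address is empty.
        """
        if not address:
            raise ValueError('address is required')
        return address.startswith(('10.', '192.168.'))
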
View File

@ -1,4 +1,4 @@
Copyright (c) 2013-2017 Philip Hane
Copyright (c) 2013-2019 Philip Hane
All rights reserved.
Redistribution and use in source and binary forms, with or without

View File

@ -11,7 +11,7 @@ ipwhois
:target: https://codeclimate.com/github/secynic/ipwhois
.. image:: https://img.shields.io/badge/license-BSD%202--Clause-blue.svg
:target: https://github.com/secynic/ipwhois/tree/master/LICENSE.txt
.. image:: https://img.shields.io/badge/python-2.6%2C%202.7%2C%203.3+-blue.svg
.. image:: https://img.shields.io/badge/python-2.7%2C%203.4+-blue.svg
:target: https://docs.python.org
.. image:: https://img.shields.io/badge/docs-latest-green.svg?style=flat
:target: https://ipwhois.readthedocs.io/en/latest
@ -45,11 +45,10 @@ Features
* Recursive network parsing for IPs with parent/children networks listed
* National Internet Registry support for JPNIC and KRNIC
* Supports IP to ASN and ASN origin queries
* Python 2.6+ and 3.3+ supported
* Python 2.7 and 3.4+ supported
* Useful set of utilities
* Experimental bulk query support
* BSD license
* 100% core code coverage (See '# pragma: no cover' for exclusions)
* Human readable field translations
* Full CLI for IPWhois with optional ANSI colored console output.
@ -97,18 +96,12 @@ https://ipwhois.readthedocs.io/en/latest/UPGRADING.html
Dependencies
============
Python 2.6::
dnspython
ipaddr
argparse (required only for CLI)
Python 2.7::
dnspython
ipaddr
Python 3.3+::
Python 3.4+::
dnspython
@ -180,7 +173,7 @@ Input
| allow_permutations | bool | Allow net.Net() to use additional methods if |
| | | DNS lookups to Cymru fail. *WARNING* |
| | | deprecated in favor of new argument |
| | | asn_methods. Defaults to True. |
| | | asn_methods. Defaults to False. |
+--------------------+--------+-----------------------------------------------+
RDAP (HTTP)
@ -199,6 +192,17 @@ https://ipwhois.readthedocs.io/en/latest/RDAP.html
Legacy Whois
------------
.. note::
Legacy Whois output is different from RDAP. See the below JSON outputs for
a comparison:
Legacy Whois:
https://ipwhois.readthedocs.io/en/latest/WHOIS.html#basic-usage
RDAP:
https://ipwhois.readthedocs.io/en/latest/RDAP.html#basic-usage
Legacy Whois documentation:
https://ipwhois.readthedocs.io/en/latest/WHOIS.html

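Editor's note: the README changes above flip the allow_permutations default to False and point readers at the RDAP vs Legacy Whois comparison. A short sketch of the two call paths, assuming ipwhois 1.1.0::

    from ipwhois import IPWhois

    obj = IPWhois('74.125.225.229')
    rdap = obj.lookup_rdap(depth=1)    # RDAP output (the recommended path)
    legacy = obj.lookup_whois()        # Legacy Whois output, different keys
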
View File

@ -9,6 +9,16 @@ any changes that may affect user experience when upgrading to a new release.
This page is new as of version 1.0.0. Any information on older versions is
likely missing or incomplete.
******
v1.1.0
******
- Exceptions now inherit a new BaseIpwhoisException rather than Exception
- Removed support for Python 2.6/3.3, added support for 3.7
- Removed the disallow_permutations argument from ipwhois_cli.py. Use
asn_methods instead.
- Fixed deprecation warnings in core code
******
v1.0.0
******

View File

@ -1,4 +1,4 @@
# Copyright (c) 2013-2017 Philip Hane
# Copyright (c) 2013-2019 Philip Hane
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
@ -26,4 +26,4 @@ from .exceptions import *
from .net import Net
from .ipwhois import IPWhois
__version__ = '1.0.0'
__version__ = '1.1.0'

View File

@ -1,4 +1,4 @@
# Copyright (c) 2013-2017 Philip Hane
# Copyright (c) 2013-2019 Philip Hane
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
@ -371,7 +371,7 @@ class IPASN:
log.debug('No networks found')
net_list = []
for n in net_list:
for n in reversed(net_list):
try:
@ -383,10 +383,15 @@ class IPASN:
log.debug('Could not parse ASN registry via HTTP: '
'{0}'.format(str(e)))
raise ASNRegistryError('ASN registry lookup failed.')
continue
break
if not asn_data['asn_registry']:
log.debug('Could not parse ASN registry via HTTP')
raise ASNRegistryError('ASN registry lookup failed.')
except ASNRegistryError:
raise
@ -475,9 +480,7 @@ class IPASN:
else:
# Python 2.6 doesn't support set literal expressions, use explicit
# set() instead.
if set(['dns', 'whois', 'http']).isdisjoint(asn_methods):
if {'dns', 'whois', 'http'}.isdisjoint(asn_methods):
raise ValueError('methods argument requires at least one of '
'dns, whois, http.')
@ -508,7 +511,7 @@ class IPASN:
asn_data_list = []
for asn_entry in response:
asn_data_list.append(self._parse_fields_dns(
asn_data_list.append(self.parse_fields_dns(
str(asn_entry)))
# Iterate through the parsed ASN results to find the
@ -541,7 +544,7 @@ class IPASN:
try:
response = self._net.get_asn_whois(retry_count)
asn_data = self._parse_fields_whois(
asn_data = self.parse_fields_whois(
response) # pragma: no cover
break
@ -557,7 +560,7 @@ class IPASN:
response = self._net.get_asn_http(
retry_count=retry_count
)
asn_data = self._parse_fields_http(response,
asn_data = self.parse_fields_http(response,
extra_org_map)
break
@ -839,9 +842,7 @@ class ASNOrigin:
else:
# Python 2.6 doesn't support set literal expressions, use explicit
# set() instead.
if set(['whois', 'http']).isdisjoint(asn_methods):
if {'whois', 'http'}.isdisjoint(asn_methods):
raise ValueError('methods argument requires at least one of '
'whois, http.')
@ -915,7 +916,7 @@ class ASNOrigin:
results['raw'] = response
nets = []
nets_response = self._get_nets_radb(response, is_http)
nets_response = self.get_nets_radb(response, is_http)
nets.extend(nets_response)
@ -935,7 +936,7 @@ class ASNOrigin:
section_end = nets[index + 1]['start']
temp_net = self._parse_fields(
temp_net = self.parse_fields(
response,
fields['radb']['fields'],
section_end,

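Editor's note: the asn.py hunks above replace the explicit set() calls with set literals for the asn_methods check and walk the ARIN HTTP results in reverse. A minimal usage sketch of the validated argument, assuming the 1.1.0 API::

    from ipwhois.net import Net
    from ipwhois.asn import IPASN

    ipasn = IPASN(Net('74.125.225.229'))
    # Any non-empty subset of the supported methods passes the set check;
    # this performs live DNS/whois/HTTP queries.
    result = ipasn.lookup(asn_methods=['dns', 'whois', 'http'])
    # ipasn.lookup(asn_methods=['ftp'])  # ValueError: methods argument
    #                                    # requires at least one of
    #                                    # dns, whois, http.
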
View File

@ -60,16 +60,16 @@ master_doc = 'index'
# General information about the project.
project = 'ipwhois'
copyright = '2013-2017, Philip Hane'
copyright = '2013-2019, Philip Hane'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0.0'
version = '1.1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0.0'
release = '1.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.

View File

@ -14,22 +14,20 @@ I do not re-distribute the GeoLite2 database. For geolite2 data, download the
GeoLite2 database GeoLite2-City.mmdb and place in the data directory:
https://dev.maxmind.com/geoip/geoip2/geolite2/
.. note::
I have not updated the kibana.json since the ES 5.5.1 example. --kimport
and --kexport will not work with it for ES 6.x.
Dependencies
============
Tested using::
ElasticSearch 5.5.1
Kibana 5.5.1
ElasticSearch 6.6.0
Kibana 6.6.0
Python 2.6 (requirements26.txt - geopy is not supported)::
ipwhois
elasticsearch
geoip2
argparse
Python 2.7, 3.3+ (requirements.txt)::
Python 2.7, 3.4+ (requirements.txt)::
ipwhois
elasticsearch

View File

@ -9,6 +9,7 @@ import elasticsearch
from elasticsearch.helpers import scan
from ipwhois import IPWhois
from ipwhois.utils import get_countries
from ipwhois import __version__
from datetime import datetime
import geoip2.database
import json
@ -16,14 +17,11 @@ import io
import sys
from os import path
# geopy does not support lower than Python 2.7
if sys.version_info >= (2, 7):
from geopy.geocoders import Nominatim
from geopy.exc import (GeocoderQueryError, GeocoderTimedOut)
from geopy.geocoders import Nominatim
from geopy.exc import (GeocoderQueryError, GeocoderTimedOut)
# Used to convert addresses to geo locations.
GEOLOCATOR = Nominatim()
# Used to convert addresses to geo locations.
GEOLOCATOR = Nominatim(user_agent='ipwhois/{0}'.format(__version__))
# Setup the arg parser.
parser = argparse.ArgumentParser(
@ -89,7 +87,7 @@ parser.add_argument(
type=str,
nargs=1,
metavar='"HOST"',
default='localhost',
default=['localhost'],
help='The ElasticSearch host to connect to. Default: "localhost".'
)
parser.add_argument(
@ -108,8 +106,8 @@ DEFAULT_MAPPING = {
'date_detection': 'true',
'properties': {
'@version': {
'type': 'string',
'index': 'not_analyzed'
'type': 'text',
'index': False
},
'updated': {
'type': 'date',
@ -117,7 +115,6 @@ DEFAULT_MAPPING = {
'ignore_malformed': 'false'
}
},
'_all': {'enabled': 'true'},
'dynamic_templates': [
{
'string_fields': {
@ -142,7 +139,7 @@ with io.open(str(CUR_DIR) + '/data/geo_coord.json', 'r') as data_file:
COUNTRIES = get_countries()
# Default: localhost:9200
es = elasticsearch.Elasticsearch(host=args.host, port=args.port)
es = elasticsearch.Elasticsearch(host=args.host[0], port=args.port)
def delete_index():
@ -150,7 +147,16 @@ def delete_index():
try:
# Delete existing entries
es.indices.delete(index='ipwhois')
es.indices.delete(index='ipwhois_base')
except elasticsearch.exceptions.NotFoundError:
pass
try:
# Delete existing entries
es.indices.delete(index='ipwhois_entity')
except elasticsearch.exceptions.NotFoundError:
@ -159,8 +165,27 @@ def delete_index():
def create_index():
# Create the ipwhois index
es.indices.create(index='ipwhois', ignore=400, body={
# Create the ipwhois_base index
es.indices.create(index='ipwhois_base', ignore=400, body={
'settings': {
'index.refresh_interval': '5s',
'analysis': {
'analyzer': {
'base': {
'type': 'standard',
'stopwords': '_none_'
},
'entity': {
'type': 'standard',
'stopwords': '_none_'
}
}
}
}
})
# Create the ipwhois_entity index
es.indices.create(index='ipwhois_entity', ignore=400, body={
'settings': {
'index.refresh_interval': '5s',
'analysis': {
@ -219,7 +244,7 @@ def create_index():
})
es.indices.put_mapping(
index='ipwhois',
index='ipwhois_base',
doc_type='base',
body=mapping,
allow_no_indices=True
@ -236,7 +261,7 @@ def create_index():
'type': 'geo_point'
},
'value': {
'type': 'string',
'type': 'text',
}
}
}
@ -244,7 +269,7 @@ def create_index():
}
})
es.indices.put_mapping(
index='ipwhois',
index='ipwhois_entity',
doc_type='entity',
body=mapping,
allow_no_indices=True
@ -258,7 +283,7 @@ def insert(input_ip='', update=True, expires=7, depth=1):
try:
# Only update if older than x days.
tmp = es.search(
index='ipwhois',
index='ipwhois_base',
doc_type='base',
body={
'query': {
@ -303,7 +328,7 @@ def insert(input_ip='', update=True, expires=7, depth=1):
# Only update if older than 7 days.
es_tmp = es.search(
index='ipwhois',
index='ipwhois_entity',
doc_type='entity',
body={
'query': {
@ -369,7 +394,7 @@ def insert(input_ip='', update=True, expires=7, depth=1):
try:
ent_search = es.search(
index='ipwhois',
index='ipwhois_entity',
doc_type='entity',
body={
'query': {
@ -382,7 +407,7 @@ def insert(input_ip='', update=True, expires=7, depth=1):
for hit in ent_search['hits']['hits']:
es.delete(index='ipwhois', doc_type='entity',
es.delete(index='ipwhois_entity', doc_type='entity',
id=hit['_id'])
except KeyError:
@ -390,10 +415,10 @@ def insert(input_ip='', update=True, expires=7, depth=1):
pass
# Index the entity in elasticsearch.
es.index(index='ipwhois', doc_type='entity', body=ent)
es.index(index='ipwhois_entity', doc_type='entity', body=ent)
# Refresh the index for searching duplicates.
es.indices.refresh(index='ipwhois')
es.indices.refresh(index='ipwhois_entity')
# Don't need the objects key since that data has been entered as the
# entities doc_type.
@ -451,7 +476,7 @@ def insert(input_ip='', update=True, expires=7, depth=1):
try:
ip_search = es.search(
index='ipwhois',
index='ipwhois_base',
doc_type='base',
body={
'query': {
@ -464,17 +489,18 @@ def insert(input_ip='', update=True, expires=7, depth=1):
for hit in ip_search['hits']['hits']:
es.delete(index='ipwhois', doc_type='base', id=hit['_id'])
es.delete(index='ipwhois_base', doc_type='base', id=hit['_id'])
except KeyError:
pass
# Index the base in elasticsearch.
es.index(index='ipwhois', doc_type='base', body=ret)
es.index(index='ipwhois_base', doc_type='base', body=ret)
# Refresh the index for searching duplicates.
es.indices.refresh(index='ipwhois')
# Refresh the indices for searching duplicates.
es.indices.refresh(index='ipwhois_base')
es.indices.refresh(index='ipwhois_entity')
if args.delete:
@ -501,7 +527,7 @@ if args.kexport:
client=es,
index='.kibana',
doc_type='index-pattern',
query={'query': {'match': {'_id': 'ipwhois'}}}
query={'query': {'match': {'_id': 'ipwhois*'}}}
))
# Dump exports to json file.

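Editor's note: the elastic_search.py changes above pin a geopy user agent and split storage into ipwhois_base and ipwhois_entity indices. A small sketch of the Nominatim change in isolation, assuming geopy is installed (the version string is hard-coded here only for the sketch)::

    from geopy.geocoders import Nominatim

    # geopy's Nominatim requires an explicit user agent; the example script
    # above builds it from the ipwhois version string.
    geolocator = Nominatim(user_agent='ipwhois/1.1.0')
    location = geolocator.geocode('175 W Jackson Blvd, Chicago, IL')
    if location is not None:
        print(location.latitude, location.longitude)
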
View File

@ -1,4 +0,0 @@
ipwhois
elasticsearch
geoip2
argparse

View File

@ -15,13 +15,7 @@ Tested using::
Redis 3.2.1
Python 2.6 (requirements26.txt)::
ipwhois
redis
argparse
Python 2.7, 3.3+ (requirements.txt)::
Python 2.7, 3.4+ (requirements.txt)::
ipwhois
redis

View File

@ -1,4 +1,4 @@
# Copyright (c) 2013-2017 Philip Hane
# Copyright (c) 2013-2019 Philip Hane
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
@ -23,83 +23,89 @@
# POSSIBILITY OF SUCH DAMAGE.
class NetError(Exception):
class BaseIpwhoisException(Exception):
"""
Base exception for all the ipwhois custom ones.
"""
class NetError(BaseIpwhoisException):
"""
An Exception for when a parameter provided is not an instance of
ipwhois.net.Net.
"""
class IPDefinedError(Exception):
class IPDefinedError(BaseIpwhoisException):
"""
An Exception for when the IP is defined (does not need to be resolved).
"""
class ASNLookupError(Exception):
class ASNLookupError(BaseIpwhoisException):
"""
An Exception for when the ASN lookup failed.
"""
class ASNRegistryError(Exception):
class ASNRegistryError(BaseIpwhoisException):
"""
An Exception for when the ASN registry does not match one of the five
expected values (arin, ripencc, apnic, lacnic, afrinic).
"""
class ASNParseError(Exception):
class ASNParseError(BaseIpwhoisException):
"""
An Exception for when the ASN parsing failed.
"""
class ASNOriginLookupError(Exception):
class ASNOriginLookupError(BaseIpwhoisException):
"""
An Exception for when the ASN origin lookup failed.
"""
class HostLookupError(Exception):
class HostLookupError(BaseIpwhoisException):
"""
An Exception for when the host lookup failed.
"""
class BlacklistError(Exception):
class BlacklistError(BaseIpwhoisException):
"""
An Exception for when the server is in a blacklist.
"""
class WhoisLookupError(Exception):
class WhoisLookupError(BaseIpwhoisException):
"""
An Exception for when the whois lookup failed.
"""
class WhoisRateLimitError(Exception):
class WhoisRateLimitError(BaseIpwhoisException):
"""
An Exception for when Whois queries exceed the NIC's request limit and have
exhausted all retries.
"""
class HTTPLookupError(Exception):
class HTTPLookupError(BaseIpwhoisException):
"""
An Exception for when the RDAP lookup failed.
"""
class HTTPRateLimitError(Exception):
class HTTPRateLimitError(BaseIpwhoisException):
"""
An Exception for when HTTP queries exceed the NIC's request limit and have
exhausted all retries.
"""
class InvalidEntityContactObject(Exception):
class InvalidEntityContactObject(BaseIpwhoisException):
"""
An Exception for when JSON output is not an RDAP entity contact information
object:
@ -107,14 +113,14 @@ class InvalidEntityContactObject(Exception):
"""
class InvalidNetworkObject(Exception):
class InvalidNetworkObject(BaseIpwhoisException):
"""
An Exception for when JSON output is not an RDAP network object:
https://tools.ietf.org/html/rfc7483#section-5.4
"""
class InvalidEntityObject(Exception):
class InvalidEntityObject(BaseIpwhoisException):
"""
An Exception for when JSON output is not an RDAP entity object:
https://tools.ietf.org/html/rfc7483#section-5.1

View File

@ -1,4 +1,4 @@
# Copyright (c) 2017 Philip Hane
# Copyright (c) 2017-2019 Philip Hane
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without

View File

@ -1,4 +1,4 @@
# Copyright (c) 2013-2017 Philip Hane
# Copyright (c) 2013-2019 Philip Hane
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without

View File

@ -1,4 +1,4 @@
# Copyright (c) 2013-2017 Philip Hane
# Copyright (c) 2013-2019 Philip Hane
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
@ -44,11 +44,11 @@ class IPWhois:
proxy support. Defaults to None.
allow_permutations (:obj:`bool`): Allow net.Net() to use additional
methods if DNS lookups to Cymru fail. *WARNING* deprecated in
favor of new argument asn_methods. Defaults to True.
favor of new argument asn_methods. Defaults to False.
"""
def __init__(self, address, timeout=5, proxy_opener=None,
allow_permutations=True):
allow_permutations=False):
self.net = Net(
address=address, timeout=timeout, proxy_opener=proxy_opener,

View File

@ -1,4 +1,4 @@
# Copyright (c) 2013-2017 Philip Hane
# Copyright (c) 2013-2019 Philip Hane
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
@ -105,7 +105,7 @@ class Net:
proxy support. Defaults to None.
allow_permutations (:obj:`bool`): Allow net.Net() to use additional
methods if DNS lookups to Cymru fail. *WARNING* deprecated in
favor of new argument asn_methods. Defaults to True.
favor of new argument asn_methods. Defaults to False.
Raises:
IPDefinedError: The address provided is defined (does not need to be
@ -113,7 +113,7 @@ class Net:
"""
def __init__(self, address, timeout=5, proxy_opener=None,
allow_permutations=True):
allow_permutations=False):
# IPv4Address or IPv6Address
if isinstance(address, IPv4Address) or isinstance(
@ -223,7 +223,7 @@ class Net:
"""
Temporary wrapper for IP ASN lookups (moved to
asn.IPASN.lookup()). This will be removed in a future
release (1.0.0).
release.
"""
from warnings import warn
@ -768,15 +768,6 @@ class Net:
except (URLError, socket.timeout, socket.error) as e:
# Check needed for Python 2.6, also why URLError is caught.
try: # pragma: no cover
if not isinstance(e.reason, (socket.timeout, socket.error)):
raise HTTPLookupError('HTTP lookup failed for {0}.'
''.format(url))
except AttributeError: # pragma: no cover
pass
log.debug('HTTP query socket error: {0}'.format(e))
if retry_count > 0:
@ -920,15 +911,6 @@ class Net:
except (URLError, socket.timeout, socket.error) as e:
# Check needed for Python 2.6, also why URLError is caught.
try: # pragma: no cover
if not isinstance(e.reason, (socket.timeout, socket.error)):
raise HTTPLookupError('HTTP lookup failed for {0}.'
''.format(url))
except AttributeError: # pragma: no cover
pass
log.debug('HTTP query socket error: {0}'.format(e))
if retry_count > 0:

View File

@ -1,4 +1,4 @@
# Copyright (c) 2013-2017 Philip Hane
# Copyright (c) 2013-2019 Philip Hane
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
@ -483,7 +483,7 @@ class NIRWhois:
request_type=NIR_WHOIS[nir]['request_type']
)
return self._parse_fields(
return self.parse_fields(
response=contact_response,
fields_dict=NIR_WHOIS[nir]['contact_fields'],
dt_format=dt_format,
@ -582,11 +582,11 @@ class NIRWhois:
nets_response = None
if nir == 'jpnic':
nets_response = self._get_nets_jpnic(response)
nets_response = self.get_nets_jpnic(response)
elif nir == 'krnic':
nets_response = self._get_nets_krnic(response)
nets_response = self.get_nets_krnic(response)
nets.extend(nets_response)
@ -609,7 +609,7 @@ class NIRWhois:
dt_format = None
temp_net = self._parse_fields(
temp_net = self.parse_fields(
response=response,
fields_dict=NIR_WHOIS[nir]['fields'],
net_start=section_end,
@ -659,7 +659,7 @@ class NIRWhois:
tmp_response = None
tmp_handle = contact
temp_net['contacts'][key] = self._get_contact(
temp_net['contacts'][key] = self.get_contact(
response=tmp_response,
handle=tmp_handle,
nir=nir,

View File

@ -1,4 +1,4 @@
# Copyright (c) 2013-2017 Philip Hane
# Copyright (c) 2013-2019 Philip Hane
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without

View File

@ -1,4 +1,4 @@
# Copyright (c) 2013-2017 Philip Hane
# Copyright (c) 2013-2019 Philip Hane
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
@ -310,8 +310,9 @@ for filename, sections in (
tmp_query_results = {}
for query_key, query in section_config['queries'].items():
result = query()
new_str = json.dumps(
query(), indent=4, sort_keys=True
result, indent=4, sort_keys=True
).replace(': null', ': None')
new_str = re.sub(
@ -333,7 +334,9 @@ for filename, sections in (
r'\\\\n',
new_str,
flags=re.DOTALL
)[:-1] + ' }'
)[:-1] + ' {0}'.format(
'}' if isinstance(result, dict) else ']'
)
output_str = section_config['content'].format(
*tmp_query_results.values()

View File

@ -1,4 +1,4 @@
# Copyright (c) 2013-2017 Philip Hane
# Copyright (c) 2013-2019 Philip Hane
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
@ -150,15 +150,6 @@ group.add_argument(
'can be passed like "https://user:pass@192.168.0.1:443"',
required=False
)
group.add_argument(
'--disallow_permutations',
action='store_true',
help='Disable additional methods if DNS lookups to Cymru fail. This is the'
' opposite of the ipwhois allow_permutations, in order to enable '
'allow_permutations by default in the CLI. *WARNING* deprecated in '
'favor of new argument asn_methods.',
default=False
)
# Common (RDAP & Legacy Whois)
group = parser.add_argument_group('Common settings (RDAP & Legacy Whois)')
@ -372,9 +363,6 @@ class IPWhoisCLI:
proxy HTTP support or None.
proxy_https (:obj:`urllib.request.OpenerDirector`): The request for
proxy HTTPS support or None.
allow_permutations (:obj:`bool`): Allow net.Net() to use additional
methods if DNS lookups to Cymru fail. *WARNING* deprecated in
favor of new argument asn_methods. Defaults to True.
"""
def __init__(
@ -382,8 +370,7 @@ class IPWhoisCLI:
addr,
timeout,
proxy_http,
proxy_https,
allow_permutations
proxy_https
):
self.addr = addr
@ -412,12 +399,9 @@ class IPWhoisCLI:
handler = ProxyHandler(handler_dict)
self.opener = build_opener(handler)
self.allow_permutations = allow_permutations
self.obj = IPWhois(address=self.addr,
timeout=self.timeout,
proxy_opener=self.opener,
allow_permutations=self.allow_permutations)
proxy_opener=self.opener)
def generate_output_header(self, query_type='RDAP'):
"""
@ -482,10 +466,8 @@ class IPWhoisCLI:
if json_data is None:
json_data = {}
# Python 2.6 doesn't support set literal expressions, use explicit
# set() instead.
keys = set(['asn', 'asn_cidr', 'asn_country_code', 'asn_date',
'asn_registry', 'asn_description']).intersection(json_data)
keys = {'asn', 'asn_cidr', 'asn_country_code', 'asn_date',
'asn_registry', 'asn_description'}.intersection(json_data)
output = ''
@ -1444,6 +1426,7 @@ class IPWhoisCLI:
return output
if script_args.addr:
results = IPWhoisCLI(
@ -1454,8 +1437,7 @@ if script_args.addr:
) else None,
proxy_https=script_args.proxy_https if (
script_args.proxy_https and len(script_args.proxy_https) > 0
) else None,
allow_permutations=(not script_args.disallow_permutations)
) else None
)
if script_args.whois:

View File

@ -1,4 +1,4 @@
# Copyright (c) 2013-2017 Philip Hane
# Copyright (c) 2013-2019 Philip Hane
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without

View File

@ -4,30 +4,6 @@ import unittest
class TestCommon(unittest.TestCase):
longMessage = False
# Python 2.6 doesn't have unittest._formatMessage or
# unittest.util.safe_repr
# Borrowed and modified both functions from Python 2.7.
if not hasattr(unittest.TestCase, '_formatMessage'):
def safe_repr(self, obj, short=False):
try:
result = repr(obj)
except Exception:
result = object.__repr__(obj)
if not short or len(result) < 80:
return result
return result[:80] + ' [truncated]...'
def _formatMessage(self, msg, standardMsg):
if not self.longMessage:
return msg or standardMsg
if msg is None:
return standardMsg
try:
return '{0} : {0}'.format(standardMsg, msg)
except UnicodeDecodeError:
return '{0} : {0}'.format(self.safe_repr(standardMsg),
self.safe_repr(msg))
if not hasattr(unittest.TestCase, 'assertIsInstance'):
def assertIsInstance(self, obj, cls, msg=None):
if not isinstance(obj, cls):

View File

@ -35,16 +35,14 @@ class TestIPASN(TestCommon):
ipasn.lookup(asn_methods=['dns', 'whois', 'http'])
ipasn.lookup(asn_methods=['http'])
net = Net(address='74.125.225.229', timeout=0,
allow_permutations=False)
net = Net(address='74.125.225.229', timeout=0)
ipasn = IPASN(net)
self.assertRaises(ASNRegistryError, ipasn.lookup)
net = Net(address='74.125.225.229', timeout=0,
allow_permutations=True)
net = Net(address='74.125.225.229', timeout=0)
ipasn = IPASN(net)
self.assertRaises(ASNRegistryError, ipasn.lookup, **dict(
asn_alts=['http']))
asn_methods=['http']))
class TestASNOrigin(TestCommon):
@ -86,12 +84,11 @@ class TestASNOrigin(TestCommon):
self.fail('Unexpected exception raised: {0}'.format(e))
net = Net(address='74.125.225.229', timeout=0,
allow_permutations=True)
net = Net(address='74.125.225.229', timeout=0)
asnorigin = ASNOrigin(net)
self.assertRaises(ASNOriginLookupError, asnorigin.lookup, **dict(
asn='15169',
asn_alts=['http']))
asn_methods=['http']))
self.assertRaises(ValueError, asnorigin.lookup, **dict(
asn='15169',

View File

@ -3,6 +3,7 @@ from ipwhois.tests import TestCommon
from ipwhois.exceptions import (ASNLookupError, ASNRegistryError,
BlacklistError, WhoisLookupError,
HTTPLookupError, HostLookupError)
from ipwhois.asn import IPASN
from ipwhois.net import Net
LOG_FORMAT = ('[%(asctime)s] [%(levelname)s] [%(filename)s:%(lineno)s] '
@ -14,10 +15,12 @@ log = logging.getLogger(__name__)
class TestNet(TestCommon):
def test_lookup_asn(self):
# TODO: keep until deprecated lookup is removed, for coverage
net = Net('74.125.225.229')
ipasn = IPASN(net)
try:
self.assertIsInstance(net.lookup_asn(), tuple)
self.assertIsInstance(ipasn.lookup(), dict)
except HTTPLookupError:
pass
except AssertionError as e:

View File

@ -21,23 +21,23 @@ class TestIPASN(TestCommon):
self.assertRaises(NetError, IPASN, 'a')
def test__parse_fields_dns(self):
def test_parse_fields_dns(self):
data = '"15169 | 74.125.225.0/24 | US | arin | 2007-03-13"'
net = Net('74.125.225.229')
ipasn = IPASN(net)
try:
self.assertIsInstance(ipasn._parse_fields_dns(data), dict)
self.assertIsInstance(ipasn.parse_fields_dns(data), dict)
except AssertionError as e:
raise e
except Exception as e:
self.fail('Unexpected exception raised: {0}'.format(e))
data = '"15169 | 74.125.225.0/24 | US | random | 2007-03-13"'
self.assertRaises(ASNRegistryError, ipasn._parse_fields_dns, data)
self.assertRaises(ASNRegistryError, ipasn.parse_fields_dns, data)
data = ''
self.assertRaises(ASNParseError, ipasn._parse_fields_dns, data)
self.assertRaises(ASNParseError, ipasn.parse_fields_dns, data)
def test_parse_fields_verbose_dns(self):
@ -58,14 +58,14 @@ class TestIPASN(TestCommon):
data = ''
self.assertRaises(ASNParseError, ipasn.parse_fields_verbose_dns, data)
def test__parse_fields_whois(self):
def test_parse_fields_whois(self):
data = ('15169 | 74.125.225.229 | 74.125.225.0/24 | US | arin'
' | 2007-03-13 | GOOGLE - Google Inc., US')
net = Net('74.125.225.229')
ipasn = IPASN(net)
try:
self.assertIsInstance(ipasn._parse_fields_whois(data), dict)
self.assertIsInstance(ipasn.parse_fields_whois(data), dict)
except AssertionError as e:
raise e
except Exception as e:
@ -73,12 +73,12 @@ class TestIPASN(TestCommon):
data = ('15169 | 74.125.225.229 | 74.125.225.0/24 | US | rdm'
' | 2007-03-13 | GOOGLE - Google Inc., US')
self.assertRaises(ASNRegistryError, ipasn._parse_fields_whois, data)
self.assertRaises(ASNRegistryError, ipasn.parse_fields_whois, data)
data = '15169 | 74.125.225.229 | 74.125.225.0/24 | US'
self.assertRaises(ASNParseError, ipasn._parse_fields_whois, data)
self.assertRaises(ASNParseError, ipasn.parse_fields_whois, data)
def test__parse_fields_http(self):
def test_parse_fields_http(self):
data = {
'nets': {
@ -92,7 +92,7 @@ class TestIPASN(TestCommon):
net = Net('1.2.3.4')
ipasn = IPASN(net)
try:
self.assertIsInstance(ipasn._parse_fields_http(response=data),
self.assertIsInstance(ipasn.parse_fields_http(response=data),
dict)
except AssertionError as e:
raise e
@ -101,7 +101,7 @@ class TestIPASN(TestCommon):
data['nets']['net']['orgRef']['@handle'] = 'RIPE'
try:
self.assertIsInstance(ipasn._parse_fields_http(response=data),
self.assertIsInstance(ipasn.parse_fields_http(response=data),
dict)
except AssertionError as e:
raise e
@ -110,7 +110,7 @@ class TestIPASN(TestCommon):
data['nets']['net']['orgRef']['@handle'] = 'DNIC'
try:
self.assertIsInstance(ipasn._parse_fields_http(response=data),
self.assertIsInstance(ipasn.parse_fields_http(response=data),
dict)
except AssertionError as e:
raise e
@ -119,7 +119,7 @@ class TestIPASN(TestCommon):
data['nets']['net']['orgRef']['@handle'] = 'INVALID'
try:
self.assertRaises(ASNRegistryError, ipasn._parse_fields_http,
self.assertRaises(ASNRegistryError, ipasn.parse_fields_http,
response=data)
except AssertionError as e:
raise e
@ -128,7 +128,8 @@ class TestIPASN(TestCommon):
data = ''
try:
self.assertIsInstance(ipasn._parse_fields_http(response=data), dict)
self.assertRaises(ASNRegistryError, ipasn.parse_fields_http,
response=data)
except AssertionError as e:
raise e
except Exception as e:
@ -174,7 +175,7 @@ class TestASNOrigin(TestCommon):
self.fail('Unexpected exception raised: {0}'.format(e))
def test__parse_fields(self):
def test_parse_fields(self):
net = Net('74.125.225.229')
obj = ASNOrigin(net)
@ -183,12 +184,12 @@ class TestASNOrigin(TestCommon):
# groups are messed up.
tmp_dict = ASN_ORIGIN_WHOIS['radb']['fields']
tmp_dict['route'] = r'(route):[^\S\n]+(?P<val1>.+?)\n'
obj._parse_fields(
obj.parse_fields(
response="\nroute: 66.249.64.0/20\n",
fields_dict=tmp_dict
)
obj._parse_fields(
obj.parse_fields(
response="\nchanged: noc@google.com 20110301\n",
fields_dict=ASN_ORIGIN_WHOIS['radb']['fields']
)
@ -210,7 +211,7 @@ class TestASNOrigin(TestCommon):
'\nsource: RADB'
'\n\n'
)
obj._parse_fields(
obj.parse_fields(
response=multi_net_response,
fields_dict=ASN_ORIGIN_WHOIS['radb']['fields']
)
@ -239,14 +240,10 @@ class TestASNOrigin(TestCommon):
'\nsource: RADB'
'\n\n'
)
obj._get_nets_radb(multi_net_response)
self.assertEqual(obj._get_nets_radb(multi_net_response, is_http=True),
[{'cidr': '66.249.64.0/20', 'description': None, 'maintainer': None, 'updated': None,
'source': None, 'start': 2, 'end': 29},
{'cidr': '66.249.80.0/20', 'description': None, 'maintainer': None, 'updated': None,
'source': None, 'start': 175, 'end': 202}])
obj.get_nets_radb(multi_net_response)
self.assertEqual(obj.get_nets_radb(multi_net_response, is_http=True),
[])
net = Net('2001:43f8:7b0::')
obj = ASNOrigin(net)
@ -261,7 +258,7 @@ class TestASNOrigin(TestCommon):
'\n\n'
)
self.assertEquals(
obj._get_nets_radb(multi_net_response),
obj.get_nets_radb(multi_net_response),
[{
'updated': None,
'maintainer': None,

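Editor's note: the renamed tests above exercise the now-public parsers. A hedged sketch of calling one directly, using the DNS response format shown in the tests::

    from ipwhois.net import Net
    from ipwhois.asn import IPASN

    ipasn = IPASN(Net('74.125.225.229'))
    data = '"15169 | 74.125.225.0/24 | US | arin | 2007-03-13"'
    parsed = ipasn.parse_fields_dns(data)
    # parsed carries the asn, asn_cidr, asn_country_code, asn_registry and
    # asn_date fields extracted from the Cymru-style DNS answer.
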
View File

@ -57,7 +57,7 @@ class TestNIRWhois(TestCommon):
self.assertRaises(KeyError, obj.lookup)
self.assertRaises(KeyError, obj.lookup, **dict(nir='a'))
def test__parse_fields(self):
def test_parse_fields(self):
net = Net('133.1.2.5')
obj = NIRWhois(net)
@ -66,13 +66,13 @@ class TestNIRWhois(TestCommon):
# groups are messed up.
tmp_dict = NIR_WHOIS['jpnic']['fields']
tmp_dict['name'] = r'(NetName):[^\S\n]+(?P<val1>.+?)\n'
obj._parse_fields(
obj.parse_fields(
response='\nNetName: TEST\n',
fields_dict=tmp_dict,
dt_format=NIR_WHOIS['jpnic']['dt_format']
)
obj._parse_fields(
obj.parse_fields(
response='\nUpdated: 2012-02-24\n',
fields_dict=NIR_WHOIS['jpnic']['fields'],
dt_format=NIR_WHOIS['jpnic']['dt_format']
@ -81,13 +81,13 @@ class TestNIRWhois(TestCommon):
log.debug(
'Testing field parse error. This should be followed by a '
'debug log.')
obj._parse_fields(
obj.parse_fields(
response='\nUpdated: 2012-02-244\n',
fields_dict=NIR_WHOIS['jpnic']['fields'],
dt_format=NIR_WHOIS['jpnic']['dt_format']
)
def test__get_nets_jpnic(self):
def test_get_nets_jpnic(self):
net = Net('133.1.2.5')
obj = NIRWhois(net)
@ -98,9 +98,9 @@ class TestNIRWhois(TestCommon):
'a. [Network Number] asd>133.1.0.0/16</A>'
'a. [Network Number] asd>133.1.0.0/24</A>'
)
obj._get_nets_jpnic(multi_net_response)
obj.get_nets_jpnic(multi_net_response)
self.assertFalse(obj._get_nets_jpnic(
self.assertFalse(obj.get_nets_jpnic(
'a. [Network Number] asd>asd/16</A>'
))
@ -115,19 +115,19 @@ class TestNIRWhois(TestCommon):
'IPv4 Address : 115.0.0.0 - 115.23.255.255 (/12+/13)'
'IPv4 Address : 115.1.2.0 - 115.1.2.63 (/26)'
)
obj._get_nets_krnic(multi_net_response)
obj.get_nets_krnic(multi_net_response)
# ip_network ValueError
self.assertFalse(obj._get_nets_krnic(
self.assertFalse(obj.get_nets_krnic(
'IPv4 Address : asd - asd (/12+/13)'
))
# Expected IP range regex not found, but some value found
self.assertFalse(obj._get_nets_krnic(
self.assertFalse(obj.get_nets_krnic(
'IPv4 Address : asd'
))
def test__get_contact(self):
def test_get_contact(self):
net = Net('115.1.2.3')
obj = NIRWhois(net)
@ -139,7 +139,7 @@ class TestNIRWhois(TestCommon):
)
# No exception raised.
obj._get_contact(
obj.get_contact(
response=contact_response,
handle=None,
nir='krnic',

View File

@ -56,13 +56,13 @@ class TestWhois(TestCommon):
# groups are messed up.
tmp_dict = RIR_WHOIS['arin']['fields']
tmp_dict['name'] = r'(NetName):[^\S\n]+(?P<val1>.+?)\n'
obj._parse_fields(
obj.parse_fields(
response="\nNetName: TEST\n",
fields_dict=tmp_dict,
dt_format=RIR_WHOIS['arin']['dt_format']
)
obj._parse_fields(
obj.parse_fields(
response="\nUpdated: 2012-02-24\n",
fields_dict=RIR_WHOIS['arin']['fields'],
dt_format=RIR_WHOIS['arin']['dt_format']
@ -70,13 +70,13 @@ class TestWhois(TestCommon):
log.debug('Testing field parse error. This should be followed by a '
'debug log.')
obj._parse_fields(
obj.parse_fields(
response='\nUpdated: 2012-02-244\n',
fields_dict=RIR_WHOIS['arin']['fields'],
dt_format=RIR_WHOIS['arin']['dt_format']
)
def test__get_nets_arin(self):
def test_get_nets_arin(self):
net = Net('74.125.225.229')
obj = Whois(net)
@ -90,9 +90,9 @@ class TestWhois(TestCommon):
'\nNetRange: 74.125.1.0 - 74.125.1.0'
'\n'
)
obj._get_nets_arin(multi_net_response)
obj.get_nets_arin(multi_net_response)
def test__get_nets_lacnic(self):
def test_get_nets_lacnic(self):
net = Net('200.57.141.161')
obj = Whois(net)
@ -103,9 +103,9 @@ class TestWhois(TestCommon):
'\ninetnum: 200.57.256/19\r\n'
'\n'
)
obj._get_nets_lacnic(multi_net_response)
obj.get_nets_lacnic(multi_net_response)
def test__get_nets_other(self):
def test_get_nets_other(self):
net = Net('210.107.73.73')
obj = Whois(net)
@ -116,4 +116,4 @@ class TestWhois(TestCommon):
'\ninetnum: 210.107.0.0 - 210.107.127.256\n'
'\n'
)
obj._get_nets_other(multi_net_response)
obj.get_nets_other(multi_net_response)

View File

@ -1,4 +1,4 @@
# Copyright (c) 2013-2017 Philip Hane
# Copyright (c) 2013-2019 Philip Hane
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without

View File

@ -1,4 +1,4 @@
# Copyright (c) 2013-2017 Philip Hane
# Copyright (c) 2013-2019 Philip Hane
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
@ -740,7 +740,7 @@ class Whois:
results['raw_referral'] = response_ref
temp_rnet = self._parse_fields(
temp_rnet = self.parse_fields(
response_ref,
RWHOIS['fields'],
field_list=field_list
@ -758,15 +758,15 @@ class Whois:
if asn_data['asn_registry'] == 'arin':
nets_response = self._get_nets_arin(response)
nets_response = self.get_nets_arin(response)
elif asn_data['asn_registry'] == 'lacnic':
nets_response = self._get_nets_lacnic(response)
nets_response = self.get_nets_lacnic(response)
else:
nets_response = self._get_nets_other(response)
nets_response = self.get_nets_other(response)
nets.extend(nets_response)
@ -788,7 +788,7 @@ class Whois:
dt_format = None
temp_net = self._parse_fields(
temp_net = self.parse_fields(
response,
RIR_WHOIS[asn_data['asn_registry']]['fields'],
section_end,

View File

@ -4,7 +4,7 @@ from setuptools import setup
import io
NAME = 'ipwhois'
VERSION = '1.0.0'
VERSION = '1.1.0'
AUTHOR = 'Philip Hane'
AUTHOR_EMAIL = 'secynic@gmail.com'
DESCRIPTION = 'Retrieve and parse whois data for IPv4 and IPv6 addresses.'
@ -52,13 +52,12 @@ CLASSIFIERS = [
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Internet',
'Topic :: Software Development',
]