fix code Complexity (#8)
* fix code Complexity
* Update wrapper.py
* codefactor badge
README.md
@@ -5,6 +5,7 @@
 [](https://www.codacy.com/manual/akamhy/waybackpy?utm_source=github.com&utm_medium=referral&utm_content=akamhy/waybackpy&utm_campaign=Badge_Grade)
 [](https://github.com/akamhy/waybackpy/blob/master/LICENSE)
 [](https://codeclimate.com/github/akamhy/waybackpy/maintainability)
+[](https://www.codefactor.io/repository/github/akamhy/waybackpy)
 [](https://www.python.org/)
 
 
wrapper.py
@@ -31,15 +31,7 @@ def wayback_timestamp(**kwargs):
       str(kwargs["minute"]).zfill(2)
       )
 
-def save(url, UA=default_UA):
-    base_save_url = "https://web.archive.org/save/"
-    request_url = (base_save_url + clean_url(url))
-    hdr = { 'User-Agent' : '%s' % UA } #nosec
-    req = Request(request_url, headers=hdr) #nosec
-    url_check(url)
-    try:
-        response = urlopen(req) #nosec
-    except HTTPError as e:
+def handle_HTTPError(e):
     if e.code == 502:
         raise BadGateWay(e)
     elif e.code == 503:
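The helper introduced here takes over the status-code dispatch that used to live inside save(). Pieced together from the unchanged context lines of this hunk and the next, it reads roughly like the sketch below; this is a reconstruction from the diff, not the verbatim file:

    def handle_HTTPError(e):
        # Map Wayback Machine HTTP failures onto waybackpy's exception types.
        if e.code == 502:
            raise BadGateWay(e)
        elif e.code == 503:
            raise TooManyArchivingRequests(e)
        elif e.code == 404:
            raise UrlNotFound(e)
        # Any other code falls through and the function returns None,
        # leaving the decision to the caller.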
@@ -48,7 +40,19 @@ def save(url, UA=default_UA):
         raise TooManyArchivingRequests(e)
     elif e.code == 404:
         raise UrlNotFound(e)
-        else:
+
+def save(url, UA=default_UA):
+    url_check(url)
+    request_url = ("https://web.archive.org/save/" + clean_url(url))
+
+    hdr = { 'User-Agent' : '%s' % UA } #nosec
+    req = Request(request_url, headers=hdr) #nosec
+
+
+    try:
+        response = urlopen(req) #nosec
+    except HTTPError as e:
+        if handle_HTTPError(e) is None:
             raise PageNotSaved(e)
     except URLError:
         try:
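With the dispatch extracted, the rewritten save() only handles the leftover case itself: when handle_HTTPError(e) returns None (an unlisted status code), it raises PageNotSaved. A minimal caller-side sketch; the import paths waybackpy.wrapper and waybackpy.exceptions are assumptions based on the file and exception names in this diff:

    from waybackpy.exceptions import TooManyArchivingRequests, PageNotSaved  # assumed location
    from waybackpy.wrapper import save  # assumed location

    try:
        archive_url = save("https://example.com", UA="MyBot/1.0")
        print(archive_url)  # "https://web.archive.org" + the Content-Location header
    except TooManyArchivingRequests:
        print("archive.org returned 503, try again later")
    except PageNotSaved as err:
        print("save failed:", err)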
@@ -57,16 +61,17 @@ def save(url, UA=default_UA):
             raise UrlNotFound(e)
 
     header = response.headers
 
     if "exclusion.robots.policy" in str(header):
         raise ArchivingNotAllowed("Can not archive %s. Disabled by site owner." % (url))
-    archive_id = header['Content-Location']
-    archived_url = "https://web.archive.org" + archive_id
-    return archived_url
+
+    return "https://web.archive.org" + header['Content-Location']
 
 def get(url, encoding=None, UA=default_UA):
     url_check(url)
     hdr = { 'User-Agent' : '%s' % UA }
     req = Request(clean_url(url), headers=hdr) #nosec
 
     try:
         resp=urlopen(req) #nosec
     except URLError:
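The old three-line tail of save() collapses into a single return that prefixes the host onto the Content-Location header of the save response. Illustrative values only; the exact header format comes from archive.org, not from this diff:

    # Hypothetical Content-Location value from the save endpoint.
    content_location = "/web/20200504141153/https://example.com/"
    archived_url = "https://web.archive.org" + content_location
    print(archived_url)  # https://web.archive.org/web/20200504141153/https://example.com/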
@@ -74,13 +79,14 @@ def get(url, encoding=None, UA=default_UA):
             resp=urlopen(req) #nosec
         except URLError as e:
             raise UrlNotFound(e)
 
     if encoding is None:
         try:
             encoding= resp.headers['content-type'].split('charset=')[-1]
         except AttributeError:
             encoding = "UTF-8"
-    encoding = encoding.replace("text/html","UTF-8",1)
-    return resp.read().decode(encoding)
+
+    return resp.read().decode(encoding.replace("text/html", "UTF-8", 1))
 
 def near(
     url,
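get() keeps the same charset detection and simply inlines the text/html fallback into the decode call. The replace() covers servers whose Content-Type carries no charset= parameter, in which case the split() returns the whole header value. A standalone sketch of that string logic with made-up header values:

    # Explicit charset: split() yields the codec name directly.
    encoding = "text/html; charset=UTF-8".split('charset=')[-1]
    print(encoding)  # UTF-8

    # No charset parameter: split() returns the full value, and
    # replace() rewrites "text/html" to "UTF-8" before decoding.
    encoding = "text/html".split('charset=')[-1]
    print(encoding.replace("text/html", "UTF-8", 1))  # UTF-8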
@@ -96,15 +102,12 @@ def near(
     request_url = "https://archive.org/wayback/available?url=%s&timestamp=%s" % (clean_url(url), str(timestamp))
     hdr = { 'User-Agent' : '%s' % UA }
     req = Request(request_url, headers=hdr) # nosec
 
     try:
         response = urlopen(req) #nosec
     except HTTPError as e:
-        if e.code == 502:
-            raise BadGateWay(e)
-        elif e.code == 503:
-            raise WaybackUnavailable(e)
-        elif e.code == 404:
-            raise UrlNotFound(e)
+        handle_HTTPError(e)
+
     data = json.loads(response.read().decode("UTF-8"))
     if not data["archived_snapshots"]:
         raise ArchiveNotFound("'%s' is not yet archived." % url)
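One behavioural note for callers of near(): the old inline chain raised WaybackUnavailable on a 503, while the shared helper, as reconstructed from the save() hunks above, raises TooManyArchivingRequests for that code, so exception handling around near() may need to change accordingly. A hedged caller-side sketch, with the same assumed import paths as before and assuming near()'s remaining parameters have defaults:

    from waybackpy.exceptions import ArchiveNotFound, TooManyArchivingRequests  # assumed location
    from waybackpy.wrapper import near  # assumed location

    try:
        snapshot = near("https://example.com")  # other parameters assumed optional
        print(snapshot)
    except ArchiveNotFound:
        print("no snapshot of this URL exists yet")
    except TooManyArchivingRequests:
        print("availability API returned 503")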
 