Update wrapper.py
@@ -42,15 +42,17 @@ def handle_HTTPError(e):
         raise UrlNotFound(e)

 def save(url, UA=default_UA):
-    url_check(url)
-    request_url = ("https://web.archive.org/save/" + clean_url(url))
+    base_save_url = "https://web.archive.org/save/"
+    request_url = (base_save_url + clean_url(url))

     hdr = { 'User-Agent' : '%s' % UA } #nosec
     req = Request(request_url, headers=hdr) #nosec
+    url_check(url)

     try:
         response = urlopen(req) #nosec
     except HTTPError as e:
-        if handle_HTTPError(e) is None:
+        if handle_HTTPError(e) == None:
             raise PageNotSaved(e)
     except URLError:
         try:
@@ -59,16 +61,17 @@ def save(url, UA=default_UA):
             raise UrlNotFound(e)

     header = response.headers

     if "exclusion.robots.policy" in str(header):
         raise ArchivingNotAllowed("Can not archive %s. Disabled by site owner." % (url))
-    return "https://web.archive.org" + header['Content-Location']
+    archive_id = header['Content-Location']
+    archived_url = "https://web.archive.org" + archive_id
+    return archived_url

 def get(url, encoding=None, UA=default_UA):
     url_check(url)
     hdr = { 'User-Agent' : '%s' % UA }
     req = Request(clean_url(url), headers=hdr) #nosec

     try:
         resp=urlopen(req) #nosec
     except URLError:
@@ -76,13 +79,14 @@ def get(url, encoding=None, UA=default_UA):
             resp=urlopen(req) #nosec
         except URLError as e:
             raise UrlNotFound(e)

     if encoding is None:
         try:
             encoding= resp.headers['content-type'].split('charset=')[-1]
         except AttributeError:
             encoding = "UTF-8"
-    return resp.read().decode(encoding.replace("text/html", "UTF-8", 1))
+    encoding = encoding.replace("text/html","UTF-8",1)
+    return resp.read().decode(encoding)

 def near(
     url,
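
For reference, a minimal usage sketch of the two functions touched by this diff. It assumes wrapper.py is importable as a module named wrapper and uses example.com as a placeholder URL; both are assumptions, not part of the commit.

    # Hedged sketch, not part of this commit: assumes wrapper.py is importable
    # as `wrapper`; its exceptions (e.g. PageNotSaved, UrlNotFound) propagate on failure.
    import wrapper

    # save() submits the URL to the Wayback Machine and returns the archived URL
    # built from the Content-Location response header.
    archived_url = wrapper.save("https://example.com")
    print(archived_url)

    # get() fetches the page and decodes it with the charset taken from the
    # Content-Type header, falling back to UTF-8.
    html = wrapper.get(archived_url)
    print(len(html))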