tests adjusted for the new Url class API
@@ -3,81 +3,86 @@ sys.path.append("..")
 import waybackpy
 import pytest


 user_agent = "Mozilla/5.0 (Windows NT 6.2; rv:20.0) Gecko/20121202 Firefox/20.0"

 def test_clean_url():
     test_url = " https://en.wikipedia.org/wiki/Network security "
     answer = "https://en.wikipedia.org/wiki/Network_security"
-    test_result = waybackpy.clean_url(test_url)
+    target = waybackpy.Url(test_url, user_agent)
+    test_result = target.clean_url()
     assert answer == test_result

 def test_url_check():
-    InvalidUrl = "http://wwwgooglecom/"
+    broken_url = "http://wwwgooglecom/"
     with pytest.raises(Exception) as e_info:
-        waybackpy.url_check(InvalidUrl)
+        waybackpy.Url(broken_url, user_agent)

 def test_save():
     # Test for urls that exist and can be archived.
     url1="https://github.com/akamhy/waybackpy"
-    archived_url1 = waybackpy.save(url1, UA=user_agent)
+    target = waybackpy.Url(url1, user_agent)
+    archived_url1 = target.save()
     assert url1 in archived_url1

     # Test for urls that are incorrect.
     with pytest.raises(Exception) as e_info:
         url2 = "ha ha ha ha"
-        waybackpy.save(url2, UA=user_agent)
+        waybackpy.Url(url2, user_agent)

     # Test for urls not allowed to archive by robot.txt.
     with pytest.raises(Exception) as e_info:
         url3 = "http://www.archive.is/faq.html"
-        waybackpy.save(url3, UA=user_agent)
+        target = waybackpy.Url(url3, user_agent)
+        target.save()

     # Test for non existent urls.
     with pytest.raises(Exception) as e_info:
         url4 = "https://githfgdhshajagjstgeths537agajaajgsagudadhuss8762346887adsiugujsdgahub.us"
-        archived_url4 = waybackpy.save(url4, UA=user_agent)
+        target = waybackpy.Url(url4, user_agent)
+        target.save()

 def test_near():
     url = "google.com"
-    archive_near_year = waybackpy.near(url, year=2010, UA=user_agent)
+    target = waybackpy.Url(url, user_agent)
+    archive_near_year = target.near(year=2010)
     assert "2010" in archive_near_year

-    archive_near_month_year = waybackpy.near(url, year=2015, month=2, UA=user_agent)
+    archive_near_month_year = target.near(year=2015, month=2)
     assert ("201502" in archive_near_month_year) or ("201501" in archive_near_month_year) or ("201503" in archive_near_month_year)

-    archive_near_day_month_year = waybackpy.near(url, year=2006, month=11, day=15, UA=user_agent)
+    archive_near_day_month_year = target.near(year=2006, month=11, day=15)
assert ("20061114" in archive_near_day_month_year) or ("20061115" in archive_near_day_month_year) or ("2006116" in archive_near_day_month_year)
|
assert ("20061114" in archive_near_day_month_year) or ("20061115" in archive_near_day_month_year) or ("2006116" in archive_near_day_month_year)
|
||||||

-    archive_near_hour_day_month_year = waybackpy.near("www.python.org", year=2008, month=5, day=9, hour=15, UA=user_agent)
+    target = waybackpy.Url("www.python.org", user_agent)
+    archive_near_hour_day_month_year = target.near(year=2008, month=5, day=9, hour=15)
     assert ("2008050915" in archive_near_hour_day_month_year) or ("2008050914" in archive_near_hour_day_month_year) or ("2008050913" in archive_near_hour_day_month_year)

     with pytest.raises(Exception) as e_info:
         NeverArchivedUrl = "https://ee_3n.wrihkeipef4edia.org/rwti5r_ki/Nertr6w_rork_rse7c_urity"
-        waybackpy.near(NeverArchivedUrl, year=2010, UA=user_agent)
+        target = waybackpy.Url(NeverArchivedUrl, user_agent)
+        target.near(year=2010)

 def test_oldest():
     url = "github.com/akamhy/waybackpy"
-    archive_oldest = waybackpy.oldest(url, UA=user_agent)
-    assert "20200504141153" in archive_oldest
+    target = waybackpy.Url(url, user_agent)
+    assert "20200504141153" in target.oldest()

 def test_newest():
     url = "github.com/akamhy/waybackpy"
-    archive_newest = waybackpy.newest(url, UA=user_agent)
-    assert url in archive_newest
+    target = waybackpy.Url(url, user_agent)
+    assert url in target.newest()

 def test_get():
-    oldest_google_archive = waybackpy.oldest("google.com", UA=user_agent)
-    oldest_google_page_text = waybackpy.get(oldest_google_archive, UA=user_agent)
-    assert "Welcome to Google" in oldest_google_page_text
+    target = waybackpy.Url("google.com", user_agent)
+    assert "Welcome to Google" in target.get(target.oldest())

 def test_total_archives():

-    count1 = waybackpy.total_archives(" https://google.com ", UA=user_agent)
-    assert count1 > 500000
+    target = waybackpy.Url(" https://google.com ", user_agent)
+    assert target.total_archives() > 500000

-    count2 = waybackpy.total_archives("https://gaha.e4i3n.m5iai3kip6ied.cima/gahh2718gs/ahkst63t7gad8", UA=user_agent)
-    assert count2 == 0
+    target = waybackpy.Url(" https://gaha.e4i3n.m5iai3kip6ied.cima/gahh2718gs/ahkst63t7gad8 ", user_agent)
+    assert target.total_archives() == 0

 if __name__ == "__main__":
     test_clean_url()
@@ -96,3 +101,4 @@ if __name__ == "__main__":
     print(".")
     test_total_archives()
     print(".")
+    print("OK")
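For quick reference, the new interface these tests exercise can be sketched as below. This is a minimal sketch based only on the calls that appear in the diff (Url, save, near, oldest, newest, get, total_archives); it is not a complete API listing, and the example URL is just an illustration.

import waybackpy

user_agent = "Mozilla/5.0 (Windows NT 6.2; rv:20.0) Gecko/20121202 Firefox/20.0"

# Wrap the URL and user agent in a Url object, then call the archive helpers on it.
target = waybackpy.Url("https://github.com/akamhy/waybackpy", user_agent)

archive_url = target.save()               # archive the page now; the result contains the original URL
oldest_snapshot = target.oldest()         # oldest archived copy
newest_snapshot = target.newest()         # most recent archived copy
nearby_snapshot = target.near(year=2010)  # snapshot closest to the requested year/month/day/hour
snapshot_count = target.total_archives()  # number of snapshots on record
page_text = target.get(target.oldest())   # fetch the text of a snapshot

As the pytest.raises blocks above assert, malformed, robots-blocked, non-existent, or never-archived URLs raise an Exception.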