From c5de2232ba0e7e15f1c90a02f5434481c7db12c7 Mon Sep 17 00:00:00 2001
From: Akash <64683866+akamhy@users.noreply.github.com>
Date: Sun, 9 Aug 2020 10:53:00 +0530
Subject: [PATCH] Update test_wrapper.py

---
 tests/test_wrapper.py | 21 ++++++++++-----------
 1 file changed, 10 insertions(+), 11 deletions(-)

diff --git a/tests/test_wrapper.py b/tests/test_wrapper.py
index 0be8888..b1c7dba 100644
--- a/tests/test_wrapper.py
+++ b/tests/test_wrapper.py
@@ -74,17 +74,16 @@ def test_save():
     url2 = "ha ha ha ha"
     waybackpy.Url(url2, user_agent)
     time.sleep(5)
-    # Test for urls not allowed to archive by robot.txt.
-    with pytest.raises(Exception):
-        url3 = "http://www.archive.is/faq.html"
-        target = waybackpy.Url(
-            url3,
-            "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:25.0) "
-            "Gecko/20100101 Firefox/25.0",
-        )
-        target.save()
-
-        time.sleep(5)
+    # Test for urls not allowed to archive by robot.txt. Doesn't work anymore. Find alternatives.
+#    with pytest.raises(Exception):
+#        url3 = "http://www.archive.is/faq.html"
+#        target = waybackpy.Url(
+#            url3,
+#            "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:25.0) "
+#            "Gecko/20100101 Firefox/25.0",
+#        )
+#        target.save()
+#        time.sleep(5)
     # Non existent urls, test
     with pytest.raises(Exception):
         url4 = (