Compare commits
173 Commits
SHA1 | Author | Date | |
---|---|---|---|
e231228721 | |||
b8b2d6dfa9 | |||
3eca6294df | |||
eb037a0284 | |||
a01821f20b | |||
b21036f8df | |||
b43bacb7ac | |||
f7313b255a | |||
7457e1c793 | |||
f7493d823f | |||
7fa7b59ce3 | |||
78a608db50 | |||
93f7dfdaf9 | |||
83c6f256c9 | |||
dee9105794 | |||
3bfc3b46d0 | |||
553f150bee | |||
b3a7e714a5 | |||
cd9841713c | |||
1ea9548d46 | |||
be7642c837 | |||
a418a4e464 | |||
aec035ef1e | |||
6d37993ab9 | |||
72b80ca44e | |||
c10aa9279c | |||
68d809a7d6 | |||
4ad09a419b | |||
ddc6620f09 | |||
4066a65678 | |||
8e46a9ba7a | |||
a5a98b9b00 | |||
a721ab7d6c | |||
7db27ae5e1 | |||
8fd4462025 | |||
c458a15820 | |||
bae3412bee | |||
94cb08bb37 | |||
af888db13e | |||
d24f2408ee | |||
ddd2274015 | |||
99abdb7c67 | |||
f3bb9a8540 | |||
bb94e0d1c5 | |||
1a78d88be2 | |||
3ec61758b3 | |||
83c962166d | |||
e87dee3bdf | |||
b27bfff15a | |||
970fc1cd08 | |||
65391bf14b | |||
8ab116f276 | |||
6f82041ec9 | |||
11059c960e | |||
eee1b8eba1 | |||
f7de8f5575 | |||
3fa0c32064 | |||
aa1e3b8825 | |||
58d2d585c8 | |||
e8efed2e2f | |||
49089b7321 | |||
55d8687566 | |||
0fa28527af | |||
68259fd2d9 | |||
e7086a89d3 | |||
e39467227c | |||
ba840404cf | |||
8fbd2d9e55 | |||
eebf6043de | |||
3d3b09d6d8 | |||
ef15b5863c | |||
256c0cdb6b | |||
12c72a8294 | |||
0ad27f5ecc | |||
700b60b5f8 | |||
11032596c8 | |||
9727f92168 | |||
d2893fec13 | |||
f1353b2129 | |||
c76a95ef90 | |||
62d88359ce | |||
9942c474c9 | |||
dfb736e794 | |||
84d1766917 | |||
9d3cdfafb3 | |||
20a16bfa45 | |||
f2112c73f6 | |||
9860527d96 | |||
9ac1e877c8 | |||
f881705d00 | |||
f015c3f4f3 | |||
42ac399362 | |||
e9d010c793 | |||
58a6409528 | |||
7ca2029158 | |||
80331833f2 | |||
5e3d3a815f | |||
6182a18cf4 | |||
9bca750310 | |||
c22749a6a3 | |||
151df94fe3 | |||
24540d0b2c | |||
bdfc72d05d | |||
3b104c1a28 | |||
fb0d4658a7 | |||
48833980e1 | |||
0c4f119981 | |||
afded51a04 | |||
b950616561 | |||
444675538f | |||
0ca6710334 | |||
01a7c591ad | |||
74d3bc154b | |||
a8e94dfb25 | |||
cc38798b32 | |||
bc3dd44f27 | |||
ba46cdafe2 | |||
538afb14e9 | |||
7605b614ee | |||
d0a4e25cf5 | |||
8c5c0153da | |||
e7dac74906 | |||
c686708c9e | |||
f9ae8ada70 | |||
e56ece3dc9 | |||
db127a5c54 | |||
ed497bbd23 | |||
45fe07ddb6 | |||
0029d63d8a | |||
beb5b625ec | |||
b40d734346 | |||
be0a30de85 | |||
3a65a60bd6 | |||
7b626f5ea5 | |||
73371d6c68 | |||
8904ba4d67 | |||
b4a7f7ea6f | |||
a2ead04021 | |||
3513feb075 | |||
d34b98373f | |||
38f3b81742 | |||
660a826aed | |||
a52d035c0e | |||
6737ce0e26 | |||
98cc918c8f | |||
b103bfc6e4 | |||
edd05838b8 | |||
031212e161 | |||
d3bd5b05b5 | |||
d6598a67b9 | |||
e5a6057249 | |||
2a1b3bc6ee | |||
b4ca98eca2 | |||
36b01754ec | |||
3d8bf4eec6 | |||
e7761b3709 | |||
df851dce0c | |||
f5acbcfc95 | |||
44156e5e7e | |||
a6cb955669 | |||
8acb14a243 | |||
7d434c3f0f | |||
057c61d677 | |||
6705c04f38 | |||
e631c0aadb | |||
423782ea75 | |||
7944f0878d | |||
850b055527 | |||
32bc765113 | |||
09b4ba2649 | |||
929790feca | |||
09a521ae43 | |||
a503be5a86 |
31
.github/workflows/python-publish.yml
vendored
Normal file
@@ -0,0 +1,31 @@
# This workflows will upload a Python Package using Twine when a release is created
# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries

name: Upload Python Package

on:
  release:
    types: [created]

jobs:
  deploy:

    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v2
    - name: Set up Python
      uses: actions/setup-python@v2
      with:
        python-version: '3.x'
    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        pip install setuptools wheel twine
    - name: Build and publish
      env:
        TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
        TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
      run: |
        python setup.py sdist bdist_wheel
        twine upload dist/*
19
.travis.yml
Normal file
@@ -0,0 +1,19 @@
language: python
os: linux
dist: xenial
cache: pip
python:
  - 2.7
  - 3.6
  - 3.8
before_install:
  - python --version
  - pip install -U pip
  - pip install -U pytest
  - pip install codecov
  - pip install pytest pytest-cov
script:
  - cd tests
  - pytest --cov=../waybackpy
after_success:
  - if [[ $TRAVIS_PYTHON_VERSION == 3.8 ]]; then python -m codecov; fi
8
.whitesource
Normal file
@@ -0,0 +1,8 @@
{
  "checkRunSettings": {
    "vulnerableCheckRunConclusionLevel": "failure"
  },
  "issueSettings": {
    "minSeverityLevel": "LOW"
  }
}
290
README.md
@@ -1,2 +1,288 @@
# pywayback
A python wrapper for Internet Archive's Wayback Machine
# waybackpy

[![Build Status](https://img.shields.io/travis/akamhy/waybackpy.svg?label=Travis%20CI&logo=travis&style=flat-square)](https://travis-ci.org/akamhy/waybackpy)
[![Downloads](https://img.shields.io/pypi/dm/waybackpy.svg)](https://pypistats.org/packages/waybackpy)
[![Release](https://img.shields.io/github/v/release/akamhy/waybackpy.svg)](https://github.com/akamhy/waybackpy/releases)
[![Codacy Badge](https://api.codacy.com/project/badge/Grade/255459cede9341e39436ec8866d3fb65)](https://www.codacy.com/manual/akamhy/waybackpy?utm_source=github.com&utm_medium=referral&utm_content=akamhy/waybackpy&utm_campaign=Badge_Grade)
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://github.com/akamhy/waybackpy/blob/master/LICENSE)
[![Maintainability](https://api.codeclimate.com/v1/badges/942f13d8177a56c1c906/maintainability)](https://codeclimate.com/github/akamhy/waybackpy/maintainability)
[![CodeFactor](https://www.codefactor.io/repository/github/akamhy/waybackpy/badge)](https://www.codefactor.io/repository/github/akamhy/waybackpy)
[![made-with-python](https://img.shields.io/badge/Made%20with-Python-1f425f.svg)](https://www.python.org/)
![pypi](https://img.shields.io/pypi/v/waybackpy.svg)
![PyPI - Python Version](https://img.shields.io/pypi/pyversions/waybackpy?style=flat-square)
[![Maintenance](https://img.shields.io/badge/Maintained%3F-yes-green.svg)](https://github.com/akamhy/waybackpy/graphs/commit-activity)
[![codecov](https://codecov.io/gh/akamhy/waybackpy/branch/master/graph/badge.svg)](https://codecov.io/gh/akamhy/waybackpy)
![Repo size](https://img.shields.io/github/repo-size/akamhy/waybackpy.svg?label=Repo%20size&style=flat-square)
![contributions welcome](https://img.shields.io/static/v1.svg?label=Contributions&message=Welcome&color=0059b3&style=flat-square)


![Internet Archive](https://upload.wikimedia.org/wikipedia/commons/thumb/8/84/Internet_Archive_logo_and_wordmark.svg/84px-Internet_Archive_logo_and_wordmark.svg.png)
![Wayback Machine](https://upload.wikimedia.org/wikipedia/commons/thumb/0/01/Wayback_Machine_logo_2010.svg/284px-Wayback_Machine_logo_2010.svg.png)

Waybackpy is a Python library that interfaces with the [Internet Archive](https://en.wikipedia.org/wiki/Internet_Archive)'s [Wayback Machine](https://en.wikipedia.org/wiki/Wayback_Machine) API. Archive pages and retrieve archived pages easily.

Table of contents
=================
<!--ts-->

* [Installation](#installation)

* [Usage](#usage)
  * [As a python package](#as-a-python-package)
    * [Saving an url using save()](#capturing-aka-saving-an-url-using-save)
    * [Receiving the oldest archive for an URL Using oldest()](#receiving-the-oldest-archive-for-an-url-using-oldest)
    * [Receiving the recent most/newest archive for an URL using newest()](#receiving-the-newest-archive-for-an-url-using-newest)
    * [Receiving archive close to a specified year, month, day, hour, and minute using near()](#receiving-archive-close-to-a-specified-year-month-day-hour-and-minute-using-near)
    * [Get the content of webpage using get()](#get-the-content-of-webpage-using-get)
    * [Count total archives for an URL using total_archives()](#count-total-archives-for-an-url-using-total_archives)
  * [With CLI](#with-the-cli)
    * [Save](#save)
    * [Oldest archive](#oldest-archive)
    * [Newest archive](#newest-archive)
    * [Total archives](#total-number-of-archives)
    * [Archive near a time](#archive-near-time)
    * [Get the source code](#get-the-source-code)

* [Tests](#tests)

* [Dependency](#dependency)

* [License](#license)

<!--te-->

## Installation
Using [pip](https://en.wikipedia.org/wiki/Pip_(package_manager)):
```bash
pip install waybackpy
```
or direct from this repository using git.
```bash
pip install git+https://github.com/akamhy/waybackpy.git
```

## Usage

### As a python package

#### Capturing aka Saving an url using save()
```python
import waybackpy

new_archive_url = waybackpy.Url(

    url = "https://en.wikipedia.org/wiki/Multivariable_calculus",
    user_agent = "Mozilla/5.0 (Windows NT 5.1; rv:40.0) Gecko/20100101 Firefox/40.0"

).save()

print(new_archive_url)
```
```bash
https://web.archive.org/web/20200504141153/https://github.com/akamhy/waybackpy
```
<sub>Try this out in your browser @ <https://repl.it/@akamhy/WaybackPySaveExample></sub>



#### Receiving the oldest archive for an URL using oldest()
```python
import waybackpy

oldest_archive_url = waybackpy.Url(

    "https://www.google.com/",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:40.0) Gecko/20100101 Firefox/40.0"

).oldest()

print(oldest_archive_url)
```
```bash
http://web.archive.org/web/19981111184551/http://google.com:80/
```
<sub>Try this out in your browser @ <https://repl.it/@akamhy/WaybackPyOldestExample></sub>



#### Receiving the newest archive for an URL using newest()
```python
import waybackpy

newest_archive_url = waybackpy.Url(

    "https://www.facebook.com/",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:39.0) Gecko/20100101 Firefox/39.0"

).newest()

print(newest_archive_url)
```
```bash
https://web.archive.org/web/20200714013225/https://www.facebook.com/
```
<sub>Try this out in your browser @ <https://repl.it/@akamhy/WaybackPyNewestExample></sub>



#### Receiving archive close to a specified year, month, day, hour, and minute using near()
```python
from waybackpy import Url

user_agent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:38.0) Gecko/20100101 Firefox/38.0"
github_url = "https://github.com/"


github_wayback_obj = Url(github_url, user_agent)

# Do not pad (don't use zeros in the month, year, day, minute, and hour arguments). e.g. For January, set month = 1 and not month = 01.
```
```python
github_archive_near_2010 = github_wayback_obj.near(year=2010)
print(github_archive_near_2010)
```
```bash
https://web.archive.org/web/20100719134402/http://github.com/
```
```python
github_archive_near_2011_may = github_wayback_obj.near(year=2011, month=5)
print(github_archive_near_2011_may)
```
```bash
https://web.archive.org/web/20110519185447/https://github.com/
```
```python
github_archive_near_2015_january_26 = github_wayback_obj.near(
    year=2015, month=1, day=26
)
print(github_archive_near_2015_january_26)
```
```bash
https://web.archive.org/web/20150127031159/https://github.com
```
```python
github_archive_near_2018_4_july_9_2_am = github_wayback_obj.near(
    year=2018, month=7, day=4, hour = 9, minute = 2
)
print(github_archive_near_2018_4_july_9_2_am)
```
```bash
https://web.archive.org/web/20180704090245/https://github.com/

```

<sub>The library doesn't support seconds yet. You are encouraged to create a PR ;)</sub>

<sub>Try this out in your browser @ <https://repl.it/@akamhy/WaybackPyNearExample></sub>



#### Get the content of webpage using get()
```python
import waybackpy

google_url = "https://www.google.com/"

User_Agent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36"

waybackpy_url_object = waybackpy.Url(google_url, User_Agent)


# If no argument is passed in get(), it gets the source of the Url used to create the object.
current_google_url_source = waybackpy_url_object.get()
print(current_google_url_source)


# The following chunk of code will force a new archive of google.com and get the source of the archived page.
# waybackpy_url_object.save() type is string.
google_newest_archive_source = waybackpy_url_object.get(
    waybackpy_url_object.save()
)
print(google_newest_archive_source)


# waybackpy_url_object.oldest() type is str, it's oldest archive of google.com
google_oldest_archive_source = waybackpy_url_object.get(
    waybackpy_url_object.oldest()
)
print(google_oldest_archive_source)
```
<sub>Try this out in your browser @ <https://repl.it/@akamhy/WaybackPyGetExample#main.py></sub>


#### Count total archives for an URL using total_archives()
```python
import waybackpy

URL = "https://en.wikipedia.org/wiki/Python (programming language)"

UA = "Mozilla/5.0 (iPad; CPU OS 8_1_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B435 Safari/600.1.4"

archive_count = waybackpy.Url(
    url=URL,
    user_agent=UA
).total_archives()

print(archive_count) # total_archives() returns an int
```
```bash
2440
```
<sub>Try this out in your browser @ <https://repl.it/@akamhy/WaybackPyTotalArchivesExample></sub>

### With the CLI

#### Save
```bash
$ waybackpy --url "https://en.wikipedia.org/wiki/Social_media" --user_agent "my-unique-user-agent" --save
https://web.archive.org/web/20200719062108/https://en.wikipedia.org/wiki/Social_media
```
<sub>Try this out in your browser @ <https://repl.it/@akamhy/WaybackPyBashSave></sub>

#### Oldest archive
```bash
$ waybackpy --url "https://en.wikipedia.org/wiki/SpaceX" --user_agent "my-unique-user-agent" --oldest
https://web.archive.org/web/20040803000845/http://en.wikipedia.org:80/wiki/SpaceX
```
<sub>Try this out in your browser @ <https://repl.it/@akamhy/WaybackPyBashOldest></sub>

#### Newest archive
```bash
$ waybackpy --url "https://en.wikipedia.org/wiki/YouTube" --user_agent "my-unique-user-agent" --newest
https://web.archive.org/web/20200606044708/https://en.wikipedia.org/wiki/YouTube
```
<sub>Try this out in your browser @ <https://repl.it/@akamhy/WaybackPyBashNewest></sub>

#### Total number of archives
```bash
$ waybackpy --url "https://en.wikipedia.org/wiki/Linux_kernel" --user_agent "my-unique-user-agent" --total
853
```
<sub>Try this out in your browser @ <https://repl.it/@akamhy/WaybackPyBashTotal></sub>

#### Archive near time
```bash
$ waybackpy --url facebook.com --user_agent "my-unique-user-agent" --near --year 2012 --month 5 --day 12
https://web.archive.org/web/20120512142515/https://www.facebook.com/
```
<sub>Try this out in your browser @ <https://repl.it/@akamhy/WaybackPyBashNear></sub>

#### Get the source code
```bash
$ waybackpy --url google.com --user_agent "my-unique-user-agent" --get url # Prints the source code of the url
$ waybackpy --url google.com --user_agent "my-unique-user-agent" --get oldest # Prints the source code of the oldest archive
$ waybackpy --url google.com --user_agent "my-unique-user-agent" --get newest # Prints the source code of the newest archive
$ waybackpy --url google.com --user_agent "my-unique-user-agent" --get save # Save a new archive on wayback machine then print the source code of this archive.
```
<sub>Try this out in your browser @ <https://repl.it/@akamhy/WaybackPyBashGet></sub>

## Tests
* [Here](https://github.com/akamhy/waybackpy/tree/master/tests)


## Dependency
* None, just python standard libraries (re, json, urllib, argparse and datetime). Both python 2 and 3 are supported :)


## License
[MIT License](https://github.com/akamhy/waybackpy/blob/master/LICENSE)
1
_config.yml
Normal file
@@ -0,0 +1 @@
theme: jekyll-theme-cayman
385
index.rst
Normal file
@@ -0,0 +1,385 @@
waybackpy
=========

|Build Status| |Downloads| |Release| |Codacy Badge| |License: MIT|
|Maintainability| |CodeFactor| |made-with-python| |pypi| |PyPI - Python
Version| |Maintenance| |codecov| |image12| |contributions welcome|

|Internet Archive| |Wayback Machine|

Waybackpy is a Python library that interfaces with the `Internet
Archive <https://en.wikipedia.org/wiki/Internet_Archive>`__'s `Wayback
Machine <https://en.wikipedia.org/wiki/Wayback_Machine>`__ API. Archive
pages and retrieve archived pages easily.

Table of contents
=================

.. raw:: html

   <!--ts-->

-  `Installation <#installation>`__

-  `Usage <#usage>`__

   -  `As a python package <#as-a-python-package>`__

      -  `Saving an url using
         save() <#capturing-aka-saving-an-url-using-save>`__
      -  `Receiving the oldest archive for an URL Using
         oldest() <#receiving-the-oldest-archive-for-an-url-using-oldest>`__
      -  `Receiving the recent most/newest archive for an URL using
         newest() <#receiving-the-newest-archive-for-an-url-using-newest>`__
      -  `Receiving archive close to a specified year, month, day, hour,
         and minute using
         near() <#receiving-archive-close-to-a-specified-year-month-day-hour-and-minute-using-near>`__
      -  `Get the content of webpage using
         get() <#get-the-content-of-webpage-using-get>`__
      -  `Count total archives for an URL using
         total\_archives() <#count-total-archives-for-an-url-using-total_archives>`__

   -  `With CLI <#with-the-cli>`__

      -  `Save <#save>`__
      -  `Oldest archive <#oldest-archive>`__
      -  `Newest archive <#newest-archive>`__
      -  `Total archives <#total-number-of-archives>`__
      -  `Archive near a time <#archive-near-time>`__
      -  `Get the source code <#get-the-source-code>`__

-  `Tests <#tests>`__

-  `Dependency <#dependency>`__

-  `License <#license>`__

.. raw:: html

   <!--te-->

Installation
------------

Using `pip <https://en.wikipedia.org/wiki/Pip_(package_manager)>`__:

.. code:: bash

   pip install waybackpy

or direct from this repository using git.

.. code:: bash

   pip install git+https://github.com/akamhy/waybackpy.git

Usage
-----

As a python package
~~~~~~~~~~~~~~~~~~~

Capturing aka Saving an url using save()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

.. code:: python

   import waybackpy

   new_archive_url = waybackpy.Url(

       url = "https://en.wikipedia.org/wiki/Multivariable_calculus",
       user_agent = "Mozilla/5.0 (Windows NT 5.1; rv:40.0) Gecko/20100101 Firefox/40.0"

   ).save()

   print(new_archive_url)

.. code:: bash

   https://web.archive.org/web/20200504141153/https://github.com/akamhy/waybackpy

Try this out in your browser @
https://repl.it/@akamhy/WaybackPySaveExample\

Receiving the oldest archive for an URL using oldest()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

.. code:: python

   import waybackpy

   oldest_archive_url = waybackpy.Url(

       "https://www.google.com/",
       "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:40.0) Gecko/20100101 Firefox/40.0"

   ).oldest()

   print(oldest_archive_url)

.. code:: bash

   http://web.archive.org/web/19981111184551/http://google.com:80/

Try this out in your browser @
https://repl.it/@akamhy/WaybackPyOldestExample\

Receiving the newest archive for an URL using newest()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

.. code:: python

   import waybackpy

   newest_archive_url = waybackpy.Url(

       "https://www.facebook.com/",
       "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:39.0) Gecko/20100101 Firefox/39.0"

   ).newest()

   print(newest_archive_url)

.. code:: bash

   https://web.archive.org/web/20200714013225/https://www.facebook.com/

Try this out in your browser @
https://repl.it/@akamhy/WaybackPyNewestExample\

Receiving archive close to a specified year, month, day, hour, and minute using near()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

.. code:: python

   from waybackpy import Url

   user_agent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:38.0) Gecko/20100101 Firefox/38.0"
   github_url = "https://github.com/"


   github_wayback_obj = Url(github_url, user_agent)

   # Do not pad (don't use zeros in the month, year, day, minute, and hour arguments). e.g. For January, set month = 1 and not month = 01.

.. code:: python

   github_archive_near_2010 = github_wayback_obj.near(year=2010)
   print(github_archive_near_2010)

.. code:: bash

   https://web.archive.org/web/20100719134402/http://github.com/

.. code:: python

   github_archive_near_2011_may = github_wayback_obj.near(year=2011, month=5)
   print(github_archive_near_2011_may)

.. code:: bash

   https://web.archive.org/web/20110519185447/https://github.com/

.. code:: python

   github_archive_near_2015_january_26 = github_wayback_obj.near(
       year=2015, month=1, day=26
   )
   print(github_archive_near_2015_january_26)

.. code:: bash

   https://web.archive.org/web/20150127031159/https://github.com

.. code:: python

   github_archive_near_2018_4_july_9_2_am = github_wayback_obj.near(
       year=2018, month=7, day=4, hour = 9, minute = 2
   )
   print(github_archive_near_2018_4_july_9_2_am)

.. code:: bash

   https://web.archive.org/web/20180704090245/https://github.com/

The library doesn't support seconds yet. You are encouraged to create a
PR ;)

Try this out in your browser @
https://repl.it/@akamhy/WaybackPyNearExample\

Get the content of webpage using get()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

.. code:: python

   import waybackpy

   google_url = "https://www.google.com/"

   User_Agent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36"

   waybackpy_url_object = waybackpy.Url(google_url, User_Agent)


   # If no argument is passed in get(), it gets the source of the Url used to create the object.
   current_google_url_source = waybackpy_url_object.get()
   print(current_google_url_source)


   # The following chunk of code will force a new archive of google.com and get the source of the archived page.
   # waybackpy_url_object.save() type is string.
   google_newest_archive_source = waybackpy_url_object.get(
       waybackpy_url_object.save()
   )
   print(google_newest_archive_source)


   # waybackpy_url_object.oldest() type is str, it's oldest archive of google.com
   google_oldest_archive_source = waybackpy_url_object.get(
       waybackpy_url_object.oldest()
   )
   print(google_oldest_archive_source)

Try this out in your browser @
https://repl.it/@akamhy/WaybackPyGetExample#main.py\

Count total archives for an URL using total\_archives()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

.. code:: python

   import waybackpy

   URL = "https://en.wikipedia.org/wiki/Python (programming language)"

   UA = "Mozilla/5.0 (iPad; CPU OS 8_1_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B435 Safari/600.1.4"

   archive_count = waybackpy.Url(
       url=URL,
       user_agent=UA
   ).total_archives()

   print(archive_count) # total_archives() returns an int

.. code:: bash

   2440

Try this out in your browser @
https://repl.it/@akamhy/WaybackPyTotalArchivesExample\

With the CLI
~~~~~~~~~~~~

Save
^^^^

.. code:: bash

   $ waybackpy --url "https://en.wikipedia.org/wiki/Social_media" --user_agent "my-unique-user-agent" --save
   https://web.archive.org/web/20200719062108/https://en.wikipedia.org/wiki/Social_media

Try this out in your browser @
https://repl.it/@akamhy/WaybackPyBashSave\

Oldest archive
^^^^^^^^^^^^^^

.. code:: bash

   $ waybackpy --url "https://en.wikipedia.org/wiki/SpaceX" --user_agent "my-unique-user-agent" --oldest
   https://web.archive.org/web/20040803000845/http://en.wikipedia.org:80/wiki/SpaceX

Try this out in your browser @
https://repl.it/@akamhy/WaybackPyBashOldest\

Newest archive
^^^^^^^^^^^^^^

.. code:: bash

   $ waybackpy --url "https://en.wikipedia.org/wiki/YouTube" --user_agent "my-unique-user-agent" --newest
   https://web.archive.org/web/20200606044708/https://en.wikipedia.org/wiki/YouTube

Try this out in your browser @
https://repl.it/@akamhy/WaybackPyBashNewest\

Total number of archives
^^^^^^^^^^^^^^^^^^^^^^^^

.. code:: bash

   $ waybackpy --url "https://en.wikipedia.org/wiki/Linux_kernel" --user_agent "my-unique-user-agent" --total
   853

Try this out in your browser @
https://repl.it/@akamhy/WaybackPyBashTotal\

Archive near time
^^^^^^^^^^^^^^^^^

.. code:: bash

   $ waybackpy --url facebook.com --user_agent "my-unique-user-agent" --near --year 2012 --month 5 --day 12
   https://web.archive.org/web/20120512142515/https://www.facebook.com/

Try this out in your browser @
https://repl.it/@akamhy/WaybackPyBashNear\

Get the source code
^^^^^^^^^^^^^^^^^^^

.. code:: bash

   $ waybackpy --url google.com --user_agent "my-unique-user-agent" --get url # Prints the source code of the url
   $ waybackpy --url google.com --user_agent "my-unique-user-agent" --get oldest # Prints the source code of the oldest archive
   $ waybackpy --url google.com --user_agent "my-unique-user-agent" --get newest # Prints the source code of the newest archive
   $ waybackpy --url google.com --user_agent "my-unique-user-agent" --get save # Save a new archive on wayback machine then print the source code of this archive.

Try this out in your browser @
https://repl.it/@akamhy/WaybackPyBashGet\

Tests
-----

-  `Here <https://github.com/akamhy/waybackpy/tree/master/tests>`__

Dependency
----------

-  None, just python standard libraries (re, json, urllib, argparse and datetime).
   Both python 2 and 3 are supported :)

License
-------

`MIT
License <https://github.com/akamhy/waybackpy/blob/master/LICENSE>`__

.. |Build Status| image:: https://img.shields.io/travis/akamhy/waybackpy.svg?label=Travis%20CI&logo=travis&style=flat-square
   :target: https://travis-ci.org/akamhy/waybackpy
.. |Downloads| image:: https://img.shields.io/pypi/dm/waybackpy.svg
   :target: https://pypistats.org/packages/waybackpy
.. |Release| image:: https://img.shields.io/github/v/release/akamhy/waybackpy.svg
   :target: https://github.com/akamhy/waybackpy/releases
.. |Codacy Badge| image:: https://api.codacy.com/project/badge/Grade/255459cede9341e39436ec8866d3fb65
   :target: https://www.codacy.com/manual/akamhy/waybackpy?utm_source=github.com&utm_medium=referral&utm_content=akamhy/waybackpy&utm_campaign=Badge_Grade
.. |License: MIT| image:: https://img.shields.io/badge/License-MIT-yellow.svg
   :target: https://github.com/akamhy/waybackpy/blob/master/LICENSE
.. |Maintainability| image:: https://api.codeclimate.com/v1/badges/942f13d8177a56c1c906/maintainability
   :target: https://codeclimate.com/github/akamhy/waybackpy/maintainability
.. |CodeFactor| image:: https://www.codefactor.io/repository/github/akamhy/waybackpy/badge
   :target: https://www.codefactor.io/repository/github/akamhy/waybackpy
.. |made-with-python| image:: https://img.shields.io/badge/Made%20with-Python-1f425f.svg
   :target: https://www.python.org/
.. |pypi| image:: https://img.shields.io/pypi/v/waybackpy.svg
.. |PyPI - Python Version| image:: https://img.shields.io/pypi/pyversions/waybackpy?style=flat-square
.. |Maintenance| image:: https://img.shields.io/badge/Maintained%3F-yes-green.svg
   :target: https://github.com/akamhy/waybackpy/graphs/commit-activity
.. |codecov| image:: https://codecov.io/gh/akamhy/waybackpy/branch/master/graph/badge.svg
   :target: https://codecov.io/gh/akamhy/waybackpy
.. |image12| image:: https://img.shields.io/github/repo-size/akamhy/waybackpy.svg?label=Repo%20size&style=flat-square
.. |contributions welcome| image:: https://img.shields.io/static/v1.svg?label=Contributions&message=Welcome&color=0059b3&style=flat-square
.. |Internet Archive| image:: https://upload.wikimedia.org/wikipedia/commons/thumb/8/84/Internet_Archive_logo_and_wordmark.svg/84px-Internet_Archive_logo_and_wordmark.svg.png
.. |Wayback Machine| image:: https://upload.wikimedia.org/wikipedia/commons/thumb/0/01/Wayback_Machine_logo_2010.svg/284px-Wayback_Machine_logo_2010.svg.png
7
setup.cfg
Normal file
@@ -0,0 +1,7 @@
[metadata]
description-file = README.md
license_file = LICENSE

[flake8]
max-line-length = 88
extend-ignore = E203,W503
54
setup.py
Normal file
@@ -0,0 +1,54 @@
import os.path
from setuptools import setup

with open(os.path.join(os.path.dirname(__file__), 'README.md')) as f:
    long_description = f.read()

about = {}
with open(os.path.join(os.path.dirname(__file__), 'waybackpy', '__version__.py')) as f:
    exec(f.read(), about)

setup(
    name = about['__title__'],
    packages = ['waybackpy'],
    version = about['__version__'],
    description = about['__description__'],
    long_description=long_description,
    long_description_content_type='text/markdown',
    license= about['__license__'],
    author = about['__author__'],
    author_email = about['__author_email__'],
    url = about['__url__'],
    download_url = 'https://github.com/akamhy/waybackpy/archive/2.1.3.tar.gz',
    keywords = ['wayback', 'archive', 'archive website', 'wayback machine', 'Internet Archive'],
    install_requires=[],
    python_requires= ">=2.7",
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'Topic :: Software Development :: Build Tools',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: Implementation :: CPython',
    ],
    entry_points={
        'console_scripts': [
            'waybackpy = waybackpy.cli:main'
        ]
    },
    project_urls={
        'Documentation': 'https://waybackpy.readthedocs.io',
        'Source': 'https://github.com/akamhy/waybackpy',
    },
)
179
tests/test_wrapper.py
Normal file
@@ -0,0 +1,179 @@
# -*- coding: utf-8 -*-
import sys
import pytest
import random
import time

sys.path.append("..")
import waybackpy.wrapper as waybackpy  # noqa: E402

if sys.version_info >= (3, 0):  # If the python ver >= 3
    from urllib.request import Request, urlopen
    from urllib.error import URLError
else:  # For python2.x
    from urllib2 import Request, urlopen, URLError

user_agent = "Mozilla/5.0 (Windows NT 6.2; rv:20.0) Gecko/20121202 Firefox/20.0"


def test_clean_url():
    test_url = " https://en.wikipedia.org/wiki/Network security "
    answer = "https://en.wikipedia.org/wiki/Network_security"
    target = waybackpy.Url(test_url, user_agent)
    test_result = target._clean_url()
    assert answer == test_result


def test_url_check():
    broken_url = "http://wwwgooglecom/"
    with pytest.raises(Exception):
        waybackpy.Url(broken_url, user_agent)


def test_save():
    # Test for urls that exist and can be archived.
    time.sleep(10)

    url_list = [
        "en.wikipedia.org",
        "www.wikidata.org",
        "commons.wikimedia.org",
        "www.wiktionary.org",
        "www.w3schools.com",
        "twitter.com",
    ]
    x = random.randint(0, len(url_list) - 1)
    url1 = url_list[x]
    target = waybackpy.Url(
        url1,
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 "
        "(KHTML, like Gecko) Chrome/36.0.1944.0 Safari/537.36",
    )
    archived_url1 = target.save()
    assert url1 in archived_url1

    if sys.version_info > (3, 6):

        # Test for urls that are incorrect.
        with pytest.raises(Exception):
            url2 = "ha ha ha ha"
            waybackpy.Url(url2, user_agent)
        time.sleep(5)
        # Test for urls not allowed to archive by robot.txt.
        with pytest.raises(Exception):
            url3 = "http://www.archive.is/faq.html"
            target = waybackpy.Url(
                url3,
                "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:25.0) "
                "Gecko/20100101 Firefox/25.0",
            )
            target.save()

        time.sleep(5)
        # Non existent urls, test
        with pytest.raises(Exception):
            url4 = (
                "https://githfgdhshajagjstgeths537agajaajgsagudadhuss87623"
                "46887adsiugujsdgahub.us"
            )
            target = waybackpy.Url(
                url3,
                "Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) "
                "AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 "
                "Safari/533.20.27",
            )
            target.save()

    else:
        pass


def test_near():
    time.sleep(10)
    url = "google.com"
    target = waybackpy.Url(
        url,
        "Mozilla/5.0 (Windows; U; Windows NT 6.0; de-DE) AppleWebKit/533.20.25 "
        "(KHTML, like Gecko) Version/5.0.3 Safari/533.19.4",
    )
    archive_near_year = target.near(year=2010)
    assert "2010" in archive_near_year

    if sys.version_info > (3, 6):
        time.sleep(5)
        archive_near_month_year = target.near(year=2015, month=2)
        assert (
            ("201502" in archive_near_month_year)
            or ("201501" in archive_near_month_year)
            or ("201503" in archive_near_month_year)
        )

        target = waybackpy.Url(
            "www.python.org",
            "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
            "(KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.246",
        )
        archive_near_hour_day_month_year = target.near(
            year=2008, month=5, day=9, hour=15
        )
        assert (
            ("2008050915" in archive_near_hour_day_month_year)
            or ("2008050914" in archive_near_hour_day_month_year)
            or ("2008050913" in archive_near_hour_day_month_year)
        )

        with pytest.raises(Exception):
            NeverArchivedUrl = (
                "https://ee_3n.wrihkeipef4edia.org/rwti5r_ki/Nertr6w_rork_rse7c_urity"
            )
            target = waybackpy.Url(NeverArchivedUrl, user_agent)
            target.near(year=2010)
    else:
        pass


def test_oldest():
    url = "github.com/akamhy/waybackpy"
    target = waybackpy.Url(url, user_agent)
    assert "20200504141153" in target.oldest()


def test_newest():
    url = "github.com/akamhy/waybackpy"
    target = waybackpy.Url(url, user_agent)
    assert url in target.newest()


def test_get():
    target = waybackpy.Url("google.com", user_agent)
    assert "Welcome to Google" in target.get(target.oldest())


def test_wayback_timestamp():
    ts = waybackpy._wayback_timestamp(
        year=2020, month=1, day=2, hour=3, minute=4
    )
    assert "202001020304" in str(ts)


def test_get_response():
    hdr = {
        "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:78.0) "
        "Gecko/20100101 Firefox/78.0"
    }
    req = Request("https://www.google.com", headers=hdr)  # nosec
    response = waybackpy._get_response(req)
    assert response.code == 200


def test_total_archives():
    if sys.version_info > (3, 6):
        target = waybackpy.Url(" https://google.com ", user_agent)
        assert target.total_archives() > 500000
    else:
        pass
    target = waybackpy.Url(
        " https://gaha.e4i3n.m5iai3kip6ied.cima/gahh2718gs/ahkst63t7gad8 ", user_agent
    )
    assert target.total_archives() == 0
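The test_wayback_timestamp case above pins down the zero-padding behaviour of the helper that near() relies on. A minimal standalone sketch of that same call, assuming the waybackpy.wrapper layout shown later in this diff:

```python
# Sketch, not part of the diff: each keyword is zero-padded to two digits and
# concatenated, so unpadded ints are fine (month=1 becomes "01").
import waybackpy.wrapper as waybackpy

ts = waybackpy._wayback_timestamp(year=2020, month=1, day=2, hour=3, minute=4)
print(ts)  # "202001020304"
```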
waybackpy/__init__.py
@@ -1,6 +1,40 @@
# -*- coding: utf-8 -*-
from .wrapper import save, near, oldest, newest

__version__ = "1.1"
# ┏┓┏┓┏┓━━━━━━━━━━┏━━┓━━━━━━━━━━┏┓━━┏━━━┓━━━━━
# ┃┃┃┃┃┃━━━━━━━━━━┃┏┓┃━━━━━━━━━━┃┃━━┃┏━┓┃━━━━━
# ┃┃┃┃┃┃┏━━┓━┏┓━┏┓┃┗┛┗┓┏━━┓━┏━━┓┃┃┏┓┃┗━┛┃┏┓━┏┓
# ┃┗┛┗┛┃┗━┓┃━┃┃━┃┃┃┏━┓┃┗━┓┃━┃┏━┛┃┗┛┛┃┏━━┛┃┃━┃┃
# ┗┓┏┓┏┛┃┗┛┗┓┃┗━┛┃┃┗━┛┃┃┗┛┗┓┃┗━┓┃┏┓┓┃┃━━━┃┗━┛┃
# ━┗┛┗┛━┗━━━┛┗━┓┏┛┗━━━┛┗━━━┛┗━━┛┗┛┗┛┗┛━━━┗━┓┏┛
# ━━━━━━━━━━━┏━┛┃━━━━━━━━━━━━━━━━━━━━━━━━┏━┛┃━
# ━━━━━━━━━━━┗━━┛━━━━━━━━━━━━━━━━━━━━━━━━┗━━┛━

__all__ = ['wrapper', 'exceptions']
"""
Waybackpy is a Python library that interfaces with the Internet Archive's Wayback Machine API.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Archive pages and retrieve archived pages easily.

Usage:
    >>> import waybackpy
    >>> target_url = waybackpy.Url('https://www.python.org', 'Your-apps-cool-user-agent')
    >>> new_archive = target_url.save()
    >>> print(new_archive)
    https://web.archive.org/web/20200502170312/https://www.python.org/

Full documentation @ <https://akamhy.github.io/waybackpy/>.
:copyright: (c) 2020 by akamhy.
:license: MIT
"""

from .wrapper import Url
from .__version__ import (
    __title__,
    __description__,
    __url__,
    __version__,
    __author__,
    __author_email__,
    __license__,
    __copyright__,
)
10
waybackpy/__version__.py
Normal file
@@ -0,0 +1,10 @@
# -*- coding: utf-8 -*-

__title__ = "waybackpy"
__description__ = "A Python library that interfaces with the Internet Archive's Wayback Machine API. Archive pages and retrieve archived pages easily."
__url__ = "https://akamhy.github.io/waybackpy/"
__version__ = "2.1.3"
__author__ = "akamhy"
__author_email__ = "akash3pro@gmail.com"
__license__ = "MIT"
__copyright__ = "Copyright 2020 akamhy"
104
waybackpy/cli.py
Normal file
@@ -0,0 +1,104 @@
# -*- coding: utf-8 -*-
from __future__ import print_function
import argparse
from waybackpy.wrapper import Url
from waybackpy.__version__ import __version__

def _save(obj):
    print(obj.save())

def _oldest(obj):
    print(obj.oldest())

def _newest(obj):
    print(obj.newest())

def _total_archives(obj):
    print(obj.total_archives())

def _near(obj, args):
    _near_args = {}
    if args.year:
        _near_args["year"] = args.year
    if args.month:
        _near_args["month"] = args.month
    if args.day:
        _near_args["day"] = args.day
    if args.hour:
        _near_args["hour"] = args.hour
    if args.minute:
        _near_args["minute"] = args.minute
    print(obj.near(**_near_args))

def _get(obj, args):
    if args.get.lower() == "url":
        print(obj.get())

    elif args.get.lower() == "oldest":
        print(obj.get(obj.oldest()))

    elif args.get.lower() == "latest" or args.get.lower() == "newest":
        print(obj.get(obj.newest()))

    elif args.get.lower() == "save":
        print(obj.get(obj.save()))

    else:
        print("Use get as \"--get 'source'\", 'source' can be one of the followings: \
\n1) url - get the source code of the url specified using --url/-u.\
\n2) oldest - get the source code of the oldest archive for the supplied url.\
\n3) newest - get the source code of the newest archive for the supplied url.\
\n4) save - Create a new archive and get the source code of this new archive for the supplied url.")

def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("-u", "--url", help="URL on which Wayback machine operations would occur.")
    parser.add_argument("-ua", "--user_agent", help="User agent, default user_agent is \"waybackpy python package - https://github.com/akamhy/waybackpy\".")
    parser.add_argument("-s", "--save", action='store_true', help="Save the URL on the Wayback machine.")
    parser.add_argument("-o", "--oldest", action='store_true', help="Oldest archive for the specified URL.")
    parser.add_argument("-n", "--newest", action='store_true', help="Newest archive for the specified URL.")
    parser.add_argument("-t", "--total", action='store_true', help="Total number of archives for the specified URL.")
    parser.add_argument("-g", "--get", help="Prints the source code of the supplied url. Use '--get help' for extended usage.")
    parser.add_argument("-v", "--version", action='store_true', help="Prints the waybackpy version.")

    parser.add_argument("-N", "--near", action='store_true', help="Latest/Newest archive for the specified URL.")
    parser.add_argument("-Y", "--year", type=int, help="Year in integer. For use with --near.")
    parser.add_argument("-M", "--month", type=int, help="Month in integer. For use with --near.")
    parser.add_argument("-D", "--day", type=int, help="Day in integer. For use with --near.")
    parser.add_argument("-H", "--hour", type=int, help="Hour in integer. For use with --near.")
    parser.add_argument("-MIN", "--minute", type=int, help="Minute in integer. For use with --near.")

    args = parser.parse_args()

    if args.version:
        print(__version__)
        return

    if not args.url:
        print("Specify an URL. See --help")
        return

    # create the object with or without the user_agent
    if args.user_agent:
        obj = Url(args.url, args.user_agent)
    else:
        obj = Url(args.url)

    if args.save:
        _save(obj)
    elif args.oldest:
        _oldest(obj)
    elif args.newest:
        _newest(obj)
    elif args.total:
        _total_archives(obj)
    elif args.near:
        _near(obj, args)
    elif args.get:
        _get(obj, args)
    else:
        print("Usage: waybackpy --url [URL] --user_agent [USER AGENT] [OPTIONS]. See --help")


if __name__ == "__main__":
    main()
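The _near() helper above forwards only the flags that were actually supplied, so a CLI call such as `waybackpy --url facebook.com --near --year 2012 --month 5` behaves like the following library call. This is a sketch under the assumption that the package is installed as shown in setup.py; unsupplied fields fall back to the current UTC time inside Url.near().

```python
# Sketch of the library-level equivalent of the --near CLI flags shown above.
from waybackpy.wrapper import Url

obj = Url("facebook.com", "my-unique-user-agent")
print(obj.near(year=2012, month=5))  # day/hour/minute default to "now"
```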
waybackpy/exceptions.py
@@ -1,38 +1,6 @@
# -*- coding: utf-8 -*-

class TooManyArchivingRequests(Exception):
class WaybackError(Exception):
    """
    Error when a single url reqeusted for archiving too many times in a short timespam.
    Wayback machine doesn't supports archivng any url too many times in a short period of time.
    """

class ArchivingNotAllowed(Exception):
    """
    Files like robots.txt are set to deny robot archiving.
    Wayback machine respects these file, will not archive.
    """

class PageNotSaved(Exception):
    """
    When unable to save a webpage.
    """

class ArchiveNotFound(Exception):
    """
    When a page was never archived but client asks for old archive.
    """

class UrlNotFound(Exception):
    """
    Raised when 404 UrlNotFound.
    """

class BadGateWay(Exception):
    """
    Raised when 502 bad gateway.
    """

class InvalidUrl(Exception):
    """
    Raised when url doesn't follow the standard url format.
    Raised when API Service error.
    """
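With the several specific exception classes above collapsed into a single WaybackError, callers only need one except clause. A hedged sketch of typical use (the URL and user agent below are placeholders, not from the diff):

```python
# Sketch: catching the consolidated exception from waybackpy 2.x.
from waybackpy.wrapper import Url
from waybackpy.exceptions import WaybackError

try:
    print(Url("https://example.com", "my-unique-user-agent").oldest())
except WaybackError as err:
    print("Wayback Machine request failed:", err)
```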
waybackpy/wrapper.py
@@ -1,68 +1,181 @@
# -*- coding: utf-8 -*-

import re
import sys
import json
from datetime import datetime
from waybackpy.exceptions import *
try:
from waybackpy.exceptions import WaybackError
from waybackpy.__version__ import __version__

if sys.version_info >= (3, 0):  # If the python ver >= 3
    from urllib.request import Request, urlopen
    from urllib.error import HTTPError
except ImportError:
    from urllib2 import Request, urlopen, HTTPError
    from urllib.error import URLError
else:  # For python2.x
    from urllib2 import Request, urlopen, URLError

default_UA = "waybackpy python package - https://github.com/akamhy/waybackpy"


default_UA = "waybackpy python package"
def _archive_url_parser(header):
    """Parse out the archive from header."""
    # Regex1
    arch = re.search(
        r"rel=\"memento.*?(web\.archive\.org/web/[0-9]{14}/.*?)>", str(header)
    )
    if arch:
        return arch.group(1)
    # Regex2
    arch = re.search(r"X-Cache-Key:\shttps(.*)[A-Z]{2}", str(header))
    if arch:
        return arch.group(1)
    raise WaybackError(
        "No archive URL found in the API response. "
        "This version of waybackpy (%s) is likely out of date. Visit "
        "https://github.com/akamhy/waybackpy for the latest version "
        "of waybackpy.\nHeader:\n%s" % (__version__, str(header))
    )

def clean_url(url):
    return str(url).strip().replace(" ","_")

def save(url,UA=default_UA):
    base_save_url = "https://web.archive.org/save/"
    request_url = (base_save_url + clean_url(url))
    hdr = { 'User-Agent' : '%s' % UA }
    req = Request(request_url, headers=hdr)
    if "." not in url:
        raise InvalidUrl("'%s' is not a vaild url." % url)
def _wayback_timestamp(**kwargs):
    """Return a formatted timestamp."""
    return "".join(
        str(kwargs[key]).zfill(2) for key in ["year", "month", "day", "hour", "minute"]
    )


def _get_response(req):
    """Get response for the supplied request."""
    try:
        response = urlopen(req) #nosec
    except HTTPError as e:
        if e.code == 502:
            raise BadGateWay(e)
        elif e.code == 429:
            raise TooManyArchivingRequests(e)
        elif e.code == 404:
            raise UrlNotFound(e)
        else:
            raise PageNotSaved(e)
        response = urlopen(req)  # nosec
    except Exception:
        try:
            response = urlopen(req)  # nosec
        except Exception as e:
            raise WaybackError(e)
    return response

    header = response.headers
    if "exclusion.robots.policy" in str(header):
        raise ArchivingNotAllowed("Can not archive %s. Disabled by site owner." % (url))
    archive_id = header['Content-Location']
    archived_url = "https://web.archive.org" + archive_id
    return archived_url

def near(
    url,
    year=datetime.utcnow().strftime('%Y'),
    month=datetime.utcnow().strftime('%m'),
    day=datetime.utcnow().strftime('%d'),
    hour=datetime.utcnow().strftime('%H'),
    minute=datetime.utcnow().strftime('%M'),
    UA=default_UA,
):
    timestamp = str(year)+str(month)+str(day)+str(hour)+str(minute)
    request_url = "https://archive.org/wayback/available?url=%s&timestamp=%s" % (clean_url(url), str(timestamp))
    hdr = { 'User-Agent' : '%s' % UA }
    req = Request(request_url, headers=hdr)
    response = urlopen(req) #nosec
    import json
    data = json.loads(response.read().decode('utf8'))
    if not data["archived_snapshots"]:
        raise ArchiveNotFound("'%s' is not yet archived." % url)

    archive_url = (data["archived_snapshots"]["closest"]["url"])
    return archive_url
class Url:
    """waybackpy Url object"""

def oldest(url,UA=default_UA,year=1994):
    return near(url,year=year,UA=UA)
    def __init__(self, url, user_agent=default_UA):
        self.url = url
        self.user_agent = user_agent
        self._url_check()  # checks url validity on init.

def newest(url,UA=default_UA):
    return near(url,UA=UA)
    def __repr__(self):
        return "waybackpy.Url(url=%s, user_agent=%s)" % (self.url, self.user_agent)

    def __str__(self):
        return "%s" % self._clean_url()

    def __len__(self):
        return len(self._clean_url())

    def _url_check(self):
        """Check for common URL problems."""
        if "." not in self.url:
            raise URLError("'%s' is not a vaild URL." % self.url)

    def _clean_url(self):
        """Fix the URL, if possible."""
        return str(self.url).strip().replace(" ", "_")

    def save(self):
        """Create a new Wayback Machine archive for this URL."""
        request_url = "https://web.archive.org/save/" + self._clean_url()
        hdr = {"User-Agent": "%s" % self.user_agent}  # nosec
        req = Request(request_url, headers=hdr)  # nosec
        header = _get_response(req).headers
        return "https://" + _archive_url_parser(header)

    def get(self, url="", user_agent="", encoding=""):
        """Return the source code of the supplied URL.
        If encoding is not supplied, it is auto-detected from the response.
        """
        if not url:
            url = self._clean_url()
        if not user_agent:
            user_agent = self.user_agent

        hdr = {"User-Agent": "%s" % user_agent}
        req = Request(url, headers=hdr)  # nosec
        response = _get_response(req)
        if not encoding:
            try:
                encoding = response.headers["content-type"].split("charset=")[-1]
            except AttributeError:
                encoding = "UTF-8"
        return response.read().decode(encoding.replace("text/html", "UTF-8", 1))

    def get_response(self, req):
        """Get response for the supplied request."""
        try:
            response = urlopen(req)  # nosec
        except Exception:
            try:
                response = urlopen(req)  # nosec
            except Exception as e:
                exc = WaybackError("Error while retrieving %s" % req.full_url)
                exc.__cause__ = e
                raise exc
        return response

    def near(self, year=None, month=None, day=None, hour=None, minute=None):
        """Return the closest Wayback Machine archive to the time supplied.
        Supported params are year, month, day, hour and minute.
        Any non-supplied parameters default to the current time.

        """
        now = datetime.utcnow().timetuple()
        timestamp = _wayback_timestamp(
            year=year if year else now.tm_year,
            month=month if month else now.tm_mon,
            day=day if day else now.tm_mday,
            hour=hour if hour else now.tm_hour,
            minute=minute if minute else now.tm_min,
        )

        request_url = "https://archive.org/wayback/available?url=%s&timestamp=%s" % (
            self._clean_url(),
            timestamp,
        )
        hdr = {"User-Agent": "%s" % self.user_agent}
        req = Request(request_url, headers=hdr)  # nosec
        response = _get_response(req)
        data = json.loads(response.read().decode("UTF-8"))
        if not data["archived_snapshots"]:
            raise WaybackError(
                "'%s' is not yet archived. Use wayback.Url(url, user_agent).save() "
                "to create a new archive." % self._clean_url()
            )
        archive_url = data["archived_snapshots"]["closest"]["url"]
        # wayback machine returns http sometimes, idk why? But they support https
        archive_url = archive_url.replace(
            "http://web.archive.org/web/", "https://web.archive.org/web/", 1
        )
        return archive_url

    def oldest(self, year=1994):
        """Return the oldest Wayback Machine archive for this URL."""
        return self.near(year=year)

    def newest(self):
        """Return the newest Wayback Machine archive available for this URL.

        Due to Wayback Machine database lag, this may not always be the
        most recent archive.
        """
        return self.near()

    def total_archives(self):
        """Returns the total number of Wayback Machine archives for this URL."""
        hdr = {"User-Agent": "%s" % self.user_agent}
        request_url = (
            "https://web.archive.org/cdx/search/cdx?url=%s&output=json&fl=statuscode"
            % self._clean_url()
        )
        req = Request(request_url, headers=hdr)  # nosec
        response = _get_response(req)
        # Most efficient method to count number of archives (yet)
        return str(response.read()).count(",")
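total_archives() above counts commas in the raw CDX response instead of parsing it: because the request asks for a single field (fl=statuscode) with JSON output, the payload is a JSON list with one single-element row per capture plus a header row, so the number of separating commas equals the number of captures. A sketch of that assumption (the sample payload below is illustrative, not a real API response):

```python
# Illustrative only: a fl=statuscode&output=json CDX payload has one
# single-field row per capture, so commas separate rows and nothing else.
sample = '[["statuscode"],["200"],["301"],["200"]]'  # hypothetical payload
print(sample.count(","))  # 3 -> three captures (the header row adds no comma of its own)
```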