Compare commits
265 Commits
| SHA1 |
|---|
| 9afe29a819 |
| d79b10c74c |
| 32314dc102 |
| 50e176e2ba |
| 4007859c92 |
| d8bd6c628d |
| 28f6ff8df2 |
| 7ac9353f74 |
| 15c7244a22 |
| 8510210e94 |
| 552967487e |
| 86a90a3840 |
| 759874cdc6 |
| 06095202fe |
| 06fc7855bf |
| c49fe971fd |
| d6783d5525 |
| 9262f5da21 |
| d1a1cf2546 |
| cd8a32ed1f |
| 57512c65ff |
| d9ea26e11c |
| 2bea92b348 |
| d506685f68 |
| 7844d15d99 |
| c0252edff2 |
| e7488f3a3e |
| aed75ad1db |
| d740959c34 |
| 2d83043ef7 |
| 31b1056217 |
| 97712b2c1e |
| a8acc4c4d8 |
| 1bacd73002 |
| 79901ba968 |
| df64e839d7 |
| 405e9a2a79 |
| db551abbf6 |
| d13dd4db1a |
| d3bb8337a1 |
| fd5e85420c |
| 5c685ef5d7 |
| 6a3d96b453 |
| afe1b15a5f |
| 4fd9d142e7 |
| 5e9fdb40ce |
| fa72098270 |
| d18f955044 |
| 9c340d6967 |
| 78d0e0c126 |
| 564101e6f5 |
| de5a3e1561 |
| 52e46fecc2 |
| 3b6415abc7 |
| 66e16d6d89 |
| 16b9bdd7f9 |
| 7adc01bff2 |
| 9bbd056268 |
| 2ab44391cf |
| cc3628ae18 |
| 1d751b942b |
| 261a867a21 |
| 2e487e88d3 |
| c8d0ad493a |
| ce869177fd |
| 58616fb986 |
| 4e68cd5743 |
| a7b805292d |
| 6dc6124dc4 |
| 5a7fc7d568 |
| 5a9c861cad |
| dd1917c77e |
| db8f902cff |
| 88cda94c0b |
| 09290f88d1 |
| e5835091c9 |
| 7312ed1f4f |
| 6ae8f843d3 |
| 36b936820b |
| a3bc6aad2b |
| edc2f63d93 |
| ffe0810b12 |
| 40233eb115 |
| d549d31421 |
| 0725163af8 |
| 712471176b |
| dcd7b03302 |
| 76205d9cf6 |
| ec0a0d04cc |
| 7bb01df846 |
| 6142e0b353 |
| a65990aee3 |
| 259a024eb1 |
| 91402792e6 |
| eabf4dc046 |
| 5a7bd73565 |
| 4693dbf9c1 |
| f4f2e51315 |
| d6b7df6837 |
| dafba5d0cb |
| 6c71dfbe41 |
| a6470b1036 |
| 04cda4558e |
| 625ed63482 |
| a03813315f |
| a2550f17d7 |
| 15ef5816db |
| 93b52bd0fe |
| 28ff877081 |
| 3e3ecff9df |
| ce64135ba8 |
| 2af6580ffb |
| 8a3c515176 |
| d98c4f32ad |
| e0a4b007d5 |
| 6fb6b2deee |
| 1882862992 |
| 0c6107e675 |
| bd079978bf |
| 5dec4927cd |
| 62e5217b9e |
| 9823c809e9 |
| db5737a857 |
| ca0821a466 |
| bb4dbc7d3c |
| 7c7fd75376 |
| 0b71433667 |
| 1b499a7594 |
| da390ee8a3 |
| d3e68d0e70 |
| fde28d57aa |
| 6092e504c8 |
| 93ef60ecd2 |
| 461b3f74c9 |
| 3c53b411b0 |
| 8125526061 |
| 2dc81569a8 |
| fd163f3d36 |
| a0a918cf0d |
| 4943cf6873 |
| bc3efc7d63 |
| f89368f16d |
| c919a6a605 |
| 0280fca189 |
| 60ee8b95a8 |
| ca51c14332 |
| 525cf17c6f |
| 406e03c52f |
| 672b33e83a |
| b19b840628 |
| a6df4f899c |
| 7686e9c20d |
| 3c5932bc39 |
| f9a986f489 |
| 0d7458ee90 |
| ac8b9d6a50 |
| 58cd9c28e7 |
| 5088305a58 |
| 9f847a5e55 |
| 6c04c2f3d3 |
| 925be7b17e |
| 2b132456ac |
| 50e3154a4e |
| 7aef50428f |
| d8ec0f5025 |
| 0a2f97c034 |
| 3e9cf23578 |
| 7f927ec7be |
| 9de6393cd5 |
| 91e7f65617 |
| d465454019 |
| 1a81eb97fb |
| 6b3b2e2a7d |
| 82c65454e6 |
| 19710461b6 |
| a3661d6b85 |
| 58375e4ef4 |
| ea023e98da |
| f1065ed1c8 |
| 315519b21f |
| 07c98661de |
| 2cd991a54e |
| ede251afb3 |
| a8ce970ca0 |
| 243af26bf6 |
| 0f1db94884 |
| c304f58ea2 |
| 23f7222cb5 |
| ce7294d990 |
| c9fa114d2e |
| 8b6bacb28e |
| 32d8ad7780 |
| cbf2f90faa |
| 4dde3e3134 |
| 1551e8f1c6 |
| c84f09e2d2 |
| 57a32669b5 |
| fe017cbcc8 |
| 5edb03d24b |
| c5de2232ba |
| ca9186c301 |
| 8a4b631c13 |
| ec9ce92f48 |
| e95d35c37f |
| 36d662b961 |
| 2835f8877e |
| 18cbd2fd30 |
| a2812fb56f |
| 77effcf649 |
| 7272ef45a0 |
| 56116551ac |
| 4dcda94cb0 |
| 09f59b0182 |
| ed24184b99 |
| 56bef064b1 |
| 44bb2cf5e4 |
| e231228721 |
| b8b2d6dfa9 |
| 3eca6294df |
| eb037a0284 |
| a01821f20b |
| b21036f8df |
| b43bacb7ac |
| f7313b255a |
| 7457e1c793 |
| f7493d823f |
| 7fa7b59ce3 |
| 78a608db50 |
| 93f7dfdaf9 |
| 83c6f256c9 |
| dee9105794 |
| 3bfc3b46d0 |
| 553f150bee |
| b3a7e714a5 |
| cd9841713c |
| 1ea9548d46 |
| be7642c837 |
| a418a4e464 |
| aec035ef1e |
| 6d37993ab9 |
| 72b80ca44e |
| c10aa9279c |
| 68d809a7d6 |
| 4ad09a419b |
| ddc6620f09 |
| 4066a65678 |
| 8e46a9ba7a |
| a5a98b9b00 |
| a721ab7d6c |
| 7db27ae5e1 |
| 8fd4462025 |
| c458a15820 |
| bae3412bee |
| 94cb08bb37 |
| af888db13e |
| d24f2408ee |
| ddd2274015 |
| 99abdb7c67 |
| f3bb9a8540 |
| bb94e0d1c5 |
| 1a78d88be2 |
| 3ec61758b3 |
| 83c962166d |
| e87dee3bdf |
| b27bfff15a |
**.github/workflows/build_test.yml** (vendored, new file, 30 lines)

```yaml
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: Build

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.6', '3.10']
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install setuptools wheel
      - name: Build test the package
        run: |
          python setup.py sdist bdist_wheel
```
**.github/workflows/codeql-analysis.yml** (vendored, new file, 70 lines)

```yaml
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches: [ master ]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [ master ]
  schedule:
    - cron: '30 6 * * 1'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'python' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
        # Learn more about CodeQL language support at https://git.io/codeql-language-support

    steps:
    - name: Checkout repository
      uses: actions/checkout@v2

    # Initializes the CodeQL tools for scanning.
    - name: Initialize CodeQL
      uses: github/codeql-action/init@v1
      with:
        languages: ${{ matrix.language }}
        # If you wish to specify custom queries, you can do so here or in a config file.
        # By default, queries listed here will override any specified in a config file.
        # Prefix the list here with "+" to use these queries and those in the config file.
        # queries: ./path/to/local/query, your-org/your-repo/queries@main

    # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
    # If this step fails, then you should remove it and run the build manually (see below)
    - name: Autobuild
      uses: github/codeql-action/autobuild@v1

    # ℹ️ Command-line programs to run using the OS shell.
    # 📚 https://git.io/JvXDl

    # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
    #    and modify them (or add more) to build your code if your project
    #    uses a compiled language

    #- run: |
    #   make bootstrap
    #   make release

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v1
```
**.github/workflows/python-publish.yml** (vendored, new file, 31 lines)

```yaml
# This workflows will upload a Python Package using Twine when a release is created
# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries

name: Upload Python Package

on:
  release:
    types: [created]

jobs:
  deploy:

    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v2
    - name: Set up Python
      uses: actions/setup-python@v2
      with:
        python-version: '3.x'
    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        pip install setuptools wheel twine
    - name: Build and publish
      env:
        TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
        TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
      run: |
        python setup.py sdist bdist_wheel
        twine upload dist/*
```
**.github/workflows/unit_test.yml** (vendored, new file, 44 lines)

```yaml
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: Tests

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

jobs:
  build:

    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.9']
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
          if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
          flake8 waybackpy/ --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          # flake8 waybackpy/ --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics --per-file-ignores="waybackpy/__init__.py:F401"
      # - name: Static type test with mypy
      #   run: |
      #     mypy
      - name: Test with pytest
        run: |
          pytest
      # - name: Upload coverage to Codecov
      #   run: |
      #     bash <(curl -s https://codecov.io/bash) -t ${{ secrets.CODECOV_TOKEN }}
```
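Only the narrow flake8 selection above (E9, F63, F7, F82 — syntax errors and undefined names) can fail the lint step; the broader run is commented out, and the test step is a bare `pytest`. As a minimal sketch (not part of this diff), the same test step can be reproduced locally after installing `requirements-dev.txt`, since pytest can be invoked in-process:

```python
# Sketch of a local equivalent of the CI "Test with pytest" step.
# Assumes `pip install -r requirements-dev.txt` has already been run;
# pytest.main() returns an exit code and honors the addopts/testpaths
# supplied by the pytest.ini added later in this changeset.
import sys

import pytest

if __name__ == "__main__":
    sys.exit(pytest.main([]))
```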
**.gitignore** (vendored, 3 lines added)

```diff
@@ -1,3 +1,6 @@
+# Files generated while testing
+*-urls-*.txt
+
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]
```
**.travis.yml** (deleted, 14 lines)

```diff
@@ -1,14 +0,0 @@
-language: python
-python:
-  - "2.7"
-  - "3.6"
-  - "3.8"
-os: linux
-dist: xenial
-cache: pip
-install:
-  - pip install pytest
-before_script:
-  cd tests
-script:
-  - pytest test_1.py
```
```diff
@@ -1,6 +1,10 @@
 {
   "scanSettings": {
     "baseBranches": []
   },
   "checkRunSettings": {
-    "vulnerableCheckRunConclusionLevel": "failure"
+    "vulnerableCheckRunConclusionLevel": "failure",
+    "displayMode": "diff"
+  },
+  "issueSettings": {
+    "minSeverityLevel": "LOW"
```
**CONTRIBUTORS.md** (new file, 9 lines)

```markdown
## AUTHORS
- akamhy (<https://github.com/akamhy>)
- danvalen1 (<https://github.com/danvalen1>)
- AntiCompositeNumber (<https://github.com/AntiCompositeNumber>)
- jonasjancarik (<https://github.com/jonasjancarik>)

## ACKNOWLEDGEMENTS
- mhmdiaa (<https://github.com/mhmdiaa>) for <https://gist.github.com/mhmdiaa/adf6bff70142e5091792841d4b372050>. known_urls is based on this gist.
- dequeued0 (<https://github.com/dequeued0>) for reporting bugs and useful feature requests.
```
**LICENSE** (2 lines changed)

```diff
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2020 akamhy
+Copyright (c) 2020-2022 waybackpy contributors ( https://github.com/akamhy/waybackpy/graphs/contributors )
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
```
**README.md** (322 lines changed)

````
@@ -1,230 +1,154 @@
# waybackpy
<div align="center">

[![Build Status](https://img.shields.io/travis/akamhy/waybackpy.svg?label=Travis%20CI&logo=travis&style=flat-square)](https://travis-ci.org/akamhy/waybackpy)
[![Downloads](https://img.shields.io/pypi/dm/waybackpy.svg)](https://pypistats.org/packages/waybackpy)
[![Release](https://img.shields.io/github/v/release/akamhy/waybackpy.svg)](https://github.com/akamhy/waybackpy/releases)
[![Codacy Badge](https://api.codacy.com/project/badge/Grade/255459cede9341e39436ec8866d3fb65)](https://www.codacy.com/manual/akamhy/waybackpy?utm_source=github.com&utm_medium=referral&utm_content=akamhy/waybackpy&utm_campaign=Badge_Grade)
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://github.com/akamhy/waybackpy/blob/master/LICENSE)
[![Maintainability](https://api.codeclimate.com/v1/badges/942f13d8177a56c1c906/maintainability)](https://codeclimate.com/github/akamhy/waybackpy/maintainability)
[![CodeFactor](https://www.codefactor.io/repository/github/akamhy/waybackpy/badge)](https://www.codefactor.io/repository/github/akamhy/waybackpy)
[![made-with-python](https://img.shields.io/badge/Made%20with-Python-1f425f.svg)](https://www.python.org/)
![pypi](https://img.shields.io/pypi/v/waybackpy.svg)
![PyPI - Python Version](https://img.shields.io/pypi/pyversions/waybackpy?style=flat-square)
[![Maintenance](https://img.shields.io/badge/Maintained%3F-yes-green.svg)](https://github.com/akamhy/waybackpy/graphs/commit-activity)
[![codecov](https://codecov.io/gh/akamhy/waybackpy/branch/master/graph/badge.svg)](https://codecov.io/gh/akamhy/waybackpy)
![](https://img.shields.io/github/repo-size/akamhy/waybackpy.svg?label=Repo%20size&style=flat-square)
![contributions welcome](https://img.shields.io/static/v1.svg?label=Contributions&message=Welcome&color=0059b3&style=flat-square)
<img src="https://raw.githubusercontent.com/akamhy/waybackpy/master/assets/waybackpy_logo.svg"><br>

<h3>A Python package & CLI tool that interfaces with the Wayback Machine API</h3>

</div>

<p align="center">
<a href="https://github.com/akamhy/waybackpy/actions?query=workflow%3ATests"><img alt="Unit Tests" src="https://github.com/akamhy/waybackpy/workflows/Tests/badge.svg"></a>
<a href="https://pypi.org/project/waybackpy/"><img alt="pypi" src="https://img.shields.io/pypi/v/waybackpy.svg"></a>
<a href="https://pepy.tech/project/waybackpy?versions=2*&versions=1*&versions=3*"><img alt="Downloads" src="https://pepy.tech/badge/waybackpy/month"></a>
<a href="https://github.com/akamhy/waybackpy/commits/master"><img alt="GitHub lastest commit" src="https://img.shields.io/github/last-commit/akamhy/waybackpy?color=blue&style=flat-square"></a>
<a href="#"><img alt="PyPI - Python Version" src="https://img.shields.io/pypi/pyversions/waybackpy?style=flat-square"></a>
<a href="https://github.com/psf/black"><img alt="Code style: black" src="https://img.shields.io/badge/code%20style-black-000000.svg"></a>
</p>

-----------------------------------------------------------------------------------------------------------------------------------------------

## ⭐️ Introduction
Waybackpy is a [Python package](https://www.udacity.com/blog/2021/01/what-is-a-python-package.html) and a [CLI](https://www.w3schools.com/whatis/whatis_cli.asp) tool that interfaces with the [Wayback Machine](https://en.wikipedia.org/wiki/Wayback_Machine) API.

Wayback Machine has 3 client side [API](https://www.redhat.com/en/topics/api/what-are-application-programming-interfaces)s.

- [Save API](https://github.com/akamhy/waybackpy/wiki/Wayback-Machine-APIs#save-api)
- [Availability API](https://github.com/akamhy/waybackpy/wiki/Wayback-Machine-APIs#availability-api)
- [CDX API](https://github.com/akamhy/waybackpy/wiki/Wayback-Machine-APIs#cdx-api)

These three APIs can be accessed via the waybackpy either by importing it in a script or from the CLI.


![Internet Archive](https://upload.wikimedia.org/wikipedia/commons/thumb/8/84/Internet_Archive_logo_and_wordmark.svg/84px-Internet_Archive_logo_and_wordmark.svg.png)
![Wayback Machine](https://upload.wikimedia.org/wikipedia/commons/thumb/0/01/Wayback_Machine_logo_2010.svg/284px-Wayback_Machine_logo_2010.svg.png)
### 🏗 Installation

Waybackpy is a Python library that interfaces with the [Internet Archive](https://en.wikipedia.org/wiki/Internet_Archive)'s [Wayback Machine](https://en.wikipedia.org/wiki/Wayback_Machine) API. Archive pages and retrieve archived pages easily.
Using [pip](https://en.wikipedia.org/wiki/Pip_(package_manager)), from [PyPI](https://pypi.org/) (recommended):

Table of contents
=================
<!--ts-->

* [Installation](#installation)

* [Usage](#usage)
  * [Saving an url using save()](#capturing-aka-saving-an-url-using-save)
  * [Receiving the oldest archive for an URL Using oldest()](#receiving-the-oldest-archive-for-an-url-using-oldest)
  * [Receiving the recent most/newest archive for an URL using newest()](#receiving-the-newest-archive-for-an-url-using-newest)
  * [Receiving archive close to a specified year, month, day, hour, and minute using near()](#receiving-archive-close-to-a-specified-year-month-day-hour-and-minute-using-near)
  * [Get the content of webpage using get()](#get-the-content-of-webpage-using-get)
  * [Count total archives for an URL using total_archives()](#count-total-archives-for-an-url-using-total_archives)

* [Tests](#tests)

* [Dependency](#dependency)

* [License](#license)

<!--te-->

## Installation
Using [pip](https://en.wikipedia.org/wiki/Pip_(package_manager)):
```bash
pip install waybackpy
```

Install directly from [this git repository](https://github.com/akamhy/waybackpy) (NOT recommended):

## Usage
```bash
pip install git+https://github.com/akamhy/waybackpy.git
```

#### Capturing aka Saving an url using save()
### 🐳 Docker Image
Docker Hub : <https://hub.docker.com/r/secsi/waybackpy>

[Docker image](https://searchitoperations.techtarget.com/definition/Docker-image) is automatically updated on every release by [Regulary and Automatically Updated Docker Images](https://github.com/cybersecsi/RAUDI) (RAUDI).

RAUDI is a tool by SecSI (<https://secsi.io>), an Italian cybersecurity startup.


### 🚀 Usage

#### As a Python package

##### Save API aka SavePageNow
```python
import waybackpy
>>> from waybackpy import WaybackMachineSaveAPI
>>> url = "https://github.com"
>>> user_agent = "Mozilla/5.0 (Windows NT 5.1; rv:40.0) Gecko/20100101 Firefox/40.0"
>>>
>>> save_api = WaybackMachineSaveAPI(url, user_agent)
>>> save_api.save()
https://web.archive.org/web/20220118125249/https://github.com/
>>> save_api.cached_save
False
>>> save_api.timestamp()
datetime.datetime(2022, 1, 18, 12, 52, 49)
```

new_archive_url = waybackpy.Url(
##### Availability API
```python
>>> from waybackpy import WaybackMachineAvailabilityAPI
>>>
>>> url = "https://google.com"
>>> user_agent = "Mozilla/5.0 (Windows NT 5.1; rv:40.0) Gecko/20100101 Firefox/40.0"
>>>
>>> availability_api = WaybackMachineAvailabilityAPI(url, user_agent)
>>>
>>> availability_api.oldest()
https://web.archive.org/web/19981111184551/http://google.com:80/
>>>
>>> availability_api.newest()
https://web.archive.org/web/20220118150444/https://www.google.com/
>>>
>>> availability_api.near(year=2010, month=10, day=10, hour=10)
https://web.archive.org/web/20101010101708/http://www.google.com/
```

url = "https://en.wikipedia.org/wiki/Multivariable_calculus",
user_agent = "Mozilla/5.0 (Windows NT 5.1; rv:40.0) Gecko/20100101 Firefox/40.0"

).save()
##### CDX API aka CDXServerAPI
```python
>>> from waybackpy import WaybackMachineCDXServerAPI
>>> url = "https://pypi.org"
>>> user_agent = "Mozilla/5.0 (Windows NT 5.1; rv:40.0) Gecko/20100101 Firefox/40.0"
>>> cdx = WaybackMachineCDXServerAPI(url, user_agent, start_timestamp=2016, end_timestamp=2017)
>>> for item in cdx.snapshots():
...     print(item.archive_url)
...
https://web.archive.org/web/20160110011047/http://pypi.org/
https://web.archive.org/web/20160305104847/http://pypi.org/
.
. # URLS REDACTED FOR READABILITY
.
https://web.archive.org/web/20171127171549/https://pypi.org/
https://web.archive.org/web/20171206002737/http://pypi.org:80/
```

print(new_archive_url)
> Documentation is at <https://github.com/akamhy/waybackpy/wiki/Python-package-docs>.

#### As a CLI tool

Saving a webpage:
```bash
waybackpy --save --url "https://en.wikipedia.org/wiki/Social_media" --user_agent "my-unique-user-agent"
```
```bash
https://web.archive.org/web/20200504141153/https://github.com/akamhy/waybackpy
Archive URL:
https://web.archive.org/web/20220121193801/https://en.wikipedia.org/wiki/Social_media
Cached save:
False
```
<sub>Try this out in your browser @ <https://repl.it/repls/CompassionateRemoteOrigin#main.py></sub>


#### Receiving the oldest archive for an URL using oldest()
```python
import waybackpy

oldest_archive_url = waybackpy.Url(

    "https://www.google.com/",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:40.0) Gecko/20100101 Firefox/40.0"

).oldest()

print(oldest_archive_url)
Retriving the oldest archive and also printing the JSON response of the availability API:
```bash
waybackpy --oldest --json --url "https://en.wikipedia.org/wiki/Humanoid" --user_agent "my-unique-user-agent"
```
```bash
http://web.archive.org/web/19981111184551/http://google.com:80/
Archive URL:
https://web.archive.org/web/20040415020811/http://en.wikipedia.org:80/wiki/Humanoid
JSON response:
{"url": "https://en.wikipedia.org/wiki/Humanoid", "archived_snapshots": {"closest": {"status": "200", "available": true, "url": "http://web.archive.org/web/20040415020811/http://en.wikipedia.org:80/wiki/Humanoid", "timestamp": "20040415020811"}}, "timestamp": "199401212126"}
```
<sub>Try this out in your browser @ <https://repl.it/repls/MixedSuperDimensions#main.py></sub>


#### Receiving the newest archive for an URL using newest()
```python
import waybackpy

newest_archive_url = waybackpy.Url(

    "https://www.facebook.com/",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:39.0) Gecko/20100101 Firefox/39.0"

).newest()

print(newest_archive_url)
Archive close to a time, minute level precision is supported:
```bash
waybackpy --url google.com --user_agent "my-unique-user-agent" --near --year 2008 --month 8 --day 8
```
```bash
https://web.archive.org/web/20200714013225/https://www.facebook.com/
Archive URL:
https://web.archive.org/web/20080808014003/http://www.google.com:80/
```
<sub>Try this out in your browser @ <https://repl.it/repls/OblongMiniInteger#main.py></sub>
> CLI documentation is at <https://github.com/akamhy/waybackpy/wiki/CLI-docs>.

### 🛡 License
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://github.com/akamhy/waybackpy/blob/master/LICENSE)

Copyright (c) 2020-2022 Akash Mahanty Et al.

#### Receiving archive close to a specified year, month, day, hour, and minute using near()
```python
from waybackpy import Url

user_agent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:38.0) Gecko/20100101 Firefox/38.0"
github_url = "https://github.com/"


github_wayback_obj = Url(github_url, user_agent)

# Do not pad (don't use zeros in the month, year, day, minute, and hour arguments). e.g. For January, set month = 1 and not month = 01.
```
```python
github_archive_near_2010 = github_wayback_obj.near(year=2010)
print(github_archive_near_2010)
```
```bash
https://web.archive.org/web/20100719134402/http://github.com/
```
```python
github_archive_near_2011_may = github_wayback_obj.near(year=2011, month=5)
print(github_archive_near_2011_may)
```
```bash
https://web.archive.org/web/20110519185447/https://github.com/
```
```python
github_archive_near_2015_january_26 = github_wayback_obj.near(
    year=2015, month=1, day=26
)
print(github_archive_near_2015_january_26)
```
```bash
https://web.archive.org/web/20150127031159/https://github.com
```
```python
github_archive_near_2018_4_july_9_2_am = github_wayback_obj.near(
    year=2018, month=7, day=4, hour = 9, minute = 2
)
print(github_archive_near_2018_4_july_9_2_am)
```
```bash
https://web.archive.org/web/20180704090245/https://github.com/

```

<sub>The library doesn't supports seconds yet. You are encourged to create a PR ;)</sub>

<sub>Try this out in your browser @ <https://repl.it/repls/SparseDeadlySearchservice#main.py></sub>


#### Get the content of webpage using get()
```python
import waybackpy

google_url = "https://www.google.com/"

User_Agent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36"

waybackpy_url_object = waybackpy.Url(google_url, User_Agent)


# If no argument is passed in get(), it gets the source of the Url used to create the object.
current_google_url_source = waybackpy_url_object.get()
print(current_google_url_source)


# The following chunk of code will force a new archive of google.com and get the source of the archived page.
# waybackpy_url_object.save() type is string.
google_newest_archive_source = waybackpy_url_object.get(
    waybackpy_url_object.save()
)
print(google_newest_archive_source)


# waybackpy_url_object.oldest() type is str, it's oldest archive of google.com
google_oldest_archive_source = waybackpy_url_object.get(
    waybackpy_url_object.oldest()
)
print(google_oldest_archive_source)
```
<sub>Try this out in your browser @ <https://repl.it/repls/PinkHoneydewNonagon#main.py></sub>


#### Count total archives for an URL using total_archives()
```python
import waybackpy

URL = "https://en.wikipedia.org/wiki/Python (programming language)"

UA = "Mozilla/5.0 (iPad; CPU OS 8_1_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B435 Safari/600.1.4"

archive_count = waybackpy.Url(
    url=URL,
    user_agent=UA
).total_archives()

print(archive_count) # total_archives() returns an int
```
```bash
2440
```
<sub>Try this out in your browser @ <https://repl.it/repls/DigitalUnconsciousNumbers#main.py></sub>

## Tests
* [Here](https://github.com/akamhy/waybackpy/tree/master/tests)

## Dependency
* None, just python standard libraries (re, json, urllib and datetime). Both python 2 and 3 are supported :)

## License
[MIT License](https://github.com/akamhy/waybackpy/blob/master/LICENSE)
Released under the MIT License. See [license](https://github.com/akamhy/waybackpy/blob/master/LICENSE) for details.
````
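The rewritten README replaces the single `waybackpy.Url` interface with one class per Wayback Machine API. A minimal sketch composing the three classes exactly as the new README documents them (output archive URLs are illustrative, not reproducible):

```python
# Sketch composing the three classes the new README documents.
from waybackpy import (
    WaybackMachineAvailabilityAPI,
    WaybackMachineCDXServerAPI,
    WaybackMachineSaveAPI,
)

url = "https://example.com"
user_agent = "Mozilla/5.0 (Windows NT 5.1; rv:40.0) Gecko/20100101 Firefox/40.0"

# Save API (SavePageNow): request a fresh capture.
save_api = WaybackMachineSaveAPI(url, user_agent)
print(save_api.save())       # archive URL of the new capture
print(save_api.cached_save)  # True if SavePageNow served a cached capture

# Availability API: closest captures by age or date.
availability_api = WaybackMachineAvailabilityAPI(url, user_agent)
print(availability_api.oldest())
print(availability_api.newest())
print(availability_api.near(year=2010, month=10, day=10, hour=10))

# CDX Server API: iterate every capture in a timestamp range.
cdx = WaybackMachineCDXServerAPI(url, user_agent, start_timestamp=2016, end_timestamp=2017)
for item in cdx.snapshots():
    print(item.archive_url)
```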
```diff
@@ -1 +1 @@
-theme: jekyll-theme-cayman
+theme: jekyll-theme-cayman
```
**assets/waybackpy_logo.svg** (new file, 14 lines, 8.3 KiB)

```xml
<?xml version="1.0" encoding="utf-8"?>
<svg width="711.80188pt" height="258.30469pt" viewBox="0 0 711.80188 258.30469" version="1.1" id="svg2" xmlns="http://www.w3.org/2000/svg">
  <g id="surface1" transform="translate(-40.045801,-148)">
    <!-- logo artwork: nine <path> elements; vector path data omitted -->
  </g>
</svg>
```
**index.rst** (deleted, 294 lines)

```rst
waybackpy
=========

|Build Status| |Downloads| |Release| |Codacy Badge| |License: MIT|
|Maintainability| |CodeFactor| |made-with-python| |pypi| |PyPI - Python
Version| |Maintenance| |codecov| |image12| |contributions welcome|

|Internet Archive| |Wayback Machine|

Waybackpy is a Python library that interfaces with the `Internet
Archive <https://en.wikipedia.org/wiki/Internet_Archive>`__'s `Wayback
Machine <https://en.wikipedia.org/wiki/Wayback_Machine>`__ API. Archive
pages and retrieve archived pages easily.

Table of contents
=================

.. raw:: html

   <!--ts-->

-  `Installation <#installation>`__

-  `Usage <#usage>`__

   -  `Saving an url using
      save() <#capturing-aka-saving-an-url-using-save>`__
   -  `Receiving the oldest archive for an URL Using
      oldest() <#receiving-the-oldest-archive-for-an-url-using-oldest>`__
   -  `Receiving the recent most/newest archive for an URL using
      newest() <#receiving-the-newest-archive-for-an-url-using-newest>`__
   -  `Receiving archive close to a specified year, month, day, hour, and
      minute using
      near() <#receiving-archive-close-to-a-specified-year-month-day-hour-and-minute-using-near>`__
   -  `Get the content of webpage using
      get() <#get-the-content-of-webpage-using-get>`__
   -  `Count total archives for an URL using
      total\_archives() <#count-total-archives-for-an-url-using-total_archives>`__

-  `Tests <#tests>`__

-  `Dependency <#dependency>`__

-  `License <#license>`__

.. raw:: html

   <!--te-->

Installation
------------

Using `pip <https://en.wikipedia.org/wiki/Pip_(package_manager)>`__:

.. code:: bash

    pip install waybackpy

Usage
-----

Capturing aka Saving an url using save()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

.. code:: python

    import waybackpy

    new_archive_url = waybackpy.Url(

        url = "https://en.wikipedia.org/wiki/Multivariable_calculus",
        user_agent = "Mozilla/5.0 (Windows NT 5.1; rv:40.0) Gecko/20100101 Firefox/40.0"

    ).save()

    print(new_archive_url)

.. code:: bash

    https://web.archive.org/web/20200504141153/https://github.com/akamhy/waybackpy

Try this out in your browser @
https://repl.it/repls/CompassionateRemoteOrigin#main.py\

Receiving the oldest archive for an URL using oldest()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

.. code:: python

    import waybackpy

    oldest_archive_url = waybackpy.Url(

        "https://www.google.com/",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:40.0) Gecko/20100101 Firefox/40.0"

    ).oldest()

    print(oldest_archive_url)

.. code:: bash

    http://web.archive.org/web/19981111184551/http://google.com:80/

Try this out in your browser @
https://repl.it/repls/MixedSuperDimensions#main.py\

Receiving the newest archive for an URL using newest()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

.. code:: python

    import waybackpy

    newest_archive_url = waybackpy.Url(

        "https://www.facebook.com/",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:39.0) Gecko/20100101 Firefox/39.0"

    ).newest()

    print(newest_archive_url)

.. code:: bash

    https://web.archive.org/web/20200714013225/https://www.facebook.com/

Try this out in your browser @
https://repl.it/repls/OblongMiniInteger#main.py\

Receiving archive close to a specified year, month, day, hour, and minute using near()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

.. code:: python

    from waybackpy import Url

    user_agent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:38.0) Gecko/20100101 Firefox/38.0"
    github_url = "https://github.com/"


    github_wayback_obj = Url(github_url, user_agent)

    # Do not pad (don't use zeros in the month, year, day, minute, and hour arguments). e.g. For January, set month = 1 and not month = 01.

.. code:: python

    github_archive_near_2010 = github_wayback_obj.near(year=2010)
    print(github_archive_near_2010)

.. code:: bash

    https://web.archive.org/web/20100719134402/http://github.com/

.. code:: python

    github_archive_near_2011_may = github_wayback_obj.near(year=2011, month=5)
    print(github_archive_near_2011_may)

.. code:: bash

    https://web.archive.org/web/20110519185447/https://github.com/

.. code:: python

    github_archive_near_2015_january_26 = github_wayback_obj.near(
        year=2015, month=1, day=26
    )
    print(github_archive_near_2015_january_26)

.. code:: bash

    https://web.archive.org/web/20150127031159/https://github.com

.. code:: python

    github_archive_near_2018_4_july_9_2_am = github_wayback_obj.near(
        year=2018, month=7, day=4, hour = 9, minute = 2
    )
    print(github_archive_near_2018_4_july_9_2_am)

.. code:: bash

    https://web.archive.org/web/20180704090245/https://github.com/

The library doesn't supports seconds yet. You are encourged to create a
PR ;)

Try this out in your browser @
https://repl.it/repls/SparseDeadlySearchservice#main.py\

Get the content of webpage using get()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

.. code:: python

    import waybackpy

    google_url = "https://www.google.com/"

    User_Agent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36"

    waybackpy_url_object = waybackpy.Url(google_url, User_Agent)


    # If no argument is passed in get(), it gets the source of the Url used to create the object.
    current_google_url_source = waybackpy_url_object.get()
    print(current_google_url_source)


    # The following chunk of code will force a new archive of google.com and get the source of the archived page.
    # waybackpy_url_object.save() type is string.
    google_newest_archive_source = waybackpy_url_object.get(
        waybackpy_url_object.save()
    )
    print(google_newest_archive_source)


    # waybackpy_url_object.oldest() type is str, it's oldest archive of google.com
    google_oldest_archive_source = waybackpy_url_object.get(
        waybackpy_url_object.oldest()
    )
    print(google_oldest_archive_source)

Try this out in your browser @
https://repl.it/repls/PinkHoneydewNonagon#main.py\

Count total archives for an URL using total\_archives()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

.. code:: python

    import waybackpy

    URL = "https://en.wikipedia.org/wiki/Python (programming language)"

    UA = "Mozilla/5.0 (iPad; CPU OS 8_1_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B435 Safari/600.1.4"

    archive_count = waybackpy.Url(
        url=URL,
        user_agent=UA
    ).total_archives()

    print(archive_count) # total_archives() returns an int

.. code:: bash

    2440

Try this out in your browser @
https://repl.it/repls/DigitalUnconsciousNumbers#main.py\

Tests
-----

-  `Here <https://github.com/akamhy/waybackpy/tree/master/tests>`__

Dependency
----------

-  None, just python standard libraries (re, json, urllib and datetime).
   Both python 2 and 3 are supported :)

License
-------

`MIT
License <https://github.com/akamhy/waybackpy/blob/master/LICENSE>`__

.. |Build Status| image:: https://img.shields.io/travis/akamhy/waybackpy.svg?label=Travis%20CI&logo=travis&style=flat-square
   :target: https://travis-ci.org/akamhy/waybackpy
.. |Downloads| image:: https://img.shields.io/pypi/dm/waybackpy.svg
   :target: https://pypistats.org/packages/waybackpy
.. |Release| image:: https://img.shields.io/github/v/release/akamhy/waybackpy.svg
   :target: https://github.com/akamhy/waybackpy/releases
.. |Codacy Badge| image:: https://api.codacy.com/project/badge/Grade/255459cede9341e39436ec8866d3fb65
   :target: https://www.codacy.com/manual/akamhy/waybackpy?utm_source=github.com&utm_medium=referral&utm_content=akamhy/waybackpy&utm_campaign=Badge_Grade
.. |License: MIT| image:: https://img.shields.io/badge/License-MIT-yellow.svg
   :target: https://github.com/akamhy/waybackpy/blob/master/LICENSE
.. |Maintainability| image:: https://api.codeclimate.com/v1/badges/942f13d8177a56c1c906/maintainability
   :target: https://codeclimate.com/github/akamhy/waybackpy/maintainability
.. |CodeFactor| image:: https://www.codefactor.io/repository/github/akamhy/waybackpy/badge
   :target: https://www.codefactor.io/repository/github/akamhy/waybackpy
.. |made-with-python| image:: https://img.shields.io/badge/Made%20with-Python-1f425f.svg
   :target: https://www.python.org/
.. |pypi| image:: https://img.shields.io/pypi/v/waybackpy.svg
.. |PyPI - Python Version| image:: https://img.shields.io/pypi/pyversions/waybackpy?style=flat-square
.. |Maintenance| image:: https://img.shields.io/badge/Maintained%3F-yes-green.svg
   :target: https://github.com/akamhy/waybackpy/graphs/commit-activity
.. |codecov| image:: https://codecov.io/gh/akamhy/waybackpy/branch/master/graph/badge.svg
   :target: https://codecov.io/gh/akamhy/waybackpy
.. |image12| image:: https://img.shields.io/github/repo-size/akamhy/waybackpy.svg?label=Repo%20size&style=flat-square
.. |contributions welcome| image:: https://img.shields.io/static/v1.svg?label=Contributions&message=Welcome&color=0059b3&style=flat-square
.. |Internet Archive| image:: https://upload.wikimedia.org/wikipedia/commons/thumb/8/84/Internet_Archive_logo_and_wordmark.svg/84px-Internet_Archive_logo_and_wordmark.svg.png
.. |Wayback Machine| image:: https://upload.wikimedia.org/wikipedia/commons/thumb/0/01/Wayback_Machine_logo_2010.svg/284px-Wayback_Machine_logo_2010.svg.png
```
**pytest.ini** (new file, 11 lines)

```ini
[pytest]
addopts =
    # show summary of all tests that did not pass
    -ra
    # enable all warnings
    -Wd
    # coverage and html report
    --cov=waybackpy
    --cov-report=html
testpaths =
    tests
```
**requirements-dev.txt** (new file, 8 lines)

```
click
requests
pytest
pytest-cov
codecov
flake8
mypy
black
```
**requirements.txt** (new file, 2 lines)

```
click
requests
```
```diff
@@ -1,3 +1,7 @@
 [metadata]
 description-file = README.md
 license_file = LICENSE
+
+[flake8]
+max-line-length = 88
+extend-ignore = E203,W503
```
**setup.py** (89 lines changed)

```diff
@@ -1,49 +1,66 @@
 import os.path
 from setuptools import setup
 
-with open(os.path.join(os.path.dirname(__file__), 'README.md')) as f:
+readme_path = os.path.join(os.path.dirname(__file__), "README.md")
+with open(readme_path, encoding="utf-8") as f:
     long_description = f.read()
 
 about = {}
-with open(os.path.join(os.path.dirname(__file__), 'waybackpy', '__version__.py')) as f:
+version_path = os.path.join(os.path.dirname(__file__), "waybackpy", "__version__.py")
+with open(version_path, encoding="utf-8") as f:
     exec(f.read(), about)
 
+
+version = str(about["__version__"])
+
+download_url = "https://github.com/akamhy/waybackpy/archive/{version}.tar.gz".format(
+    version=version
+)
+
 setup(
-    name = about['__title__'],
-    packages = ['waybackpy'],
-    version = about['__version__'],
-    description = about['__description__'],
+    name=about["__title__"],
+    packages=["waybackpy"],
+    version=version,
+    description=about["__description__"],
     long_description=long_description,
-    long_description_content_type='text/markdown',
-    license= about['__license__'],
-    author = about['__author__'],
-    author_email = about['__author_email__'],
-    url = about['__url__'],
-    download_url = 'https://github.com/akamhy/waybackpy/archive/2.0.2.tar.gz',
-    keywords = ['wayback', 'archive', 'archive website', 'wayback machine', 'Internet Archive'],
-    install_requires=[],
-    python_requires= ">=2.7",
+    long_description_content_type="text/markdown",
+    license=about["__license__"],
+    author=about["__author__"],
+    author_email=about["__author_email__"],
+    url=about["__url__"],
+    download_url=download_url,
+    keywords=[
+        "Archive Website",
+        "Wayback Machine",
+        "Internet Archive",
+        "Wayback Machine CLI",
+        "Wayback Machine Python",
+        "Internet Archiving",
+        "Availability API",
+        "CDX API",
+        "savepagenow",
+    ],
+    install_requires=["requests", "click"],
+    python_requires=">=3.4",
     classifiers=[
-        'Development Status :: 5 - Production/Stable',
-        'Intended Audience :: Developers',
-        'Natural Language :: English',
-        'Topic :: Software Development :: Build Tools',
-        'License :: OSI Approved :: MIT License',
-        'Programming Language :: Python',
-        'Programming Language :: Python :: 2',
-        'Programming Language :: Python :: 2.7',
-        'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.2',
-        'Programming Language :: Python :: 3.3',
-        'Programming Language :: Python :: 3.4',
-        'Programming Language :: Python :: 3.5',
-        'Programming Language :: Python :: 3.6',
-        'Programming Language :: Python :: 3.7',
-        'Programming Language :: Python :: 3.8',
-        'Programming Language :: Python :: Implementation :: CPython',
-    ],
+        "Development Status :: 4 - Beta",
+        "Intended Audience :: Developers",
+        "Natural Language :: English",
+        "License :: OSI Approved :: MIT License",
+        "Programming Language :: Python",
+        "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: 3.4",
+        "Programming Language :: Python :: 3.5",
+        "Programming Language :: Python :: 3.6",
+        "Programming Language :: Python :: 3.7",
+        "Programming Language :: Python :: 3.8",
+        "Programming Language :: Python :: 3.9",
+        "Programming Language :: Python :: 3.10",
+        "Programming Language :: Python :: Implementation :: CPython",
+    ],
+    entry_points={"console_scripts": ["waybackpy = waybackpy.cli:main"]},
     project_urls={
-        'Documentation': 'https://waybackpy.readthedocs.io',
-        'Source': 'https://github.com/akamhy/waybackpy',
+        "Documentation": "https://github.com/akamhy/waybackpy/wiki",
+        "Source": "https://github.com/akamhy/waybackpy",
+        "Tracker": "https://github.com/akamhy/waybackpy/issues",
     },
 )
```
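The packaging metadata stays single-sourced: setup.py exec()s waybackpy/__version__.py into a dict and derives both `version` and `download_url` from it. A minimal sketch of that pattern, using only the attribute names the `setup()` call above references:

```python
# Sketch of the version-loading pattern used by the new setup.py.
# waybackpy/__version__.py is expected to define __title__, __version__,
# __description__, __license__, __author__, __author_email__ and __url__.
import os.path

about = {}
version_path = os.path.join(os.path.dirname(__file__), "waybackpy", "__version__.py")
with open(version_path, encoding="utf-8") as f:
    exec(f.read(), about)  # runs the module source, populating `about`

version = str(about["__version__"])
download_url = "https://github.com/akamhy/waybackpy/archive/{version}.tar.gz".format(
    version=version
)
print(version, download_url)  # these feed setup(version=..., download_url=...)
```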
**tests/__init__.py** (new empty file, 0 lines)
**tests/test_1.py** (deleted, 134 lines)

```python
# -*- coding: utf-8 -*-
import sys

sys.path.append("..")
import waybackpy
import pytest
import random
import time

user_agent = "Mozilla/5.0 (Windows NT 6.2; rv:20.0) Gecko/20121202 Firefox/20.0"


def test_clean_url():
    time.sleep(10)
    test_url = " https://en.wikipedia.org/wiki/Network security "
    answer = "https://en.wikipedia.org/wiki/Network_security"
    target = waybackpy.Url(test_url, user_agent)
    test_result = target.clean_url()
    assert answer == test_result


def test_url_check():
    time.sleep(10)
    broken_url = "http://wwwgooglecom/"
    with pytest.raises(Exception) as e_info:
        waybackpy.Url(broken_url, user_agent)


def test_save():
    # Test for urls that exist and can be archived.
    time.sleep(10)

    url_list = [
        "en.wikipedia.org",
        "www.wikidata.org",
        "commons.wikimedia.org",
        "www.wiktionary.org",
        "www.w3schools.com",
        "www.youtube.com"
    ]
    x = random.randint(0, len(url_list)-1)
    url1 = url_list[x]
    target = waybackpy.Url(url1, "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1944.0 Safari/537.36")
    archived_url1 = target.save()
    assert url1 in archived_url1

    if sys.version_info > (3, 6):

        # Test for urls that are incorrect.
        with pytest.raises(Exception) as e_info:
            url2 = "ha ha ha ha"
            waybackpy.Url(url2, user_agent)
        time.sleep(5)
        # Test for urls not allowed to archive by robot.txt.
        with pytest.raises(Exception) as e_info:
            url3 = "http://www.archive.is/faq.html"
            target = waybackpy.Url(url3, "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:25.0) Gecko/20100101 Firefox/25.0")
            target.save()

        time.sleep(5)
        # Non existent urls, test
        with pytest.raises(Exception) as e_info:
            url4 = "https://githfgdhshajagjstgeths537agajaajgsagudadhuss8762346887adsiugujsdgahub.us"
            target = waybackpy.Url(url3, "Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27")
            target.save()

    else:
        pass


def test_near():
    time.sleep(10)
    url = "google.com"
    target = waybackpy.Url(url, "Mozilla/5.0 (Windows; U; Windows NT 6.0; de-DE) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4")
    archive_near_year = target.near(year=2010)
    assert "2010" in archive_near_year

    if sys.version_info > (3, 6):
        time.sleep(5)
        archive_near_month_year = target.near( year=2015, month=2)
        assert ("201502" in archive_near_month_year) or ("201501" in archive_near_month_year) or ("201503" in archive_near_month_year)

        target = waybackpy.Url("www.python.org", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.246")
        archive_near_hour_day_month_year = target.near(year=2008, month=5, day=9, hour=15)
        assert ("2008050915" in archive_near_hour_day_month_year) or ("2008050914" in archive_near_hour_day_month_year) or ("2008050913" in archive_near_hour_day_month_year)

        with pytest.raises(Exception) as e_info:
            NeverArchivedUrl = "https://ee_3n.wrihkeipef4edia.org/rwti5r_ki/Nertr6w_rork_rse7c_urity"
            target = waybackpy.Url(NeverArchivedUrl, user_agent)
            target.near(year=2010)
    else:
        pass


def test_oldest():
    time.sleep(10)
    url = "github.com/akamhy/waybackpy"
    target = waybackpy.Url(url, user_agent)
    assert "20200504141153" in target.oldest()


def test_newest():
    time.sleep(10)
    url = "github.com/akamhy/waybackpy"
    target = waybackpy.Url(url, user_agent)
    assert url in target.newest()


def test_get():
    time.sleep(10)
    target = waybackpy.Url("google.com", user_agent)
    assert "Welcome to Google" in target.get(target.oldest())


def test_total_archives():
    time.sleep(10)
    if sys.version_info > (3, 6):
        target = waybackpy.Url(" https://google.com ", user_agent)
        assert target.total_archives() > 500000
    else:
        pass
    time.sleep(5)
    target = waybackpy.Url(" https://gaha.e4i3n.m5iai3kip6ied.cima/gahh2718gs/ahkst63t7gad8 ", user_agent)
    assert target.total_archives() == 0


if __name__ == "__main__":
    test_clean_url()
    print(".") #1
    test_url_check()
    print(".") #1
    test_get()
    print(".") #3
    test_near()
    print(".") #4
    test_newest()
    print(".") #5
    test_save()
    print(".") #6
    test_oldest()
    print(".") #7
    test_total_archives()
    print(".") #8
    print("OK")
```
tests/test_availability_api.py (new file, 100 lines)
@@ -0,0 +1,100 @@
import pytest
import random
import string
from datetime import datetime, timedelta

from waybackpy.availability_api import WaybackMachineAvailabilityAPI
from waybackpy.exceptions import (
    InvalidJSONInAvailabilityAPIResponse,
    ArchiveNotInAvailabilityAPIResponse,
)

now = datetime.utcnow()
url = "https://example.com/"
user_agent = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.99 Safari/537.36"

rndstr = lambda n: "".join(
    random.choice(string.ascii_uppercase + string.digits) for _ in range(n)
)


def test_oldest():
    """
    Test the oldest archive of example.com and also check the attributes.
    """
    url = "https://example.com/"
    user_agent = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.99 Safari/537.36"
    availability_api = WaybackMachineAvailabilityAPI(url, user_agent)
    oldest = availability_api.oldest()
    oldest_archive_url = oldest.archive_url
    assert "2002" in oldest_archive_url
    oldest_timestamp = oldest.timestamp()
    assert abs(oldest_timestamp - now) > timedelta(days=7000)  # more than 19 years
    assert availability_api.JSON["archived_snapshots"]["closest"]["available"] is True
    assert repr(oldest).find("example.com") != -1
    assert "2002" in str(oldest)


def test_newest():
    """
    Assuming that the most recent archive of YouTube was made no earlier
    than three days (3 * 86400 seconds) ago.
    """
    url = "https://www.youtube.com/"
    user_agent = "Mozilla/5.0 (X11; Linux x86_64; rv:96.0) Gecko/20100101 Firefox/96.0"
    availability_api = WaybackMachineAvailabilityAPI(url, user_agent)
    newest = availability_api.newest()
    newest_timestamp = newest.timestamp()
    # Betting in favor of the latest YouTube archive being within the last
    # 3 days; high-traffic sites like YouTube are archived many times a day,
    # so this seems very reasonable.
    assert abs(newest_timestamp - now) < timedelta(seconds=86400 * 3)


def test_invalid_json():
    """
    When the API is malfunctioning or we don't pass a URL, it may return invalid JSON data.
    """
    with pytest.raises(InvalidJSONInAvailabilityAPIResponse):
        availability_api = WaybackMachineAvailabilityAPI(url="", user_agent=user_agent)
        archive_url = availability_api.archive_url


def test_no_archive():
    """
    ArchiveNotInAvailabilityAPIResponse may be raised if the Wayback Machine
    did not reply with an archive even though we know the site has millions
    of archives. The reason for this weird behavior is unknown.

    The exception is also raised if there really are no archives for the
    passed URL.
    """
    with pytest.raises(ArchiveNotInAvailabilityAPIResponse):
        availability_api = WaybackMachineAvailabilityAPI(
            url="https://%s.cn" % rndstr(30), user_agent=user_agent
        )
        archive_url = availability_api.archive_url


def test_no_api_call_str_repr():
    """
    Some users may want to see the string representation without making any
    API requests.

    str() must not return None, so we return "".
    """
    availability_api = WaybackMachineAvailabilityAPI(
        url="https://%s.gov" % rndstr(30), user_agent=user_agent
    )
    assert "" == str(availability_api)


def test_no_call_timestamp():
    """
    If no API requests were made, the bound timestamp() method returns
    datetime.max as the default value.
    """
    availability_api = WaybackMachineAvailabilityAPI(
        url="https://%s.in" % rndstr(30), user_agent=user_agent
    )
    assert datetime.max == availability_api.timestamp()
tests/test_cdx_snapshot.py (new file, 41 lines)
@@ -0,0 +1,41 @@
import pytest
from datetime import datetime

from waybackpy.cdx_snapshot import CDXSnapshot


def test_CDXSnapshot():
    sample_input = "org,archive)/ 20080126045828 http://github.com text/html 200 Q4YULN754FHV2U6Q5JUT6Q2P57WEWNNY 1415"
    prop_values = sample_input.split(" ")
    properties = {}
    (
        properties["urlkey"],
        properties["timestamp"],
        properties["original"],
        properties["mimetype"],
        properties["statuscode"],
        properties["digest"],
        properties["length"],
    ) = prop_values

    snapshot = CDXSnapshot(properties)

    assert properties["urlkey"] == snapshot.urlkey
    assert properties["timestamp"] == snapshot.timestamp
    assert properties["original"] == snapshot.original
    assert properties["mimetype"] == snapshot.mimetype
    assert properties["statuscode"] == snapshot.statuscode
    assert properties["digest"] == snapshot.digest
    assert properties["length"] == snapshot.length
    assert (
        datetime.strptime(properties["timestamp"], "%Y%m%d%H%M%S")
        == snapshot.datetime_timestamp
    )
    archive_url = (
        "https://web.archive.org/web/"
        + properties["timestamp"]
        + "/"
        + properties["original"]
    )
    assert archive_url == snapshot.archive_url
    assert sample_input == str(snapshot)
tests/test_cdx_utils.py (new file, 99 lines)
@@ -0,0 +1,99 @@
import pytest
from waybackpy.exceptions import WaybackError
from waybackpy.cdx_utils import (
    get_total_pages,
    full_url,
    get_response,
    check_filters,
    check_collapses,
    check_match_type,
)


def test_get_total_pages():
    url = "twitter.com"
    user_agent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0.2 Safari/605.1.15"
    assert get_total_pages(url=url, user_agent=user_agent) >= 56


def test_full_url():
    params = {}
    endpoint = "https://web.archive.org/cdx/search/cdx"
    assert endpoint == full_url(endpoint, params)

    params = {"a": "1"}
    assert "https://web.archive.org/cdx/search/cdx?a=1" == full_url(endpoint, params)
    assert "https://web.archive.org/cdx/search/cdx?a=1" == full_url(
        endpoint + "?", params
    )

    params["b"] = 2
    assert "https://web.archive.org/cdx/search/cdx?a=1&b=2" == full_url(
        endpoint + "?", params
    )

    params["c"] = "foo bar"
    assert "https://web.archive.org/cdx/search/cdx?a=1&b=2&c=foo%20bar" == full_url(
        endpoint + "?", params
    )


def test_get_response():
    url = "https://github.com"
    user_agent = (
        "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0"
    )
    headers = {"User-Agent": "%s" % user_agent}
    response = get_response(url, headers=headers)
    assert response.status_code == 200

    url = "http/wwhfhfvhvjhmom"
    with pytest.raises(WaybackError):
        get_response(url, headers=headers)


def test_check_filters():
    filters = []
    check_filters(filters)

    filters = ["statuscode:200", "timestamp:20215678901234", "original:https://url.com"]
    check_filters(filters)

    with pytest.raises(WaybackError):
        check_filters("not-list")

    with pytest.raises(WaybackError):
        check_filters(["invalid"])


def test_check_collapses():
    collapses = []
    check_collapses(collapses)

    collapses = ["timestamp:10"]
    check_collapses(collapses)

    collapses = ["urlkey"]
    check_collapses(collapses)

    collapses = "urlkey"  # not a list
    with pytest.raises(WaybackError):
        check_collapses(collapses)

    collapses = ["also illegal collapse"]
    with pytest.raises(WaybackError):
        check_collapses(collapses)


def test_check_match_type():
    assert None == check_match_type(None, "url")
    match_type = "exact"
    url = "test_url"
    assert None == check_match_type(match_type, url)

    url = "has * in it"
    with pytest.raises(WaybackError):
        check_match_type("domain", url)

    with pytest.raises(WaybackError):
        check_match_type("not a valid type", "url")
tests/test_save_api.py (new file, 133 lines)
@@ -0,0 +1,133 @@
import pytest
import time
import random
import string
from datetime import datetime

from waybackpy.save_api import WaybackMachineSaveAPI
from waybackpy.exceptions import MaximumSaveRetriesExceeded

rndstr = lambda n: "".join(
    random.choice(string.ascii_uppercase + string.digits) for _ in range(n)
)


def test_save():
    url = "https://github.com/akamhy/waybackpy"
    user_agent = "Mozilla/5.0 (MacBook Air; M1 Mac OS X 11_4) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Safari/604.1"
    save_api = WaybackMachineSaveAPI(url, user_agent)
    save_api.save()
    archive_url = save_api.archive_url
    timestamp = save_api.timestamp()
    headers = save_api.headers  # CaseInsensitiveDict
    cached_save = save_api.cached_save
    assert cached_save in [True, False]
    assert archive_url.find("github.com/akamhy/waybackpy") != -1
    assert str(headers).find("github.com/akamhy/waybackpy") != -1
    assert type(save_api.timestamp()) == type(datetime(year=2020, month=10, day=2))


def test_max_redirect_exceeded():
    with pytest.raises(MaximumSaveRetriesExceeded):
        url = "https://%s.gov" % rndstr(30)
        user_agent = "Mozilla/5.0 (MacBook Air; M1 Mac OS X 11_4) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Safari/604.1"
        save_api = WaybackMachineSaveAPI(url, user_agent, max_tries=3)
        save_api.save()


def test_sleep():
    """
    Sleeping is actually very important for the stability of the SaveAPI
    interface.

    The test checks that the time taken by the sleep method is as intended.
    """
    url = "https://example.com"
    user_agent = "Mozilla/5.0 (MacBook Air; M1 Mac OS X 11_4) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Safari/604.1"
    save_api = WaybackMachineSaveAPI(url, user_agent)
    s_time = int(time.time())
    save_api.sleep(6)  # 6 is a multiple of 3, so sleeps for 10 seconds
    e_time = int(time.time())
    assert (e_time - s_time) >= 10

    s_time = int(time.time())
    save_api.sleep(7)  # not a multiple of 3, so sleeps for 5 seconds
    e_time = int(time.time())
    assert (e_time - s_time) >= 5


def test_timestamp():
    url = "https://example.com"
    user_agent = "Mozilla/5.0 (MacBook Air; M1 Mac OS X 11_4) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Safari/604.1"
    save_api = WaybackMachineSaveAPI(url, user_agent)
    now = datetime.utcnow()
    save_api._archive_url = (
        "https://web.archive.org/web/%s/" % now.strftime("%Y%m%d%H%M%S") + url
    )
    save_api.timestamp()
    assert save_api.cached_save is False
    save_api._archive_url = "https://web.archive.org/web/%s/" % "20100124063622" + url
    save_api.timestamp()
    assert save_api.cached_save is True


def test_archive_url_parser():
    """
    Tests the three regexes for matches and also tests the response URL.
    """
    url = "https://example.com"
    user_agent = "Mozilla/5.0 (MacBook Air; M1 Mac OS X 11_4) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Safari/604.1"
    save_api = WaybackMachineSaveAPI(url, user_agent)

    save_api.headers = """
    START
    Content-Location: /web/20201126185327/https://www.scribbr.com/citing-sources/et-al
    END
    """

    assert (
        save_api.archive_url_parser()
        == "https://web.archive.org/web/20201126185327/https://www.scribbr.com/citing-sources/et-al"
    )

    save_api.headers = """
    {'Server': 'nginx/1.15.8', 'Date': 'Sat, 02 Jan 2021 09:40:25 GMT', 'Content-Type': 'text/html; charset=UTF-8', 'Transfer-Encoding': 'chunked', 'Connection': 'keep-alive', 'X-Archive-Orig-Server': 'nginx', 'X-Archive-Orig-Date': 'Sat, 02 Jan 2021 09:40:09 GMT', 'X-Archive-Orig-Transfer-Encoding': 'chunked', 'X-Archive-Orig-Connection': 'keep-alive', 'X-Archive-Orig-Vary': 'Accept-Encoding', 'X-Archive-Orig-Last-Modified': 'Fri, 01 Jan 2021 12:19:00 GMT', 'X-Archive-Orig-Strict-Transport-Security': 'max-age=31536000, max-age=0;', 'X-Archive-Guessed-Content-Type': 'text/html', 'X-Archive-Guessed-Charset': 'utf-8', 'Memento-Datetime': 'Sat, 02 Jan 2021 09:40:09 GMT', 'Link': '<https://www.scribbr.com/citing-sources/et-al/>; rel="original", <https://web.archive.org/web/timemap/link/https://www.scribbr.com/citing-sources/et-al/>; rel="timemap"; type="application/link-format", <https://web.archive.org/web/https://www.scribbr.com/citing-sources/et-al/>; rel="timegate", <https://web.archive.org/web/20200601082911/https://www.scribbr.com/citing-sources/et-al/>; rel="first memento"; datetime="Mon, 01 Jun 2020 08:29:11 GMT", <https://web.archive.org/web/20201126185327/https://www.scribbr.com/citing-sources/et-al/>; rel="prev memento"; datetime="Thu, 26 Nov 2020 18:53:27 GMT", <https://web.archive.org/web/20210102094009/https://www.scribbr.com/citing-sources/et-al/>; rel="memento"; datetime="Sat, 02 Jan 2021 09:40:09 GMT", <https://web.archive.org/web/20210102094009/https://www.scribbr.com/citing-sources/et-al/>; rel="last memento"; datetime="Sat, 02 Jan 2021 09:40:09 GMT"', 'Content-Security-Policy': "default-src 'self' 'unsafe-eval' 'unsafe-inline' data: blob: archive.org web.archive.org analytics.archive.org pragma.archivelab.org", 'X-Archive-Src': 'spn2-20210102092956-wwwb-spn20.us.archive.org-8001.warc.gz', 'Server-Timing': 'captures_list;dur=112.646325, exclusion.robots;dur=0.172010, exclusion.robots.policy;dur=0.158205, RedisCDXSource;dur=2.205932, esindex;dur=0.014647, LoadShardBlock;dur=82.205012, PetaboxLoader3.datanode;dur=70.750239, CDXLines.iter;dur=24.306278, load_resource;dur=26.520179', 'X-App-Server': 'wwwb-app200', 'X-ts': '200', 'X-location': 'All', 'X-Cache-Key': 'httpsweb.archive.org/web/20210102094009/https://www.scribbr.com/citing-sources/et-al/IN', 'X-RL': '0', 'X-Page-Cache': 'MISS', 'X-Archive-Screenname': '0', 'Content-Encoding': 'gzip'}
    """

    assert (
        save_api.archive_url_parser()
        == "https://web.archive.org/web/20210102094009/https://www.scribbr.com/citing-sources/et-al/"
    )

    save_api.headers = """
    START
    X-Cache-Key: https://web.archive.org/web/20171128185327/https://www.scribbr.com/citing-sources/et-al/US
    END
    """

    assert (
        save_api.archive_url_parser()
        == "https://web.archive.org/web/20171128185327/https://www.scribbr.com/citing-sources/et-al/"
    )

    save_api.headers = "TEST TEST TEST AND NO MATCH - TEST FOR RESPONSE URL MATCHING"
    save_api.response_url = "https://web.archive.org/web/20171128185327/https://www.scribbr.com/citing-sources/et-al"
    assert (
        save_api.archive_url_parser()
        == "https://web.archive.org/web/20171128185327/https://www.scribbr.com/citing-sources/et-al"
    )


def test_archive_url():
    """
    Checks the value of the archive_url attribute when the save method was
    not explicitly invoked by the end user but was invoked implicitly by the
    archive_url property.
    """
    url = "https://example.com"
    user_agent = "Mozilla/5.0 (MacBook Air; M1 Mac OS X 11_4) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Safari/604.1"
    save_api = WaybackMachineSaveAPI(url, user_agent)
    save_api.saved_archive = (
        "https://web.archive.org/web/20220124063056/https://example.com/"
    )
    assert save_api.archive_url == save_api.saved_archive
13
tests/test_utils.py
Normal file
13
tests/test_utils.py
Normal file
@ -0,0 +1,13 @@
from waybackpy.utils import latest_version, DEFAULT_USER_AGENT
from waybackpy.__version__ import __version__


def test_default_user_agent():
    assert (
        DEFAULT_USER_AGENT
        == "waybackpy %s - https://github.com/akamhy/waybackpy" % __version__
    )


def test_latest_version():
    assert __version__ == latest_version(package_name="waybackpy")
waybackpy/__init__.py
@@ -1,32 +1,14 @@
# -*- coding: utf-8 -*-

# ┏┓┏┓┏┓━━━━━━━━━━┏━━┓━━━━━━━━━━┏┓━━┏━━━┓━━━━━
# ┃┃┃┃┃┃━━━━━━━━━━┃┏┓┃━━━━━━━━━━┃┃━━┃┏━┓┃━━━━━
# ┃┃┃┃┃┃┏━━┓━┏┓━┏┓┃┗┛┗┓┏━━┓━┏━━┓┃┃┏┓┃┗━┛┃┏┓━┏┓
# ┃┗┛┗┛┃┗━┓┃━┃┃━┃┃┃┏━┓┃┗━┓┃━┃┏━┛┃┗┛┛┃┏━━┛┃┃━┃┃
# ┗┓┏┓┏┛┃┗┛┗┓┃┗━┛┃┃┗━┛┃┃┗┛┗┓┃┗━┓┃┏┓┓┃┃━━━┃┗━┛┃
# ━┗┛┗┛━┗━━━┛┗━┓┏┛┗━━━┛┗━━━┛┗━━┛┗┛┗┛┗┛━━━┗━┓┏┛
# ━━━━━━━━━━━┏━┛┃━━━━━━━━━━━━━━━━━━━━━━━━┏━┛┃━
# ━━━━━━━━━━━┗━━┛━━━━━━━━━━━━━━━━━━━━━━━━┗━━┛━

"""
Waybackpy is a Python library that interfaces with the Internet Archive's Wayback Machine API.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Archive pages and retrieve archived pages easily.

Usage:
    >>> import waybackpy
    >>> target_url = waybackpy.Url('https://www.python.org', 'Your-apps-cool-user-agent')
    >>> new_archive = target_url.save()
    >>> print(new_archive)
    https://web.archive.org/web/20200502170312/https://www.python.org/

Full documentation @ <https://akamhy.github.io/waybackpy/>.
:copyright: (c) 2020 by akamhy.
:license: MIT
"""

from .wrapper import Url
from .__version__ import __title__, __description__, __url__, __version__
from .__version__ import __author__, __author_email__, __license__, __copyright__
from .cdx_api import WaybackMachineCDXServerAPI
from .save_api import WaybackMachineSaveAPI
from .availability_api import WaybackMachineAvailabilityAPI
from .__version__ import (
    __title__,
    __description__,
    __url__,
    __version__,
    __author__,
    __author_email__,
    __license__,
    __copyright__,
)
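After this hunk the 3.x API classes are re-exported at the package root alongside the legacy Url class. A minimal sketch of the resulting import surface (not part of the diff; it assumes the 3.0.1 layout shown in this compare view):

    from waybackpy import (
        Url,  # legacy 2.x interface, kept for backwards compatibility
        WaybackMachineAvailabilityAPI,
        WaybackMachineCDXServerAPI,
        WaybackMachineSaveAPI,
        __version__,
    )

    print(__version__)  # "3.0.1" on the new side of this diff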
waybackpy/__version__.py
@@ -1,10 +1,11 @@
# -*- coding: utf-8 -*-

__title__ = "waybackpy"
__description__ = "A Python library that interfaces with the Internet Archive's Wayback Machine API. Archive pages and retrieve archived pages easily."
__description__ = (
    "Python package that interfaces with the Internet Archive's Wayback Machine APIs. "
    "Archive pages and retrieve archived pages easily."
)
__url__ = "https://akamhy.github.io/waybackpy/"
__version__ = "2.1.0"
__author__ = "akamhy"
__author_email__ = "akash3pro@gmail.com"
__version__ = "3.0.1"
__author__ = "Akash Mahanty"
__author_email__ = "akamhy@yahoo.com"
__license__ = "MIT"
__copyright__ = "Copyright 2020 akamhy"
__copyright__ = "Copyright 2020-2022 Akash Mahanty et al."
waybackpy/availability_api.py (new file, 198 lines)
@@ -0,0 +1,198 @@
import time
import json
import requests
from datetime import datetime
from .utils import DEFAULT_USER_AGENT
from .exceptions import (
    ArchiveNotInAvailabilityAPIResponse,
    InvalidJSONInAvailabilityAPIResponse,
)


class WaybackMachineAvailabilityAPI:
    """
    Class that interfaces the availability API of the Wayback Machine.
    """

    def __init__(self, url, user_agent=DEFAULT_USER_AGENT, max_tries=3):
        self.url = str(url).strip().replace(" ", "%20")
        self.user_agent = user_agent
        self.headers = {"User-Agent": self.user_agent}
        self.payload = {"url": "{url}".format(url=self.url)}
        self.endpoint = "https://archive.org/wayback/available"
        self.max_tries = max_tries
        self.tries = 0
        self.last_api_call_unix_time = int(time.time())
        self.api_call_time_gap = 5
        self.JSON = None

    def unix_timestamp_to_wayback_timestamp(self, unix_timestamp):
        """
        Converts a Unix timestamp to a Wayback Machine timestamp (YYYYMMDDhhmmss).
        """
        return datetime.utcfromtimestamp(int(unix_timestamp)).strftime("%Y%m%d%H%M%S")

    def __repr__(self):
        """
        Same as the string representation, just returns the archive URL as a string.
        """
        return str(self)

    def __str__(self):
        """
        String representation of the class. If at least one API call was
        successfully made, returns the archive URL as a string. Else returns "".
        """

        # __str__ must not return anything other than a string object,
        # so if someone asks for the string representation before making
        # any API requests we just return "".
        if not self.JSON:
            return ""

        return self.archive_url

    def json(self):
        """
        Makes the API call to the availability API, sets the JSON response
        on the JSON attribute of the instance and also returns it.
        """
        time_diff = int(time.time()) - self.last_api_call_unix_time
        sleep_time = self.api_call_time_gap - time_diff

        if sleep_time > 0:
            time.sleep(sleep_time)

        self.response = requests.get(
            self.endpoint, params=self.payload, headers=self.headers
        )
        self.last_api_call_unix_time = int(time.time())
        self.tries += 1
        try:
            self.JSON = self.response.json()
        except json.decoder.JSONDecodeError:
            raise InvalidJSONInAvailabilityAPIResponse(
                "Response data:\n{text}".format(text=self.response.text)
            )

        return self.JSON

    def timestamp(self):
        """
        Converts the timestamp from the JSON response to a datetime object.
        If the JSON attribute of the instance is None, it implies that either
        the last API call failed or one was never made.

        If there is no JSON, or there is JSON but no timestamp in it, returns
        the maximum value possible for a datetime object.

        If you get a URL as a response from the availability API, it is
        guaranteed that you can get the datetime object from the timestamp.
        """
        if not self.JSON or not self.JSON["archived_snapshots"]:
            return datetime.max

        return datetime.strptime(
            self.JSON["archived_snapshots"]["closest"]["timestamp"], "%Y%m%d%H%M%S"
        )

    @property
    def archive_url(self):
        """
        Reads the JSON response data, tries to get the archive URL and returns
        it if found, else raises ArchiveNotInAvailabilityAPIResponse.
        """
        data = self.JSON

        # If the user didn't use oldest, newest or near but tries to access the
        # archive_url attribute then, we assume they are fine with any archive
        # and invoke the oldest archive function.
        if not data:
            self.oldest()

        # If data is still empty then there are probably no
        # archives for the requested URL; retry until max_tries is exhausted.
        if not data or not data["archived_snapshots"]:
            while (self.tries < self.max_tries) and (
                not data or not data["archived_snapshots"]
            ):
                self.json()  # It makes a new API call
                data = self.JSON  # json() updated the value of the JSON attribute

        # If even after we exhausted the max_tries there is no archive,
        # we give up and raise the exception.
        if not data or not data["archived_snapshots"]:
            raise ArchiveNotInAvailabilityAPIResponse(
                "Archive not found in the availability "
                + "API response, the URL you requested may not have any "
                + "archives yet. You may retry after some time or archive the webpage now."
                + "\nResponse data:\n{response}".format(response=self.response.text)
            )
        else:
            archive_url = data["archived_snapshots"]["closest"]["url"]
            archive_url = archive_url.replace(
                "http://web.archive.org/web/", "https://web.archive.org/web/", 1
            )
        return archive_url

    def wayback_timestamp(self, **kwargs):
        """
        Prepends a zero before the year, month, day, hour and minute so that they
        conform to the YYYYMMDDhhmmss Wayback Machine timestamp format.
        """
        return "".join(
            str(kwargs[key]).zfill(2)
            for key in ["year", "month", "day", "hour", "minute"]
        )

    def oldest(self):
        """
        Passing the year 1994 should return the oldest archive: the Wayback
        Machine was started in May 1996, so no archive can predate 1994 and
        the API returns the closest, i.e. the oldest, snapshot.
        """
        return self.near(year=1994)

    def newest(self):
        """
        Passing the current UNIX time should be sufficient to get the newest
        archive, considering the API request-response time delay and also the
        database lag on the Wayback Machine.
        """
        return self.near(unix_timestamp=int(time.time()))

    def near(
        self,
        year=None,
        month=None,
        day=None,
        hour=None,
        minute=None,
        unix_timestamp=None,
    ):
        """
        The main method of this class; the oldest and newest methods depend
        on it.

        It generates the timestamp based on the input, either by calling the
        unix_timestamp_to_wayback_timestamp or the wayback_timestamp method with
        appropriate arguments for their respective parameters,
        adds the timestamp to the payload dictionary,
        and finally invokes the json method to make the API call, then returns the instance.
        """
        if unix_timestamp:
            timestamp = self.unix_timestamp_to_wayback_timestamp(unix_timestamp)
        else:
            now = datetime.utcnow().timetuple()
            timestamp = self.wayback_timestamp(
                year=year if year else now.tm_year,
                month=month if month else now.tm_mon,
                day=day if day else now.tm_mday,
                hour=hour if hour else now.tm_hour,
                minute=minute if minute else now.tm_min,
            )

        self.payload["timestamp"] = timestamp
        self.json()
        return self
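A minimal usage sketch for the availability API defined above (not part of the diff; the user agent string is an illustrative placeholder):

    from waybackpy import WaybackMachineAvailabilityAPI

    availability_api = WaybackMachineAvailabilityAPI(
        "https://example.com/", user_agent="my-tool/1.0"  # placeholder user agent
    )

    # oldest()/newest()/near() return the instance, whose str() is the archive URL.
    print(availability_api.oldest())
    print(availability_api.near(year=2015, month=2))
    print(availability_api.newest().timestamp())  # datetime of the closest snapshot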
waybackpy/cdx_api.py (new file, 194 lines)
@@ -0,0 +1,194 @@
from .exceptions import WaybackError
from .cdx_snapshot import CDXSnapshot
from .cdx_utils import (
    get_total_pages,
    get_response,
    check_filters,
    check_collapses,
    check_match_type,
    full_url,
)

from .utils import DEFAULT_USER_AGENT


class WaybackMachineCDXServerAPI:
    """
    Class that interfaces the CDX server API of the Wayback Machine.
    """

    def __init__(
        self,
        url,
        user_agent=DEFAULT_USER_AGENT,
        start_timestamp=None,  # from, can not use 'from' as it's a keyword
        end_timestamp=None,  # to, not using 'to' for symmetry with 'from'
        filters=[],
        match_type=None,
        gzip=None,
        collapses=[],
        limit=None,
        max_tries=3,
    ):
        self.url = str(url).strip().replace(" ", "%20")
        self.user_agent = user_agent
        self.start_timestamp = str(start_timestamp) if start_timestamp else None
        self.end_timestamp = str(end_timestamp) if end_timestamp else None
        self.filters = filters
        check_filters(self.filters)
        self.match_type = str(match_type).strip() if match_type else None
        check_match_type(self.match_type, self.url)
        self.gzip = gzip if gzip else True
        self.collapses = collapses
        check_collapses(self.collapses)
        self.limit = limit if limit else 5000
        self.max_tries = max_tries
        self.last_api_request_url = None
        self.use_page = False
        self.endpoint = "https://web.archive.org/cdx/search/cdx"

    def cdx_api_manager(self, payload, headers, use_page=False):

        total_pages = get_total_pages(self.url, self.user_agent)
        # Use the pagination API only when there are at least two pages of
        # archives; for smaller result sets the resume-key approach below is
        # more accurate, as the pagination API sometimes lags behind.
        if use_page is True and total_pages >= 2:
            blank_pages = 0
            for i in range(total_pages):
                payload["page"] = str(i)

                url = full_url(self.endpoint, params=payload)
                res = get_response(url, headers=headers)

                self.last_api_request_url = url
                text = res.text
                if len(text) == 0:
                    blank_pages += 1

                if blank_pages >= 2:
                    break

                yield text
        else:

            payload["showResumeKey"] = "true"
            payload["limit"] = str(self.limit)
            resumeKey = None

            more = True
            while more:

                if resumeKey:
                    payload["resumeKey"] = resumeKey

                url = full_url(self.endpoint, params=payload)
                res = get_response(url, headers=headers)

                self.last_api_request_url = url

                text = res.text.strip()
                lines = text.splitlines()

                more = False

                if len(lines) >= 3:

                    second_last_line = lines[-2]

                    if len(second_last_line) == 0:

                        resumeKey = lines[-1].strip()
                        text = text.replace(resumeKey, "", 1).strip()
                        more = True

                yield text

    def add_payload(self, payload):
        if self.start_timestamp:
            payload["from"] = self.start_timestamp

        if self.end_timestamp:
            payload["to"] = self.end_timestamp

        if self.gzip is not True:
            payload["gzip"] = "false"

        if self.match_type:
            payload["matchType"] = self.match_type

        if self.filters and len(self.filters) > 0:
            for i, f in enumerate(self.filters):
                payload["filter" + str(i)] = f

        if self.collapses and len(self.collapses) > 0:
            for i, f in enumerate(self.collapses):
                payload["collapse" + str(i)] = f

        # No need to return anything; the payload dictionary is mutated in place.
        payload["url"] = self.url

    def snapshots(self):
        payload = {}
        headers = {"User-Agent": self.user_agent}

        self.add_payload(payload)

        if not self.start_timestamp or self.end_timestamp:
            self.use_page = True

        if self.collapses != []:
            self.use_page = False

        texts = self.cdx_api_manager(payload, headers, use_page=self.use_page)

        for text in texts:

            if text.isspace() or len(text) <= 1 or not text:
                continue

            snapshot_list = text.split("\n")

            for snapshot in snapshot_list:

                if len(snapshot) < 46:  # 14 + 32 (timestamp + digest)
                    continue

                properties = {
                    "urlkey": None,
                    "timestamp": None,
                    "original": None,
                    "mimetype": None,
                    "statuscode": None,
                    "digest": None,
                    "length": None,
                }

                prop_values = snapshot.split(" ")

                prop_values_len = len(prop_values)
                properties_len = len(properties)

                if prop_values_len != properties_len:
                    raise WaybackError(
                        "Snapshot returned by CDX API has {prop_values_len} properties".format(
                            prop_values_len=prop_values_len
                        )
                        + " instead of the expected {properties_len} ".format(
                            properties_len=properties_len
                        )
                        + "properties.\nProblematic snapshot: {snapshot}".format(
                            snapshot=snapshot
                        )
                    )

                (
                    properties["urlkey"],
                    properties["timestamp"],
                    properties["original"],
                    properties["mimetype"],
                    properties["statuscode"],
                    properties["digest"],
                    properties["length"],
                ) = prop_values

                yield CDXSnapshot(properties)
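A minimal usage sketch for the CDX server API defined above (not part of the diff; the user agent, filter and collapse values are illustrative):

    from waybackpy import WaybackMachineCDXServerAPI

    cdx_api = WaybackMachineCDXServerAPI(
        "example.com",
        user_agent="my-tool/1.0",    # placeholder user agent
        filters=["statuscode:200"],  # keep only snapshots that returned HTTP 200
        collapses=["urlkey"],        # one snapshot per unique URL key
    )

    # snapshots() is a generator that yields CDXSnapshot objects.
    for snapshot in cdx_api.snapshots():
        print(snapshot.timestamp, snapshot.archive_url)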
waybackpy/cdx_snapshot.py (new file, 35 lines)
@@ -0,0 +1,35 @@
from datetime import datetime


class CDXSnapshot:
    """
    Class for the CDX snapshot lines returned by the CDX API.
    Each valid line of the CDX API output is cast to a CDXSnapshot object
    by the CDX API interface.
    This provides the end user the ease of using the data as attributes
    of the CDXSnapshot.
    """

    def __init__(self, properties):
        self.urlkey = properties["urlkey"]
        self.timestamp = properties["timestamp"]
        self.datetime_timestamp = datetime.strptime(self.timestamp, "%Y%m%d%H%M%S")
        self.original = properties["original"]
        self.mimetype = properties["mimetype"]
        self.statuscode = properties["statuscode"]
        self.digest = properties["digest"]
        self.length = properties["length"]
        self.archive_url = (
            "https://web.archive.org/web/" + self.timestamp + "/" + self.original
        )

    def __str__(self):
        return "{urlkey} {timestamp} {original} {mimetype} {statuscode} {digest} {length}".format(
            urlkey=self.urlkey,
            timestamp=self.timestamp,
            original=self.original,
            mimetype=self.mimetype,
            statuscode=self.statuscode,
            digest=self.digest,
            length=self.length,
        )
waybackpy/cdx_utils.py (new file, 128 lines)
@@ -0,0 +1,128 @@
import re
import requests
from urllib3.util.retry import Retry
from requests.adapters import HTTPAdapter
from .exceptions import WaybackError
from .utils import DEFAULT_USER_AGENT


def get_total_pages(url, user_agent=DEFAULT_USER_AGENT):
    endpoint = "https://web.archive.org/cdx/search/cdx?"
    payload = {"showNumPages": "true", "url": str(url)}
    headers = {"User-Agent": user_agent}
    request_url = full_url(endpoint, params=payload)
    response = get_response(request_url, headers=headers)
    return int(response.text.strip())


def full_url(endpoint, params):
    if not params:
        return endpoint
    full_url = endpoint if endpoint.endswith("?") else (endpoint + "?")
    for key, val in params.items():
        key = "filter" if key.startswith("filter") else key
        key = "collapse" if key.startswith("collapse") else key
        amp = "" if full_url.endswith("?") else "&"
        full_url = (
            full_url
            + amp
            + "{key}={val}".format(key=key, val=requests.utils.quote(str(val)))
        )
    return full_url


def get_response(
    url,
    headers=None,
    retries=5,
    backoff_factor=0.5,
    no_raise_on_redirects=False,
):
    session = requests.Session()
    retries = Retry(
        total=retries,
        backoff_factor=backoff_factor,
        status_forcelist=[500, 502, 503, 504],
    )
    session.mount("https://", HTTPAdapter(max_retries=retries))

    try:
        response = session.get(url, headers=headers)
        session.close()
        return response
    except Exception as e:
        reason = str(e)
        exc_message = "Error while retrieving {url}.\n{reason}".format(
            url=url, reason=reason
        )
        exc = WaybackError(exc_message)
        exc.__cause__ = e
        raise exc


def check_filters(filters):
    if not isinstance(filters, list):
        raise WaybackError("filters must be a list.")

    # [!]field:regex
    for _filter in filters:
        try:

            match = re.search(
                r"(\!?(?:urlkey|timestamp|original|mimetype|statuscode|digest|length)):(.*)",
                _filter,
            )

            match.group(1)
            match.group(2)

        except Exception:

            exc_message = (
                "Filter '{_filter}' is not following the cdx filter syntax.".format(
                    _filter=_filter
                )
            )
            raise WaybackError(exc_message)


def check_collapses(collapses):

    if not isinstance(collapses, list):
        raise WaybackError("collapses must be a list.")

    if len(collapses) == 0:
        return

    for collapse in collapses:
        try:
            match = re.search(
                r"(urlkey|timestamp|original|mimetype|statuscode|digest|length)(:?[0-9]{1,99})?",
                collapse,
            )
            match.group(1)
            if 2 == len(match.groups()):
                match.group(2)
        except Exception:
            exc_message = "collapse argument '{collapse}' is not following the cdx collapse syntax.".format(
                collapse=collapse
            )
            raise WaybackError(exc_message)


def check_match_type(match_type, url):
    if not match_type:
        return

    if "*" in url:
        raise WaybackError(
            "Can not use wildcard in the URL along with the match_type arguments."
        )

    legal_match_type = ["exact", "prefix", "host", "domain"]

    if match_type not in legal_match_type:
        exc_message = "{match_type} is not an allowed match type.\nUse one from 'exact', 'prefix', 'host' or 'domain'".format(
            match_type=match_type
        )
        raise WaybackError(exc_message)
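A small illustration (not part of the diff) of how full_url expands the numbered filter/collapse keys that WaybackMachineCDXServerAPI.add_payload generates:

    from waybackpy.cdx_utils import full_url

    endpoint = "https://web.archive.org/cdx/search/cdx"
    payload = {"url": "example.com", "filter0": "statuscode:200", "collapse0": "urlkey"}

    # Keys starting with "filter"/"collapse" are emitted as repeated
    # "filter"/"collapse" query parameters, and values are percent-encoded.
    print(full_url(endpoint, payload))
    # https://web.archive.org/cdx/search/cdx?url=example.com&filter=statuscode%3A200&collapse=urlkey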
waybackpy/cli.py (new file, 347 lines)
@@ -0,0 +1,347 @@
import click
import re
import os
import json as JSON
import random
import string
from .__version__ import __version__
from .utils import DEFAULT_USER_AGENT
from .cdx_api import WaybackMachineCDXServerAPI
from .save_api import WaybackMachineSaveAPI
from .availability_api import WaybackMachineAvailabilityAPI
from .wrapper import Url


@click.command()
@click.option(
    "-u", "--url", help="URL on which Wayback Machine operations are to be performed."
)
@click.option(
    "-ua",
    "--user-agent",
    "--user_agent",
    default=DEFAULT_USER_AGENT,
    help="User agent, default user agent is '%s'" % DEFAULT_USER_AGENT,
)
@click.option(
    "-v", "--version", is_flag=True, default=False, help="Print waybackpy version."
)
@click.option(
    "-n",
    "--newest",
    "-au",
    "--archive_url",
    "--archive-url",
    default=False,
    is_flag=True,
    help="Fetch the newest archive of the specified URL",
)
@click.option(
    "-o",
    "--oldest",
    default=False,
    is_flag=True,
    help="Fetch the oldest archive of the specified URL",
)
@click.option(
    "-j",
    "--json",
    default=False,
    is_flag=True,
    help="Spit out the JSON data for availability_api commands.",
)
@click.option(
    "-N", "--near", default=False, is_flag=True, help="Archive near specified time."
)
@click.option("-Y", "--year", type=click.IntRange(1994, 9999), help="Year in integer.")
@click.option("-M", "--month", type=click.IntRange(1, 12), help="Month in integer.")
@click.option("-D", "--day", type=click.IntRange(1, 31), help="Day in integer.")
@click.option("-H", "--hour", type=click.IntRange(0, 24), help="Hour in integer.")
@click.option("-MIN", "--minute", type=click.IntRange(0, 60), help="Minute in integer.")
@click.option(
    "-s",
    "--save",
    default=False,
    is_flag=True,
    help="Save the specified URL's webpage and print the archive URL.",
)
@click.option(
    "-h",
    "--headers",
    default=False,
    is_flag=True,
    help="Spit out the headers data for save_api commands.",
)
@click.option(
    "-ku",
    "--known-urls",
    "--known_urls",
    default=False,
    is_flag=True,
    help="List known URLs. Uses CDX API.",
)
@click.option(
    "-sub",
    "--subdomain",
    default=False,
    is_flag=True,
    help="Use with '--known_urls' to include known URLs for subdomains.",
)
@click.option(
    "-f",
    "--file",
    default=False,
    is_flag=True,
    help="Use with '--known_urls' to save the URLs in file at current directory.",
)
@click.option(
    "-c",
    "--cdx",
    default=False,
    is_flag=True,
    help="List snapshots of the URL using the CDX server API.",
)
@click.option(
    "-st",
    "--start-timestamp",
    "--start_timestamp",
)
@click.option(
    "-et",
    "--end-timestamp",
    "--end_timestamp",
)
@click.option(
    "-f",
    "--filters",
    multiple=True,
)
@click.option(
    "-mt",
    "--match-type",
    "--match_type",
)
@click.option(
    "-gz",
    "--gzip",
)
@click.option(
    "-c",
    "--collapses",
    multiple=True,
)
@click.option(
    "-l",
    "--limit",
)
@click.option(
    "-cp",
    "--cdx-print",
    "--cdx_print",
    multiple=True,
)
def main(
    url,
    user_agent,
    version,
    newest,
    oldest,
    json,
    near,
    year,
    month,
    day,
    hour,
    minute,
    save,
    headers,
    known_urls,
    subdomain,
    file,
    cdx,
    start_timestamp,
    end_timestamp,
    filters,
    match_type,
    gzip,
    collapses,
    limit,
    cdx_print,
):
    """
    ┏┓┏┓┏┓━━━━━━━━━━┏━━┓━━━━━━━━━━┏┓━━┏━━━┓━━━━━
    ┃┃┃┃┃┃━━━━━━━━━━┃┏┓┃━━━━━━━━━━┃┃━━┃┏━┓┃━━━━━
    ┃┃┃┃┃┃┏━━┓━┏┓━┏┓┃┗┛┗┓┏━━┓━┏━━┓┃┃┏┓┃┗━┛┃┏┓━┏┓
    ┃┗┛┗┛┃┗━┓┃━┃┃━┃┃┃┏━┓┃┗━┓┃━┃┏━┛┃┗┛┛┃┏━━┛┃┃━┃┃
    ┗┓┏┓┏┛┃┗┛┗┓┃┗━┛┃┃┗━┛┃┃┗┛┗┓┃┗━┓┃┏┓┓┃┃━━━┃┗━┛┃
    ━┗┛┗┛━┗━━━┛┗━┓┏┛┗━━━┛┗━━━┛┗━━┛┗┛┗┛┗┛━━━┗━┓┏┛
    ━━━━━━━━━━━┏━┛┃━━━━━━━━━━━━━━━━━━━━━━━━┏━┛┃━
    ━━━━━━━━━━━┗━━┛━━━━━━━━━━━━━━━━━━━━━━━━┗━━┛━

    waybackpy : Python package & CLI tool that interfaces the Wayback Machine API

    Released under the MIT License.
    License @ https://github.com/akamhy/waybackpy/blob/master/LICENSE

    Copyright (c) 2020 waybackpy contributors. Contributors list @
    https://github.com/akamhy/waybackpy/graphs/contributors

    https://github.com/akamhy/waybackpy

    https://pypi.org/project/waybackpy

    """

    if version:
        click.echo("waybackpy version %s" % __version__)
        return

    if not url:
        click.echo("No URL detected. Please pass a URL.")
        return

    def echo_availability_api(availability_api_instance):
        click.echo("Archive URL:")
        if not availability_api_instance.archive_url:
            archive_url = (
                "NO ARCHIVE FOUND - The requested URL is probably "
                + "not yet archived or if the URL was recently archived then it is "
                + "not yet available via the Wayback Machine's availability API "
                + "because of database lag and should be available after some time."
            )
        else:
            archive_url = availability_api_instance.archive_url
        click.echo(archive_url)
        if json:
            click.echo("JSON response:")
            click.echo(JSON.dumps(availability_api_instance.JSON))

    availability_api = WaybackMachineAvailabilityAPI(url, user_agent=user_agent)

    if oldest:
        availability_api.oldest()
        echo_availability_api(availability_api)
        return

    if newest:
        availability_api.newest()
        echo_availability_api(availability_api)
        return

    if near:
        near_args = {}
        keys = ["year", "month", "day", "hour", "minute"]
        args_arr = [year, month, day, hour, minute]
        for key, arg in zip(keys, args_arr):
            if arg:
                near_args[key] = arg
        availability_api.near(**near_args)
        echo_availability_api(availability_api)
        return

    if save:
        save_api = WaybackMachineSaveAPI(url, user_agent=user_agent)
        save_api.save()
        click.echo("Archive URL:")
        click.echo(save_api.archive_url)
        click.echo("Cached save:")
        click.echo(save_api.cached_save)
        if headers:
            click.echo("Save API headers:")
            click.echo(save_api.headers)
        return

    def save_urls_on_file(url_gen):
        domain = None
        sys_random = random.SystemRandom()
        uid = "".join(
            sys_random.choice(string.ascii_lowercase + string.digits) for _ in range(6)
        )
        url_count = 0

        for url in url_gen:
            url_count += 1
            if not domain:
                match = re.search("https?://([A-Za-z_0-9.-]+).*", url)

                domain = "domain-unknown"

                if match:
                    domain = match.group(1)

            file_name = "{domain}-urls-{uid}.txt".format(domain=domain, uid=uid)
            file_path = os.path.join(os.getcwd(), file_name)
            if not os.path.isfile(file_path):
                open(file_path, "w+").close()

            with open(file_path, "a") as f:
                f.write("{url}\n".format(url=url))

            click.echo(url)

        if url_count > 0:
            click.echo(
                "\n\n'{file_name}' saved in current working directory".format(
                    file_name=file_name
                )
            )
        else:
            click.echo("No known URLs found. Please try a different input!")

    if known_urls:
        wayback = Url(url, user_agent)
        url_gen = wayback.known_urls(subdomain=subdomain)

        if file:
            return save_urls_on_file(url_gen)
        else:
            for url in url_gen:
                click.echo(url)

    if cdx:
        filters = list(filters)
        collapses = list(collapses)
        cdx_print = list(cdx_print)

        cdx_api = WaybackMachineCDXServerAPI(
            url,
            user_agent=user_agent,
            start_timestamp=start_timestamp,
            end_timestamp=end_timestamp,
            filters=filters,
            match_type=match_type,
            gzip=gzip,
            collapses=collapses,
            limit=limit,
        )

        snapshots = cdx_api.snapshots()

        for snapshot in snapshots:
            if len(cdx_print) == 0:
                click.echo(snapshot)
            else:
                output_string = ""
                # Membership tests of the form
                # `if "urlkey" or "url-key" or "url_key" in cdx_print:` are
                # always true in Python; any() implements the intended check.
                if any(key in cdx_print for key in ("urlkey", "url-key", "url_key")):
                    output_string = output_string + snapshot.urlkey + " "
                if any(
                    key in cdx_print for key in ("timestamp", "time-stamp", "time_stamp")
                ):
                    output_string = output_string + snapshot.timestamp + " "
                if "original" in cdx_print:
                    output_string = output_string + snapshot.original + " "
                if any(
                    key in cdx_print for key in ("mimetype", "mime-type", "mime_type")
                ):
                    output_string = output_string + snapshot.mimetype + " "
                if any(
                    key in cdx_print
                    for key in ("statuscode", "status-code", "status_code")
                ):
                    output_string = output_string + snapshot.statuscode + " "
                if "digest" in cdx_print:
                    output_string = output_string + snapshot.digest + " "
                if "length" in cdx_print:
                    output_string = output_string + snapshot.length + " "
                if any(
                    key in cdx_print
                    for key in ("archiveurl", "archive-url", "archive_url")
                ):
                    output_string = output_string + snapshot.archive_url + " "
                click.echo(output_string)


if __name__ == "__main__":
    main()
waybackpy/exceptions.py
@@ -1,6 +1,52 @@
# -*- coding: utf-8 -*-
"""
waybackpy.exceptions
~~~~~~~~~~~~~~~~~~~~
This module contains the set of Waybackpy's exceptions.
"""


class WaybackError(Exception):
    """
    Raised when API Service error.
    Raised when Waybackpy can not return what you asked for.
    1) The Wayback Machine API service is unreachable/down.
    2) You passed illegal arguments.

    All other exceptions are inherited from this class.
    """


class RedirectSaveError(WaybackError):
    """
    Raised when the original URL is redirected and the
    redirect URL is archived but not the original URL.
    """


class URLError(Exception):
    """
    Raised when malformed URLs are passed as arguments.
    """


class MaximumRetriesExceeded(WaybackError):
    """
    Raised when the maximum number of retries is exceeded.
    """


class MaximumSaveRetriesExceeded(MaximumRetriesExceeded):
    """
    Raised when the maximum number of save retries is exceeded.
    """


class ArchiveNotInAvailabilityAPIResponse(WaybackError):
    """
    Could not parse the archive in the JSON response of the availability API.
    """


class InvalidJSONInAvailabilityAPIResponse(WaybackError):
    """
    The availability API returned invalid JSON.
    """
waybackpy/save_api.py
Normal file
186
waybackpy/save_api.py
Normal file
@ -0,0 +1,186 @@
|
||||
import re
|
||||
import time
|
||||
import requests
|
||||
|
||||
from datetime import datetime
|
||||
from urllib3.util.retry import Retry
|
||||
from requests.adapters import HTTPAdapter
|
||||
|
||||
from .utils import DEFAULT_USER_AGENT
|
||||
from .exceptions import MaximumSaveRetriesExceeded
|
||||
|
||||
|
||||
class WaybackMachineSaveAPI:
|
||||
|
||||
"""
|
||||
WaybackMachineSaveAPI class provides an interface for saving URLs on the
|
||||
Wayback Machine.
|
||||
"""
|
||||
|
||||
def __init__(self, url, user_agent=DEFAULT_USER_AGENT, max_tries=8):
|
||||
self.url = str(url).strip().replace(" ", "%20")
|
||||
self.request_url = "https://web.archive.org/save/" + self.url
|
||||
self.user_agent = user_agent
|
||||
self.request_headers = {"User-Agent": self.user_agent}
|
||||
self.max_tries = max_tries
|
||||
self.total_save_retries = 5
|
||||
self.backoff_factor = 0.5
|
||||
self.status_forcelist = [500, 502, 503, 504]
|
||||
self._archive_url = None
|
||||
self.instance_birth_time = datetime.utcnow()
|
||||
|
||||
@property
|
||||
def archive_url(self):
|
||||
"""
|
||||
Returns the archive URL is already cached by _archive_url
|
||||
else invoke the save method to save the archive which returns the
|
||||
archive thus we return the methods return value.
|
||||
"""
|
||||
|
||||
if self._archive_url:
|
||||
return self._archive_url
|
||||
else:
|
||||
return self.save()
|
||||
|
||||
def get_save_request_headers(self):
|
||||
"""
|
||||
Creates a session and tries 'retries' number of times to
|
||||
retrieve the archive.
|
||||
|
||||
If successful in getting the response, sets the headers, status_code
|
||||
and response_url attributes.
|
||||
|
||||
The archive is usually in the headers but it can also be the response URL
|
||||
as the Wayback Machine redirects to the archive after a successful capture
|
||||
of the webpage.
|
||||
|
||||
Wayback Machine's save API is known
|
||||
to be very unreliable thus if it fails first check opening
|
||||
the response URL yourself in the browser.
|
||||
"""
|
||||
session = requests.Session()
|
||||
retries = Retry(
|
||||
total=self.total_save_retries,
|
||||
backoff_factor=self.backoff_factor,
|
||||
status_forcelist=self.status_forcelist,
|
||||
)
|
||||
session.mount("https://", HTTPAdapter(max_retries=retries))
|
||||
self.response = session.get(self.request_url, headers=self.request_headers)
|
||||
self.headers = (
|
||||
self.response.headers
|
||||
) # <class 'requests.structures.CaseInsensitiveDict'>
|
||||
self.status_code = self.response.status_code
|
||||
self.response_url = self.response.url
|
||||
session.close()
|
||||
|
||||
def archive_url_parser(self):
|
||||
"""
|
||||
Three regexen (like oxen?) are used to search for the
|
||||
archive URL in the headers and finally look in the response URL
|
||||
for the archive URL.
|
||||
"""
|
||||
|
||||
regex1 = r"Content-Location: (/web/[0-9]{14}/.*)"
|
||||
match = re.search(regex1, str(self.headers))
|
||||
if match:
|
||||
return "https://web.archive.org" + match.group(1)
|
||||
|
||||
regex2 = r"rel=\"memento.*?(web\.archive\.org/web/[0-9]{14}/.*?)>"
|
||||
match = re.search(regex2, str(self.headers))
|
||||
if match:
|
||||
return "https://" + match.group(1)
|
||||
|
||||
regex3 = r"X-Cache-Key:\shttps(.*)[A-Z]{2}"
|
||||
match = re.search(regex3, str(self.headers))
|
||||
if match:
|
||||
return "https" + match.group(1)
|
||||
|
||||
if self.response_url:
|
||||
self.response_url = self.response_url.strip()
|
||||
if "web.archive.org/web" in self.response_url:
|
||||
regex = r"web\.archive\.org/web/(?:[0-9]*?)/(?:.*)$"
|
||||
match = re.search(regex, self.response_url)
|
||||
if match:
|
||||
return "https://" + match.group(0)
|
||||
|
||||
def sleep(self, tries):
|
||||
"""
|
||||
Ensure that the we wait some time before succesive retries so that we
|
||||
don't waste the retries before the page is even captured by the Wayback
|
||||
Machine crawlers also ensures that we are not putting too much load on
|
||||
the Wayback Machine's save API.
|
||||
|
||||
If tries are multiple of 3 sleep 10 seconds else sleep 5 seconds.
|
||||
"""
|
||||
|
||||
sleep_seconds = 5
|
||||
if tries % 3 == 0:
|
||||
sleep_seconds = 10
|
||||
time.sleep(sleep_seconds)
|
||||
|
||||
def timestamp(self):
|
||||
"""
|
||||
Read the timestamp off the archive URL and convert the Wayback Machine
|
||||
timestamp to datetime object.
|
||||
|
||||
Also check if the time on archive is URL and compare it to instance birth
|
||||
time.
|
||||
|
||||
If time on the archive is older than the instance creation time set the cached_save
|
||||
to True else set it to False. The flag can be used to check if the Wayback Machine
|
||||
didn't serve a Cached URL. It is quite common for the Wayback Machine to serve
|
||||
cached archive if last archive was captured before last 45 minutes.
|
||||
"""
|
||||
m = re.search(
|
||||
r"https?://web\.archive.org/web/([0-9]{14})/http", self._archive_url
|
||||
)
|
||||
string_timestamp = m.group(1)
|
||||
timestamp = datetime.strptime(string_timestamp, "%Y%m%d%H%M%S")
|
||||
|
||||
timestamp_unixtime = time.mktime(timestamp.timetuple())
|
||||
instance_birth_time_unixtime = time.mktime(self.instance_birth_time.timetuple())
|
||||
|
||||
if timestamp_unixtime < instance_birth_time_unixtime:
|
||||
self.cached_save = True
|
||||
else:
|
||||
self.cached_save = False
|
||||
|
||||
return timestamp
|
||||
|
||||
    def save(self):
        """
        Calls the SavePageNow API of the Wayback Machine with the required
        parameters and headers to save the URL.

        Raises MaximumSaveRetriesExceeded if the maximum retries are exhausted
        but we were still unable to retrieve the archive from the Wayback
        Machine.
        """

        self.saved_archive = None
        tries = 0

        while True:

            tries += 1

            if tries >= self.max_tries:
                raise MaximumSaveRetriesExceeded(
                    "Tried %s times but failed to save and retrieve the" % str(tries)
                    + " archive for %s.\nResponse URL:\n%s \nResponse Header:\n%s\n"
                    % (self.url, self.response_url, str(self.headers)),
                )

            if not self.saved_archive:

                if tries > 1:
                    self.sleep(tries)

                self.get_save_request_headers()
                self.saved_archive = self.archive_url_parser()

                if not self.saved_archive:
                    continue
                else:
                    self._archive_url = self.saved_archive
                    self.timestamp()
                    return self.saved_archive
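A minimal usage sketch for this class (not part of the diff; it assumes waybackpy 3.x exports the class at the package top level, and the URL and user agent are placeholders):

from waybackpy import WaybackMachineSaveAPI

save_api = WaybackMachineSaveAPI("https://example.com", user_agent="Mozilla/5.0")
archive_url = save_api.save()  # runs the retry loop defined above
print(archive_url, save_api.cached_save)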
12
waybackpy/utils.py
Normal file
@@ -0,0 +1,12 @@
import requests
from .__version__ import __version__

DEFAULT_USER_AGENT = "waybackpy %s - https://github.com/akamhy/waybackpy" % __version__


def latest_version(package_name, user_agent=DEFAULT_USER_AGENT):
    request_url = "https://pypi.org/pypi/" + package_name + "/json"
    headers = {"User-Agent": user_agent}
    response = requests.get(request_url, headers=headers)
    data = response.json()
    return data["info"]["version"]
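A quick standalone sketch of what this helper returns (not part of the diff; needs network access, and the printed version is whatever PyPI reports as latest):

from waybackpy.utils import latest_version

print(latest_version("waybackpy"))  # e.g. "3.0.0"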
waybackpy/wrapper.py
@@ -1,166 +1,129 @@
-# -*- coding: utf-8 -*-
+from .save_api import WaybackMachineSaveAPI
+from .availability_api import WaybackMachineAvailabilityAPI
+from .cdx_api import WaybackMachineCDXServerAPI
+from .utils import DEFAULT_USER_AGENT
+from datetime import datetime, timedelta
 
-import re
-import sys
-import json
-from datetime import datetime
-from waybackpy.exceptions import WaybackError
-
-if sys.version_info >= (3, 0):  # If the python ver >= 3
-    from urllib.request import Request, urlopen
-    from urllib.error import URLError
-else:  # For python2.x
-    from urllib2 import Request, urlopen, URLError
-
-default_UA = "waybackpy python package - https://github.com/akamhy/waybackpy"
-
-class Url():
-    """waybackpy Url object"""
+"""
+The Url class is not recommended to be used anymore; instead use
+WaybackMachineSaveAPI, WaybackMachineAvailabilityAPI and WaybackMachineCDXServerAPI.
+
+The reason it is still in the code is backwards compatibility with 2.x.x versions.
+
+If you were using the Url class before the update to version 3.x.x, your code
+should still be working fine and there is no hurry to update the interface,
+but it is recommended that you do not use the Url class for new code, as it
+will be removed after 2025. The first 3.x.x version was released in January
+2022, and three years are more than enough to update the older interface code.
+"""
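Since the hunk above deprecates the Url interface, here is a hedged migration sketch (not part of the diff; class names are taken from this compare, the URL is a placeholder, and top-level imports assume waybackpy 3.x packaging):

# Old (2.x) style, still working but deprecated:
#   from waybackpy import Url
#   archive = Url("https://example.com", "Mozilla/5.0").save()
# New (3.x) style using the dedicated API class:
from waybackpy import WaybackMachineSaveAPI

archive_url = WaybackMachineSaveAPI("https://example.com", "Mozilla/5.0").save()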
 
-    def __init__(self, url, user_agent=default_UA):
+class Url:
+    def __init__(self, url, user_agent=DEFAULT_USER_AGENT):
         self.url = url
-        self.user_agent = user_agent
-        self.url_check()  # checks url validity on init.
-
-    def __repr__(self):
-        """Representation of the object."""
-        return "waybackpy.Url(url=%s, user_agent=%s)" % (self.url, self.user_agent)
+        self.user_agent = str(user_agent)
+        self.archive_url = None
+        self.wayback_machine_availability_api = WaybackMachineAvailabilityAPI(
+            self.url, user_agent=self.user_agent
+        )
 
     def __str__(self):
-        """String representation of the object."""
-        return "%s" % self.clean_url()
+        if not self.archive_url:
+            self.newest()
+        return self.archive_url
 
     def __len__(self):
-        """Length of the URL."""
-        return len(self.clean_url())
-
-    def url_check(self):
-        """Check for common URL problems."""
-        if "." not in self.url:
-            raise URLError("'%s' is not a vaild url." % self.url)
-        return True
-
-    def clean_url(self):
-        """Fix the URL, if possible."""
-        return str(self.url).strip().replace(" ", "_")
-
-    def wayback_timestamp(self, **kwargs):
-        """Return the formatted the timestamp."""
-        return (
-            str(kwargs["year"])
-            + str(kwargs["month"]).zfill(2)
-            + str(kwargs["day"]).zfill(2)
-            + str(kwargs["hour"]).zfill(2)
-            + str(kwargs["minute"]).zfill(2)
-        )
+        td_max = timedelta(
+            days=999999999, hours=23, minutes=59, seconds=59, microseconds=999999
+        )
+
+        if not self.timestamp:
+            self.oldest()
+
+        if self.timestamp == datetime.max:
+            return td_max.days
+
+        return (datetime.utcnow() - self.timestamp).days
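In the new implementation __len__ reports the age of the current archive in days rather than the length of the URL string. A hedged sketch (placeholder URL, top-level import assumed):

from waybackpy import Url

url = Url("https://example.com")
url.newest()   # sets url.timestamp via the Availability API
print(len(url))  # days elapsed since that archive was captured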
 
     def save(self):
-        """Create a new archives for an URL on the Wayback Machine."""
-        request_url = ("https://web.archive.org/save/" + self.clean_url())
-        hdr = { 'User-Agent' : '%s' % self.user_agent } #nosec
-        req = Request(request_url, headers=hdr) #nosec
-        try:
-            response = urlopen(req, timeout=30) #nosec
-        except Exception:
-            try:
-                response = urlopen(req) #nosec
-            except Exception as e:
-                raise WaybackError(e)
-        header = response.headers
+        self.wayback_machine_save_api = WaybackMachineSaveAPI(
+            self.url, user_agent=self.user_agent
+        )
+        self.archive_url = self.wayback_machine_save_api.archive_url
+        self.timestamp = self.wayback_machine_save_api.timestamp()
+        self.headers = self.wayback_machine_save_api.headers
+        return self
-        def archive_url_parser(header):
-            arch = re.search(r"X-Cache-Key:\shttps(.*)[A-Z]{2}", str(header))
-            if arch:
-                return arch.group(1)
-            raise WaybackError(
-                "No archive url found in the API response. Visit https://github.com/akamhy/waybackpy for latest version of waybackpy.\nHeader:\n%s" % str(header)
-            )
-
-        return "https://" + archive_url_parser(header)
+    def near(
+        self,
+        year=None,
+        month=None,
+        day=None,
+        hour=None,
+        minute=None,
+        unix_timestamp=None,
+    ):
+        self.wayback_machine_availability_api.near(
+            year=year,
+            month=month,
+            day=day,
+            hour=hour,
+            minute=minute,
+            unix_timestamp=unix_timestamp,
+        )
+        self.set_availability_api_attrs()
+        return self
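A hedged usage sketch for the new near interface (not part of the diff; URL and date are placeholders):

from waybackpy import Url

url = Url("https://example.com", "Mozilla/5.0")
url.near(year=2015, month=6)
print(url.archive_url)  # archive closest to June 2015, per the Availability API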
 
-    def get(self, url=None, user_agent=None, encoding=None):
-        """Returns the source code of the supplied URL. Auto detects the encoding if not supplied."""
-
-        if not url:
-            url = self.clean_url()
-        if not user_agent:
-            user_agent = self.user_agent
-
-        hdr = { 'User-Agent' : '%s' % user_agent }
-        req = Request(url, headers=hdr) #nosec
-
-        try:
-            resp = urlopen(req) #nosec
-        except Exception:
-            try:
-                resp = urlopen(req) #nosec
-            except Exception as e:
-                raise WaybackError(e)
-
-        if not encoding:
-            try:
-                encoding = resp.headers['content-type'].split('charset=')[-1]
-            except AttributeError:
-                encoding = "UTF-8"
-
-        return resp.read().decode(encoding.replace("text/html", "UTF-8", 1))
-
-    def near(self, **kwargs):
-        """Returns the archive from the Wayback Machine for an URL closest to the time supplied.
-        Supported params are year, month, day, hour and minute.
-        The non-supplied parameters default to the runtime time.
-        """
-        year = kwargs.get("year", datetime.utcnow().strftime('%Y'))
-        month = kwargs.get("month", datetime.utcnow().strftime('%m'))
-        day = kwargs.get("day", datetime.utcnow().strftime('%d'))
-        hour = kwargs.get("hour", datetime.utcnow().strftime('%H'))
-        minute = kwargs.get("minute", datetime.utcnow().strftime('%M'))
-        timestamp = self.wayback_timestamp(year=year, month=month, day=day, hour=hour, minute=minute)
-        request_url = "https://archive.org/wayback/available?url=%s&timestamp=%s" % (self.clean_url(), str(timestamp))
-        hdr = { 'User-Agent' : '%s' % self.user_agent }
-        req = Request(request_url, headers=hdr) # nosec
-
-        try:
-            response = urlopen(req) #nosec
-        except Exception:
-            try:
-                response = urlopen(req) #nosec
-            except Exception as e:
-                WaybackError(e)
-
-        data = json.loads(response.read().decode("UTF-8"))
-        if not data["archived_snapshots"]:
-            raise WaybackError("'%s' is not yet archived." % url)
-        archive_url = (data["archived_snapshots"]["closest"]["url"])
-        # wayback machine returns http sometimes, idk why? But they support https
-        archive_url = archive_url.replace("http://web.archive.org/web/", "https://web.archive.org/web/", 1)
-        return archive_url
-
-    def oldest(self, year=1994):
-        """Returns the oldest archive from Wayback Machine for an URL."""
-        return self.near(year=year)
+    def oldest(self):
+        self.wayback_machine_availability_api.oldest()
+        self.set_availability_api_attrs()
+        return self
 
     def newest(self):
-        """Returns the newest archive on Wayback Machine for an URL; sometimes you may not get the newest archive because of Wayback Machine DB lag."""
-        return self.near()
+        self.wayback_machine_availability_api.newest()
+        self.set_availability_api_attrs()
+        return self
+
+    def set_availability_api_attrs(self):
+        self.archive_url = self.wayback_machine_availability_api.archive_url
+        self.JSON = self.wayback_machine_availability_api.JSON
+        self.timestamp = self.wayback_machine_availability_api.timestamp()
 
-    def total_archives(self):
-        """Returns the total number of archives on Wayback Machine for an URL."""
-        hdr = { 'User-Agent' : '%s' % self.user_agent }
-        request_url = "https://web.archive.org/cdx/search/cdx?url=%s&output=json&fl=statuscode" % self.clean_url()
-        req = Request(request_url, headers=hdr) # nosec
-
-        try:
-            response = urlopen(req) #nosec
-        except Exception:
-            try:
-                response = urlopen(req) #nosec
-            except Exception as e:
-                WaybackError(e)
-
-        return str(response.read()).count(",")  # Most efficient method to count number of archives (yet)
+    def total_archives(self, start_timestamp=None, end_timestamp=None):
+        cdx = WaybackMachineCDXServerAPI(
+            self.url,
+            user_agent=self.user_agent,
+            start_timestamp=start_timestamp,
+            end_timestamp=end_timestamp,
+        )
+
+        count = 0
+        for _ in cdx.snapshots():
+            count = count + 1
+        return count
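A hedged usage sketch of the reworked total_archives (not part of the diff; placeholder URL, and the optional timestamps would be Wayback-style prefixes of YYYYMMDDhhmmss):

from waybackpy import Url

url = Url("https://example.com")
print(url.total_archives())  # counts CDX snapshots instead of parsing raw CSV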
+
+    def known_urls(
+        self,
+        subdomain=False,
+        host=False,
+        start_timestamp=None,
+        end_timestamp=None,
+        match_type="prefix",
+    ):
+        if subdomain:
+            match_type = "domain"
+        if host:
+            match_type = "host"
+
+        cdx = WaybackMachineCDXServerAPI(
+            self.url,
+            user_agent=self.user_agent,
+            start_timestamp=start_timestamp,
+            end_timestamp=end_timestamp,
+            match_type=match_type,
+            collapses=["urlkey"],
+        )
+
+        for snapshot in cdx.snapshots():
+            yield snapshot.original
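A hedged usage sketch for known_urls (not part of the diff; placeholder domain). It is a generator, so results stream as the CDX server pages through them:

from waybackpy import Url

for known_url in Url("https://example.com").known_urls(host=True):
    print(known_url)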