diff --git a/Pipfile b/Pipfile
new file mode 100644
index 00000000..f702e04e
--- /dev/null
+++ b/Pipfile
@@ -0,0 +1,21 @@
+[[source]]
+url = "https://pypi.org/simple"
+verify_ssl = true
+name = "pypi"
+
+[packages]
+argparse = ">=1.2.1"
+requests = ">=2.3.0"
+internetarchive = "*"
+kitchen = "*"
+
+[dev-packages]
+tox = "*"
+
+[scripts]
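+# these aliases are run through pipenv, e.g. `pipenv run dump --help`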
+dump="python dumpgenerator.py"
+wikipediadownloader="python wikipediadownloader.py"
+
+[requires]
+python_version = "2.7"
+
diff --git a/Pipfile.lock b/Pipfile.lock
new file mode 100644
index 00000000..28fd8fef
--- /dev/null
+++ b/Pipfile.lock
@@ -0,0 +1,184 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "788dd6e545cee5710fef84ae987dae7dd8a5578b02c7763ab306f2c079ac4b4f"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "argparse": {
+ "hashes": [
+ "sha256:62b089a55be1d8949cd2bc7e0df0bddb9e028faefc8c32038cc84862aefdd6e4",
+ "sha256:c31647edb69fd3d465a847ea3157d37bed1f95f19760b11a47aa91c04b666314"
+ ],
+ "index": "pypi",
+ "version": "==1.4.0"
+ },
+ "args": {
+ "hashes": [
+ "sha256:a785b8d837625e9b61c39108532d95b85274acd679693b71ebb5156848fcf814"
+ ],
+ "version": "==0.1.0"
+ },
+ "backports.csv": {
+ "hashes": [
+ "sha256:bed884eeb967c8d6f517dfcf672914324180f1e9ceeb0376fde2c4c32fd7008d",
+ "sha256:f68c7115c7fbe6d4b4104327d2e677efb38aa09bd7f877b47a6de18e44975510"
+ ],
+ "version": "==1.0.6"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:376690d6f16d32f9d1fe8932551d80b23e9d393a8578c5633a2ed39a64861638",
+ "sha256:456048c7e371c089d0a77a5212fb37a2c2dce1e24146e3b7e0261736aaeaa22a"
+ ],
+ "version": "==2018.8.24"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "clint": {
+ "hashes": [
+ "sha256:05224c32b1075563d0b16d0015faaf9da43aa214e4a2140e51f08789e7a4c5aa"
+ ],
+ "version": "==0.5.1"
+ },
+ "docopt": {
+ "hashes": [
+ "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"
+ ],
+ "version": "==0.6.2"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e",
+ "sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16"
+ ],
+ "version": "==2.7"
+ },
+ "internetarchive": {
+ "hashes": [
+ "sha256:b0f10dfdf8463c8c5a452c0a3ba835e60d7d1671e2681d0d8616b0172ce6ca13",
+ "sha256:fdec5906bb0cd4fddf3b4c0b43c8df437f2a0cfd603370ad8a0ed03ba19dfd2c"
+ ],
+ "index": "pypi",
+ "version": "==1.8.1"
+ },
+ "jsonpatch": {
+ "hashes": [
+ "sha256:49f29cab70e9068db3b1dc6b656cbe2ee4edf7dfe9bf5a0055f17a4b6804a4b9",
+ "sha256:8bf92fa26bc42c346c03bd4517722a8e4f429225dbe775ac774b2c70d95dbd33"
+ ],
+ "version": "==1.23"
+ },
+ "jsonpointer": {
+ "hashes": [
+ "sha256:c192ba86648e05fdae4f08a17ec25180a9aef5008d973407b581798a83975362",
+ "sha256:ff379fa021d1b81ab539f5ec467c7745beb1a5671463f9dcc2b2d458bd361c1e"
+ ],
+ "version": "==2.0"
+ },
+ "kitchen": {
+ "hashes": [
+ "sha256:af9fbb60f68cbdb2ead402beb8fa7c7edadbe2aa7b5a70138b7c4b0fa88153fd"
+ ],
+ "index": "pypi",
+ "version": "==1.2.5"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:63b52e3c866428a224f97cab011de738c36aec0185aa91cfacd418b5d58911d1",
+ "sha256:ec22d826a36ed72a7358ff3fe56cbd4ba69dd7a6718ffd450ff0e9df7a47ce6a"
+ ],
+ "index": "pypi",
+ "version": "==2.19.1"
+ },
+ "schema": {
+ "hashes": [
+ "sha256:d994b0dc4966000037b26898df638e3e2a694cc73636cb2050e652614a350687",
+ "sha256:fa1a53fe5f3b6929725a4e81688c250f46838e25d8c1885a10a590c8c01a7b74"
+ ],
+ "version": "==0.6.8"
+ },
+ "six": {
+ "hashes": [
+ "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9",
+ "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb"
+ ],
+ "version": "==1.11.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf",
+ "sha256:b5725a0bd4ba422ab0e66e89e030c806576753ea3ee08554382c14e685d117b5"
+ ],
+ "version": "==1.23"
+ }
+ },
+ "develop": {
+ "filelock": {
+ "hashes": [
+ "sha256:86fe6af56ae08ebc9c66d54ba3398c35b98916d0862d782b276a65816ff39392",
+ "sha256:97694f181bdf58f213cca0a7cb556dc7bf90e2f8eb9aa3151260adac56701afb"
+ ],
+ "version": "==3.0.9"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:6e3836e39f4d36ae72840833db137f7b7d35105079aee6ec4a62d9f80d594dd1",
+ "sha256:95eb8364a4708392bae89035f45341871286a333f749c3141c20573d2b3876e1"
+ ],
+ "version": "==0.7.1"
+ },
+ "py": {
+ "hashes": [
+ "sha256:06a30435d058473046be836d3fc4f27167fd84c45b99704f2fb5509ef61f9af1",
+ "sha256:50402e9d1c9005d759426988a492e0edaadb7f4e68bcddfea586bc7432d009c6"
+ ],
+ "version": "==1.6.0"
+ },
+ "six": {
+ "hashes": [
+ "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9",
+ "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb"
+ ],
+ "version": "==1.11.0"
+ },
+ "toml": {
+ "hashes": [
+ "sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c",
+ "sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e"
+ ],
+ "version": "==0.10.0"
+ },
+ "tox": {
+ "hashes": [
+ "sha256:217fb84aecf9792a98f93f07cfcaf014205a76c64e52bd7c2b4135458e6ad2a1",
+ "sha256:4baeb3d8ebdcd9f43afce38aa67d06f1165a87d221d5bb21e8b39a0d4880c134"
+ ],
+ "index": "pypi",
+ "version": "==3.5.2"
+ },
+ "virtualenv": {
+ "hashes": [
+ "sha256:2ce32cd126117ce2c539f0134eb89de91a8413a29baac49cbab3eb50e2026669",
+ "sha256:ca07b4c0b54e14a91af9f34d0919790b016923d157afda5efdde55c96718f752"
+ ],
+ "version": "==16.0.0"
+ }
+ }
+}
diff --git a/README.md b/README.md
index 625865c4..bb860494 100644
--- a/README.md
+++ b/README.md
@@ -27,7 +27,11 @@ This is a very quick guide for the most used features of WikiTeam tools. For fur
### Requirements
-Confirm you satisfy the requirements:
+If you have both [pipenv](https://github.com/pypa/pipenv) and [pyenv](https://github.com/pyenv/pyenv) installed, you can install all the project dependencies in an environment isolated from your system with the following command:
+
+`pipenv install`
+
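+If no Python 2.7 is available on your system, pyenv can install one first (the exact patch version below is only an example), so that pipenv can find it when creating the virtualenv:
+
+`pyenv install 2.7.15`
+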
+Otherwise, you can use the traditional way (using pip). First, confirm you satisfy the requirements:
`pip install --upgrade -r requirements.txt`
@@ -41,32 +45,49 @@ To download any wiki, use one of the following options:
`python dumpgenerator.py http://wiki.domain.org --xml --images` (complete XML histories and images)
+`pipenv run dump http://wiki.domain.org --xml --images` **pipenv version**
+
If the script can't find the API and/or index.php paths by itself, you can provide them:
`python dumpgenerator.py --api=http://wiki.domain.org/w/api.php --xml --images`
+`pipenv run dump --api=http://wiki.domain.org/w/api.php --xml --images` **pipenv version**
+
`python dumpgenerator.py --api=http://wiki.domain.org/w/api.php --index=http://wiki.domain.org/w/index.php --xml --images`
+`pipenv run dump --api=http://wiki.domain.org/w/api.php --index=http://wiki.domain.org/w/index.php --xml --images` **pipenv version**
+
If you only want the XML histories, just use `--xml`. For only the images, just `--images`. For only the current version of every page, `--xml --curonly`.
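+For example, to archive only the latest revision of every page:
+
+`python dumpgenerator.py http://wiki.domain.org --xml --curonly`
+
+`pipenv run dump http://wiki.domain.org --xml --curonly` **pipenv version**
+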
You can resume an aborted download:
`python dumpgenerator.py --api=http://wiki.domain.org/w/api.php --xml --images --resume --path=/path/to/incomplete-dump`
+`pipenv run dump --api=http://wiki.domain.org/w/api.php --xml --images --resume --path=/path/to/incomplete-dump` **pipenv version**
+
See more options:
`python dumpgenerator.py --help`
+`pipenv run dump --help` **pipenv version**
+
### Download Wikimedia dumps
To download [Wikimedia XML dumps](http://dumps.wikimedia.org/backup-index.html) (Wikipedia, Wikibooks, Wikinews, etc.) you can run:
`python wikipediadownloader.py` (download all projects)
+`pipenv run wikipediadownloader` **pipenv version**
+
See more options:
`python wikipediadownloader.py --help`
+`pipenv run wikipediadownloader --help` **pipenv version**
+
### Download Wikimedia Commons images
There is a script for this, but we have [uploaded the tarballs](https://archive.org/details/wikimediacommons) to Internet Archive, so it's more useful to reseed their torrents than to re-generate old ones with the script.
@@ -75,22 +96,25 @@ There is a script for this, but we have [uploaded the tarballs](https://archive.
[![Build Status](https://travis-ci.org/WikiTeam/wikiteam.svg)](https://travis-ci.org/WikiTeam/wikiteam)
-You can run tests easily by using the [tox](https://pypi.python.org/pypi/tox) command. It is probably already present in your operating system, you would need version 1.6. If it is not, you can download it from pypi with: `pip install tox`.
+You can run tests easily by using the [tox](https://pypi.python.org/pypi/tox) command; you need at least version 1.6. It is probably already present in your operating system. If it is not, you can install it from PyPI with `pip install tox` (unless you are already using pipenv, which installs tox as a dev dependency).
Example usage:
- $ tox
- py27 runtests: commands[0] | nosetests --nocapture --nologcapture
- Checking http://wiki.annotation.jp/api.php
- Trying to parse かずさアノテーション - ソーシャル・ゲノム・アノテーション.jpg from API
- Retrieving image filenames
- . Found 266 images
- .
- -------------------------------------------
- Ran 1 test in 2.253s
-
- OK
- _________________ summary _________________
- py27: commands succeeded
- congratulations :)
- $
+```
+$ tox # or pipenv run tox
+py27 runtests: commands[0] | nosetests --nocapture --nologcapture
+Checking http://wiki.annotation.jp/api.php
+Trying to parse かずさアノテーション - ソーシャル・ゲノム・アノテーション.jpg from API
+Retrieving image filenames
+. Found 266 images
+.
+-------------------------------------------
+Ran 1 test in 2.253s
+
+OK
+_________________ summary _________________
+ py27: commands succeeded
+ congratulations :)
+$
+```