diff --git a/.coveragerc-py35 b/.coveragerc-py35
new file mode 100644
index 00000000..fda5e614
--- /dev/null
+++ b/.coveragerc-py35
@@ -0,0 +1,2 @@
+[html]
+directory = htmlcov-py35
diff --git a/.gitignore b/.gitignore
index 8da8435b..339a8f3f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,3 +10,4 @@ htmlcov*
**/__pycache__/
.cache
docs/build/
+.python-version
diff --git a/fake_dir/fsdfgh b/fake_dir/fsdfgh
new file mode 100644
index 00000000..ebd244e8
--- /dev/null
+++ b/fake_dir/fsdfgh
@@ -0,0 +1 @@
+dsadsad
diff --git a/fake_dir/popoiopiu b/fake_dir/popoiopiu
new file mode 100644
index 00000000..3158e692
--- /dev/null
+++ b/fake_dir/popoiopiu
@@ -0,0 +1 @@
+oooofiopfsdpio
diff --git a/fake_dir/test2/fssdf b/fake_dir/test2/fssdf
new file mode 100644
index 00000000..cadf66b1
--- /dev/null
+++ b/fake_dir/test2/fssdf
@@ -0,0 +1 @@
+dsdsdsadsdsad
diff --git a/fake_dir/test2/llllg b/fake_dir/test2/llllg
new file mode 100644
index 00000000..5a9d42ab
--- /dev/null
+++ b/fake_dir/test2/llllg
@@ -0,0 +1 @@
+dsdsadjs
diff --git a/fake_dir/test3/ppppoooooooooo b/fake_dir/test3/ppppoooooooooo
new file mode 100644
index 00000000..1199850f
--- /dev/null
+++ b/fake_dir/test3/ppppoooooooooo
@@ -0,0 +1 @@
+dsasasd
diff --git a/ipfsApi/__init__.py b/ipfsApi/__init__.py
index 96c86216..8e3d36c8 100644
--- a/ipfsApi/__init__.py
+++ b/ipfsApi/__init__.py
@@ -2,5 +2,4 @@
from .client import *
-
__version__ = '0.2.3'
diff --git a/ipfsApi/client.py b/ipfsApi/client.py
index f440c8e0..e4d58012 100644
--- a/ipfsApi/client.py
+++ b/ipfsApi/client.py
@@ -1,5 +1,7 @@
-"""
-IPFS API Bindings for Python
+"""IPFS API Bindings for Python.
+
+Classes:
+Client -- a TCP client for interacting with an IPFS daemon
"""
from __future__ import absolute_import
@@ -13,22 +15,89 @@
class Client(object):
- """
- A TCP client for interacting with an IPFS daemon
+ """A TCP client for interacting with an IPFS daemon.
+
+ Public methods:
+ __init__ -- connects to the API port of an IPFS node
+ add -- add a file or directory of files to IPFS
+ get -- downloads a file or directory of files from IPFS to the current
+ working directory
+ cat -- returns the contents of a file identified by hash, as a string
+ ls -- returns a list of objects linked to the given hash
+ refs -- returns a list of hashes of objects referenced by the given hash
+ refs_local -- displays the hashes of all local objects
+ block_stat -- returns a dict with the size of the block with the given hash
+ block_get -- returns the raw contents of a block
+ block_put -- stores input as an IPFS block
+ object_data -- returns the raw bytes in an IPFS object
+ object_new -- creates a new object from an IPFS template
+ object_links -- returns the links pointed to by the specified object
+ object_get -- get and serialize the DAG node named by multihash
+ object_put -- stores input as a DAG object and returns its key
+ object_stat -- get stats for the DAG node named by multihash
+ object_patch_append_data -- create a new merkledag object based on an
+ existing one by appending to the object's data
+ object_patch_add_link -- create a new merkledag object based on an existing
+ one by adding a link to another merkledag object
+ object_patch_rm_link -- create a new merkledag object based on an existing
+ one by removing a link to another merkledag object
+ object_patch_set_data -- create a new merkledag object based on an existing
+ one by replacing the object's data
+ file_ls -- lists directory contents for Unix filesystem objects
+ resolve -- accepts an identifier and resolves it to the referenced item
+ name_publish -- publishes an object to IPNS
+ name_resolve -- gets the value currently published at an IPNS name
+ dns -- resolves DNS links to the referenced object
+ pin_add -- pins objects to local storage
+ pin_rm -- removes a pinned object from local storage
+ pin_ls -- lists objects pinned to local storage
+ repo_gc -- removes stored objects that are not pinned from the repo
+ repo_stat -- displays the repo's status (object count, size, version, path)
+ id -- shows IPFS Node ID info
+ bootstrap -- shows peers in the bootstrap list
+ bootstrap_add -- adds peers to the bootstrap list
+ bootstrap_rm -- removes peers from the bootstrap list
+ swarm_peers -- returns the addresses & IDs of currently connected peers
+ swarm_addrs -- returns the addresses of currently connected peers
+ by peer id
+ swarm_connect -- opens a connection to a given address
+ swarm_disconnect -- closes the connection to a given address
+ swarm_filters_add -- adds a given multiaddr filter to the filter list
+ swarm_filters_rm -- removes a given multiaddr filter from the filter list
+ dht_query -- finds the closest Peer IDs to a given Peer ID by
+ querying the DHT
+ dht_findprovs -- finds peers in the DHT that can provide a specific value,
+ given a key
+ dht_findpeer -- queries the DHT for all of the multiaddresses associated
+ with a Peer ID
+ dht_get -- queries the DHT for its best value related to given key
+ dht_put -- writes a key/value pair to the DHT
+ ping -- provides round-trip latency information for the routing system
+ config -- controls configuration variables
+ config_show -- returns a dict containing the server's configuration
+ config_replace -- replaces the existing config with a user-defined config
+ log_level -- changes the logging output of a running daemon
+ log_ls -- lists the logging subsystems of a running daemon
+ log_tail -- reads log outputs as they are written
+ version -- returns the software version of the currently connected node
+ files_cp -- copies files into MFS
+ files_ls -- lists directory contents in MFS
+ files_mkdir -- creates a directory in MFS
+ files_stat -- displays a file's status (including its hash) in MFS
+ files_rm -- removes a file from MFS
+ files_read -- reads a file stored in MFS
+ files_write -- writes to a mutable file in MFS
+ files_mv -- moves MFS files
+ add_str -- adds a Python string as a file to IPFS
+ add_json -- adds a json-serializable Python dict as a json file to IPFS
+ get_json -- loads a json object from IPFS
+ add_pyobj -- adds a picklable Python object as a file to IPFS
+ get_pyobj -- loads a pickled Python object from IPFS
"""
_clientfactory = http.HTTPClient
- def __init__(self,
- host=None,
- port=None,
- base=None,
- default_enc='json',
- **defaults):
-
- """
- Connect to the API port of an IPFS node
- """
+ def __init__(self, host=None, port=None,
+ base=None, default_enc='json', **defaults):
+ """Connects to the API port of an IPFS node."""
if host is None:
host = default_host
if port is None:
@@ -45,17 +114,23 @@ def __init__(self,
self._cat = ArgCommand('/cat')
self._ls = ArgCommand('/ls')
self._refs = ArgCommand('/refs')
+ self._refs_local = Command('/refs/local')
# DATA STRUCTURE COMMANDS
self._block_stat = ArgCommand('/block/stat')
self._block_get = ArgCommand('/block/get')
self._block_put = FileCommand('/block/put')
+ self._object_new = ArgCommand('/object/new')
self._object_data = ArgCommand('/object/data')
self._object_links = ArgCommand('/object/links')
self._object_get = ArgCommand('/object/get')
self._object_put = FileCommand('/object/put')
self._object_stat = ArgCommand('/object/stat')
- self._object_patch = ArgCommand('/object/patch')
+ self._object_patch_append_data = FileCommand(
+ '/object/patch/append-data')
+ self._object_patch_add_link = ArgCommand('/object/patch/add-link')
+ self._object_patch_rm_link = ArgCommand('/object/patch/rm-link')
+ self._object_patch_set_data = FileCommand('/object/patch/set-data')
self._file_ls = ArgCommand('/file/ls')
# ADVANCED COMMANDS
@@ -67,6 +142,10 @@ def __init__(self,
self._pin_rm = ArgCommand('/pin/rm')
self._pin_ls = Command('/pin/ls')
self._repo_gc = Command('/repo/gc')
+ self._repo_stat = Command('/repo/stat')
+ self._repo_fsck = Command('/repo/fsck')
+ self._repo_version = Command('/repo/version')
+ self._repo_verify = Command('/repo/verify')
# NETWORK COMMANDS
self._id = Command('/id')
@@ -90,8 +169,11 @@ def __init__(self,
self._config = ArgCommand('/config')
self._config_show = Command('/config/show')
self._config_replace = ArgCommand('/config/replace')
+ self._log_level = ArgCommand('/log/level')
+ self._log_ls = Command('/log/ls')
+ self._log_tail = Command('/log/tail')
self._version = Command('/version')
-
+
# MFS COMMANDS
self._files_cp = ArgCommand('/files/cp')
self._files_ls = ArgCommand('/files/ls')
@@ -101,31 +183,37 @@ def __init__(self,
self._files_read = ArgCommand('/files/read')
self._files_write = FileCommand('/files/write')
self._files_mv = ArgCommand('/files/mv')
-
def add(self, files, recursive=False, **kwargs):
- """
- Add a file, or directory of files to IPFS
+ """Add a file, or directory of files to IPFS.
>> with io.open('nurseryrhyme.txt', 'w', encoding='utf-8') as f:
... numbytes = f.write(u'Mary had a little lamb')
>> c.add('nurseryrhyme.txt')
{u'Hash': u'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab',
u'Name': u'nurseryrhyme.txt'}
+
+ Keyword arguments:
+ files -- a filepath to either a file or directory
+ recursive -- controls if files in subdirectories are added or not
+ kwargs -- additional named arguments
"""
return self._add.request(self._client, (), files,
recursive=recursive, **kwargs)
def get(self, multihash, **kwargs):
- """
- Downloads a file, or directory of files from IPFS to the current
- working directory.
+ """Downloads a file, or directory of files from IPFS.
+
+ Files are placed in the current working directory.
+
+ Keyword arguments:
+ multihash -- unique checksum used to identify IPFS resources
+ kwargs -- additional named arguments
"""
return self._get.request(self._client, multihash, **kwargs)
def cat(self, multihash, **kwargs):
- r"""
- Returns the contents of a file identified by hash, as a string
+ r"""Returns the contents of a file identified by hash, as a string.
>> c.cat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D')
Traceback (most recent call last):
@@ -133,12 +221,15 @@ def cat(self, multihash, **kwargs):
ipfsApiError: this dag node is a directory
>> c.cat('QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX')[:60]
u'<html>\n\n<head>\n<title>\nipfs example viewer</title>...'
+
+ Keyword arguments:
+ multihash -- unique checksum used to identify IPFS resources
+ kwargs -- additional named arguments
"""
return self._cat.request(self._client, multihash, **kwargs)
def ls(self, multihash, **kwargs):
- """
- Returns a list of objects linked to the given hash
+ """Returns a list of objects linked to the given hash.
>> c.ls('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D')
{u'Objects': [
@@ -151,12 +242,15 @@ def ls(self, multihash, **kwargs):
u'Name': u'published-version', u'Size': 55, u'Type': 2}
]}
]}
+
+ Keyword arguments:
+ multihash -- unique checksum used to identify IPFS resources
+ kwargs -- additional named arguments
"""
return self._ls.request(self._client, multihash, **kwargs)
def refs(self, multihash, **kwargs):
- """
- Returns a list of hashes of objects referenced to the given hash
+ """Returns a list of hashes of objects referenced by the given hash.
>> c.refs('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D')
[{u'Ref': u'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV',
@@ -164,48 +258,91 @@ def refs(self, multihash, **kwargs):
...
{u'Ref': u'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY',
u'Err': u''}]
+
+ Keyword arguments:
+ multihash -- unique checksum used to identify IPFS resources
+ kwargs -- additional named arguments
"""
return self._refs.request(self._client, multihash, **kwargs)
- def block_stat(self, multihash, **kwargs):
+ def refs_local(self, **kwargs):
+ """Displays the hashes of all local objects.
+
+ >> c.refs_local()
+ [{u'Ref': u'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV',
+ u'Err': u''},
+ ...
+ {u'Ref': u'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY',
+ u'Err': u''}]
+
+ Keyword arguments:
+ kwargs -- additional named arguments
"""
- Returns a dict with the size of the block with the given hash
+ return self._refs_local.request(self._client, **kwargs)
+
+ def block_stat(self, multihash, **kwargs):
+ """Returns a dict with the size of the block with the given hash.
>> c.block_stat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D')
{u'Key': u'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D',
u'Size': 258}
+
+ Keyword arguments:
+ multihash -- unique checksum used to identify IPFS resources
+ kwargs -- additional named arguments
"""
return self._block_stat.request(self._client, multihash, **kwargs)
def block_get(self, multihash, **kwargs):
- r"""
- Returns the raw contents of a block
+ r"""Returns the raw contents of a block.
>> c.block_get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D')
u'\x121\n"\x12 \xdaW> ... \x11published-version\x187\n\x02\x08\x01'
+
+ Keyword arguments:
+ multihash -- unique checksum used to identify IPFS resources
+ kwargs -- additional named arguments
"""
return self._block_get.request(self._client, multihash, **kwargs)
def block_put(self, file, **kwargs):
- """
+ """Stores input as an IPFS block.
+
>> c.block_put(io.StringIO(u'Mary had a little lamb'))
{u'Key': u'QmeV6C6XVt1wf7V7as7Yak3mxPma8jzpqyhtRtCvpKcfBb',
u'Size': 22}
+
+ Keyword arguments:
+ file -- object to be stored
+ kwargs -- additional named arguments
"""
return self._block_put.request(self._client, (), file, **kwargs)
def object_data(self, multihash, **kwargs):
- r"""
+ r"""Returns the raw bytes in an IPFS object.
+
>> c.object_data('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D')
u'\x08\x01'
+
+ Keyword arguments:
+ multihash -- unique checksum used to identify IPFS resources
+ kwargs -- additional named arguments
"""
return self._object_data.request(self._client, multihash, **kwargs)
def object_new(self, template=None, **kwargs):
- """
+ """Creates a new object from an IPFS template.
+
+ By default it creates and returns a new empty merkledag node, but you
+ may pass an optional template argument to create a preformatted node.
+
>> c.object_new()
{u'Hash': u'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n',
u'Links': None}
+
+ Keyword arguments:
+ template -- blueprints from which to construct the new object
+ kwargs -- additional named arguments
"""
if template:
return self._object_new.request(self._client, template, **kwargs)
@@ -213,7 +350,8 @@ def object_new(self, template=None, **kwargs):
return self._object_new.request(self._client, **kwargs)
def object_links(self, multihash, **kwargs):
- """
+ """Returns the links pointed to by the specified object.
+
>> c.object_links('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D')
{u'Hash': u'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D',
u'Links': [
@@ -227,11 +365,16 @@ def object_links(self, multihash, **kwargs):
u'Name': u'lib', u'Size': 268261},
{u'Hash': u'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY',
u'Name': u'published-version', u'Size': 55}]}
+
+ Keyword arguments:
+ multihash -- unique checksum used to identify IPFS resources
+ kwargs -- additional named arguments
"""
return self._object_links.request(self._client, multihash, **kwargs)
def object_get(self, multihash, **kwargs):
- """
+ """Get and serialize the DAG node named by multihash.
+
>> c.object_get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D')
{u'Data': u'\x08\x01',
u'Links': [
@@ -245,31 +388,99 @@ def object_get(self, multihash, **kwargs):
u'Name': u'lib', u'Size': 268261},
{u'Hash': u'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY',
u'Name': u'published-version', u'Size': 55}]}
+
+ Keyword arguments:
+ multihash -- unique checksum used to identify IPFS resources
+ kwargs -- additional named arguments
"""
return self._object_get.request(self._client, multihash, **kwargs)
def object_put(self, file, **kwargs):
- """
+ """Stores input as a DAG object and returns its key.
+
+ Keyword arguments:
+ file -- object from which a DAG object will be created
+ kwargs -- additional named arguments
"""
return self._object_put.request(self._client, (), file, **kwargs)
def object_stat(self, multihash, **kwargs):
- """
+ """Get stats for the DAG node named by multihash.
+
>> c.object_stat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D')
{u'LinksSize': 256, u'NumLinks': 5,
u'Hash': u'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D',
u'BlockSize': 258, u'CumulativeSize': 274169, u'DataSize': 2}
+
+ Keyword arguments:
+ multihash -- unique checksum used to identify IPFS resources
+ kwargs -- additional named arguments
"""
return self._object_stat.request(self._client, multihash, **kwargs)
- def object_patch(self, multihash, **kwargs):
+ def object_patch_append_data(self, multihash, new_data, **kwargs):
+ """Creates a new merkledag object based on an existing one.
+
+ The new object will have the provided data appended to it,
+ and will thus have a new Hash.
+
+ Keyword arguments:
+ multihash -- the hash of the IPFS object to modify
+ new_data -- the data to append to the object's data section
+ kwargs -- additional named arguments
"""
+ return self._object_patch_append_data.request(self._client,
+ [multihash],
+ new_data,
+ **kwargs)
+
+ def object_patch_add_link(self, root, name, ref, **kwargs):
+ """Creates a new merkledag object based on an existing one.
+
+ The new object will have a link to the provided object.
+
+ Keyword arguments:
+ root -- IPFS hash for the object being modified
+ name -- name for the new link
+ ref -- IPFS hash for the object being linked to
+ kwargs -- additional named arguments
"""
- return self._object_patch.request(self._client, multihash, **kwargs)
+ return self._object_patch_add_link.request(self._client,
+ (root, name, ref),
+ **kwargs)
- def file_ls(self, multihash, **kwargs):
+ def object_patch_rm_link(self, root, link, **kwargs):
+ """Creates a new merkledag object based on an existing one.
+
+ The new object will lack a link to the specified object.
+
+ Keyword arguments:
+ root -- IPFS hash of the object to modify
+ link -- name of the link to remove
+ kwargs -- additional named arguments
+ """
+ return self._object_patch_rm_link.request(self._client,
+ (root, link),
+ **kwargs)
+
+ def object_patch_set_data(self, root, data, **kwargs):
+ """Creates a new merkledag object based on an existing one.
+
+ The new object will have the same links as the old object but
+ with the provided data instead of the old object's data contents.
+
+ Keyword arguments:
+ root -- IPFS hash of the object to modify
+ data -- the new data to store in root
+ kwargs -- additional named arguments
"""
- List file and directory objects in the object identified by a hash
+ return self._object_patch_set_data.request(self._client,
+ [root],
+ data,
+ **kwargs)
+
+ def file_ls(self, multihash, **kwargs):
+ """Lists directory contents for Unix filesystem objects.
>> c.file_ls('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D')
{u'Arguments': {u'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D':
@@ -292,51 +503,112 @@ def file_ls(self, multihash, **kwargs):
u'Size': 0, u'Type': u'Directory'
}
}}
+
+ Keyword arguments:
+ multihash -- unique checksum used to identify IPFS resources
+ kwargs -- additional named arguments
"""
return self._file_ls.request(self._client, multihash, **kwargs)
def resolve(self, *args, **kwargs):
- """
+ """Accepts an identifier and resolves it to the referenced item.
+
+ There are a number of mutable name protocols that can link among
+ themselves and into IPNS. For example IPNS references can (currently)
+ point at an IPFS object, and DNS links can point at other DNS links,
+ IPNS entries, or IPFS objects. This command accepts any of these
+ identifiers.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._resolve.request(self._client, *args, **kwargs)
def name_publish(self, *args, **kwargs):
- """
+ """Publishes an object to IPNS.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._name_publish.request(self._client, *args, **kwargs)
def name_resolve(self, *args, **kwargs):
- """
+ """Gets the value currently published at an IPNS name.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._name_resolve.request(self._client, *args, **kwargs)
def dns(self, *args, **kwargs):
- """
+ """Resolves DNS links to the referenced object.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._dns.request(self._client, *args, **kwargs)
def pin_add(self, *args, **kwargs):
- """
+ """Pins objects to local storage.
+
+ Stores one or more IPFS objects from a given path locally to disk.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._pin_add.request(self._client, *args, **kwargs)
def pin_rm(self, *args, **kwargs):
- """
+ """Removes a pinned object from local storage.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._pin_rm.request(self._client, *args, **kwargs)
def pin_ls(self, **kwargs):
- """
+ """Lists objects pinned to local storage.
+
+ Keyword arguments:
+ kwargs -- additional named arguments
"""
return self._pin_ls.request(self._client, **kwargs)
def repo_gc(self, *args, **kwargs):
- """
+ """Removes stored objects that are not pinned from the repo.
+
+ Performs a garbage collection sweep of the local set of
+ stored objects and removes those that are not pinned in order
+ to reclaim hard disk space. Returns the hashes of all collected
+ objects.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._repo_gc.request(self._client, *args, **kwargs)
- def id(self, *args, **kwargs):
+ def repo_stat(self, *args, **kwargs):
+ """Displays the repo's status.
+
+ Returns the number of objects in the repo and the repo's size,
+ version, and path.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
+ return self._repo_stat.request(self._client, *args, **kwargs)
+
+ def id(self, *args, **kwargs):
+ """Shows IPFS Node ID info.
+
Returns the PublicKey, ProtocolVersion, ID, AgentVersion and
Addresses of the connected daemon
@@ -352,11 +624,16 @@ def id(self, *args, **kwargs):
u'/ip6/::1/tcp/4001/ipfs/QmRA9NuuaJ2GLVgCm ... 1VCn',
u'/ip4/212.159.87.139/tcp/63203/ipfs/QmRA9NuuaJ2GLVgCm ... 1VCn',
u'/ip4/212.159.87.139/tcp/63203/ipfs/QmRA9NuuaJ2GLVgCm ... 1VCn']}
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._id.request(self._client, *args, **kwargs)
def bootstrap(self, *args, **kwargs):
- """
+ """Shows peers in the bootstrap list.
+
Returns the addresses of peers used during initial discovery of
the IPFS network
@@ -366,22 +643,33 @@ def bootstrap(self, *args, **kwargs):
u'/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJz ... ca9z',
...
u'/ip4/104.236.151.122/tcp/4001/ipfs/QmSoLju6m7xTh3Du ... 36yx']}
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._bootstrap.request(self._client, *args, **kwargs)
def bootstrap_add(self, *args, **kwargs):
- """
+ """Adds peers to the bootstrap list.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._bootstrap_add.request(self._client, *args, **kwargs)
def bootstrap_rm(self, *args, **kwargs):
- """
+ """Removes peers from the bootstrap list.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._bootstrap_rm.request(self._client, *args, **kwargs)
def swarm_peers(self, *args, **kwargs):
- """
- Returns the addresses & IDs of currently connected peers
+ """Returns the addresses & IDs of currently connected peers.
>> c.swarm_peers()
{u'Strings': [
@@ -390,12 +678,16 @@ def swarm_peers(self, *args, **kwargs):
...
u'/ip4/92.1.172.181/tcp/4001/ipfs/QmdPe9Xs5YGCoVN8nk ... 5cKD',
u'/ip4/94.242.232.165/tcp/4001/ipfs/QmUdVdJs3dM6Qi6Tf ... Dgx9']}
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._swarm_peers.request(self._client, *args, **kwargs)
def swarm_addrs(self, *args, **kwargs):
- """
- Returns the addresses of currently connected peers by peer id
+ """Returns the addresses of currently connected peers by peer id.
+
>> pprint(c.swarm_addrs())
{u'Addrs': {
u'QmNd92Ndyccns8vTvdK66H1PC4qRXzKz3ewAqAzLbooEpB':
@@ -405,46 +697,82 @@ def swarm_addrs(self, *args, **kwargs):
[u'/ip4/127.0.0.1/tcp/4001', u'/ip4/178.62.206.163/tcp/4001'],
...
}}
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._swarm_addrs.request(self._client, *args, **kwargs)
def swarm_connect(self, *args, **kwargs):
- """
+ """Opens a connection to a given address.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._swarm_connect.request(self._client, *args, **kwargs)
def swarm_disconnect(self, *args, **kwargs):
- """
+ """Closes the connection to a given address.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._swarm_disconnect.request(self._client, *args, **kwargs)
def swarm_filters_add(self, *args, **kwargs):
- """
+ """Adds a given multiaddr filter to the filter list.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._swarm_filters_add.request(self._client, *args, **kwargs)
def swarm_filters_rm(self, *args, **kwargs):
- """
+ """Removes a given multiaddr filter from the filter list.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._swarm_filters_rm.request(self._client, *args, **kwargs)
def dht_query(self, *args, **kwargs):
- """
+ """Finds the closest Peer IDs to a given Peer ID by querying the DHT.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._dht_query.request(self._client, *args, **kwargs)
def dht_findprovs(self, *args, **kwargs):
- """
+ """Finds peers in the DHT that can provide an exact value, given a key.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._dht_findprovs.request(self._client, *args, **kwargs)
def dht_findpeer(self, *args, **kwargs):
- """
+ """Queries the DHT for all of the associated multiaddresses.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._dht_findpeer.request(self._client, *args, **kwargs)
def dht_get(self, *args, **kwargs):
- """
+ """Queries the DHT for its best value related to given key.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
res = self._dht_get.request(self._client, *args, **kwargs)
if isinstance(res, dict) and "Extra" in res:
@@ -456,23 +784,38 @@ def dht_get(self, *args, **kwargs):
raise ipfsApiError("empty response from DHT")
def dht_put(self, key, value, **kwargs):
- """
+ """Writes a key/value pair to the DHT.
+
+ Keyword arguments:
+ key -- a unique identifier
+ value -- object to be associated with the given key
+ kwargs -- additional named arguments
"""
return self._dht_put.request(self._client, key, value, **kwargs)
def ping(self, *args, **kwargs):
- """
+ """Provides round-trip latency information for the routing system.
+
+ Finds nodes via the routing system, sends pings, waits for pongs,
+ and prints out round-trip latency information.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._ping.request(self._client, *args, **kwargs)
def config(self, *args, **kwargs):
- """
+ """Controls configuration variables.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._config.request(self._client, *args, **kwargs)
def config_show(self, *args, **kwargs):
- """
- Returns a dict containing the server's configuration
+ """Returns a dict containing the server's configuration.
>> config = ipfs_client.config_show()
>> pprint(config['Addresses'])
@@ -481,71 +824,145 @@ def config_show(self, *args, **kwargs):
u'Swarm': [u'/ip4/0.0.0.0/tcp/4001', u'/ip6/::/tcp/4001']},
>> pprint(config['Discovery'])
{u'MDNS': {u'Enabled': True, u'Interval': 10}}
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._config_show.request(self._client, *args, **kwargs)
def config_replace(self, *args, **kwargs):
- """
+ """Replaces the existing config with a user-defined config.
+
+ Make sure to back up the config file first if necessary, as this
+ operation can't be undone.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
"""
return self._config_replace.request(self._client, *args, **kwargs)
- def version(self, **kwargs):
+ def log_level(self, subsystem, level, **kwargs):
+ """Changes the logging output of a running daemon.
+
+ Keyword arguments:
+ subsystem -- the subsystem logging identifier
+ (Use 'all' for all subsystems)
+ level -- one of: debug, info, warning, error, fatal, panic
+ kwargs -- additional named arguments
"""
- Returns the software version of the currently connected node
+ return self._log_level.request(self._client, subsystem,
+ level, **kwargs)
+
+ def log_ls(self, *args, **kwargs):
+ """Lists the logging subsystems of a running daemon.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
+ """
+ return self._log_ls.request(self._client, *args, **kwargs)
+
+ def log_tail(self, *args, **kwargs):
+ """Reads log outputs as they are written.
+
+ This function returns a response object that can be iterated over
+ by the user. The user should make sure to close the response object
+ when they are done reading from it.
+
+ Keyword arguments:
+ args -- additional unnamed arguments
+ kwargs -- additional named arguments
+ """
+ return self._log_tail.request(self._client, stream=True,
+ *args, **kwargs)
+
+ def version(self, **kwargs):
+ """Returns the software version of the currently connected node.
>> c.version() # doctest: +ELLIPSIS
{u'Version': u'0.3...'}
+
+ Keyword arguments:
+ kwargs -- additional named arguments
"""
return self._version.request(self._client, **kwargs)
-
+
def files_cp(self, source, dest, **kwargs):
- """
- MFS - Copy files into mfs
+ """Copies files into MFS.
+
+ Keyword arguments:
+ source -- file to be copied
+ dest -- destination to which the file will be copied
+ kwargs -- additional named arguments
"""
return self._files_cp.request(self._client, source, dest, **kwargs)
-
+
def files_ls(self, path, **kwargs):
- """
- MFS - List directory contents
+ """Lists directory contents in MFS.
+
+ Keyword arguments:
+ path -- filepath within the MFS
+ kwargs -- additional named arguments
"""
return self._files_ls.request(self._client, path, **kwargs)
-
+
def files_mkdir(self, path, **kwargs):
- """
- MFS - Create directory
+ """Creates a directory in MFS.
+
+ Keyword arguments:
+ path -- filepath within the MFS
+ kwargs -- additional named arguments
"""
return self._files_mkdir.request(self._client, path, **kwargs)
-
+
def files_stat(self, path, **kwargs):
- """
- MFS - Display file status (including it's hash)
+ """Displays a file's status (including it's hash) in MFS.
+
+ Keyword arguments:
+ path -- filepath within the MFS
+ kwargs -- additional named arguments
"""
return self._files_stat.request(self._client, path, **kwargs)
-
+
def files_rm(self, path, **kwargs):
- """
- MFS - Remove a file
+ """Removes a file from MFS.
+
+ Keyword arguments:
+ path -- filepath within the MFS
+ kwargs -- additional named arguments
"""
return self._files_rm.request(self._client, path, **kwargs)
-
+
def files_read(self, path, **kwargs):
- """
- MFS - Read a file stored in MFS
+ """Reads a file stored in MFS.
+
+ Keyword arguments:
+ path -- filepath within the MFS
+ kwargs -- additional named arguments
"""
return self._files_read.request(self._client, path, **kwargs)
-
+
def files_write(self, path, file, **kwargs):
- """
- MFS - Write to a mutable file
+ """Writes to a mutable file in MFS.
+
+ Keyword arguments:
+ path -- filepath within the MFS
+ file -- object to be written
+ kwargs -- additional named arguments
"""
return self._files_write.request(self._client, (path,), file, **kwargs)
-
+
def files_mv(self, source, dest, **kwargs):
- """
- MFS - Move MFS files
+ """Moves MFS files.
+
+ Keyword arguments:
+ source -- existing filepath within the MFS
+ dest -- destination to which the file will be moved in the MFS
+ kwargs -- additional named arguments
"""
return self._files_mv.request(self._client, source, dest, **kwargs)
-
###########
# HELPERS #
@@ -553,55 +970,68 @@ def files_mv(self, source, dest, **kwargs):
@utils.return_field('Hash')
def add_str(self, string, **kwargs):
- """
- Adds a Python string as a file to IPFS.
+ """Adds a Python string as a file to IPFS.
>> ipfs_client.add_str('Mary had a little lamb')
u'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab'
Also accepts and will stream generator objects.
+
+ Keyword arguments:
+ string -- content to be added as a file
+ kwargs -- additional named arguments
"""
chunk_size = kwargs.pop('chunk_size',
multipart.default_chunk_size)
body, headers = multipart.stream_text(string,
chunk_size=chunk_size)
- return self._client.request('/add',
- data=body,
- headers=headers,
- **kwargs)
+ return self._client.request('/add', data=body,
+ headers=headers, **kwargs)[1]
def add_json(self, json_obj, **kwargs):
- """
- Adds a json-serializable Python dict as a json file to IPFS.
+ """Adds a json-serializable Python dict as a json file to IPFS.
>> ipfs_client.add_json({'one': 1, 'two': 2, 'three': 3})
u'QmVz9g7m5u3oHiNKHj2CJX1dbG1gtismRS3g9NaPBBLbob'
+
+ Keyword arguments:
+ json_obj -- a json-serializable Python dict
+ kwargs -- additional named arguments
"""
return self.add_str(utils.encode_json(json_obj), **kwargs)
def get_json(self, multihash, **kwargs):
- """
- Loads a json object from IPFS.
+ """Loads a json object from IPFS.
>> c.get_json('QmVz9g7m5u3oHiNKHj2CJX1dbG1gtismRS3g9NaPBBLbob')
{u'one': 1, u'two': 2, u'three': 3}
+
+ Keyword arguments:
+ multihash -- unique checksum used to identify IPFS resources
+ kwargs -- additional named arguments
"""
return self.cat(multihash, decoder='json', **kwargs)
def add_pyobj(self, py_obj, **kwargs):
- """
- Adds a picklable Python object as a file to IPFS.
+ """Adds a picklable Python object as a file to IPFS.
>> c.add_pyobj([0, 1.0, 2j, '3', 4e5])
u'QmdCWFLDXqgdWQY9kVubbEHBbkieKd3uo7MtCm7nTZZE9K'
+
+ Keyword arguments:
+ py_obj -- a picklable Python object
+ kwargs -- additional named arguments
"""
return self.add_str(utils.encode_pyobj(py_obj), **kwargs)
def get_pyobj(self, multihash, **kwargs):
- """
- Loads a pickled Python object from IPFS.
+ """Loads a pickled Python object from IPFS.
>> c.get_pyobj('QmdCWFLDXqgdWQY9kVubbEHBbkieKd3uo7MtCm7nTZZE9K')
[0, 1.0, 2j, '3', 400000.0]
+
+ Keyword arguments:
+ multihash -- unique checksum used to identify IPFS resources
+ kwargs -- additional named arguments
"""
return utils.parse_pyobj(self.cat(multihash, **kwargs))
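A minimal usage sketch of the new client surface above, assuming a local daemon on the default API port; every hash below is an illustrative placeholder rather than a real object.

# Sketch only: assumes a daemon at 127.0.0.1:5001; hashes are placeholders.
import ipfsApi

c = ipfsApi.Client('127.0.0.1', 5001)

# Build a DAG node step by step with the new object_patch_* wrappers;
# each call returns a dict describing the newly derived node.
empty = c.object_new()                       # {u'Hash': u'Qm...', u'Links': None}
poem = c.add_str(u'Mary had a little lamb')  # hash of the new file
node = c.object_patch_add_link(empty['Hash'], 'poem', poem)

# Enumerate every object in the local repo via the new refs_local.
for ref in c.refs_local():
    print(ref['Ref'])

# Tail the daemon log; the docstring asks callers to close the response.
log = c.log_tail()
for line in log:
    print(line)
    break
log.close()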
diff --git a/ipfsApi/commands.py b/ipfsApi/commands.py
index d4542c59..8f27a175 100644
--- a/ipfsApi/commands.py
+++ b/ipfsApi/commands.py
@@ -1,3 +1,12 @@
+"""Defines the skeleton of different command structures.
+
+Classes:
+Command -- A simple command that can make requests to a path.
+ArgCommand -- A Command subclass for commands with arguments.
+FileCommand -- A Command subclass for file-manipulation commands.
+DownloadCommand -- A Command subclass for file download commands.
+"""
+
from __future__ import absolute_import
import os
@@ -10,21 +19,71 @@
class Command(object):
+ """Defines a command.
+
+ Public methods:
+ __init__ -- creates a Command that will make requests to a given path
+ request -- make a request to this command's path
+
+ Instance variables:
+ path -- the url path that this Command will make requests to
+ """
def __init__(self, path):
+ """Creates a Command.
+
+ Keyword arguments:
+ path -- the url path that this Command makes requests to
+ """
self.path = path
def request(self, client, *args, **kwargs):
+ """Makes a request to the client with arguments.
+
+ Keyword arguments:
+ client -- the HTTP client to use for the request
+ args -- unused unnamed arguments
+ kwargs -- additional arguments to HTTP client's request
+ """
return client.request(self.path, **kwargs)
class ArgCommand(Command):
+ """Defines a command that takes arguments.
+
+ Subclass of Command.
+
+ Public methods:
+ __init__ -- extends Command constructor to also take a number of required
+ arguments
+ request -- makes a request to the ArgCommand's path with given arguments
+
+ Instance variables:
+ path -- the url path that this command will send data to
+ argc -- the number of arguments required by this command
+ """
def __init__(self, path, argc=None):
+ """Creates an ArgCommand.
+
+ Keyword arguments:
+ path -- the url path to which the command will send data
+ argc -- the number of arguments required by this command
+ """
Command.__init__(self, path)
self.argc = argc
def request(self, client, *args, **kwargs):
+ """Makes a request to the client with arguments.
+
+ Can raise an InvalidArguments error if the wrong number of arguments is
+ provided.
+
+ Keyword arguments:
+ client -- the HTTP client to use for the request
+ args -- the arguments to the HTTP client's request
+ kwargs -- additional arguments to HTTP client's request
+ """
if self.argc and len(args) != self.argc:
raise InvalidArguments("[%s] command requires %d arguments." % (
self.path, self.argc))
@@ -32,13 +91,33 @@ def request(self, client, *args, **kwargs):
class FileCommand(Command):
+ """Defines a command for manipulating files.
+
+ Subclass of Command.
+
+ Public methods:
+ request -- overrides Command's request to access a file or files
+ files -- adds file-like objects as a multipart request to IPFS
+ directory -- loads a directory recursively into IPFS
+
+ Instance variables:
+ path -- the path to make the file requests to
+ """
def request(self, client, args, f, **kwargs):
- """
- Takes either a file object, a filename, an iterable of filenames, an
- iterable of file objects, or a heterogeneous iterable of file objects
- and filenames. Can only take one directory at a time, which will be
+ """Makes a request for a file or files.
+
+ Can only take one directory at a time, which will be
traversed (optionally recursive).
+
+ Keyword arguments:
+ client -- the http client to send requests to
+ args -- the arguments to the HTTP client's request
+ f -- a file object, a filename, an iterable of filenames, an
+ iterable of file objects, or a heterogeneous iterable of file
+ objects and filenames
+ kwargs -- additional arguments (include 'recursive' if recursively
+ copying a directory)
"""
if kwargs.pop('recursive', False):
return self.directory(client, args, f, recursive=True, **kwargs)
@@ -47,29 +126,67 @@ def request(self, client, args, f, **kwargs):
else:
return self.files(client, args, f, **kwargs)
- def files(self, client, args, files, chunk_size=default_chunk_size, **kwargs):
- """
- Adds file-like objects as a multipart request to IPFS.
+ def files(self, client, args, files,
+ chunk_size=default_chunk_size, **kwargs):
+ """Adds file-like objects as a multipart request to IPFS.
+
+ Keyword arguments:
+ client -- the http client to send requests to
+ args -- the arguments to the HTTP client's request
+ files -- the files being requested
+ chunk_size -- the size of the chunks to break the files into
+ kwargs -- additional arguments to HTTP client's request
"""
body, headers = multipart.stream_files(files,
chunk_size=chunk_size)
- return client.request(self.path, args=args, data=body, headers=headers, **kwargs)
+ return client.request(self.path, args=args, data=body,
+ headers=headers, **kwargs)
def directory(self, client, args, dirname,
match='*', recursive=False,
chunk_size=default_chunk_size, **kwargs):
- """
- Loads a directory recursively into IPFS, files are matched against the
- given pattern.
+ """Loads a directory recursively into IPFS.
+
+ Files are matched against the given pattern.
+
+ Keyword arguments:
+ client -- the http client to send requests to
+ args -- the arguments to the HTTP client's request
+ dirname -- the name of the directory being requested
+ match -- a pattern to match the files against
+ recursive -- boolean for whether to load contents recursively
+ chunk_size -- the size of the chunks to break the files into
+ kwargs -- additional arguments to HTTP client's request
"""
body, headers = multipart.stream_directory(dirname,
fnpattern=match,
recursive=recursive,
chunk_size=chunk_size)
- return client.request(self.path, args=args, data=body, headers=headers, **kwargs)
+ return client.request(self.path, args=args, data=body,
+ headers=headers, **kwargs)
class DownloadCommand(Command):
+ """Downloads requested files.
+
+ Subclass of Command
+
+ Public methods:
+ request -- make a request to this DownloadCommand's path to download a
+ given file
+
+ Instance variables:
+ path -- the url path to send requests to
+ """
def request(self, client, *args, **kwargs):
+ """Requests a download from the HTTP Client.
+
+ See the HTTP client's doc for details of what to pass in.
+
+ Keyword arguments:
+ client -- the http client to send requests to
+ args -- the arguments to the HTTP client
+ kwargs -- additional arguments to the HTTP client
+ """
return client.download(self.path, args=args, **kwargs)
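A sketch of how these command classes get wired up, mirroring the pattern in client.py above; the host, port, and base path are assumptions for illustration, and argc is set here only to show the validation path (client.py leaves it unset for this command).

from ipfsApi import http
from ipfsApi.commands import ArgCommand
from ipfsApi.exceptions import InvalidArguments

client = http.HTTPClient('127.0.0.1', 5001, '/api/v0', 'json')
patch_add_link = ArgCommand('/object/patch/add-link', argc=3)

try:
    # Two arguments to a three-argument command fail fast, before any
    # HTTP request is sent.
    patch_add_link.request(client, 'QmRootHash', 'link-name')
except InvalidArguments as err:
    print(err)  # [/object/patch/add-link] command requires 3 arguments.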
diff --git a/ipfsApi/encoding.py b/ipfsApi/encoding.py
index 2b1250e2..be1411c5 100644
--- a/ipfsApi/encoding.py
+++ b/ipfsApi/encoding.py
@@ -1,3 +1,15 @@
+"""Defines encoding related classes.
+
+Classes:
+Encoding -- An abstract base for a data parser/encoder interface.
+Json -- A subclass of Encoding that handles JSON parsing and encoding.
+Protobuf -- A subclass of Encoding to handle Protobuf parsing/encoding. TO DO
+Xml -- A subclass of Encoding to handle XML parsing and encoding. TO DO
+
+Functions:
+get_encoding(name) -- Retrieves the Encoder object for the named encoding.
+"""
+
from __future__ import absolute_import
import json
@@ -6,34 +18,54 @@
class Encoding(object):
- """
- Abstract base for a data parser/encoder interface interface
+ """Abstract base for a data parser/encoder interface.
+
+ Public methods:
+ parse -- parses string into corresponding encoding
+ encode - serialize a raw object into corresponding encoding
"""
def parse(self, string):
+ """Parses string into corresponding encoding.
+
+ Keyword arguments:
+ string -- string to be parsed
+ """
raise NotImplemented
def encode(self, obj):
+ """Serialize a raw object into corresponding encoding.
+
+ Keyword arguments:
+ obj -- object to be encoded
+ """
raise NotImplemented
class Json(Encoding):
- """
- JSON parser/encoder that handles concatenated JSON
+ """JSON parser/encoder that handles concatenated JSON.
+
+ Public methods:
+ __init__ -- creates a Json encoder/decoder
+ parse -- returns a Python object decoded from JSON object(s) in raw
+ encode -- returns obj serialized as JSON formatted string
"""
name = 'json'
def __init__(self):
+ """Creates a JSON encoder/decoder"""
self.encoder = json.JSONEncoder()
self.decoder = json.JSONDecoder()
def parse(self, raw):
- """
- Returns a Python object decoded from JSON object(s) in raw
+ """Returns a Python object decoded from JSON object(s) in raw
Some responses from the IPFS api are a concatenated string of JSON
objects, which crashes json.loads(), so we need to use this instead as
a general approach.
+
+ Keyword arguments:
+ raw -- raw JSON object
"""
json_string = raw.strip()
results = []
@@ -54,16 +86,21 @@ def parse(self, raw):
def encode(self, obj):
"""
- Returns obj encoded as JSON in a binary string
+ Returns obj serialized as a JSON-formatted string
+
+ Keyword arguments:
+ obj -- generic Python object
"""
return json.dumps(obj)
class Protobuf(Encoding):
+ """Protobuf parser/encoder that handles protobuf."""
name = 'protobuf'
class Xml(Encoding):
+ """XML parser/encoder that handles XML."""
name = 'xml'
# encodings supported by the IPFS api (default is json)
@@ -77,6 +114,9 @@ class Xml(Encoding):
def get_encoding(name):
"""
Returns an Encoder object for the named encoding
+
+ Keyword arguments:
+ name -- named encoding. Supported options: Json, Protobuf, Xml
"""
try:
return __encodings[name.lower()]()
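A short demonstration of why Json.parse exists: endpoints such as /refs reply with several JSON objects concatenated together, which plain json.loads() rejects.

import json
from ipfsApi.encoding import get_encoding

raw = '{"Ref": "QmA", "Err": ""}{"Ref": "QmB", "Err": ""}'
try:
    json.loads(raw)                # raises ValueError at the second object
except ValueError as err:
    print('json.loads fails:', err)

enc = get_encoding('json')
print(enc.parse(raw))              # decodes both concatenated objects
print(enc.encode({'one': 1}))      # '{"one": 1}'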
diff --git a/ipfsApi/exceptions.py b/ipfsApi/exceptions.py
index 0c61e0df..2b513c48 100644
--- a/ipfsApi/exceptions.py
+++ b/ipfsApi/exceptions.py
@@ -1,22 +1,40 @@
+"""Defines the skeleton for exceptions.
+
+Classes:
+ipfsApiError -- A base class for generic exceptions.
+InvalidCommand -- An ipfsApiError subclass for invalid commands.
+InvalidArguments -- An ipfsApiError subclass for invalid arguments.
+InvalidPath -- An ipfsApiError subclass for an invalid path.
+FileCommandException -- An ipfsApiError subclass for failed file commands.
+EncodingException -- An ipfsApiError subclass for encoding exceptions.
+"""
+
+
class ipfsApiError(Exception):
+ """Base class for exceptions in this module."""
pass
class InvalidCommand(ipfsApiError):
+ """Exception raised for an invalid command."""
pass
class InvalidArguments(ipfsApiError):
+ """Exception raised for invalid arguments."""
pass
class InvalidPath(ipfsApiError):
+ """Exception raised for an invalid path."""
pass
class FileCommandException(ipfsApiError):
+ """Exception raised for file command exception."""
pass
class EncodingException(ipfsApiError):
+ """Exception raised for invalid encoding."""
pass
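One plausible use of this hierarchy: because everything derives from ipfsApiError, callers can catch the base class to handle any failure from the library in one place.

from ipfsApi.exceptions import ipfsApiError

def cat_or_none(client, multihash):
    """Returns the file contents, or None on any ipfsApi failure."""
    try:
        return client.cat(multihash)
    except ipfsApiError:
        return None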
diff --git a/ipfsApi/http.py b/ipfsApi/http.py
index a809b6c1..fe11e898 100644
--- a/ipfsApi/http.py
+++ b/ipfsApi/http.py
@@ -1,6 +1,10 @@
-"""
-HTTP client for api requests. This is pluggable into the IPFS Api client and
+"""HTTP client for api requests.
+
+This is pluggable into the IPFS Api client and
can/will eventually be supplemented with an asynchronous version.
+
+Classes:
+Client -- A TCP client for interacting with an IPFS daemon
"""
from __future__ import absolute_import
@@ -14,21 +18,49 @@
from .exceptions import ipfsApiError
-def pass_defaults(f):
- """
- Use instance default kwargs updated with those passed to function.
+def pass_defaults(func):
+ """Decorator that returns a function named wrapper.
+
+ When invoked, wrapper invokes func with default kwargs appended.
+
+ Keyword arguments:
+ func -- the function to append the default kwargs to
"""
def wrapper(self, *args, **kwargs):
merged = {}
merged.update(self.defaults)
merged.update(kwargs)
- return f(self, *args, **merged)
+ return func(self, *args, **merged)
return wrapper
class HTTPClient(object):
+ """An HTTP client for interacting with the IPFS daemon.
+
+ Public methods:
+ __init__ -- initializes an instance of HTTPClient
+ request -- makes an HTTP request to the IPFS daemon
+ download -- makes a request to the IPFS daemon to download a file
+ session -- a context manager for this client's session
+
+ Instance variables:
+ host -- the host the IPFS daemon is running on
+ port -- the port the IPFS daemon is running on
+ base -- the path at which the api calls are to be sent
+ default_enc -- the default encoding of the HTTP client's response
+ defaults -- the default options to be handled by pass_defaults
+ """
def __init__(self, host, port, base, default_enc, **defaults):
+ """Initializes an instance of HTTPClient.
+
+ Keyword arguments:
+ host -- the host the IPFS daemon is running on
+ port -- the port the IPFS daemon is running on
+ base -- the path at which the api calls are to be sent
+ default_enc -- the default encoding of the HTTP client's response
+ defaults -- the default options to be handled by pass_defaults
+ """
self.host = host
self.port = port
if not re.match('^https?://', host.lower()):
@@ -50,7 +82,21 @@ def __init__(self, host, port, base, default_enc, **defaults):
def request(self, path,
args=[], files=[], opts={},
decoder=None, **kwargs):
-
+ """Makes an HTTP request to the IPFS daemon.
+
+ This function returns the contents of the HTTP response from the IPFS
+ daemon.
+
+ Keyword Arguments:
+ path -- the url path of this request
+ args -- a list of parameters to be sent with the HTTP request
+ files -- a file object, a filename, an iterable of filenames, an
+ iterable of file objects, or a heterogeneous iterable of
+ file objects and filenames
+ opts -- a dictionary of parameters to be sent with the HTTP request
+ decoder -- the encoding of the HTTP response, defaults to None
+ kwargs -- additional arguments, used to determine HTTP request method
+ """
url = self.base + path
params = []
@@ -107,9 +153,20 @@ def request(self, path,
def download(self, path, filepath=None,
args=[], opts={},
compress=True, **kwargs):
- """
+ """Makes a request to the IPFS daemon to download a file.
+
Downloads a file or files from IPFS into the current working
directory, or the directory given by :filepath:.
+
+ Keyword Arguments:
+ path -- the url path of this request
+ filepath -- the local path to where IPFS will download the files,
+ current working directory if None, defaults to None
+ args -- a list of parameters to be sent with the HTTP request
+ opts -- a dictionary of parameters to be sent with the HTTP request
+ compress -- a flag that when true indicates that the response file
+ should be downloaded as a tar, defaults to True
+ kwargs -- additional arguments
"""
url = self.base + path
wd = filepath or '.'
@@ -144,6 +201,11 @@ def download(self, path, filepath=None,
@contextlib.contextmanager
def session(self):
+ """A context manager for this client's session.
+
+ This function closes the current session when this client goes out of
+ scope.
+ """
self._session = requests.session()
yield
self._session.close()
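A sketch of pass_defaults and session() working together, assuming request is wrapped with pass_defaults (as the decorator's docstring suggests) and that the daemon uses the conventional '/api/v0' base path.

from ipfsApi.http import HTTPClient

# 'decoder' lands in self.defaults; the pass_defaults wrapper merges it
# into every call's kwargs, with explicit kwargs taking precedence.
client = HTTPClient('127.0.0.1', 5001, '/api/v0', 'json', decoder='json')

with client.session():            # reuse one requests session, then close it
    print(client.request('/id'))
    print(client.request('/version'))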
diff --git a/ipfsApi/multipart.py b/ipfsApi/multipart.py
index 894de96d..d3baaa53 100644
--- a/ipfsApi/multipart.py
+++ b/ipfsApi/multipart.py
@@ -1,9 +1,28 @@
-"""
-Multipart/form-data encoded file streaming.
+"""HTTP Multipart/* encoded file streaming.
+
+Classes:
+BodyGenerator -- generates the body of an HTTP-encoded multipart request
+BufferedGenerator -- abstract class that generates the wrapper around HTTP
+ multipart bodies
+FileStream -- generates the HTTP multipart request body for a group of files
+DirectoryStream -- generates the HTTP multipart request body for a directory
+TextStream -- generates the HTTP multipart request body for a chunk of text
+
+Functions:
+content_disposition -- returns a dict containing the MIME content-disposition
+ header for a file
+content_type -- returns a dict with the content-type header for a file
+multipart_content_type -- creates a MIME multipart header with the given
+ configuration
+stream_files -- gets a buffered generator for streaming files
+stream_directory -- gets a buffered generator for streaming directories
+stream_text -- gets a buffered generator for streaming text
"""
from __future__ import absolute_import
-import fnmatch
+# import fnmatch
+import requests
+import io
import os
from inspect import isgenerator
from sys import version_info
@@ -25,15 +44,18 @@
def content_disposition(fn, disptype='file'):
- """
- Returns a dict containing the MIME content-disposition header for a
- file
+ """Returns a dict containing the MIME content-disposition header for a file.
+ Example:
>>> content_disposition('example.txt')
{'Content-Disposition': 'file; filename="example.txt"'}
>>> content_disposition('example.txt', 'attachment')
{'Content-Disposition': 'attachment; filename="example.txt"'}
+
+ Keyword arguments:
+ fn -- filename to retrieve the MIME content-disposition for
+ disptype -- the disposition type to use for the file
"""
disp = '%s; filename="%s"' % (
disptype,
@@ -43,10 +65,12 @@ def content_disposition(fn, disptype='file'):
def content_type(fn):
- """
- Guesses the mimetype associated with a filename and returns a dict
- containing the content-type header
+ """Returns a dict with the content-type header for a file.
+
+ Guesses the mimetype for a filename and returns a dict
+ containing the content-type header.
+ Example:
>>> content_type('example.txt')
{'Content-Type': 'text/plain'}
@@ -55,20 +79,29 @@ def content_type(fn):
>>> content_type('example')
{'Content-Type': 'application/octet-stream'}
+
+ Keyword Arguments:
+ fn -- filename to guess the content-type for
"""
return {'Content-Type': utils.guess_mimetype(fn)}
def multipart_content_type(boundary, subtype='mixed'):
- """
+ """Creates a MIME multipart header with the given configuration.
+
Returns a dict containing a MIME multipart header with the given
- boundary
+ boundary.
+ Example:
>>> multipart_content_type('8K5rNKlLQVyreRNncxOTeg')
{'Content-Type': 'multipart/mixed; boundary="8K5rNKlLQVyreRNncxOTeg"'}
>>> multipart_content_type('8K5rNKlLQVyreRNncxOTeg', 'alt')
{'Content-Type': 'multipart/alt; boundary="8K5rNKlLQVyreRNncxOTeg"'}
+
+ Keyword arguments:
+ boundary -- the boundary string to put in the header
+ subtype -- the MIME subtype to put in the header
"""
ctype = 'multipart/%s; boundary="%s"' % (
subtype,
@@ -78,11 +111,31 @@ def multipart_content_type(boundary, subtype='mixed'):
class BodyGenerator(object):
+ """Generators for creating the body of a multipart/* HTTP request.
+
+ Instance variables:
+ boundary -- a separator for the body chunk being generated
+ headers -- the headers of the body chunk being generated
+
+ Public methods:
+ __init__ -- create a BodyGenerator
+ write_headers -- generator that yields HTTP headers for content
+ open -- generator that opens a body section for content
+ file_open -- generator that opens a file section for content
+ file_close -- generator that closes a file section for content
+ close -- generator that closes a body section for content
"""
- Generators for multipart/form-data encoding.
- """
+
def __init__(self, name, disptype='file', subtype='mixed', boundary=None):
+ """Create a BodyGenerator.
+ Keyword arguments:
+ name -- the name of the file(s)/content being encoded
+ disptype -- the content-disposition of the content
+ subtype -- the HTTP multipart/ type of the content
+ boundary -- an identifier for the body being generated
+ """
+ # If the boundary is unspecified, make a random one
if boundary is None:
boundary = self._make_boundary()
self.boundary = boundary
@@ -92,9 +145,21 @@ def __init__(self, name, disptype='file', subtype='mixed', boundary=None):
self.headers = headers
def _make_boundary(self):
+ """Returns a random hexadecimal string (UUID 4).
+
+ The HTTP multipart request body spec requires a boundary string to
+ separate different content chunks within a request, and this is
+ usually a random string. Using a UUID is an easy way to generate
+ a random string of appropriate length as this content separator.
+ """
return uuid4().hex
def _write_headers(self, headers):
+ """Generator function that yields the HTTP header for content.
+
+ Keyword arguments:
+ headers -- the dictionary of headers to yield
+ """
if headers:
for name in sorted(headers.keys()):
yield name
@@ -104,15 +169,27 @@ def _write_headers(self, headers):
yield CRLF
def write_headers(self):
+ """Generator function that writes out the HTTP headers for content."""
for c in self._write_headers(self.headers):
yield c
def open(self, **kwargs):
+ """Generator function that opens a body section for content.
+
+ Keyword arguments:
+ kwargs -- keyword arguments, unused
+ """
yield b'--'
yield self.boundary
yield CRLF
def file_open(self, fn):
+ """Generator function that opens a file section in multipart HTTP.
+
+ Keyword arguments:
+ fn -- filename for the file being opened and added to the HTTP
+ body
+ """
yield b'--'
yield self.boundary
yield CRLF
@@ -122,9 +199,11 @@ def file_open(self, fn):
yield c
def file_close(self):
+ """Generator function that ends a file section in HTTP encoding."""
yield CRLF
def close(self):
+ """Generator function that ends a content area in an HTTP body."""
yield b'--'
yield self.boundary
yield b'--'
@@ -132,11 +211,35 @@ def close(self):
class BufferedGenerator(object):
+ """Generator that encodes multipart/form-data.
+
+ An abstract class of buffered generator which encodes
+ multipart/form-data.
+
+ Instance variables:
+ chunk_size -- the maximum size of a generated chunk of a file
+ buf -- buffer containing the current file chunk
+ name -- the name of the file being chunked
+ envelope -- a BodyGenerator to wrap the chunked content
+ headers -- the HTTP multipart headers for the chunked content
+
+ Public methods:
+ __init__ -- creates a buffered generator that encodes multipart/form-data
+ file_chunks -- yields chunks of a file
+ gen_chunks -- generates byte chunks of a given size
+ body -- returns the body of the buffered file
+ close -- closes the multipart envelope
+ """
def __init__(self, name, chunk_size=default_chunk_size):
- """
+ """Generator that encodes multipart/form-data.
+
An abstract class of buffered generator which encodes
multipart/form-data.
+
+ Keyword arguments:
+ name -- the name of the file to encode
+ chunk_size -- the maximum size for any chunk in bytes
"""
self.chunk_size = chunk_size
self._internal = bytearray(chunk_size)
@@ -149,8 +252,11 @@ def __init__(self, name, chunk_size=default_chunk_size):
self.headers = self.envelope.headers
def file_chunks(self, fp):
- """
- Yields chunks of a file.
+ """Yields chunks of a file.
+
+ Keyword arguments:
+ fp -- the file to break into chunks (must be an open file or have the
+ readinto method)
"""
fsize = utils.file_size(fp)
offset = 0
@@ -166,9 +272,13 @@ def file_chunks(self, fp):
offset += nb
def gen_chunks(self, gen):
- """
+ """Generates byte chunks of a given size.
+
Takes a bytes generator and yields chunks of a maximum of [chunk_size]
bytes.
+
+ Keyword arguments:
+ gen -- the bytes generator that produces the bytes
"""
for data in gen:
if not isinstance(data, six.binary_type):
@@ -185,31 +295,56 @@ def gen_chunks(self, gen):
offset += nb
def body(self, *args, **kwargs):
- raise NotImplemented
+ """Returns the body of the buffered file.
- def close(self):
- """
- Closes the multipart envelope.
+ Warning: this method is abstract; concrete subclasses must override it.
+
+ Keyword arguments:
+ args -- additional arguments, unused
+ kwargs -- additional keyword arguments, unused
"""
+ raise NotImplementedError
+
+ def close(self):
+ """Closes the multipart envelope."""
for chunk in self.gen_chunks(self.envelope.close()):
yield chunk
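gen_chunks is what guarantees that nothing emitted exceeds chunk_size, however large the wrapped generator's items are. A small sketch (chunks may be yielded as memoryview slices, hence the bytes() copy):

    from ipfsApi.multipart import BufferedGenerator

    buffered = BufferedGenerator('example', chunk_size=4)

    def oversized():
        yield b'0123456789'      # one fragment larger than chunk_size

    for chunk in buffered.gen_chunks(oversized()):
        print(bytes(chunk))      # re-emitted in pieces of at most 4 bytes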
class FileStream(BufferedGenerator):
+ """Generator that encodes multiples files into HTTP multipart.
+
+ A buffered generator that encodes an array of files as
+ multipart/form-data. This is a concrete implementation of
+ BufferedGenerator.
+
+ Instance variables:
+ files -- array of files to be encoded
+
+ Public methods:
+ __init__ -- creates a new FileStream
+ body -- generate the HTTP multipart-encoded body
+ """
def __init__(self, files, chunk_size=default_chunk_size):
- """
+ """Creates a new FileStream.
+
A buffered generator that encodes an array of files as
multipart/form-data.
+
+ Keyword arguments:
+ files -- the file(s) to encode
+ chunk_size -- the maximum size for any chunk in bytes
"""
BufferedGenerator.__init__(self, 'files', chunk_size=chunk_size)
self.files = utils.clean_files(files)
def body(self):
+ """Returns the body of the buffered file."""
for fp, need_close in self.files:
try:
- name = fp.name
+ name = os.path.basename(fp.name)
except AttributeError:
name = ''
for chunk in self.gen_chunks(self.envelope.file_open(name)):
@@ -225,85 +360,116 @@ def body(self):
class DirectoryStream(BufferedGenerator):
+ """Generator that encodes a director into HTTP multipart.
+
+ A buffered generator that encodes a directory as
+ multipart/form-data.
+
+ Instance variables:
+ directory -- the directory being encoded
+ recursive -- whether or not to recursively encode directory contents
+ fnpattern -- a pattern to match filenames (if no match, file is excluded)
+ headers -- the HTTP headers for uploading self.directory (included for
+ external API consistency)
+
+ Public methods:
+ __init__ -- creates a new DirectoryStream
+ body -- returns the HTTP body for this directory upload request
+ headers -- returns the HTTP headers for this directory upload request
+ """
def __init__(self,
directory,
recursive=False,
fnpattern='*',
chunk_size=default_chunk_size):
- """
- A buffered generator that encodes a directory as
- multipart/form-data.
+ """A buffered generator that encodes a directory as multipart/form-data.
+
+ Keyword arguments:
+ directory -- the directory to encode
+ chunk_size -- the maximum size of a file chunk to use
"""
BufferedGenerator.__init__(self, directory, chunk_size=chunk_size)
self.directory = directory
self.recursive = recursive
self.fnpattern = fnpattern
+ self._request = self._prepare()
+ self.headers = self._request.headers
- def body(self, dirname=None, part=None):
- """
- Recursively traverses a directory and generates the multipart encoded
- body.
- """
- if part is None:
- # this is the outer envelope
- outer = True
- part = self.envelope
- dirname = self.directory
- else:
- # this is a an inner mixed part
- outer = False
-
- if dirname is None:
- dirname = part.name
-
- for chunk in self.gen_chunks(part.open()):
- yield chunk
-
- subpart = BodyGenerator(dirname)
- for chunk in self.gen_chunks(subpart.write_headers()):
- yield chunk
-
- files, subdirs = utils.ls_dir(dirname)
-
- for fn in files:
- if not fnmatch.fnmatch(fn, self.fnpattern):
- continue
- fullpath = os.path.join(dirname, fn)
- for chunk in self.gen_chunks(subpart.file_open(fullpath)):
- yield chunk
- with open(fullpath, 'rb') as fp:
- for chunk in self.file_chunks(fp):
- yield chunk
- for chunk in self.gen_chunks(subpart.file_close()):
- yield chunk
-
- if self.recursive:
- for subdir in subdirs:
- fullpath = os.path.join(dirname, subdir)
- for chunk in self.body(fullpath, subpart):
- yield chunk
+ def body(self):
+ """Returns the HTTP headers for this directory upload request."""
+ return self._request.body
+
+ def headers(self):
+ """Returns the HTTP body for this directory upload request."""
+ return self._request.headers
+
+ def _prepare(self):
+ """Pre-formats the multipart HTTP request to transmit the directory."""
+ names = []
+ if self.directory.endswith(os.sep):
+ self.directory = self.directory[:-1]
+ # identify the unnecessary portion of the relative path
+ truncate = os.path.dirname(self.directory)
+ # traverse the filesystem downward from the target directory's uri
+ for curr_dir, _, files in os.walk(self.directory):
+ # find the path relative to the directory being added
+ if len(truncate) > 0:
+ _, _, short_path = curr_dir.partition(truncate)
+ else:
+ short_path = curr_dir
+ # remove leading / or \ if it is present
+ if short_path.startswith(os.sep):
+ short_path = short_path[1:]
+ # create an empty, fake file to represent the directory
+ mock_file = io.StringIO()
+ mock_file.write(u'')
+ # add this file to those that will be sent
+ names.append(('files',
+ (short_path, mock_file, 'application/x-directory')))
+ # iterate across the files in the current directory
+ for filename in files:
+ # find the filename relative to the directory being added
+ short_name = os.path.join(short_path, filename)
+ filepath = os.path.join(curr_dir, filename)
+ # remove leading / or \ if it is present
+ if short_name.startswith(os.sep):
+ short_name = short_name[1:]
+ # add the file to those being sent
+ names.append(('files', (short_name,
+ open(filepath, 'rb'),
+ 'application/octet-stream')))
+ # prepare (but do not send) the request so its body and headers can be reused
+ req = requests.Request("POST", 'http://localhost', files=names)
+ prep = req.prepare()
+ return prep
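The rewrite delegates the actual encoding to requests: preparing (never sending) a Request against a placeholder URL yields a fully encoded multipart body and matching headers. The trick in isolation (the URL is a dummy; nothing is transmitted):

    import io
    import requests

    files = [('files', ('docs', io.StringIO(u''), 'application/x-directory')),
             ('files', ('docs/readme.txt', io.BytesIO(b'hello'), 'text/plain'))]
    prep = requests.Request('POST', 'http://localhost', files=files).prepare()

    print(prep.headers['Content-Type'])  # multipart/form-data; boundary=...
    print(len(prep.body))                # size of the encoded body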
- for chunk in self.gen_chunks(subpart.close()):
- yield chunk
- if outer:
- for chunk in self.close():
- yield chunk
+class TextStream(BufferedGenerator):
+ """A buffered generator that encodes a string as multipart/form-data.
+ Instance variables:
+ text -- the text to stream
-class TextStream(BufferedGenerator):
+ Public methods:
+ __init__ -- create a TextStream
+ body -- generator that yields the encoded body
+ """
def __init__(self, text, chunk_size=default_chunk_size):
- """
- A buffered generator that encodes a string as multipart/form-data.
+ """A buffered generator that encodes a string as multipart/form-data.
+
+ Keyword arguments:
+ text -- the text to stream
+ chunk_size -- maximum size of a single data chunk
"""
BufferedGenerator.__init__(self, 'text', chunk_size=chunk_size)
self.text = text if isgenerator(text) else (text,)
def body(self):
+ """Generator that yields the encoded body."""
for chunk in self.gen_chunks(self.envelope.file_open(self.name)):
yield chunk
for chunk in self.gen_chunks(self.text):
@@ -315,9 +481,14 @@ def body(self):
def stream_files(files, chunk_size=default_chunk_size):
- """
+ """Gets a buffered generator for streaming files.
+
Returns a buffered generator which encodes a file or list of files as
multipart/form-data. Also returns the corresponding headers.
+
+ Keyword arguments:
+ files -- the file(s) to stream
+ chunk_size -- maximum size of each stream chunk
"""
stream = FileStream(files, chunk_size=chunk_size)
@@ -328,9 +499,16 @@ def stream_directory(directory,
recursive=False,
fnpattern='*',
chunk_size=default_chunk_size):
- """
+ """Gets a buffered generator for streaming directories.
+
Returns a buffered generator which encodes a directory as
multipart/form-data. Also returns the corresponding headers.
+
+ Keyword arguments:
+ directory -- the directory to stream
+ recursive -- boolean, True to stream all content recursively
+ fnpattern -- a glob pattern; only filenames that match it are streamed
+ chunk_size -- maximum size of each stream chunk
"""
stream = DirectoryStream(directory,
recursive=recursive,
@@ -341,9 +519,14 @@ def stream_directory(directory,
def stream_text(text, chunk_size=default_chunk_size):
- """
+ """Gets a buffered generator for streaming text.
+
Returns a buffered generator which encodes a string as multipart/form-data.
Also returns the corresponding headers.
+
+ Keyword arguments:
+ text -- the text to stream
+ chunk_size -- the maximum size of each stream chunk
"""
stream = TextStream(text, chunk_size=chunk_size)
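A hedged usage sketch of these helpers, assuming each returns a (body, headers) pair as the docstrings state (chunks may arrive as memoryview objects):

    from ipfsApi.multipart import stream_files, stream_text

    body, headers = stream_files('fake_dir/fsdfgh')
    print(headers['Content-Type'])            # multipart type with a random boundary
    payload = b''.join(bytes(chunk) for chunk in body)

    text_body, text_headers = stream_text(u'Mary had a little lamb')
    print(sum(len(chunk) for chunk in text_body))  # total encoded size in bytes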
diff --git a/ipfsApi/utils.py b/ipfsApi/utils.py
index 36d5ac8c..f8fc0e48 100644
--- a/ipfsApi/utils.py
+++ b/ipfsApi/utils.py
@@ -1,3 +1,28 @@
+"""A module to handle generic operations.
+
+Classes:
+return_field -- a decorator that returns the given field of a json response
+
+Functions:
+make_string_buffer -- returns a readable/writeable file-like object,
+ containing a given string value
+encode_json -- returns a serialized Python object as a JSON encoded string
+parse_json -- returns a Python object unserialized from JSON in json_str
+make_json_buffer -- returns a file-like object containing a Python object
+ serialized to a JSON formatted string
+encode_pyobj -- returns a serialized Python object as a Pickle encoded string
+parse_pyobj -- returns a Python object unserialized from a
+ Pickle encoded string
+make_pyobj_buffer -- returns a file-like object containing a Python object
+ serialized to a Pickle formatted string
+guess_mimetype -- guesses the mimetype of a file based on the filename
+ls_dir -- returns files and subdirectories within a given directory
+clean_file -- returns a tuple containing a file-like object and a boolean value
+ indicating whether the file needs to be closed after use
+clean_files -- generates tuples with a file-like object and a close indicator
+file_size -- returns the size of a file in bytes
+"""
+
from __future__ import absolute_import
import io
@@ -25,6 +50,9 @@ def make_string_buffer(string):
>>> f = make_string_buffer(b'bytes')
>>> f.read() == b'bytes'
True
+
+ Keyword arguments:
+ string -- desired content for the returned file-like object
"""
if isinstance(string, six.text_type):
buf = StringIO()
@@ -35,41 +63,60 @@ def make_string_buffer(string):
return buf
-def encode_json(obj):
- return json.dumps(obj)
+def encode_json(py_obj):
+ """Returns a serialized Python object as a JSON encoded string.
+
+ Keyword arguments:
+ py_obj -- any given Python object
+ """
+ return json.dumps(py_obj)
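A round-trip sketch pairing encode_json with parse_json defined below:

    from ipfsApi.utils import encode_json, parse_json

    assert parse_json(encode_json({'key': 'value'})) == {'key': 'value'}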
def parse_json(json_str):
- """Returns a Python object unserialized from JSON in json_str
+ """Returns a Python object unserialized from JSON in json_str.
>>> parse_json('[1, 2, 3, true, 4.5, null, 6e3]')
[1, 2, 3, True, 4.5, None, 6000.0]
+
+ Keyword arguments:
+ json_str -- a JSON encoded string from which to reconstruct a Python object
"""
return json.loads(json_str)
-def make_json_buffer(json_obj):
- """Returns a file-like object containing json_obj serialized to JSON
+def make_json_buffer(py_obj):
+ """Returns a file-like object containing py_obj as a JSON encoded string.
>>> f = make_json_buffer([1, 2, 3, True, {u'distance': 4.5}])
>>> f.read()
'[1, 2, 3, true, {"distance": 4.5}]'
+
+ Keyword arguments:
+ py_obj -- any given Python object
"""
- return make_string_buffer(json.dumps(json_obj))
+ return make_string_buffer(encode_json(py_obj))
def encode_pyobj(py_obj):
+ """Returns a serialized Python object as a Pickle encoded string.
+
+ Keyword arguments:
+ py_obj -- any given Python object
+ """
return pickle.dumps(py_obj)
def parse_pyobj(pickled):
- r"""Returns a Python object unpickled from the provided string
+ r"""Returns a Python object unserialized from a Pickle encoded string.
>>> parse_pyobj(b'(lp0\nI1\naI2\naI3\naI01\naF4.5\naNaF6000.0\na.')
[1, 2, 3, True, 4.5, None, 6000.0]
>>> parse_pyobj(u'(lp0\nI1\naI2\naI3\naI01\naF4.5\naNaF6000.0\na.')
[1, 2, 3, True, 4.5, None, 6000.0]
+
+ Keyword arguments:
+ pickled -- the string of a pickled Python object to be reconstructed
"""
if isinstance(pickled, six.text_type):
pickled = pickled.encode('latin_1')
@@ -77,32 +124,41 @@ def parse_pyobj(pickled):
def make_pyobj_buffer(py_obj):
- """Returns a file-like object containing py_obj serialized to a pickle
+ """Returns a file-like object containing py_obj as a Pickle encoded string.
>>> f = make_pyobj_buffer([1, 2, 3, True, 4.5, None, 6e3])
>>> isinstance(f.read(), bytes)
True
+
+ Keyword arguments:
+ py_obj -- any given Python object
"""
- return make_string_buffer(pickle.dumps(py_obj))
+ return make_string_buffer(encode_pyobj(py_obj))
def guess_mimetype(filename):
- """
- Guesses the mimetype of a file, based on the filename
+ """Guesses the mimetype of a file based on the filename.
>>> guess_mimetype('example.txt')
'text/plain'
>>> guess_mimetype('/foo/bar/example')
'application/octet-stream'
+
+ Keyword arguments:
+ filename -- the file for which the mimetype is to be determined
"""
fn = os.path.basename(filename)
return mimetypes.guess_type(fn)[0] or 'application/octet-stream'
def ls_dir(dirname):
- """
+ """Returns files and subdirectories within a given directory.
+
Returns a pair of lists containing the names of the files and the
subdirectories in dirname.
+
+ Keyword arguments:
+ dirname -- directory to be listed
"""
ls = os.listdir(dirname)
files = [p for p in ls if os.path.isfile(os.path.join(dirname, p))]
@@ -111,8 +167,13 @@ def ls_dir(dirname):
def clean_file(f):
- """
- Returns a file object.
+ """Returns a tuple containing a file-like object and a close indicator.
+
+ This ensures the given file is opened and keeps track of files that should
+ be closed after use (files that were not open prior to this function call).
+
+ Keyword arguments:
+ f -- a filepath or file-like object that may or may not need to be opened
"""
if not hasattr(f, 'read'):
return open(f, 'rb'), True
@@ -121,10 +182,15 @@ def clean_file(f):
def clean_files(files):
- """
+ """Generates tuples with a file-like object and a close indicator.
+
This is a generator of tuples, where the first element is the file object
- and the second element is a boolean which is True is this module opened the
+ and the second element is a boolean which is True if this module opened the
file (and thus should close it).
+
+ Keyword arguments:
+ files -- a list or tuple of filepaths and file-like objects
+ or a singular instance of either a filepath or a file-like object
"""
if isinstance(files, (list, tuple)):
for f in files:
@@ -134,8 +200,10 @@ def clean_files(files):
def file_size(f):
- """
- Returns size of file in bytes.
+ """Returns the size of a file in bytes.
+
+ Keyword arguments:
+ f -- the file for which the size is to be determined
"""
if isinstance(f, (six.string_types, six.text_type)):
return os.path.getsize(f)
@@ -148,15 +216,37 @@ def file_size(f):
class return_field(object):
- """
- Decorator that returns the given field of a json response.
+ """Decorator that returns the given field of a json response.
+
+ Public methods:
+ __init__ -- return_field constructor that sets the value of field
+ __call__ -- wraps a command so that only the value of field is returned
+
+ Instance variables:
+ field -- the desired response field for a command that will be wrapped
"""
def __init__(self, field):
+ """Initializes a return_field object.
+
+ Keyword arguments:
+ field -- the response field to be returned for all function calls
+ """
self.field = field
def __call__(self, cmd):
+ """Wraps a command so that only a specified field is returned.
+
+ Keyword arguments:
+ cmd -- a command that is intended to be wrapped
+ """
@wraps(cmd)
- def wrapper(api, *args, **kwargs):
- res = cmd(api, *args, **kwargs)
+ def wrapper(*args, **kwargs):
+ """Returns the specified field of the command invocation.
+
+ Keyword arguments:
+ args -- additional arguments to cmd
+ kwargs -- named additional arguments to cmd
+ """
+ res = cmd(*args, **kwargs)
return res[self.field]
return wrapper
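A minimal sketch of the decorator in use; the Demo class and its canned response are hypothetical stand-ins for a client method:

    from ipfsApi.utils import return_field

    class Demo(object):
        @return_field('Hash')
        def add(self, *args, **kwargs):
            # Pretend this hit the API and returned a full JSON response.
            return {'Name': 'fsdfgh',
                    'Hash': 'QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX'}

    print(Demo().add())  # only the 'Hash' field survives the wrapper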
diff --git a/setup.cfg b/setup.cfg
index 175be4f2..5dac37be 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -13,4 +13,5 @@ addopts =
--doctest-modules
ipfsApi
test/unit
+ test/functional
diff --git a/setup.py b/setup.py
index 4f6805a8..e842eb88 100644
--- a/setup.py
+++ b/setup.py
@@ -45,6 +45,7 @@
# 'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
],
# What does your project relate to?
diff --git a/test/functional/fake_json/links.json b/test/functional/fake_json/links.json
new file mode 100644
index 00000000..cc61bbc0
--- /dev/null
+++ b/test/functional/fake_json/links.json
@@ -0,0 +1,8 @@
+{
+ "Data": "another",
+ "Links": [ {
+ "Name": "some link",
+ "Hash": "QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V",
+ "Size": 8
+ } ]
+}
diff --git a/test/functional/fake_json/no_links.json b/test/functional/fake_json/no_links.json
new file mode 100644
index 00000000..950b0f0a
--- /dev/null
+++ b/test/functional/fake_json/no_links.json
@@ -0,0 +1,3 @@
+{
+ "Data": "abc"
+}
diff --git a/test/functional/tests.py b/test/functional/tests.py
index da79b01c..b552f838 100644
--- a/test/functional/tests.py
+++ b/test/functional/tests.py
@@ -1,5 +1,7 @@
# _*_ coding: utf-8 -*-
import os
+import shutil
+import json
import unittest
import ipfsApi
@@ -7,11 +9,10 @@
HERE = os.path.dirname(os.path.abspath(__file__))
-
class IpfsApiTest(unittest.TestCase):
api = ipfsApi.Client()
-
+
fake = [{'Hash': u'QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX',
'Name': 'fake_dir/fsdfgh'},
{'Hash': u'QmYAhvKYu46rh5NcHzeu6Bhc7NG9SqkF9wySj2jvB74Rkv',
@@ -26,48 +27,52 @@ class IpfsApiTest(unittest.TestCase):
'Name': 'fake_dir/test2/fssdf'},
{'Hash': u'QmX1dd5DtkgoiYRKaPQPTCtXArUu4jEZ62rJBUcd5WhxAZ',
'Name': 'fake_dir/test2'},
- {'Hash': u'QmbZuss6aAizLEAt2Jt2BD29oq4XfMieGezi6mN4vz9g9A',
+ {'Hash': u'QmRphRr6ULDEj7YnXpLdnxhnPiVjv5RDtGX3er94Ec6v4Q',
+ 'Name': 'fake_dir/test3'},
+ {'Hash': u'QmYqqgRahxbZvudnzDu2ZzUS1vFSNEuCrxghM8hgT8uBFY',
'Name': 'fake_dir'}]
fake_lookup = dict((i['Name'], i['Hash']) for i in fake)
## test_add_multiple_from_list
fake_file = 'fake_dir/fsdfgh'
+ fake_file_only_res = [{'Name': 'fsdfgh', 'Bytes': 8},
+ {'Name': 'fsdfgh', 'Hash': 'QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX'}]
fake_file2 = 'fake_dir/popoiopiu'
- fake_files_res = [{u'Hash': u'QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX',
- u'Name': u'fsdfgh'},
- {u'Hash': u'QmYAhvKYu46rh5NcHzeu6Bhc7NG9SqkF9wySj2jvB74Rkv',
- u'Name': u'popoiopiu'},
- {u'Hash': u'QmVkNdzCBukBRdpyFiKPyL2R15qPExMr9rV9RFV2kf9eeV',
- u'Name': u''}]
+ fake_files_res = [{'Name': 'fsdfgh', 'Bytes': 8},
+ {'Name': 'fsdfgh', 'Hash': 'QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX'},
+ {'Name': 'popoiopiu', 'Bytes': 15},
+ {'Name': 'popoiopiu', 'Hash': 'QmYAhvKYu46rh5NcHzeu6Bhc7NG9SqkF9wySj2jvB74Rkv'}]
## test_add_multiple_from_dirname
fake_dir_test2 = 'fake_dir/test2'
- fake_dir_res = [{u'Hash': u'QmNuvmuFeeWWpxjCQwLkHshr8iqhGLWXFzSGzafBeawTTZ',
- u'Name': u'llllg'},
- {u'Hash': u'Qmb1NPqPzdHCMvHRfCkk6TWLcnpGJ71KnafacCMm6TKLcD',
- u'Name': u'fssdf'},
- {u'Hash': u'QmX1dd5DtkgoiYRKaPQPTCtXArUu4jEZ62rJBUcd5WhxAZ',
- u'Name': u''}]
+ fake_dir_res = [{'Name': 'test2/fssdf', 'Bytes': 14},
+ {'Name': 'test2/fssdf', 'Hash': 'Qmb1NPqPzdHCMvHRfCkk6TWLcnpGJ71KnafacCMm6TKLcD'},
+ {'Name': 'test2/llllg', 'Bytes': 9},
+ {'Name': 'test2/llllg', 'Hash': 'QmNuvmuFeeWWpxjCQwLkHshr8iqhGLWXFzSGzafBeawTTZ'},
+ {'Name': 'test2', 'Hash': 'QmX1dd5DtkgoiYRKaPQPTCtXArUu4jEZ62rJBUcd5WhxAZ'}]
## test_add_recursive
fake_dir = 'fake_dir'
- fake_dir_recursive_res = [{u'Hash': u'QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX',
- u'Name': u'fake_dir/fsdfgh'},
- {u'Hash': u'QmYAhvKYu46rh5NcHzeu6Bhc7NG9SqkF9wySj2jvB74Rkv',
- u'Name': u'fake_dir/popoiopiu'},
- {u'Hash': u'QmeMbJSHNCesAh7EeopackUdjutTJznum1Fn7knPm873Fe',
- u'Name': u'fake_dir/test3/ppppoooooooooo'},
- {u'Hash': u'QmRphRr6ULDEj7YnXpLdnxhnPiVjv5RDtGX3er94Ec6v4Q',
- u'Name': u'fake_dir/test3'},
- {u'Hash': u'QmNuvmuFeeWWpxjCQwLkHshr8iqhGLWXFzSGzafBeawTTZ',
- u'Name': u'fake_dir/test2/llllg'},
- {u'Hash': u'Qmb1NPqPzdHCMvHRfCkk6TWLcnpGJ71KnafacCMm6TKLcD',
- u'Name': u'fake_dir/test2/fssdf'},
- {u'Hash': u'QmX1dd5DtkgoiYRKaPQPTCtXArUu4jEZ62rJBUcd5WhxAZ',
- u'Name': u'fake_dir/test2'},
- {u'Hash': u'QmYqqgRahxbZvudnzDu2ZzUS1vFSNEuCrxghM8hgT8uBFY',
- u'Name': u'fake_dir'}]
+ fake_dir_recursive_res = [{'Bytes': 8, 'Name': 'fake_dir/fsdfgh'},
+ {'Hash': 'QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX', 'Name': 'fake_dir/fsdfgh'},
+ {'Bytes': 15, 'Name': 'fake_dir/popoiopiu'},
+ {'Hash': 'QmYAhvKYu46rh5NcHzeu6Bhc7NG9SqkF9wySj2jvB74Rkv', 'Name': 'fake_dir/popoiopiu'},
+ {'Bytes': 14, 'Name': 'fake_dir/test2/fssdf'},
+ {'Hash': 'Qmb1NPqPzdHCMvHRfCkk6TWLcnpGJ71KnafacCMm6TKLcD', 'Name': 'fake_dir/test2/fssdf'},
+ {'Bytes': 9, 'Name': 'fake_dir/test2/llllg'},
+ {'Hash': 'QmNuvmuFeeWWpxjCQwLkHshr8iqhGLWXFzSGzafBeawTTZ', 'Name': 'fake_dir/test2/llllg'},
+ {'Bytes': 8, 'Name': 'fake_dir/test3/ppppoooooooooo'},
+ {'Hash': 'QmeMbJSHNCesAh7EeopackUdjutTJznum1Fn7knPm873Fe', 'Name': 'fake_dir/test3/ppppoooooooooo'},
+ {'Hash': 'QmX1dd5DtkgoiYRKaPQPTCtXArUu4jEZ62rJBUcd5WhxAZ', 'Name': 'fake_dir/test2'},
+ {'Hash': 'QmRphRr6ULDEj7YnXpLdnxhnPiVjv5RDtGX3er94Ec6v4Q', 'Name': 'fake_dir/test3'},
+ {'Hash': 'QmYqqgRahxbZvudnzDu2ZzUS1vFSNEuCrxghM8hgT8uBFY', 'Name': 'fake_dir'}]
+
+ ## test_refs
+ refs_res = [{'Err': '', 'Ref': 'QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX'},
+ {'Err': '', 'Ref': 'QmYAhvKYu46rh5NcHzeu6Bhc7NG9SqkF9wySj2jvB74Rkv'},
+ {'Err': '', 'Ref': 'QmX1dd5DtkgoiYRKaPQPTCtXArUu4jEZ62rJBUcd5WhxAZ'},
+ {'Err': '', 'Ref': 'QmRphRr6ULDEj7YnXpLdnxhnPiVjv5RDtGX3er94Ec6v4Q'}]
def setUp(self):
self._olddir = os.getcwd()
@@ -80,15 +85,28 @@ def tearDown(self):
# TESTS #
#########
+ def test_version(self):
+ expected = ['Repo', 'Commit', 'Version']
+ resp_version = self.api.version()
+ for key in expected:
+ self.assertTrue(key in resp_version)
+
+ def test_id(self):
+ expected = ['PublicKey', 'ProtocolVersion',
+ 'ID', 'AgentVersion', 'Addresses']
+ resp_id = self.api.id()
+ for key in expected:
+ self.assertTrue(key in resp_id)
+
def test_add_single_from_str(self):
res = self.api.add(self.fake_file)
- self.assertEqual(res[u"Hash"], self.fake_lookup[self.fake_file])
-
+ self.assertEqual(res, self.fake_file_only_res)
+
def test_add_single_from_fp(self):
with open(self.fake_file, 'rb') as fp:
res = self.api.add(fp)
- self.assertEqual(res[u"Hash"], self.fake_lookup[self.fake_file])
-
+ self.assertEqual(res, self.fake_file_only_res)
+
def test_add_multiple_from_list(self):
res = self.api.add([self.fake_file, self.fake_file2])
self.assertEqual(res, self.fake_files_res)
@@ -100,7 +118,6 @@ def test_add_multiple_from_dirname(self):
sorted(self.fake_dir_res,
key=lambda x: x['Name']))
-
def test_add_recursive(self):
res = self.api.add(self.fake_dir, recursive=True)
self.assertEqual(sorted(res,
@@ -114,8 +131,171 @@ def test_add_get_pyobject(self):
self.assertEqual(data,
self.api.get_pyobj(res))
+ def test_get_file(self):
+ self.api.add(self.fake_file)
+
+ test_hash = self.fake[0]['Hash']
+
+ self.api.get(test_hash)
+ self.assertIn(test_hash, os.listdir(os.getcwd()))
+
+ os.remove(test_hash)
+ self.assertNotIn(test_hash, os.listdir(os.getcwd()))
+
+ def test_get_dir(self):
+ self.api.add(self.fake_dir, recursive=True)
+
+ test_hash = self.fake[8]['Hash']
+
+ self.api.get(test_hash)
+ self.assertIn(test_hash, os.listdir(os.getcwd()))
+
+ shutil.rmtree(test_hash)
+ self.assertNotIn(test_hash, os.listdir(os.getcwd()))
+
+ def test_get_path(self):
+ self.api.add(self.fake_file)
+
+ test_hash = self.fake[8]['Hash'] + '/fsdfgh'
+
+ self.api.get(test_hash)
+ self.assertIn('fsdfgh', os.listdir(os.getcwd()))
+
+ os.remove('fsdfgh')
+ self.assertNotIn('fsdfgh', os.listdir(os.getcwd()))
+
+ def test_refs(self):
+ self.api.add(self.fake_dir, recursive=True)
+
+ refs = self.api.refs(self.fake[8]['Hash'])
+
+ self.assertEqual(sorted(refs, key=lambda x: x['Ref']),
+ sorted(self.refs_res, key=lambda x: x['Ref']))
+
+ def test_refs_local(self):
+ refs = self.api.refs_local()
+
+ self.assertEqual(sorted(refs[0].keys()), ['Err', 'Ref'])
+
+ def test_cat_single_file_str(self):
+ self.api.add(self.fake_file)
+ res = self.api.cat('QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX')
+ self.assertEqual("dsadsad\n", res)
+
+
+class IpfsApiLogTest(unittest.TestCase):
+
+ def setUp(self):
+ self.api = ipfsApi.Client()
+
+ def test_log_ls_level(self):
+ """
+ Unfortunately there is no way of knowing the logging levels prior
+ to this test. This makes it impossible to guarantee that the logging
+ levels are the same as before the test was run.
+ """
+ # Retrieves the list of logging subsystems for a running daemon.
+ resp_ls = self.api.log_ls()
+ # The response should be a dictionary with only one key ('Strings').
+ self.assertTrue('Strings' in resp_ls)
+
+ # Sets the logging level to 'error' for the first subsystem found.
+ sub = resp_ls['Strings'][0]
+ resp_level = self.api.log_level(sub, 'error')
+ self.assertEqual(resp_level['Message'],
+ "Changed log level of \'%s\' to 'error'\n" % sub)
+
+ def test_log_tail(self):
+ # Gets the response object.
+ tail = self.api.log_tail()
+
+ # Takes the first log received.
+ line = tail.readline()
+ log = json.loads(line.decode())
+
+ # Closes the response object.
+ tail.close()
+
+ # The log should have been parsed into a dictionary object with
+ # various keys depending on the event that occurred.
+ self.assertTrue(type(log) is dict)
+
+
+class IpfsApiPinTest(unittest.TestCase):
+
+ fake_dir_hash = 'QmYqqgRahxbZvudnzDu2ZzUS1vFSNEuCrxghM8hgT8uBFY'
+
+ def setUp(self):
+ self.api = ipfsApi.Client()
+ # Add resources to be pinned.
+ self.resource = self.api.add_str(u'Mary had a little lamb')
+ resp_add = self.api.add('fake_dir', recursive=True)
+ self.fake_dir_hashes = [el['Hash'] for el in resp_add if 'Hash' in el]
+
+ def test_pin_ls_add_rm_single(self):
+ # Get pinned objects at start.
+ pins_begin = self.api.pin_ls()['Keys']
+
+ # Unpin the resource if already pinned.
+ if self.resource in pins_begin.keys():
+ self.api.pin_rm(self.resource)
+
+ # No matter what, the resource should not be pinned at this point.
+ self.assertFalse(self.resource in self.api.pin_ls()['Keys'])
+
+ for option in [True, False]:
+ # Pin the resource.
+ resp_add = self.api.pin_add(self.resource,
+ opts={"recursive":str(option)})
+ pins_after_add = self.api.pin_ls()['Keys']
+ self.assertEqual(resp_add['Pins'], [self.resource])
+ self.assertTrue(self.resource in pins_after_add)
+ self.assertEqual(pins_after_add[self.resource]['Type'] == 'recursive',
+ option)
+
+ # Unpin the resource.
+ resp_rm = self.api.pin_rm(self.resource)
+ pins_after_rm = self.api.pin_ls()['Keys']
+ self.assertEqual(resp_rm['Pins'], [self.resource])
+ self.assertFalse(self.resource in pins_after_rm)
+
+ # Get pinned objects at end.
+ pins_end = self.api.pin_ls()['Keys']
+
+ # Compare pinned items from start to finish of test.
+ self.assertFalse(self.resource in pins_end.keys())
+
+ def test_pin_ls_add_rm_directory(self):
+ # Get pinned objects at start.
+ pins_begin = self.api.pin_ls()['Keys']
+
+ # Remove fake_dir if it had previously been pinned.
+ if self.fake_dir_hash in pins_begin.keys() and \
+ pins_begin[self.fake_dir_hash]['Type'] == 'recursive':
+ self.api.pin_rm(self.fake_dir_hash)
+
+ # Make sure it was removed.
+ pins_after_rm = self.api.pin_ls()['Keys']
+ self.assertFalse(self.fake_dir_hash in pins_after_rm.keys() and \
+ pins_after_rm[self.fake_dir_hash]['Type'] == 'recursive')
+
+ # Add 'fake_dir' recursively.
+ self.api.pin_add(self.fake_dir_hash)
+
+ # Make sure all appear on the list of pinned objects.
+ pins_after_add = self.api.pin_ls()['Keys'].keys()
+ for el in self.fake_dir_hashes:
+ self.assertTrue(el in pins_after_add)
+
+ # Clean up.
+ self.api.pin_rm(self.fake_dir_hash)
+ pins_end = self.api.pin_ls()['Keys'].keys()
+ self.assertFalse(self.fake_dir_hash in pins_end)
+
+
class IpfsApiMFSTest(unittest.TestCase):
-
+
test_files = {
'/test_file1': {
u'Name': u'fake_dir/popoiopiu',
@@ -126,7 +306,7 @@ class IpfsApiMFSTest(unittest.TestCase):
u'Size': 15}
}
}
-
+
def setUp(self):
self.api = ipfsApi.Client()
self._olddir = os.getcwd()
@@ -134,25 +314,189 @@ def setUp(self):
def tearDown(self):
os.chdir(self._olddir)
-
+
def test_write_stat_read_delete(self):
for target, desc in self.test_files.items():
# Create target file
self.api.files_write(target, desc[u'Name'], opts={'create':True})
-
+
# Verify stat information of file
stat = self.api.files_stat(target)
- self.assertEqual(sorted(desc[u'Stat'].items()), sorted(stat.items()))
-
+ self.assertEqual(sorted(desc[u'Stat'].items()),
+ sorted(stat.items()))
+
# Read back (and compare file contents)
with open(desc[u'Name'], 'r') as file:
content = self.api.files_read(target)
self.assertEqual(content, file.read())
-
+
# Delete file
self.api.files_rm(target)
-
-
+
+
+class TestBlockFunctions(unittest.TestCase):
+ def setUp(self):
+ self.api = ipfsApi.Client()
+ self.multihash = 'QmYA2fn8cMbVWo4v95RwcwJVyQsNtnEwHerfWR8UNtEwoE'
+ self.content_size = 248
+
+ def test_block_stat(self):
+ expected_keys = ['Key', 'Size']
+ res = self.api.block_stat(self.multihash)
+ for key in expected_keys:
+ self.assertTrue(key in res)
+
+ def test_block_get(self):
+ self.assertEqual(len(self.api.block_get(self.multihash)), self.content_size)
+
+ def test_block_put(self):
+ path = os.path.join(os.path.dirname(os.path.dirname(__file__)),
+ "functional", "fake_dir", "fsdfgh")
+ expected_block_multihash = 'QmPevo2B1pwvDyuZyJbWVfhwkaGPee3f1kX36wFmqx1yna'
+ expected_keys = ['Key', 'Size']
+ res = self.api.block_put(path)
+ for key in expected_keys:
+ self.assertTrue(key in res)
+ self.assertEqual(res['Key'], expected_block_multihash)
+
+
+class IpfsApiRepoTest(unittest.TestCase):
+
+ def setUp(self):
+ self.api = ipfsApi.Client()
+
+ def test_repo_stat(self):
+ # Verify that the correct key-value pairs are returned
+ stat = self.api.repo_stat()
+ self.assertEqual(sorted(stat.keys()), ['NumObjects', 'RepoPath',
+ 'RepoSize', 'Version'])
+
+ def test_repo_gc(self):
+ # Add and unpin an object to be garbage collected
+ garbage = self.api.add_str('Test String')
+ self.api.pin_rm(garbage)
+
+ # Collect the garbage object with object count before and after
+ orig_objs = self.api.repo_stat()['NumObjects']
+ gc = self.api.repo_gc()
+ cur_objs = self.api.repo_stat()['NumObjects']
+
+ # Verify the garbage object was collected
+ self.assertGreater(orig_objs, cur_objs)
+ keys = [el['Key'] for el in gc]
+ self.assertTrue(garbage in keys)
+
+
+class IpfsApiObjectTest(unittest.TestCase):
+
+ def setUp(self):
+ self.api = ipfsApi.Client()
+ self._olddir = os.getcwd()
+ os.chdir(HERE)
+ # Add a resource to get the stats for.
+ self.resource = self.api.add_str(u'Mary had a little lamb')
+
+ def tearDown(self):
+ os.chdir(self._olddir)
+
+ def test_object_new(self):
+ expected_keys = ['Hash']
+ res = self.api.object_new()
+ for key in expected_keys:
+ self.assertTrue(key in res)
+
+ def test_object_stat(self):
+ expected = ['Hash', 'CumulativeSize', 'DataSize',
+ 'NumLinks', 'LinksSize', 'BlockSize']
+ resp_stat = self.api.object_stat(self.resource)
+ for key in expected:
+ self.assertTrue(key in resp_stat)
+
+ def test_object_put_get(self):
+ # Set paths to test json files
+ path_no_links = os.path.join(os.path.dirname(__file__),
+ "fake_json", "no_links.json")
+ path_links = os.path.join(os.path.dirname(__file__),
+ "fake_json", "links.json")
+
+ # Put the json objects on the DAG
+ no_links = self.api.object_put(path_no_links)
+ links = self.api.object_put(path_links)
+
+ # Verify the correct content was put
+ self.assertEqual(no_links['Hash'], 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V')
+ self.assertEqual(links['Hash'], 'QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm')
+ self.assertEqual(links['Links'][0]['Hash'], 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V')
+
+ # Get the objects from the DAG
+ get_no_links = self.api.object_get('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V')
+ get_links = self.api.object_get('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm')
+
+ # Verify the objects we put have been gotten
+ self.assertEqual(get_no_links['Data'], 'abc')
+ self.assertEqual(get_links['Data'], 'another')
+ self.assertEqual(get_links['Links'][0]['Name'], 'some link')
+
+ def test_object_links(self):
+ # Set paths to test json files
+ path_links = os.path.join(os.path.dirname(__file__),
+ "fake_json", "links.json")
+
+ # Put json object on the DAG and get its links
+ self.api.object_put(path_links)
+ links = self.api.object_links('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm')
+
+ # Verify the correct link has been gotten
+ self.assertEqual(links['Links'][0]['Hash'], 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V')
+
+ def test_object_data(self):
+ # Set paths to test json files
+ path_links = os.path.join(os.path.dirname(__file__),
+ "fake_json", "links.json")
+
+ # Put json objects on the DAG and get its data
+ self.api.object_put(path_links)
+ data = self.api.object_data('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm')
+
+ # Verify the correct bytes have been gotten
+ self.assertEqual(data, 'another')
+
+ def test_object_patch_append_data(self):
+ """Warning, this test depends on the contents of
+ test/functional/fake_dir/fsdfgh
+ """
+ result = self.api.object_patch_append_data(
+ 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n', 'fake_dir/fsdfgh')
+ self.assertEqual(result,
+ {'Hash': 'QmcUsyoGVxWoQgYKgmLaDBGm8J3eHWfchMh3oDUD5FrrtN'})
+
+ def test_object_patch_add_link(self):
+ """Warning, this test depends on the contents of
+ test/functional/fake_dir/fsdfgh
+ """
+ result = self.api.object_patch_add_link(
+ 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n', 'self',
+ 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n')
+ self.assertEqual(result,
+ {'Hash': 'QmbWSr7YXBLcF23VVb7yPvUuogUPn46GD7gXftXC6mmsNM'})
+
+ def test_object_patch_rm_link(self):
+ """Warning, this test depends on the contents of
+ test/functional/fake_dir/fsdfgh
+ """
+ result = self.api.object_patch_rm_link(
+ 'QmbWSr7YXBLcF23VVb7yPvUuogUPn46GD7gXftXC6mmsNM', 'self')
+ self.assertEqual(result,
+ {'Hash': 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n'})
+
+ def test_object_patch_set_data(self):
+ """Warning, this test depends on the contents of
+ test/functional/fake_dir/popoiopiu
+ """
+ result = self.api.object_patch_set_data(
+ 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n', 'fake_dir/popoiopiu')
+ self.assertEqual(result,
+ {'Hash': 'QmV4QR7MCBj5VTi6ddHmXPyjWGzbaKEtX2mx7axA5PA13G'})
if __name__ == "__main__":
unittest.main()
diff --git a/test/unit/test_command.py b/test/unit/test_command.py
index 67c0f4ef..dd774b73 100644
--- a/test/unit/test_command.py
+++ b/test/unit/test_command.py
@@ -1,3 +1,14 @@
+"""Test commands.py.
+
+Classes:
+TestCommands -- test the functionality of the commands
+
+Functions:
+cmd_simple -- defines an endpoint for simple commands
+cmd_with_arg -- defines an endpoint for commands that have arguments
+cmd_with_file -- defines an endpoint for commands that handle files
+"""
+
import unittest
import json
import six
@@ -9,9 +20,18 @@
import ipfsApi.commands
import ipfsApi.exceptions
-
@urlmatch(netloc='localhost:5001', path=r'.*/simple')
def cmd_simple(url, request):
+ """Defines an endpoint for simple commands.
+
+ This endpoint will listen at http://localhost:5001/*/simple for incoming
+ requests and will always respond with a 200 status code and a Message of
+ "okay".
+
+ Keyword arguments:
+ url -- the url of the incoming request
+ request -- the request that is being responded to
+ """
return {
'status_code': 200,
'content': json.dumps({
@@ -22,6 +42,16 @@ def cmd_simple(url, request):
@urlmatch(netloc='localhost:5001', path=r'.*/arg')
def cmd_with_arg(url, request):
+ """Defines an endpoint for commands that have arguments.
+
+ This endpoint will listen at http://localhost:5001/*/arg for incoming
+ requests and will always respond with a 200 status code, a Message of
+ "okay", and a list of Args.
+
+ Keyword arguments:
+ url -- the url of the incoming request
+ request -- the request that is being responded to
+ """
qs = urlparse.parse_qs(url.query)
return {
'status_code': 200,
@@ -34,6 +64,16 @@ def cmd_with_arg(url, request):
@urlmatch(netloc='localhost:5001', path=r'.*/file')
def cmd_with_file(url, request):
+ """Defines an endpoint for commands that handle files.
+
+ This endpoint will listen at http://localhost:5001/*/file for incoming
+ requests and will always respond with a 200 status code, a Message of
+ "okay", and a file.
+
+ Keyword arguments:
+ url -- the url of the incoming request
+ request -- the request that is being responded to
+ """
# request.body is a byte generator
body = []
for b in request.body:
@@ -53,7 +93,23 @@ def cmd_with_file(url, request):
class TestCommands(unittest.TestCase):
+ """Test the functionality of the commands.py classes.
+
+ Public methods:
+ setUp -- create an HTTP client
+ test_simple_command -- test the Command class
+ test_arg_command_no_args -- test the ArgCommand class without a specific
+ number of arguments
+ test_arg_command_with_args -- test the ArgCommand with a specific number
+ of arguments
+ test_arg_command_wrong_num_args -- test that the ArgCommand class fails
+ when given the wrong number of
+ arguments
+ test_file_command_fd -- test a simple FileCommand
+ """
+
def setUp(self):
+ """Prepare tests."""
self._client = ipfsApi.http.HTTPClient(
'localhost',
5001,
@@ -61,21 +117,40 @@ def setUp(self):
'json')
def test_simple_command(self):
+ """Test the Command class."""
with HTTMock(cmd_simple):
cmd = ipfsApi.commands.Command('/simple')
res = cmd.request(self._client)
self.assertEquals(res['Message'], 'okay')
-
- def test_arg_command(self):
+
+ def test_arg_command_no_args(self):
+ """Test the ArgCommand class without a specific number of arguments."""
with HTTMock(cmd_with_arg):
+ # test without arguments
cmd = ipfsApi.commands.ArgCommand('/arg')
res = cmd.request(self._client, 'arg1')
self.assertEquals(res['Arg'][0], 'arg1')
-
+
+ def test_arg_command_with_args(self):
+ """Test the ArgCommand with a specific number of arguments."""
+ with HTTMock(cmd_with_arg):
+ # test with arguments
+ cmd = ipfsApi.commands.ArgCommand('/arg', 2)
+ res = cmd.request(self._client, 'arg1', 'first')
+ self.assertEquals(res['Arg'], ['arg1', 'first'])
+
+ def test_arg_command_wrong_num_args(self):
+ """Test that the ArgCommand class fails when given the wrong number of arguments."""
+ with HTTMock(cmd_with_arg):
+ # test with the wrong number of arguments
+ cmd = ipfsApi.commands.ArgCommand('/arg', 2)
+ self.assertRaises(ipfsApi.exceptions.InvalidArguments, cmd.request, self._client, 'arg1')
+
def test_file_command_fd(self):
+ """Test a simple FileCommand."""
data = 'content\ngoes\nhere'
fd = StringIO(data)
with HTTMock(cmd_with_file):
cmd = ipfsApi.commands.FileCommand('/file')
- res = cmd.request(self._client, fd)
+ res = cmd.request(self._client, (), fd)
self.assertTrue(data in res['Body'])
diff --git a/test/unit/test_encoding.py b/test/unit/test_encoding.py
index efc41a73..49a763df 100644
--- a/test/unit/test_encoding.py
+++ b/test/unit/test_encoding.py
@@ -1,3 +1,9 @@
+"""Defines encoding related classes.
+
+Classes:
+TestEncoding - A class that tests constructs located in the encoding.py module.
+"""
+
import unittest
import json
from httmock import urlmatch, HTTMock
@@ -7,16 +13,31 @@
class TestEncoding(unittest.TestCase):
+ """Unit tests the Encoding class
+
+ Public methods:
+ setUp - create a Json encoder
+ test_json_parse - Asserts parsed key/value json matches expected output
+ test_json_parse_chained - Tests if concatenated string of JSON object is being parsed correctly
+ test_json_parse_chained_newlines - Tests parsing of concatenated string of JSON object containing a new line
+ test_json_encode - Tests serilization of json formatted string to an object
+ test_get_encoder_by_name - Tests the process of obtaining an Encoder object given the named encoding
+ test_get_invalid_encoder - Tests the exception handling given an invalid named encoding
+
+ """
def setUp(self):
+ """create a Json encoder"""
self.encoder = ipfsApi.encoding.Json()
def test_json_parse(self):
+ """Asserts parsed key/value json matches expected output."""
data = {'key': 'value'}
raw = json.dumps(data)
res = self.encoder.parse(raw)
self.assertEqual(res['key'], 'value')
def test_json_parse_chained(self):
+ """Tests if concatenated string of JSON object is being parsed correctly."""
data1 = {'key1': 'value1'}
data2 = {'key2': 'value2'}
res = self.encoder.parse(
@@ -26,6 +47,7 @@ def test_json_parse_chained(self):
self.assertEqual(res[1]['key2'], 'value2')
def test_json_parse_chained_newlines(self):
+ """Tests parsing of concatenated string of JSON object containing a new line."""
data1 = {'key1': 'value1'}
data2 = {'key2': 'value2'}
res = self.encoder.parse(
@@ -35,15 +57,18 @@ def test_json_parse_chained_newlines(self):
self.assertEqual(res[1]['key2'], 'value2')
def test_json_encode(self):
+ """Tests serilization of json formatted string into an object."""
data = {'key': 'value'}
self.assertEqual(
self.encoder.encode(data),
json.dumps(data))
def test_get_encoder_by_name(self):
+ """Tests the process of obtaining an Encoder object given the named encoding."""
encoder = ipfsApi.encoding.get_encoding('json')
self.assertEqual(encoder.name, 'json')
def test_get_invalid_encoder(self):
+ """Tests the exception handling given an invalid named encoding."""
self.assertRaises(ipfsApi.exceptions.EncodingException,
ipfsApi.encoding.get_encoding, 'fake')
diff --git a/test/unit/test_http.py b/test/unit/test_http.py
index 9816f2ec..999b7acb 100644
--- a/test/unit/test_http.py
+++ b/test/unit/test_http.py
@@ -1,6 +1,17 @@
+"""Test cases for http.py.
+
+These tests are designed to mock http responses from the IPFS daemon. They
+are used to determine if the functions in http.py are operating correctly.
+
+Classes:
+TestHttp -- tests the HTTPClient class against mocked daemon responses
+"""
+
import unittest
import json
import requests
+import tarfile
+import os
from httmock import urlmatch, HTTMock
import ipfsApi.http
@@ -9,6 +20,16 @@
@urlmatch(netloc='localhost:5001', path=r'.*/okay')
def return_okay(url, request):
+ """Defines an endpoint for successful http requests.
+
+ This endpoint will listen at http://localhost:5001/*/okay for incoming
+ requests and will always respond with a 200 status code and a Message of
+ "okay".
+
+ Keyword arguments:
+ url -- the url of the incoming request
+ request -- the request that is being responded to
+ """
return {
'status_code': 200,
'content': 'okay'.encode('utf-8'),
@@ -17,6 +38,16 @@ def return_okay(url, request):
@urlmatch(netloc='localhost:5001', path=r'.*/fail')
def return_fail(url, request):
+ """Defines an endpoint for failed http requests.
+
+ This endpoint will listen at http://localhost:5001/*/fail for incoming
+ requests and will always respond with a 500 status code and a Message of
+ "fail".
+
+ Keyword arguments:
+ url -- the url of the incoming request
+ request -- the request that is being responded to
+ """
return {
'status_code': 500,
'content': 'fail'.encode('utf-8'),
@@ -25,6 +56,16 @@ def return_fail(url, request):
@urlmatch(netloc='localhost:5001', path=r'.*/apiokay')
def api_okay(url, request):
+ """Defines an endpoint for successful api requests.
+
+ This endpoint will listen at http://localhost:5001/*/apiokay for incoming
+ requests and will always respond with a 200 status code and a json encoded
+ Message of "okay".
+
+ Keyword arguments:
+ url -- the url of the incoming request
+ request -- the request that is being responded to
+ """
return {
'status_code': 200,
'content': json.dumps({
@@ -34,6 +75,16 @@ def api_okay(url, request):
@urlmatch(netloc='localhost:5001', path=r'.*/apifail')
def api_fail(url, request):
+ """Defines an endpoint for failed api requests.
+
+ This endpoint will listen at http://localhost:5001/*/apifail for incoming
+ requests and will always respond with a 500 status code and a json encoded
+ Message of "Someone set us up the bomb".
+
+ Keyword arguments:
+ url -- the url of the incoming request
+ request -- the request that is being responded to
+ """
return {
'status_code': 500,
'content': json.dumps({
@@ -41,8 +92,47 @@ def api_fail(url, request):
}
+@urlmatch(netloc='localhost:5001', path=r'.*/cat')
+def api_cat(url, request):
+ """Defines an endpoint for a request to cat a file.
+
+ This endpoint will listen at http://localhost:5001/*/cat for incoming
+ requests and will always respond with a 200 status code and a json encoded
+ Message of "do not parse".
+
+ Keyword arguments:
+ url -- the url of the incoming request
+ request -- the request that is being responded to
+ """
+ return {
+ 'status_code': 200,
+ 'content': json.dumps({
+ 'Message': 'do not parse'}).encode('utf-8')
+ }
+
+
class TestHttp(unittest.TestCase):
+ """A series of tests to test the functionality of http.py.
+
+ Public methods:
+ setUp -- creates an instance of HTTPClient to test against
+ test_successful_request -- tests that a successful http request returns the
+ proper message
+ test_generic_failure -- tests that a failed http request raises an HTTPError
+ test_api_failure -- tests that an api failure raises an ipfsApiError
+ test_stream -- tests that the stream flag being set returns the raw response
+ test_cat -- tests that paths ending in /cat are not parsed
+ test_default_decoder -- tests that the default encoding is set to json
+ test_explicit_decoder -- tests that an explicit decoder is handled correctly
+ test_unsupported_decoder -- tests that unsupported encodings raise an
+ EncodingException
+ test_failed_decoder -- tests that a failed encoding parse returns response
+ text
+ test_failed_download -- tests that a failed download raises an HTTPError
+ test_session -- tests that a session is established and then closed
+ """
def setUp(self):
+ """Creates an instance of HTTPClient to test against."""
self.client = ipfsApi.http.HTTPClient(
'localhost',
5001,
@@ -50,28 +140,76 @@ def setUp(self):
'json')
def test_successful_request(self):
+ """Tests that a successful http request returns the proper message."""
with HTTMock(return_okay):
res = self.client.request('/okay')
self.assertEqual(res, 'okay')
def test_generic_failure(self):
+ """Tests that a failed http request raises an HTTPError."""
with HTTMock(return_fail):
self.assertRaises(requests.HTTPError,
self.client.request, '/fail')
def test_api_failure(self):
+ """Tests that an api failure raises an ispfApiError."""
with HTTMock(api_fail):
self.assertRaises(ipfsApi.exceptions.ipfsApiError,
self.client.request, '/apifail')
- def test_session(self):
+ def test_stream(self):
+ """Tests that the stream flag being set returns the raw response."""
with HTTMock(return_okay):
- with self.client.session():
- res = self.client.request('/okay')
- self.assertEqual(res, 'okay')
+ res = self.client.request('/okay', stream=True)
+ self.assertEqual(res.read(4), b'okay')
+
+ def test_cat(self):
+ """Tests that paths ending in /cat are not parsed."""
+ with HTTMock(api_cat):
+ res = self.client.request('/cat')
+ self.assertEquals(res, '{"Message": "do not parse"}')
+
+ def test_default_decoder(self):
+ """Tests that the default encoding is set to json."""
+ with HTTMock(api_okay):
+ res = self.client.request('/apiokay')
+ self.assertEquals(res['Message'], 'okay')
def test_explicit_decoder(self):
+ """Tests that an explicit decoder is handled correctly."""
with HTTMock(api_okay):
res = self.client.request('/apiokay',
decoder='json')
self.assertEquals(res['Message'], 'okay')
+
+ def test_unsupported_decoder(self):
+ """Tests that unsupported encodings raise an EncodingException."""
+ with HTTMock(api_fail):
+ self.assertRaises(ipfsApi.exceptions.EncodingException,
+ self.client.request, '/apifail', decoder='xyz')
+
+ def test_failed_decoder(self):
+ """Tests that a failed encoding parse returns response text."""
+ with HTTMock(return_okay):
+ res = self.client.request('/okay', decoder='json')
+ self.assertEquals(res, 'okay')
+
+ """TODO: Test successful download
+ Need to determine correct way to mock an http request that returns a tar
+ file. tarfile.open expects the tar to be in the form of an octal escaped
+ string, but internal functionality keeps resulting in hexidecimal.
+ """
+
+ def test_failed_download(self):
+ """Tests that a failed download raises an HTTPError."""
+ with HTTMock(return_fail):
+ self.assertRaises(requests.HTTPError,
+ self.client.download, '/fail')
+
+ def test_session(self):
+ """Tests that a session is established and then closed."""
+ with HTTMock(return_okay):
+ with self.client.session():
+ res = self.client.request('/okay')
+ self.assertEqual(res, 'okay')
+ self.assertEqual(self.client._session, None)
diff --git a/test/unit/test_multipart.py b/test/unit/test_multipart.py
new file mode 100644
index 00000000..b0d70f9d
--- /dev/null
+++ b/test/unit/test_multipart.py
@@ -0,0 +1,441 @@
+"""Test the file multipart.py
+
+Classes:
+TestContentHelpers -- test the three content-header helper functions
+TestBodyGenerator -- test the BodyGenerator helper class
+TestBufferedGenerator -- test the BufferedGenerator helper class
+TestFileStream -- test the FileStream generator class
+TestDirectoryStream -- test the DirectoryStream generator class
+TestTextStream -- test the TextStream generator class
+TestStreamHelpers -- unimplemented
+"""
+
+import unittest
+import os
+import re
+import six
+import ipfsApi.multipart
+
+
+class TestContentHelpers(unittest.TestCase):
+ """Tests the functionality of the three content-oriented helper functions.
+
+ Public methods:
+ test_content_disposition -- check the content_disposition defaults
+ test_content_disposition_with_type -- check that content_disposition
+ handles given disposition type
+ test_content_type -- check the content_type guessing functionality
+ test_multipart_content_type -- check multipart_content_type functionality
+ """
+
+ def test_content_disposition(self):
+ """Check that content_disposition defaults properly"""
+ expected = {'Content-Disposition': 'file; filename="example.txt"'}
+ actual = ipfsApi.multipart.content_disposition('example.txt')
+ self.assertEquals(expected, actual)
+
+ def test_content_disposition_with_type(self):
+ """Check that content_disposition handles given disposition type"""
+ expected = {'Content-Disposition':
+ 'attachment; filename="example.txt"'}
+ actual = ipfsApi.multipart.content_disposition('example.txt',
+ 'attachment')
+ self.assertEquals(expected, actual)
+
+ def test_content_type(self):
+ """Check the content_type guessing functionality."""
+ actual = ipfsApi.multipart.content_type('example.txt')
+ expected = {'Content-Type': 'text/plain'}
+ self.assertEquals(expected, actual)
+
+ actual = ipfsApi.multipart.content_type('example.jpeg')
+ expected = {'Content-Type': 'image/jpeg'}
+ self.assertEquals(expected, actual)
+
+ actual = ipfsApi.multipart.content_type('example')
+ expected = {'Content-Type': 'application/octet-stream'}
+ self.assertEquals(expected, actual)
+
+ def test_multipart_content_type(self):
+ """Check test_multipart_content_type functionality."""
+ actual = ipfsApi.multipart.multipart_content_type(
+ '8K5rNKlLQVyreRNncxOTeg')
+ expected = {'Content-Type':
+ 'multipart/mixed; boundary="8K5rNKlLQVyreRNncxOTeg"'}
+ self.assertEquals(expected, actual)
+
+ actual = ipfsApi.multipart.multipart_content_type(
+ '8K5rNKlLQVyreRNncxOTeg', 'alt')
+ expected = {'Content-Type':
+ 'multipart/alt; boundary="8K5rNKlLQVyreRNncxOTeg"'}
+ self.assertEquals(expected, actual)
+
+
+class TestBodyGenerator(unittest.TestCase):
+ """Tests the functionality of the BodyGenerator class.
+
+ Public methods:
+ test_init_defaults -- tests the constructor and its behavior with only the
+ required argument
+ test_init_with_all -- tests the constructor when all arguments are set
+ explicitly
+ test_write_headers -- tests write_headers function against example output
+ test_open -- tests open function against example output
+ test_file_open -- test file_open function against example output
+ test_file_close -- test file_close function against example output
+ test_close -- test close function against example output
+ """
+
+ def test_init_defaults(self):
+ """Test the __init__ function for default parameter values."""
+ name = "test_name"
+ expected_disposition = 'file; filename="test_name"'
+ expected_type = r'multipart/mixed; boundary="\S*"'
+ expected_boundary_pattern = r'\S*'
+ generator = ipfsApi.multipart.BodyGenerator(name)
+ self.assertEqual(generator.headers['Content-Disposition'],
+ expected_disposition)
+ if six.PY2:
+ if not re.match(expected_type, generator.headers['Content-Type']):
+ self.fail('Content-Type does not match "%s", got %s'
+ % (expected_type, generator.headers['Content-Type']))
+ if not re.match(expected_boundary_pattern, generator.boundary):
+ self.fail('Boundary does not match "%s"'
+ % expected_boundary_pattern)
+ else:
+ self.assertRegexpMatches(generator.headers['Content-Type'],
+ expected_type)
+ self.assertRegexpMatches(generator.boundary,
+ expected_boundary_pattern)
+
+ def test_init_with_all(self):
+ """Test the __init__ function for explicitly set parameter values."""
+ name = "test_name"
+ disptype = "test_disp"
+ subtype = "test_subtype"
+ boundary = "test_boundary"
+ generator = ipfsApi.multipart.BodyGenerator(name, disptype,
+ subtype, boundary)
+ self.assertEqual(generator.headers, {
+ 'Content-Disposition': 'test_disp; filename="test_name"',
+ 'Content-Type':
+ 'multipart/test_subtype; boundary="test_boundary"'})
+ self.assertEqual(generator.boundary, boundary)
+
+ def test_write_headers(self):
+ """Test the write_headers function against sample output."""
+ expected = 'Content-Disposition: test_disp; filename="test_name"' \
+ + '\r\nContent-Type: multipart/test_subtype; ' \
+ + 'boundary="test_boundary"\r\n\r\n'
+ name = "test_name"
+ disptype = "test_disp"
+ subtype = "test_subtype"
+ boundary = "test_boundary"
+ generator = ipfsApi.multipart.BodyGenerator(name, disptype,
+ subtype, boundary)
+ headers = ""
+ for chunk in generator.write_headers():
+ if type(chunk) is not str:
+ chunk = chunk.decode()
+ headers += chunk
+ self.assertEqual(headers, expected)
+
+ def test_open(self):
+ """Test the open function against sample output."""
+ expected = '--test_boundary\r\n'
+ name = "test_name"
+ disptype = "test_disp"
+ subtype = "test_subtype"
+ boundary = "test_boundary"
+ generator = ipfsApi.multipart.BodyGenerator(name, disptype,
+ subtype, boundary)
+ headers = ""
+ for chunk in generator.open():
+ if type(chunk) is not str:
+ chunk = chunk.decode()
+ headers += chunk
+ self.assertEqual(headers, expected)
+
+ def test_file_open(self):
+ """Test the file_open function against sample output."""
+ expected = '--test_boundary\r\nContent-Disposition: file; '\
+ + 'filename="test_name"\r\nContent-Type: '\
+ + 'application/octet-stream\r\n\r\n'
+ name = "test_name"
+ disptype = "test_disp"
+ subtype = "test_subtype"
+ boundary = "test_boundary"
+ generator = ipfsApi.multipart.BodyGenerator(name, disptype,
+ subtype, boundary)
+ headers = ""
+ for chunk in generator.file_open(name):
+ if type(chunk) is not str:
+ chunk = chunk.decode()
+ headers += chunk
+ self.assertEqual(headers, expected)
+
+ def test_file_close(self):
+ """Test the file_close function against sample output."""
+ expected = '\r\n'
+ name = "test_name"
+ disptype = "test_disp"
+ subtype = "test_subtype"
+ boundary = "test_boundary"
+ generator = ipfsApi.multipart.BodyGenerator(name, disptype,
+ subtype, boundary)
+ headers = ""
+ for chunk in generator.file_close():
+ if type(chunk) is not str:
+ chunk = chunk.decode()
+ headers += chunk
+ self.assertEqual(headers, expected)
+
+ def test_close(self):
+ """Test the close function against sample output."""
+ expected = '--test_boundary--\r\n'
+ name = "test_name"
+ disptype = "test_disp"
+ subtype = "test_subtype"
+ boundary = "test_boundary"
+ generator = ipfsApi.multipart.BodyGenerator(name, disptype,
+ subtype, boundary)
+ headers = ""
+ for chunk in generator.close():
+ if type(chunk) is not str:
+ chunk = chunk.decode()
+ headers += chunk
+ self.assertEqual(headers, expected)
+
+
+def _generate_test_chunks(chunk_size, iterations):
+    """Generate `iterations` strings, each `chunk_size` characters long."""
+    for _ in range(iterations):
+        yield "z" * chunk_size
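+# Example output: list(_generate_test_chunks(2, 3)) == ["zz", "zz", "zz"]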
+
+
+class TestBufferedGenerator(unittest.TestCase):
+ """Test the BufferedGenerator class.
+
+ Public methods:
+ test_init -- test the default arguments of the constructor
+ test_file_chunks -- test the file_chunks function against example output
+ test_gen_chunks -- test the gen_chunks function against example output
+ test_body -- verify that body is unimplemented
+ test_close -- test the close function against example output
+ """
+
+ def test_init(self):
+ """Test the __init__ function for default parameter values."""
+ name = "test_name"
+ instance = ipfsApi.multipart.BufferedGenerator(name)
+ self.assertEqual(instance.name, name)
+
+ def test_file_chunks(self):
+ """Test the file_chunks function against example output.
+
+        Warning: this test depends on the contents of
+        test/functional/fake_dir/fsdfgh; changing that file could
+        break the test.
+ """
+ name = "fsdfgh"
+ chunk_size = 2
+ path = os.path.join(os.path.dirname(os.path.dirname(__file__)),
+ "functional", "fake_dir", "fsdfgh")
+ instance = ipfsApi.multipart.BufferedGenerator(name, chunk_size)
+ expected = 'dsadsad\n'
+ output = ""
+ open_file = open(path)
+ for emitted in instance.file_chunks(open_file):
+ if type(emitted) is not str:
+ emitted = emitted.decode()
+ if six.PY2:
+ if len(emitted) > chunk_size:
+ self.fail("Generator emitted chunk above chunk size.")
+ else:
+ self.assertLessEqual(len(emitted), chunk_size)
+ output += emitted
+ open_file.close()
+ self.assertEqual(output, expected)
+
+ def test_gen_chunks(self):
+ """Test the gen_chunks function against example output."""
+ name = "fsdfgh"
+ chunk_size = 2
+ instance = ipfsApi.multipart.BufferedGenerator(name, chunk_size)
+ for i in instance.gen_chunks(_generate_test_chunks(5, 5)):
+ if six.PY2:
+ if len(i) > chunk_size:
+ self.fail("Generator emitted chunk above chunk size.")
+ else:
+ self.assertLessEqual(len(i), chunk_size)
+
+ def test_body(self):
+ """Ensure that body throws a NotImplemented exception."""
+ instance = ipfsApi.multipart.BufferedGenerator("name")
+ self.assertRaises(NotImplementedError, instance.body)
+
+ def test_close(self):
+ """Test the close function against example output."""
+ name = "fsdfgh"
+ chunk_size = 2
+ instance = ipfsApi.multipart.BufferedGenerator(name, chunk_size)
+        expected = r'--\S+--\r\n'
+ actual = ''
+ for i in instance.close():
+ if type(i) is not str and type(i) is not memoryview:
+ i = i.decode()
+ elif six.PY3 and type(i) is memoryview:
+ i = i.tobytes().decode()
+ if six.PY2:
+ if len(i) > chunk_size:
+ self.fail("Generator emitted chunk above chunk size.")
+ else:
+ self.assertLessEqual(len(i), chunk_size)
+ actual += i
+
+ if six.PY2:
+ if not re.match(expected, actual):
+ self.fail('Envelope end malformed. Expected %s, got %s'
+ % (expected, actual))
+ else:
+ self.assertRegexpMatches(actual, expected)
+
+
+class TestFileStream(unittest.TestCase):
+ """Test the FileStream class
+
+ Public methods:
+ test_body -- check file stream body for proper structure
+ """
+
+ def test_body(self):
+ """Test the body function against expected output.
+
+        Warning: this test depends on the contents of
+        test/functional/fake_dir; changing that directory or its
+        contents could break the test.
+ """
+ # Get OS-agnostic path to test files
+ path = os.path.join(os.path.dirname(os.path.dirname(__file__)),
+ "functional", "fake_dir")
+ # Collect absolute paths to all test files
+ filenames_list = []
+ for (dirpath, _, filenames) in os.walk(path):
+ temp_list = [os.path.join(dirpath, name) for name in filenames]
+ filenames_list.extend(temp_list)
+ # Convert absolute paths to relative
+ relative_paths_list = [os.path.relpath(cur_path, os.getcwd())
+ for cur_path in filenames_list]
+
+ instance = ipfsApi.multipart.FileStream(relative_paths_list)
+
+ expected = "(--\S+\r\nContent-Disposition: file; filename=\"\S+\""\
+ + "\r\nContent-Type: application/\S+\r\n"\
+ + "\r\n(.|\n)*\r\n)+--\S+--\r\n"
+ actual = ""
+ for i in instance.body():
+ if type(i) is not str and type(i) is not memoryview:
+ i = i.decode()
+ elif six.PY3 and type(i) is memoryview:
+ i = i.tobytes().decode()
+ actual += i
+ if six.PY2:
+ if not re.match(expected, actual):
+ self.fail('Body malformed. Expected %s\n\nbut got:\n\n %s'
+ % (expected, actual))
+ else:
+ self.assertRegexpMatches(actual, expected)
+
+
+class TestDirectoryStream(unittest.TestCase):
+ """Test the DirectoryStream class.
+
+ Public methods:
+ test_body -- check that the HTTP body for the directory is correct
+ """
+
+ def test_body(self):
+ """Check the multipart HTTP body for the streamed directory."""
+ # Get OS-agnostic path to test files
+ path = os.path.join(os.path.dirname(os.path.dirname(__file__)),
+ "functional", "fake_dir")
+ instance = ipfsApi.multipart.DirectoryStream(path)
+ expected = b"^(--\S+\r\nContent-Disposition: form-data; name=\"\S+\"; filename=\"\S+\""\
+ + b"\r\nContent-Type: application/\S+\r\n\r\n(.|\n)*"\
+ + b"\r\n)+--\S+--\r\n$"
+ actual = instance.body()
+ """
+ for i in instance.body():
+ if type(i) is not str and type(i) is not memoryview:
+ i = i.decode()
+ elif six.PY3 and type(i) is memoryview:
+ i = i.tobytes().decode()
+ actual += i
+ """
+ if six.PY2:
+ if not re.match(expected, actual):
+ self.fail('Body malformed. Expected %s\n\nbut got:\n\n %s'
+ % (expected, actual))
+ else:
+ self.assertRegexpMatches(actual, expected)
+
+
+class TestTextStream(unittest.TestCase):
+ """Test the TextStream class.
+
+ Public methods:
+ test_body -- check that the HTTP body for the text is correct
+ """
+
+ def test_body(self):
+ """Check the multipart HTTP body for the streamed directory."""
+ # Get OS-agnostic path to test files
+ text = "Here is some text for this test."
+ instance = ipfsApi.multipart.TextStream(text)
+ expected = "(--\S+\r\nContent-Disposition: file; filename=\"\S+\""\
+ + "\r\nContent-Type: application/\S+\r\n"\
+ + "\r\n(.|\n)*\r\n)+--\S+--\r\n"
+ actual = ""
+ for i in instance.body():
+ if type(i) is not str and type(i) is not memoryview:
+ i = i.decode()
+ elif six.PY3 and type(i) is memoryview:
+ i = i.tobytes().decode()
+ actual += i
+ if six.PY2:
+ if not re.match(expected, actual):
+ self.fail('Body malformed. Expected %s\n\nbut got:\n\n %s'
+ % (expected, actual))
+ else:
+ self.assertRegexpMatches(actual, expected)
+
+
+class TestStreamHelpers(unittest.TestCase):
+ """Test stream_files, stream_directory, and stream_text.
+
+    TODO: These functions are thin wrappers around other,
+    already-tested classes.  One possible approach is to assert that
+    they delegate to those classes; see the skipped sketch below.
+
+    Public methods:
+ test_stream_files -- unimplemented
+ test_stream_directory -- unimplemented
+ test_stream_text -- unimplemented
+ """
+
+ def test_stream_files(self):
+ """Test the stream_files function."""
+ pass
+
+ def test_stream_directory(self):
+ """Test the stream_directory function."""
+ pass
+
+ def test_stream_text(self):
+ """Test the stream_text function."""
+ pass
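+
+    # The sketch below is one possible answer to the TODO above: patch the
+    # underlying stream class and assert that the wrapper delegates to it.
+    # It assumes stream_text() constructs a TextStream from its argument,
+    # which should be verified against ipfsApi/multipart.py; the test is
+    # skipped so that unverified assumption cannot fail the suite.
+    @unittest.skip("sketch only -- verify that stream_text() delegates "
+                   "to TextStream before enabling")
+    def test_stream_text_delegation_sketch(self):
+        """Sketch: check that stream_text delegates to TextStream."""
+        try:
+            from unittest import mock  # Python 3.3+
+        except ImportError:
+            import mock                # Python 2: pip install mock
+        with mock.patch('ipfsApi.multipart.TextStream') as stream_cls:
+            ipfsApi.multipart.stream_text("some text")
+            stream_cls.assert_called_once_with("some text")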
diff --git a/test/unit/test_utils.py b/test/unit/test_utils.py
new file mode 100644
index 00000000..3edff770
--- /dev/null
+++ b/test/unit/test_utils.py
@@ -0,0 +1,209 @@
+"""Tox unit tests for utils.py.
+
+Classes:
+TestUtils -- defines a set of unit tests for utils.py
+"""
+
+import io
+import json
+import os
+import pickle
+import unittest
+
+import ipfsApi.utils as utils
+
+
+class TestUtils(unittest.TestCase):
+ """Contains unit tests for utils.py.
+
+ Public methods:
+ test_make_string_buffer -- tests utils.make_string_buffer()
+ test_encode_json -- tests utils.encode_json()
+ test_parse_json -- tests utils.parse_json()
+ test_make_json_buffer -- tests utils.make_json_buffer()
+ test_encode_pyobj -- tests utils.encode_pyobj()
+ test_parse_pyobj -- tests utils.parse_pyobj()
+ test_make_pyobj_buffer -- tests utils.make_pyobj_buffer()
+ test_guess_mimetype -- tests utils.guess_mimetype()
+ test_ls_dir -- tests utils.ls_dir()
+    test_clean_file_opened -- tests utils.clean_file() with a StringIO object
+ test_clean_file_unopened -- tests utils.clean_file() with a filepath
+ test_clean_files_single -- tests utils.clean_files() with a filepath
+ test_clean_files_list -- tests utils.clean_files() with a list of files
+ test_file_size -- tests utils.file_size()
+ test_return_field_init -- tests utils.return_field.__init__()
+ test_return_field_call -- tests utils.return_field.__call__()
+ """
+
+    def test_make_string_buffer(self):
+ """Tests utils.make_string_buffer()."""
+ raw = u'Mary had a little lamb'
+ buff = utils.make_string_buffer(raw)
+ self.assertEqual(hasattr(buff, 'read'), True)
+ self.assertEqual(hasattr(buff, 'write'), True)
+ self.assertEqual(buff.read(), raw)
+ # Closing buffer after test assertions.
+ buff.close()
+
+ def test_encode_json(self):
+ """Tests utils.encode_json()."""
+ data = {'key': 'value'}
+ self.assertEqual(utils.encode_json(data), json.dumps(data))
+
+ def test_parse_json(self):
+ """Tests utils.parse_json()."""
+ data = {'key': 'value'}
+ raw = json.dumps(data)
+ res = utils.parse_json(raw)
+ self.assertEqual(res['key'], 'value')
+
+ def test_make_json_buffer(self):
+ """Tests utils.make_json_buffer()."""
+ data = {'key': 'value'}
+ raw = json.dumps(data)
+ buff = utils.make_json_buffer(data)
+ self.assertEqual(hasattr(buff, 'read'), True)
+ self.assertEqual(hasattr(buff, 'write'), True)
+ self.assertEqual(buff.read(), raw)
+ # Closing buffer after test assertions.
+ buff.close()
+
+ def test_encode_pyobj(self):
+ """Tests utils.encode_pyobj().
+
+ In Python 2, data appears to be encoded differently based on the
+ context from which pickle.dumps() is called. For this reason we are
+ encoding and then decoding data to ensure that the decoded values are
+ equivalent after the original data has been serialized.
+ """
+ data = {'key': 'value'}
+ utils_res = pickle.loads(utils.encode_pyobj(data))
+ pickle_res = pickle.loads(pickle.dumps(data))
+ self.assertEqual(utils_res, pickle_res)
+
+ def test_parse_pyobj(self):
+ """Tests utils.parse_pyobj()."""
+ data = {'key': 'value'}
+ raw = pickle.dumps(data)
+ res = utils.parse_pyobj(raw)
+ self.assertEqual(res['key'], 'value')
+
+ def test_make_pyobj_buffer(self):
+ """Tests utils.make_pyobj_buffer().
+
+ In Python 2, data appears to be encoded differently based on the
+ context from which pickle.dumps() is called. For this reason we are
+ encoding and then decoding data to ensure that the decoded values are
+ equivalent after the original data has been serialized.
+ """
+ data = {'key': 'value'}
+ raw = pickle.dumps(data)
+ buff = utils.make_pyobj_buffer(data)
+ self.assertEqual(hasattr(buff, 'read'), True)
+ self.assertEqual(hasattr(buff, 'write'), True)
+ utils_res = pickle.loads(buff.read())
+ pickle_res = pickle.loads(raw)
+ self.assertEqual(utils_res, pickle_res)
+ # Closing buffer after test assertions.
+ buff.close()
+
+ def test_guess_mimetype(self):
+ """Tests utils.guess_mimetype().
+
+ Guesses the mimetype of the requirements.txt file
+ located in the project's root directory.
+ """
+ path = os.path.join(os.path.dirname(__file__),
+ "..", "..", "requirements.txt")
+ self.assertEqual(utils.guess_mimetype(path),"text/plain")
+
+ def test_ls_dir(self):
+ """Tests utils.ls_dir()
+
+ This test is dependent on the contents of the directory 'fake_dir'
+ located in 'test/functional' not being modified.
+ """
+ path = os.path.join(os.path.dirname(__file__),
+ "..", "functional", "fake_dir")
+ dirs = ['test2', 'test3']
+ files = ['fsdfgh', 'popoiopiu']
+ contents = (files, dirs)
+ self.assertEqual(utils.ls_dir(path), contents)
+
+ def test_clean_file_opened(self):
+ """Tests utils.clean_file() with a stringIO object."""
+ string_io = io.StringIO(u'Mary had a little lamb')
+ f, opened = utils.clean_file(string_io)
+ self.assertEqual(hasattr(f, 'read'), True)
+ self.assertEqual(opened, False)
+        # Closing StringIO after test assertions.
+ f.close()
+
+ def test_clean_file_unopened(self):
+ """Tests utils.clean_file() with a filepath.
+
+ This test relies on the openability of the file 'fsdfgh'
+ located in 'test/functional/fake_dir'.
+ """
+ path = os.path.join(os.path.dirname(__file__),
+ "..", "functional", "fake_dir", "fsdfgh")
+ f, opened = utils.clean_file(path)
+ self.assertEqual(hasattr(f, 'read'), True)
+ self.assertEqual(opened, True)
+ # Closing file after test assertions.
+ f.close()
+
+ def test_clean_files_single(self):
+ """Tests utils.clean_files() with a singular filepath.
+
+ This test relies on the openability of the file 'fsdfgh'
+ located in 'test/functional/fake_dir'.
+ """
+ path = os.path.join(os.path.dirname(__file__),
+ "..", "functional", "fake_dir", "fsdfgh")
+ gen = utils.clean_files(path)
+ for tup in gen:
+ self.assertEqual(hasattr(tup[0], 'read'), True)
+ self.assertEqual(tup[1], True)
+ # Closing file after test assertions.
+ tup[0].close()
+
+ def test_clean_files_list(self):
+ """Tests utils.clean_files() with a list of files/stringIO objects."""
+ path = os.path.join(os.path.dirname(__file__),
+ "..", "functional", "fake_dir", "fsdfgh")
+ string_io = io.StringIO(u'Mary had a little lamb')
+ files = [path, string_io]
+ gen = utils.clean_files(files)
+ for i in range(0, 2):
+ tup = next(gen)
+ self.assertEqual(hasattr(tup[0], 'read'), True)
+ if i == 0:
+ self.assertEqual(tup[1], True)
+ else:
+ self.assertEqual(tup[1], False)
+        # Closing files/StringIO objects after test assertions.
+ tup[0].close()
+
+ def test_file_size(self):
+ """Tests utils.file_size().
+
+ This test relies on the content size of the file 'fsdfgh'
+ located in 'test/functional/fake_dir' not being modified.
+ """
+ path = os.path.join(os.path.dirname(__file__),
+ "..", "functional", "fake_dir", "fsdfgh")
+ self.assertEqual(utils.file_size(path), 8)
+
+ def test_return_field_init(self):
+ """Tests utils.return_field.__init__()."""
+ return_field = utils.return_field('Hash')
+ self.assertEqual(return_field.field, 'Hash')
+
+ def test_return_field_call(self):
+ """Tests utils.return_field.__call__()."""
+ @utils.return_field('Hash')
+ def wrapper(string, *args, **kwargs):
+            resp = {'Hash': u'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab',
+                    'string': string}
+ return resp
+ self.assertEqual(wrapper('Mary had a little lamb'),
+ u'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab')
diff --git a/tox.ini b/tox.ini
index 266b2fcb..1d1dea09 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,10 +1,10 @@
[tox]
minversion = 1.6
envlist =
- py26,
py27,
py33,
py34,
+ py35,
[testenv]
deps = -r{toxinidir}/requirements.txt
@@ -21,4 +21,4 @@ commands =
[flake8]
ignore = E222,E221,F403
-exclude = .venv,.git,.tox,dist,doc,*egg,build,tools,test,docs
+exclude = .venv,.git,.tox,dist,doc,*egg,build,tools,test,docs,*__init__.py