Skip to content
This repository has been archived by the owner on Aug 31, 2022. It is now read-only.

Commit

Permalink
Merge pull request #23 from xmunoz/update_metadata
Browse files Browse the repository at this point in the history
Fixes #22
  • Loading branch information
xmunoz authored Oct 4, 2016
2 parents 9db28ee + 68ee923 commit 5830a18
Show file tree
Hide file tree
Showing 5 changed files with 87 additions and 24 deletions.
68 changes: 45 additions & 23 deletions README
Original file line number Diff line number Diff line change
Expand Up @@ -41,14 +41,15 @@ Table of Contents
- `client <#client>`__
- ```get`` <#getdataset_identifier-content_typejson-kwargs>`__
- ```get_metadata`` <#get_metadatadataset_identifier-content_typejson>`__
- ```update_metadata`` <#update_metadatadataset_identifier-update_fields-content_typejson>`__
- ```download_attachments`` <#download_attachmentsdataset_identifier-content_typejson-download_dirsodapy_downloads>`__
- ```create`` <#createname-kwargs>`__
- ```publish`` <#publishdataset_identifier-content_typejson>`__
- ```set_permission`` <#set_permissiondataset_identifier-permissionprivate-content_typejson>`__
- ```upsert`` <#upsertdataset_identifier-payload-content_typejson>`__
- ```replace`` <#replacedataset_identifier-payload-content_typejson>`__
- ```createNonDataFile``` <#createnondatafilepayload-files-content_typejson>`__
- ```replaceNonDataFile``` <#replacenondatafiledataset_identifier-payload-files-content_typejson>`__
- ```create_non_data_file`` <#create_non_data_fileparams-file_obj>`__
- ```replace_non_data_file`` <#replace_non_data_filedataset_identifier-params-file_obj>`__
- ```delete`` <#deletedataset_identifier-row_idnone-content_typejson>`__
- ```close`` <#close>`__

Expand All @@ -65,8 +66,10 @@ Import the library and set up a connection to get started.
``username`` and ``password`` are only required for creating or
modifying data. An application token isn't strictly required (can be
``None``), but queries executed from a client without an application
token will be subjected to strict throttling limits. To create a bare-
bones client:
token will be subjected to strict throttling limits. To create a
bare-bones client:

::

>>> client = Socrata("sandbox.demo.socrata.com", None)

Expand Down Expand Up @@ -101,6 +104,23 @@ Retrieve the metadata associated with a particular dataset.
>>> client.get_metadata("nimj-3ivp")
{"newBackend": false, "licenseId": "CC0_10", "publicationDate": 1436655117, "viewLastModified": 1451289003, "owner": {"roleName": "administrator", "rights": [], "displayName": "Brett", "id": "cdqe-xcn5", "screenName": "Brett"}, "query": {}, "id": "songs", "createdAt": 1398014181, "category": "Public Safety", "publicationAppendEnabled": true, "publicationStage": "published", "rowsUpdatedBy": "cdqe-xcn5", "publicationGroup": 1552205, "displayType": "table", "state": "normal", "attributionLink": "http://foo.bar.com", "tableId": 3523378, "columns": [], "metadata": {"rdfSubject": "0", "renderTypeConfig": {"visible": {"table": true}}, "availableDisplayTypes": ["table", "fatrow", "page"], "attachments": ... }}

update\_metadata(dataset\_identifier, update\_fields, content\_type="json")
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Update the metadata for a particular dataset. ``update_fields`` should
be a dictionary containing only the metadata keys that you wish to
overwrite.

Note: Invalid payloads to this method could corrupt the dataset or
visualization. See `this
comment <https://github.com/xmunoz/sodapy/issues/22#issuecomment-249971379>`__
for more information.

::

>>> client.update_metadata("nimj-3ivp", {"attributionLink": "https://anothertest.com"})
{"newBackend": false, "licenseId": "CC0_10", "publicationDate": 1436655117, "viewLastModified": 1451289003, "owner": {"roleName": "administrator", "rights": [], "displayName": "Brett", "id": "cdqe-xcn5", "screenName": "Brett"}, "query": {}, "id": "songs", "createdAt": 1398014181, "category": "Public Safety", "publicationAppendEnabled": true, "publicationStage": "published", "rowsUpdatedBy": "cdqe-xcn5", "publicationGroup": 1552205, "displayType": "table", "state": "normal", "attributionLink": "https://anothertest.com", "tableId": 3523378, "columns": [], "metadata": {"rdfSubject": "0", "renderTypeConfig": {"visible": {"table": true}}, "availableDisplayTypes": ["table", "fatrow", "page"], "attachments": ... }}

download\_attachments(dataset\_identifier, content\_type="json", download\_dir="~/sodapy\_downloads")
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Expand Down Expand Up @@ -194,40 +214,42 @@ Similar in usage to ``upsert``, but overwrites existing data.
>>> client.replace("eb9n-hr43", data)
{u'Errors': 0, u'Rows Deleted': 0, u'Rows Updated': 0, u'By SID': 0, u'Rows Created': 12, u'By RowIdentifier': 0}


createNonDataFile(params, file)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
create\_non\_data\_file(params, file\_obj)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Creates a new file-based dataset with the name provided in the files
tuple. A valid file input would be:
tuple. A valid file input would be:

::
files = (
{'file': ("gtfs2", open('myfile.zip', 'rb'))}
)

files = (
{'file': ("gtfs2", open('myfile.zip', 'rb'))}
)

::
>>> with open(nondatafile_path, 'rb') as fin:

>>> with open(nondatafile_path, 'rb') as f:
>>> files = (
>>> {'file': ("nondatafile.zip", fin)}
>>> {'file': ("nondatafile.zip", f)}
>>> )
>>> response = client.createNonDataFile(params, file)
>>> response = client.create_non_data_file(params, files)

replaceNonDataFile(dataset\_identifier, params, file)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
replace\_non\_data\_file(dataset\_identifier, params, file\_obj)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Same as createNonDataFile, but replaces a file that already exists in a
file-based dataset.
Same as create\_non\_data\_file, but replaces a file that already exists
in a file-based dataset.

WARNING: a table-based dataset cannot be replaced by a file-based dataset. Use createNonDataFile in order to replace.
Note: a table-based dataset cannot be replaced by a file-based dataset.
Use create\_non\_data\_file in order to replace.

::
>>> with open(nondatafile_path, 'rb') as fin:

>>> with open(nondatafile_path, 'rb') as f:
>>> files = (
>>> {'file': ("nondatafile.zip", fin)}
>>> {'file': ("nondatafile.zip", f)}
>>> )
>>> response = client.replaceNonDataFile(DATASET_IDENTIFIER, {}, file)

>>> response = client.replace_non_data_file(DATASET_IDENTIFIER, {}, files)

delete(dataset\_identifier, row\_id=None, content\_type="json")
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Expand Down
12 changes: 11 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ The [official Socrata API docs](http://dev.socrata.com/) provide thorough docume
- [client](#client)
- [`get`](#getdataset_identifier-content_typejson-kwargs)
- [`get_metadata`](#get_metadatadataset_identifier-content_typejson)
- [`update_metadata`](#update_metadatadataset_identifier-update_fields-content_typejson)
- [`download_attachments`](#download_attachmentsdataset_identifier-content_typejson-download_dirsodapy_downloads)
- [`create`](#createname-kwargs)
- [`publish`](#publishdataset_identifier-content_typejson)
Expand Down Expand Up @@ -68,6 +69,15 @@ Retrieve the metadata associated with a particular dataset.
>>> client.get_metadata("nimj-3ivp")
{"newBackend": false, "licenseId": "CC0_10", "publicationDate": 1436655117, "viewLastModified": 1451289003, "owner": {"roleName": "administrator", "rights": [], "displayName": "Brett", "id": "cdqe-xcn5", "screenName": "Brett"}, "query": {}, "id": "songs", "createdAt": 1398014181, "category": "Public Safety", "publicationAppendEnabled": true, "publicationStage": "published", "rowsUpdatedBy": "cdqe-xcn5", "publicationGroup": 1552205, "displayType": "table", "state": "normal", "attributionLink": "http://foo.bar.com", "tableId": 3523378, "columns": [], "metadata": {"rdfSubject": "0", "renderTypeConfig": {"visible": {"table": true}}, "availableDisplayTypes": ["table", "fatrow", "page"], "attachments": ... }}

### update_metadata(dataset_identifier, update_fields, content_type="json")

Update the metadata for a particular dataset. `update_fields` should be a dictionary containing only the metadata keys that you wish to overwrite.

Note: Invalid payloads to this method could corrupt the dataset or visualization. See [this comment](https://github.com/xmunoz/sodapy/issues/22#issuecomment-249971379) for more information.

>>> client.update_metadata("nimj-3ivp", {"attributionLink": "https://anothertest.com"})
{"newBackend": false, "licenseId": "CC0_10", "publicationDate": 1436655117, "viewLastModified": 1451289003, "owner": {"roleName": "administrator", "rights": [], "displayName": "Brett", "id": "cdqe-xcn5", "screenName": "Brett"}, "query": {}, "id": "songs", "createdAt": 1398014181, "category": "Public Safety", "publicationAppendEnabled": true, "publicationStage": "published", "rowsUpdatedBy": "cdqe-xcn5", "publicationGroup": 1552205, "displayType": "table", "state": "normal", "attributionLink": "https://anothertest.com", "tableId": 3523378, "columns": [], "metadata": {"rdfSubject": "0", "renderTypeConfig": {"visible": {"table": true}}, "availableDisplayTypes": ["table", "fatrow", "page"], "attachments": ... }}

### download_attachments(dataset_identifier, content_type="json", download_dir="~/sodapy_downloads")

Download all attachments associated with a dataset.
Expand Down Expand Up @@ -159,7 +169,7 @@ files = (
Same as create_non_data_file, but replaces a file that already exists in a
file-based dataset.

WARNING: a table-based dataset cannot be replaced by a file-based dataset. Use create_non_data_file in order to replace.
Note: a table-based dataset cannot be replaced by a file-based dataset. Use create_non_data_file in order to replace.

>>> with open(nondatafile_path, 'rb') as f:
>>> files = (
Expand Down
11 changes: 11 additions & 0 deletions sodapy/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,17 @@ def get_metadata(self, dataset_identifier, content_type="json"):
resource = _format_old_api_request(dataid=dataset_identifier, content_type=content_type)
return self._perform_request("get", resource)

def update_metadata(self, dataset_identifier, update_fields, content_type="json"):
'''
Update the metadata for a particular dataset.

update_fields is a dictionary of {metadata key: new value} pairs.
Each listed key is fully replaced with its new value, and the
complete metadata (with the updates applied) is returned.

NOTE(review): an invalid payload can corrupt the dataset or its
visualization (see sodapy issue #22) — callers should pass only
known-good metadata keys.
'''
resource = _format_old_api_request(dataid=dataset_identifier, content_type=content_type)
return self._perform_update("put", resource, update_fields)

def download_attachments(self, dataset_identifier, content_type="json",
download_dir="~/sodapy_downloads"):
'''
Expand Down
1 change: 1 addition & 0 deletions tests/test_data/update_song_metadata.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"newBackend": false, "licenseId": "CC0_10", "publicationDate": 1436655117, "viewLastModified": 1451289003, "owner": {"roleName": "administrator", "rights": [], "displayName": "Brett", "id": "cdqe-xcn5", "screenName": "Brett"}, "query": {}, "id": "songs", "createdAt": 1398014181, "category": "Education", "publicationAppendEnabled": true, "publicationStage": "published", "rowsUpdatedBy": "cdqe-xcn5", "publicationGroup": 1552205, "displayType": "table", "state": "normal", "attributionLink": "https://testing.updates", "tableId": 3523378, "columns": [], "metadata": {"rdfSubject": "0", "renderTypeConfig": {"visible": {"table": true}}, "availableDisplayTypes": ["table", "fatrow", "page"], "attachments": []}}
19 changes: 19 additions & 0 deletions tests/test_soda.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,25 @@ def test_get_metadata():

client.close()

def test_update_metadata():
    """update_metadata should send a PUT and return the updated metadata dict."""
    # Wire a mocked transport adapter into the client so no real HTTP occurs.
    adapter = requests_mock.Adapter()
    mock_adapter = {"prefix": PREFIX, "adapter": adapter}
    client = Socrata(DOMAIN, APPTOKEN, session_adapter=mock_adapter)

    # Canned response fixture for the old-style metadata endpoint.
    setup_old_api_mock(adapter, "PUT", "update_song_metadata.txt", 200)
    updates = {"category": "Education", "attributionLink": "https://testing.updates"}

    result = client.update_metadata(DATASET_IDENTIFIER, updates)

    # The returned metadata should reflect every field we overwrote.
    assert isinstance(result, dict)
    for field in ("category", "attributionLink"):
        assert result.get(field) == updates[field]

    client.close()

def test_upsert_exception():
mock_adapter = {}
mock_adapter["prefix"] = PREFIX
Expand Down

0 comments on commit 5830a18

Please sign in to comment.