Skip to content

Commit

Permalink
MNT #590 json_export json_import
Browse files Browse the repository at this point in the history
  • Loading branch information
prjemian committed Nov 28, 2021
1 parent 5d07dab commit 6284691
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 111 deletions.
2 changes: 2 additions & 0 deletions CHANGES.rst
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,8 @@ Deprecations

* Utilities
* ``device_read2table``
* ``json_export``
* ``json_import``
* ``listdevice_1_5_2``
* ``listruns_v1_4``
* ``object_explorer``
Expand Down
111 changes: 0 additions & 111 deletions apstools/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,8 +81,6 @@
~getStreamValues
~apstools._utils.profile_support.ipython_profile_name
~itemizer
~json_export
~json_import
~apstools._utils.device_info.listdevice
~listobjects
~apstools._utils.list_plans.listplans
Expand Down Expand Up @@ -2225,115 +2223,6 @@ def print_snapshot_list(db, printing=True, **search_criteria):
return t


def json_export(headers, filename, zipfilename=None):
    """
    DEPRECATED: Use *databroker-pack* package instead.

    Write a list of headers (from databroker) to a file.

    PARAMETERS

    headers
        *[headers]* or ``databroker._core.Results`` object :
        list of databroker headers as returned from
        ``db(...search criteria...)``
    filename
        *str* :
        name of file into which to write JSON
    zipfilename
        *str* or ``None`` :
        name of ZIP file container of ``filename``
        (if ``None``, do not ZIP ``filename``)

    .. note:: If writing to a ZIP file, the data file is
       *only* written into the ZIP file.

    EXAMPLE::

        from databroker import Broker
        db = Broker.named("mongodb_config")
        headers = db(plan_name="count", since="2019-04-01")
        json_export(
            headers,
            "data.json",
            zipfilename="bluesky_data.zip")

    EXAMPLE: READ THE ZIP FILE:

    using :func:`~json_import`::

        datasets = json_import("data.json", zipfilename="bluesky_data.zip")

    EXAMPLE: READ THE JSON TEXT FILE

    using :func:`~json_import`::

        datasets = json_import("data.json")
    """
    # BUG FIX: the original warning named json_import() here; this is json_export().
    warnings.warn(
        "DEPRECATED: json_export() will be removed"
        " in a future release. Instead, use *databroker-pack* package.",
        DeprecationWarning,
    )
    # Each header expands to its full document stream (start, descriptor,
    # event, stop, ...) so the export is self-contained.
    datasets = [list(h.documents()) for h in headers]
    # NumpyEncoder (defined elsewhere in this module) serializes numpy types.
    buf = json.dumps(datasets, cls=NumpyEncoder, indent=2)

    if zipfilename is None:
        with open(filename, "w") as fp:
            fp.write(buf)
    else:
        # The JSON text is written only inside the ZIP archive, never to disk
        # as a standalone file.
        with zipfile.ZipFile(zipfilename, "w", allowZip64=True) as fp:
            fp.writestr(filename, buf, compress_type=zipfile.ZIP_LZMA)


def json_import(filename, zipfilename=None):
    """
    DEPRECATED: Use *databroker-pack* package instead.

    Read the file exported by :func:`~json_export()`.

    RETURNS

    datasets :
        *list of documents* :
        list of
        `documents <https://blueskyproject.io/bluesky/documents.html/>`_,
        such as returned by
        ``[list(h.documents()) for h in db]``

        See:
        https://blueskyproject.io/databroker/generated/databroker.Header.documents.html

    EXAMPLE

    Insert the datasets into the databroker ``db``::

        def insert_docs(db, datasets):
            for i, h in enumerate(datasets):
                print(f"{i+1}/{len(datasets)} : {len(h)} documents")
                for k, doc in h:
                    db.insert(k, doc)
    """
    warnings.warn(
        "DEPRECATED: json_import() will be removed"
        " in a future release. Instead, use *databroker-pack* package.",
        DeprecationWarning,
    )
    # Obtain the raw JSON text from either a plain file or a ZIP member,
    # then decode it once on the shared exit path.
    if zipfilename is None:
        with open(filename, "r") as file_obj:
            text = file_obj.read()
    else:
        with zipfile.ZipFile(zipfilename, "r") as archive:
            text = archive.read(filename).decode("utf-8")
    return json.loads(text)


def redefine_motor_position(motor, new_position):
"""set EPICS motor record's user coordinate to ``new_position``"""
yield from bps.mv(motor.set_use_switch, 1)
Expand Down

0 comments on commit 6284691

Please sign in to comment.