diff --git a/README.md b/README.md
index f033d05c..92f11df6 100644
--- a/README.md
+++ b/README.md
@@ -47,15 +47,8 @@ configuration is handled by the ``typhon.config`` module. The default file
location is ``~/.typhonrc`` but can be changed using the ``TYPHONRC``
environment variable.
-It is also possible to set environment variables in the same-named
-section of the configuration file, e.g.:
-```
-[environment]
-ARTS_BUILD_PATH: /path/to/arts/build/
-```
-
## Documentation
-A daily build of the documentation is accessible
+A recent build of the documentation is accessible
[online](http://radiativetransfer.org/misc/typhon/doc-trunk).
Kindly note that bleeding edge features might not be covered.
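For reference, the configuration file is a plain INI file that can be read with Python's standard library alone. A minimal sketch (standard ``configparser`` only, not typhon's actual implementation):

```python
import configparser
import os

# The TYPHONRC environment variable overrides the default location.
rcfile = os.environ.get("TYPHONRC", os.path.expanduser("~/.typhonrc"))

config = configparser.ConfigParser()
config.read(rcfile)  # silently yields an empty config if the file is absent

# Options are grouped into sections such as the [environment] section
# that typhon <= 0.8.0 evaluated (its documentation is removed above).
for section in config.sections():
    print(section, dict(config[section]))
```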
diff --git a/doc/modules.rst b/doc/modules.rst
index 0e051fbc..0c9f06dc 100644
--- a/doc/modules.rst
+++ b/doc/modules.rst
@@ -4,21 +4,11 @@ Reference
ARTS
----
-Data types and File I/O
-```````````````````````
.. toctree::
:maxdepth: 1
typhon.arts
-ARTS Interface
-``````````````
-
-.. toctree::
- :maxdepth: 1
-
- typhon.arts.workspace
-
Cloud mask
----------
diff --git a/doc/typhon.arts.rst b/doc/typhon.arts.rst
index 7471f819..3a911001 100644
--- a/doc/typhon.arts.rst
+++ b/doc/typhon.arts.rst
@@ -1,149 +1,7 @@
arts
====
-.. automodule:: typhon.arts
-
-.. currentmodule:: typhon.arts
-
-.. autosummary::
- :toctree: generated
-
- run_arts
-
-arts.catalogues
-===============
-
-.. automodule:: typhon.arts.catalogues
-
-.. currentmodule:: typhon.arts.catalogues
-
-.. autosummary::
- :toctree: generated
-
- ArrayOfLineRecord
- CIARecord
- GasAbsLookup
- LineMixingRecord
- QuantumIdentifier
- QuantumNumberRecord
- QuantumNumbers
- Sparse
- SpeciesAuxData
- SpeciesTag
-
-arts.covariancematrix
-=====================
-
-.. automodule:: typhon.arts.covariancematrix
-
-.. currentmodule:: typhon.arts.covariancematrix
-
-.. autosummary::
- :toctree: generated
-
- Block
- CovarianceMatrix
- plot_covariance_matrix
-
-arts.griddedfield
-=================
-.. automodule:: typhon.arts.griddedfield
-
-.. currentmodule:: typhon.arts.griddedfield
-
-.. autosummary::
- :toctree: generated
-
- GriddedField1
- GriddedField2
- GriddedField3
- GriddedField4
- GriddedField5
- GriddedField6
- griddedfield_from_netcdf
- griddedfield_from_xarray
-
-arts.internals
-===============
-
-.. automodule:: typhon.arts.internals
-
-.. currentmodule:: typhon.arts.internals
-
-.. autosummary::
- :toctree: generated
-
- LineMixing
- ARTSCAT5
- Rational
- PartitionFunctions
- PressureBroadening
-
-arts.retrieval
-==============
-
-.. automodule:: typhon.arts.retrieval
-
-.. currentmodule:: typhon.arts.retrieval
-
-.. autosummary::
- :toctree: generated
-
- RetrievalQuantity
-
-arts.scattering
-===============
-
-.. automodule:: typhon.arts.scattering
-
-.. currentmodule:: typhon.arts.scattering
-
-.. autosummary::
- :toctree: generated
-
- SingleScatteringData
- SpectralSingleScatteringData
- ScatteringMetaData
-
-arts.sensor
-===========
-
-.. automodule:: typhon.arts.sensor
-
-.. currentmodule:: typhon.arts.sensor
-
-.. autosummary::
- :toctree: generated
-
- get_f_backend_rel_width
- get_f_backend_const_width
-
-
-arts.xml
-========
-
-.. automodule:: typhon.arts.xml
-
-.. currentmodule:: typhon.arts.xml
-
-.. autosummary::
- :toctree: generated
-
- load
- load_directory
- load_indexed
- save
- make_binary
- make_directory_binary
-
-arts.xsec
-=========
-
-.. automodule:: typhon.arts.xsec
-
-.. currentmodule:: typhon.arts.xsec
-
-.. autosummary::
- :toctree: generated
-
- XsecRecord
+The arts module has moved. Its functionality has been migrated to the PyARTS
+package which is available as part of the
+`ARTS distribution <http://radiativetransfer.org/>`_.
+On Linux, it can be installed with :code:`pip install pyarts`.
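For readers migrating from ``typhon.arts``, a minimal sketch of the replacement workflow, assuming ``pyarts.xml`` mirrors the ``load``/``save`` interface that ``typhon.arts.xml`` provided (verify the names against the PyARTS documentation; the file names are placeholders):

```python
import pyarts

# was: from typhon.arts import xml; xml.load(...)
data = pyarts.xml.load("input.xml")
pyarts.xml.save(data, "output.xml")
```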
diff --git a/doc/typhon.arts.workspace.rst b/doc/typhon.arts.workspace.rst
deleted file mode 100644
index 7439834e..00000000
--- a/doc/typhon.arts.workspace.rst
+++ /dev/null
@@ -1,56 +0,0 @@
-arts.workspace
-==============
-
-.. automodule:: typhon.arts.workspace
-.. currentmodule:: typhon.arts.workspace
-
-.. autosummary::
- :toctree: generated
-
- arts_agenda
- Include
- Workspace
-
-arts.workspace.agendas
-======================
-
-.. automodule:: typhon.arts.workspace.agendas
-.. currentmodule:: typhon.arts.workspace.agendas
-
-.. autosummary::
- :toctree: generated
-
- Agenda
-
-arts.workspace.api
-==================
-
-.. automodule:: typhon.arts.workspace.api
-.. currentmodule:: typhon.arts.workspace.api
-
-.. autosummary::
- :toctree: generated
-
-arts.workspace.methods
-======================
-
-.. automodule:: typhon.arts.workspace.methods
-.. currentmodule:: typhon.arts.workspace.methods
-
-.. autosummary::
- :toctree: generated
-
- WorkspaceMethod
- iter
-
-arts.workspace.variables
-========================
-
-.. automodule:: typhon.arts.workspace.variables
-.. currentmodule:: typhon.arts.workspace.variables
-
-.. autosummary::
- :toctree: generated
-
- WorkspaceVariable
-
diff --git a/doc/typhon.plots.rst b/doc/typhon.plots.rst
index bc165089..38a77b12 100644
--- a/doc/typhon.plots.rst
+++ b/doc/typhon.plots.rst
@@ -27,7 +27,6 @@ plots
HectoPascalFormatter
HectoPascalLogFormatter
label_axes
- plot_arts_lookup
plot_bitfield
plot_distribution_as_percentiles
profile_p
diff --git a/typhon/__init__.py b/typhon/__init__.py
index d67d8746..74560a6b 100644
--- a/typhon/__init__.py
+++ b/typhon/__init__.py
@@ -2,7 +2,6 @@
import logging
from os.path import dirname, join
-from . import arts # noqa
from . import cloudmask # noqa
from . import config # noqa
from . import constants # noqa
diff --git a/typhon/arts/__init__.py b/typhon/arts/__init__.py
index f6c33712..2d795980 100644
--- a/typhon/arts/__init__.py
+++ b/typhon/arts/__init__.py
@@ -1,10 +1,3 @@
-# -*- coding: utf-8 -*-
-
-"""This module contains functions to interact with ARTS.
-"""
-
-from typhon.arts import sensor # noqa
-from typhon.arts import xml # noqa
-from typhon.arts.common import * # noqa
-
-__all__ = [s for s in dir() if not s.startswith('_')]
+raise ModuleNotFoundError(
+ "The ARTS submodule has been migrated to PyARTS in Typhon version >0.8.0\n"
+ "Either install Typhon version 0.8.0 or switch to the PyARTS package")
diff --git a/typhon/arts/catalogues.py b/typhon/arts/catalogues.py
deleted file mode 100644
index eec053ad..00000000
--- a/typhon/arts/catalogues.py
+++ /dev/null
@@ -1,1496 +0,0 @@
-"""Implementation of classes to handle various catalogue information. """
-import re
-
-import numpy as np
-import scipy.sparse
-
-__all__ = ['ArrayOfLineRecord',
- 'CIARecord',
- 'GasAbsLookup',
- 'LineMixingRecord',
- 'QuantumIdentifier',
- 'QuantumNumberRecord',
- 'QuantumNumbers',
- 'Sparse',
- 'SpeciesAuxData',
- 'SpeciesTag',
- 'PropagationMatrix',
- 'StokesVector',
- 'Ppath',
- 'GridPos',
- ]
-
-
-class GridPos:
- """Representation of ARTS GridPos"""
-
- def __init__(self, ind=None, n1=None, n2=None):
- self.ind = ind # Index
- self.n1 = n1 # Numeric
- self.n2 = n2 # Numeric
-
- @classmethod
- def from_xml(cls, xmlelement):
- """Loads GridPos object from an existing file.
- """
- obj = cls()
-
- obj.ind = int(xmlelement[0].value())
- obj.n1 = xmlelement[1].value()
- obj.n2 = xmlelement[2].value()
-
- return obj
-
- def write_xml(self, xmlwriter, attr=None):
- """Write GridPos object to an ARTS XML file.
- """
- if attr is None:
- attr = {}
-
- xmlwriter.open_tag("GridPos", attr)
- xmlwriter.write_xml(self.ind, {"name": "OriginalGridIndexBelowInterpolationPoint"})
- xmlwriter.write_xml(self.n1, {"name": "FractionalDistanceToNextPoint_1"})
- xmlwriter.write_xml(self.n2, {"name": "FractionalDistanceToNextPoint_2"})
- xmlwriter.close_tag()
-
- def __repr__(self):
- return "GridPos {}: n1={}; n2={}".format(self.ind, self.n1, self.n2)
-
- def __eq__(self, other):
- return self.ind == other.ind and self.n1 == other.n1 and self.n2 == other.n2
-
- def __ne__(self, other):
- return not (self==other)
-
-
-class Ppath:
- """Represents the Ppath variable in ARTS"""
-
- def __init__(self):
- self.dim = None # Index
- self.np = None # Index
- self.constant = None # Numeric
- self.background = None # String
- self.start_pos = None # Vector
- self.start_los = None # Vector
- self.start_lstep = None # Numeric
- self.pos = None # Matrix
- self.los = None # Matrix
- self.r = None # Vector
- self.lstep = None # Vector
- self.end_pos = None # Vector
- self.end_los = None # Vector
- self.end_lstep = None # Vector
- self.nreal = None # Vector
- self.ngroup = None # Vector
- self.gp_p = None # ArrayOfGridPos
- self.gp_lat = None # ArrayOfGridPos
- self.gp_lon = None # ArrayOfGridPos
-
- def __repr__(self):
- return "Ppath of {} steps in {}D Atmosphere".format(self.np, self.dim)
-
- @classmethod
- def from_xml(cls, xmlelement):
- """Loads Ppath object from an existing file.
- """
- obj = cls()
-
- obj.dim = int(xmlelement[0].value())
- obj.np = int(xmlelement[1].value())
- obj.constant = xmlelement[2].value()
- obj.background = xmlelement[3].value()
- obj.start_pos = xmlelement[4].value()
- obj.start_los = xmlelement[5].value()
- obj.start_lstep = xmlelement[6].value()
- obj.pos = xmlelement[7].value()
- obj.los = xmlelement[8].value()
- obj.r = xmlelement[9].value()
- obj.lstep = xmlelement[10].value()
- obj.end_pos = xmlelement[11].value()
- obj.end_los = xmlelement[12].value()
- obj.end_lstep = xmlelement[13].value()
- obj.nreal = xmlelement[14].value()
- obj.ngroup = xmlelement[15].value()
- obj.gp_p = xmlelement[16].value()
- obj.gp_lat = xmlelement[17].value()
- obj.gp_lon = xmlelement[18].value()
- return obj
-
- def write_xml(self, xmlwriter, attr=None):
- """Write Ppath object to an ARTS XML file.
- """
- if attr is None:
- attr = {}
-
- xmlwriter.open_tag("Ppath", attr)
- xmlwriter.write_xml(self.dim, {"name": "AtmosphericDimensionality"})
- xmlwriter.write_xml(self.np, {"name": "NumberOfPositionInPropagationPath"})
- xmlwriter.write_xml(self.constant, {"name": "PropagationPathConstant"})
- xmlwriter.write_xml(self.background, {"name": "RadiativeBackground"})
- xmlwriter.write_xml(self.start_pos, {"name": "StartPositionOfPropagationPath"})
- xmlwriter.write_xml(self.start_los, {"name": "StartLOSOfPropagationPath"})
- xmlwriter.write_xml(self.start_lstep, {"name": "StartLstepOfPropagationPath"})
- xmlwriter.write_xml(self.pos, {"name": "PropagationPathPointPositions"})
- xmlwriter.write_xml(self.los, {"name": "LineOfSight"})
- xmlwriter.write_xml(self.r, {"name": "PropagationPathPointRadii"})
- xmlwriter.write_xml(self.lstep, {"name": "PropagationPathPositionLength"})
- xmlwriter.write_xml(self.end_pos, {"name": "EndPositionOfPropagationPath"})
- xmlwriter.write_xml(self.end_los, {"name": "EndLOSOfPropagationPath"})
- xmlwriter.write_xml(self.end_lstep, {"name": "EndLstepPropagationPath"})
- xmlwriter.write_xml(self.nreal, {"name": "RefractiveIndexRealPart"})
- xmlwriter.write_xml(self.ngroup, {"name": "GroupRefractiveIndex"})
- xmlwriter.write_xml(self.gp_p, {"name": "PressureGridIndexPosition"}, arraytype="GridPos")
- xmlwriter.write_xml(self.gp_lat, {"name": "LatitudeGridIndexPosition"}, arraytype="GridPos")
- xmlwriter.write_xml(self.gp_lon, {"name": "LongitudeGridIndexPosition"}, arraytype="GridPos")
- xmlwriter.close_tag()
-
- def alt_lat_lon_za_aa(self):
- alt = self.pos[:, 0]
- lat = self.pos[:, 1] if self.pos.shape[1] > 1 else np.zeros_like(alt)
- lon = self.pos[:, 2] if self.pos.shape[1] > 2 else np.zeros_like(alt)
-
- za = self.los[:, 0]
- aa = self.los[:, 1] if self.los.shape[1] > 1 else np.zeros_like(za)
-
- return alt, lat, lon, za, aa
-
-
-class ArrayOfLineRecord:
- """Represents an :arts:`ArrayOfLineRecord` object."""
-
- def __init__(self, data=None, version=None):
- self.data = data
- self.version = version
-
- def __repr__(self):
- if len(self.data) > 1:
- return "ArrayOfLineRecord. " + self.version + ". " + \
- str(len(self.data)) + " lines."
- elif len(self.data) == 1:
- if '@' in self.data[0]:
- return "ArrayOfLineRecord. " + self.version + ". 1 line."
- return "ArrayOfLineRecord. " + self.version + ". No lines."
-
- def __getitem__(self, index):
- return self.data[index]
-
- def __len__(self):
- return len(self.data)
-
- def as_ARTSCAT5(self):
- """Returns manipulable ARTSCAT5 class of this linerecord array
- """
- assert self.version == 'ARTSCAT-5', "Only for ARTSCAT-5 data"
- return ARTSCAT5(self)
-
- @property
- def version(self):
- """ArrayOfRecord version number."""
- return self._version
-
- @property
- def data(self):
- """List of strings representing line records."""
- return self._data
-
- @version.setter
- def version(self, version):
- if version is None:
- self._version = None
- return
-
- if isinstance(version, str):
- self._version = version
- else:
- raise TypeError('version has to be String.')
-
- @data.setter
- def data(self, data):
- self._data = return_if_arts_type(data, 'ArrayOfString')
-
- @classmethod
- def from_xml(cls, xmlelement):
- """Loads an ArrayOfLineRecord object from an existing file.
- """
- obj = cls()
- obj.version = xmlelement.attrib['version']
- obj.data = xmlelement.text.strip().split('\n')
- return obj
-
- def write_xml(self, xmlwriter, attr=None):
- """Write an ArrayOfLineRecord object to an ARTS XML file.
- """
- if attr is None:
- attr = {}
-
- attr['version'] = self.version
- attr['nelem'] = len(self.data)
-
- xmlwriter.open_tag("ArrayOfLineRecord", attr)
- xmlwriter.write('\n'.join(self.data) + '\n')
- xmlwriter.close_tag()
-
-
-class CIARecord:
- """Represents a CIARecord object.
-
- See online ARTS documentation for object details.
-
- """
-
- def __init__(self, molecule1=None, molecule2=None, data=None):
- self.molecule1 = molecule1
- self.molecule2 = molecule2
- self.data = data
-
- @property
- def molecule1(self):
- """Name of the first molecule."""
- return self._molecule1
-
- @property
- def molecule2(self):
- """Name of the second molecule."""
- return self._molecule2
-
- @property
- def data(self):
- """Actual data stored in (list of) GriddedField2 objects."""
- return self._data
-
- @molecule1.setter
- def molecule1(self, molecule1):
- self._molecule1 = return_if_arts_type(molecule1, 'String')
-
- @molecule2.setter
- def molecule2(self, molecule2):
- self._molecule2 = return_if_arts_type(molecule2, 'String')
-
- @data.setter
- def data(self, data):
- self._data = return_if_arts_type(data, 'ArrayOfGriddedField2')
-
- def __repr__(self):
- return self._molecule1 + "-CIA-" + self.molecule2 + " " + \
- str(self.data)
-
- @classmethod
- def from_xml(cls, xmlelement):
- """Loads a CIARecord object from an existing file.
- """
-
- obj = cls()
- obj.molecule1 = xmlelement.attrib['molecule1']
- obj.molecule2 = xmlelement.attrib['molecule2']
- obj.data = xmlelement[0].value()
-
- return obj
-
- def write_xml(self, xmlwriter, attr=None):
- """Write a CIARecord object to an ARTS XML file.
- """
- if attr is None:
- attr = {}
-
- attr['molecule1'] = self.molecule1
- attr['molecule2'] = self.molecule2
-
- xmlwriter.open_tag("CIARecord", attr)
- xmlwriter.write_xml(self.data)
- xmlwriter.close_tag()
-
-
-# TODO(LKL): consider splitting SpeciesAuxData into separate classes for each
-# version. SpeciesAuxData could be used as wrapper class.
-class SpeciesAuxData:
- """Represents a SpeciesAuxData object.
-
- See online ARTS documentation for object details.
-
- """
-
- def __init__(self, data, version, nparam=None):
- self.version = version
- self.nparam = nparam
- self.data = data
-
- def __repr__(self):
- return "SpeciesAuxData Version " + str(self.version) + ' ' + \
- 'for ' + str(len(self.species())) + ' species'
-
- @property
- def data(self):
- return self._data
-
- @data.setter
- def data(self, data):
- self._data = data
- if self.version == 1:
- self._data_dict = {}
- self._keys = {}
- for ii in range(len(data)):
- iso_data = data[ii]
- tmp = iso_data.split()
- self._keys[tmp[1]] = ii
- self._data_dict[tmp[1]] = float(tmp[2])
- elif self.version == 2:
- self._data_dict = {}
- self._keys = {}
- for ii in range(len(data)):
- tmp = data[ii]
- self._keys[tmp[0]] = ii
- self._data_dict[tmp[0]] = [tmp[1], tmp[2]]
-
- def __getitem__(self, key):
- return self._data_dict[key]
-
- def __setitem__(self, key, val):
- self._data_dict[key] = val
- if self.version == 1:
- self._data[(self._keys[key])] = '@ ' + key + ' ' + str(val)
- elif self.version == 2:
- self._data[(self._keys[key])] = val
-
- def __contains__(self, key):
- return key in self._data_dict
-
- def species(self):
- return list(self._data_dict.keys())
-
- @classmethod
- def from_xml(cls, xmlelement):
- """Loads a SpeciesAuxData object from an existing file.
- """
-
- version = int(xmlelement.attrib['version'])
-
- if version == 1:
- nparam = int(xmlelement.attrib['nparam'])
- data = [s for s in xmlelement.text.split('\n') if s != '']
- elif version == 2:
- nparam = None
- data = []
- sub_list = []
- for n, elem in enumerate(xmlelement):
- if n != 0 and n % 3 == 0:
- data.append(sub_list)
- sub_list = []
- sub_list.append(elem.value())
- data.append(sub_list)
- else:
- raise Exception(
- "Unknown SpeciesAuxData version {}.".format(version))
-
- obj = cls(data, version, nparam=nparam)
- return obj
-
- def write_xml(self, xmlwriter, attr=None):
- """Write a ScatterinMetaData object to an ARTS XML file.
- """
- if attr is None:
- attr = {}
-
- attr['version'] = self.version
- attr['nelem'] = len(self.data)
-
- if self.version == 1:
- attr['nparam'] = self.nparam
-
- xmlwriter.open_tag("SpeciesAuxData", attr)
- xmlwriter.write('\n'.join(self.data) + '\n')
- xmlwriter.close_tag()
- elif self.version == 2:
- xmlwriter.open_tag("SpeciesAuxData", attr)
- for sub_list in self.data:
- for element in sub_list:
- xmlwriter.write_xml(element)
- xmlwriter.close_tag()
-
- def as_PartitionFunctions(self):
- return PartitionFunctions(self)
-
-
-class GasAbsLookup:
- """Represents a GasAbsLookup object.
-
- See online ARTS documentation for object details.
-
- """
-
- def __init__(self,
- speciestags=None,
- nonlinearspecies=None,
- frequencygrid=None,
- pressuregrid=None,
- referencevmrprofiles=None,
- referencetemperatureprofile=None,
- temperatureperturbations=None,
- nonlinearspeciesvmrperturbations=None,
- absorptioncrosssection=None):
-
- self.speciestags = speciestags
- self.nonlinearspecies = nonlinearspecies
- self.frequencygrid = frequencygrid
- self.pressuregrid = pressuregrid
- self.referencevmrprofiles = referencevmrprofiles
- self.referencetemperatureprofile = referencetemperatureprofile
- self.temperatureperturbations = temperatureperturbations
- self.nonlinearspeciesvmrperturbations = nonlinearspeciesvmrperturbations
- self.absorptioncrosssection = absorptioncrosssection
-
- @property
- def speciestags(self):
- """List of :class:`SpeciesTag`."""
- return self._speciestags
-
- @property
- def nonlinearspecies(self):
- """Indices to indentify nonlinear species."""
- return self._nonlinearspecies
-
- @property
- def frequencygrid(self):
- """Frequency vector."""
- return self._frequencygrid
-
- @property
- def pressuregrid(self):
- """Pressure level vector."""
- return self._pressuregrid
-
- @property
- def referencevmrprofiles(self):
- """Reference VMR profiles."""
- return self._referencevmrprofiles
-
- @property
- def referencetemperatureprofile(self):
- """Reference temperature profile."""
- return self._referencetemperatureprofile
-
- @property
- def temperatureperturbations(self):
- """Vector with temperature perturbations."""
- return self._temperatureperturbations
-
- @property
- def nonlinearspeciesvmrperturbations(self):
- """Vector with VMR perturbations for nonlinear species."""
- return self._nonlinearspeciesvmrperturbations
-
- @property
- def absorptioncrosssection(self):
- """Absorption crosssections."""
- return self._absorptioncrosssection
-
- @speciestags.setter
- def speciestags(self, speciestags):
- self._speciestags = return_if_arts_type(
- speciestags, 'ArrayOfArrayOfSpeciesTag')
-
- @nonlinearspecies.setter
- def nonlinearspecies(self, nonlinearspecies):
- self._nonlinearspecies = return_if_arts_type(
- nonlinearspecies, 'ArrayOfIndex')
-
- @frequencygrid.setter
- def frequencygrid(self, frequencygrid):
- self._frequencygrid = return_if_arts_type(
- frequencygrid, 'Vector')
-
- @pressuregrid.setter
- def pressuregrid(self, pressuregrid):
- self._pressuregrid = return_if_arts_type(
- pressuregrid, 'Vector')
-
- @referencevmrprofiles.setter
- def referencevmrprofiles(self, referencevmrprofiles):
- self._referencevmrprofiles = return_if_arts_type(
- referencevmrprofiles, 'Matrix')
-
- @referencetemperatureprofile.setter
- def referencetemperatureprofile(self, referencetemperatureprofile):
- self._referencetemperatureprofile = return_if_arts_type(
- referencetemperatureprofile, 'Vector')
-
- @temperatureperturbations.setter
- def temperatureperturbations(self, temperatureperturbations):
- self._temperatureperturbations = return_if_arts_type(
- temperatureperturbations, 'Vector')
-
- @nonlinearspeciesvmrperturbations.setter
- def nonlinearspeciesvmrperturbations(self, nonlinearspeciesvmrperturbations):
- self._nonlinearspeciesvmrperturbations = return_if_arts_type(
- nonlinearspeciesvmrperturbations, 'Vector')
-
- @absorptioncrosssection.setter
- def absorptioncrosssection(self, absorptioncrosssection):
- self._absorptioncrosssection = return_if_arts_type(
- absorptioncrosssection, 'Tensor4')
-
- @classmethod
- def from_xml(cls, xmlelement):
- """Loads a GasAbsLookup object from an existing file.
- """
-
- obj = cls()
- obj.speciestags = xmlelement[0].value()
- obj.nonlinearspecies = xmlelement[1].value()
- obj.frequencygrid = xmlelement[2].value()
- obj.pressuregrid = xmlelement[3].value()
- obj.referencevmrprofiles = xmlelement[4].value()
- obj.referencetemperatureprofile = xmlelement[5].value()
- obj.temperatureperturbations = xmlelement[6].value()
- obj.nonlinearspeciesvmrperturbations = xmlelement[7].value()
- obj.absorptioncrosssection = xmlelement[8].value()
-
- return obj
-
- def write_xml(self, xmlwriter, attr=None):
- """Write a ScatterinMetaData object to an ARTS XML file.
- """
- if attr is None:
- attr = {}
-
- xmlwriter.open_tag("GasAbsLookup", attr)
- xmlwriter.write_xml(self.speciestags)
- if self.nonlinearspecies is None:
- nonlinearspecies = []
- else:
- nonlinearspecies = self.nonlinearspecies
- xmlwriter.write_xml(nonlinearspecies,
- {'name': 'NonlinearSpecies'},
- arraytype='Index')
- xmlwriter.write_xml(self.frequencygrid,
- {'name': 'FrequencyGrid'})
- xmlwriter.write_xml(self.pressuregrid,
- {'name': 'PressureGrid'})
- xmlwriter.write_xml(self.referencevmrprofiles,
- {'name': 'ReferenceVmrProfiles'})
- xmlwriter.write_xml(self.referencetemperatureprofile,
- {'name': 'ReferenceTemperatureProfile'})
- xmlwriter.write_xml(self.temperatureperturbations,
- {'name': 'TemperaturePerturbations'})
- xmlwriter.write_xml(self.nonlinearspeciesvmrperturbations,
- {'name': 'NonlinearSpeciesVmrPerturbations'})
- xmlwriter.write_xml(self.absorptioncrosssection,
- {'name': 'AbsorptionsCrossSections'})
- xmlwriter.close_tag()
-
-
-class SpeciesTag(str):
- """Represents a SpeciesTag object.
-
- See online ARTS documentation for object details.
-
- """
-
- @classmethod
- def from_xml(cls, xmlelement):
- """Loads a SpeciesTag object from an existing file.
- """
- if xmlelement.text is None:
- raise Exception('SpeciesTag must not be empty.')
- return cls(xmlelement.text.strip()[1:-1])
-
- def write_xml(self, xmlwriter, attr=None):
- """Write a SpeciesTag object to an ARTS XML file.
- """
- if attr is None:
- attr = {}
-
- xmlwriter.open_tag('SpeciesTag', attr, newline=False)
- xmlwriter.write('"' + self + '"')
- xmlwriter.close_tag()
-
- def to_tex(self):
- """Turn indices in species tags to LaTeX subscripts."""
- if "FC" in self:
- return self
- else:
- return re.sub("([a-zA-Z]+)([0-9]+)", r"\1$_{\2}$", self)
-
-
-class Sparse(scipy.sparse.csc_matrix):
- """Wrapper around :class:`scipy.sparse.csc_matrix`.
-
- This class wraps around the SciPy Compressed Sparse Column matrix. The
- usage is exactly the same, but support for reading and writing XML files
- is added. Also additional attributes were added to map the ARTS
- implementation of :arts:`Sparse`.
-
- """
- @property
- def nrows(self):
- """Number of rows."""
- return self.shape[0]
-
- @property
- def ncols(self):
- """Number of columns."""
- return self.shape[1]
-
- @property
- def rowindex(self):
- """Row indices to locate data in matrix."""
- return self.tocoo().row
-
- @property
- def colindex(self):
- """Column indices to locate data in matrix."""
- return self.tocoo().col
-
- @property
- def sparsedata(self):
- """Data value at specified positions in matrix."""
- return self.tocoo().data
-
- @classmethod
- def from_xml(cls, xmlelement):
- """Loads a Sparse object from an existing file."""
-
- binaryfp = xmlelement.binaryfp
- nelem = int(xmlelement[0].attrib['nelem'])
- nrows = int(xmlelement.attrib['nrows'])
- ncols = int(xmlelement.attrib['ncols'])
-
- if binaryfp is None:
- rowindex = np.fromstring(xmlelement[0].text, sep=' ').astype(int)
- colindex = np.fromstring(xmlelement[1].text, sep=' ').astype(int)
- sparsedata = np.fromstring(xmlelement[2].text, sep=' ')
- else:
- rowindex = np.fromfile(binaryfp, dtype='<i4', count=nelem)
- colindex = np.fromfile(binaryfp, dtype='<i4', count=nelem)
- sparsedata = np.fromfile(binaryfp, dtype='<d', count=nelem)
-
- return cls((sparsedata, (rowindex, colindex)), shape=(nrows, ncols))
-
-
-class QuantumIdentifier:
- """Represents a QuantumIdentifier object.
-
- See online ARTS documentation for object details.
-
- """
-
- def __init__(self, entry):
- these = entry.split()
- assert len(these) > 0, "No QuantumIdentifier"
- spec = these[0].split('-') # UGLY: What about negative charge?
- if len(spec) == 1:
- self._afgl = None
- if spec[0] == 'None':
- self._spec = None
- else:
- self._spec = spec[0]
- elif len(spec) == 2:
- self._spec = spec[0]
- self._afgl = int(spec[1])
- else:
- assert False, "Cannot recognize species"
-
- if len(these) == 1:
- self._transition = False
- self._level = False
- return
-
- if these[1] == 'TR':
- self._transition = True
- self._level = False
- elif these[1] == 'EN':
- self._transition = False
- self._level = True
- else:
- assert False, "Must be energy level [EN] or transition [TR] type"
-
- self._qns = as_quantumnumbers(" ".join(these[2:]))
-
- self._assert_sanity_()
-
- def __repr__(self):
- out = str(self._spec)
- if self._afgl is not None:
- out += '-' + str(self._afgl)
- if self._transition or self._level:
- if self._transition:
- out += ' TR '
- else:
- out += ' EN '
- out += str(self._qns)
- return out
-
- def _assert_sanity_(self):
- if self._transition:
- assert type(self._qns) is QuantumNumberRecord, "Mismatching types"
- elif self._level:
- assert type(self._qns) is QuantumNumbers, "Mismatching types"
- else:
- assert False, "Programmer error?"
-
- def __str__(self):
- assert self.afgl is not None or self.species is not None, \
- "Bad data cannot be converted to str. Contains no species or iso"
- return self.__repr__()
-
- @property
- def qns(self):
- return self._qns
-
- @qns.setter
- def qns(self, qns):
- self._qns = as_quantumnumbers(qns)
- if type(self._qns) is QuantumNumberRecord:
- self._transition = True
- self._level = False
- elif type(self._qns) is QuantumNumbers:
- self._transition = False
- self._level = True
- else:
- assert False, "Programmer error?"
-
- @property
- def species(self):
- return self._spec
-
- @species.setter
- def species(self, value):
- self._spec = return_if_arts_type(value, 'String')
-
- @property
- def afgl(self):
- return self._afgl
-
- @afgl.setter
- def afgl(self, value):
- self._afgl = return_if_arts_type(value, 'Index')
-
- @classmethod
- def from_xml(cls, xmlelement):
- """Loads a QuantumIdentifier object from an existing file.
- """
- if xmlelement.text is None:
- raise Exception('QuantumIdentifier must not be empty.')
- return cls(xmlelement.text.strip())
-
- def write_xml(self, xmlwriter, attr=None):
- """Write a QuantumIdentifier object to an ARTS XML file.
- """
- if attr is None:
- attr = {}
-
- xmlwriter.open_tag('QuantumIdentifier', attr, newline=False)
- xmlwriter.write(self.__str__())
- xmlwriter.close_tag()
-
-
-class QuantumNumberRecord:
- """Represents a QuantumNumberRecord object.
-
- See online ARTS documentation for object details.
-
- """
-
- def __init__(self, upper=None, lower=None):
- self._qns = {'UP': QuantumNumbers(), 'LO': QuantumNumbers()}
- self._qns['UP'] = return_if_arts_type(upper, 'QuantumNumbers')
- self._qns['LO'] = return_if_arts_type(lower, 'QuantumNumbers')
-
- def __repr__(self):
- if len(self._qns['UP']) == 0 and len(self._qns['LO']) == 0:
- return 'No Quantum-Numbers'
- else:
- return "UP " + str(self._qns['UP']) + " LO " + str(self._qns['LO'])
-
- def __str__(self):
- if len(self._qns['UP']) == 0 and len(self._qns['LO']) == 0:
- return ''
- else:
- return self.__repr__()
-
- def __getitem__(self, key):
- return self._qns[key]
-
- def __setitem__(self, key, value):
- self._qns[key] = return_if_arts_type(as_quantumnumbers(value),
- 'QuantumNumbers')
-
- def __iter__(self):
- return iter(self._qns)
-
- def __contains__(self, value):
- return value in ['UP', 'LO']
-
- @classmethod
- def from_dict(cls, d):
- """Creates a QuantumNumberRecord from dictionary
- """
- if len(d) == 0:
- return QuantumNumberRecord(upper=QuantumNumbers(),
- lower=QuantumNumbers())
-
- assert 'UP' in d and 'LO' in d, "Need UP and LO to create"
- qnr = QuantumNumberRecord(upper=QuantumNumbers(d['UP']),
- lower=QuantumNumbers(d['LO']))
- return qnr
-
- @classmethod
- def from_str(cls, s):
- """Creates a QuantumNumberRecord from string
- """
- s = s.strip()
- if len(s) == 0:
- return QuantumNumberRecord(upper=QuantumNumbers(),
- lower=QuantumNumbers())
-
- assert 'UP' in s and 'LO' in s, "Need UP and LO to create"
- _t1 = s.split('UP')
- assert len(_t1) == 2, "Unexpectedly many/few UP in s"
- if len(_t1[0]) == 0:
- _t2 = _t1[1].split('LO')
- assert len(_t2) == 2, "Unexpectedly many/few LO in s"
- lo = _t2[1]
- up = _t2[0]
- else:
- up = _t1[1]
- _t2 = _t1[0].split('LO')
- assert len(_t2) == 2, "Unexpectedly many/few LO in s"
- lo = _t2[1]
-
- qnr = QuantumNumberRecord(upper=QuantumNumbers(up),
- lower=QuantumNumbers(lo))
- return qnr
-
- @property
- def upper(self):
- """QuantumNumbers object representing the upper quantumnumber."""
- return self._qns['UP']
-
- @property
- def lower(self):
- """QuantumNumbers object representing the lower quantumnumber."""
- return self._qns['LO']
-
- @upper.setter
- def upper(self, upper):
- self._qns['UP'] = return_if_arts_type(upper, 'QuantumNumbers')
-
- @lower.setter
- def lower(self, lower):
- self._qns['LO'] = return_if_arts_type(lower, 'QuantumNumbers')
-
- @property
- def qns(self):
- return self._qns
-
- @qns.setter
- def qns(self, value):
- if 'LO' in value:
- self._qns['LO'] = QuantumNumbers(value['LO'])
- else:
- self._qns['LO'] = QuantumNumbers()
-
- if 'UP' in value:
- self._qns['UP'] = QuantumNumbers(value['UP'])
- else:
- self._qns['UP'] = QuantumNumbers()
-
- def zeeman_splitting(self, type=None, case=None, H=1):
- from ..physics.em import zeeman_splitting, landau_g_factor, \
- zeeman_transitions
-
- if case.lower() == 'a':
- gu = landau_g_factor(self.upper['Omega'], self.upper['J'],
- self.upper['S'], self.upper['Lambda'],
- gs=2, gl=1, case='a')
- gl = landau_g_factor(self.lower['Omega'], self.lower['J'],
- self.lower['S'], self.lower['Lambda'],
- gs=2, gl=1, case='a')
- elif case.lower() == 'b':
- gu = landau_g_factor(self.upper['N'], self.upper['J'],
- self.upper['S'], self.upper['Lambda'],
- gs=2, gl=1, case='b')
- gl = landau_g_factor(self.lower['N'], self.lower['J'],
- self.lower['S'], self.lower['Lambda'],
- gs=2, gl=1, case='b')
- else:
- raise RuntimeError("No unknown cases allowed")
-
- if type is not None:
- mu, ml = zeeman_transitions(self.upper['J'], self.lower['J'], type)
- return zeeman_splitting(gu, gl, mu, ml, H)
- else:
- mu, ml = zeeman_transitions(self.upper['J'], self.lower['J'], "PI")
- pi = zeeman_splitting(gu, gl, mu, ml, H)
- mu, ml = zeeman_transitions(self.upper['J'], self.lower['J'], "S+")
- sp = zeeman_splitting(gu, gl, mu, ml, H)
- mu, ml = zeeman_transitions(self.upper['J'], self.lower['J'], "S-")
- sm = zeeman_splitting(gu, gl, mu, ml, H)
- return {"pi": pi, "s+": sp, "s-": sm}
-
- def zeeman_transitions(self, type=None):
- from ..physics.em import zeeman_transitions, zeeman_strength
- if type is not None:
- mu, ml = zeeman_transitions(self.upper['J'], self.lower['J'], type)
- return zeeman_strength(self.upper['J'], self.lower['J'], mu, ml)
- else:
- mu, ml = zeeman_transitions(self.upper['J'], self.lower['J'], "PI")
- pi = zeeman_strength(self.upper['J'], self.lower['J'], mu, ml)
- mu, ml = zeeman_transitions(self.upper['J'], self.lower['J'], "S+")
- sp = zeeman_strength(self.upper['J'], self.lower['J'], mu, ml)
- mu, ml = zeeman_transitions(self.upper['J'], self.lower['J'], "S-")
- sm = zeeman_strength(self.upper['J'], self.lower['J'], mu, ml)
- return {"pi": pi, "s+": sp, "s-": sm}
-
- @classmethod
- def from_xml(cls, xmlelement):
- """Loads a QuantumNumberRecord object from an existing file.
- """
-
- obj = cls()
- obj.upper = xmlelement[0][0].value()
- obj.lower = xmlelement[1][0].value()
-
- return obj
-
- def write_xml(self, xmlwriter, attr=None):
- """Write a SpeciesTag object to an ARTS XML file.
- """
- if attr is None:
- attr = {}
-
- xmlwriter.open_tag('QuantumNumberRecord', attr)
- xmlwriter.open_tag('Upper', attr, newline=False)
- xmlwriter.write_xml(self.upper)
- xmlwriter.close_tag()
- xmlwriter.open_tag('Lower', attr, newline=False)
- xmlwriter.write_xml(self.lower)
- xmlwriter.close_tag()
- xmlwriter.close_tag()
-
- def __iadd__(self, qnr):
- self._qns['UP'] += qnr['UP']
- self._qns['LO'] += qnr['LO']
- return self
-
- def __isub__(self, qnr):
- self._qns['UP'] -= qnr['UP']
- self._qns['LO'] -= qnr['LO']
- return self
-
- def __eq__(self, qns):
- if type(qns) is QuantumNumberRecord:
- return self['LO'] == qns['LO'] and self['UP'] == qns['UP']
- elif type(qns) is QuantumNumbers:
- return self['UP'] == qns, self['LO'] == qns
- else:
- return self == as_quantumnumbers(qns)
-
- def __ne__(self, qns):
- if type(qns) is QuantumNumberRecord:
- return self['LO'] != qns['LO'] and self['UP'] != qns['UP']
- elif type(qns) is QuantumNumbers:
- return self['UP'] != qns, self['LO'] != qns
- else:
- return self != as_quantumnumbers(qns)
-
- def __lt__(self, qns):
- if type(qns) is QuantumNumberRecord:
- return self['LO'] < qns['LO'] and self['UP'] < qns['UP']
- elif type(qns) is QuantumNumbers:
- return self['UP'] < qns, self['LO'] < qns
- else:
- return self < as_quantumnumbers(qns)
-
- def __gt__(self, qns):
- if type(qns) is QuantumNumberRecord:
- return self['LO'] > qns['LO'] and self['UP'] > qns['UP']
- elif type(qns) is QuantumNumbers:
- return self['UP'] > qns, self['LO'] > qns
- else:
- return self > as_quantumnumbers(qns)
-
- def __le__(self, qns):
- if type(qns) is QuantumNumberRecord:
- return self['LO'] <= qns['LO'] and self['UP'] <= qns['UP']
- elif type(qns) is QuantumNumbers:
- return self['UP'] <= qns, self['LO'] <= qns
- else:
- return self <= as_quantumnumbers(qns)
-
- def __ge__(self, qns):
- if type(qns) is QuantumNumberRecord:
- return self['LO'] >= qns['LO'] and self['UP'] >= qns['UP']
- elif type(qns) is QuantumNumbers:
- return self['UP'] >= qns, self['LO'] >= qns
- else:
- return self >= as_quantumnumbers(qns)
-
-
-class QuantumNumbers:
- """Represents a QuantumNumbers object.
-
- See online ARTS documentation for object details.
-
- """
-
- def __init__(self, numbers=None, nelem=None):
-
- self.numbers = numbers
- if nelem is not None:
- self.nelem = nelem
- else:
- self.nelem = len(self.numbers)
-
- self._assert_sanity_()
-
- def _assert_sanity_(self):
- if self.nelem is None or self.numbers is None:
- return
- assert len(self.numbers) == self.nelem, "mismatching quantum numbers"
-
- def __repr__(self):
- out = ''
- for qn in self.numbers:
- out += qn + ' ' + str(self.numbers[qn]) + ' '
- return out[:-1]
-
- def __getitem__(self, key):
- """Returns the value. Mimics ARTS' behavior for mismatched data
- """
- if key in self:
- return self.numbers[key]
- else:
- return None
-
- def __setitem__(self, key, value):
- """Sets a value and counts up the quantum numbers
- """
- if key in self.numbers:
- self.numbers[key] = Rational(value)
- else:
- self.numbers[key] = Rational(value)
- self.nelem += 1
- self._assert_sanity_()
-
- def __iadd__(self, qns):
- for qn in qns:
- assert qn not in self, "Addition means adding new QN. Access " + \
- "individual elements to change their values"
- self.numbers[qn] = qns[qn]
- self.nelem += 1
- return self
-
- def __isub__(self, qns):
- for qn in qns:
- assert qn in self, "Subtraction means removing QN. Access " + \
- "individual elements to change their values"
- del self.numbers[qn]
- self.nelem -= 1
- return self
-
- def __contains__(self, key):
- """Are these quantum numbers here?
- """
- return key in self.numbers
-
- def __iter__(self):
- return iter(self.numbers)
-
- def __eq__(self, qns):
- """Tests for complete equality ==
- """
- return self <= qns and len(qns) == self.nelem
-
- def __ne__(self, qns):
- """Tests for lacking complete equality !=
- """
- return not self == qns
-
- def __le__(self, qns):
- """Tests for all in self being in qns <=
- """
- try:
- for qn in self:
- if qns[qn] != self[qn]:
- return False
- return True
- except:
- return False
-
- def __ge__(self, qns):
- """Tests for all in qns being in self >=
- """
- try:
- for qn in qns:
- if qns[qn] != self[qn]:
- return False
- return True
- except:
- return False
-
- def __lt__(self, qns):
- """Tests for all in self being in qns and if there is more in qns <
- """
- return self <= qns and self.nelem < len(qns)
-
- def __gt__(self, qns):
- """Tests for all in self being in qns and if there is more in self >
- """
- return qns <= self and len(qns) < self.nelem
-
- def __len__(self):
- return self.nelem
-
- def array_of_M(self):
- """Returns all possible M in a list. Requires presence of J
- """
- assert 'J' in self, "Need J to define M"
- assert self['J'] >= 0, "Negative J in this instance?"
- _t = []
- _s = -self['J']
- while _s <= self['J']:
- _t.append(_s)
- _s += 1
- return np.array(_t)
-
- @property
- def numbers(self):
- """Dict representing the quantumnumbers."""
- return self._numbers
-
- @property
- def nelem(self):
- """Number of quantumnumbers stored."""
- return self._nelem
-
- @numbers.setter
- def numbers(self, numbers):
- if type(numbers) is str:
- _t = numbers.split()
- nums = {}
- i = 0
- assert len(_t) % 2 == 0, "Not of form 'key1 value1 key2 value2'"
- while i < len(_t):
- nums[_t[i]] = Rational(_t[i+1])
- i += 2
- self._numbers = nums
- elif type(numbers) is dict:
- for i in numbers:
- numbers[i] = Rational(numbers[i])
- self._numbers = numbers
- elif type(numbers) is QuantumNumbers:
- self._numbers = numbers.numbers
- elif numbers is None:
- self._numbers = {}
- else:
- assert False, "Expected dict or String for QuantumNumbers"
- # OLD: self._numbers = return_if_arts_type(numbers, 'String')
-
- @nelem.setter
- def nelem(self, nelem):
- if nelem is None:
- self._nelem = None
- return
-
- self._nelem = nelem
-
- @classmethod
- def from_xml(cls, xmlelement):
- """Loads a QuantumNumbers object from an existing file.
- """
-
- obj = cls()
- obj.numbers = xmlelement.text
- obj.nelem = int(xmlelement.attrib['nelem'])
-
- return obj
-
- def write_xml(self, xmlwriter, attr=None):
- """Write a SpeciesTag object to an ARTS XML file.
- """
- if attr is None:
- attr = {}
-
- attr['nelem'] = self.nelem
-
- xmlwriter.open_tag('QuantumNumbers', attr, newline=False)
- xmlwriter.write(self.__str__())
- xmlwriter.close_tag(newline=False)
-
-
-class LineMixingRecord:
- """Represents a LineMixingRecord object.
-
- See online ARTS documentation for object details.
-
- """
-
- def __init__(self, tag=None, quantumnumberrecord=None, data=None):
-
- self.tag = tag
- self.quantumnumberrecord = quantumnumberrecord
- self.data = LineMixing(data)
-
- @property
- def tag(self):
- """:class:`SpeciesTag`"""
- return self._tag
-
- @property
- def quantumnumberrecord(self):
- """:class:`QuantumNumberRecord`"""
- return self._quantumnumberrecord
-
- @property
- def data(self):
- """Lineshape parameters."""
- return self._data
-
- @tag.setter
- def tag(self, tag):
- if tag is None:
- self._tag = None
- return
-
- self._tag = SpeciesTag(tag)
-
- def __repr__(self):
- return self.tag + ' ' + str(self.quantumnumberrecord) + ' ' + \
- str(self.data)
-
- @quantumnumberrecord.setter
- def quantumnumberrecord(self, quantumnumberrecord):
- self._quantumnumberrecord = return_if_arts_type(
- quantumnumberrecord, 'QuantumNumberRecord')
-
- @data.setter
- def data(self, data):
- self._data = return_if_arts_type(data, 'LineMixing')
-
- @classmethod
- def from_xml(cls, xmlelement):
- """Loads a LineMixingRecord object from an existing file.
- """
-
- obj = cls()
- obj.tag = xmlelement[0].value()
- obj.quantumnumberrecord = xmlelement[1].value()
- obj.data = LineMixing(xmlelement[2].value())
-
- return obj
-
- def write_xml(self, xmlwriter, attr=None):
- """Write a LineMixingRecord object to an ARTS XML file.
- """
- if attr is None:
- attr = {}
-
- xmlwriter.open_tag("LineMixingRecord", attr)
- xmlwriter.write_xml(self.tag)
- xmlwriter.write_xml(self.quantumnumberrecord)
- xmlwriter.write_xml(self.data.data)
- xmlwriter.close_tag()
-
-
-class PropagationMatrix:
- """Represents a PropagationMatrix object.
-
- See online ARTS documentation for object details.
-
- """
-
- def __init__(self, data=np.zeros((1))):
- n = len(data.shape)
- if n == 4:
- self.aa = data.shape[0]
- self.za = data.shape[1]
- self.nf = data.shape[2]
- elif n == 3:
- self.aa = 1
- self.za = data.shape[0]
- self.nf = data.shape[1]
- elif n == 2:
- self.aa = 1
- self.za = 1
- self.nf = data.shape[0]
- elif n != 1:
- raise RuntimeError("Bad input")
- else:
- self.aa = 1
- self.za = 1
- self.nf = 1
-
- if data.shape[-1] == 7:
- self.stokes = 4
- elif data.shape[-1] == 4:
- self.stokes = 3
- elif data.shape[-1] == 1 or data.shape[-1] == 2:
- self.stokes = data.shape[-1]
- else:
- raise RuntimeError("Bad input")
-
- self.data = data.reshape(self.aa, self.za, self.nf, data.shape[-1])
-
- def Kjj(self, aa=0, za=0):
- return self.data[aa, za, :, 0]
-
- def K12(self, aa=0, za=0):
- return self.data[aa, za, :, 1]
-
- def K13(self, aa=0, za=0):
- return self.data[aa, za, :, 2]
-
- def K14(self, aa=0, za=0):
- return self.data[aa, za, :, 3]
-
- def K23(self, aa=0, za=0):
- return self.data[aa, za, :, 4]
-
- def K24(self, aa=0, za=0):
- return self.data[aa, za, :, 5]
-
- def K34(self, aa=0, za=0):
- return self.data[aa, za, :, 6]
-
- def __add__(self, other):
- if isinstance(other, PropagationMatrix):
- return PropagationMatrix(self.data + other.data)
- else:
- return PropagationMatrix(self.data + other)
- __radd__ = __add__
-
- def __repr__(self):
- size = "Stokes Dim: {}, Freqs: {}, Zeniths: {}, Azimuths: {}".format(self.stokes, self.nf, self.za, self.aa)
- return "PropagationMatrix of size <{}>".format(size)
- __str__=__repr__
-
- @classmethod
- def from_xml(cls, xmlelement):
- """Loads a PropagationMatrix object from an existing file.
- """
- return cls(xmlelement[0].value())
-
- def write_xml(self, xmlwriter, attr=None):
- """Write a PropagationMatrix object to an ARTS XML file.
- """
- if attr is None:
- attr = {}
-
- xmlwriter.open_tag("PropagationMatrix", attr)
- xmlwriter.write_xml(self.data)
- xmlwriter.close_tag()
-
-
-class StokesVector:
- """Represents a StokesVector object.
-
- See online ARTS documentation for object details.
-
- """
-
- def __init__(self, data=np.zeros((1))):
- n = len(data.shape)
- if n == 4:
- self.aa = data.shape[0]
- self.za = data.shape[1]
- self.nf = data.shape[2]
- elif n == 3:
- self.aa = 1
- self.za = data.shape[0]
- self.nf = data.shape[1]
- elif n == 2:
- self.aa = 1
- self.za = 1
- self.nf = data.shape[0]
- elif n != 1:
- raise RuntimeError("Bad input")
- else:
- self.aa = 1
- self.za = 1
- self.nf = 1
-
- if data.shape[-1] == 1 or data.shape[-1] == 2 or \
- data.shape[-1] == 3 or data.shape[-1] == 4:
- self.stokes = data.shape[-1]
- else:
- raise RuntimeError("Bad input")
-
- self.data = data.reshape(self.aa, self.za, self.nf, data.shape[-1])
-
- @classmethod
- def from_xml(cls, xmlelement):
- """Loads a StokesVector object from an existing file.
- """
- return cls(xmlelement[0].value())
-
- def write_xml(self, xmlwriter, attr=None):
- """Write a StokesVector object to an ARTS XML file.
- """
- if attr is None:
- attr = {}
-
- xmlwriter.open_tag("StokesVector", attr)
- xmlwriter.write_xml(self.data)
- xmlwriter.close_tag()
-
-
-try:
- from .utils import return_if_arts_type
- from .utils import as_quantumnumbers
- from .internals import PartitionFunctions
- from .internals import ARTSCAT5
- from .internals import Rational
- from .internals import LineMixing
-except:
- from typhon.arts.utils import return_if_arts_type, as_quantumnumbers
- from typhon.arts.internals import PartitionFunctions, ARTSCAT5, Rational, \
- LineMixing
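As one concrete illustration of the removed ``from_xml``/``write_xml`` classes above: the ``QuantumNumbers`` constructor parsed whitespace-separated ``key value`` pairs into ``Rational`` numbers. A sketch that runs only against typhon <= 0.8.0:

```python
from typhon.arts.catalogues import QuantumNumbers  # typhon <= 0.8.0 only

qns = QuantumNumbers("J 3/2 N 1/2")  # "key value" pairs become Rationals
print("J" in qns)  # True: membership test via __contains__
print(len(qns))    # 2: nelem is derived from the parsed pairs
print(qns["F"])    # None: missing numbers mimic ARTS and return None
```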
diff --git a/typhon/arts/common.py b/typhon/arts/common.py
deleted file mode 100644
index 8b38b65c..00000000
--- a/typhon/arts/common.py
+++ /dev/null
@@ -1,118 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Common functions for the typhon.arts subpackage.
-"""
-import collections
-import os
-import shutil
-import subprocess
-
-from typhon.environment import environ
-from typhon.utils import path_append
-
-
-__all__ = [
- 'run_arts',
- ]
-
-
-def run_arts(controlfile=None, arts=None, writetxt=False,
- ignore_error=False, **kwargs):
- """Start an ARTS Simulation.
-
- Parameters:
- controlfile (str): Path to the ARTS controlfile.
- arts (str): Path to the arts executable.
- writetxt (bool): Write stdout and stderr to ASCII files.
- ignore_error (bool): If set to True, errors during the ARTS run do not
- result in an exception (default is False).
- **kwargs: Additional command line arguments passed as keyword.
- See `arts --help` for more details.
-
- Returns:
- Named tuple containing the fields stdout, stderr and retcode.
-
- Examples:
- Run a simple ARTS job and set the output directory
- and the report level:
-
- >>> run_arts('foo.arts', outdir='bar', reporting='020')
-
- If a keyword is set to True it is added as flag.
- Show the ARTS help message:
-
- >>> run_arts(help=True)
-
- """
- # If the path to the ARTS executable is not passed explicitly, construct it
- # from the ARTS_BUILD_PATH. Its actual existence is checked later.
- if arts is None and environ.get('ARTS_BUILD_PATH') is not None:
- arts = os.path.join(environ['ARTS_BUILD_PATH'], 'src', 'arts')
- # Try 'arts' as a fallback, maybe it is in the user's PATH.
- elif arts is None:
- arts = 'arts'
-
- # Append ARTS_INCLUDE_PATH and ARTS_DATA_PATH to the user's environment.
- if environ.get('ARTS_INCLUDE_PATH') is not None:
- path_append(environ.get('ARTS_INCLUDE_PATH'), path='ARTS_INCLUDE_PATH')
-
- if environ.get('ARTS_DATA_PATH') is not None:
- path_append(environ.get('ARTS_DATA_PATH'), path='ARTS_DATA_PATH')
-
- if not shutil.which(arts):
- raise Exception('ARTS executable not found at: {}'.format(arts))
-
- if controlfile is None:
- controlfile = ''
- elif not os.path.exists(controlfile):
- err_msg = 'Controlfile not found at: {}'.format(controlfile)
- raise FileNotFoundError(err_msg)
-
- opts = []
- for kw, arg in kwargs.items():
- if isinstance(arg, bool) and arg is True:
- if len(kw) == 1:
- opts.append('-{}'.format(kw))
- else:
- opts.append('--{}'.format(kw))
- elif len(kw) == 1:
- opts.append('-{}{}'.format(kw, arg))
- else:
- opts.append('--{}={}'.format(kw, arg))
-
- # Run ARTS job and redirect output.
- p = subprocess.run([arts, *opts, controlfile],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- universal_newlines=True
- )
-
- # Write ARTS output and error to ASCII file.
- if writetxt:
- if controlfile.endswith('.arts'):
- outfile = controlfile.replace('.arts', '.out')
- errfile = controlfile.replace('.arts', '.err')
- else:
- outfile = 'arts.out'
- errfile = 'arts.err'
-
- for key in ['outdir', 'o']:
- if key in kwargs:
- outfile = os.path.join(kwargs[key], outfile)
- errfile = os.path.join(kwargs[key], errfile)
-
- with open(outfile, 'w') as out, open(errfile, 'w') as err:
- out.write(p.stdout)
- err.write(p.stderr)
-
- # Throw exception if ARTS run failed.
- if p.returncode != 0 and ignore_error is not True:
- raise Exception('ARTS run failed:\n{}'.format(p.stderr))
-
- # Store ARTS output in namedtuple.
- arts_out = collections.namedtuple(
- 'ARTS_output',
- ['stdout', 'stderr', 'retcode']
- )
-
- return arts_out(stdout=p.stdout, stderr=p.stderr, retcode=p.returncode)
-
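Before its removal, ``run_arts`` translated keyword arguments into ARTS command-line options: single-letter keys became short flags, longer keys long options, and ``True``-valued booleans bare flags. A usage sketch against typhon <= 0.8.0 (the controlfile name is the placeholder from the docstring):

```python
from typhon.arts import run_arts  # removed helper, typhon <= 0.8.0 only

# Builds and runs: arts --outdir=bar -r020 foo.arts
result = run_arts("foo.arts", outdir="bar", r="020")

# The result is a namedtuple with fields stdout, stderr and retcode.
print(result.retcode)
print(result.stdout)
```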
diff --git a/typhon/arts/covariancematrix.py b/typhon/arts/covariancematrix.py
deleted file mode 100644
index d85ff494..00000000
--- a/typhon/arts/covariancematrix.py
+++ /dev/null
@@ -1,329 +0,0 @@
-import numpy as np
-import scipy as sp
-import matplotlib.pyplot as plt
-from typhon.arts.catalogues import Sparse
-import ctypes as c
-
-class Block(object):
- """
- A block of a covariance matrix.
-
- A covariance matrix block holds the covariances of a given retrieval
- quantity or the covariances between two different retrieval quantities. A
- block is identified by its row and column block indices. The row and column
- indices of the block correspond to the retrieval quantity indices within
- ARTS. A covariance matrix block consists of a matrix holding the covariances,
- which may be either dense or sparse, as well as additional information on
- the location of the block in the full covariance matrix.
- """
-
- @classmethod
- def __from_covariance_matrix_block_struct__(cls, s, inverse):
- """
- Create a block from a :class:`CovarianceMatrixBlockStruct`
- returned from the ARTS API.
-
- Parameters:
- s: The :class:`CovarianceMatrixBlockStruct` to create the
- block from.
- inverse: Flag that indicates whether the block belongs to
- the normal part of the covariance matrix or its inverse.
-
- Returns: The :class:`Block` object that represents the given
- :class:`CovarianceMatrixBlockStruct`
- """
-
- i, j = list(s.indices)
- rs, cs = list(s.position)
- m, n = list(s.dimensions)
-
- if not s.inner_ptr:
- matrix = np.ctypeslib.as_array(c.cast(s.ptr, c.POINTER(c.c_double)), (m, n))
- else:
- nnz = s.nnz
- data = np.ctypeslib.as_array(c.cast(s.ptr, c.POINTER(c.c_double)),
- (nnz,))
- row_indices = np.ctypeslib.as_array(s.inner_ptr, (nnz,))
- col_starts = np.ctypeslib.as_array(s.outer_ptr, (m + 1,))
- matrix = sp.sparse.csr_matrix((data, row_indices, col_starts),
- shape = (m, n))
- return Block(i, j, rs, cs, inverse, matrix)
-
- def __init__(self, i, j, row_start, column_start, inverse, matrix):
- """
- Parameters:
- i(int): The row-block index of the covariance matrix block.
- j(int): The column-block index of the covariance matrix block.
- row_start(int): Row index of the left- and uppermost element in
- the block.
- column_start(int): Column index of the left- and uppermost element
- in the block.
- inverse(bool): Flag indicating whether the block is part of the
- inverse of the covariance matrix or not.
- matrix(np.ndarray or sp.sparse): The matrix of which the block
- consists.
-
- """
- self._i = i
- self._j = j
- self._row_start = row_start
- self._column_start = column_start
- self._inverse = inverse
- self._matrix = matrix
-
- #
- # Read-only properties
- #
-
- @property
- def i(self):
- """Row-index of the block."""
- return self._i
-
- @property
- def j(self):
- """Column-index of the block"""
- return self._j
-
- @property
- def row_start(self):
- """Row-index of the left- and uppermost element of the
- block w.r.t. the full covariance matrix."""
- return self._row_start
-
- @property
- def column_start(self):
- """Column index of the left- and uppermost element of
- the block w.r.t. the full covariance matrix."""
- return self._column_start
-
- @property
- def inverse(self):
- """Flag that indicates whether this block is part of
- the normal part of the covariance matrix or its inverse."""
- return self._inverse
-
- @property
- def matrix(self):
- """The matrix containing the covariances of the block."""
- return self._matrix
-
- def write_xml(self, xmlwriter, attr = None):
- """
- Serialize block and write to xml stream.
-
- Opens a new tag for the block and writes the matrix into
- it. Attributes of the block are saved as attributes of
- the newly create block.
-
- Parameters:
- xmlwriter: The xml stream to which to write the block.
- attr(dict): Additional attributes that should be added to
- the tag that is created for the block.
- """
- if attr is None:
- attr = {}
-
- attr["row_index"] = self.i
- attr["column_index"] = self.j
- attr["row_start"] = self.row_start
- attr["column_start"] = self.column_start
- attr["row_extent"], attr["column_extent"] = self.matrix.shape
- attr["is_inverse"] = int(self.inverse)
-
- if sp.sparse.issparse(self.matrix):
- if not type(self.matrix) == Sparse:
- # why? because I can ...
- self.matrix.__class__.write_xml = Sparse.write_xml
- attr["type"] = "Sparse"
- else:
- attr["type"] = "Dense"
-
- xmlwriter.open_tag('Block', attr)
- xmlwriter.write_xml(self.matrix)
- xmlwriter.close_tag()
-
-class CovarianceMatrix(object):
- """
- Covariance matrix class representing the ARTS group of the same name
- implementing covariance matrices for OEM calculations in ARTS. Covariance
- matrices are stored as block diagonal matrices where each block represents
- covariances between two retrieval quantities.
-
- Since covariance matrices must be symmetric only blocks lying on or above
- the diagonal are stored. The covariance matrix class is designed to hold
- both, the covariance matrix and its inverse. This has the advantage that
- the inverse of the covariance matrix can be set directly by the user, which
- is useful for Tikhonov regularization and when the inverse is available in
- closed form.
- """
- #
- # Class methods
- #
-
- @classmethod
- def __from_variable_value_struct__(cls, s):
- """
- Implements ARTS-API interface for returning objects from
- an ARTS workspace.
-
-
- """
- from typhon.arts.workspace.api import arts_api
-
- n_blocks = s.dimensions[0]
- n_inv_blocks = s.dimensions[1]
-
- blocks = []
- for i in range(n_blocks):
- bs = arts_api.get_covariance_matrix_block(s.ptr, i, False)
- b = Block.__from_covariance_matrix_block_struct__(bs, False)
- blocks += [b]
-
- inv_blocks = []
- for i in range(n_inv_blocks):
- bs = arts_api.get_covariance_matrix_block(s.ptr, i, True)
- b = Block.__from_covariance_matrix_block_struct__(bs, True)
- inv_blocks += [b]
-
- return CovarianceMatrix(blocks, inv_blocks)
-
-
- @classmethod
- def from_xml(cls, xmlelement):
- """Load a covariance matrix from an ARTS XML fiile.
-
- Returns:
- The loaded covariance matrix as :class:`CovarianceMatrix` object
- """
- n_blocks = xmlelement.get("n_blocks")
-
- blocks = []
- inv_blocks = []
- for b in list(xmlelement):
-
- i = b.get("row_index")
- j = b.get("column_index")
- row_start = int(b.get("row_start"))
- column_start = int(b.get("column_start"))
- inverse = bool(int(b.get("is_inverse")))
- matrix = b[0].value()
-
- b = Block(i, j, row_start, column_start, inverse, matrix)
- if inverse:
- inv_blocks += [b]
- else:
- blocks += [b]
- return CovarianceMatrix(blocks, inv_blocks)
-
- def __init__(self, blocks, inverse_blocks = [], workspace = None):
- """
- Create a covariance matrix object.
-
- Parameters:
- blocks(list): List containing the blocks that make up the
- covariance matrix.
- inverse_blocks: Blocks that make up the inverse of the
- covariance. Can be provided to avoid computation of
- the inverse of the covariance matrix.
- workspace: :class:`Workspace` to associate the covariance
- matrix to.
- """
- self._blocks = blocks
- self._inverse_blocks = inverse_blocks
- self._workspace = workspace
-
- #
- # Read-only properties
- #
-
- @property
- def blocks(self):
- """ The blocks contained in the covariance matrix."""
- return self._blocks
-
- @property
- def inverse_blocks(self):
- """The blocks that contained in the inverse of the covariance matrix."""
- return self._inverse_blocks
-
- @property
- def workspace(self):
- """The workspace associated with the covariance matrix."""
- return self._workspace
-
- #
- # Serialization
- #
-
- def write_xml(self, xmlwriter, attr = None):
- """
- Implements typhon xml serialization interface.
-
- Parameters:
- xmlwriter: The xml stream to which to write the block.
- attr(dict): Additional attributes that should be added to
- the tag that is created for the block.
- """
-
- if attr is None:
- attr = {}
-
- attr['n_blocks'] = len(self.blocks) + len(self.inverse_blocks)
- xmlwriter.open_tag('CovarianceMatrix', attr)
-
- for b in self.blocks:
- xmlwriter.write_xml(b)
- for b in self.inverse_blocks:
- xmlwriter.write_xml(b)
-
- xmlwriter.close_tag()
-
- def to_dense(self):
- """Conversion to dense representation.
-
- Converts the covariance matrix to a 2-dimensional numpy.ndarray.
-
- Returns:
- The covariance matrix as dense matrix.
-
- """
- m = max([b.row_start + b.matrix.shape[0] for b in self.blocks])
- n = max([b.column_start + b.matrix.shape[1] for b in self.blocks])
- mat = np.zeros((m, n))
- for b in self.blocks:
- m0 = b.row_start
- n0 = b.column_start
- dm = b.matrix.shape[0]
- dn = b.matrix.shape[1]
- if sp.sparse.issparse(b.matrix):
- mat[m0 : m0 + dm, n0 : n0 + dn] = b.matrix.toarray()
- else:
- mat[m0 : m0 + dm, n0 : n0 + dn] = b.matrix
- return mat
-
-def plot_covariance_matrix(covariance_matrix, ax = None):
- """
- Plots a covariance matrix.
-
- Parameters:
- covariance_matrix(:class:`CovarianceMatrix`): The covariance matrix
- to plot
- ax(matplotlib.axes): An axes object into which to plot the
- covariance matrix.
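-
-    Examples:
-        A usage sketch (``covmat`` stands in for an existing
-        :class:`CovarianceMatrix`):
-
-        >>> import matplotlib.pyplot as plt
-        >>> fig, ax = plt.subplots()
-        >>> plot_covariance_matrix(covmat, ax=ax)
-        >>> plt.show()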
- """
-
- if ax is None:
- ax = plt.gca()
-
- for b in covariance_matrix.blocks:
- y = np.arange(b.row_start, b.row_start + b.matrix.shape[0] + 1) - 0.5
- x = np.arange(b.column_start, b.column_start + b.matrix.shape[1] + 1) - 0.5
-        if sp.sparse.issparse(b.matrix):
-            ax.pcolormesh(x, y, b.matrix.toarray())
-        else:
-            ax.pcolormesh(x, y, np.asarray(b.matrix))
-
- m = max([b.row_start + b.matrix.shape[0] for b in covariance_matrix.blocks])
- n = max([b.column_start + b.matrix.shape[1] for b in covariance_matrix.blocks])
- ax.set_xlim([-0.5, n + 0.5])
- ax.set_ylim([m + 0.5, -0.5])
diff --git a/typhon/arts/griddedfield.py b/typhon/arts/griddedfield.py
deleted file mode 100644
index 3ac76d72..00000000
--- a/typhon/arts/griddedfield.py
+++ /dev/null
@@ -1,764 +0,0 @@
-# -*- coding: utf-8 -*-
-import copy
-import numbers
-
-import netCDF4
-import numpy as np
-import xarray
-from scipy import interpolate
-
-from .utils import return_if_arts_type, get_arts_typename
-
-__all__ = [
- 'GriddedField1',
- 'GriddedField2',
- 'GriddedField3',
- 'GriddedField4',
- 'GriddedField5',
- 'GriddedField6',
- 'griddedfield_from_netcdf',
- 'griddedfield_from_xarray',
-]
-
-
-class _GriddedField:
- """:class:`GriddedField` implements the same-named ARTS dataype.
-
- This class provides the facility of storing gridded data. For this purpose
- the grid-axes as well as the data are stored. GriddedFields can be easily
- written to XML-files as they define a clear datastructure.
-
- :class:`GriddedField` should not be used directly. Use one of the derived
- types such as :class:`GriddedField1` instead.
-
- Note:
- For the special case of storing atmospheric profiles as GriddedField3
- the latitude and longitude grids have to be initialised as empty
- np.array.
-
- Examples:
- Create and manipulate a :class:`GriddedField` object.
-
- >>> gf1 = GriddedField1()
- >>> gf1.grids = [np.arange(10)]
- >>> gf1.gridnames = ["Indices"]
- >>> gf1.data = np.random.randn(10)
-
- Inspect an existing :class:`GriddedField` object.
-
- >>> gf1.dimension
- 1
- >>> gf1.grids
- [array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])]
- >>> gf1.gridnames
- ['Indices']
-
- """
-
- def __init__(self, dimension=None, grids=None, data=None, gridnames=None,
- dataname=None, name=None):
- """Create a GriddedField object.
-
- Parameters:
- dimension (int): Dimension of the GriddedField.
- grids (list, tuple, np.ndarray): grids.
- data (np.ndarray): data values.
- gridnames (List[str]): clear names for all grids.
- dataname (str): name of the data array.
- name (str): name of the GriddedField.
-
- """
- if not isinstance(dimension, numbers.Integral) or dimension < 1:
-            raise ValueError('dimension must be an integer greater than 0')
- self._dimension = dimension
- self.grids = copy.deepcopy(grids)
- self.data = copy.copy(data)
- self.gridnames = copy.copy(gridnames)
- self.dataname = dataname
- self.name = name
-
- def __getitem__(self, index):
- """Make the data array subscriptable directly.
-
- ``gf[0, 1]`` is equivalent to ``gf.data[0, 1]``.
- """
- return self.data[index]
-
- def __eq__(self, other):
- """Test the equality of GriddedFields."""
- if (isinstance(other, self.__class__) or
- isinstance(self, other.__class__)):
-            # Check each attribute one after another for readability.
- # Return as soon as possible for performance.
- if self.name != other.name:
- return False
-
- if self.dataname != other.dataname:
- return False
-
- if self.gridnames != other.gridnames:
- return False
-
- if self.dimension != other.dimension:
- return False
-
- if self.grids is not None and other.grids is not None:
-                if not all(np.all(a == b) for a, b in zip(self.grids, other.grids)):
- return False
- elif self.grids is not other.grids:
- return False
-
- if self.data is not None and other.data is not None:
- if not np.allclose(self.data, other.data):
- return False
- elif self.data is not other.data:
- return False
-
- return True
- return NotImplemented
-
-    def __ne__(self, other):
- """Test the non-equality of GriddedFields."""
- if isinstance(other, self.__class__):
- return not self.__eq__(other)
- return NotImplemented
-
- def __repr__(self):
- try:
- if self.name:
- out = "GriddedField{}: {}\n".format(self.dimension, self.name)
- else:
- out = "GriddedField{}: {}\n".format(self.dimension,
- "Generic Field")
-
- for i in range(self.dimension):
- if self.gridnames[i]:
- out += "{} {}: {}\n".format(self.gridnames[i],
- np.shape(self.grids[i]),
- self.grids[i])
- else:
- out += "{} {}: {}\n".format("Grid " + str(i + 1),
- np.shape(self.grids[i]),
- self.grids[i])
- if self.dataname:
- out += "{} {}: {}\n".format(self.dataname,
- self.data.shape,
- self.data.flatten())
- else:
- out += "{} {}: {}\n".format("Data",
- self.data.shape,
- self.data.flatten())
- return out
-        except Exception:
- # If representation fails, fall back to default.
- # Known issues: Empty GriddedFields.
- return '<{0} at {1}>'.format(type(self).__name__, hex(id(self)))
-
- @property
- def shape(self):
- """Shape of the data array."""
- return self.data.shape
-
- @property
- def dimension(self):
- """Dimension of the GriddedField.
-
- The dimension has to be defined when creating the GriddedField object.
- For the convenience subclasses (e.g. GriddedField1) this is done
- automatically.
-
- """
- return self._dimension
-
- @property
- def grids(self):
- """List of grids defining the GriddedField.
-
- Note:
- The number of grids has to match the GriddedField dimension.
- """
- return self._grids
-
- @property
- def gridnames(self):
- """A list or tuple that includes a name for every grid.
-
- Note:
- The number of gridnames has to match the number of grids.
-            Gridnames are currently not used so it is not necessary
- to set them.
- """
- return self._gridnames
-
- @property
- def data(self):
- """The data matrix stored in the GriddedField.
-
- Note:
- The data array has to fit the grid dimensions.
- """
- return self._data
-
- @property
- def name(self):
- """Name of the GriddedField."""
- return self._name
-
- @property
- def dataname(self):
- """Name of the data array."""
- return self._dataname
-
- @grids.setter
- def grids(self, grids):
- if grids is None:
- self._grids = None
- return
-
- if type(grids) not in (list, tuple):
- raise TypeError('The array of grids must be type list or tuple.')
-
-        for grid in grids:
-            if (get_arts_typename(grid)
-                    not in ['ArrayOfString', 'ArrayOfIndex', 'Vector', None]):
-                raise TypeError(
-                    'grids have to be ArrayOfString, ArrayOfIndex or Vector.')
-        self._grids = grids
-
- @gridnames.setter
- def gridnames(self, gridnames):
- self._gridnames = return_if_arts_type(gridnames, 'ArrayOfString')
-
- @data.setter
- def data(self, data):
- data_type = get_arts_typename(np.ndarray([0] * self.dimension))
- self._data = return_if_arts_type(data, data_type)
-
- @dataname.setter
- def dataname(self, dataname):
- self._dataname = return_if_arts_type(dataname, 'String')
-
- @name.setter
- def name(self, name):
- self._name = return_if_arts_type(name, 'String')
-
- def check_dimension(self):
- """Checks the consistency of grids and data.
-
-        This function checks whether the dimensions defined by the grids fit
-        the dimension of the stored data.
-        It also checks whether the number of gridnames matches the number of
-        grids.
-
- Note:
- This check is done automatically before storing and after loading
- XML files.
-
- Returns:
- True if successful.
-
- Raises:
- Exception: if number of grids does not fit
- the GriddedField dimension.
- Exception: if number of gridnames does not fit
- the number of grids.
- Exception: if data dimension does not fit the grid dimensions.
- Warning: if a dimension is empty.
-
- """
- # define error messages
- grid_dim_error = (('The number of grids has to fit the dimension '
- 'of the GriddedField.\nThe dimension is {0} '
- 'but {1} grids were passed.')
- .format(self.dimension, len(self.grids)))
-
- # number of grids has to match the GriddedField dimension
- if len(self.grids) != self.dimension:
- raise Exception(grid_dim_error)
-
- # if grids are named, each grid has to be named
- if self.gridnames is None:
- self.gridnames = [''] * self.dimension
-
- grid_name_error = (('The number of gridnames has to fit the '
- 'dimension of the GriddedField.\nThe dimension'
- ' is {0} but {1} gridnames were passed.')
- .format(self.dimension, len(self.gridnames)))
-
- if len(self.gridnames) != self.dimension:
- raise Exception(grid_name_error)
-
- # grid and data dimension have to fit
- g_dim = [np.size(g) if np.size(g) > 0 else 1 for g in self.grids]
-
- if tuple(g_dim) != self.data.shape:
- raise Exception(('Dimension mismatch between data and grids. '
- 'Grid dimension is {0} but data {1}')
- .format(tuple(g_dim), self.data.shape))
-
- return True
-
- def check_atm_fields_compact(self, check_gridnames=False):
- """Checks the consistency of grids and data.
-
- This functions check if the dimensions defined by the grids fit to the
- dimension of the passed data.
- Also check if the number of gridnames fits the number of grids.
-
- Note:
- The last three gridnames are expected to be 'Pressure', 'Latitude'
- and 'Longitude'. This is more strict than the ARTS checks, but good
- behavior.
-
- Returns:
- True if successful.
-
- Raises:
- Exception:
- - If not GriddedField4.
- - If the pressure grid is not decreasing.
-
- """
- if self.dimension != 4:
- raise Exception('atm_fields_compact have to be GriddedField4.')
-
- if check_gridnames:
- if self.gridnames[1] != 'Pressure':
- err = "Second grid has to be 'Pressure' not '{}'."
- raise Exception(err.format(self.gridnames[1]))
- elif self.gridnames[2] != 'Latitude':
- err = "Third grid has to be 'Latitude' not '{}'."
- raise Exception(err.format(self.gridnames[2]))
- elif self.gridnames[3] != 'Longitude':
- err = "Fourth grid has to be 'Longitude' not '{}'."
- raise Exception(err.format(self.gridnames[3]))
-
- if not get_arts_typename(self.grids[0]) == 'ArrayOfString':
- raise Exception('First grid has to be ArrayOfString.')
-
- if not all(np.diff(self.grids[1]) < 0):
- raise Exception('Pressure grid has to be strictly decreasing.')
-
- return True
-
- def copy(self):
- """Return a deepcopy of the GriddedField."""
- return copy.deepcopy(self)
-
- def extract_slice(self, s=slice(None), axis=0):
- """Return a new GriddedField containing a slice of the current one.
-
- Parameters:
- s (slice): Slice.
- axis (int): Axis to slice along.
-
- Returns:
- :class:`typhon.arts.griddedfield._GriddedField`:
- GriddedField containing sliced grids and data.
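-
-        Examples:
-            A sketch keeping the first five grid points (grid values are
-            illustrative):
-
-            >>> gf = GriddedField1(grids=[np.arange(10.)], data=np.arange(10.))
-            >>> gf.extract_slice(slice(0, 5)).shape
-            (5,)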
- """
- gf = self.copy()
- gf.grids[axis] = gf.grids[axis][s]
- slices = [slice(None)] * self.dimension
- slices[axis] = s
- gf.data = gf.data[tuple(slices)]
-
- return gf
-
- def refine_grid(self, new_grid, axis=0, fun=np.array, **kwargs):
- """Interpolate GriddedField axis to a new grid.
-
-        This function replaces a grid of a GriddedField and interpolates all
- data to match the new coordinates. :func:`scipy.interpolate.interp1d`
- is used for interpolation.
-
- Parameters:
- new_grid (ndarray): The coordinates of the interpolated values.
- axis (int): Specifies the axis of data along which to interpolate.
- Interpolation defaults to the first axis of the GriddedField.
- fun (numpy.ufunc, or similar): Function to apply to grid before
- interpolation. Suggested values: np.array, np.log10, np.log
- **kwargs:
- Keyword arguments passed to :func:`scipy.interpolate.interp1d`.
-
-        Returns: :class:`typhon.arts.griddedfield._GriddedField`
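-
-        Examples:
-            A linear-refinement sketch with illustrative values:
-
-            >>> gf = GriddedField1(grids=[np.array([0., 1.])], data=np.array([0., 2.]))
-            >>> _ = gf.refine_grid(np.array([0., 0.5, 1.]))
-            >>> gf.data
-            array([0., 1., 2.])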
-
- """
- if len(self.grids[axis]) > 1:
- f = interpolate.interp1d(fun(self.grids[axis]), self.data,
- axis=axis, **kwargs)
- self.grids[axis] = new_grid
- self.data = f(fun(new_grid))
- else: # if the intention is to create a useful TensorX
- self.data = self.data.repeat(len(new_grid), axis=axis)
- self.grids[axis] = new_grid
-
- self.check_dimension()
-
- return self
-
- def get(self, key, default=None, keep_dims=True):
- """Return data from field with given fieldname.
-
- Notes:
- This method only works, if the first grid
- is an :arts:`ArrayOfString`.
-
- Parameters:
- key (str): Name of the field to extract.
- default: Default value, if ``key`` is not found.
- keep_dims (bool): If ``False``, empty dimensions are squeezed
- before the extracted array is returned.
-
- Returns:
- ndarray: Extracted ndarray.
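-
-        Examples:
-            A sketch with a string-labelled first grid (field names are
-            illustrative):
-
-            >>> gf = GriddedField1(grids=[['T', 'z']], data=np.array([250., 5.]))
-            >>> gf.get('T')
-            array([250.])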
- """
- # The first grid has to be an ArrayOfString.
- if not get_arts_typename(self.grids[0]) == 'ArrayOfString':
- raise TypeError(
- 'Method only works, if the first grid is an "ArrayOfString"')
-
- # If the GriddedField is empty or the given fieldname is not found,
- # return the default value.
- if self.grids is None or key not in self.grids[0]:
- return default
-
- # Find the index of given fieldname in the name grid and return the
- # ndarray at that position.
- field = self.data[[self.grids[0].index(key)]]
-
- # Squeeze empty dimensions, if ``keep_dims`` is ``False``.
- return field if keep_dims else field.squeeze()
-
- def set(self, key, data):
- """Assign data to field with given fieldname.
-
- Notes:
- This method only works, if the first grid
- is an :arts:`ArrayOfString`.
-
- Parameters:
- key (str): Name of the field to extract.
- data (ndarray): Data array.
- """
- if not get_arts_typename(self.grids[0]) == 'ArrayOfString':
- raise TypeError(
- 'Method only works, if the first grid is an "ArrayOfString"')
-
- self.data[[self.grids[0].index(key)]] = data
-
- def scale(self, key, factor, dtype=float):
- """Scale data stored in field with given fieldname.
-
- Notes:
- This method only works, if the first grid
- is an :arts:`ArrayOfString`.
-
- Parameters:
- key (str): Name of the field to scale.
- factor (float or ndarray): Scale factor.
- dtype (type): Data type used for typecasting. If the original
- dtype of ``GriddedField.data`` is ``int``, the data array
- gets typecasted to prevent messy behaviour when assigning
- scaled values.
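-
-        Examples:
-            A sketch (assumes the GriddedField stores a field named 'H2O'):
-
-            >>> gf.scale('H2O', 2.0)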
- """
- if issubclass(self.data.dtype.type, numbers.Integral):
- # Typecast integer data arrays to prevent unwanted typecast when
- # assigning scaled (float) variables back to the (integer) ndarray.
- self.data = self.data.astype(dtype)
-
- self.set(key, self.get(key) * factor)
-
- def add(self, key, offset, dtype=float):
- """Add offset to data stored in field with given fieldname.
-
- Notes:
- This method only works, if the first grid
- is an :arts:`ArrayOfString`.
-
- Parameters:
- key (str): Name of the field to offset.
- offset (float or ndarray): Offset.
- dtype (type): Data type used for typecasting. If the original
- dtype of ``GriddedField.data`` is ``int``, the data array
- gets typecasted to prevent messy behaviour when assigning
- scaled values.
- """
- if issubclass(self.data.dtype.type, numbers.Integral):
- # Typecast integer data arrays to prevent unwanted typecast when
- # assigning scaled (float) variables back to the (integer) ndarray.
- self.data = self.data.astype(dtype)
-
- self.set(key, self.get(key) + offset)
-
- def to_dict(self):
- """Convert GriddedField to dictionary.
-
- Converts a GriddedField object into a classic Python dictionary. The
- gridname is used as dictionary key. If the grid is unnamed the key is
- generated automatically ('grid1', 'grid2', ...). The data can be
- accessed through the 'data' key.
-
- Returns:
- Dictionary containing the grids and data.
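-
-        Examples:
-            Unnamed grids get automatic keys (a minimal sketch):
-
-            >>> gf = GriddedField1(grids=[np.arange(2.)], data=np.arange(2.))
-            >>> sorted(gf.to_dict())
-            ['data', 'grid1']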
-
- """
- grids, gridnames = self.grids, self.gridnames
-
- if gridnames is None:
- gridnames = ['grid%d' % n for n in range(1, self.dimension + 1)]
-
- for n, name in enumerate(gridnames):
- if name == '':
- gridnames[n] = 'grid%d' % (n + 1)
-
- d = {name: grid for name, grid in zip(gridnames, grids)}
-
- if self.dataname is not None:
- d[self.dataname] = self.data
- else:
- d['data'] = self.data
-
- return d
-
- def to_xarray(self):
- """Convert GriddedField to xarray.DataArray object.
-
- Convert a GriddedField object into a :func:`xarray.DataArray`
- object. The dataname is used as the DataArray name.
-
- Returns:
- xarray.DataArray object corresponding to gridded field
- """
-
- da = xarray.DataArray(self.data)
-        da = da.rename(dict((k, v)
-                            for (k, v) in zip(da.dims, self.gridnames)
-                            if v != ""))
-        da = da.assign_coords(
-            **{name: coor
-               for (name, coor) in zip(da.dims, self.grids)
-               if len(coor) > 0})
- if self.name is not None:
- da.name = self.name
- da.attrs['data_name'] = self.dataname
- return da
-
- @classmethod
- def from_nc(cls, inputfile, variable, fill_value=np.nan):
- """Create GriddedField from variable in netCDF files.
-
- Extract a given variable from a netCDF file. The data and its
- dimensions are returned as a :class:`GriddedField` object.
-
- Parameters:
- inputfile (str): Path to netCDF file.
- variable (str): Variable key of variable to extract.
- fill_value (float): Fill value for masked areas (default: np.nan).
-
- Returns:
- GriddedField object of sufficient dimension.
-
- Raises:
- Exception: If the variable key can't be found in the netCDF file.
-
- """
- with netCDF4.Dataset(inputfile) as nc:
- if variable not in nc.variables:
- raise Exception('netCDF file has no variable {}.'.format(variable))
-
- data = nc.variables[variable]
-
- obj = cls()
- obj.grids = [nc.variables[dim][:] for dim in data.dimensions]
- obj.gridnames = [dim for dim in data.dimensions]
-
- if isinstance(data[:], np.ma.MaskedArray):
- obj.data = data[:].filled(fill_value=fill_value)
- else:
- obj.data = data[:]
-
- obj.check_dimension()
-
- return obj
-
- @classmethod
- def from_xml(cls, xmlelement):
- """Load a GriddedField from an ARTS XML file.
-
- Returns:
- GriddedField. Dimension depends on data in file.
-
- """
- obj = cls()
-
- if 'name' in xmlelement.attrib:
- obj.name = xmlelement.attrib['name']
-
- obj.grids = [x.value() for x in xmlelement[:-1]]
- obj.gridnames = [x.attrib['name']
- if 'name' in x.attrib else ''
- for x in xmlelement[:-1]]
-
- # Read data (and optional dataname).
- obj.data = xmlelement[-1].value()
- if 'name' in xmlelement[-1].attrib:
- obj.dataname = xmlelement[-1].attrib['name']
-
- obj.check_dimension()
- return obj
-
- @classmethod
- def from_xarray(cls, da):
- """Create GriddedField from a xarray.DataArray object.
-
- The data and its dimensions are returned as a :class:`GriddedField` object.
- The DataArray name is used as name for the gridded field. If the attribute
- `data_name` is present, it is used as `dataname` on the :class:`GriddedField`.
-
- Parameters:
- da (xarray.DataArray): xarray.DataArray containing the dimensions and data.
-
- Returns:
- GriddedField object.
-
- """
- obj = cls()
- obj.grids = [da[c].values for c in da.dims]
- obj.gridnames = list(da.dims)
- obj.data = da.values
- obj.dataname = da.attrs.get('data_name', 'Data')
- if da.name:
- obj.name = da.name
- obj.check_dimension()
- return obj
-
- def to_atmlab_dict(self):
- """Returns a copy of the GriddedField as a dictionary.
-
- Returns a dictionary compatible with an atmlab structure.
-
- Returns:
- Dictionary containing the grids and data.
- """
-
- d = {}
- if self.name is None:
- d['name'] = ''
- else:
- d['name'] = self.name
- d['grids'] = self.grids
- d['gridnames'] = self.gridnames
- d['data'] = self.data
- if self.dataname is None:
- d['dataname'] = ''
- else:
- d['dataname'] = self.dataname
-
- return d
-
- def write_xml(self, xmlwriter, attr=None):
- """Save a GriddedField to an ARTS XML file."""
- self.check_dimension()
-
- if attr is None:
- attr = {}
-
- if self.name is not None:
- attr['name'] = self.name
-
- xmlwriter.open_tag('GriddedField{}'.format(self.dimension), attr)
- for grid, name in zip(self.grids, self.gridnames):
- xmlwriter.write_xml(grid, {'name': name})
-
- if self.dataname is None:
- xmlwriter.write_xml(self.data)
- else:
- xmlwriter.write_xml(self.data, {'name': self.dataname})
-
- xmlwriter.close_tag()
-
-
-class GriddedField1(_GriddedField):
- """Implements a :arts:`GriddedField1`."""
-
- def __init__(self, *args, **kwargs):
- super(GriddedField1, self).__init__(1, *args, **kwargs)
-
-
-class GriddedField2(_GriddedField):
- """Implements a :arts:`GriddedField2`."""
-
- def __init__(self, *args, **kwargs):
- super(GriddedField2, self).__init__(2, *args, **kwargs)
-
-
-class GriddedField3(_GriddedField):
- """Implements a :arts:`GriddedField3`."""
-
- def __init__(self, *args, **kwargs):
- super(GriddedField3, self).__init__(3, *args, **kwargs)
-
-
-class GriddedField4(_GriddedField):
- """Implements a :arts:`GriddedField4`."""
-
- def __init__(self, *args, **kwargs):
- super(GriddedField4, self).__init__(4, *args, **kwargs)
-
-
-class GriddedField5(_GriddedField):
- """Implements a :arts:`GriddedField5`."""
-
- def __init__(self, *args, **kwargs):
- super(GriddedField5, self).__init__(5, *args, **kwargs)
-
-
-class GriddedField6(_GriddedField):
- """Implements a :arts:`GriddedField6`."""
-
- def __init__(self, *args, **kwargs):
- super(GriddedField6, self).__init__(6, *args, **kwargs)
-
-
-def _griddedfield_from_ndim(ndim):
- """Determine proper GriddedField type from number of dimensions."""
-    griddedfield_dimension_map = {
- 1: GriddedField1,
- 2: GriddedField2,
- 3: GriddedField3,
- 4: GriddedField4,
- 5: GriddedField5,
- 6: GriddedField6,
- }
-    return griddedfield_dimension_map[ndim]
-
-
-def griddedfield_from_netcdf(ncfile, **kwargs):
- """Create an ARTS ``GriddedField`` of appropriate dimension from netCDF.
-
- Parameters:
- ncfile (str): Path to netCDF file.
- **kwargs: Additional keyword arguments are passed
- to the :func:`~GriddedField1.from_nc` method.
-
- Returns:
- : Appropriate ARTS GriddedField.
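-
-    Example (a sketch; ``field.nc`` is a hypothetical file whose root group
-    provides the dimension count used above):
-
-    >>> gf = griddedfield_from_netcdf('field.nc', variable='t')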
- """
- with netCDF4.Dataset(ncfile) as root:
- cls = _griddedfield_from_ndim(root.ndim)
-
- return cls.from_nc(ncfile, **kwargs)
-
-
-def griddedfield_from_xarray(dataarray):
- """Convert :class:`xarray.DataArray` to ARTS ``GriddedField``.
-
- Parameters:
- dataarray (:class:`xarray.DataArray`): :class:`~xarray.DataArray`
- containing dimensions and data.
-
- Returns:
- : Appropriate ARTS GriddedField.
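-
-    Example (a minimal sketch; ``np`` and ``xarray`` as imported above):
-
-    >>> da = xarray.DataArray(np.ones((2, 3)), dims=('x', 'y'),
-    ...                       coords={'x': np.arange(2.), 'y': np.arange(3.)})
-    >>> griddedfield_from_xarray(da).dimension
-    2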
- """
- return _griddedfield_from_ndim(dataarray.ndim).from_xarray(dataarray)
diff --git a/typhon/arts/internals.py b/typhon/arts/internals.py
deleted file mode 100644
index 2fc9db0b..00000000
--- a/typhon/arts/internals.py
+++ /dev/null
@@ -1,1890 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-Implementation of classes to handle various ARTS internal structures.
-
-"""
-
-
-import typhon.constants as constants
-import typhon.spectroscopy as spectroscopy
-import typhon
-
-import numpy as np
-import scipy.interpolate as _ip
-from scipy.special import wofz as _Faddeeva_
-from fractions import Fraction as _R
-from numpy.polynomial import Polynomial as _P
-
-import os
-
-__all__ = ['ARTSCAT5',
- 'Rational',
- 'PressureBroadening',
- 'LineMixing',
- 'PartitionFunctions',
- 'LineFunctionsData',
- 'read_hitran_online'
- ]
-
-def read_hitran_online(hitran_file, fmin=0, fmax=float('inf'),
- hit_tmp='HITRAN2012.par', remove_hit_tmp=True,
- reset_qn=True):
- """ Reads catalog from HITRAN online
-
-    This function is meant for specific input, so failure is an option. This
-    code might also break in the future should HITRAN update their format
-    again.
-
-    The format has to be [.par line, qns', qns''] in that order.
-
- There is a temporary file generated. This must be named so that the ARTS
- function abs_linesReadFromHitran can read the file. After the temporary
- file has been read by ARTS, the catalog is accessed for each line to set
- their quantum numbers of qns' and qns''. Note that these are added as
- additions to existing numbers. Caveats follow.
-
-    The following quantum numbers are ignored at present:
- kronigParity
-
- ElecStateLabel
-
- The following are reinterpreted for ARTS:
- v to v1
-
- nuclearSpinRef to F
-
- The following are given values despite having none:
- parity=- is set as -1
-
- parity=+ is set as +1
-
-    All other variables are written verbatim. This means that if ARTS does
-    not have the quantum number keys prescribed, the catalog that is
-    produced will not be readable by ARTS. Take care!
-
- Input:
- hitran_file:
- a file generated from hitran.org
-
- fmin:
- fmin in abs_linesReadFromHitran
-
- fmax:
- fmax in abs_linesReadFromHitran
-
- hit_tmp:
- temporary filename for abs_linesReadFromHitran
-
- remove_hit_tmp:
- Flag to remove or not the file hit_tmp using os.remove
-
- Output:
- ARTSCAT5 catalog of lines
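-
-    Example (a sketch; 'co2.txt' is a hypothetical hitran.org download in
-    the [.par line, qns', qns''] format described above):
-
-    >>> cat = read_hitran_online('co2.txt', fmin=1e9, fmax=3e12)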
- """
-    assert hitran_file != hit_tmp, "Must have separate files"
-
- # setup a quiet workspace
- from typhon.arts.workspace import Workspace
- arts = Workspace(verbosity=0)
-
- # read a file from hitran of format [par, qn upper, qn lower]
- f = open(hitran_file, 'r')
- par_data = ''
- up_data = []
- lo_data = []
- line = f.readline().split('\t')
- while len(line) > 2:
- par_data += line[0]
- up_data.append(line[1].split(';'))
- lo_data.append(line[2].split(';'))
-        lo_data[-1][-1] = lo_data[-1][-1].replace('\n', '')
- line = f.readline().split('\t')
- if len(line) > 2:
- par_data += '\n'
- f.close()
-
- # Create a temporary file and read it into arts
- f = open(hit_tmp, 'w')
- f.write(par_data)
- f.close()
- arts.abs_linesReadFromHitran(filename=hit_tmp, fmin=float(fmin),
- fmax=float(fmax))
-
- # delete previous file
- if remove_hit_tmp:
- os.remove(hit_tmp)
-
- # replace quantum numbers by
- arts.abs_linesNewestArtscatFromLegacyCatalog()
- cat = arts.abs_lines.value.as_ARTSCAT5()
-
- for i in range(len(cat)):
- if reset_qn:
-            cat._dictionaries[i]['QN'] = typhon.arts.catalogues.QuantumNumberRecord(
-                typhon.arts.catalogues.QuantumNumbers(''),
-                typhon.arts.catalogues.QuantumNumbers(''))
- for qn in up_data[i]:
- key, data = qn.split('=')
- if 'ElecStateLabel' in key:
- pass
- elif 'nuclearSpinRef' in key:
- cat.quantumnumbers(i)['UP']['F'] = Rational(data)
- elif 'kronigParity' in key:
- pass
- elif 'parity' in key:
- if data == '-':
- cat.quantumnumbers(i)['UP'][key] = Rational(-1)
- elif data == '+':
- cat.quantumnumbers(i)['UP'][key] = Rational(+1)
- elif 'v' == key:
- cat.quantumnumbers(i)['UP']['v1'] = Rational(data)
- else:
- cat.quantumnumbers(i)['UP'][key] = Rational(data)
- for qn in lo_data[i]:
- key, data = qn.split('=')
- if 'ElecStateLabel' in key:
- pass
-            elif 'nuclearSpinRef' in key:
- cat.quantumnumbers(i)['LO']['F'] = Rational(data)
- elif 'kronigParity' in key:
- pass
- elif 'parity' in key:
- if data == '-':
- cat.quantumnumbers(i)['LO'][key] = Rational(-1)
- elif data == '+':
- cat.quantumnumbers(i)['LO'][key] = Rational(+1)
- elif 'v' == key:
- cat.quantumnumbers(i)['LO']['v1'] = Rational(data)
- else:
- cat.quantumnumbers(i)['LO'][key] = Rational(data)
- return cat
-
-
-class LineFunctionsData:
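-    """Container for ARTSCAT-5 line function data.
-
-    Holds the line shape (LS) and line mixing (LM) tags together with the
-    per-species parameter records parsed by read_as_part_of_artscat5.
-    """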
- def __init__(self):
- self.LS = None
- self.LM = None
- self.species = None
- self.data = None
-
- @staticmethod
- def len_of_key(key):
- if key in ["LM_AER"]:
- return 12
- elif key in ["#"]:
- return 0
- elif key in ["T0"]:
- return 1
- elif key in ["T1", "T3", "T5"]:
- return 2
- elif key in ["T2", "T4"]:
- return 3
-
- def fill_data(self, array, ndata, start=0):
- pos = 1*start
- i = 0
- while i < self.species:
- self.data[i]['spec'] = array[pos]
- pos += 1
- j = 0
- while j < ndata:
- self.data[i]['data'][j]['key'] = array[pos]
- pos += 1
- for k in range(self.len_of_key(self.data[i]['data'][j]['key'])):
- self.data[i]['data'][j]['val'].append(array[pos])
- pos += 1
- j += 1
- i += 1
- return pos
-
- def read_as_part_of_artscat5(self, array, i):
- self.LS = array[i+0]
- self.LM = array[i+1]
- self.species = int(array[i+2])
- j = i + 3
- return self.fill_data(array, self.set_data_shape(), j)
-
- def len_of_data(self):
- if self.LS in ['DP']:
- shape_len = 0
- elif self.LS in ['LP', 'VP']:
- shape_len = 2
- elif self.LS in ['SDVP']:
- shape_len = 4
- elif self.LS in ['HTP']:
- shape_len = 6
- else:
- raise RuntimeError(f'Unknown LS value: {self.LS}')
- if self.LM in ['#']:
- mixing_len = 0
- elif self.LM in ['LM1', 'INT', 'ConstG']:
- mixing_len = 1
- elif self.LM in ['LM2']:
- mixing_len = 3
- else:
- raise RuntimeError(f'Unknown LM value: {self.LM}')
- return mixing_len + shape_len
-
- def set_data_shape(self):
- ndata = self.len_of_data()
-
- self.data = {}
- i = 0
- while i < self.species:
- self.data[i] = {'spec': None, 'data': {}}
- j = 0
- while j < ndata:
- self.data[i]['data'][j] = {'key': None, 'val': []}
- j += 1
- i += 1
- return ndata
-
- def __repr__(self):
- ndata = self.len_of_data()
- st = ''
- st += self.LS + ' ' + self.LM + ' ' + str(self.species) + ' '
- for x in range(self.species):
- st += self.data[x]['spec'] + ' '
- for y in range(ndata):
- st += self.data[x]['data'][y]['key'] + ' '
- for z in self.data[x]['data'][y]['val']:
- st += z + ' '
- return st
-    __str__ = __repr__
-
-
-class ARTSCAT5:
- """Class to contain ARTSCAT entries that can be accessed and manipulated
-
- Access this data as
- (N, I, F0, S0, T0, E0, A, GL, GU, PB, QN, LM) = ARTSCAT5[line_nr],
- where N is the name of the species, I is the AFGL isotopological code,
- F0 is the central frequency, S0 is the line strength at temperature T0,
- E0 is the lower energy state, A is the einstein coefficient, GL is the
- lower population constant, GU is the upper population constant, PB
- is a dictionary of the pressurebroadening scheme, QN is a
- QuantumNumberRecord, and LM is a line-mixing dictionary. The
- dictionaries have keys corresponding to the ARTSCAT tags. line_nr is
- an index absolutely less than len(self)
-
- Note: Must be ARTSCAT5 line type or this will leave the class data
- in disarray.
-
- Future tech debt 1: The reading of tagged data will fail if major tags
- ever match minor tags (ex. PB is major, N2 is minor for PB, if LM ever
- gets the PB minor tag, then the method below will fail).
-
-    Future tech debt 2: To add a version number and make the class general:
-    ARTSCAT-3 only has the minor tag N2 for line mixing and ARTSCAT-4 has
-    AP. These tags are, however, implicit and not written.
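-
-    Examples:
-        Load a catalog and unpack the first line (a sketch; 'O2.xml' is the
-        same hypothetical file used in the method examples below):
-
-        >>> cat = typhon.arts.xml.load('O2.xml').as_ARTSCAT5()
-        >>> spec, afgl, f0, *rest = cat[0]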
- """
-
- _spec_ind = 0
- _iso_ind = 1
- _freq_ind = 2
- _str_ind = 3
- _t0_ind = 4
- _elow_ind = 5
- _ein_ind = 6
- _glow_ind = 7
- _gupp_ind = 8
- _pb_ind = 9
- _qn_ind = 10
- _lm_ind = 11
- _ze_ind = 12
- _lf_ind = 13
- _lsm_ind = 14
-
- def __init__(self, init_data=None):
- self._dictionaries = np.array([], dtype=dict)
- self._n = 0
- self.LineRecordData = {
- 'freq': np.array([]),
- 'afgl': np.array([], dtype='int'),
- 'str': np.array([]),
- 'glow': np.array([]),
- 'gupp': np.array([]),
- 'elow': np.array([]),
- 'spec': np.array([], dtype='str'),
- 'ein': np.array([]),
- 't0': np.array([])}
-
- if init_data is None:
- return
-
- self.append(init_data, sort=False)
-
- def _append_linestr_(self, linerecord_str):
- """Takes an arts-xml catalog string and appends info to the class data
- """
- lr = linerecord_str.split()
- len_lr = len(lr)
- if len_lr == 0:
- return
- assert len_lr > 9, "Cannot recognize line data"
-
- self._dictionaries = np.append(self._dictionaries,
- {"QN": QuantumNumberRecord(),
- "PB": {"Type": None,
- "Data": np.array([])},
- "LM": {"Type": None,
- "Data": np.array([])},
- "LF": LineFunctionsData(),
- "ZE": None,
- "LSM": {}})
-
- spec = lr[1].split('-')
- self.LineRecordData['spec'] = np.append(self.LineRecordData['spec'],
- spec[self._spec_ind])
- self.LineRecordData['afgl'] = np.append(self.LineRecordData['afgl'],
- int(spec[self._iso_ind]))
- self.LineRecordData['freq'] = np.append(self.LineRecordData['freq'],
- float(lr[self._freq_ind]))
- self.LineRecordData['str'] = np.append(self.LineRecordData['str'],
- float(lr[self._str_ind]))
- self.LineRecordData['t0'] = np.append(self.LineRecordData['t0'],
- float(lr[self._t0_ind]))
- self.LineRecordData['elow'] = np.append(self.LineRecordData['elow'],
- float(lr[self._elow_ind]))
- self.LineRecordData['ein'] = np.append(self.LineRecordData['ein'],
- float(lr[self._ein_ind]))
- self.LineRecordData['glow'] = np.append(self.LineRecordData['glow'],
- float(lr[self._glow_ind]))
- self.LineRecordData['gupp'] = np.append(self.LineRecordData['gupp'],
- float(lr[self._gupp_ind]))
- self._n += 1
-
- key = lr[9]
- i = 10
- qnr = ''
- ze = {"POL": None}
- while i < len_lr:
- this = lr[i]
- if this in ['QN', 'PB', 'LM', 'ZE', 'LSM', 'LF']:
- key = this
- elif key == 'QN':
- qnr += ' ' + this
- elif key == 'ZE':
- ze = {"POL": lr[i], "GU": float(lr[i+1]),
- "GL": float(lr[i+2])}
- i += 2
- elif key == 'LSM':
- x = int(lr[i])
- i += 1
-                for _ in range(x):
- self._dictionaries[-1]['LSM'][lr[i]] = lr[i+1]
- i += 2
- elif key == 'LF':
- i=self._dictionaries[-1]['LF'].read_as_part_of_artscat5(lr, i)-1
- else:
- try:
- self._dictionaries[-1][key]["Data"] = \
- np.append(self._dictionaries[-1][key]["Data"],
- float(this))
-                except ValueError:
- self._dictionaries[-1][key]["Type"] = this
- i += 1
- self._dictionaries[-1]['QN'] = QuantumNumberRecord.from_str(qnr)
- self._dictionaries[-1]['LM'] = LineMixing(self._dictionaries[-1]['LM'])
- self._dictionaries[-1]['PB'] = \
- PressureBroadening(self._dictionaries[-1]['PB'])
- self._dictionaries[-1]['ZE'] = ze
-
- def _append_line_(self, line):
- """Appends a line from data
- """
- self.LineRecordData['spec'] = np.append(self.LineRecordData['spec'],
- str(line[self._spec_ind]))
- self.LineRecordData['afgl'] = np.append(self.LineRecordData['afgl'],
- int(line[self._iso_ind]))
- self.LineRecordData['freq'] = np.append(self.LineRecordData['freq'],
- line[self._freq_ind])
- self.LineRecordData['str'] = np.append(self.LineRecordData['str'],
- line[self._str_ind])
- self.LineRecordData['t0'] = np.append(self.LineRecordData['t0'],
- line[self._t0_ind])
- self.LineRecordData['elow'] = np.append(self.LineRecordData['elow'],
- line[self._elow_ind])
- self.LineRecordData['ein'] = np.append(self.LineRecordData['ein'],
- line[self._ein_ind])
- self.LineRecordData['glow'] = np.append(self.LineRecordData['glow'],
- line[self._glow_ind])
- self.LineRecordData['gupp'] = np.append(self.LineRecordData['gupp'],
- line[self._gupp_ind])
- self._dictionaries = np.append(self._dictionaries,
- {'PB': line[self._pb_ind],
- 'QN': line[self._qn_ind],
- 'LM': line[self._lm_ind],
- 'ZE': line[self._ze_ind],
- 'LF': line[self._lf_ind],
- 'LSM': line[self._lsm_ind]})
- self._n += 1
-
- @property
- def F0(self):
- return self.LineRecordData['freq']
-
- @F0.setter
- def F0(self, nums):
- self.LineRecordData['freq'] = nums
-
- @property
- def S0(self):
- return self.LineRecordData['str']
-
- @S0.setter
- def S0(self, nums):
- self.LineRecordData['str'] = nums
-
- @property
- def Species(self):
- return self.LineRecordData['spec']
-
- @property
- def Iso(self):
- return self.LineRecordData['afgl']
-
- @property
- def T0(self):
- return self.LineRecordData['t0']
-
- @property
- def A(self):
- return self.LineRecordData['ein']
-
- @property
- def g00(self):
- return self.LineRecordData['gupp']
-
- @property
- def g0(self):
- return self.LineRecordData['glow']
-
- @property
- def E0(self):
- return self.LineRecordData['elow']
-
- def _append_ArrayOfLineRecord_(self, array_of_linerecord):
- """Appends lines in ArrayOfLineRecord to ARTSCAT5
- """
- assert array_of_linerecord.version == 'ARTSCAT-5', "Only for ARTSCAT-5"
- for l in array_of_linerecord:
- self._append_linestr_(l)
-
- def _append_ARTSCAT5_(self, artscat5):
- """Appends all the lines of another artscat5 to this
- """
- for line in artscat5:
- self._append_line_(line)
-
- def set_testline(self, i_know_what_i_am_doing=False):
-        assert i_know_what_i_am_doing
- self._n = 1
- self.LineRecordData = {
- 'freq': np.array([100e9], dtype='float'),
- 'afgl': np.array([626], dtype='int'),
- 'str': np.array([1], dtype='float'),
- 'glow': np.array([0], dtype='float'),
- 'gupp': np.array([3], dtype='float'),
- 'elow': np.array([0], dtype='float'),
- 'spec': np.array(['CO2'], dtype='str'),
- 'ein': np.array([1], dtype='float'),
- 't0': np.array([300], dtype='float')}
- self._dictionaries = np.array([
- {"QN": QuantumNumberRecord(as_quantumnumbers("J 1"),
- as_quantumnumbers("J 0")),
- "PB": PressureBroadening([10e3, 0.8, 20e3, 0.8, 1e3,
- -1, -1, -1, -1, -1]),
- "LM": LineMixing([300, 1e-10, 0.8])}])
-
- def append(self, other, sort=True):
- """Appends data to ARTSCAT5. Used at initialization
-
- Parameters:
- other (str, ARTSCAT5, ArrayOfLineRecord, tuple): Data to append,
- Must fit with internal structures. Easiest to guarantee if other
- is another ARTSCAT5 or an ArrayOfLineRecord containing ARTSCAT-5
- data.
-
- sort: Sorts the lines by frequency if True
- """
- if type(other) is str:
- self._append_linestr_(other)
- elif type(other) is ARTSCAT5:
- self._append_ARTSCAT5_(other)
- elif type(other) is tuple: # For lines --- this easily fails
- self._append_line_(other)
- elif type(other) is ArrayOfLineRecord:
- self._append_ArrayOfLineRecord_(other)
- elif type(other) in [list, np.ndarray]:
- for x in other:
- self.append(x)
- else:
- assert False, "Unknown type"
- self._assert_sanity_()
-
- if sort:
- self.sort()
-
- def sort(self, kind='freq', ascending=True):
- """Sorts the ARTSCAT5 data by kind. Set ascending to False for
- descending sorting
-
- Parameters:
- kind (str): The key to LineRecordData
-
- ascending (bool): True sorts ascending, False sorts descending
-
- Examples:
-            Sort by descending frequency
-
- >>> cat = typhon.arts.xml.load('C2H2.xml').as_ARTSCAT5()
- >>> cat.LineRecordData['freq']
- array([ 5.94503434e+10, 1.18899907e+11, 1.78347792e+11, ...,
- 2.25166734e+12, 2.31051492e+12, 2.36933091e+12])
- >>> cat.sort(ascending=False)
- >>> cat.LineRecordData['freq']
- array([ 2.36933091e+12, 2.31051492e+12, 2.25166734e+12, ...,
- 1.78347792e+11, 1.18899907e+11, 5.94503434e+10])
-
- Sort by line strength
-
- >>> cat = typhon.arts.xml.load('C2H2.xml').as_ARTSCAT5()
- >>> cat.LineRecordData['str']
- array([ 9.02281290e-21, 7.11410308e-20, 2.34380510e-19, ...,
- 4.77325112e-19, 3.56443438e-19, 2.63222798e-19])
- >>> cat.sort(kind='str')
- >>> cat.LineRecordData['str']
- array([ 9.02281290e-21, 7.11410308e-20, 2.34380510e-19, ...,
- 1.09266008e-17, 1.10644138e-17, 1.10939452e-17])
-
- """
- assert kind in self.LineRecordData, "kind must be in LineRecordData"
-
- i = np.argsort(self.LineRecordData[kind])
- if not ascending:
- i = i[::-1]
-
- for key in self.LineRecordData:
- self.LineRecordData[key] = self.LineRecordData[key][i]
- self._dictionaries = self._dictionaries[i]
-
- def remove(self, spec=None, afgl=None,
- upper_limit=None, lower_limit=None, kind='freq'):
- """Removes lines not within limits of kind
-
- This loops over all lines in self and only keeps those fulfilling
-
- .. math::
- l \\leq x \\leq u,
-
- where l is a lower limit, u is an upper limit, and x is a parameter
- in self.LineRecordData
-
- If spec and/or afgl are given, then the species and the afgl code of
- the line must match as well as the limit critera in order for the line
- to be removed
-
- Parameters:
- spec (str): Species string
-
- afgl (int): AFGL isotopologue code
-
- upper_limit (float): value to use for upper limit [-]
-
- lower_limit (float): value to use for lower limit [-]
-
- kind (str): keyword for determining x. Must be key in
- self.LineRecordData
-
- Returns:
- None: Only changes self
-
- Examples:
- Remove lines below 1 THz and above 1.5 THz
-
- >>> cat = typhon.arts.xml.load('C2H2.xml').as_ARTSCAT5()
- >>> cat
- ARTSCAT-5 with 40 lines. Species: ['C2H2']
- >>> cat.remove(lower_limit=1000e9, upper_limit=1500e9)
- >>> cat
- ARTSCAT-5 with 9 lines. Species: ['C2H2']
-
- Remove weak lines
-
- >>> cat = typhon.arts.xml.load('C2H2.xml').as_ARTSCAT5()
- >>> cat
- ARTSCAT-5 with 40 lines. Species: ['C2H2']
- >>> cat.remove(lower_limit=1e-18, kind='str')
- >>> cat
- ARTSCAT-5 with 31 lines. Species: ['C2H2']
- """
- assert upper_limit is not None or lower_limit is not None, \
- "Cannot remove lines when the limits are undeclared"
- assert kind in self.LineRecordData, "Needs kind in LineRecordData"
-
- if spec is not None:
- if spec not in self.LineRecordData['spec']:
- return # Nothing to remove
-
- if afgl is not None:
- if afgl not in self.LineRecordData['afgl']:
- return # Nothing to remove
-
- remove_these = []
- for i in range(self._n):
- if spec is not None:
- if spec != self.LineRecordData['spec'][i]:
- continue
- if afgl is not None:
- if afgl != self.LineRecordData['afgl'][i]:
- continue
- if lower_limit is not None:
- if self.LineRecordData[kind][i] < lower_limit:
- remove_these.append(i)
- continue
- if upper_limit is not None:
- if self.LineRecordData[kind][i] > upper_limit:
- remove_these.append(i)
- continue
-
- for i in remove_these[::-1]:
- self.remove_line(i)
-
- def __repr__(self):
- return "ARTSCAT-5 with " + str(self._n) + " lines. Species: " + \
- str(np.unique(self.LineRecordData['spec']))
-
- def __len__(self):
- return self._n
-
- def _assert_sanity_(self):
- """Helper to assert that the data is good
- """
- assert self._n == len(self.LineRecordData['freq']) and \
- self._n == len(self.LineRecordData['spec']) and \
- self._n == len(self.LineRecordData['afgl']) and \
- self._n == len(self.LineRecordData['str']) and \
- self._n == len(self.LineRecordData['t0']) and \
- self._n == len(self.LineRecordData['elow']) and \
- self._n == len(self.LineRecordData['ein']) and \
- self._n == len(self.LineRecordData['glow']) and \
- self._n == len(self.LineRecordData['gupp']) and \
- self._n == len(self._dictionaries), \
- self._error_in_length_message_()
-
- def __getitem__(self, index):
- """Returns a single line as tuple --- TODO: create LineRecord class?
- """
- return (self.LineRecordData['spec'][index],
- self.LineRecordData['afgl'][index],
- self.LineRecordData['freq'][index],
- self.LineRecordData['str'][index],
- self.LineRecordData['t0'][index],
- self.LineRecordData['elow'][index],
- self.LineRecordData['ein'][index],
- self.LineRecordData['glow'][index],
- self.LineRecordData['gupp'][index],
- self.pressurebroadening(index),
- self.quantumnumbers(index),
- self.linemixing(index),
- self.zeemandata(index),
- self.linefunctionsdata(index),
- self.lineshapemodifiers(index))
-
- def get_arts_str(self, index):
- """Returns the arts-xml catalog string for line at index
- """
- l = self[index]
- s = '@ ' + l[self._spec_ind] + '-' + str(l[self._iso_ind])
- s += ' ' + str(l[self._freq_ind])
- s += ' ' + str(l[self._str_ind])
- s += ' ' + str(l[self._t0_ind])
- s += ' ' + str(l[self._elow_ind])
- s += ' ' + str(l[self._ein_ind])
- s += ' ' + str(l[self._glow_ind])
- s += ' ' + str(l[self._gupp_ind])
- text = str(self.pressurebroadening(index))
- if len(text) > 0:
- s += ' PB ' + text
- text = str(self.quantumnumbers(index))
- if len(text) > 0:
- s += ' QN ' + text
- text = str(self.linemixing(index))
- if len(text) > 0:
- s += ' LM ' + text
- if self.zeemandata(index)['POL']:
- s += ' ZE ' + str(self.zeemandata(index)['POL']) + ' '
- s += str(self.zeemandata(index)['GU']) + ' '
- s += str(self.zeemandata(index)['GL'])
- text = str(self.linefunctionsdata(index))
- if len(text) > 0:
- s += ' LF ' + text
-
- if len(self.lineshapemodifiers(index)):
- s += ' LSM ' + str(len(self.lineshapemodifiers(index)))
- for i in self.lineshapemodifiers(index):
- s += ' ' + i + ' ' + str(self.lineshapemodifiers(index)[i])
- return s
-
- def pressurebroadening(self, index):
- """Return pressure broadening entries for line at index
- """
- return self._dictionaries[index]['PB']
-
- def quantumnumbers(self, index):
- """Return quantum number entries for line at index
- """
- return self._dictionaries[index]['QN']
-
- def linemixing(self, index):
- """Return line mixing entries for line at index
- """
- return self._dictionaries[index]['LM']
-
- def zeemandata(self, index):
- """Return Zeeman entries for line at index
- """
- return self._dictionaries[index]['ZE']
-
- def linefunctionsdata(self, index):
- """Return line function entries for line at index
- """
- return self._dictionaries[index]['LF']
-
- def lineshapemodifiers(self, index):
- """Return line mixing entries for line at index
- """
- return self._dictionaries[index]['LSM']
-
-    def _error_in_length_message_(self):
- return "Mis-matching length of vectors/lists storing line information"
-
- def as_ArrayOfLineRecord(self, index=None):
- """Turns ARTSCAT5 into ArrayOfLineRecord that can be stored to file
- """
- out = []
- if index is None:
- for i in range(self._n):
- out.append(self.get_arts_str(i))
- else:
- out.append(self.get_arts_str(index))
- if out == []:
- return ArrayOfLineRecord(data=[''], version='ARTSCAT-5')
- else:
- return ArrayOfLineRecord(data=out, version='ARTSCAT-5')
-
- def changeForQN(self, kind='change', qid=None,
- spec=None, afgl=None, qns=None, information=None):
- """Change information of a line according to identifiers
-
- Input:
- kind (str): kind of operation to be applied, either 'change' for
- overwriting, 'add' for addition (+=), 'sub' for subtraction (-=),
- 'remove' to remove matching lines, or 'keep' to only keep matching
- lines
-
- qid (QuantumIdentifier): Identifier to the transition or energy
- level as defined in ARTS
-
- spec (str or NoneType): Name of species for which the operation
- applies. None means for all species. Must be None if qid is given
-
- afgl (int or NoneType): AFGL isotopologue integer for which the
-            operation applies. None means for all isotopologues. Must be None
- if qid is given
-
- qns (dict, None, QuantumNumberRecord, QuantumNumbers): The quantum
- numbers for which the operation applies. None means all quantum
- numbers. Can be level or transition. Must be None if qid is given
-
- information (dict or NoneType): None for kind 'remove'. dict
- otherwise. Keys in ARTSCAT5.LineRecordData for non-dictionaries.
- Use 'QN' for quantum numbers, 'LM' for line mixing, and 'PB' for
- pressure-broadening. If level QN-key, the data is applied for both
- levels if they match (only for 'QN'-data)
-
- Output:
- None, only changes the class instance itself
-
- Examples:
- Add S = 1 to both levels quantum numbers by adding information to
- all lines
-
- >>> cat = typhon.arts.xml.load('O2.xml').as_ARTSCAT5()
- >>> cat.quantumnumbers(0)
- UP v1 0 J 32 F 61/2 N 32 LO v1 0 J 32 F 59/2 N 32
- >>> cat.changeForQN(information={'QN': {'S': 1}}, kind='add')
- >>> cat.quantumnumbers(0)
- UP S 1 v1 0 J 32 F 61/2 N 32 LO S 1 v1 0 J 32 F 59/2 N 32
-
- Remove all lines not belonging to a specific isotopologue and band
- by giving the band quantum numbers
-
- >>> cat = typhon.arts.xml.load('O2.xml').as_ARTSCAT5()
- >>> cat
- ARTSCAT-5 with 6079 lines. Species: ['O2']
- >>> cat.changeForQN(kind='keep', afgl=66, qns={'LO': {'v1': 0},
- 'UP': {'v1': 0}})
- >>> cat
- ARTSCAT-5 with 187 lines. Species: ['O2']
-
- Change the frequency of the 119 GHz line to 3000 THz by giving a
- full and unique quantum number match
-
- >>> cat = typhon.arts.xml.load('O2.xml').as_ARTSCAT5()
- >>> cat.sort()
- >>> cat.LineRecordData['freq']
- array([ 9.00e+03, 2.35e+04, 4.01e+04, ..., 2.99e+12,
- 2.99e+12, 2.99e+12])
- >>> cat.changeForQN(afgl=66, qns={'LO': {'v1': 0, 'J': 0, 'N': 1},
- 'UP': {'v1': 0, 'J': 1, 'N': 1}},
- information={'freq': 3000e9})
- >>> cat.sort()
- >>> cat.LineRecordData['freq']
- array([ 9.00e+03, 2.35e+04, 4.01e+04, ..., 2.99e+12,
- 2.99e+12, 3.00e+12])
- """
- if qid is not None:
- assert spec is None and afgl is None and qns is None, \
- "Only one of qid or spec, afgl, and qns combinations allowed"
- spec = qid.species
- afgl = qid.afgl
- qns = as_quantumnumbers(qns)
- else:
- qns = as_quantumnumbers(qns)
-
- if kind == 'remove':
- assert information is None, "information not None for 'remove'"
- remove_these = []
- remove = True
- change = False
- add = False
- sub = False
- keep = False
- elif kind == 'change':
- assert type(information) is dict, "information is not dictionary"
- remove = False
- change = True
- add = False
- sub = False
- keep = False
- elif kind == 'add':
- assert type(information) is dict, "information is not dictionary"
- remove = False
- change = False
- add = True
- sub = False
- keep = False
- elif kind == 'sub':
- assert type(information) is dict, "information is not dictionary"
- remove = False
- change = False
- add = False
- sub = True
- keep = False
- elif kind == 'keep':
- assert information is None, "information not None for 'keep'"
- remove_these = []
- remove = False
- change = False
- add = False
- sub = False
- keep = True
- else:
- raise RuntimeError(f'Invalid value for kind: {kind}')
- assert remove or change or add or keep or sub, "Invalid kind"
-
- # Check if the quantum number information is for level or transition
- if type(qns) is QuantumNumberRecord:
- for_transitions = True
- else:
- for_transitions = False
-
- if information is not None:
- for key in information:
- assert key in self.LineRecordData or \
- key in ['QN', 'LM', 'PB'], \
- "Unrecognized key"
-
- # Looping over all the line data
- for i in range(self._n):
-
- # If spec is None, then all species, otherwise this should match
- if spec is not None:
- if not spec == self.LineRecordData['spec'][i]:
- if keep:
- remove_these.append(i)
- continue
-
- # If afgl is None, then all isotopes, otherwise this should match
- if afgl is not None:
- if not afgl == self.LineRecordData['afgl'][i]:
- if keep:
- remove_these.append(i)
- continue
-
- # Test which levels match and which do not --- partial matching
- test = self.quantumnumbers(i) >= qns
- if for_transitions:
- test = [test] # To let all and any work
-
- # Append lines to remove later (so indexing is not messed up)
- if remove and all(test):
- remove_these.append(i)
- continue
- elif keep and not all(test):
- remove_these.append(i)
- continue
- elif keep or remove:
- continue
-
- # Useless to continue past this point if nothing matches
- if not all(test) and for_transitions:
- continue
- elif not any(test):
- continue
-
-            # There should only be matches remaining, but only QN info is
-            # level-based, so all other information must be a perfect match
- for info_key in information:
- info = information[info_key]
- if info_key == 'QN':
- if for_transitions:
- if change:
- self._dictionaries[i]['QN'] = info
- elif add:
- self._dictionaries[i]['QN'] += info
- elif sub:
- self._dictionaries[i]['QN'] -= info
- else:
- assert False, "Programmer error?"
- else:
- if test[0]:
- if change:
- self._dictionaries[i]['QN']['UP'] = info
- elif add:
- self._dictionaries[i]['QN']['UP'] += info
- elif sub:
- self._dictionaries[i]['QN']['UP'] -= info
- else:
- assert False, "Programmer error?"
- if test[1]:
- if change:
- self._dictionaries[i]['QN']['LO'] = info
- elif add:
- self._dictionaries[i]['QN']['LO'] += info
- elif sub:
- self._dictionaries[i]['QN']['LO'] -= info
- else:
- assert False, "Programmer error?"
- elif info_key in ['PB', 'LM']:
- if not all(test):
- continue
- if change:
- self._dictionaries[i][info_key] = info
- elif add:
- assert info.kind == \
- self._dictionaries[i][info_key].kind, \
- "Can only add to matching type"
- self._dictionaries[i][info_key].data += info
- elif sub:
- assert info.kind == \
- self._dictionaries[i][info_key].kind, \
- "Can only sub from matching type"
- self._dictionaries[i][info_key].data -= info
- else:
- assert False, "Programmer error?"
- else:
- if not all(test):
- continue
- if change:
- self.LineRecordData[info_key][i] = info
- elif add:
- self.LineRecordData[info_key][i] += info
- elif sub:
- self.LineRecordData[info_key][i] -= info
- else:
- assert False, "Programmer error?"
-
- # Again, to not get into index problems, this loop is reversed
- if remove or keep:
- for i in remove_these[::-1]:
- self.remove_line(i)
-
- def remove_line(self, index):
- """Remove line at index from line record
- """
- for key in self.LineRecordData:
- t1 = self.LineRecordData[key][:index]
- t2 = self.LineRecordData[key][(index+1):]
- self.LineRecordData[key] = np.append(t1, t2)
-
- _t = self._dictionaries
- t1 = _t[:index]
- t2 = _t[(index+1):]
- self._dictionaries = np.append(t1, t2)
-
- self._n -= 1
- self._assert_sanity_()
-
- def cross_section(self, temperature=None, pressure=None,
- vmrs=None, mass=None, isotopologue_ratios=None,
- partition_functions=None, f=None):
- """Provides an estimation of the cross-section in the provided
- frequency range
-
- Computes the following estimate (summing over all lines):
-
- .. math::
- \\sigma(f) = \\sum_{k=0}^{k=n-1}
- r_k S_{0, k}(T_0) K_1 K_2 \\frac{Q(T_0)}{Q(T)}
- \\frac{1 + g_k \\; p^2 + iy_k \\; p }{\\gamma_{D,k}\\sqrt{\\pi}}
- \\; F\\left(\\frac{f - f_{0,k} - \\Delta f_k \\; p^2 -
- \\delta f_kp + i\\gamma_{p,k}p} {\\gamma_{D,k}}\\right),
-
- where there are n lines,
- r is the isotopologue ratio,
- S_0 is the line strength,
-        K_1 is the Boltzmann level statistics,
- K_2 is the stimulated emission,
- Q is the partition sum, G is the second
- order line mixing coefficient,
- p is pressure,
- Y is the first order line mixing coefficient,
- f_0 is the line frequency,
- Delta-f is the second order line mixing frequency shift,
- delta-f is the first order pressure shift,
- gamma_p is the pressure broadening half width,
- gamma_D is the Doppler half width, and
- F is assumed to be the Faddeeva function
-
- Note 1: this is only meant for quick-and-dirty estimates. If data is
- lacking, very simplistic assumptions are made to complete the
- calculations.
- Lacking VMR for a species assumes 1.0 vmr of the line species itself,
- lacking mass assumes dry air mass,
- lacking isotopologue ratios means assuming a ratio of unity,
- lacking partition functions means the calculations are performed at
- line temperatures,
- lacking frequency means computing 1000 frequencies from lowest
- frequency line minus its pressure broadening to the highest frequency
- line plus its pressure broadening,
-        lacking pressure means computing at 1 atm, and
- lacking temperature assumes atmospheric temperatures the same as the
- first line temperature. If input f is None then the return
- is (f, sigma), else the return is (sigma)
-
-        Warning: Use only as an estimate; this function is not optimized and
-        has only been tested for a single species in arts-xml-data, where it
-        agrees with the ARTS-computed value to within 1%.
-
- Parameters:
- temperature (float): Temperature [Kelvin]
-
- pressure (float): Pressure [Pascal]
-
- vmrs (dict-like): Volume mixing ratios. See PressureBroadening for
- use [-]
-
- mass (dict-like): Mass of isotopologue [kg]
-
- isotopologue_ratios (dict-like): Isotopologue ratios of the
- different species [-]
-
- partition_functions (dict-like): Partition function estimator,
- should compute partition function by taking temperature as the only
- argument [-]
-
- f (ndarray): Frequency [Hz]
-
- Returns:
- (f, xsec) or xsec depending on f
-
- Examples:
- Plot cross-section making no assumptions on the atmosphere or
- species, i.e., isotopologue ratios is 1 for all isotopologue
- (will not agree with ARTS)
-
- >>> import matplotlib.pyplot as plt
- >>> cat = typhon.arts.xml.load('O2.xml').as_ARTSCAT5()
- >>> (f, x) = cat.cross_section()
- >>> plt.plot(f, x)
-
- Plot cross-sections by specifying limited information on the
- species (will agree reasonably with ARTS)
-
- >>> import matplotlib.pyplot as plt
- >>> cat = typhon.arts.xml.load('O2.xml').as_ARTSCAT5()
- >>> cat.changeForQN(afgl=66, kind='keep')
- >>> f, x = cat.cross_section(mass={"O2-66": 31.9898*constants.amu},
- isotopologue_ratios={"O2-66": 0.9953})
- >>> plt.plot(f, x)
-
- """
- if self._n == 0:
- if f is None:
- return 0, 0
- else:
- return np.zeros_like(f)
-
- if temperature is None:
- temperature = self.LineRecordData['t0'][0]
-
- if pressure is None:
- pressure = constants.atm
-
- if vmrs is None:
- vmrs = {}
-
- if mass is None:
- mass = {}
-
- if f is None:
- return_f = True
- f0 = self.pressurebroadening(0).compute_pressurebroadening_params(
- temperature, self.LineRecordData['t0'][0],
- pressure, vmrs)[0]
- f0 = self.LineRecordData['freq'][0] - f0
- f1 = self.pressurebroadening(-1).compute_pressurebroadening_params(
- temperature, self.LineRecordData['t0'][-1],
- pressure, vmrs)[0]
-            f1 = self.LineRecordData['freq'][-1] + f1
- f = np.linspace(f0, f1, num=1000)
- else:
- return_f = False
-
- if isotopologue_ratios is None:
- isotopologue_ratios = {}
-
- if partition_functions is None:
- partition_functions = {}
-
- # Cross-section
- sigma = np.zeros_like(f)
-
- for i in range(self._n):
- spec_key = self.LineRecordData['spec'][i] + '-' + \
- str(self.LineRecordData['afgl'][i])
-
- if spec_key in mass:
- m = mass[spec_key]
- else:
- m = constants.molar_mass_dry_air / constants.avogadro
- gamma_D = \
- spectroscopy.doppler_broadening(temperature,
- self.LineRecordData['freq'][i],
- m)
- (G, Df,
- Y) = self.linemixing(i).compute_linemixing_params(temperature)
-
- (gamma_p,
- delta_f) = \
- self.pressurebroadening(i).compute_pressurebroadening_params(
- temperature, self.LineRecordData['t0'][i], pressure, vmrs)
-
- K1 = spectroscopy.boltzmann_level(self.LineRecordData['elow'][i],
- temperature,
- self.LineRecordData['t0'][i])
- K2 = spectroscopy.stimulated_emission(
- self.LineRecordData['freq'][i],
- temperature,
- self.LineRecordData['t0'][i])
-
- if spec_key in partition_functions:
- Q = partition_functions[spec_key]
- else:
-                Q = np.ones_like  # fallback: Q(T) = 1 at any temperature
-
- if spec_key in isotopologue_ratios:
- r = isotopologue_ratios[spec_key]
- else:
- r = 1.0
-
- S = r * self.LineRecordData['str'][i] * K1 * K2 * \
- Q(self.LineRecordData['t0'][i]) / Q(temperature)
-
- lm = 1 + G * pressure**2 + 1j * Y * pressure
- z = (f - self.LineRecordData['freq'][i] -
- delta_f - Df * pressure**2 + 1j * gamma_p) / gamma_D
- sigma += (S * (lm * _Faddeeva_(z) / np.sqrt(np.pi) / gamma_D)).real
- if return_f:
- return f, sigma
- else:
- return sigma
-
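For orientation, the line-shape factor applied in the loop above can be reproduced in isolation; a minimal sketch, assuming scipy's `wofz` as the Faddeeva function (the `_Faddeeva_` used by this module):

```python
# Voigt line shape as used in cross_section: real part of the Faddeeva
# function, normalised by sqrt(pi) * gamma_D (line mixing and shifts omitted).
import numpy as np
from scipy.special import wofz  # standard Faddeeva implementation

def voigt(f, f0, gamma_p, gamma_d):
    z = (f - f0 + 1j * gamma_p) / gamma_d
    return wofz(z).real / (np.sqrt(np.pi) * gamma_d)

f = np.linspace(99e9, 101e9, 1001)   # frequency grid [Hz]
shape = voigt(f, 100e9, 20e6, 50e6)  # one line at 100 GHz, illustrative widths
```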
- def write_xml(self, xmlwriter, attr=None):
- """Write an ARTSCAT5 object to an ARTS XML file.
- """
- tmp = self.as_ArrayOfLineRecord()
- tmp.write_xml(xmlwriter, attr=attr)
-
-
-class Rational(_R):
- """Rational number
-
-    This is a copy of fractions.Fraction with only the __repr__ function
-    overwritten to match ARTS style. That is, 3/2 is represented as such
-    rather than as "Fraction(3, 2)". See the original class for more
-    information, limitations, and options
- """
- def __init__(self, *args):
- super(Rational, self).__init__()
-
- def __repr__(self):
- return str(self.numerator) + '/' + str(self.denominator)
-    _R.__repr__ = __repr__  # note: this also patches fractions.Fraction itself
-
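A quick usage sketch:

```python
# ARTS-style half-integer quantum numbers print as plain fractions.
j = Rational(3, 2)
repr(j)   # '3/2' rather than 'Fraction(3, 2)'
j + 1     # arithmetic is inherited from fractions.Fraction -> 5/2
```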
-
-class LineMixing:
- """LineMixing data as in ARTS
-
- Not fully feature-complete
-
-    Used by ARTSCAT5 to estimate ARTSCAT-5 style line mixing in cross_section
- """
-
- _none = None
- _first_order = "L1"
- _second_order = "L2"
- _lblrtm = "LL"
- _lblrtm_nonresonant = "NR"
- _for_band = "BB"
- _possible_kinds = [_none, _first_order, _second_order, _lblrtm,
- _lblrtm_nonresonant, _for_band]
-
- def __init__(self, data=None, kind=None):
- self.data = data
- if kind is not None:
- self.kind = kind
- self._manually_changed_data = False
-
- self._assert_sanity_()
- self._make_data_as_in_arts_()
-
- def _assert_sanity_(self):
- if self._type is self._none:
- assert len(self._data) == 0, "Data available for none-type"
- elif self._type is self._first_order:
- assert len(self._data) == 3, "Data mismatching first order"
- elif self._type is self._second_order:
- assert len(self._data) == 10, "Data mismatching second order"
- elif self._type is self._lblrtm:
- assert len(self._data) == 12, "Data mismatching LBLRTM data"
- elif self._type is self._lblrtm_nonresonant:
- assert len(self._data) == 1, "Data mismatching LBLRTM data"
- elif self._type is self._for_band:
- assert len(self._data) == 1, "Data mismatching band data"
- else:
- assert False, "Cannot recognize data type at all"
-
- def _make_data_as_in_arts_(self):
- if self._type in [self._none, self._for_band]:
- return
- elif self._type is self._first_order:
- self._t0 = self._data[0]
- self._y0 = self._data[1]
- self._ey = self._data[2]
- elif self._type is self._second_order:
- self._t0 = self._data[6]
-
- self._y0 = self._data[0]
- self._y1 = self._data[1]
- self._ey = self._data[7]
-
- self._g0 = self._data[2]
- self._g1 = self._data[3]
- self._eg = self._data[8]
-
- self._f0 = self._data[4]
- self._f1 = self._data[5]
- self._ef = self._data[9]
- elif self._type is self._lblrtm:
- self._y = _ip.interp1d(self._data[:4], self._data[4:8])
- self._g = _ip.interp1d(self._data[:4], self._data[8:])
- else:
- assert False, "Unknown data type"
-
- def _revert_from_arts_to_data_(self):
- if self._manually_changed_data:
- return
-
- if self._type in [self._none, self._for_band]:
- return
- elif self._type is self._first_order:
- self._data[0] = self._t0
- self._data[1] = self._y0
- self._data[2] = self._ey
- elif self._type is self._second_order:
- self._data[6] = self._t0
-
- self._data[0] = self._y0
- self._data[1] = self._y1
- self._data[7] = self._ey
-
- self._data[2] = self._g0
- self._data[3] = self._g1
- self._data[8] = self._eg
-
- self._data[4] = self._f0
- self._data[5] = self._f1
- self._data[9] = self._ef
- elif self._type is self._lblrtm:
- assert all(self._y.x == self._g.x), "Mismatch between y and g"
- self._data[:4] = self._y.x
- self._data[4:8] = self._y.y
- self._data[8:] = self._g.y
- else:
- assert False, "Unknown data type"
-
- def __repr__(self):
- self._revert_from_arts_to_data_()
- out = ''
- if self._type is self._none:
- return "No Line-Mixing"
- elif self._type in self._possible_kinds:
- out += self._type
- else:
- assert False, "Cannot recognize kind"
- for i in self.data:
- out += ' ' + str(i)
- return out
-
- def __str__(self):
- self._revert_from_arts_to_data_()
- out = ''
- if self._type is self._none:
- return out
- elif self._type in self._possible_kinds:
- out += self._type
- else:
- assert False, "Cannot recognize kind"
- for i in self.data:
- out += ' ' + str(i)
- return out
-
- def __getitem__(self, index):
- return self.data[index]
-
- def __setitem__(self, index, val):
- self.data[index] = val
- self._make_data_as_in_arts_()
-
- @property
- def data(self):
- return self._data
-
- @property
- def kind(self):
- return self._type
-
- @kind.setter
- def kind(self, val):
- found = False
- for i in self._possible_kinds:
- if i == val:
- self._type = i
- found = True
- break
- assert found, "Cannot recognize kind"
-
- @data.setter
- def data(self, val):
- self._data = val
- if self._data is None:
- self._data = np.array([], dtype=float)
- self._type = self._none
- elif type(self._data) is dict:
- if self._data['Type'] is None:
- self._type = self._none
- else:
- self.kind = self._data['Type']
- self._data = self._data['Data']
- else:
- if len(self._data) == 10:
- self._type = self._second_order
- elif len(self._data) == 3:
- self._type = self._first_order
- elif len(self._data) == 12:
- self._type = self._lblrtm
- elif len(self._data) == 0:
- self._type = self._none
- else:
- assert False, "Cannot recognize data type automatically"
- self._manually_changed_data = True
-
- def compute_linemixing_params(self, temperature):
- """Returns the line mixing parameters for given temperature(s)
-
- Cross-section is found from summing all lines
-
- .. math::
- \\sigma(f) \\propto \\sum_{k=0}^{k=n-1}
- \\left[1 + G_k \\; p^2 + iY_k \\; p\\right] \\;
-            F\\left(\\frac{f - f_{0,k} - \\Delta f_k \\; p^2 -
-            \\delta f_k \\; p + i\\gamma_{p,k} \\; p}{\\gamma_{D,k}}\\right),
-
-        where k indicates line-dependent variables. This function returns
-        the line mixing parameters G, Y, and Delta-f. The non-line-mixing
-        parameters are gamma_D, the Doppler broadening; gamma_p, the
-        pressure broadening; f, the frequency; f_0, the line frequency;
-        delta-f, the first-order pressure-induced frequency shift; and p,
-        the pressure. The function F(...) is the Faddeeva function and gives
-        the line shape. Many scaling factors are ignored in the equation
-        above.
-
-        Note: for no line mixing, this function returns all zeros
-
-        Developer note: the internal variables emulate the theory for each
-        allowed type of line mixing, so it should be easy to extend this for
-        other types and for partial derivatives
-
- Input:
- temperature (float or ndarray) in Kelvin
-
- Output:
- G(temperature), Delta-f(temperature), Y(temperature)
- """
- if self._type is self._none:
- return np.zeros_like(temperature), np.zeros_like(temperature), \
- np.zeros_like(temperature)
- elif self._type is self._for_band:
- return np.zeros_like(temperature) * np.nan, \
- np.zeros_like(temperature) * np.nan, \
- np.zeros_like(temperature) * np.nan
- elif self._type is self._lblrtm:
- return self._g(temperature), np.zeros_like(temperature), \
- self._y(temperature)
- elif self._type is self._first_order:
- return np.zeros_like(temperature), np.zeros_like(temperature), \
- self._y0 * (self._t0/temperature) ** self._ey
- elif self._type is self._lblrtm_nonresonant:
- return np.full_like(temperature, self._data[0]), \
- np.zeros_like(temperature), np.zeros_like(temperature)
- elif self._type is self._second_order:
- th = self._t0 / temperature
- return (self._g0 + self._g1 * (th - 1)) * th ** self._eg, \
- (self._f0 + self._f1 * (th - 1)) * th ** self._ef, \
- (self._y0 + self._y1 * (th - 1)) * th ** self._ey
-
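A usage sketch with illustrative numbers; the first-order "L1" data layout [t0, y0, ey] follows `_make_data_as_in_arts_` above:

```python
# First-order line mixing: G and Delta-f are zero, and Y follows a power law
# in the temperature ratio, Y(T) = y0 * (t0 / T) ** ey.
lm = LineMixing(data=[296.0, 1e-7, 0.8], kind="L1")
G, Df, Y = lm.compute_linemixing_params(250.0)
```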
-
-class PressureBroadening:
- """PressureBroadening data as in ARTS
-
- Not fully feature-complete
-
-    Used by ARTSCAT5 to estimate ARTSCAT-5 style pressure broadening in
- cross_section
- """
-
- _none = None
- _air = "N2"
- _air_and_water = "WA"
- _all_planets = "AP"
- _sd_air = "SD-AIR"
- _possible_kinds = [_none, _air, _air_and_water, _all_planets, _sd_air]
-
- def __init__(self, data=None, kind=None):
- self.data = data
- if kind is not None:
- self.kind = kind
- self._manually_changed_data = False
-
- self._assert_sanity_()
- self._make_data_as_in_arts_()
-
- def _assert_sanity_(self):
- if self._type is self._none:
- assert len(self._data) == 0, "Data available for none-type"
- elif self._type is self._air:
- assert len(self._data) == 10, "mismatching air broadening "
- elif self._type is self._air_and_water:
- assert len(self._data) == 9, "mismatching air and water broadening"
- elif self._type is self._all_planets:
- assert len(self._data) == 20, "mismatching all planets data"
- elif self._type is self._sd_air:
- assert len(self._data) == 8, "mismatching speed dependent air data"
- else:
- assert False, "Cannot recognize data type at all"
-
- def _make_data_as_in_arts_(self):
- if self._type is self._none:
- return
- elif self._type is self._air:
- self._sgam = self._data[0]
- self._sn = self._data[1]
- self._sdel = 0
-
- self._agam = self._data[2]
- self._an = self._data[3]
- self._adel = self._data[4]
-
- self._dsgam = self._data[5]
- self._dnself = self._data[6]
-
- self._dagam = self._data[7]
- self._dnair = self._data[8]
-
- self._dadel = self._data[9]
- elif self._type is self._air_and_water:
- self._sgam = self._data[0]
- self._sn = self._data[1]
- self._sdel = self._data[2]
-
- self._agam = self._data[3]
- self._an = self._data[4]
- self._adel = self._data[5]
-
- self._wgam = self._data[6]
- self._wn = self._data[7]
- self._wdel = self._data[8]
- elif self._type is self._all_planets:
- self._sgam = self._data[0]
- self._sn = self._data[7]
- self._sdel = 0
- self._gam = {'N2': self._data[1], 'O2': self._data[2],
- 'H2O': self._data[3], 'CO2': self._data[4],
- 'H2': self._data[5], 'He': self._data[6]}
- self._n = {'N2': self._data[8], 'O2': self._data[9],
- 'H2O': self._data[10], 'CO2': self._data[11],
- 'H2': self._data[12], 'He': self._data[13]}
- self._delta_f = {'N2': self._data[14], 'O2': self._data[15],
- 'H2O': self._data[16], 'CO2': self._data[17],
- 'H2': self._data[18], 'He': self._data[19]}
- else:
- assert False, "Unknown data type"
-
- def _revert_from_arts_to_data_(self):
- if self._manually_changed_data:
- return
-
- if self._type is self._none:
- return
- elif self._type is self._air:
- self._data[0] = self._sgam
- self._data[1] = self._sn
-
- self._data[2] = self._agam
- self._data[3] = self._an
- self._data[4] = self._adel
-
- self._data[5] = self._dsgam
- self._data[6] = self._dnself
-
- self._data[7] = self._dagam
- self._data[8] = self._dnair
-
- self._data[9] = self._dadel
- elif self._type is self._air_and_water:
- self._data[0] = self._sgam
- self._data[1] = self._sn
- self._data[2] = self._sdel
-
- self._data[3] = self._agam
- self._data[4] = self._an
- self._data[5] = self._adel
-
- self._data[6] = self._wgam
- self._data[7] = self._wn
- self._data[8] = self._wdel
- elif self._type is self._all_planets:
- self._data[0] = self._sgam
-
- self._data[1] = self._gam['N2']
- self._data[2] = self._gam['O2']
- self._data[3] = self._gam['H2O']
- self._data[4] = self._gam['CO2']
- self._data[5] = self._gam['H2']
- self._data[6] = self._gam['He']
-
- self._data[7] = self._sn
-
- self._data[8] = self._n['N2']
- self._data[9] = self._n['O2']
- self._data[10] = self._n['H2O']
- self._data[11] = self._n['CO2']
- self._data[12] = self._n['H2']
- self._data[13] = self._n['He']
-
- self._data[14] = self._delta_f['N2']
- self._data[15] = self._delta_f['O2']
- self._data[16] = self._delta_f['H2O']
- self._data[17] = self._delta_f['CO2']
- self._data[18] = self._delta_f['H2']
- self._data[19] = self._delta_f['He']
- else:
- assert False, "Unknown data type"
-
- def __repr__(self):
- self._revert_from_arts_to_data_()
- out = ''
- if self._type is self._none:
- return "No Pressure-Broadening"
- elif self._type in self._possible_kinds:
- out += self._type
- else:
- assert False, "Cannot recognize kind"
- for i in self.data:
- out += ' ' + str(i)
- return out
-
- def __str__(self):
- self._revert_from_arts_to_data_()
- out = ''
- if self._type is self._none:
- return out
- elif self._type in self._possible_kinds:
- out += self._type
- else:
- assert False, "Cannot recognize kind"
- for i in self.data:
- out += ' ' + str(i)
- return out
-
- def __getitem__(self, index):
- return self.data[index]
-
- def __setitem__(self, index, val):
- self.data[index] = val
- self._make_data_as_in_arts_()
-
- @property
- def data(self):
- return self._data
-
- @property
- def kind(self):
- return self._type
-
- @kind.setter
- def kind(self, val):
- found = False
- for i in self._possible_kinds:
- if i == val:
- self._type = i
- found = True
- break
- assert found, "Cannot recognize kind"
-
- @data.setter
- def data(self, val):
- self._data = val
- if self._data is None:
- self._data = np.array([], dtype=float)
- self._type = self._none
- elif type(self._data) is dict:
- if self._data['Type'] is None:
- self._type = self._none
- else:
- self.kind = self._data['Type']
- self._data = self._data['Data']
- else:
- if len(self._data) == 10:
- self._type = self._air
- elif len(self._data) == 9:
- self._type = self._air_and_water
- elif len(self._data) == 20:
- self._type = self._all_planets
- elif len(self._data) == 0:
- self._type = self._none
- else:
- assert False, "Cannot recognize data type automatically"
- self._manually_changed_data = True
-
- def compute_pressurebroadening_params(self, temperature, line_temperature,
- pressure, vmrs):
- """Computes the pressure broadening parameters for the given atmosphere
-
- Cross-section is found from summing all lines
-
- .. math::
-            \\sigma(f) \\propto \\sum_{k=0}^{k=n-1}
-            F\\left(\\frac{f - f_{0,k} - \\Delta f_k \\; p^2 -
-            \\delta f_k \\; p + i\\gamma_{p,k} \\; p}{\\gamma_{D,k}}\\right),
-
-        where k indicates line-dependent variables. This function returns
-        the pressure broadening parameters p*gamma_p and p*delta-f. The
-        non-pressure-broadening parameters are gamma_D, the Doppler
-        broadening; f, the frequency; f_0, the line frequency; delta-f, the
-        first-order pressure-induced frequency shift; and p, the pressure.
-        The function F(...) is the Faddeeva function and gives the line
-        shape. Many scaling factors are ignored in the equation above.
-
- The pressure broadening parameters are summed from the contribution of
- each individual perturber so that for i perturbers
-
- .. math::
-            \\gamma_p \\, p = \\sum_i \\gamma_{p,i} \\, p_i
-
- and
-
- .. math::
-            \\delta f_p \\, p = \\sum_i \\delta f_{p,i} \\, p_i
-
- Parameters:
- temperature (float or ndarray): Temperature [Kelvin]
-
- line_temperature (float): Line temperature [Kelvin]
-
- pressure (float or like temperature): Total pressure [Pascal]
-
-            vmrs (dict): Volume mixing ratios of atmospheric species.
-                The dict should be {'self': self_vmr} for kind 'N2',
-                {'self': self_vmr, 'H2O': h2o_vmr} for kind 'WA', and each
-                species of kind 'AP' should be represented in the same
-                manner. When 'self' is one of the listed species,
-                vmrs['self'] should not exist. Missing data is treated as a
-                missing species. No data at all is taken to mean a self VMR
-                of 1.0 (len(vmrs) == 0 must evaluate as True). The internal
-                self_vmr, h2o_vmr, etc., variables must have the same size
-                as pressure or be constants
-
- Returns:
- p · gamma0_p, p · delta-f0
- """
- theta = line_temperature / temperature
- if len(vmrs) == 0:
- return self._sgam * theta ** self._sn * pressure, \
- self._sdel * theta ** (0.25 + 1.5 * self._sn) * pressure
-
- sum_vmrs = 0.0
- gamma = np.zeros_like(temperature)
- delta_f = np.zeros_like(temperature)
-
- if self._type is self._none:
- return np.zeros_like(temperature), np.zeros_like(temperature)
- elif self._type is self._air:
- for species in vmrs:
- if species == 'self':
- gamma += self._sgam * theta ** self._sn * \
- pressure * vmrs[species]
- delta_f += self._sdel * \
- theta ** (0.25 + 1.5 * self._sn) * \
- pressure * vmrs[species]
- sum_vmrs += vmrs[species]
- gamma += self._agam * theta ** self._an * \
- pressure * (1 - sum_vmrs)
- delta_f += self._adel * theta ** (0.25 + 1.5 * self._an) * \
- pressure * (1 - sum_vmrs)
- elif self._type is self._air_and_water:
- for species in vmrs:
- if species == 'self':
- gamma += self._sgam * theta ** self._sn * \
- pressure * vmrs[species]
- delta_f += self._sdel * \
- theta ** (0.25 + 1.5 * self._sn) * \
- pressure * vmrs[species]
- sum_vmrs += vmrs[species]
- elif species == 'H2O':
- gamma += self._wgam * theta ** self._wn * \
- pressure * vmrs[species]
- delta_f += self._wdel * \
- theta ** (0.25 + 1.5 * self._wn) * \
- pressure * vmrs[species]
- sum_vmrs += vmrs[species]
- gamma += self._agam * theta ** self._an * \
- pressure * (1 - sum_vmrs)
- delta_f += self._adel * theta ** (0.25 + 1.5 * self._an) * \
- pressure * (1 - sum_vmrs)
- elif self._type is self._all_planets:
- for species in vmrs:
- if species == 'self':
- gamma += self._sgam * theta ** self._sn * \
- pressure * vmrs[species]
- delta_f += self._sdel * \
- theta ** (0.25 + 1.5 * self._sn) * \
- pressure * vmrs[species]
- sum_vmrs += vmrs[species]
- elif species in self._gam:
- gamma += self._gam[species] * theta ** self._n[species] * \
- pressure * vmrs[species]
- delta_f += self._delta_f[species] * \
- theta ** (0.25 + 1.5 * self._n[species]) * \
- pressure * vmrs[species]
- sum_vmrs += vmrs[species]
- gamma /= sum_vmrs
- delta_f /= sum_vmrs
- return gamma, delta_f
-
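A usage sketch with illustrative numbers for kind 'N2' (air broadening); the 10-element layout follows `_make_data_as_in_arts_` above:

```python
# [sgam, sn, agam, an, adel, dsgam, dnself, dagam, dnair, dadel]
pb = PressureBroadening(data=[20000.0, 0.7, 15000.0, 0.75, 0.0,
                              0.0, 0.0, 0.0, 0.0, 0.0])
# An empty vmrs dict selects the pure self-broadening branch:
gamma, delta_f = pb.compute_pressurebroadening_params(
    temperature=250.0, line_temperature=296.0, pressure=101325.0, vmrs={})
```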
-
-class PartitionFunctions:
- """Class to compute partition functions given ARTS-like partition functions
- """
- _default_test = 296.0
-
- def __init__(self, init_data=None):
- self._data = {}
- self.append(init_data)
- self._assert_sanity_()
-
- def append(self, data):
- if type(data) is SpeciesAuxData:
- self._from_species_aux_data_(data)
- elif type(data) is dict:
- self._from_dict_(data)
- elif type(data) is PartitionFunctions:
-            self.data = data.data
- elif data is not None:
- assert False, "Cannot recognize the initialization data type"
-
- def _from_species_aux_data_(self, sad):
- assert sad.version == 2, "Must be version 2 data"
- self._from_dict_(sad._data_dict)
-
- def _from_dict_(self, d):
- for k in d:
- assert type(d[k]) is list, "lowest level data must be list"
- self._from_list_(d[k], k)
-
- def _from_list_(self, l, k):
- if l[0] == 'PART_TFIELD':
- self.data[k] = _ip.interp1d(l[1][0].grids[0], l[1][0].data)
- elif l[0] == 'PART_COEFF':
- self.data[k] = _P(l[1][0].data)
- else:
- raise RuntimeError("Unknown or not implemented " +
- "partition_functions type encountered")
-
- def _assert_sanity_(self):
-        assert type(self.data) is dict, "Sanity check failed, class is wrong"
-
- def __getitem__(self, key):
- return self.data[key]
-
- def __setitem__(self, key, data):
- if type(data) is list:
- self._from_list_(data, key)
- elif type(data) in [_P, _ip.interp1d]:
- self.data[key] = data
- else:
-            try:
-                data(self._default_test)
-                self.data[key] = data
-            except Exception:
-                raise RuntimeError("Cannot determine type")
-
- def __iter__(self):
- return iter(self.data)
-
- def __contains__(self, key):
- return key in self.data
-
- def __len__(self):
- return len(self.data)
-
- def __repr__(self):
- return "partition functions for " + str(len(self)) + " species"
-
- def keys(self):
- return self.data.keys()
- species = keys
-
- @property
- def data(self):
- return self._data
-
- @data.setter
- def data(self, val):
- assert type(val) is dict, "new values must be dictionary type"
- self._data = val
-
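A usage sketch; the callable branch of `__setitem__` accepts any function of temperature (the linear Q below is a rough illustration, not real data):

```python
pf = PartitionFunctions()
pf['O2-66'] = lambda T: 0.7 * T   # crude linear Q(T); evaluated once at 296 K
q = pf['O2-66'](250.0)
'O2-66' in pf                     # -> True
```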
-# Imported at the end of the module to avoid a circular import
-from .catalogues import SpeciesAuxData
-from .catalogues import ArrayOfLineRecord
-from .catalogues import QuantumNumberRecord
-from .utils import as_quantumnumbers
diff --git a/typhon/arts/retrieval.py b/typhon/arts/retrieval.py
deleted file mode 100644
index 8b56af37..00000000
--- a/typhon/arts/retrieval.py
+++ /dev/null
@@ -1,124 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-Implementation of RetrievalQuantity.
-
-"""
-
-from .utils import return_if_arts_type
-
-__all__ = ['RetrievalQuantity',
- ]
-
-
-class RetrievalQuantity:
- """Represents a RetrievalQuantity object.
-
- See online ARTS documentation for object details.
-
- """
-
- def __init__(self, maintag=None, subtag=None, subsubtag=None, mode=None,
- analytical=None, perturbation=None, grids=None):
-
- self.maintag = maintag
- self.subtag = subtag
- self.subsubtag = subsubtag
- self.mode = mode
- self.analytical = analytical
- self.perturbation = perturbation
- self.grids = grids
-
- @property
- def maintag(self):
- """MainTag of retrieval species."""
- return self._maintag
-
- @property
- def subtag(self):
- """Subtag of retrieval species."""
- return self._subtag
-
- @property
- def subsubtag(self):
- """Subsubtag of retrieval species."""
- return self._subsubtag
-
- @property
- def mode(self):
- """Retrieval mode."""
- return self._mode
-
- @property
- def analytical(self):
- """Flag to determine whether the retrieval was done analytically."""
- return self._analytical
-
- @property
- def perturbation(self):
- """Amplitude of the perturbation."""
- return self._perturbation
-
- @property
- def grids(self):
- """Pressure grid."""
- return self._grids
-
- @maintag.setter
- def maintag(self, maintag):
- self._maintag = return_if_arts_type(maintag, 'String')
-
- @subtag.setter
- def subtag(self, subtag):
- self._subtag = return_if_arts_type(subtag, 'String')
-
- @subsubtag.setter
- def subsubtag(self, subsubtag):
- self._subsubtag = return_if_arts_type(subsubtag, 'String')
-
- @mode.setter
- def mode(self, mode):
- self._mode = return_if_arts_type(mode, 'String')
-
- @analytical.setter
- def analytical(self, analytical):
- self._analytical = return_if_arts_type(analytical, 'Index')
-
- @perturbation.setter
- def perturbation(self, perturbation):
- self._perturbation = return_if_arts_type(perturbation, 'Numeric')
-
- @grids.setter
- def grids(self, grids):
- self._grids = return_if_arts_type(grids, 'ArrayOfVector')
-
- @classmethod
- def from_xml(cls, xmlelement):
- """Loads a RetrievalQuantity object from an existing file.
-
- """
- obj = cls()
- obj.maintag = xmlelement[0].value()
- obj.subtag = xmlelement[1].value()
- obj.subsubtag = xmlelement[2].value()
- obj.mode = xmlelement[3].value()
- obj.analytical = xmlelement[4].value()
- obj.perturbation = xmlelement[5].value()
- obj.grids = xmlelement[6].value()
-
- return obj
-
- def write_xml(self, xmlwriter, attr=None):
- """Write a RetrievalQuantity object to an ARTS XML file.
- """
- if attr is None:
- attr = {}
-
- xmlwriter.open_tag("RetrievalQuantity", attr)
- xmlwriter.write_xml(self.maintag, {'name': 'MainTag'})
- xmlwriter.write_xml(self.subtag, {'name': 'Subtag'})
- xmlwriter.write_xml(self.subsubtag, {'name': 'SubSubtag'})
- xmlwriter.write_xml(self.mode, {'name': 'Mode'})
- xmlwriter.write_xml(self.analytical, {'name': 'Analytical'})
- xmlwriter.write_xml(self.perturbation, {'name': 'Perturbation'})
- xmlwriter.write_xml(self.grids, {'name': 'Grids'})
- xmlwriter.close_tag()
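A minimal construction sketch (all tags and grids below are illustrative, not a recommended retrieval setup):

```python
import numpy as np

rq = RetrievalQuantity(maintag='H2O', subtag='', subsubtag='', mode='rel',
                       analytical=1, perturbation=0.1,
                       grids=[np.logspace(5, 3, 20)])  # one pressure grid
```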
diff --git a/typhon/arts/scattering.py b/typhon/arts/scattering.py
deleted file mode 100644
index ba42915f..00000000
--- a/typhon/arts/scattering.py
+++ /dev/null
@@ -1,1090 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-Implementation of scattering related types such as SingleScatteringData and
-ScatteringMetaData.
-
-"""
-
-import copy
-import numbers
-from io import StringIO
-
-import numpy as np
-
-__all__ = ['SingleScatteringData',
- 'ScatteringMetaData',
- ]
-
-PARTICLE_TYPE_GENERAL = 10
-PARTICLE_TYPE_TOTALLY_RANDOM = 20
-PARTICLE_TYPE_AZIMUTHALLY_RANDOM = 30
-
-_old_ptype_mapping = {
- 10: "general",
- 20: "macroscopically_isotropic",
- 30: "horizontally_aligned",
-}
-
-_valid_ptypes = [
- [],
- # version 1
- [
- 10, 20, 30
- ],
- # version 2
- [
- "general",
- "macroscopically_isotropic",
- "horizontally_aligned",
- ],
- # version 3
- [
- "general",
- "totally_random",
- "azimuthally_random",
- ],
-]
-
-
-def dict_combine_with_default(in_dict, default_dict):
- """A useful function for dealing with dictionary function input. Combines
- parameters from in_dict with those from default_dict with the output
- having default_dict values for keys not present in in_dict
-
- Args:
- in_dict (dict): Input dictionary.
- default_dict (dict): Dictionary with default values.
-
- Returns:
- dict: Dictionary with missing fields filled with default values.
-
- """
- if in_dict is None:
- out_dict = copy.deepcopy(default_dict)
- else:
- out_dict = copy.deepcopy(in_dict)
- for key in default_dict.keys():
- out_dict[key] = copy.deepcopy(in_dict.get(key, default_dict[key]))
- return out_dict
-
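A brief usage sketch:

```python
defaults = {'ptype': 'totally_random', 'version': 3}
params = dict_combine_with_default({'version': 2}, defaults)
# -> {'ptype': 'totally_random', 'version': 2}
# values are deep-copied, so the output never aliases either input
```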
-
-class SingleScatteringData:
- """The class representing the arts SingleScatteringData class.
-
- The data members of this object are identical to the class of the same name
- in ARTS; it includes all the single scattering properties required for
- polarized radiative transfer calculations: the extinction matrix, the phase
- matrix, and the absorption coefficient vector. The angular, frequency, and
- temperature grids for which these are defined are also included. Another
-    data member, *ptype*, describes the orientational symmetry of the particle
- ensemble, which determines the format of the single scattering properties.
- The data structure of the ARTS SingleScatteringData class is described in
- the ARTS User Guide.
-
- The methods in the SingleScatteringData class enable the calculation of the
- single scattering properties, and the output of the SingleScatteringData
- structure in the ARTS XML format (see example file). The low-level
- calculations are performed in arts_scat.
-
- """
- defaults = {'ptype': 'totally_random',
- 'version': 3,
- # as defined in optproperties.h
- 'description': 'SingleScatteringData created with Typhon.',
- 'T_grid': np.array([250]),
- 'za_grid': np.arange(0, 181, 10),
- 'aa_grid': np.arange(0, 181, 10),
- }
-
- def __init__(self):
- self._ptype = None
- self.version = None
- self.description = None
- self.f_grid = None
- self.T_grid = None
- self.za_grid = None
- self.aa_grid = None
- self.abs_vec_data = None
- self.ext_mat_data = None
- self.pha_mat_data = None
-
- def __eq__(self, other):
- """Test the equality of SingleScatteringData."""
-
- def compare_ndarray(array1, array2, atol):
- if array1 is not None and array2 is not None:
- if not np.allclose(array1, array2, atol=atol):
- return False
- elif array1 is not array2:
- return False
- return True
-
- if isinstance(other, self.__class__):
- if self.ptype != other.ptype:
- return False
-
- if self.version != other.version:
- return False
-
- for member in ('f_grid', 'T_grid', 'za_grid', 'aa_grid'):
- if not compare_ndarray(getattr(self, member),
- getattr(other, member), atol=1e-6):
- return False
-
- for member in ('abs_vec_data', 'ext_mat_data', 'pha_mat_data'):
- if not compare_ndarray(getattr(self, member),
- getattr(other, member), atol=1e-12):
- return False
-
- return True
- return NotImplemented
-
-    def __ne__(self, other):
- """Test the non-equality of SingleScatteringData."""
- if isinstance(other, self.__class__):
- return not self.__eq__(other)
- return NotImplemented
-
- @classmethod
- def from_data(cls, params=None, **kwargs):
- """ Constructor
-
- Parameters
- ----------
-
- ptype : string
- As for ARTS; see Arts User Guide
-
- f_grid : 1-D np.array
- np.array for frequency grid [Hz]
-
- T_grid : 1-D np.array
- np.array for temperature grid [K]
-
- za_grid : 1-D np.array
- np.array for zenith-angle grid [degree]
-
- aa_grid : 1-D np.array
- np.array for azimuth-angle grid [degree]
-
- Some inputs have default values, see SingleScatteringData.defaults.
-
- """
- obj = cls()
-
- # enable keyword arguments
- if kwargs and not params:
- params = kwargs
-
- params = dict_combine_with_default(params, obj.__class__.defaults)
-
- # check parameters
-        # make sure grids are np.arrays
- for grid in ['f', 'T', 'za', 'aa']:
- params[grid + '_grid'] = np.array(params[grid + '_grid'])
-
- # Version needs to be set before ptype due to checks on
- # ptype
- obj.version = params['version']
-
- for k, v in params.items():
- setattr(obj, k, v)
-
- return obj
-
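A construction sketch (illustrative grids; omitted grids fall back to the defaults above):

```python
import numpy as np

ssd = SingleScatteringData.from_data(f_grid=np.array([89e9, 157e9]),
                                     T_grid=np.array([230.0, 270.0]))
ssd.ptype  # -> 'totally_random' (default); the data arrays remain unset
```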
- @property
- def ptype(self):
- """str: Particle type"""
-
- return self._ptype
-
- @ptype.setter
- def ptype(self, ptype):
- if isinstance(ptype, int):
- if self.version is None or self.version == 1:
- if ptype not in _old_ptype_mapping.keys():
- raise RuntimeError('Invalid ptype {}'.format(ptype))
- ptype = _old_ptype_mapping[ptype]
- self.version = 2
- else:
- raise RuntimeError(
- 'Integer ptype not allowed for SSD version 2 and later')
- else:
- if ptype not in _valid_ptypes[self.version]:
- raise RuntimeError('Invalid ptype {}'.format(ptype))
-
- self._ptype = ptype
-
- @property
- def version(self):
- """str: Particle type"""
-
- return self._version
-
- @version.setter
- def version(self, v):
- if v is not None:
- if not isinstance(v, int):
- raise TypeError('Version number must be type int')
- if v < 1 or v > 3:
- raise RuntimeError(
- 'Version number must be in the range from 1 to 3')
-
- self._version = v
-
- @classmethod
- def from_xml(cls, xmlelement):
- """Loads a SingleScatteringData object from an xml.ElementTree.Element.
- """
-
- obj = cls()
- if 'version' in xmlelement.attrib.keys():
- obj.version = int(xmlelement.attrib['version'])
-
- if obj.version is None or obj.version == 1:
- obj.version = 2
- obj.ptype = _valid_ptypes[2][_valid_ptypes[1].index(int(xmlelement[0].value()))]
- else:
- obj.ptype = xmlelement[0].value()
-
- # Non-azimuthally random data can be directly converted to version 3
- if obj.version == 2 and obj.ptype != 'horizontally_aligned':
- obj.version = 3
- obj.ptype = _valid_ptypes[3][_valid_ptypes[2].index(obj.ptype)]
-
- if obj.ptype not in _valid_ptypes[obj.version]:
- raise RuntimeError("Invalid ptype: {}".format(obj.ptype))
-
- obj.description = xmlelement[1].value()
- obj.f_grid = xmlelement[2].value()
- obj.T_grid = xmlelement[3].value()
- obj.za_grid = xmlelement[4].value()
- obj.aa_grid = xmlelement[5].value()
- obj.pha_mat_data = xmlelement[6].value()
- obj.ext_mat_data = xmlelement[7].value()
- obj.abs_vec_data = xmlelement[8].value()
- obj.checksize()
-
- return obj
-
- def to_atmlab_dict(self):
- """Returns a copy of the SSD as a dictionary.
-
- Returns a dictionary compatible with an atmlab structure.
-
- Returns:
- Dictionary containing the grids and data.
- """
-
- d = {'ptype': self.ptype,
- 'version': self.version,
- 'description': self.description,
- 'f_grid': self.f_grid,
- 'T_grid': self.T_grid,
- 'za_grid': self.za_grid,
- 'aa_grid': self.aa_grid,
- 'pha_mat_data': self.pha_mat_data,
- 'ext_mat_data': self.ext_mat_data,
- 'abs_vec_data': self.abs_vec_data,
- }
-
- return d
-
- def write_xml(self, xmlwriter, attr=None):
- """Write a SingleScatterinData object to an ARTS XML file.
- """
- self.checksize()
- if self.version is None or self.version < 2:
- raise RuntimeError('SingleScatteringData version not supported.')
- if attr is None:
- attr = {}
- attr['version'] = self.version
- xmlwriter.open_tag("SingleScatteringData", attr)
- xmlwriter.write_xml(self.ptype)
- xmlwriter.write_xml(self.description)
- xmlwriter.write_xml(self.f_grid)
- xmlwriter.write_xml(self.T_grid)
- xmlwriter.write_xml(self.za_grid)
- xmlwriter.write_xml(self.aa_grid)
- xmlwriter.write_xml(self.pha_mat_data)
- xmlwriter.write_xml(self.ext_mat_data)
- xmlwriter.write_xml(self.abs_vec_data)
- xmlwriter.close_tag()
-
- def __repr__(self):
- S = StringIO()
- S.write(" 1:
- S.write("%s=%4e..%4e" % (nm, g.min(), g.max()))
- elif g.size == 1:
- S.write("%s=%4e" % (nm, float(g.squeeze())))
- else:
- S.write("%s=[]" % nm)
- S.write(">")
-
- return S.getvalue()
-
- def __getitem__(self, v):
- """Get subset of single-scattering-data
-
-        Must take four elements (f, T, za, aa).
- Only implemented for randomly oriented particles.
- """
-
-        if self.ptype != "totally_random":
-            raise RuntimeError("Slicing implemented only for "
-                               "ptype = %s. Found ptype = %s" %
-                               ("totally_random", self.ptype))
- v2 = list(v)
- for i, el in enumerate(v):
- # to preserve the rank of the data, [n] -> [n:n+1]
- if isinstance(el, numbers.Integral):
- v2[i] = slice(v[i], v[i] + 1, 1)
- f, T, za, aa = v2
- # make a shallow copy (view of the same data)
- c = copy.copy(self)
- c.f_grid = c.f_grid[f]
- c.T_grid = c.T_grid[T]
- c.za_grid = c.za_grid[za]
- c.aa_grid = c.aa_grid[aa]
- c.ext_mat_data = c.ext_mat_data[f, T, :, :, :]
- c.pha_mat_data = c.pha_mat_data[f, T, za, aa, :, :, :]
- c.abs_vec_data = c.abs_vec_data[f, T, :, :, :]
- c.checksize()
- return c
-
- def checksize(self):
- """Verifies size is consistent.
-
- Raises:
- RuntimeError
-
- """
- if not ((self.f_grid.size or 1, self.T_grid.size or 1) ==
- self.ext_mat_data.shape[:2] ==
- self.pha_mat_data.shape[:2] ==
- self.abs_vec_data.shape[:2] and
- (self.za_grid.size or 1, self.aa_grid.size or 1) ==
- self.pha_mat_data.shape[2:4]):
- raise RuntimeError(
- "Inconsistent sizes in SingleScatteringData.\n"
- "f_grid: %s, T_grid: %s, za_grid: %s, aa_grid: %s, "
- "ext_mat: %s, pha_mat: %s, abs_vec: %s" %
- (self.f_grid.size or 1, self.T_grid.size or 1,
- self.za_grid.size or 1, self.aa_grid.size or 1,
- self.ext_mat_data.shape, self.pha_mat_data.shape,
- self.abs_vec_data.shape))
-
- def assp2backcoef(self):
- """The function returns the radar backscattering coeffcient. This is
- the phase function times 4pi, following the standard definition in the
- radar community.
-
- Returns:
- Backscattering coefficients, one value for each frequency and
- temperature in S. [m2]
-
- """
- back_coef = np.multiply(4 * np.pi,
- self.pha_mat_data[:, :, -1, 0, 0, 0, 0])
- return back_coef
-
- def assp2g(self):
- """For a normalised phase function (p), g equals the 4pi integral of
- p*cos(th), where th is the scattering angle. For pure isotropic
- scattering g = 0, while pure forward scattering has g=1.
-
- Warning, this function does not handle the extreme cases of
- delta-function type of forward or backward scattering lobes. A g of
- zero is returned for these cases.
-
- Returns:
- Backscattering coefficients, one value for each frequency and
- temperature in S. [m2]
-
- """
- g = np.zeros((len(self.f_grid), len(self.T_grid)))
- # ARTS uses pure phase matrix values, and not a normalised phase
- # function, and we need to include a normalisation.
-
- za_rad_grid = np.radians([self.za_grid])
-
- aziWeight = abs(np.sin(za_rad_grid))
- cosTerm = np.cos(za_rad_grid)
-
- for j in range(0, len(self.f_grid)):
- for i in range(0, len(self.T_grid)):
- phase_grid = self.pha_mat_data[j, i, :, 0, 0, 0, 0]
-
- normFac = np.trapz(np.multiply(
- phase_grid, aziWeight), za_rad_grid)
-
- if normFac == 0:
- # If normFac is zero, this means that phase_grid==0 and
- # should indicate very small particles that have g=0.
- g[j, i] = 0
- else:
- temp_cosPhase = np.multiply(cosTerm, phase_grid)
-
- temp = np.trapz(np.multiply(temp_cosPhase, aziWeight),
- za_rad_grid)
-
- g[j, i] = np.divide(temp, normFac)
- return g
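The integral implemented above can be sanity-checked against a phase function with a known asymmetry parameter; a standalone sketch using a Henyey-Greenstein phase function with g = 0.5:

```python
import numpy as np

g0 = 0.5
za = np.radians(np.arange(0.0, 181.0))                     # scattering angle
p = (1 - g0**2) / (1 + g0**2 - 2 * g0 * np.cos(za))**1.5   # HG phase function
w = np.abs(np.sin(za))                                     # azimuthal weight
g = np.trapz(np.cos(za) * p * w, za) / np.trapz(p * w, za)
# g comes out close to 0.5, the HG asymmetry parameter
```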
-
- def checkassp(self):
- """Verfies properties of SSP.
-
- Raises:
- PyARTSError: If ptype is not macroscopically isotropic, or if first
- and last value of za_grid does not equal exactly 0 and 180
- respectively.
- """
-
- if self.ptype != "macroscopically_isotropic":
- raise RuntimeError(
- "So far just complete random orientation is handled.")
-
- if self.za_grid[0] != 0:
- raise RuntimeError("First value of za_grid must be 0.")
-
- if self.za_grid[-1] != 180:
- raise RuntimeError("Last value of za_grid must be 180.")
-
-
-class SpectralSingleScatteringData:
- """The class representing the arts SpectralSingleScatteringData class.
-
-    The data members of this object are identical to the class of the same name
-    in ARTS; it includes all the spectral single scattering properties required
-    for polarized radiative transfer calculations: the extinction matrix, the
-    phase matrix, and the absorption coefficient vector. The spectral
-    coefficient, frequency, and temperature grids for which these are defined
-    are also included. Another data member, *ptype*, describes the
-    orientational symmetry of the particle ensemble, which determines the
-    format of the single scattering properties. The data structure of the ARTS
-    SpectralSingleScatteringData class is described in the ARTS User Guide.
-
-    The methods in the SpectralSingleScatteringData class enable the
-    calculation of the single scattering properties, and the output of the
-    SpectralSingleScatteringData structure in the ARTS XML format (see example
-    file). The low-level calculations are performed in arts_scat.
-
- """
- defaults = {'ptype': 'totally_random',
- 'version': 3,
- # as defined in optproperties.h
- 'description': 'SpectralSingleScatteringData created with Typhon.',
- 'T_grid': np.array([250]),
- }
-
- def __init__(self):
- self._ptype = None
- self.version = None
- self.description = None
- self.f_grid = None
- self.T_grid = None
- self.coeff_inc = None
- self.coeff_sca = None
- self.abs_vec_data_real = None
- self.abs_vec_data_imag = None
- self.ext_mat_data_real = None
- self.ext_mat_data_imag = None
- self.pha_mat_data_real = None
- self.pha_mat_data_imag = None
- self.forward_peak_data_real = None
- self.forward_peak_data_imag = None
- self.backward_peak_data_real = None
- self.backward_peak_data_imag = None
-
- def __eq__(self, other):
- """Test the equality of SpectralSingleScatteringData."""
-
- def compare_ndarray(array1, array2, atol):
- if array1 is not None and array2 is not None:
- if not np.allclose(array1, array2, atol=atol):
- return False
- elif array1 is not array2:
- return False
- return True
-
- if isinstance(other, self.__class__):
- if self.ptype != other.ptype:
- return False
-
- if self.version != other.version:
- return False
-
- for member in ('f_grid', 'T_grid', 'coeff_inc', 'coeff_sca'):
- if not compare_ndarray(getattr(self, member),
- getattr(other, member), atol=1e-6):
- return False
-
- for member in ('abs_vec_data_real', 'abs_vec_data_imag', 'ext_mat_data_real',
- 'ext_mat_data_imag', 'pha_mat_data_real', 'pha_mat_data_imag',
- 'forward_peak_data_real', 'backward_peak_data_imag'):
- if not compare_ndarray(getattr(self, member),
- getattr(other, member), atol=1e-12):
- return False
-
- return True
- return NotImplemented
-
-    def __ne__(self, other):
- """Test the non-equality of SpectralSingleScatteringData."""
- if isinstance(other, self.__class__):
- return not self.__eq__(other)
- return NotImplemented
-
- @classmethod
- def from_data(cls, params=None, **kwargs):
- """ Constructor
-
- Parameters
- ----------
-
- ptype : string
- As for ARTS; see Arts User Guide
-
- f_grid : 1-D np.array
- np.array for frequency grid [Hz]
-
- T_grid : 1-D np.array
- np.array for temperature grid [K]
-
-        coeff_inc : 2-D np.array
-            np.array of coefficients for the incident directions
-
-        coeff_sca : 2-D np.array
-            np.array of coefficients for the scattered directions
-
- Some inputs have default values, see SpectralSingleScatteringData.defaults.
-
- """
- obj = cls()
-
- # enable keyword arguments
- if kwargs and not params:
- params = kwargs
-
- params = dict_combine_with_default(params, obj.__class__.defaults)
-
- # check parameters
- # make sure grids are np np.arrays
- for grid in ['f', 'T']:
- params[grid + '_grid'] = np.array(params[grid + '_grid'])
-
- # Version needs to be set before ptype due to checks on
- # ptype
- obj.version = params['version']
-
- for k, v in params.items():
- setattr(obj, k, v)
-
- return obj
-
- @property
- def ptype(self):
- """str: Particle type"""
-
- return self._ptype
-
- @ptype.setter
- def ptype(self, ptype):
- if isinstance(ptype, int):
- if self.version is None or self.version == 1:
- if ptype not in _old_ptype_mapping.keys():
- raise RuntimeError('Invalid ptype {}'.format(ptype))
- ptype = _old_ptype_mapping[ptype]
- self.version = 2
- else:
- raise RuntimeError(
- 'Integer ptype not allowed for SSD version 2 and later')
- else:
- if ptype not in _valid_ptypes[self.version]:
- raise RuntimeError('Invalid ptype {}'.format(ptype))
-
- self._ptype = ptype
-
- @property
- def version(self):
- """str: Particle type"""
-
- return self._version
-
- @version.setter
- def version(self, v):
- if v is not None:
- if not isinstance(v, int):
- raise TypeError('Version number must be type int')
- if v < 1 or v > 3:
- raise RuntimeError(
- 'Version number must be in the range from 1 to 3')
-
- self._version = v
-
- @classmethod
- def from_xml(cls, xmlelement):
- """Loads a SpectralSingleScatteringData object from an xml.ElementTree.Element.
- """
-
- obj = cls()
- if 'version' in xmlelement.attrib.keys():
- obj.version = int(xmlelement.attrib['version'])
-
- if obj.version is None or obj.version == 1:
- obj.version = 2
- obj.ptype = _valid_ptypes[2][_valid_ptypes[1].index(int(xmlelement[0].value()))]
- else:
- obj.ptype = xmlelement[0].value()
-
- # Non-azimuthally random data can be directly converted to version 3
- if obj.version == 2 and obj.ptype != 'horizontally_aligned':
- obj.version = 3
- obj.ptype = _valid_ptypes[3][_valid_ptypes[2].index(obj.ptype)]
-
- if obj.ptype not in _valid_ptypes[obj.version]:
- raise RuntimeError("Invalid ptype: {}".format(obj.ptype))
-
- obj.description = xmlelement[1].value()
- obj.f_grid = xmlelement[2].value()
- obj.T_grid = xmlelement[3].value()
- obj.coeff_inc = xmlelement[4].value()
- obj.coeff_sca = xmlelement[5].value()
- obj.pha_mat_data_real = xmlelement[6].value()
- obj.pha_mat_data_imag = xmlelement[7].value()
- obj.ext_mat_data_real = xmlelement[8].value()
- obj.ext_mat_data_imag = xmlelement[9].value()
- obj.abs_vec_data_real = xmlelement[10].value()
- obj.abs_vec_data_imag = xmlelement[11].value()
- obj.forward_peak_data_real = xmlelement[12].value()
- obj.forward_peak_data_imag = xmlelement[13].value()
- obj.backward_peak_data_real = xmlelement[14].value()
- obj.backward_peak_data_imag = xmlelement[15].value()
- obj.checksize()
-
- return obj
-
- def to_atmlab_dict(self):
- """Returns a copy of the SSSD as a dictionary.
-
- Returns a dictionary compatible with an atmlab structure.
-
- Returns:
- Dictionary containing the grids and data.
- """
-
- d = {'ptype': self.ptype,
- 'version': self.version,
- 'description': self.description,
- 'f_grid': self.f_grid,
- 'T_grid': self.T_grid,
- 'coeff_inc': self.coeff_inc,
- 'coeff_sca': self.coeff_sca,
- 'pha_mat_data_real': self.pha_mat_data_real,
- 'pha_mat_data_imag': self.pha_mat_data_imag,
- 'ext_mat_data_real': self.ext_mat_data_real,
- 'ext_mat_data_imag': self.ext_mat_data_imag,
- 'abs_vec_data_real': self.abs_vec_data_real,
- 'abs_vec_data_imag': self.abs_vec_data_imag,
- 'forward_peak_data_real': self.forward_peak_data_real,
- 'forward_peak_data_imag': self.forward_peak_data_imag,
- 'backward_peak_data_real': self.backward_peak_data_real,
- 'backward_peak_data_imag': self.backward_peak_data_imag,
- }
-
- return d
-
- def write_xml(self, xmlwriter, attr=None):
- """Write a SingleScatterinData object to an ARTS XML file.
- """
- self.checksize()
- if self.version is None or self.version < 2:
- raise RuntimeError('SpectralSingleScatteringData version not supported.')
- if attr is None:
- attr = {}
- attr['version'] = self.version
- xmlwriter.open_tag("SpectralSingleScatteringData", attr)
- xmlwriter.write_xml(self.ptype)
- xmlwriter.write_xml(self.description)
- xmlwriter.write_xml(self.f_grid)
- xmlwriter.write_xml(self.T_grid)
- xmlwriter.write_xml(self.coeff_inc)
- xmlwriter.write_xml(self.coeff_sca)
- xmlwriter.write_xml(self.pha_mat_data_real)
- xmlwriter.write_xml(self.pha_mat_data_imag)
- xmlwriter.write_xml(self.ext_mat_data_real)
- xmlwriter.write_xml(self.ext_mat_data_imag)
- xmlwriter.write_xml(self.abs_vec_data_real)
- xmlwriter.write_xml(self.abs_vec_data_imag)
- xmlwriter.write_xml(self.forward_peak_data_real)
- xmlwriter.write_xml(self.forward_peak_data_imag)
- xmlwriter.write_xml(self.backward_peak_data_real)
- xmlwriter.write_xml(self.backward_peak_data_imag)
- xmlwriter.close_tag()
-
- def __repr__(self):
- S = StringIO()
- S.write(" 1:
- S.write("%s=%4e..%4e" % (nm, g.min(), g.max()))
- elif g.size == 1:
- S.write("%s=%4e" % (nm, float(g.squeeze())))
- else:
- S.write("%s=[]" % nm)
- S.write(">")
-
- return S.getvalue()
-
- def __getitem__(self, v):
- """Get subset of spectral-single-scattering-data
-
-        Must take four elements (f, T, coeff_inc, coeff_sca).
- Only implemented for randomly oriented particles.
- """
-
-        if self.ptype != "totally_random":
-            raise RuntimeError("Slicing implemented only for "
-                               "ptype = %s. Found ptype = %s" %
-                               ("totally_random", self.ptype))
- v2 = list(v)
- for i, el in enumerate(v):
- # to preserve the rank of the data, [n] -> [n:n+1]
- if isinstance(el, numbers.Integral):
- v2[i] = slice(v[i], v[i] + 1, 1)
- f, T, clm_i, clm_s = v2
- # make a shallow copy (view of the same data)
- c = copy.copy(self)
- c.f_grid = c.f_grid[f]
- c.T_grid = c.T_grid[T]
- c.coeff_inc = c.coeff_inc[clm_i]
- c.coeff_sca = c.coeff_sca[clm_s]
- c.ext_mat_data_real = c.ext_mat_data_real[f, T, :, :]
- c.pha_mat_data_real = c.pha_mat_data_real[f, T, clm_s, :, :]
- c.abs_vec_data_real = c.abs_vec_data_real[f, T, :, :]
- c.backward_peak_data_real = c.backward_peak_data_real[f, T, :, :]
- c.forward_peak_data_real = c.forward_peak_data_real[f, T, :, :]
- c.ext_mat_data_imag = c.ext_mat_data_imag[f, T, :, :]
- c.pha_mat_data_imag = c.pha_mat_data_imag[f, T, clm_s, :, :]
- c.abs_vec_data_imag = c.abs_vec_data_imag[f, T, :, :]
- c.backward_peak_data_imag = c.backward_peak_data_imag[f, T, :, :]
- c.forward_peak_data_imag = c.forward_peak_data_imag[f, T, :, :]
- c.checksize()
- return c
-
- def checksize(self):
- """Verifies size is consistent.
-
- Raises:
- RuntimeError
-
- """
- # FIXME: Check only the real values??
- if not ((self.f_grid.size or 1, self.T_grid.size or 1) ==
- self.ext_mat_data_real.shape[:2] ==
- self.pha_mat_data_real.shape[:2] ==
- self.abs_vec_data_real.shape[:2] and
- (self.coeff_sca[0,:].size or 1, self.coeff_inc[0,:].size or 1) ==
- self.pha_mat_data_real.shape[2:4]):
- raise RuntimeError(
- "Inconsistent sizes in SingleScatteringData.\n"
- "f_grid: %s, T_grid: %s, coeff_inc: %s, coeff_sca: %s, "
- "ext_mat: %s, pha_mat: %s, abs_vec: %s" %
- (self.f_grid.size or 1, self.T_grid.size or 1,
- self.coeff_sca[0,:].size or 1, self.coeff_inc[0,:].size or 1,
- self.ext_mat_data_real.shape, self.pha_mat_data_real.shape,
- self.abs_vec_data_real.shape))
-
-    def assp2backcoef(self):
-        """The radar backscattering coefficient, i.e. the phase function
-        times 4*pi, following the standard definition in the radar
-        community.
-
-        Not yet implemented for spectral data.
-
-        """
-        return NotImplemented
-
-    def assp2g(self):
-        """The asymmetry parameter g: for a normalised phase function (p),
-        g equals the 4*pi integral of p*cos(th), where th is the scattering
-        angle.
-
-        Not yet implemented for spectral data.
-
-        """
-        return NotImplemented
-
- def checkassp(self):
- """Verfies properties of SSP.
-
- Raises:
- PyARTSError: If ptype is not macroscopically isotropic, or if first
- and last value of za_grid does not equal exactly 0 and 180
- respectively.
- """
-
- if self.ptype != "macroscopically_isotropic":
- raise RuntimeError(
- "So far just complete random orientation is handled.")
-
-
-class ScatteringMetaData:
- """Represents a ScatteringMetaData object.
-
- See online ARTS documentation for object details.
-
- """
-
- def __init__(self, description=None, source=None, refr_index=None,
- mass=None, diameter_max=None, diameter_volume_equ=None,
- diameter_area_equ_aerodynamical=None):
-
- self.description = description
- self.source = source
- self.refr_index = refr_index
- self.mass = mass
- self.diameter_max = diameter_max
- self.diameter_volume_equ = diameter_volume_equ
- self.diameter_area_equ_aerodynamical = diameter_area_equ_aerodynamical
-
- def __eq__(self, other):
- """Test the equality of ScatteringMetaData."""
-
- if isinstance(other, self.__class__):
- if self.refr_index != other.refr_index:
- return False
-
-            # note the "not": equal values must not signal inequality
-            if not np.isclose(self.mass, other.mass):
-                return False
-
-            if not np.isclose(self.diameter_max, other.diameter_max):
-                return False
-
-            if not np.isclose(self.diameter_volume_equ,
-                              other.diameter_volume_equ):
-                return False
-
-            if not np.isclose(self.diameter_area_equ_aerodynamical,
-                              other.diameter_area_equ_aerodynamical):
-                return False
-
- return True
- return NotImplemented
-
-    def __ne__(self, other):
- """Test the non-equality of ScatteringMetaData."""
- if isinstance(other, self.__class__):
- return not self.__eq__(other)
- return NotImplemented
-
- @property
- def description(self):
- """Free-form description of the scattering element, holding information
- deemed of interest by the user but not covered by other structure
- members (and not used within ARTS)."""
- return self._description
-
- @property
- def source(self):
- """Free-form description of the source of the data, e.g., Mie, T-Matrix,
- or DDA calculation or a database or a literature source."""
- return self._source
-
- @property
- def refr_index(self):
- """Free-form description of the underlying complex refractive index
- data, e.g., a literature source."""
- return self._refr_index
-
- @property
- def mass(self):
- """The mass of the scattering element."""
- return self._mass
-
- @property
- def diameter_max(self):
- """The maximum diameter (or dimension) of the scattering element,
- defined by the circumferential sphere diameter of the element. Note
- that this parameter is only used by some size distributions; it does
- not have a proper meaning if the scattering element represents an
- ensemble of differently sized particles."""
- return self._diameter_max
-
- @property
- def diameter_volume_equ(self):
- """The volume equivalent sphere diameter of the scattering element,
- i.e., the diameter of a sphere with the same volume. For nonspherical
- particles, volume refers to the volume of the particle-forming
- substance, not that of the circumferential sphere (which can be derived
- from diameter_max). If the particle consists of a mixture of materials,
- the substance encompasses the complete mixture. E.g., the substance of
- 'soft' ice particles includes both the ice and the air."""
- return self._diameter_volume_equ
-
- @property
- def diameter_area_equ_aerodynamical(self):
- """The area equivalent sphere diameter of the scattering element, i.e.,
- the diameter of a sphere with the same cross-sectional area. Here, area
- refers to the aerodynamically relevant area, i.e., the cross-sectional
- area perpendicular to the direction of fall. Similarly to volume in the
- definition of diameter_volume_equ, for non-spherical and mixed-material
- particles, area refers to the area covered by the substance mixture of
- the particle."""
- return self._diameter_area_equ_aerodynamical
-
- @description.setter
- def description(self, description):
- if description is None:
- self._description = None
- return
-
- if isinstance(description, str):
- self._description = description
- else:
- raise TypeError('description has to be str.')
-
- @source.setter
- def source(self, source):
- if source is None:
- self._source = None
- return
-
- if isinstance(source, str):
- self._source = source
- else:
- raise TypeError('source has to be str.')
-
- @refr_index.setter
- def refr_index(self, refr_index):
- if refr_index is None:
- self._refr_index = None
- return
-
- if isinstance(refr_index, str):
- self._refr_index = refr_index
- else:
- raise TypeError('refr_index has to be str.')
-
- @mass.setter
- def mass(self, mass):
- if mass is None:
- self._mass = None
- return
-
- self._mass = mass
-
- @diameter_max.setter
- def diameter_max(self, diameter_max):
- if diameter_max is None:
- self._diameter_max = None
- return
-
- self._diameter_max = diameter_max
-
- @diameter_volume_equ.setter
- def diameter_volume_equ(self, diameter_volume_equ):
- if diameter_volume_equ is None:
- self._diameter_volume_equ = None
- return
-
- self._diameter_volume_equ = diameter_volume_equ
-
- @diameter_area_equ_aerodynamical.setter
- def diameter_area_equ_aerodynamical(self, diameter_area_equ_aerodynamical):
- if diameter_area_equ_aerodynamical is None:
- self._diameter_area_equ_aerodynamical = None
- return
-
- self._diameter_area_equ_aerodynamical = diameter_area_equ_aerodynamical
-
-    def __repr__(self):
-        s = StringIO()
-        s.write("<ScatteringMetaData ")
-        s.write("mass=%s " % self.mass)
-        s.write("diameter_max=%s " % self.diameter_max)
-        s.write(">")
-        return s.getvalue()
diff --git a/typhon/arts/sensor.py b/typhon/arts/sensor.py
deleted file mode 100644
--- a/typhon/arts/sensor.py
+++ /dev/null
-# -*- coding: utf-8 -*-
-"""
-Implementation of functions related to sensor settings.
-"""
-
-import numpy as np
-
-__all__ = ['get_f_backend_rel_width',
-           'get_f_backend_const_width',
-           ]
-
-
-def get_f_backend_rel_width(f_start, f_end, bandwidth):
-    """Compute backend frequencies with relative bandwidth.
-
-    This function computes backend frequencies for a given frequency range and
-    a relative bandwidth.
-
-    Parameters:
-        f_start (float): beginning of frequency range [Hz]
-        f_end (float): end of frequency range [Hz]
-        bandwidth (float): relative bandwidth [-]
-
-    Return:
-        np.array, np.array: backend frequencies [Hz], channel widths [Hz]
-
-    """
-    if f_start <= 0:
-        raise Exception('Start frequency must be > 0.')
-
- if f_start > f_end:
- raise Exception('End frequency has to be larger than start frequency.')
-
- f_backend = [f_start]
- while f_backend[-1] <= f_end:
- f_backend.append(f_backend[-1] * (bandwidth + 2) / (2 - bandwidth))
-
- # do not include last value in results as it exceeds f_end
- f_backend = np.array(f_backend[:-1])
- backend_bandwidth = f_backend * bandwidth
-
- return f_backend, backend_bandwidth
-
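The geometric progression in the loop above follows from requiring adjacent channels of relative bandwidth b to touch edge to edge; a small sketch (using the function defined in this file):

```python
# Adjacent channels touch when the upper edge of one meets the lower edge of
# the next:  f_n + b*f_n/2 = f_{n+1} - b*f_{n+1}/2
# Solving for f_{n+1} gives the recurrence used above:
#   f_{n+1} = f_n * (2 + b) / (2 - b)
f, bw = get_f_backend_rel_width(100e9, 120e9, 0.05)  # 5 % relative bandwidth
# bw / f is 0.05 for every channel
```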
-
-def get_f_backend_const_width(f_start, f_end, bandwidth):
- """Compute backend frequencies with constant bandwidth.
-
- This function computes backend frequencies for a given frequency range and
- a constant bandwidth.
-
- Parameters:
- f_start (float): beginning of frequency range [Hz]
- f_end (float): end of frequency range [Hz]
- bandwidth (float): bandwidth [Hz]
-
- Return:
- np.array, np.array: backend frequencies [Hz], channel widths [Hz]
-
- """
- if f_start <= 0:
- raise Exception('Start frequency must be > 0.')
-
- if f_start > f_end:
- raise Exception('End frequency has to be larger than start frequency.')
-
- f_backend = [f_start]
- while f_backend[-1] <= f_end:
- f_backend.append(f_backend[-1] + bandwidth)
-
- # do not include last value in results as it exceeds f_end
- f_backend = np.array(f_backend[:-1])
- backend_bandwidth = np.array([bandwidth])
-
- return f_backend, backend_bandwidth
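A brief usage sketch:

```python
# Eleven 100-MHz-wide channels covering 100-101 GHz in constant steps.
f, bw = get_f_backend_const_width(100.0e9, 101.0e9, 100.0e6)
# f runs from 100.0e9 to 101.0e9 in 100-MHz steps; bw == [1.0e8]
```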
diff --git a/typhon/arts/types.py b/typhon/arts/types.py
deleted file mode 100644
index 68e20642..00000000
--- a/typhon/arts/types.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""Collection of all ARTS types."""
-
-from .griddedfield import (GriddedField1,
- GriddedField2,
- GriddedField3,
- GriddedField4,
- GriddedField5,
- GriddedField6,
- )
-from .covariancematrix import (CovarianceMatrix)
-from .scattering import (SingleScatteringData,
- ScatteringMetaData,
- )
-from .retrieval import (RetrievalQuantity)
-from .catalogues import (ArrayOfLineRecord,
- CIARecord,
- GasAbsLookup,
- LineMixingRecord,
- QuantumIdentifier,
- QuantumNumberRecord,
- QuantumNumbers,
- Sparse,
- SpeciesAuxData,
- SpeciesTag,
- PropagationMatrix,
- StokesVector,
- Ppath,
- GridPos,
- )
-from .internals import (ARTSCAT5,
- Rational,
- PressureBroadening,
- LineMixing,
- PartitionFunctions,
- )
-from .xsec import (XsecRecord)
-
-__all__ = []
-
-classes = {
- 'ArrayOfLineRecord': ArrayOfLineRecord,
- 'CIARecord': CIARecord,
- 'CovarianceMatrix': CovarianceMatrix,
- 'GasAbsLookup': GasAbsLookup,
- 'GriddedField1': GriddedField1,
- 'GriddedField2': GriddedField2,
- 'GriddedField3': GriddedField3,
- 'GriddedField4': GriddedField4,
- 'GriddedField5': GriddedField5,
- 'GriddedField6': GriddedField6,
- 'LineMixingRecord': LineMixingRecord,
- 'QuantumIdentifier': QuantumIdentifier,
- 'QuantumNumberRecord': QuantumNumberRecord,
- 'QuantumNumbers': QuantumNumbers,
- 'RetrievalQuantity': RetrievalQuantity,
- 'ScatteringMetaData': ScatteringMetaData,
- 'SingleScatteringData': SingleScatteringData,
- 'Sparse': Sparse,
- 'SpeciesAuxData': SpeciesAuxData,
- 'SpeciesTag': SpeciesTag,
- 'ARTSCAT5': ARTSCAT5,
- 'Rational': Rational,
- 'PressureBroadening': PressureBroadening,
- 'LineMixing': LineMixing,
- 'PartitionFunctions': PartitionFunctions,
- 'XsecRecord': XsecRecord,
- 'PropagationMatrix': PropagationMatrix,
- 'StokesVector': StokesVector,
- 'Ppath': Ppath,
- 'GridPos': GridPos,
-}
diff --git a/typhon/arts/utils.py b/typhon/arts/utils.py
deleted file mode 100644
index fc23c726..00000000
--- a/typhon/arts/utils.py
+++ /dev/null
@@ -1,102 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""Collection of utility functions."""
-
-import numpy as np
-
-from .xml.names import basic_types, tensor_names, complex_tensor_names
-
-
-def get_arts_typename(var):
- """Returns the ARTS type name for this variable.
-
- Args:
- var: Variable to get the ARTS type name for.
-
- Returns:
- str: ARTS type name.
-
- """
- if type(var).__name__ in basic_types:
- ret = basic_types[type(var).__name__]
- if ret == 'Array':
- if len(var) == 0:
- return None
- else:
- element_type = get_arts_typename(var[0])
- for element in var[1:]:
- if element_type != get_arts_typename(element):
- return None
- ret = 'ArrayOf' + element_type
- elif isinstance(var, np.ndarray):
- if np.issubdtype(var.dtype, np.complex128):
- ret = complex_tensor_names[var.ndim - 1]
- else:
- ret = tensor_names[var.ndim - 1]
- else:
- ret = type(var).__name__
-
- return ret
-
-
-def return_if_arts_type(var, artstype):
- """If variable is of specified ARTS type, return its value.
-
- Parameters:
- var: variable to check
- artstype: wanted ARTS type
-
-    Returns: The value of var if var is of the specified type; None for
-        NoneType. Raises TypeError otherwise.
- """
- arts_name = get_arts_typename(var)
- if arts_name is None:
- return None
- elif arts_name == artstype:
- return var
- else:
-        raise TypeError('Expected {} but got {}.'.format(artstype, arts_name))
-
-
-def as_quantumnumbers(var):
- """Takes a quantum number prospect and turns it into a quantum number type
- if possible
-
- Parameters:
- var (dict, QuantumNumberRecord, QuantumNumbers, None, str):
- Quantum numbers
-
- Returns:
-        QN (QuantumNumberRecord, QuantumNumbers): Returned quantum numbers.
-            No change if the input is already a quantum number type.
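-
-    Example (sketch; assumes ARTS quantum-number conventions):
-        >>> as_quantumnumbers({'UP': {'J': 3}, 'LO': {'J': 2}})  # -> QuantumNumberRecord
-        >>> as_quantumnumbers(None)  # -> empty QuantumNumbers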
- """
-
- if type(var) in [QuantumNumberRecord,
- QuantumNumbers]:
- return var
- elif var is None:
- return QuantumNumbers()
-
- assert type(var) in [dict, str], "Cannot recognize as quantum number"
-
- if 'UP' in var and 'LO' in var:
- if type(var) is dict:
- return QuantumNumberRecord.from_dict(var)
- else:
- return QuantumNumberRecord.from_str(var)
- elif 'UP' in var:
- if type(var) is dict:
- var['LO'] = {}
- return QuantumNumberRecord.from_dict(var)
- else:
- return QuantumNumberRecord.from_str(var + ' LO')
- elif 'LO' in var:
- if type(var) is dict:
- var['UP'] = {}
- return QuantumNumberRecord.from_dict(var)
- else:
- return QuantumNumberRecord.from_str(var + ' UP')
- else:
- return QuantumNumbers(var)
-
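-# These imports live at the bottom of the module, presumably to avoid a
-# circular import with typhon.arts.catalogues.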
-from .catalogues import QuantumNumberRecord
-from .catalogues import QuantumNumbers
diff --git a/typhon/arts/workspace/__init__.py b/typhon/arts/workspace/__init__.py
deleted file mode 100644
index d3a1f8c0..00000000
--- a/typhon/arts/workspace/__init__.py
+++ /dev/null
@@ -1,116 +0,0 @@
-"""
-
-The `workspace` subpackage provides an interactive interface to ARTS and
-can be used to directly call workspace methods and agendas as well as
-access and manipulate workspace variables.
-
-Setup
------
-
-The python interface needs access to the :code:`libarts_api.so` shared library, which
-is located in the `src` subfolder of the ARTS build tree. The interface expects
-the location of the ARTS build tree to be provided in the ``ARTS_BUILD_PATH``
-environment variable. For the :code:`libarts_api.so` to be built, ARTS needs to be
-configured with :code:`-DENABLE_C_API=1`:
-
-.. code-block:: bash
-
- cmake -DENABLE_C_API=1 ... /path/to/arts
-
-The Workspace Class
--------------------
-
-The main functionality of the interface is implemented by the Workspace class.
-A Workspace object represents an ongoing ARTS simulation and is used to execute
-controlfiles and workspace methods as well as to access workspace variables:
-
->>> from typhon.arts.workspace import Workspace
->>> ws = Workspace()
-
-Executing Controlfiles
-----------------------
-
-Controlfiles can be executed on a workspace using the
-:func:`~Workspace.execute_controlfile` method.
-
->>> ws.execute_controlfile("general.arts")
-
-The search path for controlfiles is the current directory plus the paths
-provided in the environment variable ``ARTS_INCLUDE_PATH``. Controlfiles
-are parsed only once and then cached. Thus, if a controlfile is modified
-after it has been loaded, these changes will only be effective after a
-restart of the python runtime.
-
-Calling Workspace Methods
--------------------------
-
-ARTS workspace methods are available as member functions of each Workspace
-object:
-
->>> ws.AtmosphereSet1D()
->>> ws.IndexSet(ws.stokes_dim, 1)
-
-Arguments can be passed to a workspace function in three ways:
-
- 1. As workspace variables using the attributes of the
- workspace object, such as :code:`ws.stokes_dim` in the
- example above.
- 2. Using one of the symbolic variables in
- :mod:`typhon.arts.workspace.variables`
- 3. Passing supported python objects directly
-
-Arguments to a WSM can be passed as positional or named arguments, or both. If
-positional arguments are provided, at least all generic output and generic input
-arguments must be given in order.
-
->>> ws.VectorNLogSpace(ws.p_grid, 361, 500e2, 0.1)
-
-Keyword arguments can be used to define generic input and output arguments. The
-available keywords are the names of the generic outputs and inputs defined in
-methods.cc.
-
->>> ws.abs_speciesSet(species=[ "O3" ])
-
-Calls to supergeneric functions are resolved by the interface.
-
-Workspace Variables
--------------------
-
-Symbolic representations of all workspace variables are available in the
-typhon.arts.workspace.variables module as module attributes. Their purpose is
-to be passed to workspace functions as placeholders for variables in the
-workspace.
-
-Variable objects can be associated with a workspace. This is the case for
-variables accessed as attributes of a workspace, for example:
-
->>> ws.y
-
-If that is the case, their value can be accessed through the value property.
-To print a textual representation of the value, the print() member function
-can be used, which calls the corresponding Print() ARTS WSM.
-
-Workspace variables of the groups Vector, Matrix and Tensor with an associated workspace
-implement the numpy array interface and can therefore be used just as any other numpy array.
-In some cases, however, it may be necessary to explicitly create a view on the array using
-numpy.asarray.
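-
-A minimal sketch (assuming ``ws.y`` holds an initialized Vector):
-
->>> import numpy as np
->>> y_view = np.asarray(ws.y)  # view into the ARTS-owned data, no copy
->>> y_copy = np.array(ws.y)    # independent copy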
-
-"""
-
-import logging
-
-from typhon.environment import environ
-
-
-logger = logging.getLogger(__name__)
-
-if environ.get('ARTS_BUILD_PATH') is None:
- logger.warning(
- "ARTS_BUILD_PATH environment variable required to locate ARTS API.")
-else:
- from typhon.arts.workspace.workspace import Workspace, arts_agenda, Include
- from typhon.arts.workspace.variables import WorkspaceVariable
- from typhon.arts.workspace.methods import WorkspaceMethod
-    from typhon.arts.workspace.api import (arts_include_path,
-                                           include_path_push,
-                                           data_path_push,
-                                           data_path_pop)
-
diff --git a/typhon/arts/workspace/agendas.py b/typhon/arts/workspace/agendas.py
deleted file mode 100644
index 50889eab..00000000
--- a/typhon/arts/workspace/agendas.py
+++ /dev/null
@@ -1,173 +0,0 @@
-"""The agendas submodule.
-
-This module provides the Agenda class, which is used to represent parsed
-controlfiles and can be executed on a given `Workspace` object.
-
-"""
-
-import ctypes as c
-import numpy as np
-import os
-
-from typhon.arts.workspace.api import find_controlfile, arts_api
-from typhon.arts.workspace.output import CoutCapture
-
-class Agenda:
- def __init__(self, ptr):
- """ Initialize Agenda object from pointer to C API Agenda object.
- Args:
- ptr(c.c_void_p): Pointer to Agenda object created with the parse_agenda
- method of the ARTS C API.
- """
- self.ptr = ptr
-
- @classmethod
- def create(cls, name):
- """
- Create agenda with given name.
-
- Parameters:
-
- name(str): The name of the agenda to create.
-
- Returns:
-
- The newly created agenda object with the given name.
-
- """
- ptr = arts_api.create_agenda(name.encode())
- return Agenda(ptr)
-
- def clear(self):
- """ Reset agenda to be empty."""
- arts_api.agenda_clear(self.ptr)
-
- def append(self, agenda):
- """Append agenda to this agenda.
-
- Parameters:
-
- agenda(:class:`Agenda`): The agenda to append to this agenda.
-
- Raises:
-
-            Exception: If :code:`agenda` is not of type :class:`Agenda`.
- """
-
- if not isinstance(agenda, Agenda):
- raise Exception("Agenda to append must be of type Agenda.")
-
- arts_api.agenda_append(self.ptr, agenda.ptr)
-
- def add_method(*args, **kwargs):
- """
- Add a workspace method call to the agenda.
-
- Parameters:
-
- ws(typhon.arts.workspace.Workspace): A (dummy) workspace object.
-
- m(typhon.arts.workspace.WorkspaceMethod): The method to add to the
- agenda
-
- *args: Positional arguments of the WSM call to add to the agenda.
-
- **kwargs: Key-word arguments of the WSM call to add to the agenda.
- """
- from typhon.arts.workspace.variables import group_names
-
- if len(args) < 3:
- raise Exception("Need at least self, a workspace and the method to"
- " add as arguments.")
- self = args[0]
- ws = args[1]
- m = args[2]
- m_id, args_out, args_in, temps = m._parse_output_input_lists(ws,
- args[3:],
- kwargs)
- arg_out_ptr = c.cast((c.c_long * len(args_out))(*args_out),
- c.POINTER(c.c_long))
- arg_in_ptr = c.cast((c.c_long * len(args_in))(*args_in),
- c.POINTER(c.c_long))
-        if m.name[-3:] != "Set" or m.name[:-3] not in group_names:
-
- for t in temps:
- arts_api.agenda_insert_set(ws.ptr, self.ptr, t.ws_id, t.group_id)
-
- arts_api.agenda_add_method(c.c_void_p(self.ptr), m_id,
- len(args_out), arg_out_ptr,
- len(args_in), arg_in_ptr)
- else:
- from typhon.arts.workspace.variables import WorkspaceVariable
-
- name_out = WorkspaceVariable.get_variable_name(args_out[0])
- name_in = WorkspaceVariable.get_variable_name(args_in[0])
- wsv_out = getattr(ws, name_out)
- wsv_in = getattr(ws, name_in)
-
- ws.Copy(wsv_out, wsv_in)
-
- group_id = arts_api.get_variable(args_out[0]).group
- arts_api.agenda_insert_set(ws.ptr, self.ptr, args_out[0], group_id)
-
- def add_callback(self, f):
- """
- Add a Python callback to the agenda.
-
-        The function f must accept one argument, which is the pointer to
- the workspace object on which the callback is executed.
-
- Parameters:
-
- f(callable): Python callable.
-
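-        Example (a minimal sketch; ``agenda`` is an existing Agenda object):
-
-        >>> def callback(ws_ptr):
-        ...     print("callback reached")
-        >>> agenda.add_callback(callback)
-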
- """
- callback = c.CFUNCTYPE(None, c.c_void_p)(f)
- arts_api.agenda_insert_callback(self.ptr, callback)
- arts_api.callbacks += [callback]
-
-
- def execute(self, ws):
- """ Execute this agenda on the given workspace.
- Args:
-            ws(Workspace): Workspace object on which to execute the agenda.
- Raises:
- Exception: If execution of agenda on workspace fails.
- """
- with CoutCapture(ws):
- e = arts_api.execute_agenda(ws.ptr, self.ptr)
- if (e):
- raise Exception("Error during execution of Agenda:\n" + e.decode("utf8"))
-
- def __to_value_struct__(self):
- return {'ptr' : self.ptr}
-
- def __del__(self):
- """Destroys ARTS C API Agenda object associated with this Agenda object."""
- try:
- arts_api.destroy_agenda(self.ptr)
- except:
- pass
-
- @staticmethod
- def parse(name):
- """Parse controlfile and return agenda representing the agenda.
-
-        Due to the way ARTS works, two agendas may not define WSVs with
-        the same name. In this case, parsing of the agenda will fail. It is
-        therefore not possible to parse an agenda twice if it defines new
-        workspace variables. In this case the user has to keep track of the
-        first parsed agenda object.
-
- Args:
-            name(str): Name of the control file. It is looked up in the paths
-                specified by the ARTS_INCLUDE_PATH environment variable.
- Raises:
- Exception: If parsing of the controlfile fails.
- """
- path = find_controlfile(name)
- ptr = arts_api.parse_agenda(path.encode())
- if not ptr:
- e = arts_api.get_error().decode("utf8")
- raise Exception("Error during parsing of controlfile " + str(path) + ":\n" + e)
- return Agenda(ptr)
diff --git a/typhon/arts/workspace/api.py b/typhon/arts/workspace/api.py
deleted file mode 100644
index 86222f50..00000000
--- a/typhon/arts/workspace/api.py
+++ /dev/null
@@ -1,502 +0,0 @@
-"""ARTS C API Interface
-
-This module provides a foreign function interface for the ARTS C API.
-It defines the C structs used by the interface as ctypes.Structure
-child classes as well as the return argument and return types of the
-function provided by the C API.
-
-Requirements
-------------
-
-The ARTS C API is provided by the libarts_api.so library and is required by
-this module. The module checks whether the ``ARTS_BUILD_PATH`` variable is set
-and assumes the library can be found in the `src` subdirectory of the build
-tree. If opening the library fails, importing this module raises an
-EnvironmentError.
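-
-A minimal sketch (the path is a placeholder; set it before importing
-``typhon.arts.workspace``):
-
->>> import os
->>> os.environ["ARTS_BUILD_PATH"] = "/path/to/arts/build"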
-
-Attributes:
-
- arts_api(CDLL): The ctypes library handle holding the ARTS C API.
-
-"""
-
-import ctypes as c
-import logging
-import os
-
-import numpy as np
-import scipy as sp
-
-from typhon.environment import environ
-
-
-logger = logging.getLogger(__name__)
-
-################################################################################
-# Version Requirements
-################################################################################
-
-arts_minimum_major = 2
-arts_minimum_minor = 3
-arts_minimum_revision = 1167
-
-################################################################################
-# Load ARTS C API
-################################################################################
-
-if environ.get("ARTS_BUILD_PATH") is None:
- raise EnvironmentError("ARTS_BUILD_PATH environment variable required to"
- + " locate ARTS API.")
-
-try:
- lib_path = os.path.join(environ.get("ARTS_BUILD_PATH"), "src",
- "libarts_api.so")
- logger.info("Loading ARTS API from: " + lib_path)
- arts_api = c.cdll.LoadLibrary(lib_path)
-except:
- raise EnvironmentError("Could not find ARTS API in your ARTS build path. "
- "Did you install it?" + os.linesep +
- "Typhon requires at least ARTS version "
- f"{arts_minimum_major}.{arts_minimum_minor}."
- f"{arts_minimum_revision}")
-
-################################################################################
-# Version Check
-################################################################################
-
-class VersionStruct(c.Structure):
- """
- The ARTS version is represented by 3 values of type long: the major, minor
- and revision number.
- """
- _fields_ = [("major", c.c_long),
- ("minor", c.c_long),
- ("revision", c.c_long)]
-
-arts_api.get_version.argtypes = None
-arts_api.get_version.restype = VersionStruct
-
-version = arts_api.get_version()
-if (version.major, version.minor, version.revision) \
- < (arts_minimum_major, arts_minimum_minor, arts_minimum_revision):
-
- raise EnvironmentError("This typhon version requires at least arts-"
- + str(arts_minimum_major) + "."
- + str(arts_minimum_minor) + "."
- + str(arts_minimum_revision) + " of ARTS.")
-
-################################################################################
-# Initialize API
-################################################################################
-
-arts_api.initialize()
-
-################################################################################
-# ARTS runtime environment manipulation
-################################################################################
-
-def find_controlfile(name):
- """ Recursively search arts include path for given file.
- Args:
- name(str): Name of the file.
- Raises:
- Exception: If the file cannot be found.
- Returns:
- path(str): The full path of the file.
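-
-    Example (sketch; the returned path is a placeholder):
-        >>> find_controlfile("general.arts")
-        '/path/to/arts/controlfiles/general.arts'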
- """
- paths = arts_include_path + [os.getcwd()]
- path = None
-
- for p in paths:
- if os.path.isfile(os.path.join(p, name)):
- path = os.path.join(p, name)
- if (path):
- return path
- else:
- raise Exception("File " + name + " not found. Search path was:\n "
- + str(paths))
-
-def include_path_push(path):
- """
- Add path to include path of the ARTS runtime.
-
- Args:
- path(str): Path to add to the ARTS include path.
- """
- arts_api.include_path_push(c.c_char_p(path.encode()))
-
-def include_path_pop():
- """
- Remove most recently added include path.
- """
- arts_api.include_path_pop()
-
-def data_path_push(path):
- """
- Add path to data path of the ARTS runtime.
-
- Args:
- path(str): Path to add to the ARTS data path.
- """
- arts_api.data_path_push(c.c_char_p(path.encode()))
-
-def data_path_pop():
- """
- Remove most recently added data path.
- """
- arts_api.data_path_pop()
-
-
-################################################################################
-# ctypes Structures
-################################################################################
-
-class VariableStruct(c.Structure):
- """
- A (symbolic) ARTS workspace variable is represented using a struct containing
-    pointers to the name and description of the variable as well as the group id,
- i.e. the Index variable encoding the type of the variable.
- """
- _fields_ = [("name", c.c_char_p),
- ("description", c.c_char_p),
- ("group", c.c_long)]
-
-
-class VariableValueStruct(c.Structure):
- """
- The ARTS C API uses C-structs to transfer workspace variables from and to the
- ARTS runtime. The VariableValueStruct class is used to access and create these
- structs in Python code. The fields of the C-struct can be accessed directly as
- attributes of the VariableValueStruct object.
- """
- _fields_ = [("ptr", c.c_void_p),
- ("initialized", c.c_bool),
- ("dimensions", 7 * c.c_long),
- (("inner_ptr"), c.POINTER(c.c_int)),
- (("outer_ptr"), c.POINTER(c.c_int))]
-
- @classmethod
- def empty(cls):
- s = VariableValueStruct(0)
- s.ptr = 0
- return s
-
- def __init__(self, value):
- """ Create a VariableValue struct from a python object.
-
-        This function creates a variable value struct from a python object so
-        that it can be passed to the C API. If the type of the object is not
-        supported, the data pointer will be None.
-
- The built-in Python types that are currently supported are:
-
- - int
- - float
- - string
- - numpy.ndarray
- - lists of int and lists of string
-
- User defined classes are supported through a generic interface. The constructor
- looks for an attribute function __to_value_struct__, which should return a dictionary
-        containing the values associated with the fields of the C-struct.
-
- Args:
- value(object): The python object to represent as a VariableValue struct.
-
- """
- ptr = None
- initialized = True
- dimensions = [0] * 7
-
- self._temporaries = []
-
- # Generic interface
- if hasattr(value, "__to_value_struct__"):
- d = value.__to_value_struct__()
- if "ptr" in d:
- ptr = d["ptr"]
- if "dimensions" in d:
- dimensions = d["dimensions"]
- # Index
- elif isinstance(value, np.long):
- ptr = c.cast(c.pointer(c.c_long(value)), c.c_void_p)
- # Numeric
- elif isinstance(value, (float, np.double)):
- temp = np.float64(value)
- ptr = c.cast(c.pointer(c.c_double(temp)), c.c_void_p)
- # String
- elif isinstance(value, str):
- ptr = c.cast(c.c_char_p(value.encode()), c.c_void_p)
- # Vector, Matrix
- elif isinstance(value, np.ndarray):
- # arrays need to be contiguous when passed to the ARTS API
- value = np.ascontiguousarray(value)
-
- if value.dtype == np.float64:
- ptr = value.ctypes.data
- for i in range(value.ndim):
- dimensions[i] = value.shape[i]
- # Scipy sparse matrices
- elif sp.sparse.issparse(value):
- m = sp.sparse.coo_matrix(value)
- self._temporaries += [m]
- dimensions[0] = m.shape[0]
- dimensions[1] = m.shape[1]
- dimensions[2] = m.nnz
- ptr = m.data.ctypes.data
- self.inner_ptr = c.cast(m.row.ctypes.data, c.POINTER(c.c_int))
- self.outer_ptr = c.cast(m.col.ctypes.data, c.POINTER(c.c_int))
- # Array of String or Integer
- elif isinstance(value, list):
- if not value:
- raise ValueError("Empty lists currently not supported.")
- ps = []
- if isinstance(value[0], str):
- for s in value:
- ps.append(c.cast(c.c_char_p(s.encode()), c.c_void_p))
- p_array = (c.c_void_p * len(value))(*ps)
- ptr = c.cast(c.pointer(p_array), c.c_void_p)
- if isinstance(value[0], np.long):
- ptr = c.cast(c.pointer((c.c_long * len(value))(*value)), c.c_void_p)
- dimensions[0] = len(value)
-
- self._value = value
- self.ptr = ptr
- self.initialized = initialized
- self.dimensions = (c.c_long * 7)(*dimensions)
-
-class CovarianceMatrixBlockStruct(c.Structure):
- """
- c struct representing block of covariance matrices.
-
- The indices field holds the indices of the corresponding
- retrieval quantities.
-
- The position field holds the row and column indices of the
- left- and upper-most element of the block w.r.t. the full
- covariance matrix.
-
-    The dimensions field holds the number of rows and columns of
-    the block.
-
-    The ptr field holds the pointer to the dense matrix data or
-    to the element pointer of the sparse matrix that represents
-    the block.
-
- The inner and outer pointer fields are null if the block is
- represented by a dense matrix. Otherwise these contain the
- pointers to the index array of the sparse matrix of which the
- block consists.
- """
- _fields_ = [("indices", 2 * c.c_long),
- ("position", 2 * c.c_long),
- ("dimensions", 2 * c.c_long),
- ("ptr", c.c_void_p),
- ("nnz", c.c_long),
- ("inner_ptr", c.POINTER(c.c_int)),
- ("outer_ptr", c.POINTER(c.c_int))]
-
-class MethodStruct(c.Structure):
- """
- The method struct holds the internal index of the method (id), pointers
- to the null-terminated strings holding name and description, the number
- of generic inputs (n_g_in) and a pointer to the array holding the group ids
- of the output types, as well as the number of generic outputs and their types.
- """
- _fields_ = [("id", c.c_ulong),
- ("name", c.c_char_p),
- ("description", c.c_char_p),
- # Output
- ("n_out", c.c_ulong),
- ("outs", c.POINTER(c.c_long)),
- # Generic Output
- ("n_g_out", c.c_ulong),
- ("g_out_types", c.POINTER(c.c_long)),
- # Input
- ("n_in", c.c_ulong),
- ("ins", c.POINTER(c.c_long)),
- # Generic Input
- ("n_g_in", c.c_ulong),
- ("g_in_types", c.POINTER(c.c_long))]
-
-# TODO: Check if can be used as constructor
-def variable_value_factory(value):
- """ Create a VariableValue struct from a python object.
-
-    This function creates a variable value struct from a python object so that
-    it can be passed to the C API. If the type of the object is not supported,
-    the data pointer will be NULL.
-
- Args:
- value(object): The python object to represent as a VariableValue struct.
-
- TODO: Add proper error handling.
- """
- return VariableValueStruct(value)
-
-################################################################################
-# Function Arguments and Return Types
-################################################################################
-
-# Create ArtsWorkspace and return handle.
-arts_api.create_workspace.argtypes = [c.c_long, c.c_long]
-arts_api.create_workspace.restype = c.c_void_p
-
-# Destroy ArtsWorkspace instance from handle.
-arts_api.destroy_workspace.argtypes = [c.c_void_p]
-arts_api.destroy_workspace.restype = None
-
-# Include path manipulation.
-arts_api.include_path_push.restype = None
-arts_api.include_path_push.argtypes = [c.c_char_p]
-
-arts_api.include_path_pop.restype = None
-arts_api.include_path_pop.argtypes = None
-
-# Data path manipulation.
-arts_api.data_path_push.restype = None
-arts_api.data_path_push.argtypes = [c.c_char_p]
-
-arts_api.data_path_pop.restype = None
-arts_api.data_path_pop.argtypes = None
-
-# Retrieve the last error message from the ARTS runtime.
-arts_api.get_error.restype = c.c_char_p
-arts_api.get_error.argtypes = None
-
-# Agendas
-#
-#
-arts_api.create_agenda.argtypes = [c.c_char_p]
-arts_api.create_agenda.restype = c.c_void_p
-
-arts_api.agenda_add_method.argtypes = [c.c_void_p, c.c_long,
- c.c_ulong, c.POINTER(c.c_long),
- c.c_ulong,c.POINTER(c.c_long)]
-arts_api.agenda_add_method.restype = None
-
-arts_api.agenda_clear.argtypes = [c.c_void_p]
-arts_api.agenda_clear.restype = None
-
-arts_api.agenda_insert_set.argtypes = [c.c_void_p, c.c_void_p, c.c_long, c.c_long]
-arts_api.agenda_insert_set.restype = None
-
-arts_api.agenda_append.argtypes = [c.c_void_p, c.c_void_p]
-arts_api.agenda_append.restype = None
-
-arts_api.parse_agenda.argtypes = [c.c_char_p]
-arts_api.parse_agenda.restype = c.c_void_p
-
-arts_api.execute_agenda.argtypes = [c.c_void_p, c.c_void_p]
-arts_api.execute_agenda.restype = c.c_char_p
-
-arts_api.destroy_agenda.argtypes = [c.c_void_p]
-arts_api.destroy_agenda.restype = None
-
-# Groups
-#
-# Returns the number of WSV groups.
-arts_api.get_number_of_groups.argtypes = None
-arts_api.get_number_of_groups.restype = c.c_ulong
-
-# Return pointer to the name of the group with given index.
-arts_api.get_group_name.argtypes = [c.c_long]
-arts_api.get_group_name.restype = c.c_char_p
-
-# Variables
-#
-# Returns the number of (symbolic) workspace variables.
-arts_api.get_number_of_variables.restype = c.c_ulong
-arts_api.get_number_of_variables.argtypes = None
-
-# Looks up the index of a workspace variable by name.
-arts_api.lookup_workspace_variable.argtypes = [c.c_char_p]
-arts_api.lookup_workspace_variable.restype = c.c_long
-
-# Returns workspace variable with index c_long as VariableStruct.
-arts_api.get_variable.argtypes = [c.c_long]
-arts_api.get_variable.restype = VariableStruct
-
-# Return pointer to variable value in a given workspace in the form of a VariableValueStruct.
-arts_api.get_variable_value.argtypes = [c.c_void_p, c.c_long, c.c_long]
-arts_api.get_variable_value.restype = VariableValueStruct
-
-# Set variable value in workspace given a workspace handle, the variable id, the group id
-# and a VariableValueStruct
-arts_api.set_variable_value.argtypes = [c.c_void_p, c.c_long, c.c_long, VariableValueStruct]
-arts_api.set_variable_value.restype = c.c_char_p
-
-# Adds a value of a given group to a given workspace.
-arts_api.add_variable.restype = c.c_long
-arts_api.add_variable.argtypes = [c.c_void_p, c.c_long, c.c_char_p]
-
-# Remove given variable from workspace.
-arts_api.erase_variable.restype = None
-arts_api.erase_variable.argtypes = [c.c_void_p, c.c_long, c.c_long]
-
-# Methods
-#
-# Returns the number of workspace methods.
-arts_api.get_number_of_methods.restype = c.c_ulong
-arts_api.get_number_of_methods.argtypes = None
-
-# Returns the method with index c_long as a MethodStruct.
-arts_api.get_method.argtypes = [c.c_long]
-arts_api.get_method.restype = MethodStruct
-
-# Return Pointer to name of jth generic output parameter of a given WSM.
-arts_api.get_method_g_out.argtypes = [c.c_long, c.c_long]
-arts_api.get_method_g_out.restype = c.c_char_p
-
-# Return Pointer to name of jth generic input parameter of a given WSM.
-arts_api.get_method_g_in.argtypes = [c.c_long, c.c_long]
-arts_api.get_method_g_in.restype = c.c_char_p
-
-# Return pointer to the default value of the jth generic input of a given WSM.
-arts_api.get_method_g_in_default.argtypes = [c.c_long, c.c_long]
-arts_api.get_method_g_in_default.restype = c.c_char_p
-
-# Return block from covariance matrix.
-arts_api.get_covariance_matrix_block.argtypes = [c.c_void_p, c.c_long, c.c_bool]
-arts_api.get_covariance_matrix_block.restype = CovarianceMatrixBlockStruct
-
-# Execute a given workspace method.
-arts_api.execute_workspace_method.argtypes = [c.c_void_p,
- c.c_long,
- c.c_ulong,
- c.POINTER(c.c_long),
- c.c_ulong,
- c.POINTER(c.c_long)]
-arts_api.execute_workspace_method.restype = c.c_char_p
-
-# Print method documentation.
-arts_api.method_print_doc.argtypes = [c.c_long]
-arts_api.method_print_doc.restype = c.c_char_p
-
-# Callback insertion
-arts_api.callbacks = []
-arts_api.agenda_insert_callback.argtypes = [c.c_void_p, c.c_void_p]
-arts_api.agenda_insert_callback.restype = None
-
-################################################################################
-# Setup ARTS Environment
-################################################################################
-
-try:
- arts_include_path = environ.get("ARTS_INCLUDE_PATH").split(":")
-except:
- arts_include_path = []
-
-try:
- arts_data_path = environ.get("ARTS_DATA_PATH").split(":")
-except:
- arts_data_path = []
-
-# Set runtime parameters
-for p in arts_include_path:
- include_path_push(p)
-
-for p in arts_data_path:
- data_path_push(p)
-
-include_path_push(os.getcwd())
-data_path_push(os.getcwd())
diff --git a/typhon/arts/workspace/methods.py b/typhon/arts/workspace/methods.py
deleted file mode 100644
index d7f5b638..00000000
--- a/typhon/arts/workspace/methods.py
+++ /dev/null
@@ -1,424 +0,0 @@
-""" The methods submodule.
-
-This module exposes all ARTS workspace methods, each represented by a WorkspaceMethod object.
-
-The methods are loaded dynamically when the module is imported, which ensures
-that they are up to date with the current ARTS build.
-
-Attributes:
-
- workspace_methods(dict): Dictionary containing all ARTS workspace methods.
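-
-A minimal sketch (assuming an ARTS build is available):
-
->>> from typhon.arts.workspace.methods import workspace_methods
->>> workspace_methods["yCalc"].describe()  # print the WSM documentation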
-
-"""
-
-import ast
-import re
-import ctypes as c
-import numpy as np
-
-
-from typhon.arts.workspace.api import arts_api
-from typhon.arts.workspace.variables import WorkspaceVariable, group_ids, group_names
-from typhon.arts.workspace import variables, workspace
-from typhon.arts.workspace.output import CoutCapture
-
-class WorkspaceMethod:
- """
-    The WorkspaceMethod class represents ARTS workspace methods. Each workspace
-    method provides a call function that forwards the call to the ARTS C API.
-
- Attributes:
- m_ids([int]): Indices of supergeneric overloads of this WSM
- name(str): The name of the method as defined in methods.cc
- description(str): The documentation of the method as defined in methods.cc
- outs([int]): Indices of the output variables of the method.
- n_out(int): The number of output arguments.
- n_g_out(int): The number of generic outputs.
- g_out([str]): The names of the generic output arguments.
- g_out_types([dict]): List of dicts associating the name of a generic output
- with its types for each of the supergeneric overloads.
- n_in(int): The number of input arguments.
- ins([int]): The indices of the input arguments of the WSM.
- n_g_in(int): The number of generic input arguments.
- g_in_types([dict]): List of dicts associating the name of a generic input to the
- expected type for each supergeneric overload of the method.
-        g_in_default(dict): Dictionary containing the default values for each generic parameter.
- g_in([str]): List of names of the generic input arguments.
- """
-
-    # Regular expression that matches Create WSMs
- create_regexp = re.compile(r"^(\w*)Create$")
-
- def __init__(self, m_id, name, description, outs, g_out_types, ins, g_in_types):
- """Create a WorkspaceMethod object from a given id, name, description and types of
- generic input and output arguments.
-
- Args:
- m_id(int): The index identifying the method in the C API
- name(str): The name of the method
- description(str): Method documentation
- outs([int]): List of indices of the (non-generic) output arguments of the method.
- g_out_types([int]): Group ids of the generic output types
-            ins([int]): List of indices of the (non-generic) input arguments of the method.
- g_in_types([int]): Group ids of the generic input types
- """
- self.m_ids = [m_id]
- self.name = name
- self.description = description
-
- # Output
- self.outs = outs
- self.n_out = len(outs)
-
- # Generic Output
- self.n_g_out = len(g_out_types)
- self.g_out_types = [WorkspaceMethod.get_output_dict(m_id, g_out_types)]
- self.g_out = [k for k in self.g_out_types[0]]
-
- # Input
- self.ins = ins
- self.n_in = len(ins)
-
- # Generic Input
- self.n_g_in = len(g_in_types)
- self.g_in_types = [WorkspaceMethod.get_input_dict(m_id, g_in_types)]
- self.g_in_default = WorkspaceMethod.get_default_input_dict(m_id, g_in_types)
- self.g_in = [k for k in self.g_in_types[0]]
-
- self.is_create = False
- if (WorkspaceMethod.create_regexp.match(name)):
- self.is_create = True
-
- def __repr__(self):
- return arts_api.method_print_doc(self.m_ids[0]).decode("utf8")
-
- def add_overload(self, m_ids, g_in_types, g_out_types):
- """ Add one or more overloads to a workspace method.
-
-        Use this function to add a supergeneric overload to a WorkspaceMethod object
-        so that it will be considered in overload resolution when call(...) is called.
-
- TODO: Simplify this to take a WorkspaceMethod object.
-
- Args:
- m_ids([int]): The method ids of the supergeneric ARTS WSMs which should be added
- to the list of overloads.
- g_in_types ([dict]): List of dicts containing the mappings between argument
- names and indices of expected groups
- g_out_types ([dict]): List of dicts containing the mappings between argument
- names and indices of expected groups
- """
- self.m_ids += m_ids
- self.g_in_types += g_in_types
- self.g_out_types += g_out_types
-
- @staticmethod
- def get_input_dict(m_id, in_types):
- """Get mapping of names of generic input variables to indices of groups.
-
- Args:
- m_id(int): Index of the method.
- in_types([int]): List of group indices of the generic input arguments.
- Return:
- dict: The mapping.
- """
- res = dict()
- for i,t in enumerate(in_types):
- res[arts_api.get_method_g_in(m_id, i).decode("utf8")] = t
- return res
-
- @staticmethod
- def get_default_input_dict(m_id, g_in):
- """Get dict mapping names of generic input arguments to default values.
-
- Is None if no default value for a given generic input is given.
-
- Args:
- m_id(int): Index of the method.
- g_in([str]): Names of the generic input arguments.
- Return:
- dict: The mapping.
- """
- res = dict()
- for i,t in enumerate(g_in):
- k = arts_api.get_method_g_in(m_id, i).decode("utf8")
- d = arts_api.get_method_g_in_default(m_id, i).decode("utf8")
- if d == "":
- pass
- elif d[0] == "@":
- d = None
- elif d == "Inf":
- res[k] = np.float64("inf")
- else:
- try:
- d = WorkspaceVariable.convert(group_names[t], ast.literal_eval(d))
- res[k] = d
- except:
- res[k] = d
- return res
-
- @staticmethod
- def get_output_dict(m_id, out_types):
- """Get mapping of names of generic output variables to indices of groups.
-
- Args:
- m_id(int): Index of the method.
-            out_types([int]): List of group indices of the generic output arguments.
- Return:
- dict: The mapping.
- """
- res = dict()
- for i,t in enumerate(out_types):
- res[arts_api.get_method_g_out(m_id, i).decode("utf8")] = t
- return res
-
- def _parse_output_input_lists(self, ws, args, kwargs):
- n_args = self.n_g_out + self.n_g_in
-
- ins = self.ins[:]
- outs = self.outs[:]
- temps = []
-
- # Add positional arguments to kwargs
-        if (len(args)) and (len(args)) < self.n_g_out + self.n_in:
-            raise Exception("Only " + str(len(args)) + " positional arguments provided, "
-                            + "but WSM " + self.name + " requires at least "
-                            + str(self.n_g_out + self.n_in) + ".")
- for j in range(len(args)):
- if j < self.n_g_out:
- name = self.g_out[j]
- try:
- kwargs[name] = args[j]
- except:
- raise Exception("Generic parameter " + str(name) + " set twice.")
- elif j < self.n_g_out + self.n_in:
- k = j - self.n_g_out
- if type(args[j]) == WorkspaceVariable:
- ins[k] = args[j].ws_id
- else:
- temps.append(ws.add_variable(args[j]))
- ins[k] = temps[-1].ws_id
- if self.ins[k] in outs:
- # Need to replace variable also in output if its used as both output
- # and input.
- outs[outs.index(self.ins[k])] = ins[k]
-
- elif j < self.n_g_out + self.n_in + self.n_g_in:
- name = self.g_in[j - self.n_g_out - self.n_in]
- try:
- kwargs[name] = args[j]
- except:
- raise Exception("Generic parameter " + str(name) + " set twice.")
- else:
-                raise Exception(str(len(args)) + " positional arguments given, but"
-                                " this WSM takes at most "
-                                + str(n_args + self.n_in) + ".")
-
- # Check output argument names
- g_output_args = dict()
- for k in self.g_out:
- if not k in kwargs:
- raise Exception("WSM " + self.name + " needs generic output " + k)
- else:
- g_output_args[k] = kwargs[k]
-
- # Check input argument names
- g_input_args = dict()
- for k in self.g_in:
- if not k in kwargs:
- if k in self.g_in_default:
- g_input_args[k] = self.g_in_default[k]
- else:
- raise Exception("WSM " + self.name + " needs generic input " + k)
- else:
- g_input_args[k] = kwargs[k]
-
- # Resolve overload (if necessary).
- g_out_types = dict([(k,WorkspaceVariable.get_group_id(g_output_args[k]))
- for k in self.g_out])
- g_in_types = dict([(k,WorkspaceVariable.get_group_id(g_input_args[k]))
- for k in self.g_in])
- m_id = self.m_ids[0]
- sg_index = 0
-
- if (len(self.m_ids) > 1):
- out_indices = [i for i,ts in enumerate(self.g_out_types) if ts == g_out_types]
- in_indices = [i for i,ts in enumerate(self.g_in_types) if ts == g_in_types]
- sg_indices = set(out_indices) & set(in_indices)
-
- if len(sg_indices) > 1:
- raise Exception("Could not uniquely resolve super-generic overload.")
-
- if len(sg_indices) == 0:
- raise Exception("Could not find super-generic overload matching"
- + " the given groups.")
-
- sg_index = sg_indices.pop()
- m_id = self.m_ids[sg_index]
-
- # Combine input and output arguments into lists.
- arts_args_out = []
- for out in outs:
- arts_args_out.append(out)
-
- for name in self.g_out:
- arg = g_output_args[name]
- if not type(arg) == WorkspaceVariable:
- raise ValueError("Generic Output " + name + " must be an ARTS WSV.")
- group_id = arg.group_id
- expected = self.g_out_types[sg_index][name]
- if not group_id == expected:
- raise Exception("Generic output " + name + " expected to be of type "
- + group_names[expected])
- arts_args_out.append(arg.ws_id)
-
- arts_args_in = []
- for i in ins:
- if not i in outs:
- arts_args_in.append(i)
-
- for name in self.g_in:
- arg = g_input_args[name]
- if type(arg) == WorkspaceVariable:
- arts_args_in.append(arg.ws_id)
- else:
- group_id = WorkspaceVariable.get_group_id(arg)
- expected = self.g_in_types[sg_index][name]
- if not group_id == expected:
- raise Exception("Generic input " + name + " expected to be of type "
- + group_names[expected])
- temps.append(ws.add_variable(arg))
- arts_args_in.append(temps[-1].ws_id)
- return (m_id, arts_args_out, arts_args_in, temps)
-
-
- def create(self, ws, name = None):
- """
-        Calls to Create WSMs are handled differently. This method determines
-        the group from the method name and then adds a variable of this group
-        to the workspace ws. A handle for this variable is then added as an
-        attribute to the typhon.arts.workspace.variables module.
-
- Args:
- ws(Workspace): Workspace object to add the variable to
- name(str): Name of the variable to add to the workspace
- """
- group = WorkspaceMethod.create_regexp.match(self.name).group(1)
- group_id = group_ids[group]
-
- if not name:
- name = "__anonymous_" + str(len(ws._vars))
- ws_id = arts_api.add_variable(ws.ptr, group_id, name.encode())
- else:
- # Is there a WSM with that name?
- if name in workspace_methods.keys():
- raise Exception("A WSM with the name " + name + " already exists.")
-
- # Is there a WSV with that name?
- ws_id = arts_api.lookup_workspace_variable(name.encode())
-            # If yes, check that it is of the same group.
- if not ws_id == -1:
- v = arts_api.get_variable(ws_id)
- if not v.group == group_id:
- raise Exception("A WSV with the name " + name + " but of goup "
- + group_names[v.group] + " already exists.")
- # Otherwise we add the variable.
- else:
- ws_id = arts_api.add_variable(ws.ptr, group_id, name.encode())
-
- wsv = WorkspaceVariable(ws_id, name, group, "User defined variable.", ws)
- setattr(variables, name, wsv)
- ws._vars[name] = wsv
- return wsv
-
- def call(*args, **kwargs):
- """ Execute workspace method.
-
- This method will execute the workspace method (args[0]) on the workspace object (args[1])
- interpreting the remaining arguments in `*args` and `**kwargs` as arguments.
-
- Positional arguments in `*args` are interpreted in order with output arguments coming
- first.
-
- Keyword arguments in kwargs are interpreted according to the name of the generic
- parameters of the ARTS WSM.
-
- Args:
- args(list): Positional arguments with the first argument being the WorkspaceMethod
- instance, i.e. self = args[0], the second the Workspace object (args[1]). The
- remaining arguments are interpreted as generic arguments to the ARTS WSM.
-            kwargs(dict): Keyword args are interpreted as named generic arguments to the ARTS WSM
- according to its definition in methods.cc.
- """
-
- self = args[0]
-
- if self.is_create:
- return self.create(*args[1:])
-
- ws = args[1]
-
- (m_id, arts_args_out, arts_args_in, temps) = self._parse_output_input_lists(ws,
- args[2:],
- kwargs)
-
- # Execute WSM and check for errors.
- arg_out_ptr = c.cast((c.c_long * len(arts_args_out))(*arts_args_out), c.POINTER(c.c_long))
- arg_in_ptr = c.cast((c.c_long * len(arts_args_in))(*arts_args_in), c.POINTER(c.c_long))
-
- with CoutCapture(ws):
- e_ptr = arts_api.execute_workspace_method(ws.ptr, m_id,
- len(arts_args_out),
- arg_out_ptr,
- len(arts_args_in),
- arg_in_ptr)
- if (e_ptr):
- raise Exception("Call to ARTS WSM " + self.name + " failed with error: "
- + e_ptr.decode("utf8").format())
-
- # Remove temporaries from workspace (in reverse order).
- for t in temps[::-1]:
- t.erase()
-
-
- def describe(self):
- """ Print WSM documentation. """
- print(self.description.format())
-
-def iter_raw():
- """ Iterator returning a WorkspaceMethod object for each available ARTS WSM.
-
- This iterator returns super-generically overloaded methods several times.
-
- Yields:
- WorkspaceMethod: The next ARTS Workspace method as defined in methods.cc in
- increasing order.
- """
- for i in range(arts_api.get_number_of_methods()):
- m = arts_api.get_method(i)
- name = m.name.decode("utf8")
- description = m.description.decode("utf8")
- outs = [m.outs[i] for i in range(m.n_out)]
- g_out_types = [m.g_out_types[i] for i in range(m.n_g_out)]
- ins = [m.ins[i] for i in range(m.n_in)]
- g_in_types = [m.g_in_types[i] for i in range(m.n_g_in)]
- yield WorkspaceMethod(m.id, name, description, outs, g_out_types, ins, g_in_types)
-
-def iter():
- """ Iterator returning a WorkspaceMethod object for each available ARTS WSM.
-
-    This iterator returns overloaded workspace methods only once, i.e. super-generically
-    overloaded WSMs are not returned multiple times.
-
- Yields:
- WorkspaceMethod: The next ARTS Workspace method as defined in methods.cc in
- increasing order.
- """
-    for k, m in workspace_methods.items():
- yield m
-
-workspace_methods = dict()
-for m in iter_raw():
- if m.name in workspace_methods:
- workspace_methods[m.name].add_overload(m.m_ids, m.g_in_types, m.g_out_types)
- else:
- workspace_methods[m.name] = m
diff --git a/typhon/arts/workspace/output.py b/typhon/arts/workspace/output.py
deleted file mode 100644
index 009f072c..00000000
--- a/typhon/arts/workspace/output.py
+++ /dev/null
@@ -1,67 +0,0 @@
-"""
-C stdout Redirection
-====================
-
-This module contains the CoutCapture class, which is used to capture
-the output that ARTS writes to the C stdout stream.
-
-Attributes:
-    cout_file: Temporary file to which stdout is redirected if Python's
-        `sys.stdout` does not have a file descriptor, i.e. Python's output is
-        not directed to the stdout file. This is the case when the code is run
-        in a jupyter notebook.
-
-    If the python output is already directed to stdout, `cout_file` is None
-    since no redirection of output is required.
-"""
-import io
-import os
-import sys
-import tempfile
-
-class CoutCapture():
- """
- CoutCapture class to capture output from stdout file. Implements
- the context management protocol.
-
- The CoutCapture class empties the file containing the redirected
- stdout upon entering and prints the file content using `print`
- upon exit.
-
- Usage:
-
- >>> with CoutCapture(ws):
- >>> ws.Print(ws.Verbosity)
-
- """
- def __init__(self, ws, silent = False):
- """
- Create CoutCapture for given workspace object.
-
- Args:
- ws: The `Workspace` object from which the output will be
-                captured.
-            silent(bool): If True, do not print the captured output on exit.
- """
- self.ws = ws
- self.silent = silent
-
- def __enter__(self):
- if cout_file:
- cout_file.seek(0)
- cout_file.truncate()
-
- def __exit__(self, type, value, traceback):
- if cout_file and not self.silent:
- cout_file.seek(0, io.SEEK_SET)
- lines = [l.decode("UTF8") for l in cout_file.readlines()]
- if lines:
- print("".join(["ARTS[{0}]: {1}".format(self.ws.ptr , l)
- for l in lines]))
-
-cout_file = None
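-# If sys.stdout has no file descriptor (e.g. inside a Jupyter notebook),
-# redirect the process-level stdout (fd 1) into a temporary file.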
-try:
- sys.stdout.fileno()
-except:
- cout_file = tempfile.TemporaryFile(mode='w+b')
- os.dup2(cout_file.fileno(), 1)
diff --git a/typhon/arts/workspace/utility.py b/typhon/arts/workspace/utility.py
deleted file mode 100644
index c376abd4..00000000
--- a/typhon/arts/workspace/utility.py
+++ /dev/null
@@ -1,45 +0,0 @@
-"""
-Utility functions.
-
-Contains miscellaneous functions that do complicated things but
-are not really relevant to other code.
-"""
-
-def unindent(source):
- """Unindent source code.
-
- Determines indent level of the first line and unindents
- all lines by the found indent level.
-
- Args:
- source: The source code to unindent as a string as
- obtained for example from inspect.getsource.
-
-    Raises:
-        Exception: If non-whitespace characters are detected among the
-            characters that are stripped off the code lines.
-
- Returns:
- new_source: The unindented source code.
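-
-    Example:
-        >>> unindent("    a\\n    b")
-        'a\\nb'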
- """
-
- if not type(source) == str:
- raise Exception("Argument must be a string.")
-
- lines = source.splitlines()
-
- if len(lines) < 1:
- return ""
-
- n_indent = len(lines[0]) - len(lines[0].lstrip())
-
- lines_new = []
- for i, l in enumerate(lines):
- stripped = l[:n_indent]
- if len(stripped.lstrip()) > 0:
- err = "Error when unindenting source code. Stripped characters" \
- + stripped + " in line " + str(i) + " are non-whitespace " \
- + " characters."
- raise Exception(err)
- lines_new += [l[n_indent:]]
- return "\n".join(lines_new)
diff --git a/typhon/arts/workspace/variables.py b/typhon/arts/workspace/variables.py
deleted file mode 100644
index e0ebd6e2..00000000
--- a/typhon/arts/workspace/variables.py
+++ /dev/null
@@ -1,422 +0,0 @@
-""" The variables submodule.
-
-This module contains symbolic representations of all ARTS workspace variables.
-
-The variables are loaded dynamically when the module is imported, which ensures
-that they are up to date with the current ARTS build.
-
-TODO: The group names list is redundant w.r.t. group_ids.keys(). Should be removed.
-
-Attributes:
- group_names([str]): List of strings holding the groups of ARTS WSV variables.
- group_ids(dict): Dictionary mapping group names to the group IDs which identify
- groups in the ARTS C API.
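-
-A minimal sketch (assuming an ARTS build is available):
-
->>> from typhon.arts.workspace import variables
->>> variables.y.describe()  # print the documentation of the WSV y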
-"""
-
-import ctypes as c
-import os
-import numpy as np
-import re
-import scipy as sp
-import tempfile
-
-from typhon.arts.workspace.api import arts_api
-from typhon.arts.workspace.agendas import Agenda
-from typhon.arts.xml.names import tensor_names
-
-
-class WorkspaceVariable:
- """
-    The WorkspaceVariable class represents ARTS workspace variables in a symbolic
-    way. This means that they are not associated with a single workspace and
-    therefore do not have a unique value. Their value in a given workspace can,
-    however, be accessed through the value property.
-
- Attributes:
- ws_id(int): The Index variable identifying the variable in the ARTS C API.
- name(str): The name of the workspace variable.
- group(str): The name of the group this variable belongs to.
-        description(str): The documentation of the variable as in workspace.cc
- """
- def __init__(self, ws_id, name, group, description, ws = None):
- self.ws_id = ws_id
- self.name = name
- self.group = group
- self.group_id = group_ids[group]
- self.description = description
- self.ws = ws
-
- self.ndim = None
- if self.group == "Vector":
- self.ndim = 1
- if self.group == "Matrix":
- self.ndim = 2
- m = re.match(r"^Tensor(\d)$", self.group)
- if m:
- self.ndim = int(m.group(1))
-
- self.update()
-
- def __getstate__(self):
- return self.ws_id, self.name, self.group, \
- self.group_id, self.description, self.ndim
-
- def __setstate__(self, state):
- self.ws_id, self.name, self.group, self.group_id, self.description,\
- self.ndim = state
-
- def __repr__(self):
- s = "ARTS Workspace Variable\n\n"
- s += "Name: " + self.name + "\n"
- s += "Group: " + self.group + "\n\n"
- s += self.description
- return s
-
- def __str__(self):
- return self.__repr__()
-
- def __setattr__(self, name, value):
-
- if name == "value":
- if self.ws is None:
- raise Exception("Cannot set value of WSV without associated "
- " workspace.")
- else:
- self.ws.__setattr__(self.name, value)
- else:
- super().__setattr__(name, value)
-
- def print(self):
- """ Print variable value using ARTS Print(...) WSM.
-
- Raises:
- Exception: If the variable has no associated workspace.
- """
- if (self.ws):
- self.ws.Print(self, 1)
- else:
- raise Exception("Can't print variable without associated ARTS workspace.")
-
- @staticmethod
- def get_variable_name(i):
- """
- Lookup the name of a variable given its workspace index.
-
- Args:
- i(int): The index of the workspace variable.
-
- Returns:
- str: The name of the workspace variable.
- """
- s = arts_api.get_variable(i)
- name = s.name.decode("utf8")
- return name
-
- @staticmethod
- def get_group_id(value):
- """ This static method is used to determine how (and if) a given python variable can
-        be mapped to an ARTS workspace variable group. The returned group id is required to
- add the variable to a workspace.
-
- Args:
- value(any): The python variable to map to the ARTS group.
-
- Returns:
- int: The index of the group which can be used to represent the python variable
- or None if the type is not supported.
- """
- if isinstance(value, WorkspaceVariable):
- return group_ids[value.group]
- elif isinstance(value, Agenda):
- return group_ids["Agenda"]
- elif isinstance(value, np.long):
- return group_ids["Index"]
- elif isinstance(value, (float, np.double)):
- return group_ids["Numeric"]
- elif isinstance(value, str):
- return group_ids["String"]
- elif isinstance(value, np.ndarray):
- if value.ndim == 1:
- return group_ids["Vector"]
- elif value.ndim == 2:
- return group_ids["Matrix"]
- elif value.ndim == 3:
- return group_ids["Tensor3"]
- elif value.ndim == 4:
- return group_ids["Tensor4"]
- elif value.ndim == 5:
- return group_ids["Tensor5"]
- elif value.ndim == 6:
- return group_ids["Tensor6"]
- elif value.ndim == 7:
- return group_ids["Tensor7"]
- else:
- raise ValueError(
- "Numpy arrays are only supported up to 7 dimensions."
- )
- elif sp.sparse.issparse(value):
- return group_ids["Sparse"]
- elif type(value) == list:
- group_name = ""
- nested_value = value
- while type(nested_value) == list and len(nested_value) > 0:
- nested_value = nested_value[0]
- group_name += "ArrayOf"
- if type(nested_value) == list and len(nested_value) == 0:
- raise ValueError("Empty lists are currently not handled.")
- else:
- typename = type(nested_value).__name__
- if isinstance(nested_value, str):
- group_name += "String"
- return group_ids[group_name]
- elif isinstance(nested_value, np.long):
- group_name += "Index"
- return group_ids[group_name]
- elif isinstance(nested_value, (float, np.double)):
- raise ValueError("Vectors, Matrices or Tensors should be"
- " passed as numpy.ndarray and not as"
- " lists.")
- elif hasattr(nested_value, 'write_xml') and typename in group_names:
- return group_ids[group_name + typename]
- elif isinstance(nested_value, np.ndarray):
- group_name += tensor_names[len(nested_value.shape) - 1]
- return group_ids[group_name]
- else:
- raise ValueError(
- f"Nested array with internal type "
- f"{type(nested_value)} not supported.")
- elif hasattr(value, 'write_xml') and type(value).__name__ in group_names:
- return group_ids[type(value).__name__]
- else:
- raise ValueError(f"Type {type(value)} currently not supported.")
-
- @classmethod
- def convert(cls, group, value):
- """ Tries to convert a given python object to an object of the python class
- representing the given ARTS WSV group.
-
-        Args:
-            group(string): The name of an ARTS WSV group.
-            value(any): The object to convert.
-
- Returns:
- (any): The converted object.
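-
-        Example:
-            >>> WorkspaceVariable.convert("Index", 1.0)
-            1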
- """
- if (group == "Index"):
- return int(value)
- if (group == "String"):
- return value
- if (group == "ArrayOfString"):
- return [str(i) for i in value]
- if (group == "Numeric"):
- return np.float64(value)
- if (group == "Vector"):
- return np.array(value, dtype=np.float64, order='C', ndmin=1)
- if (group == "Matrix"):
- return np.array(value, dtype=np.float64, order='C', ndmin=2)
- if (group == "Sparse"):
- return sp.sparse.coo_matrix(value)
- if (group[:6] == "Tensor"):
- dim = int(group[6])
- return np.array(value, dtype=np.float64, order='C', ndmin=dim)
- if group.startswith("ArrayOf"):
- subgroup = group[7:]
- if hasattr(value, "__iter__"):
- return [cls.convert(subgroup, v) for v in value]
- else:
- return [cls.convert(subgroup, value)]
- return None
-
- @staticmethod
- def iter():
- """
- Iterator returning a WorkspaceVariable object for each ARTS WSV available.
- """
- for i in range(arts_api.get_number_of_variables()):
- s = arts_api.get_variable(i)
- name = s.name.decode("utf8")
- description = s.description.decode("utf")
- group = group_names[s.group]
- yield WorkspaceVariable(i, name, group, description)
-
- @property
- def initialized(self):
-
- ws = self.ws
- if ws is None:
- raise ValueError("WorkspaceVariable object needs associated"
- " Workspace to determine value.")
-
- v = arts_api.get_variable_value(ws.ptr, self.ws_id, self.group_id)
- return v.initialized
-
- @property
- def value(self):
- """ Return the value of the variable in a given workspace.
-
- By default this function will check the value in the workspace associated
- with the variable of in the workspace object provided as argument to the
- function call. If the variable has an associated workspace the workspace
- provided as argument will be ignored.
-
- Returns:
- The value of the workspace variable represented by an object of
- the corresponding python types.
-
- Raises:
- Exception: If the type of the workspace variable is not supported
- by the interface.
-
- """
- from typhon.arts.types import classes as typhon_classes
-
-
-        ws = self.ws
-        if not ws:
-            raise ValueError("WorkspaceVariable object needs an associated"
-                             " Workspace to determine its value.")
-
- v = arts_api.get_variable_value(ws.ptr, self.ws_id, self.group_id)
- if not v.initialized:
- raise Exception("WorkspaceVariable " + self.name + " is uninitialized.")
-
- if self.group in typhon_classes:
- cls = typhon_classes[self.group]
- if hasattr(cls, "__from_variable_value_struct__"):
- return cls.__from_variable_value_struct__(v)
- if self.group == "Index":
- return c.cast(v.ptr, c.POINTER(c.c_long))[0]
- elif self.group == "Numeric":
- return c.cast(v.ptr, c.POINTER(c.c_double))[0]
- elif self.group == "String":
- return (c.cast(v.ptr, c.c_char_p)).value.decode("utf8")
- elif self.group == "ArrayOfIndex":
- return [c.cast(v.ptr, c.POINTER(c.c_long))[i]
- for i in range(v.dimensions[0])]
- elif self.group == "Sparse":
- m = v.dimensions[0]
- n = v.dimensions[1]
- nnz = v.dimensions[2]
- if nnz == 0:
- return sp.sparse.csr_matrix(0)
- else:
- data = np.ctypeslib.as_array(c.cast(v.ptr,
- c.POINTER(c.c_double)),
- (nnz,))
- row_indices = np.ctypeslib.as_array(v.inner_ptr, (nnz,))
- col_starts = np.ctypeslib.as_array(v.outer_ptr, (m + 1,))
- return sp.sparse.csr_matrix((data, row_indices, col_starts),
- shape=(m,n))
- elif self.group == "Agenda":
- return Agenda(v.ptr)
- elif self.ndim:
- shape = []
- size = 1
- for i in range(self.ndim):
- shape.append(v.dimensions[i])
- size *= v.dimensions[i]
- if size > 0:
- self.__array_interface__ = {"shape" : tuple(shape),
- "typestr" : "|f8",
- "data" : (v.ptr, False),
- "version" : 3}
- return np.asarray(self)
- else:
- return np.zeros(shape)
- else:
- try:
- return self.to_typhon()
- except:
- raise Exception("Type of workspace variable is not supported "
- + " by the interface.")
-
- def update(self):
- """ Update data references of the object.
-
- References to vector, matrices and tensors may change and must therefore
- be updated dynamically to ensure they are consistent with the state of
- the associated workspace. This method takes care of that.
-
- """
-        if self.ws is not None and self.ndim:
- v = arts_api.get_variable_value(self.ws.ptr, self.ws_id, self.group_id)
- shape = []
- for i in range(self.ndim):
- shape.append(v.dimensions[i])
- self.__array_interface__ = {"shape" : tuple(shape),
- "typestr" : "|f8",
- "data" : (v.ptr, False),
- "version" : 3}
-
- def erase(self):
- """
- Erase workspace variable from its associated workspace.
- """
- if self.ws:
- arts_api.erase_variable(self.ws.ptr, self.ws_id, self.group_id)
- self.ws = None
-
- def describe(self):
- """
-        Print the description of the variable as given in ARTS workspace.cc
- """
- print(self.description.format())
-
- def to_typhon(self):
- """
- Return the value of this variable as a typhon type. This function
- writes the value of the variable to a temporary file and reads it
- into Python using typhon load function. The purpose of this function
- is to access WSV whose groups are not natively supported by the
- C API.
-
- Returns:
- A typhon object with the same value as the WSV in the associated
- workspace.
- """
- from typhon.arts.xml import load
-
- if not self.ws:
- raise Exception("Cannot retrieve the value of a variable without "
- + " associated Workspace.")
- with tempfile.TemporaryDirectory() as tmpdir:
- tfile = os.path.join(tmpdir, 'wsv.xml')
- self.ws.WriteXML("binary", self, tfile)
- v = load(tfile)
-
- return v
-
- def from_typhon(self, var):
- """
- Set the value of this WSV in the associated workspace to the given
-        typhon type. This function writes the value in binary format to a
-        temporary file and reads it into the workspace.
-
- Args:
- var: The value to which this WSV should be set in the associated
- workspace.
-
- """
- from typhon.arts.xml import save
-
- if not self.ws:
- raise Exception("Cannot set the value of a variable without "
- + " associated Workspace.")
- with tempfile.TemporaryDirectory() as tmpdir:
- tfile = os.path.join(tmpdir, 'wsv.xml')
- save(var, tfile, format='binary')
- self.ws.ReadXML(self, tfile)
-
-
-# Get ARTS WSV groups
-group_names = [arts_api.get_group_name(i).decode("utf8")
- for i in range(arts_api.get_number_of_groups())]
-group_ids = {name: i for i, name in enumerate(group_names)}
-
-
-workspace_variables = dict()
-for v in WorkspaceVariable.iter():
- globals()[v.name] = v
- workspace_variables[v.name] = v
-
diff --git a/typhon/arts/workspace/workspace.py b/typhon/arts/workspace/workspace.py
deleted file mode 100644
index 666e242d..00000000
--- a/typhon/arts/workspace/workspace.py
+++ /dev/null
@@ -1,550 +0,0 @@
-"""
-The workspace submodule.
-
-Contains the Workspace which implements the main functionality of the ARTS interface.
-Users should only have to use this class to interact with ARTS.
-
-Attributes:
- imports(dict): Dictionary of parsed controlfiles. This is kept to avoid
- crashing the ARTS runtime when a file is parsed a second time.
-
-"""
-import ctypes as c
-import logging
-import sys
-
-from ast import parse, Call, Name, Expression, Expr, FunctionDef, Starred, Module
-from inspect import getsource, getclosurevars
-from copy import copy
-import os
-
-from typhon.arts.workspace.api import (
- arts_api,
- VariableValueStruct,
- data_path_push,
- data_path_pop,
- include_path_push,
- include_path_pop,
-)
-from typhon.arts.workspace.methods import workspace_methods
-from typhon.arts.workspace.variables import (
- WorkspaceVariable,
- group_names,
- group_ids,
- workspace_variables,
-)
-from typhon.arts.workspace.agendas import Agenda
-from typhon.arts.workspace.output import CoutCapture
-from typhon.arts.workspace.utility import unindent
-
-imports = dict()
-
-
-logger = logging.getLogger(__name__)
-
-################################################################################
-# ARTS Agenda Macro
-################################################################################
-
-class Include:
- """Simple helper class to handle INCLUDE statements in agenda definitions.
-
- Attributes:
-
- agenda: The included controlfile or agenda as
- typhon.arts.workspace.agenda.Agenda object.
- """
- def __init__(self, agenda):
- """ Create include from argument.
-
- Args:
-
- agenda (str, Agenda): Argument to the INCLUDE statement. This can
- either be a string or an Agenda object.
- """
- if type(agenda) == str:
- if not agenda in imports:
- self.agenda = Agenda.parse(agenda)
- imports[agenda] = self.agenda
- else:
- self.agenda = imports[agenda]
- elif type(agenda) == Agenda:
- self.agenda = agenda
- else:
- raise Exception("agenda argument must be either a controlfile"
- " name or a typhon.arts.workspace.agenda.Agenda object.")
-
-def arts_agenda(func):
- """
- Parse python method as ARTS agenda
-
- This decorator can be used to define ARTS agendas using python function syntax.
- The function should have one argument, which is assumed to be a Workspace instance.
- All expressions inside the function must be calls to ARTS WSMs. The result is an
- Agenda object that can be copied into a named ARTS agenda.
-
- Example:
-
- >>> @arts_agenda
- >>> def inversion_iterate_agenda(ws):
- >>> ws.x2artsStandard()
- >>> ws.atmfields_checkedCalc()
- >>> ws.atmgeom_checkedCalc()
- >>> ws.yCalc()
- >>> ws.VectorAddVector(ws.yf, ws.y, ws.y_baseline)
- >>> ws.jacobianAdjustAfterIteration()
- >>>
- >>> ws.Copy(ws.inversion_iterate_agenda, inversion_iterate_agenda)
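-
- Statements in the decorated function that are neither WSM calls nor
- INCLUDE statements are collected into Python callbacks, which are
- executed at their position when the agenda runs.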
- """
-
- source = getsource(func)
- source = unindent(source)
- ast = parse(source)
-
- func_ast = ast.body[0]
- if not isinstance(func_ast, FunctionDef):
- raise Exception("ARTS agenda definition can only decorate function definitions.")
-
- args = func_ast.args.args
-
- try:
- arg_name = func_ast.args.args[0].arg
- except IndexError:
- raise Exception("Agenda definition needs a workspace argument.")
-
- ws = Workspace(0)
-
- context = copy(func.__globals__)
- context.update({arg_name : ws})
- # Add resolved non-local variables from closure.
- nls, _, _, _ = getclosurevars(func)
- context.update(nls)
-
- #
- # Helper functions
- #
-
- callback_body = []
- def callback_make_fun(body):
- """
- Helper function that creates a wrapper function around
- python code to be executed within an ARTS agenda.
- """
- if sys.version_info >= (3, 8):
- # https://bugs.python.org/issue35894#msg334808
- m = Module(body, [])
- else:
- m = Module(body)
-
- def callback(ptr):
- try:
- context[arg_name].ptr = ptr
- eval(compile(m , "", 'exec'), context)
- except Exception as e:
- logger.error("Exception in Python callback:\n%s", e)
- context[arg_name].ptr = None
-
- callback_body = []
- return callback
-
- def eval_argument(expr):
- """
- Evaluate argument of workspace method call.
- """
- if not hasattr(expr, "lineno"):
- setattr(expr, "lineno", 0)
- return eval(compile(Expression(expr), "", 'eval'), context)
-
- # Create agenda
- a_ptr = arts_api.create_agenda(func.__name__.encode())
- agenda = Agenda(a_ptr)
-
- illegal_statement_exception = Exception(
- "Agenda definitions may only contain calls to WSMs of the"
- "workspace argument " + arg_name + " or INCLUDE statements.")
-
- #
- # Here the body of the function definition is traversed. Cases
- # that are treated specially are INCLUDE statements and calls
- # to workspace methods. Remaining statements are accumulated
- # in callback_body and then added to the agenda as a single callback.
- #
-
- for e in func_ast.body:
- if not isinstance(e, Expr):
- callback_body += [e]
- continue
- else:
- call = e.value
-
- if not isinstance(call, Call):
- callback_body += [e]
- continue
-
- # Include statement
- if type(call.func) == Name:
- if not call.func.id == "INCLUDE":
- callback_body += [e]
- else:
- args = []
- for a in call.args:
- args.append(eval_argument(a))
- include = Include(*args)
-
- if len(callback_body) > 0:
- agenda.add_callback(callback_make_fun(callback_body))
- callback_body = []
-
- arts_api.agenda_append(agenda.ptr, include.agenda.ptr)
- else:
- att = call.func.value
- if not att.id == arg_name:
- callback_body += [e]
- continue
-
- # Extract method name.
- name = call.func.attr
-
- # m is not a workspace method
- if not name in workspace_methods:
- callback_body += [e]
- continue
-
- # m is a workspace method.
- m = workspace_methods[name]
-
- args = [ws, m]
-
- for a in call.args:
- # Handle starred expression
- if type(a) == Starred:
- bs = eval_argument(a.value)
- for b in bs:
- args.append(b)
- continue
-
- args.append(eval_argument(a))
-
- # Extract keyword arguments
- kwargs = dict()
- for k in call.keywords:
- kwargs[k.arg] = eval(
- compile(Expression(k.value), "", 'eval'),
- context)
-
- # Add function to agenda
- if len(callback_body) > 0:
- agenda.add_callback(callback_make_fun(callback_body))
- callback_body = []
-
- agenda.add_method(*args, **kwargs)
-
- # Check if there's callback code left to add to the agenda.
- if len(callback_body) > 0:
- agenda.add_callback(callback_make_fun(callback_body))
- callback_body = []
-
- return agenda
-
-
-################################################################################
-# Workspace Method Wrapper Class
-################################################################################
-class WSMCall:
- """
- Wrapper class for workspace methods. This is necessary to be able to print
- the method doc as __repr__, which doesn't work for python function objects.
-
- Attributes:
-
- ws: The workspace object to which the method belongs.
- m: The WorkspaceMethod object
-
- """
- def __init__(self, ws, m):
- self.ws = ws
- self.m = m
- self.__doc__ = m.__doc__
-
- def __call__(self, *args, **kwargs):
- self.m.call(self.ws, *args, **kwargs)
-
- def __repr__(self):
- return repr(self.m)
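-
-# Example: with this wrapper, evaluating ws.yCalc in an interactive session
-# prints the documentation of the yCalc WSM, while ws.yCalc() executes the
-# method on the workspace.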
-
-################################################################################
-# The Workspace Class
-################################################################################
-class Workspace:
- """
- The Workspace class represents an ongoing ARTS simulation. Each Workspace object
- holds its own ARTS workspace and can be used to execute ARTS workspace methods or
- access workspace variables.
-
- All workspace methods taken from workspace_methods in the methods module are added
- as attributes on creation and are thus available as methods on the object.
-
- Attributes:
-
- ptr(ctypes.c_void_p): object pointing to the ArtsWorkspace instance of the
- ARTS C API
- _vars(dict): Dictionary holding local variables that have been created
- interactively using one of the Create ARTS WSMs.
-
-
- """
- def __init__(self, verbosity=1, agenda_verbosity=0):
- """
- The init function just creates an instance of the ArtsWorkspace class of the
- C API and sets the ptr attribute to the returned handle.
-
- It also adds all workspace methods as attributes to the object.
-
- Parameters:
- verbosity (int): Verbosity level (0-3), 1 by default
- agenda_verbosity (int): Verbosity level for agendas (0-3),
- 0 by default
- """
-
- self.__dict__["_vars"] = dict()
- self.ptr = arts_api.create_workspace(verbosity, agenda_verbosity)
- self.workspace_size = arts_api.get_number_of_variables()
- for name in workspace_methods:
- m = workspace_methods[name]
- setattr(self, m.name, WSMCall(self, m))
- self.__verbosity_init__()
-
- def __del__(self):
- """
- Cleans up the C API.
- """
- if self.ptr is not None:
- if arts_api is not None:
- arts_api.destroy_workspace(self.ptr)
-
- def __getstate__(self):
- return None
-
- def __setstate__(self, state):
- pass
-
- def __verbosity_init__(self):
- """
- Executes verbosityInit WSM directly through the ARTS api to suppress
- output.
- """
- wsm = workspace_methods["verbosityInit"]
- (m_id, args_out, args_in, ts) = wsm._parse_output_input_lists(self, [], {})
- arg_out_ptr = c.cast((c.c_long * len(args_out))(*args_out),
- c.POINTER(c.c_long))
- arg_in_ptr = c.cast((c.c_long * len(args_in))(*args_in),
- c.POINTER(c.c_long))
- with CoutCapture(self, silent = True):
- e_ptr = arts_api.execute_workspace_method(self.ptr, m_id, len(args_out),
- arg_out_ptr, len(args_in), arg_in_ptr)
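- # Temporary variables created while parsing the argument lists are
- # erased again in reverse order of creation.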
- for t in ts[::-1]:
- t.erase()
-
- def create_variable(self, group, name):
- """
- Create a workspace variable.
-
- Args:
-
- group: The group name of the variable to create.
-
- name: The name of the variable to create. If None, the
- ARTS API will assign a unique name.
-
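- Example:
-
- Create a Vector WSV named "my_vector" (the name here is
- illustrative):
-
- >>> wsv = ws.create_variable("Vector", "my_vector")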
- """
- if name is not None:
- name = name.encode()
-
- group_id = group_ids[group]
- ws_id = arts_api.add_variable(self.ptr, group_id, name)
- v = arts_api.get_variable(ws_id)
- wsv = WorkspaceVariable(ws_id,
- v.name.decode(),
- group_names[group_id],
- "User defined variable.",
- self)
- self._vars[wsv.name] = wsv
- return wsv
-
- def add_variable(self, var):
- """
- This will try to copy a given python variable to the ARTS workspace and
- return a WorkspaceVariable object representing this newly created
- variable.
-
- Types natively supported by the C API are int, str, [str], [int], and
- numpy.ndarrays. These will be copied directly into the newly created WSV.
-
- In addition, all typhon ARTS types that can be stored to XML can
- be set to a WSV, but in this case the communication will happen through
- the file system (cf. WorkspaceVariable.from_typhon).
-
- The user should not have to call this method explicitly, but instead it
- is used by the WorkspaceMethod call function to transfer python
- variable arguments to the ARTS workspace.
-
- Args:
- var: Python variable of type int, str, [str], [int] or np.ndarray
- which should be copied to the workspace.
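-
- Example:
-
- Copy a numpy vector into a new WSV (illustrative values):
-
- >>> wsv = ws.add_variable(np.array([1.0, 2.0, 3.0]))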
- """
- if type(var) == WorkspaceVariable:
- return var
-
- # Create WSV in ARTS Workspace
- group = group_names[WorkspaceVariable.get_group_id(var)]
- wsv = self.create_variable(group, None)
-
- # Set WSV value using the ARTS C API
- s = VariableValueStruct(var)
- if s.ptr:
-
- e = arts_api.set_variable_value(self.ptr, wsv.ws_id, wsv.group_id, s)
- if e:
- arts_api.erase_variable(self.ptr, wsv.ws_id, wsv.group_id)
- raise Exception("Setting of workspace variable through C API "
- " failed with the " + "following error:\n"
- + e.decode("utf8"))
- # If the type is not supported by the C API try to write the type to XML
- # and read into ARTS workspace.
- else:
- try:
- wsv.from_typhon(var)
- except Exception:
- raise Exception("Could not add variable since "
- + str(type(var)) + " is neither supported by "
- + "the C API nor typhon XML IO.")
- self._vars[wsv.name] = wsv
- return wsv
-
- def __dir__(self):
- return {**self._vars, **workspace_variables, **self.__dict__}
-
- def __getattr__(self, name):
- """ Lookup the given variable in the local variables and the ARTS workspace.
-
- Args:
- name(str): Name of the attribute (variable)
-
- Raises:
- AttributeError: If the variable is not found.
- """
-
- group_id = None
- if name in self._vars:
- var = self._vars[name]
- var.update()
- return var
- else:
- i = arts_api.lookup_workspace_variable(name.encode())
- if i < 0:
- raise AttributeError("No workspace variable " + str(name) + " found.")
- vs = arts_api.get_variable(i)
- group_id = vs.group
- description = vs.description.decode("utf8")
-
- # Get its symbolic representation
- wsv = WorkspaceVariable(i, name, group_names[group_id], description, self)
- return wsv
-
- def __setattr__(self, name, value):
- """ Set workspace variable.
-
- This will look up the workspace variable name and try to set it to value.
-
- Args:
- name(str): Name of the attribute (variable)
- value(obj): The value to set the workspace variable to.
-
- Raises:
- ValueError: If the variable is not found or if value cannot be uniquely converted to
- a value of a workspace variable.
- """
- try:
- v = self.__getattr__(name)
- except AttributeError:
- self.__dict__[name] = value
- return None
-
- # Handle empty list or None values.
- if value is None or (isinstance(value, list) and not value):
- arts_api.set_variable_value(self.ptr, v.ws_id, v.group_id,
- VariableValueStruct.empty())
- return None
-
- if type(value) == Agenda:
- arts_api.set_variable_value(self.ptr, v.ws_id, v.group_id,
- VariableValueStruct(value))
- return None
-
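- # All other values are first copied into a temporary WSV; after the
- # group check below they are transferred with Copy and the temporary
- # is erased again (unless it was an existing WSV to begin with).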
- t = self.add_variable(value)
-
- if not t.group_id == v.group_id:
- raise Exception("Incompatible groups: Workspace variable " + name +
- " of group " + group_names[v.group_id] + " and value " + str(value)
- + " of group " + group_names[t.group_id] + ".")
-
- self.Copy(v, t)
-
- # Remove t only if it wasn't an existing WSV already before.
- if not type(value) == WorkspaceVariable:
- t.erase()
-
- def execute_agenda(self, agenda):
- """ Execute agenda on workspace.
-
- Args:
-
- agenda (typhon.arts.workspace.agenda.Agenda): Agenda object to execute.
-
- Raises:
-
- ValueError: If argument is not of type typhon.arts.workspace.agenda.Agenda
- """
-
- value_error = ValueError("Argument must be of type agenda.")
- if not type(agenda) is Agenda:
- raise value_error
-
- include_path_push(os.getcwd())
- data_path_push(os.getcwd())
-
- agenda.execute(self)
-
- include_path_pop()
- data_path_pop()
-
- def execute_controlfile(self, name):
- """ Execute controlfile or agenda on workspace.
-
- This method looks recursively for a controlfile with the given name in the current
- directory and the arts include path. If such a file has been found it will be parsed
- and executed on the workspace.
-
- Args:
-
- name(str): Name of the controlfile
-
- Raises:
-
- Exception: If parsing of the controlfile fails.
-
- Returns:
-
- The controlfile as parsed Agenda object.
-
- """
-
- if not name in imports:
- agenda = Agenda.parse(name)
- imports[name] = agenda
- else:
- agenda = imports[name]
-
- self.execute_agenda(agenda)
-
- return agenda
diff --git a/typhon/arts/xml/__init__.py b/typhon/arts/xml/__init__.py
deleted file mode 100644
index 757902da..00000000
--- a/typhon/arts/xml/__init__.py
+++ /dev/null
@@ -1,274 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""This module provides functionality for reading and writing ARTS XML files.
-"""
-
-import gzip
-import glob
-import itertools
-import os
-from os.path import isfile, join, basename, splitext, dirname
-
-from . import read
-from . import write
-
-__all__ = [
- 'load',
- 'save',
- 'load_directory',
- 'load_indexed',
- 'make_binary',
- 'make_directory_binary',
-]
-
-
-def save(var, filename, precision='.7e', format='ascii', comment=None,
- parents=False):
- """Save a variable to an ARTS XML file.
-
- Args:
- var: Variable to be stored.
- filename (str): Name of output XML file.
- If the name ends in .gz, the file is compressed on the fly.
- precision (str): Format for output precision.
- format (str): Output format: 'ascii' (default) or 'binary'.
- comment (str): Comment string included in a tag above data.
- parents (bool): Create missing parent directories.
-
- Note:
- Python's gzip module is extremely slow in writing. Consider
- compressing files manually after writing them normally.
-
- Example:
- >>> x = numpy.array([1.,2.,3.])
- >>> typhon.arts.xml.save(x, 'myvector.xml')
-
- """
- if parents:
- os.makedirs(dirname(filename), exist_ok=True)
-
- if filename.endswith('.gz'):
- if format != 'ascii':
- raise RuntimeError(
- 'For zipped files, the output format must be "ascii"')
- xmlopen = gzip.open
- else:
- xmlopen = open
- with xmlopen(filename, mode='wt', encoding='UTF-8') as fp:
- if format == 'binary':
- with open(filename + '.bin', mode='wb') as binaryfp:
- axw = write.ARTSXMLWriter(fp, precision=precision,
- binaryfp=binaryfp)
- axw.write_header()
- if comment is not None:
- axw.write_comment(comment)
- axw.write_xml(var)
- axw.write_footer()
- elif format == 'ascii':
- axw = write.ARTSXMLWriter(fp, precision=precision)
- axw.write_header()
- if comment is not None:
- axw.write_comment(comment)
- axw.write_xml(var)
- axw.write_footer()
- else:
- raise RuntimeError('Unknown output format "{}".'.format(format))
-
-
-def load(filename):
- """Load a variable from an ARTS XML file.
-
- The input file can be either a plain or gzipped XML file.
-
- Args:
- filename (str): Name of ARTS XML file.
-
- Returns:
- Data from the XML file. Type depends on data in file.
-
- Example:
- >>> typhon.arts.xml.load('tests/reference/matrix.xml')
- array([[ 0., 1.],
- [ 2., 3.]])
-
- """
- # If file is not found, try the gzipped version.
- if not isfile(filename):
- if not isfile(filename + '.gz'):
- raise FileNotFoundError("No such file: '{}'".format(filename))
- else:
- filename += '.gz'
-
- if filename.endswith('.gz'):
- xmlopen = gzip.open
- else:
- xmlopen = open
-
- binaryfilename = filename + '.bin'
- with xmlopen(filename, 'rb') as fp:
- if isfile(binaryfilename):
- with open(binaryfilename, 'rb',) as binaryfp:
- return read.parse(fp, binaryfp).getroot().value()
- else:
- return read.parse(fp).getroot().value()
-
-
-def load_directory(directory, exclude=None):
- """Load all XML files in a given directory.
-
- Search given directory for files with ``.xml`` or ``.xml.gz`` extension
- and try to load them using :func:`load`.
-
- Parameters:
- directory (str): Path to the directory.
- exclude (Container[str]): Filenames to exclude.
-
- Returns:
- dict: Filenames without extension are keys for the file content.
-
- Example:
- Load all files in ``foo`` except for the lookup table in
- ``abs_lookup.xml``.
-
- >>> load_directory('foo', exclude=['abs_lookup.xml'])
- """
- def includefile(f):
- """Check if to include file."""
- return basename(f) not in exclude if exclude is not None else True
-
- def stripext(f):
- """Strip the extension of a filename."""
- return splitext(f)[0]
-
- # Create a generator yielding all XML files to load (not excluded).
- xmlfiles = filter(includefile, glob.iglob(join(directory, '*.xml')))
-
- # Remove extension from zipped files to keep dictionary keys clean.
- # The `load` function looks for zipped files anyway.
- gzfiles = filter(includefile, glob.iglob(join(directory, '*.xml.gz')))
- gzfiles = map(stripext, gzfiles)
-
- # Store XML file contents in a dictionary, using the filename as key.
- return {stripext(basename(f)): load(f)
- for f in itertools.chain(xmlfiles, gzfiles)}
-
-
-def load_indexed(filename):
- """Load all indexed XML files matching the given filename.
-
- The function searches all files matching the pattern
- ``<filename>.<file_index>.xml`` or ``<filename>.<file_index>.xml.gz``.
-
- A list with the loaded file contents is returned. The list indices are
- equivalent to the file indices.
-
- Parameters:
- filename (str): Filename.
-
- Returns:
- list: List of file contents.
-
- Example:
- Load all files matching the pattern ``foo.<file_index>.xml``.
-
- >>> load_indexed('foo')
-
- """
- iidx = -2 # Relative position of the file index in the split filename.
-
- # Get all files matching the indexed filename format.
- files = glob.glob('{}.*.xml'.format(filename))
-
- # If no files are found, try the gzipped version.
- if len(files) == 0:
- files = glob.glob('{}.*.xml.gz'.format(filename))
- iidx = -3 # Correct fileindex position for gzipped files.
-
- # Extract indices from filenames.
- maxindex = max(int(x.split('.')[iidx]) for x in files)
-
- # Pre-allocate a list according to the maximum index found.
- ret = (maxindex + 1) * [None]
-
- # Fill list with file contents (file index matching list index).
- for f in files:
- findex = int(f.split('.')[iidx])
- ret[findex] = load(f)
-
- return ret
-
-
-def make_binary(filename, out='', absolute_out=False, parents=True):
- """Loads xml-file at filename and saves it back in binary format
-
- Parameters:
- filename (str): Filename path.
- out (str): Path to save the binary. Empty causes overwrite of file.
- absolute_out (bool): If true, then write file to out-path rather than
- to the relative path out. Does nothing if file is in the working
- folder and out is relative.
- parents (bool): Create missing parent directories.
-
- Returns:
- str: Path to the created binary file.
-
- Example:
- Load t_field.xml and save it back in binary format as
- ./binary/t_field.xml and ./binary/t_field.bin.
-
- >>> make_binary('t_field.xml', out='binary')
- 'binary/t_field.xml'
- """
-
- xml_data = load(filename)
- if absolute_out:
- outfile = join(out, basename(filename))
- else:
- outfile = join(dirname(filename), out, basename(filename))
-
- save(xml_data, outfile, format='binary', parents=parents)
-
- return outfile
-
-
-def make_directory_binary(directory, out='', absolute_out=False, parents=True):
- """Loads xml-files in directory and saves them back in binary format
-
- Parameters:
- directory (str): Directory path.
- out (str): Path to save the binary.
- absolute_out (bool): If true, then write file to out-path rather than
- to the relative path out. Does nothing if file is in the working
- folder and out is relative.
- parents (bool): Create missing parent directories.
-
- Returns:
- list[str]: Paths to the created binary files.
-
- Example:
- Load arts-xml-data/spectroscopy/cia/hitran2011/ and save it back in
- binary format at arts-xml-data-binary/spectroscopy/cia/hitran2011/
-
- >>> make_directory_binary('arts-xml-data/spectroscopy/cia/hitran2011',
- out='arts-xml-data-binary/spectroscopy/cia/hitran2011',
- absolute_out=True)
- ['arts-xml-data-binary/spectroscopy/cia/hitran2011/hitran_cia2012_adapted.xml']
- """
-
- directory_of_xmls = load_directory(directory)
- outfiles = [] # Empty list to store output filepaths.
-
- if absolute_out:
- get_outfile = join(out, '{entry}.xml')
- else:
- get_outfile = join(directory, out, '{entry}.xml')
-
- for entry in directory_of_xmls:
- outfile = get_outfile.format(entry=entry)
- save(directory_of_xmls[entry],
- outfile,
- format='binary',
- parents=parents)
- outfiles.append(outfile)
-
- return outfiles
diff --git a/typhon/arts/xml/names.py b/typhon/arts/xml/names.py
deleted file mode 100644
index 4268e606..00000000
--- a/typhon/arts/xml/names.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# -*- coding: utf-8 -*-
-
-__all__ = ['dimension_names', 'tensor_names', 'complex_tensor_names',
- 'basic_types']
-
-# Source: ARTS developer guide, section 3.4
-dimension_names = [
- 'ncols',
- 'nrows',
- 'npages',
- 'nbooks',
- 'nshelves',
- 'nvitrines',
- 'nlibraries']
-
-tensor_names = [
- 'Vector', 'Matrix', 'Tensor3', 'Tensor4', 'Tensor5', 'Tensor6', 'Tensor7']
-
-complex_tensor_names = [
- 'ComplexVector', 'ComplexMatrix', 'ComplexTensor3', 'ComplexTensor4',
- 'ComplexTensor5', 'ComplexTensor6', 'ComplexTensor7']
-
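-# Maps the Python / numpy type name of a value (type(var).__name__) to the
-# name of the corresponding ARTS group.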
-basic_types = {
- 'tuple': 'Array',
- 'list': 'Array',
- 'int': 'Index',
- 'int8': 'Index',
- 'int16': 'Index',
- 'int32': 'Index',
- 'int64': 'Index',
- 'float': 'Numeric',
- 'float16': 'Numeric',
- 'float32': 'Numeric',
- 'float64': 'Numeric',
- 'float128': 'Numeric',
- 'str': 'String',
- 'str_': 'String',
- 'NoneType': None,
-}
diff --git a/typhon/arts/xml/read.py b/typhon/arts/xml/read.py
deleted file mode 100644
index f3e15c40..00000000
--- a/typhon/arts/xml/read.py
+++ /dev/null
@@ -1,191 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""Read ARTS XML types
-
-This packages contains the internal implementation for reading ARTS XML files.
-"""
-
-from xml.etree import ElementTree
-
-import numpy as np
-
-from .names import dimension_names
-from .. import types
-
-__all__ = ['parse']
-
-
-class ARTSTypesLoadMultiplexer:
- """Used by the xml.etree.ElementTree to parse ARTS variables.
-
- Tag names in the XML file are mapped to the corresponding parsing method.
-
- """
-
- @staticmethod
- def arts(elem):
- if (elem.attrib['format'] not in ('ascii', 'binary')):
- raise RuntimeError('Unknown format in tag: {}'.format(
- elem.attrib['format']))
-
- ret = elem[0].value()
-
- # Try next element, if return value is None (comment tags).
- n = 1
- while ret is None:
- try:
- ret = elem[n].value()
- n += 1
- except IndexError:
- break
-
- return ret
-
- @staticmethod
- def comment(elem):
- return
-
- @staticmethod
- def Array(elem):
- arr = [t.value() for t in elem]
- if len(arr) != int(elem.attrib['nelem']):
- raise RuntimeError('Expected {:s} elements in Array, found {:d}'
- ' elements!'.format(elem.attrib['nelem'],
- len(arr)))
- return arr
-
- @staticmethod
- def String(elem):
- if elem.text is None:
- return ''
- return elem.text.strip()[1:-1]
-
- @staticmethod
- def Index(elem):
- if elem.binaryfp is not None:
- return np.fromfile(elem.binaryfp, dtype='<i4', count=1)[0]
- return int(elem.text)
diff --git a/typhon/arts/xml/write.py b/typhon/arts/xml/write.py
deleted file mode 100644
--- a/typhon/arts/xml/write.py
+++ /dev/null
- def write_header(self, version, filetype):
- """Write opening tag for ARTS XML file."""
- self.write('<?xml version="1.0"?>\n')
- self.open_tag('arts', {'version': version, 'format': filetype})
-
- def write_comment(self, comment):
- """Write comment tag to XML file."""
- self.open_tag('comment')
- self.write(str(comment) + '\n')
- self.close_tag()
-
- def open_tag(self, tag, attr=None, newline=True):
- """Write opening tag with attributes.
-
- Args:
- tag (str): Tag name.
- attr (dict): Optional XML attributes.
- newline (bool): Put newline after tag.
- """
- if attr is None:
- attr = {}
- tagstr = '<{}{}>'.format(tag,
- ''.join([' {}="{}"'.format(a, v) for a, v in
- attr.items()]))
- if newline:
- tagstr += '\n'
-
- self._tag_stack.append(tag)
- self.write(tagstr)
-
- def close_tag(self, newline=True):
- """Close current XML tag."""
- tagstr = '</{}>'.format(self._tag_stack.pop())
-
- if newline:
- tagstr += '\n'
-
- self.write(tagstr)
-
- def write_footer(self):
- """Write closing tag for ARTS XML file."""
- self.close_tag()
-
- def write(self, s):
- """Write string to XML file."""
- self.filepointer.write(s)
-
- def write_xml(self, var, attr=None, arraytype=None):
- """Write a variable as XML.
-
- Writing basic matpack types is implemented here. Custom types (e.g.
- GriddedFields) must implement a class member function called
- 'write_xml'.
-
- Tuples and list are mapped to ARTS Array types.
-
- """
- if hasattr(var, 'write_xml'):
- var.write_xml(self, attr)
- elif isinstance(var, np.ndarray):
- self.write_ndarray(var, attr)
- elif isinstance(var, int):
- self.write_basic_type('Index', var, attr)
- elif isinstance(var, float):
- self.write_basic_type('Numeric', var, attr, self.precision)
- elif isinstance(var, str):
- self.write_basic_type('String', '"' + var + '"', attr)
- elif type(var) in (list, tuple):
- if arraytype is None:
- try:
- arraytype = get_arts_typename(var[0])
- except IndexError:
- raise RuntimeError('Array must have at least one element.')
-
- if attr is None:
- attr = {}
- else:
- attr = attr.copy()
- attr['nelem'] = len(var)
- attr['type'] = arraytype
- self.open_tag('Array', attr)
- for i, v in enumerate(var):
- if get_arts_typename(v) != arraytype:
- raise RuntimeError(
- 'All array elements must have the same type. '
- 'Array type is {}, but element {} has type {}'.format(
- arraytype, i, get_arts_typename(v)))
- self.write_xml(v)
- self.close_tag()
- else:
- raise TypeError(
- "Can't map '{}' to any ARTS type.".format(type(var).__name__))
-
- def write_basic_type(self, name, var, attr=None, precision=''):
- """Write a basic ARTS type as XML.
-
- Args:
- name (str): Variable type name.
- var: See :meth:`write_xml`.
- attr: See :meth:`write_xml`.
- precision (str): Output format string.
-
- """
- self.open_tag(name, attr, newline=False)
- if self.binaryfilepointer is not None and name == 'Index':
- np.array(var, dtype='i4').tofile(self.binaryfilepointer)
- elif self.binaryfilepointer is not None and name == 'Numeric':
- np.array(var, dtype='d').tofile(self.binaryfilepointer)
- else:
- self.write(('{:' + precision + '}').format(var))
- self.close_tag()
-
- def write_ndarray(self, var, attr):
- """Convert ndarray to ARTS XML representation.
-
- For arguments see :meth:`write_xml`.
-
- """
- if attr is None:
- attr = {}
- ndim = var.ndim
- tag = get_arts_typename(var)
- if np.issubdtype(var.dtype, np.complex128):
- dtype = np.complex128
- else:
- dtype = 'd'
-
- # Vector
- if ndim == 1:
- attr['nelem'] = var.shape[0]
- self.open_tag(tag, attr)
- if self.binaryfilepointer is not None:
- np.array(var, dtype=dtype).tofile(self.binaryfilepointer)
- else:
- if np.issubdtype(var.dtype, np.complex128):
- var = var.astype(np.complex128)
- var.dtype = np.float64
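- # Viewing the complex buffer as float64 interleaves real and
- # imaginary parts, so they are written out as alternating values.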
- fmt = "{:" + self.precision + "}"
- for i in var:
- self.write(fmt.format(i) + '\n')
- self.close_tag()
- # Matrix and Tensors
- elif ndim <= len(dimension_names):
- for i in range(0, ndim):
- attr[dimension_names[i]] = var.shape[ndim - 1 - i]
-
- self.open_tag(tag, attr)
-
- if self.binaryfilepointer is not None:
- np.array(var, dtype=dtype).tofile(self.binaryfilepointer)
- else:
- if np.issubdtype(var.dtype, np.complex128):
- var = var.astype(np.complex128)
- var.dtype = np.float64
- # Reshape for row-based linebreaks in XML file
- if np.prod(var.shape) != 0:
- if ndim > 2:
- var = var.reshape(-1, var.shape[-1])
-
- fmt = ' '.join(['%' + self.precision, ] * var.shape[1])
-
- for i in var:
- self.write((fmt % tuple(i) + '\n'))
- self.close_tag()
- else:
- raise RuntimeError(
- 'Dimensionality ({}) of ndarray too large for '
- 'conversion to ARTS XML'.format(ndim))
diff --git a/typhon/arts/xsec.py b/typhon/arts/xsec.py
deleted file mode 100644
index f60e3852..00000000
--- a/typhon/arts/xsec.py
+++ /dev/null
@@ -1,73 +0,0 @@
-__all__ = ['XsecRecord']
-
-
-class XsecRecord:
- """:class:`XsecRecord` implements the same-named ARTS datatype.
-
- Contains the reference cross section data at low pressure and
- the coefficients for the broadening formula.
- """
-
- def __init__(self, species=None, coeffs=None, fmin=None, fmax=None,
- refpressure=None, reftemperature=None, xsec=None,
- tfit_slope=None, tfit_intersect=None):
- """Initialize XsecRecord object.
- """
- self.version = 1
- self.species = species
- self.coeffs = coeffs
- self.fmin = fmin
- self.fmax = fmax
- self.refpressure = refpressure
- self.reftemperature = reftemperature
- self.xsec = xsec
- self.tfit_slope = tfit_slope
- self.tfit_intersect = tfit_intersect
-
- def write_xml(self, xmlwriter, attr=None):
- """Write a XsecRecord object to an ARTS XML file.
- """
- # self.checksize()
- if attr is None:
- attr = {}
- attr['version'] = self.version
- xmlwriter.open_tag("XsecRecord", attr)
- xmlwriter.write_xml(self.species, {'name': 'Species'})
- xmlwriter.write_xml(self.coeffs, {'name': 'Broadening Coefficients'})
- xmlwriter.write_xml(self.fmin, {'name': 'fmin'})
- xmlwriter.write_xml(self.fmax, {'name': 'fmax'})
- xmlwriter.write_xml(self.refpressure,
- {'name': 'Reference Pressure'})
- xmlwriter.write_xml(self.reftemperature,
- {'name': 'Reference Temperature'})
- xmlwriter.write_xml(self.xsec, {'name': 'Cross Sections'})
- xmlwriter.write_xml(self.tfit_slope, {'name': 'Temperature Fit Slope'})
- xmlwriter.write_xml(self.tfit_intersect,
- {'name': 'Temperature Fit Intersect'})
- xmlwriter.close_tag()
-
- @classmethod
- def from_xml(cls, xmlelement):
- """Loads a XsecRecord object from an xml.ElementTree.Element.
- """
-
- obj = cls()
- if 'version' in xmlelement.attrib.keys():
- obj.version = int(xmlelement.attrib['version'])
- else:
- obj.version = 1
-
- if obj.version != 1:
- raise RuntimeError(f'Unknown XsecRecord version {obj.version}')
-
- obj.species = xmlelement[0].value()
- obj.coeffs = xmlelement[1].value()
- obj.fmin = xmlelement[2].value()
- obj.fmax = xmlelement[3].value()
- obj.refpressure = xmlelement[4].value()
- obj.reftemperature = xmlelement[5].value()
- obj.xsec = xmlelement[6].value()
- obj.tfit_slope = xmlelement[7].value()
- obj.tfit_intersect = xmlelement[8].value()
-
- return obj
diff --git a/typhon/physics/units/em.py b/typhon/physics/units/em.py
index 74fe0d48..908782fa 100644
--- a/typhon/physics/units/em.py
+++ b/typhon/physics/units/em.py
@@ -26,7 +26,6 @@
from typhon import config
-from typhon.arts import xml
from typhon.constants import (h, k, c)
from typhon.physics.units.common import (ureg, radiance_units)
from typhon.physics.units.tools import UnitsAwareDataArray as UADA
@@ -200,6 +199,8 @@ def fromArtsXML(cls, sat, instr, ch):
Channel number (start counting at 1).
"""
+ from pyarts import xml
+
cf = config.conf[instr]
centres = xml.load(
cf["srf_backend_f"].format(sat=sat))
diff --git a/typhon/plots/__init__.py b/typhon/plots/__init__.py
index e7972423..34c649c8 100644
--- a/typhon/plots/__init__.py
+++ b/typhon/plots/__init__.py
@@ -5,7 +5,6 @@
from typhon.plots.common import * # noqa
from typhon.plots.formatter import * # noqa
from typhon.plots.plots import * # noqa
-from typhon.plots.arts_lookup import * # noqa
from typhon.plots.ppath import * # noqa
from typhon.plots.maps import * # noqa
diff --git a/typhon/plots/arts_lookup.py b/typhon/plots/arts_lookup.py
deleted file mode 100755
index 16618978..00000000
--- a/typhon/plots/arts_lookup.py
+++ /dev/null
@@ -1,342 +0,0 @@
-"""Visualize an absorption lookup table.
-
-Author: oliver.lemke@uni-hamburg.de
-"""
-import re
-from itertools import zip_longest
-
-import matplotlib.pyplot as plt
-import numpy as np
-from cycler import cycler
-from matplotlib.lines import Line2D
-from scipy.interpolate import interp1d
-
-import typhon.constants
-from typhon.plots import (ScalingFormatter, set_xaxis_formatter)
-
-__all__ = [
- 'plot_arts_lookup',
-]
-
-
-def _calc_lookup_species_count(lookup):
- """Calculate number of cross sections per species.
-
- Usually one, except for the nonlinear species.
- """
- nlsspecies = lookup.nonlinearspecies
- speciescount = np.ones_like(lookup.speciestags, dtype=int)
- if nlsspecies is not None:
- speciescount[nlsspecies] = lookup.nonlinearspeciesvmrperturbations.size
- return speciescount
-
-
-def _get_lookup_species_index(lookup, species, vmrpert):
- """Get index of given species in lookup table."""
- ret = 0
- spindex = lookup.speciestags.index(species)
- nlsspecies = lookup.nonlinearspecies
- speciescount = _calc_lookup_species_count(lookup)
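- # Nonlinear species store one cross-section block per VMR
- # perturbation, so the flat index is offset by the perturbation index.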
- if nlsspecies is not None and spindex in nlsspecies:
- if vmrpert >= speciescount[spindex]:
- raise RuntimeError(
- 'Nonlinear species VMR perturbation index too large')
- ret = vmrpert
-
- return ret + (np.sum(speciescount[0:spindex]) if spindex > 0 else 0)
-
-
-def plot_lookup_xsec(lookup, ipressures, species=None, ax=None, tpert=0,
- vmrpert=0):
- """Plot the cross section for one or more species of an ARTS lookup table.
-
- Parameters:
- lookup (typhon.arts.catalogues.GasAbsLookup): ARTS lookup table.
- ipressures (ndarray(int)): Indices of pressure levels to plot.
- species (list(list(str)), optional):
- ARTS species tags e.g. [['H2O-*-*0*']]. If none is given, plots
- all species in the lookup table for the given vmr perturbation.
- ax (AxesSubplot, optional): Axes to plot in.
- vmrpert (int): Index of vmr perturbation for nonlinear species to plot.
- tpert (int): Index of temperature perturbation to plot.
- """
- if ax is None:
- ax = plt.gca()
-
- ax.set_yscale('log')
- if species is None:
- species = lookup.speciestags
-
- for tag in species:
- ax.set_prop_cycle(
- cycler('color', [plt.cm.viridis(i) for i in
- np.linspace(0, 1, len(ipressures))]))
- for pi in ipressures:
- xsec = lookup.absorptioncrosssection[
- tpert,
- _get_lookup_species_index(lookup, tag, vmrpert), :, pi]
- ax.plot(lookup.frequencygrid, xsec,
- label=f'{lookup.pressuregrid[pi]/100.:8.3f} hPa')
-
- if len(species) > 1:
- ax.legend(fontsize='xx-small', frameon=False)
- else:
- ax.set_title(
- ',\n'.join(re.sub(r'(-\*)+$', '', s) for s in species[0]),
- y=1. - len(species[0]) * 0.05,
- fontsize='xx-small')
-
- set_xaxis_formatter(ScalingFormatter('giga', r'${x:g}$'), ax=ax)
- ax.tick_params(axis='both', which='major', labelsize='xx-small')
- ax.spines['right'].set_visible(False)
- ax.spines['top'].set_visible(False)
-
-
-def plot_lookup_opacity(lookup, opacity, species=None, vmrpert=0, ax=None,
- oneline=False, total=False):
- """Plot the opacity for one or more species of an ARTS lookup table.
-
- Parameters:
- lookup (typhon.arts.catalogues.GasAbsLookup): ARTS lookup table.
- opacity (ndarray): Opacity per species in lookup table as generated by
- `calc_opacity_from_lookup`.
- species (list(list(str)), optional):
- ARTS species tags e.g. [['H2O-*-*0*']]. If none is given, plots
- all species in the lookup table for the given vmr perturbation.
- vmrpert (int): Index of vmr perturbation for nonlinear species to plot.
- ax (AxesSubplot, optional): Axes to plot in.
- oneline (bool, optional): Draw a line where opacity == 1.
- total (bool, optional): Additionally plot the sum of opacities of all
- species.
- """
- if ax is None:
- ax = plt.gca()
-
- ax.set_yscale('log')
- if species is None:
- species = lookup.speciestags
-
- for tag in species:
- ax.plot(lookup.frequencygrid,
- opacity[_get_lookup_species_index(lookup, tag, vmrpert), :],
- label=',\n'.join(tag))
- if oneline:
- ax.plot(lookup.frequencygrid, np.ones_like(lookup.frequencygrid),
- linewidth=1, linestyle='--', color='k')
- if total:
- if lookup.nonlinearspecies is not None:
- speciescount = _calc_lookup_species_count(lookup)
- spindex = np.cumsum(speciescount)
- spindex[1:] = spindex[0:-1]
- spindex[0] = 0
- spindex[lookup.nonlinearspecies] += vmrpert
- o = opacity[spindex]
- else:
- o = opacity
- ax.plot(lookup.frequencygrid, np.sum(o, axis=0),
- linewidth=1, color='k')
-
- if len(species) > 1:
- ax.legend(fontsize='xx-small', frameon=False)
- else:
- ax.set_title(',\n'.join(re.sub(r'(-\*)+$', '', s) for s in species[0]),
- y=1. - len(species[0]) * 0.05,
- fontsize='xx-small')
-
- set_xaxis_formatter(ScalingFormatter('giga', r'${x:g}$'), ax=ax)
- ax.tick_params(axis='both', which='major', labelsize='xx-small')
- ax.tick_params(axis='both', which='minor', labelsize='xx-small')
- ax.spines['right'].set_visible(False)
- ax.spines['top'].set_visible(False)
-
-
-def calc_opacity_from_lookup(lookup, z=None, g=typhon.constants.g,
- r=typhon.constants.gas_constant_dry_air, tpert=0):
- """Calculate the opacity from an ARTS lookup table.
-
- Parameters:
- lookup (typhon.arts.catalogues.GasAbsLookup): ARTS lookup table.
- z (ndarray, optional): Altitude profile. If not given, the layer
- thicknesses are calculated based on the hypsometric formula.
- g (float, optional): Gravity constant. Default uses Earth's gravity.
- r (float, optional): Gas constant for dry air. Default uses constant
- for Earth.
- tpert (int, optional): Index of temperature perturbation to plot.
-
- Returns:
- ndarray: Opacity per species in lookup table.
- """
- speciescount = _calc_lookup_species_count(lookup)
- vmrs = (np.repeat(lookup.referencevmrprofiles, speciescount, axis=0)
- if lookup.nonlinearspecies is not None
- else lookup.referencevmrprofiles)
-
- ni = (lookup.pressuregrid * vmrs
- / lookup.referencetemperatureprofile / typhon.constants.boltzmann
- ).reshape(np.sum(speciescount), 1, len(lookup.pressuregrid))
-
- alpha = ni * lookup.absorptioncrosssection[tpert, :, :, :]
-
- if z is not None:
- z = interp1d(z.grids[0], z.data[:, 0, 0])(lookup.pressuregrid)
- else:
- # Calculate z from hypsometric formula
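- # (layer thickness dz = r * t_mean / g * ln(p1 / p2), accumulated
- # upwards from the lowest level)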
- pgrid = lookup.pressuregrid
- z = [r * t / g * np.log(p1 / p2)
- for p1, p2, t in zip(pgrid[:-1], pgrid[1:], (
- lookup.referencetemperatureprofile[
- 1:] + lookup.referencetemperatureprofile[:-1]) / 2.)]
- z = np.cumsum(z)
- p = (pgrid[1:] + pgrid[:-1]) / 2.
- z = interp1d(p, z, fill_value='extrapolate')(lookup.pressuregrid)
-
- return np.vstack([np.trapz(ialpha, z, axis=1) for ialpha in alpha])
-
-
-def _add_opacity_legend(ax=None):
- """Add legend to an opacity lookup table plot."""
- if ax is None:
- ax = plt.gca()
-
- blue_line = Line2D([], [], label='species opacity')
- black_line = Line2D([], [], color='k', linewidth=1.,
- label='total opacity')
- dashed_line = Line2D([], [], color='k', linestyle='--',
- linewidth=1., label='opacity=1')
-
- handles = [blue_line, black_line, dashed_line]
- labels = [h.get_label() for h in handles]
-
- ax.legend(handles=handles, labels=labels, fontsize='xx-small',
- loc='upper left', ncol=6)
-
-
-def _add_xsec_legend(lookup, ipressures, ax=None):
- """Add legend to a cross section lookup table plot."""
- if ax is None:
- ax = plt.gca()
-
- pgrid = lookup.pressuregrid
- colors = [plt.cm.viridis(i) for i in np.linspace(0, 1, len(ipressures))]
- handles = [Line2D([], [],
- color=colors[i],
- label=f'{pgrid[ip]/100.:8.3f} hPa')
- for i, ip in enumerate(ipressures)]
-
- labels = [h.get_label() for h in handles]
-
- ax.legend(handles=handles, labels=labels, fontsize='xx-small',
- loc='upper left', ncol=6)
-
-
-def _setup_lookup_figure(lookup, cols=3):
- """Create the figure and axes objects for the lookup table plot."""
- rows = int(np.ceil(len(lookup.speciestags) / cols))
- fig, ax = plt.subplots(rows + 1, cols, figsize=(4*cols, (rows + 1) * 2))
- fig.tight_layout()
-
- return rows, cols, fig, ax
-
-
-def plot_arts_lookup(lookup, opacity=True, z=None, g=typhon.constants.g,
- r=typhon.constants.gas_constant_dry_air, tpert=0,
- vmrpert=0, pressures=None, cols=3):
- """Visualize an ARTS lookup table.
-
- Plots the opacity or the absorption cross sections based on an
- ARTS lookup table.
-
- Parameters:
- lookup (typhon.arts.catalogues.GasAbsLookup): ARTS lookup table object.
- opacity (bool, optional): Set to False to plot the absorption cross
- sections.
- z (ndarray, optional): Altitude profile. Optional input for opacity
- calculation. If not given, the layer thicknesses are calculated
- based on the hypsometric formula.
- g (float, optional): Gravity constant. Uses Earth's gravity by default.
- r (float, optional): Gas constant for dry air.
- Uses constant for Earth by default.
- tpert (int, optional): Index of temperature perturbation to plot.
- vmrpert (int, optional): Index of vmr perturbation for nonlinear
- species to plot.
- pressures (ndarray(int), optional): Pressure levels to plot. If not
- given, up to 6 pressure levels are selected.
- cols (int, optional): Species to plot per row.
-
- Returns:
- matplotlib.figure.Figure, ndarray(AxesSubplot):
- Matplotlib Figure and Axes objects.
-
- Examples:
-
- .. plot::
- :include-source:
-
- from os.path import join, dirname
- import matplotlib.pyplot as plt
- import typhon as ty
-
- lookup_file = join(dirname(ty.__file__), 'tests', 'plots',
- 'reference', 'abs_lookup_small.xml')
- fig, ax = ty.plots.plot_arts_lookup(ty.arts.xml.load(lookup_file))
-
- fig.suptitle('Lookup table opacities')
- fig.subplots_adjust(top=0.88)
- plt.show()
-
- .. plot::
- :include-source:
-
- from os.path import join, dirname
- import matplotlib.pyplot as plt
- import typhon as ty
-
- lookup_file = join(dirname(ty.__file__), 'tests', 'plots',
- 'reference', 'abs_lookup_small.xml')
- fig, ax = ty.plots.plot_arts_lookup(ty.arts.xml.load(lookup_file),
- opacity=False)
-
- fig.suptitle('Lookup table absorption cross sections [m$^2$]')
- fig.subplots_adjust(top=0.88)
- plt.show()
-
- """
- rows, cols, fig, ax = _setup_lookup_figure(lookup, cols)
-
- if opacity:
- lookup_opacity = calc_opacity_from_lookup(lookup, z, g, r, tpert)
-
- for cax, species in zip_longest(ax.flatten() if len(ax.shape) == 2
- else ax.reshape(ax.size, 1),
- lookup.speciestags):
- if species is None:
- cax.axis('off')
- continue
-
- if opacity:
- plot_lookup_opacity(lookup, lookup_opacity, vmrpert=vmrpert,
- oneline=True, total=True, species=[species],
- ax=cax)
- else:
- psize = lookup.pressuregrid.size
- if pressures is not None:
- ipressures = [np.abs(lookup.pressuregrid - p).argmin()
- for p in pressures]
- else:
- ipressures = (lookup.pressuregrid.size - 1 - (
- range(psize) if psize <= 5
- else np.linspace(0, lookup.pressuregrid.size,
- num=6,
- endpoint=False,
- dtype=int)))
- plot_lookup_xsec(lookup, ipressures, species=[species], ax=cax,
- tpert=tpert, vmrpert=vmrpert)
-
- if opacity:
- _add_opacity_legend(ax[-1, 0])
- else:
- _add_xsec_legend(lookup, ipressures, ax[-1, 0])
-
- for cax in ax[-2, :]:
- cax.set_xlabel('Frequency [GHz]', fontsize='xx-small')
-
- return fig, ax
diff --git a/typhon/tests/arts/__init__.py b/typhon/tests/arts/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/typhon/tests/arts/reference/GriddedField3.xml b/typhon/tests/arts/reference/GriddedField3.xml
deleted file mode 100644
index 32ca2eda..00000000
--- a/typhon/tests/arts/reference/GriddedField3.xml
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0"?>
-<arts format="ascii" version="1">
-<GriddedField3>
-<Vector name="grid1" nelem="2">
-0.0000000e+00
-1.0000000e+00
-</Vector>
-<Vector name="grid2" nelem="2">
-0.0000000e+00
-1.0000000e+00
-</Vector>
-<Vector name="grid3" nelem="2">
-0.0000000e+00
-1.0000000e+00
-</Vector>
-<Tensor3 ncols="2" nrows="2" npages="2">
-0.0000000e+00 1.0000000e+00
-2.0000000e+00 3.0000000e+00
-4.0000000e+00 5.0000000e+00
-6.0000000e+00 7.0000000e+00
-</Tensor3>
-</GriddedField3>
-</arts>
diff --git a/typhon/tests/arts/test_arts.py b/typhon/tests/arts/test_arts.py
deleted file mode 100644
index d1bb9278..00000000
--- a/typhon/tests/arts/test_arts.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Testing the functions in typhon.arts.
-"""
-import shutil
-
-import pytest
-
-from typhon import arts
-
-
-class TestARTS:
- """Testing the ARTS utility functions."""
- @pytest.mark.skipif(not shutil.which('arts'), reason='arts not in PATH')
- def test_run_arts(self):
- """Test ARTS system call.
-
- Note: This test is only run, if ARTS is found in PATH.
- """
- arts_out = arts.run_arts(help=True)
-
- assert arts_out.retcode == 0
diff --git a/typhon/tests/arts/test_covariancematrix.py b/typhon/tests/arts/test_covariancematrix.py
deleted file mode 100644
index 6eb4774d..00000000
--- a/typhon/tests/arts/test_covariancematrix.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Testing the covariance matrix class.
-"""
-import shutil
-
-import pytest
-import numpy as np
-import scipy as sp
-import os
-from tempfile import mkstemp
-from typhon.arts.covariancematrix import Block, CovarianceMatrix
-from typhon.arts.xml import load, save
-
-class TestCovarianceMatrix:
-
- def setup_method(self):
- # Temporary file
- fd, self.f = mkstemp()
- os.close(fd)
-
- # Simple covariance matrix for testing
- b1 = Block(0, 0, 0, 0, False, np.random.normal(size = (10, 10)))
- b2 = Block(1, 1, 10, 10, False, sp.sparse.identity(10))
- self.covmat = CovarianceMatrix([b1, b2])
-
- def test_xml_io(self):
- save(self.covmat, self.f)
- covmat2 = load(self.f)
-
- def compare_matrices(args):
- b1, b2 = args
- m1 = b1.matrix
- m2 = b2.matrix
- if isinstance(m1, sp.sparse.spmatrix):
- m1 = m1.todense()
- m2 = m2.todense()
- print(m1)
- return np.allclose(m1, m2)
-
- assert(all(map(compare_matrices, zip(self.covmat.blocks, covmat2.blocks))))
-
- def test_to_dense(self):
- m = self.covmat.to_dense()
- assert(np.allclose(m[:10, :10], self.covmat.blocks[0].matrix))
- assert(np.allclose(m[10:, 10:], self.covmat.blocks[1].matrix.toarray()))
-
- def teardown_method(self):
- # Remove temp file
- os.remove(self.f)
diff --git a/typhon/tests/arts/test_files/controlfile.arts b/typhon/tests/arts/test_files/controlfile.arts
deleted file mode 100644
index a0e549dc..00000000
--- a/typhon/tests/arts/test_files/controlfile.arts
+++ /dev/null
@@ -1,14 +0,0 @@
-Arts2 {
- StringCreate(foo)
- StringSet(foo, "bar")
- Print(foo)
-
- VectorCreate(v_1)
- VectorCreate(v_2)
-
- ReadXML(v_1, "vector.xml")
- VectorSet(v_2, [1.0])
-
- Compare(v_1, v_2, 1e-10)
-
-}
diff --git a/typhon/tests/arts/test_griddedfield.py b/typhon/tests/arts/test_griddedfield.py
deleted file mode 100644
index 91f61420..00000000
--- a/typhon/tests/arts/test_griddedfield.py
+++ /dev/null
@@ -1,340 +0,0 @@
-# -*- encoding: utf-8 -*-
-import os
-from tempfile import mkstemp
-
-import numpy as np
-import pytest
-
-from typhon.arts import griddedfield, xml
-
-
-def _create_tensor(n):
- """Create a tensor of dimension n.
-
- Create a tensor with n dimensions with two entries in each dimension.
- The tensor is filled with increasing integers starting with 0.
-
- Args:
- n (int): number of dimensions
-
- Returns:
- np.ndarray: n-dimensional tensor
-
- """
- return np.arange(2 ** n).reshape(2 * np.ones(n).astype(int))
-
-
-def _get_griddedfield_type(dim):
- """Return the appropriate `GriddedField` type for given dimension."""
- return {
- 1: griddedfield.GriddedField1,
- 2: griddedfield.GriddedField2,
- 3: griddedfield.GriddedField3,
- 4: griddedfield.GriddedField4,
- 5: griddedfield.GriddedField5,
- 6: griddedfield.GriddedField6,
- }.get(dim)
-
-
-class TestGriddedFieldUsage:
- ref_dir = os.path.join(os.path.dirname(__file__), "reference", "")
-
- @pytest.mark.parametrize("dim", range(1, 7))
- def test_check_init(self, dim):
- """Test initialisation of GriddedFields."""
- cls = _get_griddedfield_type(dim)
- assert cls().dimension == dim
-
- def test_check_dimension1(self):
- """Test if grid and data dimension agree (positive)."""
- gf3 = griddedfield.GriddedField3()
- gf3.grids = [np.arange(5), np.arange(5), []]
- gf3.gridnames = ["A", "B", "C"]
- gf3.data = np.ones((5, 5, 1))
- assert gf3.check_dimension() is True
-
- def test_check_dimension2(self):
- """Test if grid and data dimension agree (negative)."""
- gf3 = griddedfield.GriddedField3()
- gf3.grids = [np.arange(5), np.arange(5), []]
- gf3.gridnames = ["A", "B", "C"]
-
- # It should not be allowed to set a Matrix as data in a GriddedField3.
- with pytest.raises(TypeError):
- gf3.data = np.ones((5, 5))
-
- def test_data(self):
- """Test setting and getting of data. """
- reference = np.random.randn(10, 10, 10)
- gf3 = griddedfield.GriddedField3()
- gf3.data = reference
- assert np.array_equal(gf3.data, reference)
-
- def test_name_setter(self):
- """Test name setter and getter."""
- reference = 'TestName'
- gf = griddedfield.GriddedField1()
- gf.name = reference
- assert gf.name == reference
-
- def _setup_gf2(self):
- """Helper for test_to_dict and test_to_xarray"""
- gf2 = griddedfield.GriddedField2()
- gf2.grids = [np.ones(5), np.zeros(5)]
- gf2.gridnames = ["ones", "zeros"]
- gf2.data = np.ones((5, 5))
- gf2.name = "semprini"
- return gf2
-
- def test_to_dict(self):
- """Test the conversion into a dictionary."""
- gf2 = self._setup_gf2()
- d = gf2.to_dict()
-
- res = (np.array_equal(d['ones'], np.ones(5)) and
- np.array_equal(d['zeros'], np.zeros(5)) and
- np.array_equal(d['data'], np.ones((5, 5))))
-
- assert res is True
-
- def test_to_xarray(self):
- """Test the conversion into xarray DataArray"""
- gf2 = self._setup_gf2()
-
- da = gf2.to_xarray()
-
- assert (da.name == "semprini" and
- da.dims == ("ones", "zeros") and
- np.array_equal(da.coords["zeros"], np.zeros(5)) and
- np.array_equal(da.coords["ones"], np.ones(5)) and
- np.array_equal(da.values, np.ones((5, 5))))
-
- @pytest.mark.parametrize('nametype', [float(), int()])
- def test_name_type(self, nametype):
- """Test if only names of type str are accepted."""
- with pytest.raises(TypeError):
- gf = griddedfield.GriddedField1()
- gf.name = nametype
-
- def test_shape(self):
- """Test return of data shape."""
- gf3 = xml.load(self.ref_dir + 'GriddedField3.xml')
-
- assert gf3.shape == gf3.data.shape == (2, 2, 2)
-
- def test_data_subscription(self):
- """Test direct data subscription."""
- gf3 = xml.load(self.ref_dir + 'GriddedField3.xml')
-
- assert gf3[0, 1, 0] == gf3.data[0, 1, 0]
-
- def test_slicing(self):
- """Test GriddedField slicing."""
- gf3 = xml.load(self.ref_dir + 'GriddedField3.xml')
-
- # Create new GriddedField which is a sliced subset of the initial one.
- gf_sliced = gf3.extract_slice(slice(1, None), axis=1)
-
- assert np.allclose(gf3.data[:, 1:, :], gf_sliced.data)
-
- def test_repr(self):
- """Test string represenation of GriddedField objects."""
- str(xml.load(self.ref_dir + 'GriddedField3.xml'))
-
- def test_repr_empty(self):
- """Test string represenation of empty GriddedField objects."""
- str(griddedfield.GriddedField1())
-
- def test_get(self):
- """Test the get method for named fields."""
- gf1 = griddedfield.GriddedField1(
- grids=[['foo', 'bar']],
- data=np.array([42, 13]),
- )
-
- assert gf1.get('foo') == np.array([42])
-
- def test_get_default(self):
- """Test the GriddedField.get() behavior for non-existing fieldnames."""
- gf1 = griddedfield.GriddedField1(
- grids=[['dummy']],
- data=np.array([0]),
- )
-
- # Return given default, if a name is not existing.
- assert gf1.get('nonexisting', 42) == 42
-
- # If no default is specified, return `None`.
- assert gf1.get('nonexisting') is None
-
- def test_get_keepdims(self):
- """Test the dimension handling of the GriddedField.get()."""
- gf1 = griddedfield.GriddedField1(
- grids=[['foo', 'bar']],
- data=np.array([42, 13]),
- )
-
- assert gf1.get('foo').shape == (1,)
- assert gf1.get('foo', keep_dims=False).shape == tuple()
-
- def test_get_nofieldnames(self):
- """Test behavior if first grids is not ArrayOfString."""
- gf1 = griddedfield.GriddedField1(
- grids=[[0]],
- data=np.array([0]),
- )
-
- with pytest.raises(TypeError):
- gf1.get(0)
-
- def test_scaling(self):
- """Test the scaling of data in named fields."""
- gf1 = griddedfield.GriddedField1(
- grids=[['first_field', 'second_field']],
- data=np.array([1., 1.]),
- )
-
- gf1.scale('second_field', 0.1)
-
- # Check that *only* the values of the second field are scaled.
- assert gf1.data[0] == np.array([1])
- assert gf1.data[1] == np.array([0.1])
-
- def test_add_offset(self):
- """Test adding of offset to data in named fields."""
- gf1 = griddedfield.GriddedField1(
- grids=[['first_field', 'second_field']],
- data=np.array([1., 1.]),
- )
-
- gf1.add('second_field', 0.5)
-
- # Check that *only* the values of the second field are offset.
- assert gf1.data[0] == np.array([1.])
- assert gf1.data[1] == np.array([1.5])
-
- def test_integer_scaling(self):
- """Test the scaling of integer data in named fields."""
- gf1 = griddedfield.GriddedField1(
- grids=[['first_field', 'second_field']],
- data=np.array([1, 1]),
- )
-
- gf1.scale('second_field', 0.1)
-
- # Check that *only* the values of the second field are scaled.
- assert gf1.data[0] == np.array([1])
- assert gf1.data[1] == np.array([0.1])
-
- def test_set(self):
- """Test the set method for named fields."""
- gf1 = griddedfield.GriddedField1(
- grids=[['zero', 'one']],
- data=np.array([0, 0]),
- )
-
- gf1.set('one', 1)
-
- assert gf1.data[1] == np.array([1])
-
-
-class TestGriddedFieldLoad:
- ref_dir = os.path.join(os.path.dirname(__file__), "reference", "")
-
- def test_load_data(self):
- """Load reference XML file for GriddedField3 and check the data."""
- reference = _create_tensor(3)
- gf3 = xml.load(self.ref_dir + 'GriddedField3.xml')
- test_data = gf3.data
- assert np.array_equal(test_data, reference)
-
- def test_load_grids(self):
- """Load reference XML file for GriddedField3 and check the grids."""
- reference = [np.arange(2)] * 3
- gf3 = xml.load(self.ref_dir + 'GriddedField3.xml')
- test_data = gf3.grids
- assert all(np.allclose(a, b) for a, b in zip(test_data, reference))
-
- def test_load_gridnames(self):
- """Load reference XML file for GriddedField3 and check gridnames."""
- reference = ['grid1', 'grid2', 'grid3']
- gf3 = xml.load(self.ref_dir + 'GriddedField3.xml')
- test_data = gf3.gridnames
- assert np.array_equal(test_data, reference)
-
- def test_load_dimension(self):
- """Load reference XML file for GriddedField3 and run check."""
- gf3 = xml.load(self.ref_dir + 'GriddedField3.xml')
- assert gf3.check_dimension()
-
- def test_equality(self):
- """Check the equality of two GriddedField objects."""
- # Create two different objects with same content.
- a = xml.load(self.ref_dir + 'GriddedField3.xml')
- b = xml.load(self.ref_dir + 'GriddedField3.xml')
-
- assert a == b
-
- def test_equality_empty(self):
- """Check the equality of two empty GriddedField objects."""
- # Create two different objects with same content.
- a = griddedfield.GriddedField3()
- b = griddedfield.GriddedField3()
-
- assert a == b
-
- def test_nonequality(self):
- """Check the non-equality of two GriddedField objects."""
- # Create two different objects with same content.
- a = xml.load(self.ref_dir + 'GriddedField3.xml')
- b = xml.load(self.ref_dir + 'GriddedField3.xml')
-
- a.name = 'foo'
- b.name = 'bar'
-
- assert a != b
-
- def test_copy(self):
- """Test copying of GriddedFields."""
- a = xml.load(self.ref_dir + 'GriddedField3.xml')
- b = a.copy()
-
- # GriddedFields should be equal but not the same object.
- assert a is not b and a == b
-
- def test_deepcopy(self):
- """Test deepcopying of GriddedField attributes."""
- a = xml.load(self.ref_dir + 'GriddedField3.xml')
- b = a.copy()
-
- # Grids should not be the same object.
- assert a.grids is not b.grids
-
- def test_from_xarray(self):
- a = xml.load(self.ref_dir + 'GriddedField3.xml')
- a.dataname = 'Testdata'
- da = a.to_xarray()
- b = griddedfield.GriddedField3.from_xarray(da)
- assert a == b
-
-
-class TestGriddedFieldWrite:
- def setup_method(self):
- """Create a temporary file."""
- fd, self.f = mkstemp()
- os.close(fd)
-
- def teardown_method(self):
- """Delete temporary file."""
- os.remove(self.f)
-
- @pytest.mark.parametrize("dim", range(1, 7))
- def test_write_load_griddedfield(self, dim):
- gf = _get_griddedfield_type(dim)()
- gf.grids = [np.arange(2)] * dim
- gf.data = _create_tensor(dim)
- xml.save(gf, self.f)
-
- test_data = xml.load(self.f)
-
- assert np.array_equal(gf.data, test_data.data)
diff --git a/typhon/tests/arts/test_workspace.py b/typhon/tests/arts/test_workspace.py
deleted file mode 100644
index 3b24cd13..00000000
--- a/typhon/tests/arts/test_workspace.py
+++ /dev/null
@@ -1,328 +0,0 @@
-# -*- encoding: utf-8 -*-
-import os
-
-import numpy as np
-import pytest
-import scipy as sp
-
-import typhon
-
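-# The workspace interface requires a compiled ARTS library. If the import
-# fails, set a flag so that the whole test class is skipped below.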
-try:
- from typhon.arts.workspace import Workspace, arts_agenda
- from typhon.arts.workspace.variables import WorkspaceVariable
-except ImportError:
- skip_arts_tests = True
-else:
- skip_arts_tests = False
-
-
-def agenda(ws):
- ws.Print(ws.y, 0)
-
-@pytest.mark.skipif(skip_arts_tests, reason='ARTS library not available')
-class TestWorkspace:
- def setup_method(self):
- """This ensures a new Workspace for every test."""
- self.dir = os.path.dirname(os.path.realpath(__file__))
- self.ws = Workspace()
- self.setup_workspace()
-
- def setup_workspace(self):
- ws = self.ws
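-        # Minimal 1D atmosphere: a 21-level pressure grid, one frequency
-        # channel at 183 GHz, and a downward-looking sensor at 830 km.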
- ws.atmosphere_dim = 1
- ws.p_grid = np.linspace(1e5, 1e3, 21)
- ws.Touch(ws.lat_grid)
- ws.Touch(ws.lon_grid)
-
- ws.f_grid = 183.0e9 * np.ones(1)
- ws.stokes_dim = 1
-
- ws.sensor_los = 180.0 * np.ones((1, 1))
- ws.sensor_pos = 830e3 * np.ones((1, 1))
- ws.sensorOff()
-
- def test_index_transfer(self):
- self.ws.IndexCreate("index_variable")
- i = np.random.randint(0, 100)
- self.ws.index_variable = i
- assert self.ws.index_variable.value == i
-
- def test_array_of_index_transfer(self):
- self.ws.ArrayOfIndexCreate("array_of_index_variable")
- i = [np.random.randint(0, 100) for j in range(10)]
-
- self.ws.array_of_index_variable = i
- assert self.ws.array_of_index_variable.value == i
-
- self.ws.array_of_index_variable = []
- assert self.ws.array_of_index_variable.value == []
-
- def test_array_of_vector_transfer(self):
- self.ws.ArrayOfVectorCreate("array_of_vector_variable")
- aov = typhon.arts.xml.load(os.path.join(self.dir,
- "xml/reference/arrayofvector.xml"))
- self.ws.array_of_vector_variable = aov
- assert self.ws.array_of_vector_variable.value == aov
-
- def test_string_transfer(self):
- self.ws.StringCreate("string_variable")
- s = "some random string."
- self.ws.string_variable = s
- assert self.ws.string_variable.value == s
-
- def test_vector_transfer(self):
- self.ws.VectorCreate("vector_variable")
- v = np.random.rand(10)
- self.ws.vector_variable = v
- assert all(self.ws.vector_variable.value == v)
-
- def test_matrix_transfer(self):
- self.ws.MatrixCreate("matrix_variable")
- m = np.random.rand(10, 10)
- self.ws.matrix_variable = m
- assert all(self.ws.matrix_variable.value.ravel() == m.ravel())
-
- def test_sparse_transfer(self):
- n = 100
- d2 = np.ones(n - 2)
- d1 = np.ones(n - 1)
- d = np.ones(n)
- m = sp.sparse.diags(diagonals=[d2, d1, d, d1, d2],
- offsets=[2, 1, 0, -1, -2])
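-        # sensor_response is a predefined Sparse workspace variable, so no
-        # explicit Create call is needed for the round trip.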
- self.ws.sensor_response = m
- assert np.all(m.todense() == self.ws.sensor_response.value.todense())
-
- def test_supergeneric_overload_resolution(self):
- self.ws.ArrayOfIndexCreate("array_of_index")
- self.ws.ArrayOfArrayOfIndexCreate("array_of_array_of_index")
- self.ws.array_of_index = [1, 2, 3]
- self.ws.Append(self.ws.array_of_array_of_index, self.ws.array_of_index)
- self.ws.Append(self.ws.array_of_array_of_index, self.ws.array_of_index)
-
- def test_creation(self):
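-        # Re-creating a variable with the same group is allowed; creating
-        # it again with a different group raises.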
- self.ws.ArrayOfIndexCreate("array_of_index")
- self.ws.ArrayOfIndexCreate("array_of_index")
- with pytest.raises(Exception):
- self.ws.VectorCreate("array_of_index")
-
- def test_wsm_error(self):
- with pytest.raises(Exception):
- self.ws.yCalc()
-
- def test_doc(self):
- repr(self.ws.yCalc)
-
- def test_agenda(self):
-
- self.ws.atmosphere_dim = 1
-
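-        # The arts_agenda decorator compiles the function body into an
-        # ARTS agenda that can be executed on the workspace.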
- @arts_agenda
- def add_1(ws):
- ws.IndexAdd(ws.atmosphere_dim,
- ws.atmosphere_dim,
- 1)
- add_1.execute(self.ws)
-
- assert self.ws.atmosphere_dim.value == 2
-
- add_1.append(add_1)
- add_1.execute(self.ws)
-
- assert self.ws.atmosphere_dim.value == 4
-
- args = [self.ws.atmosphere_dim, self.ws.atmosphere_dim, 1]
-
- @arts_agenda
- def add_2(ws):
- ws.IndexAdd(*args)
-
- add_2.execute(self.ws)
-
- assert self.ws.atmosphere_dim.value == 5
-
-    def test_execute_controlfile_basic(self):
-
- dir = os.path.dirname(os.path.realpath(__file__))
- test_dir = os.path.join(dir, "test_files")
- self.ws.WriteXML("ascii", np.array([1.0]),
- os.path.join(test_dir, "vector.xml"))
- os.chdir(test_dir)
- self.ws.execute_controlfile("controlfile.arts")
-
- os.remove(os.path.join(test_dir, "vector.xml"))
-
- def test_supergeneric_overload_failure(self):
- with pytest.raises(Exception):
- self.ws.NumericCreate("numeric_wsv")
- self.ws.StringCreate("string_wsv")
- self.ws.Copy(self.ws.string_wsv, self.ws.numeric_wsv)
-
- def test_tensor_3(self):
- t_0 = np.random.rand(*([3] * 3))
- self.ws.Tensor3Create("tensor_3")
- self.ws.tensor_3 = t_0
- assert np.all(t_0 == self.ws.tensor_3.value)
-
- def test_tensor_4(self):
- t_0 = np.random.rand(*([3] * 4))
-        self.ws.Tensor4Create("tensor_4")
- self.ws.tensor_4 = t_0
- assert np.all(t_0 == self.ws.tensor_4.value)
-
- def test_tensor_5(self):
- t_0 = np.random.rand(*([3] * 5))
-        self.ws.Tensor5Create("tensor_5")
- self.ws.tensor_5 = t_0
- assert np.all(t_0 == self.ws.tensor_5.value)
-
- def test_tensor_6(self):
- t_0 = np.random.rand(*([3] * 6))
-        self.ws.Tensor6Create("tensor_6")
- self.ws.tensor_6 = t_0
- assert np.all(t_0 == self.ws.tensor_6.value)
-
- def test_tensor_7(self):
- t_0 = np.random.rand(*([3] * 7))
- self.ws.Tensor7Create("tensor_7")
- self.ws.tensor_7 = t_0
- assert np.all(t_0 == self.ws.tensor_7.value)
-
- def test_execute_controlfile(self):
-
- dir = os.path.dirname(os.path.realpath(__file__))
- test_dir = os.path.join(dir, "test_files")
- self.ws.WriteXML("ascii", np.array([1.0]),
- os.path.join(test_dir, "vector.xml"))
- os.chdir(test_dir)
-
- agenda = self.ws.execute_controlfile("controlfile.arts")
- self.ws.foo = "not bar"
-
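-        # Inside an agenda definition, INCLUDE() splices in either a named
-        # controlfile or an already parsed agenda object.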
- @arts_agenda
- def execute(ws):
- ws.FlagOff(ws.jacobian_do)
- ws.StringSet(ws.foo, "still not bar")
- INCLUDE("controlfile.arts")
- INCLUDE(agenda)
-
- self.ws.execute_agenda(execute)
-
- assert self.ws.foo.value == "bar"
- os.remove(os.path.join(test_dir, "vector.xml"))
-
- def test_covariance_matrix(self):
- ws = self.ws
-
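-        # Two retrieval quantities are defined, so covmat_sx receives two
-        # diagonal blocks below.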
- ws.jacobianInit()
- ws.jacobianAddAbsSpecies(species = "O3",
- g1 = ws.p_grid,
- g2 = ws.lat_grid,
- g3 = ws.lon_grid)
- ws.jacobianAddAbsSpecies(species = "H2O",
- g1 = ws.p_grid,
- g2 = ws.lat_grid,
- g3 = ws.lon_grid)
- ws.jacobianClose()
-
- ws.covmatDiagonal(out = ws.covmat_block,
- out_inverse = ws.covmat_block,
- vars = 10.0 * np.ones(ws.p_grid.value.size))
- ws.covmat_sxAddBlock(block = ws.covmat_block)
- ws.covmatDiagonal(out = ws.covmat_block,
- out_inverse = ws.covmat_block,
- vars = 20.0 * np.ones(ws.p_grid.value.size))
- ws.covmat_sxAddBlock(block = ws.covmat_block)
-
-
-
- def test_variable_set_empty(self):
- self.ws.f_grid = np.array([94e9])
- self.ws.f_grid = []
- assert self.ws.f_grid.value.size == 0
-
- def test_variable_creation(self):
-
- # Unnamed variable
- wsv = self.ws.create_variable("Matrix", None)
- self.ws.__setattr__(wsv.name, np.eye(5))
- assert np.all(np.isclose(np.eye(5),
- self.ws.__getattr__(wsv.name).value))
-
- # Named variable
- wsv = self.ws.create_variable("Matrix", "matrix_wsv")
- self.ws.matrix_wsv = np.eye(5)
- assert np.all(np.isclose(np.eye(5), self.ws.matrix_wsv.value))
-
- def test_wsv_setattr(self):
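-        # Assigning to .value writes through to the workspace variable.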
- wsv = self.ws.atmosphere_dim
- wsv.value = 12
- assert self.ws.atmosphere_dim.value == 12
-
- def test_convert(self):
-
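-        # convert() coerces Python scalars to the requested group,
-        # broadcasting them up to the required rank where necessary.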
- v = WorkspaceVariable.convert("Index", 1.2)
- assert(v == 1)
-
- v = WorkspaceVariable.convert("String", "string")
- assert(v == "string")
-
- v = WorkspaceVariable.convert("Numeric", 1)
- assert(type(v) == np.float64)
-
- v = WorkspaceVariable.convert("Vector", 1.0)
- assert(v.shape == (1,))
-
- v = WorkspaceVariable.convert("Matrix", 1.0)
- assert(v.shape == (1, 1))
-
- v = WorkspaceVariable.convert("Tensor3", 1.0)
- assert(v.shape == (1, 1, 1))
-
- v = WorkspaceVariable.convert("Tensor6", 1.0)
- assert(v.shape == (1, 1, 1, 1, 1, 1))
-
- v = WorkspaceVariable.convert("ArrayOfArrayOfIndex", 1.0)
- assert(type(v) == list)
- assert(type(v[0]) == list)
- assert(type(v[0][0]) == int)
-
- v = WorkspaceVariable.convert("ArrayOfArrayOfIndex", 1)
- return v
-
- def test_callbacks(self):
-
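-        # Arbitrary Python code inside the agenda body is executed as a
-        # callback when the agenda runs, so the last IndexSet wins.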
- @arts_agenda
- def agenda(ws):
- """
- This agenda sets a workspace variable in a very
- obscure way.
- """
-
- class Foo:
- def __init__(self, ws):
- self.ws = ws
-
- def ooo(self):
- self.ws.IndexSet(ws.stokes_dim, 42)
-
- foo = Foo(ws)
- ws.IndexSet(ws.stokes_dim, 21)
- foo.ooo()
-
- agenda.execute(self.ws)
-
- assert self.ws.stokes_dim.value == 42
-
- def test_contiguous_arrays(self):
- x = np.linspace(0, 1, 256)
-
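-        # The interface also has to accept Fortran-ordered and strided
-        # views, presumably by copying them into contiguous buffers.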
- xf = np.asarray(x, order='F')
- self.ws.f_grid = xf
- assert np.array_equal(self.ws.f_grid.value, xf)
-
- self.ws.f_grid = x[::2]
- assert np.array_equal(self.ws.f_grid.value, x[::2])
-
- self.ws.f_grid = np.ascontiguousarray(x[::2])
- assert np.array_equal(self.ws.f_grid.value, x[::2])
diff --git a/typhon/tests/arts/xml/__init__.py b/typhon/tests/arts/xml/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/typhon/tests/arts/xml/load_arts_xml_data.py b/typhon/tests/arts/xml/load_arts_xml_data.py
deleted file mode 100644
index a1c121d8..00000000
--- a/typhon/tests/arts/xml/load_arts_xml_data.py
+++ /dev/null
@@ -1,30 +0,0 @@
-"""Special test case which tries to load all files in ARTS XML data."""
-import os
-
-import pytest
-
-from typhon.arts import xml
-from typhon import environment
-
-
-def collect_xml_files():
- """Collect all XML files in the ARTS XML data tree."""
- for d in environment.ARTS_DATA_PATH.split(os.path.pathsep):
- for root, _, filenames in os.walk(d):
- for filename in filenames:
- if filename.endswith(('.xml', '.xml.gz')):
- yield os.path.join(root, filename)
-
-
-@pytest.mark.slow
-@pytest.mark.skipif(environment.ARTS_DATA_PATH is None,
- reason='ARTS_DATA_PATH not set.')
-@pytest.mark.parametrize('xmlfile', collect_xml_files())
-def test_load_arts_xml_data(xmlfile):
- """Try to load all XML files in ARTS_DATA_PATH.
-
- Search for XML files in ARTS_DATA_PATH. If files are found, try to load
-    them. It only checks that xml.load runs without raising an exception.
- """
-    xml.load(xmlfile)
diff --git a/typhon/tests/arts/xml/reference/arrayofindex-bin.xml b/typhon/tests/arts/xml/reference/arrayofindex-bin.xml
deleted file mode 100644
index ca9a985f..00000000
--- a/typhon/tests/arts/xml/reference/arrayofindex-bin.xml
+++ /dev/null
@@ -1,8 +0,0 @@
-
-
-
-
-
-
-
-
diff --git a/typhon/tests/arts/xml/reference/arrayofindex-bin.xml.bin b/typhon/tests/arts/xml/reference/arrayofindex-bin.xml.bin
deleted file mode 100644
index 9adb25b5..00000000
Binary files a/typhon/tests/arts/xml/reference/arrayofindex-bin.xml.bin and /dev/null differ
diff --git a/typhon/tests/arts/xml/reference/arrayofindex-comment.xml b/typhon/tests/arts/xml/reference/arrayofindex-comment.xml
deleted file mode 100644
index 3b191add..00000000
--- a/typhon/tests/arts/xml/reference/arrayofindex-comment.xml
+++ /dev/null
@@ -1,9 +0,0 @@
-<?xml version="1.0"?>
-<arts format="ascii" version="1">
-<comment>Ignore me.</comment>
-<Array type="Index" nelem="3">
-<Index>1</Index>
-<Index>2</Index>
-<Index>3</Index>
-</Array>
-</arts>
diff --git a/typhon/tests/arts/xml/reference/arrayofindex.xml b/typhon/tests/arts/xml/reference/arrayofindex.xml
deleted file mode 100644
index 4e6c87a8..00000000
--- a/typhon/tests/arts/xml/reference/arrayofindex.xml
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0"?>
-<arts format="ascii" version="1">
-<Array type="Index" nelem="3">
-<Index>1</Index>
-<Index>2</Index>
-<Index>3</Index>
-</Array>
-</arts>
diff --git a/typhon/tests/arts/xml/reference/arrayofstring.xml b/typhon/tests/arts/xml/reference/arrayofstring.xml
deleted file mode 100644
index 129bf9ce..00000000
--- a/typhon/tests/arts/xml/reference/arrayofstring.xml
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0"?>
-<arts format="ascii" version="1">
-<Array type="String" nelem="3">
-<String>"a"</String>
-<String>"bb"</String>
-<String>"ccc"</String>
-</Array>
-</arts>
diff --git a/typhon/tests/arts/xml/reference/arrayofvector.xml b/typhon/tests/arts/xml/reference/arrayofvector.xml
deleted file mode 100644
index e84ac7c2..00000000
--- a/typhon/tests/arts/xml/reference/arrayofvector.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-<?xml version="1.0"?>
-<arts format="ascii" version="1">
-<Array type="Vector" nelem="2">
-<Vector nelem="1">
-0.0000000e+00
-</Vector>
-<Vector nelem="1">
-0.0000000e+00
-</Vector>
-</Array>
-</arts>
diff --git a/typhon/tests/arts/xml/reference/comment.xml b/typhon/tests/arts/xml/reference/comment.xml
deleted file mode 100644
index 3a91010c..00000000
--- a/typhon/tests/arts/xml/reference/comment.xml
+++ /dev/null
@@ -1,4 +0,0 @@
-<?xml version="1.0"?>
-<arts format="ascii" version="1">
-<comment>Ignore me.</comment>
-</arts>
diff --git a/typhon/tests/arts/xml/reference/index.xml b/typhon/tests/arts/xml/reference/index.xml
deleted file mode 100644
index 73f0fa66..00000000
--- a/typhon/tests/arts/xml/reference/index.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-<?xml version="1.0"?>
-<arts format="ascii" version="1">
-<Index>
-0
-</Index>
-</arts>
diff --git a/typhon/tests/arts/xml/reference/matrix.xml b/typhon/tests/arts/xml/reference/matrix.xml
deleted file mode 100644
index 02f27d60..00000000
--- a/typhon/tests/arts/xml/reference/matrix.xml
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0"?>
-<arts format="ascii" version="1">
-<Matrix nrows="2" ncols="2">
-0.0000000e+00 1.0000000e+00
-2.0000000e+00 3.0000000e+00
-</Matrix>
-</arts>
diff --git a/typhon/tests/arts/xml/reference/sparse-bin.xml b/typhon/tests/arts/xml/reference/sparse-bin.xml
deleted file mode 100644
index 441b1dbe..00000000
--- a/typhon/tests/arts/xml/reference/sparse-bin.xml
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/typhon/tests/arts/xml/reference/sparse-bin.xml.bin b/typhon/tests/arts/xml/reference/sparse-bin.xml.bin
deleted file mode 100644
index 07e452e2..00000000
Binary files a/typhon/tests/arts/xml/reference/sparse-bin.xml.bin and /dev/null differ
diff --git a/typhon/tests/arts/xml/reference/sparse.xml b/typhon/tests/arts/xml/reference/sparse.xml
deleted file mode 100644
index 214f9358..00000000
--- a/typhon/tests/arts/xml/reference/sparse.xml
+++ /dev/null
@@ -1,32 +0,0 @@
-<?xml version="1.0"?>
-<arts format="ascii" version="1">
-<Sparse nrows="10" ncols="10">
-<RowIndex nelem="10">
-0
-1
-2
-3
-4
-5
-6
-7
-8
-9
-</RowIndex>
-<ColIndex nelem="10">
-0
-1
-2
-3
-4
-5
-6
-7
-8
-9
-</ColIndex>
-<SparseData nelem="10">
-7 7 7 7 7 7 7 7 7 7
-</SparseData>
-</Sparse>
-</arts>
diff --git a/typhon/tests/arts/xml/reference/tensor3.xml b/typhon/tests/arts/xml/reference/tensor3.xml
deleted file mode 100644
index 7fc96f44..00000000
--- a/typhon/tests/arts/xml/reference/tensor3.xml
+++ /dev/null
@@ -1,9 +0,0 @@
-<?xml version="1.0"?>
-<arts format="ascii" version="1">
-<Tensor3 npages="2" nrows="2" ncols="2">
-0.0000000e+00 1.0000000e+00
-2.0000000e+00 3.0000000e+00
-4.0000000e+00 5.0000000e+00
-6.0000000e+00 7.0000000e+00
-</Tensor3>
-</arts>
diff --git a/typhon/tests/arts/xml/reference/tensor4.xml b/typhon/tests/arts/xml/reference/tensor4.xml
deleted file mode 100644
index 0da3c944..00000000
--- a/typhon/tests/arts/xml/reference/tensor4.xml
+++ /dev/null
@@ -1,13 +0,0 @@
-<?xml version="1.0"?>
-<arts format="ascii" version="1">
-<Tensor4 nbooks="2" npages="2" nrows="2" ncols="2">
-0.0000000e+00 1.0000000e+00
-2.0000000e+00 3.0000000e+00
-4.0000000e+00 5.0000000e+00
-6.0000000e+00 7.0000000e+00
-8.0000000e+00 9.0000000e+00
-1.0000000e+01 1.1000000e+01
-1.2000000e+01 1.3000000e+01
-1.4000000e+01 1.5000000e+01
-</Tensor4>
-</arts>
diff --git a/typhon/tests/arts/xml/reference/tensor5.xml b/typhon/tests/arts/xml/reference/tensor5.xml
deleted file mode 100644
index ef564299..00000000
--- a/typhon/tests/arts/xml/reference/tensor5.xml
+++ /dev/null
@@ -1,21 +0,0 @@
-<?xml version="1.0"?>
-<arts format="ascii" version="1">
-<Tensor5 nshelves="2" nbooks="2" npages="2" nrows="2" ncols="2">
-0.0000000e+00 1.0000000e+00
-2.0000000e+00 3.0000000e+00
-4.0000000e+00 5.0000000e+00
-6.0000000e+00 7.0000000e+00
-8.0000000e+00 9.0000000e+00
-1.0000000e+01 1.1000000e+01
-1.2000000e+01 1.3000000e+01
-1.4000000e+01 1.5000000e+01
-1.6000000e+01 1.7000000e+01
-1.8000000e+01 1.9000000e+01
-2.0000000e+01 2.1000000e+01
-2.2000000e+01 2.3000000e+01
-2.4000000e+01 2.5000000e+01
-2.6000000e+01 2.7000000e+01
-2.8000000e+01 2.9000000e+01
-3.0000000e+01 3.1000000e+01
-</Tensor5>
-</arts>
diff --git a/typhon/tests/arts/xml/reference/tensor6.xml b/typhon/tests/arts/xml/reference/tensor6.xml
deleted file mode 100644
index cef84a90..00000000
--- a/typhon/tests/arts/xml/reference/tensor6.xml
+++ /dev/null
@@ -1,37 +0,0 @@
-<?xml version="1.0"?>
-<arts format="ascii" version="1">
-<Tensor6 nvitrines="2" nshelves="2" nbooks="2" npages="2" nrows="2" ncols="2">
-0.0000000e+00 1.0000000e+00
-2.0000000e+00 3.0000000e+00
-4.0000000e+00 5.0000000e+00
-6.0000000e+00 7.0000000e+00
-8.0000000e+00 9.0000000e+00
-1.0000000e+01 1.1000000e+01
-1.2000000e+01 1.3000000e+01
-1.4000000e+01 1.5000000e+01
-1.6000000e+01 1.7000000e+01
-1.8000000e+01 1.9000000e+01
-2.0000000e+01 2.1000000e+01
-2.2000000e+01 2.3000000e+01
-2.4000000e+01 2.5000000e+01
-2.6000000e+01 2.7000000e+01
-2.8000000e+01 2.9000000e+01
-3.0000000e+01 3.1000000e+01
-3.2000000e+01 3.3000000e+01
-3.4000000e+01 3.5000000e+01
-3.6000000e+01 3.7000000e+01
-3.8000000e+01 3.9000000e+01
-4.0000000e+01 4.1000000e+01
-4.2000000e+01 4.3000000e+01
-4.4000000e+01 4.5000000e+01
-4.6000000e+01 4.7000000e+01
-4.8000000e+01 4.9000000e+01
-5.0000000e+01 5.1000000e+01
-5.2000000e+01 5.3000000e+01
-5.4000000e+01 5.5000000e+01
-5.6000000e+01 5.7000000e+01
-5.8000000e+01 5.9000000e+01
-6.0000000e+01 6.1000000e+01
-6.2000000e+01 6.3000000e+01
-</Tensor6>
-</arts>
diff --git a/typhon/tests/arts/xml/reference/tensor7.xml b/typhon/tests/arts/xml/reference/tensor7.xml
deleted file mode 100644
index 7d053a06..00000000
--- a/typhon/tests/arts/xml/reference/tensor7.xml
+++ /dev/null
@@ -1,69 +0,0 @@
-<?xml version="1.0"?>
-<arts format="ascii" version="1">
-<Tensor7 nlibraries="2" nvitrines="2" nshelves="2" nbooks="2" npages="2" nrows="2" ncols="2">
-0.0000000e+00 1.0000000e+00
-2.0000000e+00 3.0000000e+00
-4.0000000e+00 5.0000000e+00
-6.0000000e+00 7.0000000e+00
-8.0000000e+00 9.0000000e+00
-1.0000000e+01 1.1000000e+01
-1.2000000e+01 1.3000000e+01
-1.4000000e+01 1.5000000e+01
-1.6000000e+01 1.7000000e+01
-1.8000000e+01 1.9000000e+01
-2.0000000e+01 2.1000000e+01
-2.2000000e+01 2.3000000e+01
-2.4000000e+01 2.5000000e+01
-2.6000000e+01 2.7000000e+01
-2.8000000e+01 2.9000000e+01
-3.0000000e+01 3.1000000e+01
-3.2000000e+01 3.3000000e+01
-3.4000000e+01 3.5000000e+01
-3.6000000e+01 3.7000000e+01
-3.8000000e+01 3.9000000e+01
-4.0000000e+01 4.1000000e+01
-4.2000000e+01 4.3000000e+01
-4.4000000e+01 4.5000000e+01
-4.6000000e+01 4.7000000e+01
-4.8000000e+01 4.9000000e+01
-5.0000000e+01 5.1000000e+01
-5.2000000e+01 5.3000000e+01
-5.4000000e+01 5.5000000e+01
-5.6000000e+01 5.7000000e+01
-5.8000000e+01 5.9000000e+01
-6.0000000e+01 6.1000000e+01
-6.2000000e+01 6.3000000e+01
-6.4000000e+01 6.5000000e+01
-6.6000000e+01 6.7000000e+01
-6.8000000e+01 6.9000000e+01
-7.0000000e+01 7.1000000e+01
-7.2000000e+01 7.3000000e+01
-7.4000000e+01 7.5000000e+01
-7.6000000e+01 7.7000000e+01
-7.8000000e+01 7.9000000e+01
-8.0000000e+01 8.1000000e+01
-8.2000000e+01 8.3000000e+01
-8.4000000e+01 8.5000000e+01
-8.6000000e+01 8.7000000e+01
-8.8000000e+01 8.9000000e+01
-9.0000000e+01 9.1000000e+01
-9.2000000e+01 9.3000000e+01
-9.4000000e+01 9.5000000e+01
-9.6000000e+01 9.7000000e+01
-9.8000000e+01 9.9000000e+01
-1.0000000e+02 1.0100000e+02
-1.0200000e+02 1.0300000e+02
-1.0400000e+02 1.0500000e+02
-1.0600000e+02 1.0700000e+02
-1.0800000e+02 1.0900000e+02
-1.1000000e+02 1.1100000e+02
-1.1200000e+02 1.1300000e+02
-1.1400000e+02 1.1500000e+02
-1.1600000e+02 1.1700000e+02
-1.1800000e+02 1.1900000e+02
-1.2000000e+02 1.2100000e+02
-1.2200000e+02 1.2300000e+02
-1.2400000e+02 1.2500000e+02
-1.2600000e+02 1.2700000e+02
-</Tensor7>
-</arts>
diff --git a/typhon/tests/arts/xml/reference/vector-bin.xml b/typhon/tests/arts/xml/reference/vector-bin.xml
deleted file mode 100644
index e2a0ed29..00000000
--- a/typhon/tests/arts/xml/reference/vector-bin.xml
+++ /dev/null
@@ -1,5 +0,0 @@
-<?xml version="1.0"?>
-<arts format="binary" version="1">
-<Vector nelem="2">
-</Vector>
-</arts>
diff --git a/typhon/tests/arts/xml/reference/vector-bin.xml.bin b/typhon/tests/arts/xml/reference/vector-bin.xml.bin
deleted file mode 100644
index e133de42..00000000
Binary files a/typhon/tests/arts/xml/reference/vector-bin.xml.bin and /dev/null differ
diff --git a/typhon/tests/arts/xml/reference/vector.xml b/typhon/tests/arts/xml/reference/vector.xml
deleted file mode 100644
index 0fc02a3f..00000000
--- a/typhon/tests/arts/xml/reference/vector.xml
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0"?>
-<arts format="ascii" version="1">
-<Vector nelem="2">
-0.0000000e+00
-1.0000000e+00
-</Vector>
-</arts>
diff --git a/typhon/tests/arts/xml/test_matpack_types.py b/typhon/tests/arts/xml/test_matpack_types.py
deleted file mode 100644
index b10bfb2b..00000000
--- a/typhon/tests/arts/xml/test_matpack_types.py
+++ /dev/null
@@ -1,349 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Testing the basic ARTS XML functions
-
-This module provides basic functions to test the reading and writing
-of ARTS XML files.
-"""
-import os
-from tempfile import mkstemp
-
-import numpy as np
-from scipy import sparse
-import pytest
-
-from typhon.arts import xml
-from typhon.arts.catalogues import Sparse
-
-
-def _create_tensor(n):
- """Create a tensor of dimension n.
-
-    Create a tensor with n dimensions and two entries per dimension.
- The tensor is filled with increasing integers starting with 0.
-
- Args:
- n (int): number of dimensions
-
- Returns:
- np.ndarray: n-dimensional tensor
-
- """
- return np.arange(2 ** n).reshape(2 * np.ones(n).astype(int))
-
-
-def _create_complex_tensor(n):
- """Create a complex tensor of dimension n.
-
-    Create a complex tensor with n dimensions and two entries per
-    dimension. The tensor is filled with increasing integers starting with 0.
-
- Args:
- n (int): number of dimensions
-
- Returns:
- np.ndarray: n-dimensional tensor
-
- """
- return np.arange(2 ** n,
- dtype=np.complex128).reshape(2 * np.ones(n).astype(int))
-
-
-def _create_empty_tensor(n):
- """Create an empty tensor of dimension n.
-
- Create a tensor with n dimensions of size 0.
-
- Args:
- n (int): number of dimensions
-
- Returns:
- np.ndarray: n-dimensional tensor
-
- """
- return np.ndarray((0,) * n)
-
-
-def _create_sparse(n):
- """Create a n x n diagonal sparse with ones on the diagonal.
-
- Args:
- n (int): size
-
- Returns:
-        typhon.arts.catalogues.Sparse: sparse matrix with ones on the diagonal
-
- """
- a = sparse.diags(np.ones(n), format='csc')
- return Sparse(a)
-
-
-class TestLoad:
- """Testing the ARTS XML reading functions.
-
- This class provides functions to test the reading of XML files. For this
-    purpose, reference files are read and compared to the expected results.
-
- Attributes:
- ref_dir (str): absolute path to the reference data directory.
-
- """
- ref_dir = os.path.join(os.path.dirname(__file__), "reference", "")
-
- def test_load_index(self):
- """Load reference XML file for ARTS type Index."""
- assert xml.load(self.ref_dir + 'index.xml') == 0
-
- def test_load_vector(self):
- """Load reference XML file for ARTS type Vector."""
- reference = _create_tensor(1)
- test_data = xml.load(self.ref_dir + 'vector.xml')
- assert np.array_equal(test_data, reference)
-
- def test_load_vector_binary(self):
- """Load reference binary XML file for ARTS type Vector."""
- reference = _create_tensor(1)
- test_data = xml.load(self.ref_dir + 'vector-bin.xml')
- assert np.array_equal(test_data, reference)
-
- def test_load_matrix(self):
- """Load reference XML file for ARTS type Matrix."""
- reference = _create_tensor(2)
- test_data = xml.load(self.ref_dir + 'matrix.xml')
- assert np.array_equal(test_data, reference)
-
- @pytest.mark.parametrize('suffix', ['.xml', '-bin.xml'])
- def test_load_sparse(self, suffix):
- """Load reference XML file for ARTS type Sparse."""
- reference = _create_sparse(10) * 7
- test_data = xml.load(self.ref_dir + 'sparse' + suffix)
- assert np.array_equal(test_data.toarray(), reference.toarray())
-
- @pytest.mark.parametrize('n', range(3, 8))
- def test_load_tensor(self, n):
- """Load tensor of dimension n and compare data to reference.
-
- Args:
- n (int): number of dimensions
- """
- reference = _create_tensor(n)
- test_data = xml.load(self.ref_dir + 'tensor{}.xml'.format(n))
- assert np.array_equal(test_data, reference)
-
- def test_load_arrayofindex(self):
- """Load reference XML file for ARTS type ArrayOfIndex."""
- reference = [1., 2., 3.]
- test_data = xml.load(self.ref_dir + 'arrayofindex.xml')
- assert np.array_equal(test_data, reference)
-
- def test_load_arrayofindex_binary(self):
- """Load reference binary XML file for ARTS type ArrayOfIndex."""
- reference = [1., 2., 3.]
- test_data = xml.load(self.ref_dir + 'arrayofindex-bin.xml')
- assert np.array_equal(test_data, reference)
-
- def test_load_arrayofstring(self):
- """Load reference XML file for ARTS type ArrayOfString."""
- reference = ['a', 'bb', 'ccc']
- test_data = xml.load(self.ref_dir + 'arrayofstring.xml')
- assert np.array_equal(test_data, reference)
-
- def test_load_arrayofvector(self):
- """Load reference XML file for ARTS type ArrayOfVector."""
- reference = [np.arange(1), np.arange(1)]
- test_data = xml.load(self.ref_dir + 'arrayofvector.xml')
- assert np.array_equal(test_data, reference)
-
- def test_load_comment(self):
- """Load reference XML file storing only a comment."""
- test_data = xml.load(self.ref_dir + 'comment.xml')
- assert test_data is None
-
- def test_load_arrayofindex_with_comment(self):
- """Load reference XML file for ARTS type ArrayOfIndex with comment."""
- reference = [1., 2., 3.]
- test_data = xml.load(self.ref_dir + 'arrayofindex-comment.xml')
- assert np.array_equal(test_data, reference)
-
-
-class TestSave:
- """Testing the ARTS XML saving functions.
-
- This class provides functions to test the saving of XML files. Data is
-    created and stored in a temporary file. Afterwards, the file is read back
-    and the data is compared to the initial data.
-
- Notes:
-        The methods setup_method() and teardown_method() are run automatically
-        before and after every test, respectively.
-
- """
- def setup_method(self):
- """Create a temporary file."""
- fd, self.f = mkstemp()
- os.close(fd)
-
- def teardown_method(self):
- """Delete temporary file."""
- for f in [self.f, self.f + '.gz', self.f + '.bin']:
- if os.path.isfile(f):
- os.remove(f)
-
- def test_save_index(self):
- """Save Index to file, read it and compare the results."""
- reference = 0
- xml.save(reference, self.f)
- test_data = xml.load(self.f)
- assert test_data == reference
-
- def test_save_vector(self):
- """Save Vector to file, read it and compare the results."""
- reference = _create_tensor(1)
- xml.save(reference, self.f)
- test_data = xml.load(self.f)
- assert np.array_equal(test_data, reference)
-
- def test_save_vector_binary(self):
- """Save Vector to binary file, read it and compare the results."""
- reference = _create_tensor(1)
- xml.save(reference, self.f, format='binary')
- test_data = xml.load(self.f)
- assert np.array_equal(test_data, reference)
-
- def test_save_complex_vector(self):
- """Save complex Vector to file, read it and compare the results."""
- reference = _create_complex_tensor(1)
- xml.save(reference, self.f)
- test_data = xml.load(self.f)
- assert np.array_equal(test_data, reference)
-
- def test_save_complex_vector_binary(self):
- """Save complex Vector to file, read it and compare the results."""
- reference = _create_complex_tensor(1)
- xml.save(reference, self.f, format='binary')
- test_data = xml.load(self.f)
- assert np.array_equal(test_data, reference)
-
- def test_save_empty_vector(self):
- """Save empty Vector to file, read it and compare the results."""
- reference = _create_empty_tensor(1)
- xml.save(reference, self.f)
- test_data = xml.load(self.f)
- assert np.array_equal(test_data, reference)
-
- def test_save_empty_matrix(self):
- """Save empty Matrix to file, read it and compare the results."""
- reference = _create_empty_tensor(2)
- xml.save(reference, self.f)
- test_data = xml.load(self.f)
- assert np.array_equal(test_data, reference)
-
- def test_save_matrix(self):
- """Save Matrix to file, read it and compare the results."""
- reference = _create_tensor(2)
- xml.save(reference, self.f)
- test_data = xml.load(self.f)
- assert np.array_equal(test_data, reference)
-
- def test_save_matrix_binary(self):
- """Save Matrix to file, read it and compare the results."""
- reference = _create_tensor(2)
- xml.save(reference, self.f, format='binary')
- test_data = xml.load(self.f)
- assert np.array_equal(test_data, reference)
-
- @pytest.mark.parametrize('fileformat', ['ascii', 'binary'])
- def test_sparse(self, fileformat):
- """Save Sparse to file, read it and compare the result."""
- reference = _create_sparse(10)
- xml.save(reference, self.f, format=fileformat)
- test_data = xml.load(self.f)
- assert np.array_equal(test_data.toarray(), reference.toarray())
-
- def test_save_complex_matrix(self):
- """Save complex Matrix to file, read it and compare the results."""
- reference = _create_complex_tensor(2)
- xml.save(reference, self.f)
- test_data = xml.load(self.f)
- assert np.array_equal(test_data, reference)
-
- def test_save_complex_matrix_binary(self):
- """Save complex Matrix to file, read it and compare the results."""
- reference = _create_complex_tensor(2)
- xml.save(reference, self.f, format='binary')
- test_data = xml.load(self.f)
- assert np.array_equal(test_data, reference)
-
- def test_save_arrayofindex(self):
- """Save ArrayOfIndex to file, read it and compare the results."""
- reference = [1., 2., 3.]
- xml.save(reference, self.f)
- test_data = xml.load(self.f)
- assert np.array_equal(test_data, reference)
-
- def test_save_arrayofindex_binary(self):
- """Save ArrayOfIndex to binary file, read it and compare the result."""
- reference = [1., 2., 3.]
- xml.save(reference, self.f, format='binary')
- test_data = xml.load(self.f)
- assert np.array_equal(test_data, reference)
-
- def test_save_arrayofstring(self):
- """Save ArrayOfString to file, read it and compare the results."""
- reference = ['a', 'bb', 'ccc']
- xml.save(reference, self.f)
- test_data = xml.load(self.f)
- assert np.array_equal(test_data, reference)
-
- def test_save_arrayofvector(self):
- """Save ArrayOfIndex to file, read it and compare the results."""
- reference = [np.arange(1), np.arange(1)]
- xml.save(reference, self.f)
- test_data = xml.load(self.f)
- assert np.array_equal(test_data, reference)
-
- def test_save_gzip(self):
- """Test writing/reading of gzipped files."""
- f = self.f + '.gz'
- ref = np.arange(10)
-
- xml.save(ref, f)
-
- assert np.array_equal(ref, xml.load(f))
-
- def test_save_binary_gzip(self):
- """Check for exception when attempting to write zipped binary file."""
- f = self.f + '.gz'
- ref = np.arange(10)
-
- with pytest.raises(Exception):
- xml.save(ref, f, format='binary')
-
- @pytest.mark.parametrize('n', range(3, 8))
- @pytest.mark.parametrize('fileformat', ['ascii', 'binary'])
- def test_save_load_tensor(self, n, fileformat):
- """Save tensor of dimension n to file, read it and compare data to
- reference.
-
- Args:
- n (int): number of dimensions
- fileformat (str): 'ascii' or 'binary'.
-
- """
- reference = _create_tensor(n)
- xml.save(reference, self.f, format=fileformat)
- test_data = xml.load(self.f)
- assert np.array_equal(test_data, reference)
-
- @pytest.mark.parametrize('n', range(3, 8))
- def test_save_empty_tensor(self, n):
- """Save empty tensor of dimension n to file, read it and compare data
- to reference.
-
- Args:
- n (int): number of dimensions
-
- """
- reference = _create_empty_tensor(n)
- xml.save(reference, self.f)
- test_data = xml.load(self.f)
- assert np.array_equal(test_data, reference)
diff --git a/typhon/tests/arts/xml/test_xml.py b/typhon/tests/arts/xml/test_xml.py
deleted file mode 100644
index bc6e6445..00000000
--- a/typhon/tests/arts/xml/test_xml.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Testing high-level functionality in typhon.arts.xml.
-"""
-from os.path import (dirname, join)
-
-import numpy as np
-import pytest
-
-from typhon.arts import xml
-
-
-class TestXML:
- """Testing high-level functionality in typhon.arts.xml."""
- ref_dir = join(dirname(__file__), "reference")
-
- def test_load_directory(self):
- """Test loading all XML files in a directory."""
- t = xml.load_directory(self.ref_dir)
- ref = xml.load(join(self.ref_dir, 'vector.xml'))
-
- assert np.allclose(t['vector'], ref)
-
- def test_load_directory_exclude(self):
- """Test excluding files when loading directory content."""
- t = xml.load_directory(self.ref_dir, exclude=['vector.xml'])
-
- with pytest.raises(KeyError):
- t['vector']
diff --git a/typhon/tests/plots/reference/abs_lookup_small.xml b/typhon/tests/plots/reference/abs_lookup_small.xml
deleted file mode 100644
index 54c09c84..00000000
--- a/typhon/tests/plots/reference/abs_lookup_small.xml
+++ /dev/null
@@ -1,53 +0,0 @@
-
-
-
-
-"H2O-*-*-*"
-"H2O-SelfContCKDMT252-*-*"
-"H2O-ForeignContCKDMT252-*-*"
-
-
-"O3-*-*-*"
-
-
-"CO2-*-*-*"
-"CO2-CKDMT252-*-*"
-
-
-"N2O-*-*-*"
-
-
-"CO-*-*-*"
-
-
-"CH4-*-*-*"
-
-
-"O2-*-*-*"
-"O2-CIAfunCKDMT100-*-*"
-
-
-"N2-*-*-*"
-"N2-CIAfunCKDMT252-*-*"
-"N2-CIArotCKDMT252-*-*"
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/typhon/tests/plots/reference/abs_lookup_small.xml.bin b/typhon/tests/plots/reference/abs_lookup_small.xml.bin
deleted file mode 100644
index 1b174f92..00000000
Binary files a/typhon/tests/plots/reference/abs_lookup_small.xml.bin and /dev/null differ