From 59c30ff39cbfda3c46227cb02c89140f6c6ee5af Mon Sep 17 00:00:00 2001 From: Arnaud Sevin Date: Mon, 26 Apr 2021 11:26:36 +0200 Subject: [PATCH] COMPASS v5.1 release notes : New class ParamConfig to handle parameters configuration : supervisor constructor requires an instance of it now. This change Multi GPU controller generic improvements Standalone RTC debug + pytests Add leaky factor in the generic controller Add [CLOSE](https://arxiv.org/abs/2103.09921) algorithm implementation Multi controllers support in the supervisor Sub-pixels move for phase screens GuARDIANS package updated Code documentation update --- doc/shesha.rst | 2 +- doc/shesha_sim.rst | 33 - doc/shesha_supervisor.rst | 37 +- doc/sutra_wrap.rst | 88 ++ guardians/__init__.py | 4 + guardians/drax.py | 930 ++++++++++++++++++ guardians/gamora.py | 406 ++++++++ guardians/groot.py | 903 +++++++++++++++++ guardians/misc/correlations/bash_script.sh | 24 + .../misc/correlations/correlation_bokeh.py | 340 +++++++ .../misc/correlations/correlation_study.py | 737 ++++++++++++++ guardians/misc/correlations/script_roket.py | 488 +++++++++ .../misc/correlations/script_roket_cpu.py | 713 ++++++++++++++ guardians/misc/layer_linearity/layers_test.py | 133 +++ guardians/misc/roket_cpu.py | 702 +++++++++++++ guardians/misc/roket_gpu.py | 500 ++++++++++ guardians/misc/roket_widget.py | 809 +++++++++++++++ guardians/misc/valid_roket_files.py | 45 + guardians/roket.py | 468 +++++++++ guardians/scripts/bash_roket.sh | 24 + guardians/scripts/convergence.py | 24 + guardians/scripts/old2new_attrs.py | 127 +++ guardians/scripts/psf_error.py | 45 + guardians/scripts/script_roket.py | 87 ++ guardians/starlord.py | 140 +++ guardians/widgets/bokeh_gamora.py | 28 + guardians/widgets/bokeh_groot.py | 28 + guardians/widgets/bokeh_guardian.py | 25 + guardians/widgets/bokeh_roket.py | 28 + guardians/widgets/widget_gamora.py | 174 ++++ guardians/widgets/widget_groot.py | 295 ++++++ guardians/widgets/widget_guardian.py | 21 + 
guardians/widgets/widget_roket.py | 240 +++++ shesha/__init__.py | 2 +- shesha/ao/__init__.py | 2 +- shesha/ao/basis.py | 26 +- shesha/ao/cmats.py | 10 +- shesha/ao/imats.py | 51 +- shesha/ao/modopti.py | 4 +- shesha/ao/tomo.py | 8 +- shesha/ao/wfs.py | 8 +- shesha/config/PATMOS.py | 2 +- shesha/config/PCENTROIDER.py | 2 +- shesha/config/PCONTROLLER.py | 112 ++- shesha/config/PDMS.py | 19 +- shesha/config/PGEOM.py | 2 +- shesha/config/PLOOP.py | 8 +- shesha/config/PTARGET.py | 2 +- shesha/config/PTEL.py | 2 +- shesha/config/PWFS.py | 18 +- shesha/config/__init__.py | 5 +- shesha/config/config_setter_utils.py | 2 +- shesha/config/pconfig.py | 407 ++++++++ shesha/constants.py | 24 +- shesha/init/__init__.py | 2 +- shesha/init/atmos_init.py | 24 +- shesha/init/dm_init.py | 49 +- shesha/init/geom_init.py | 28 +- shesha/init/lgs_init.py | 6 +- shesha/init/rtc_init.py | 186 +++- shesha/init/target_init.py | 4 +- shesha/init/wfs_init.py | 4 +- shesha/scripts/__init__.py | 4 +- shesha/scripts/closed_loop.py | 10 +- shesha/scripts/dm_standalone.py | 4 +- shesha/supervisor/__init__.py | 2 +- shesha/supervisor/benchSupervisor.py | 328 +++--- shesha/supervisor/canapassSupervisor.py | 55 +- shesha/supervisor/compassSupervisor.py | 239 ++++- shesha/supervisor/components/__init__.py | 18 +- shesha/supervisor/components/atmosCompass.py | 12 +- shesha/supervisor/components/dmCompass.py | 16 +- shesha/supervisor/components/rtc/__init__.py | 40 + .../{rtcCompass.py => rtc/rtcAbstract.py} | 160 +-- .../supervisor/components/rtc/rtcCompass.py | 99 ++ .../components/rtc/rtcStandalone.py | 111 +++ shesha/supervisor/components/sourceCompass.py | 11 +- shesha/supervisor/components/targetCompass.py | 16 +- .../supervisor/components/telescopeCompass.py | 2 +- shesha/supervisor/components/wfsCompass.py | 30 +- shesha/supervisor/genericSupervisor.py | 241 +---- shesha/supervisor/optimizers/__init__.py | 5 +- shesha/supervisor/optimizers/calibration.py | 15 +- 
shesha/supervisor/optimizers/modalBasis.py | 15 +- shesha/supervisor/optimizers/modalGains.py | 215 ++++ shesha/sutra_wrap.py | 37 - shesha/util/__init__.py | 2 +- shesha/util/dm_util.py | 18 +- shesha/util/hdf5_util.py | 67 +- shesha/util/influ_util.py | 18 +- shesha/util/iterkolmo.py | 8 +- shesha/util/kl_util.py | 22 +- shesha/util/make_apodizer.py | 4 +- shesha/util/make_pupil.py | 62 +- shesha/util/psfMap.py | 2 +- shesha/util/rtc_util.py | 6 +- shesha/util/tao/__init__.py | 149 +++ shesha/util/tao/ltao.py | 70 ++ shesha/util/tao/mcao.py | 69 ++ shesha/util/tao/writer.py | 513 ++++++++++ shesha/util/tools.py | 6 +- shesha/util/utilities.py | 90 +- shesha/util/writers/common/__init__.py | 5 + shesha/util/writers/common/atmos.py | 35 + shesha/util/writers/common/dm.py | 55 ++ shesha/util/writers/common/fits.py | 154 +++ shesha/util/writers/common/imat.py | 40 + shesha/util/writers/common/wfs.py | 161 +++ shesha/util/writers/tao/__init__.py | 41 + shesha/util/writers/tao/atmParams.py | 18 + shesha/util/writers/tao/sysParams.py | 106 ++ shesha/util/writers/yao/__init__.py | 70 ++ shesha/util/writers/yao/atmos.py | 37 + shesha/util/writers/yao/dm.py | 86 ++ shesha/util/writers/yao/general.py | 38 + shesha/util/writers/yao/gs.py | 23 + shesha/util/writers/yao/loop.py | 26 + shesha/util/writers/yao/targets.py | 30 + shesha/util/writers/yao/wfs.py | 127 +++ shesha/widgets/__init__.py | 2 +- shesha/widgets/widget_ao.py | 10 +- shesha/widgets/widget_base.py | 2 +- tests/check.py | 50 +- tests/checkCompass.sh | 6 +- tests/old_scripts/benchmark.sh | 79 ++ tests/old_scripts/benchmark_script.py | 420 ++++++++ tests/old_scripts/closed_loop_fake_wfs.py | 405 ++++++++ tests/old_scripts/closed_loop_mpi.py | 197 ++++ tests/old_scripts/debug_pyr.py | 117 +++ tests/old_scripts/dm_scripts/par_dm.py | 69 ++ tests/old_scripts/dm_scripts/script_dminit.py | 60 ++ tests/old_scripts/nono_loop.py | 41 + tests/old_scripts/open_loop.py | 107 ++ tests/old_scripts/scriptBashCompass_PYR39m.sh 
| 65 ++ tests/old_scripts/scriptBashCompass_SH39m.sh | 97 ++ tests/old_scripts/script_PYR39m.py | 392 ++++++++ tests/old_scripts/script_PYR39m_optimGain.py | 715 ++++++++++++++ tests/old_scripts/script_SH39m.py | 392 ++++++++ tests/old_scripts/test_rtc_standalone.py | 79 ++ tests/old_scripts/test_rtc_standalone_pyr.py | 89 ++ tests/pytest/rtc/FP32/test_rtcFFF_pyramid.py | 60 -- tests/pytest/rtc/{FP32 => }/test_rtcFFF.py | 111 ++- tests/pytest/rtc/test_rtcFFF_pyramid.py | 97 ++ tests/pytest/rtc/{FP32 => }/test_rtcFFU.py | 125 ++- tests/pytest/rtc/{FP32 => }/test_rtcUFF.py | 125 ++- tests/pytest/rtc/{FP32 => }/test_rtcUFU.py | 125 ++- .../FP32/test_rtcstandalone_maskedpix.py | 108 ++ .../FP32/test_rtcstandalone_sh.py | 267 +++++ .../supervisor/test_compassSupervisor.py | 122 ++- 149 files changed, 16795 insertions(+), 1249 deletions(-) delete mode 100644 doc/shesha_sim.rst create mode 100644 doc/sutra_wrap.rst create mode 100644 guardians/__init__.py create mode 100644 guardians/drax.py create mode 100644 guardians/gamora.py create mode 100644 guardians/groot.py create mode 100755 guardians/misc/correlations/bash_script.sh create mode 100644 guardians/misc/correlations/correlation_bokeh.py create mode 100644 guardians/misc/correlations/correlation_study.py create mode 100644 guardians/misc/correlations/script_roket.py create mode 100644 guardians/misc/correlations/script_roket_cpu.py create mode 100644 guardians/misc/layer_linearity/layers_test.py create mode 100644 guardians/misc/roket_cpu.py create mode 100644 guardians/misc/roket_gpu.py create mode 100644 guardians/misc/roket_widget.py create mode 100644 guardians/misc/valid_roket_files.py create mode 100644 guardians/roket.py create mode 100755 guardians/scripts/bash_roket.sh create mode 100644 guardians/scripts/convergence.py create mode 100644 guardians/scripts/old2new_attrs.py create mode 100644 guardians/scripts/psf_error.py create mode 100644 guardians/scripts/script_roket.py create mode 100644 
guardians/starlord.py create mode 100644 guardians/widgets/bokeh_gamora.py create mode 100644 guardians/widgets/bokeh_groot.py create mode 100644 guardians/widgets/bokeh_guardian.py create mode 100644 guardians/widgets/bokeh_roket.py create mode 100644 guardians/widgets/widget_gamora.py create mode 100644 guardians/widgets/widget_groot.py create mode 100644 guardians/widgets/widget_guardian.py create mode 100644 guardians/widgets/widget_roket.py create mode 100644 shesha/config/pconfig.py create mode 100644 shesha/supervisor/components/rtc/__init__.py rename shesha/supervisor/components/{rtcCompass.py => rtc/rtcAbstract.py} (86%) create mode 100644 shesha/supervisor/components/rtc/rtcCompass.py create mode 100644 shesha/supervisor/components/rtc/rtcStandalone.py create mode 100644 shesha/supervisor/optimizers/modalGains.py create mode 100644 shesha/util/tao/__init__.py create mode 100644 shesha/util/tao/ltao.py create mode 100644 shesha/util/tao/mcao.py create mode 100644 shesha/util/tao/writer.py create mode 100644 shesha/util/writers/common/__init__.py create mode 100644 shesha/util/writers/common/atmos.py create mode 100644 shesha/util/writers/common/dm.py create mode 100644 shesha/util/writers/common/fits.py create mode 100644 shesha/util/writers/common/imat.py create mode 100644 shesha/util/writers/common/wfs.py create mode 100644 shesha/util/writers/tao/__init__.py create mode 100644 shesha/util/writers/tao/atmParams.py create mode 100644 shesha/util/writers/tao/sysParams.py create mode 100644 shesha/util/writers/yao/__init__.py create mode 100644 shesha/util/writers/yao/atmos.py create mode 100644 shesha/util/writers/yao/dm.py create mode 100644 shesha/util/writers/yao/general.py create mode 100644 shesha/util/writers/yao/gs.py create mode 100644 shesha/util/writers/yao/loop.py create mode 100644 shesha/util/writers/yao/targets.py create mode 100644 shesha/util/writers/yao/wfs.py create mode 100755 tests/old_scripts/benchmark.sh create mode 100644 
tests/old_scripts/benchmark_script.py create mode 100644 tests/old_scripts/closed_loop_fake_wfs.py create mode 100644 tests/old_scripts/closed_loop_mpi.py create mode 100644 tests/old_scripts/debug_pyr.py create mode 100644 tests/old_scripts/dm_scripts/par_dm.py create mode 100644 tests/old_scripts/dm_scripts/script_dminit.py create mode 100644 tests/old_scripts/nono_loop.py create mode 100644 tests/old_scripts/open_loop.py create mode 100755 tests/old_scripts/scriptBashCompass_PYR39m.sh create mode 100755 tests/old_scripts/scriptBashCompass_SH39m.sh create mode 100644 tests/old_scripts/script_PYR39m.py create mode 100644 tests/old_scripts/script_PYR39m_optimGain.py create mode 100644 tests/old_scripts/script_SH39m.py create mode 100644 tests/old_scripts/test_rtc_standalone.py create mode 100644 tests/old_scripts/test_rtc_standalone_pyr.py delete mode 100644 tests/pytest/rtc/FP32/test_rtcFFF_pyramid.py rename tests/pytest/rtc/{FP32 => }/test_rtcFFF.py (61%) create mode 100644 tests/pytest/rtc/test_rtcFFF_pyramid.py rename tests/pytest/rtc/{FP32 => }/test_rtcFFU.py (61%) rename tests/pytest/rtc/{FP32 => }/test_rtcUFF.py (60%) rename tests/pytest/rtc/{FP32 => }/test_rtcUFU.py (61%) create mode 100644 tests/pytest/rtc_standalone/FP32/test_rtcstandalone_maskedpix.py create mode 100644 tests/pytest/rtc_standalone/FP32/test_rtcstandalone_sh.py diff --git a/doc/shesha.rst b/doc/shesha.rst index e7c57de..2a41a7e 100644 --- a/doc/shesha.rst +++ b/doc/shesha.rst @@ -5,6 +5,6 @@ shesha_config.rst shesha_constants.rst shesha_init.rst - shesha_sim.rst shesha_supervisor.rst shesha_util.rst + sutra_wrap.rst diff --git a/doc/shesha_sim.rst b/doc/shesha_sim.rst deleted file mode 100644 index 5573330..0000000 --- a/doc/shesha_sim.rst +++ /dev/null @@ -1,33 +0,0 @@ -***************** -:mod:`shesha.sim` -***************** - -.. _shesha.sim module: - -.. automodule:: shesha.sim - :members: - -.. _shesha.sim.bench.Bench class: - -.. autoclass:: shesha.sim.bench.Bench - :members: - -.. 
_shesha.sim.simulator.Simulator class: - -.. autoclass:: shesha.sim.simulator.Simulator - :members: - -.. _shesha.sim.simulatorRTC.SimulatorRTC class: - -.. autoclass:: shesha.sim.simulatorRTC.SimulatorRTC - :members: - -.. _shesha.sim.benchBrahma.BenchBrahma class: - -.. autoclass:: shesha.sim.benchBrahma.BenchBrahma - :members: - -.. _shesha.sim.simulatorBrahma.SimulatorBrahma class: - -.. autoclass:: shesha.sim.simulatorBrahma.SimulatorBrahma - :members: diff --git a/doc/shesha_supervisor.rst b/doc/shesha_supervisor.rst index 707e3f8..321b48f 100644 --- a/doc/shesha_supervisor.rst +++ b/doc/shesha_supervisor.rst @@ -7,17 +7,42 @@ .. automodule:: shesha.supervisor :members: -.. _shesha.supervisor.compassSupervisor.CompassSupervisor class: +.. _shesha.supervisor.components.rtc.rtcCompass.RtcCompass class: -.. autoclass:: shesha.supervisor.compassSupervisor.CompassSupervisor +.. autoclass:: shesha.supervisor.components.rtc.rtcCompass.RtcCompass :members: -.. _shesha.supervisor.rtcSupervisor.RTCSupervisor class: +.. _shesha.supervisor.components.rtc.rtcStandalone.RtcStandalone class: -.. autoclass:: shesha.supervisor.rtcSupervisor.RTCSupervisor +.. autoclass:: shesha.supervisor.components.rtc.rtcStandalone.RtcStandalone :members: -.. _shesha.supervisor.benchSupervisor.BenchSupervisor class: +.. _shesha.supervisor.components.atmosCompass.AtmosCompass class: -.. autoclass:: shesha.supervisor.benchSupervisor.BenchSupervisor +.. autoclass:: shesha.supervisor.components.atmosCompass.AtmosCompass + :members: + +.. _shesha.supervisor.components.dmCompass.DmCompass class: + +.. autoclass:: shesha.supervisor.components.dmCompass.DmCompass + :members: + +.. _shesha.supervisor.components.sourceCompass.SourceCompass class: + +.. autoclass:: shesha.supervisor.components.sourceCompass.SourceCompass + :members: + +.. _shesha.supervisor.components.targetCompass.TargetCompass class: + +.. autoclass:: shesha.supervisor.components.targetCompass.TargetCompass + :members: + +.. 
_shesha.supervisor.components.telescope.Telescope class: + +.. autoclass:: shesha.supervisor.components.telescope.Telescope + :members: + +.. _shesha.supervisor.components.wfsCompass.WfsCompass class: + +.. autoclass:: shesha.supervisor.components.wfsCompass.WfsCompass :members: diff --git a/doc/sutra_wrap.rst b/doc/sutra_wrap.rst new file mode 100644 index 0000000..aa710e4 --- /dev/null +++ b/doc/sutra_wrap.rst @@ -0,0 +1,88 @@ +************************ +:mod:`shesha.sutra_wrap` +************************ + +.. _shesha.sutra_wrap module: + +.. automodule:: shesha.sutra_wrap + :members: + +.. _shesha.sutra_wrap.Atmos class: + +.. autoclass:: shesha.sutra_wrap.Atmos + :members: + +.. _shesha.sutra_wrap.carmaWrap_context class: + +.. autoclass:: shesha.sutra_wrap.carmaWrap_context + :members: + +.. _shesha.sutra_wrap.Dms class: + +.. autoclass:: shesha.sutra_wrap.Dms + :members: + +.. _shesha.sutra_wrap.Gamora class: + +.. autoclass:: shesha.sutra_wrap.Gamora + :members: + +.. _shesha.sutra_wrap.Groot class: + +.. autoclass:: shesha.sutra_wrap.Groot + :members: + +.. _shesha.sutra_wrap.Rtc_FFF class: + +.. autoclass:: shesha.sutra_wrap.Rtc_FFF + :members: + +.. _shesha.sutra_wrap.Rtc_FFU class: + +.. autoclass:: shesha.sutra_wrap.Rtc_FFU + :members: + +.. _shesha.sutra_wrap.Rtc_FHF class: + +.. autoclass:: shesha.sutra_wrap.Rtc_FHF + :members: + +.. _shesha.sutra_wrap.Rtc_FHU class: + +.. autoclass:: shesha.sutra_wrap.Rtc_FHU + :members: + +.. _shesha.sutra_wrap.Rtc_UFF class: + +.. autoclass:: shesha.sutra_wrap.Rtc_UFF + :members: + +.. _shesha.sutra_wrap.Rtc_UFU class: + +.. autoclass:: shesha.sutra_wrap.Rtc_UFU + :members: + +.. _shesha.sutra_wrap.Rtc_UHF class: + +.. autoclass:: shesha.sutra_wrap.Rtc_UHF + :members: + +.. _shesha.sutra_wrap.Rtc_UHU class: + +.. autoclass:: shesha.sutra_wrap.Rtc_UHU + :members: + +.. _shesha.sutra_wrap.Sensors class: + +.. autoclass:: shesha.sutra_wrap.Sensors + :members: + +.. _shesha.sutra_wrap.Target class: + +.. 
autoclass:: shesha.sutra_wrap.Target + :members: + +.. _shesha.sutra_wrap.Telescope class: + +.. autoclass:: shesha.sutra_wrap.Telescope + :members: diff --git a/guardians/__init__.py b/guardians/__init__.py new file mode 100644 index 0000000..8b6f8c5 --- /dev/null +++ b/guardians/__init__.py @@ -0,0 +1,4 @@ +""" +GuARDIANs (packaGe for Ao eRror breakDown estImation And exploitatioN) +""" +__all__ = ["groot", "gamora", "roket", "drax", "starlord"] diff --git a/guardians/drax.py b/guardians/drax.py new file mode 100644 index 0000000..009523c --- /dev/null +++ b/guardians/drax.py @@ -0,0 +1,930 @@ +""" +DRAX (Dedicated functions for Roket file Analysis and eXploitation) + +Useful functions for ROKET file exploitation +""" + +import numpy as np +import h5py +import pandas +import matplotlib.pyplot as plt +plt.ion() +from scipy.sparse import csr_matrix + + +def variance(f, contributors, method="Default"): + """ Return the error variance of specified contributors + params: + f : (h5py.File) : roket hdf5 file opened with h5py + contributors : (list of string) : list of the contributors + method : (optional, default="Default") : if "Independence", the + function returns ths sum of the contributors variances. + If "Default", it returns the variance of the contributors sum + """ + P = f["P"][:] + nmodes = P.shape[0] + swap = np.arange(nmodes) - 2 + swap[0:2] = [nmodes - 2, nmodes - 1] + if (method == "Default"): + err = f[contributors[0]][:] * 0. 
+ for c in contributors: + err += f[c][:] + return np.var(P.dot(err), axis=1) #[swap] + + elif (method == "Independence"): + nmodes = P.shape[0] + v = np.zeros(nmodes) + for c in contributors: + v += np.var(P.dot(f[c][:]), axis=1) + return v #[swap] + + else: + raise TypeError("Wrong method input") + + +def varianceMultiFiles(fs, frac_per_layer, contributors): + """ Return the variance computed from the sum of contributors of roket + files fs, ponderated by frac + params: + fs : (list) : list of hdf5 files opened with h5py + frac_per_layer : (dict) : frac for each layer + contributors : (list of string) : list of the contributors + return: + v : (np.array(dim=1)) : variance vector + """ + f = fs[0] + P = f["P"][:] + nmodes = P.shape[0] + swap = np.arange(nmodes) - 2 + swap[0:2] = [nmodes - 2, nmodes - 1] + err = f[contributors[0]][:] * 0. + for f in fs: + frac = frac_per_layer[f.attrs["_Param_atmos__.alt"][0]] + for c in contributors: + err += np.sqrt(frac) * f[c][:] + + return np.var(P.dot(err), axis=1) #[swap] + + +def cumulativeSR(v, Lambda_tar): + """ Returns the cumulative Strehl ratio over the modes from the variance + on each mode + params: + v : (np.array(dim=1)) : variance vector + return: + s : (np.array(dim=1)) : cumulative SR + """ + s = np.cumsum(v) + s = np.exp(-s * (2 * np.pi / Lambda_tar)**2) + + return s + + +def get_cumSR(filename): + """ + Compute the SR over the modes from the variance + on each mode + + Args: + filename: (str): path to the ROKET file + """ + f = h5py.File(filename, 'r') + error_list = [ + "noise", "aliasing", "tomography", "filtered modes", "non linearity", + "bandwidth" + ] + if (list(f.attrs.keys()).count("_Param_target__Lambda")): + Lambda = f.attrs["_Param_target__Lambda"][0] + else: + Lambda = 1.65 + nactus = f["noise"][:].shape[0] + niter = f["noise"][:].shape[1] + P = f["P"][:] + nmodes = P.shape[0] + swap = np.arange(nmodes) - 2 + swap[0:2] = [nmodes - 2, nmodes - 1] + data = np.zeros((nmodes, niter)) + data2 = 
np.zeros(nmodes) + + for i in error_list: + data += np.dot(P, f[i][:]) + data2 += np.var(np.dot(P, f[i][:]), axis=1) + + data = np.var(data, axis=1) + data = np.cumsum(data[swap]) + data = np.exp(-data * (2 * np.pi / Lambda)**2) + data2 = np.cumsum(data2[swap]) + data2 = np.exp(-data2 * (2 * np.pi / Lambda)**2) + data *= np.exp(-f["fitting"].value) + data2 *= np.exp(-f["fitting"].value) + + SR2 = np.ones(nmodes) * f["SR2"].value + SR = np.ones(nmodes) * f["SR"].value + + return data, data2, SR, SR2 + + +def get_Btt(filename): + """ + Return the Modes to Volt matrix + Args: + filename: (str): path to the ROKET file + """ + f = h5py.File(filename, 'r') + return f["Btt"][:] + + +def get_P(filename): + """ + Return the Volt to Modes matrix + Args: + filename: (str): path to the ROKET file + """ + f = h5py.File(filename, 'r') + return f["P"][:] + + +def get_contribution(filename, contributor): + """ + Return the variance of an error contributor + + Args: + filename: (str): path to the ROKET file + contributor: (str): contributor name + :return: + v: (np.array[ndim=1, dtype=np.float32]): variance of the contributor + """ + f = h5py.File(filename, 'r') + P = f["P"][:] + nmodes = P.shape[0] + swap = np.arange(nmodes) - 2 + swap[0:2] = [nmodes - 2, nmodes - 1] + + return np.var(np.dot(P, f[contributor][:]), axis=1) #[swap] + + +def get_err_contributors(filename, contributors): + """ + Return the sum of the specified contributors error buffers + + Args: + filename: (str): path to the ROKET file + contributors: (list): list of contributors + :return: + err: (np.ndarray[ndim=2,dtype=np.float32]): Sum of the error buffers + """ + f = h5py.File(filename, 'r') + # Get the sum of error contributors + err = f["noise"][:] * 0. 
+ for c in contributors: + err += f[c][:] + f.close() + + return err + + +def get_err(filename): + """ + Return the sum of all the error buffers + + Args: + filename: (str): path to the ROKET file + :return: + err: (np.ndarray[ndim=2,dtype=np.float32]): Sum of the error buffers + """ + + f = h5py.File(filename, 'r') + # Get the sum of error contributors + err = f["noise"][:] + err += f["aliasing"][:] + err += f["tomography"][:] + err += f["filtered modes"][:] + err += f["non linearity"][:] + err += f["bandwidth"][:] + f.close() + + return err + + +def get_coverr_independence(filename): + """ + Return the error covariance matrix considering statistical independence between contributors + + Args: + filename: (str): path to the ROKET file + :return: + err: (np.ndarray[ndim=2,dtype=np.float32]): Covariance matrix + """ + + f = h5py.File(filename, 'r') + # Get the sum of error contributors + N = f["noise"][:].shape[1] + err = f["noise"][:].dot(f["noise"][:].T) + err += f["aliasing"][:].dot(f["aliasing"][:].T) + err += f["tomography"][:].dot(f["tomography"][:].T) + err += f["filtered modes"][:].dot(f["filtered modes"][:].T) + err += f["non linearity"][:].dot(f["non linearity"][:].T) + err += f["bandwidth"][:].dot(f["bandwidth"][:].T) + f.close() + + return err / N + + +def get_coverr_independence_contributors(filename, contributors): + """ + Return the error covariance matrix considering statistical independence between specified contributors + + Args: + filename: (str): path to the ROKET file + contributors: (list): list of contributors + :return: + err: (np.ndarray[ndim=2,dtype=np.float32]): Covariance matrix + """ + + f = h5py.File(filename, 'r') + # Get the sum of error contributors + N = f["noise"][:].shape[1] + err = np.zeros((f["noise"][:].shape[0], f["noise"][:].shape[0])) + for c in contributors: + err += f[c][:].dot(f[c][:].T) + + f.close() + + return err / N + + +def get_covmat_contrib(filename, contributors, modal=True): + """ + Return the covariance matrix 
of the specified contributors + + Args: + filename: (str): path to the ROKET file + contributor: (list): name of a contributor of the ROKET file + modal: (bool): if True (default), return the matrix expressed in the modal basis + :return: + covmat: (np.ndarray(ndim=2, dtype=np.float32)): covariance matrix + """ + h5f = h5py.File(filename, 'r') + contrib = h5f["bandwidth"][:] * 0. + for c in contributors: + contrib += h5f[c][:] + covmat = contrib.dot(contrib.T) / contrib.shape[1] + if modal: + P = h5f["P"][:] + covmat = P.dot(covmat).dot(P.T) + h5f.close() + + return covmat + + +def get_pup(filename): + """ + Return the pupil saved in a ROKET file + Args: + filename: (str): path to the ROKET file + :return: + spup: (np.ndarray[ndim=2,dtype=np.float32]): pupil + + """ + f = h5py.File(filename, 'r') + if (list(f.keys()).count("spup")): + spup = f["spup"][:] + else: + indx_pup = f["indx_pup"][:] + pup = np.zeros((f["dm_dim"].value, f["dm_dim"].value)) + pup_F = pup.flatten() + pup_F[indx_pup] = 1. 
+ pup = pup_F.reshape(pup.shape) + spup = pup[np.where(pup)[0].min():np.where(pup)[0].max() + 1, + np.where(pup)[1].min():np.where(pup)[1].max() + 1] + + f.close() + return spup + + +def get_breakdown(filename): + """ + Computes the error breakdown in nm rms from a ROKET file + + Args: + filename: (str): path to the ROKET file + :return: + breakdown: (dict): dictionnary containing the error breakdown + """ + f = h5py.File(filename, 'r') + P = f["P"][:] + noise = f["noise"][:] + trunc = f["non linearity"][:] + bp = f["bandwidth"][:] + tomo = f["tomography"][:] + aliasing = f["aliasing"][:] + filt = f["filtered modes"][:] + nmodes = P.shape[0] + swap = np.arange(nmodes) - 2 + swap[0:2] = [nmodes - 2, nmodes - 1] + N = np.var(P.dot(noise), axis=1) + S = np.var(P.dot(trunc), axis=1) + B = np.var(P.dot(bp), axis=1) + T = np.var(P.dot(tomo), axis=1) + A = np.var(P.dot(aliasing), axis=1) + F = np.var(P.dot(filt), axis=1) + C = np.var(P.dot(filt + noise + trunc + bp + tomo + aliasing), axis=1) + inde = N + S + B + T + A + F + + if (list(f.attrs.keys()).count("_Param_target__Lambda")): + Lambda = f.attrs["_Param_target__Lambda"][0] + else: + Lambda = 1.65 + + print("noise :", np.sqrt(np.sum(N)) * 1e3, " nm rms") + print("trunc :", np.sqrt(np.sum(S)) * 1e3, " nm rms") + print("bp :", np.sqrt(np.sum(B)) * 1e3, " nm rms") + print("tomo :", np.sqrt(np.sum(T)) * 1e3, " nm rms") + print("aliasing :", np.sqrt(np.sum(A)) * 1e3, " nm rms") + print("filt :", np.sqrt(np.sum(F)) * 1e3, " nm rms") + print("fitting :", + np.mean(np.sqrt(f["fitting"].value / ((2 * np.pi / Lambda)**2)) * 1e3), + " nm rms") + print("cross-terms :", np.sqrt(np.abs(np.sum(C) - np.sum(inde))) * 1e3, " nm rms") + return { + "noise": + np.sqrt(np.sum(N)) * 1e3, + "non linearity": + np.sqrt(np.sum(S)) * 1e3, + "bandwidth": + np.sqrt(np.sum(B)) * 1e3, + "tomography": + np.sqrt(np.sum(T)) * 1e3, + "aliasing": + np.sqrt(np.sum(A)) * 1e3, + "filtered modes": + np.sqrt(np.sum(F)) * 1e3, + "fitting": + np.mean( + 
np.sqrt(f["fitting"].value / + ((2 * np.pi / Lambda)**2)) * 1e3) + } + + +# def plotContributions(filename): +# f = h5py.File(filename, 'r') +# P = f["P"][:] +# noise = f["noise"][:] +# trunc = f["non linearity"][:] +# bp = f["bandwidth"][:] +# tomo = f["tomography"][:] +# aliasing = f["aliasing"][:] +# filt = f["filtered modes"][:] +# nmodes = P.shape[0] +# swap = np.arange(nmodes) - 2 +# swap[0:2] = [nmodes - 2, nmodes - 1] + +# plt.figure() +# plt.plot(np.var(noise, axis=1), color="black") +# plt.plot(np.var(trunc, axis=1), color="green") +# plt.plot(np.var(bp, axis=1), color="red") +# plt.plot(np.var(tomo, axis=1), color="blue") +# plt.plot(np.var(aliasing, axis=1), color="cyan") +# plt.plot(np.var(filt, axis=1), color="magenta") +# plt.xlabel("Actuators") +# plt.ylabel("Variance [microns^2]") +# plt.title("Variance of estimated errors on actuators") +# plt.legend([ +# "noise", "WFS non-linearity", "Bandwidth", "Anisoplanatism", "Aliasing", +# "Filtered modes" +# ]) + +# plt.figure() +# N = np.var(P.dot(noise), axis=1) +# S = np.var(P.dot(trunc), axis=1) +# B = np.var(P.dot(bp), axis=1) +# T = np.var(P.dot(tomo), axis=1) +# A = np.var(P.dot(aliasing), axis=1) +# F = np.var(P.dot(filt), axis=1) +# plt.plot(N[swap], color="black") +# plt.plot(S[swap], color="green") +# plt.plot(B[swap], color="red") +# plt.plot(T[swap], color="blue") +# plt.plot(A[swap], color="cyan") +# plt.plot(F[swap], color="magenta") +# plt.xlabel("Modes") +# plt.ylabel("Variance [microns^2]") +# plt.yscale("log") +# plt.title("Variance of estimated errors on modal basis B") + +# if (list(f.attrs.keys()).count("_Param_target__Lambda")): +# Lambda = f.attrs["_Param_target__Lambda"][0] +# else: +# Lambda = 1.65 + +# print("noise :", +# np.sqrt(np.sum(N)) * 1e3, " nm, ", "SR : ", +# np.exp(-np.sum(N) * (2 * np.pi / Lambda)**2)) +# print("trunc :", +# np.sqrt(np.sum(S)) * 1e3, " nm, ", "SR : ", +# np.exp(-np.sum(S) * (2 * np.pi / Lambda)**2)) +# print("bp :", +# np.sqrt(np.sum(B)) * 1e3, " nm, 
", "SR : ", +# np.exp(-np.sum(B) * (2 * np.pi / Lambda)**2)) +# print("tomo :", +# np.sqrt(np.sum(T)) * 1e3, " nm, ", "SR : ", +# np.exp(-np.sum(T) * (2 * np.pi / Lambda)**2)) +# print("aliasing :", +# np.sqrt(np.sum(A)) * 1e3, " nm, ", "SR : ", +# np.exp(-np.sum(A) * (2 * np.pi / Lambda)**2)) +# print("filt :", +# np.sqrt(np.sum(F)) * 1e3, " nm, ", "SR : ", +# np.exp(-np.sum(F) * (2 * np.pi / Lambda)**2)) +# print("fitting :", +# np.sqrt(f["fitting"].value / ((2 * np.pi / Lambda)**2)) * 1e3, " nm, ", +# "SR : ", np.exp(-f["fitting"].value)) +# #plt.legend(["noise","WFS non-linearity","Bandwidth","Anisoplanatism","Aliasing","Filtered modes"]) + + +def plotCovCor(filename, maparico=None): + """ + Displays the covariance and correlation matrix between the contributors + Args: + filename: (str): path to the ROKET file + maparico: (str): (optional) matplotlib colormap to use + + """ + f = h5py.File(filename, 'r') + cov = f["cov"][:] + cor = f["cor"][:] + + labels = ["noise", "WF deviation", "aliasing", "filt. 
# NOTE(review): the tail of a preceding plotting helper was truncated at this
# chunk boundary and could not be reconstructed here; restore it from VCS.


def _accumulate_map(ind, values, side):
    """ Accumulate values at flat indices of a (side x side) map.

    All the values that land on the same cell are averaged.

    Args:
        ind: (np.ndarray[ndim=1,dtype=np.int32]): flat target index per value
        values: (np.ndarray[ndim=1]): values to accumulate
        side: (int): side length of the square output map
    :return:
        Map: (np.ndarray[ndim=2]): (side x side) averaged map
    """
    Map = np.zeros(side * side)
    div = Map.copy()
    for k in range(ind.size):
        Map[ind[k]] += values[k]
        div[ind[k]] += 1
    # Avoid division by zero on cells that received no sample
    div[np.where(div == 0)] = 1
    Map /= div
    return Map.reshape((side, side))


def get_IF(filename):
    """ Return the influence functions of the pzt and tt DM saved in a ROKET file.

    Args:
        filename: (str): path to the ROKET file
    :return:
        IF: (csr_matrix): pzt influence function (sparse)
        T: (np.ndarray[ndim=2,dtype=np.float32]): tip tilt influence function
    """
    f = h5py.File(filename, 'r')
    IF = csr_matrix((f["IF.data"][:], f["IF.indices"][:], f["IF.indptr"][:]))
    if (list(f.keys()).count("TT")):
        T = f["TT"][:]
    else:
        # Old file layout: the last 2 rows of IF hold the tip-tilt modes
        T = IF[-2:, :].toarray()
        IF = IF[:-2, :]
    f.close()
    return IF, T.T.astype(np.float32)


def get_mode(filename, n):
    """ Return the #n mode of the Btt modal basis contained in a ROKET file.

    Args:
        filename: (str): path to the ROKET file
        n: (int): mode number
    :return:
        sc: (np.ndarray[ndim=2,dtype=np.float32]): mode #n of the Btt basis
    """
    f = h5py.File(filename, 'r')
    Btt = f["Btt"][:]
    IF, TT = get_IF(filename)
    # Bugfix: Dataset.value was removed in h5py 3.0, read scalars with [()]
    dim = f["dm_dim"][()]
    indx = f["indx_pup"][:]
    f.close()  # bugfix: the file handle was previously leaked
    sc = np.zeros((dim, dim)).flatten()
    # Project the mode from the modal space back to the pupil pixels
    mode = IF.T.dot(Btt[:-2, n])
    mode += TT.T.dot(Btt[-2:, n])
    sc[indx] = mode

    return sc.reshape((dim, dim))


def get_tar_image(filename):
    """ Return the PSF computed by COMPASS saved in the ROKET file.

    Args:
        filename: (str): path to the ROKET file
    :return:
        psf: (np.ndarray[ndim=2,dtype=np.float32]): PSF computed by COMPASS
    """
    f = h5py.File(filename, "r")
    psf = f["psf"][:]
    f.close()

    return psf


def getMap(filename, covmat):
    """ Return the spatial representation of a covariance matrix expressed in the DM space.

    Args:
        filename: (str): path to the ROKET file
        covmat: (np.ndarray[ndim=2,dtype=np.float32]): covariance matrix
    :return:
        Map: (np.ndarray[ndim=2,dtype=np.float32]): covariance map
    """
    f = h5py.File(filename, 'r')
    xpos = f["dm.xpos"][:]
    ypos = f["dm.ypos"][:]
    nact = f.attrs["_Param_dm__nact"][0]
    f.close()  # bugfix: the file handle was previously leaked
    pitch = xpos[1] - xpos[0]
    # Grid indices of the valid actuators
    x = ((xpos - xpos.min()) / pitch).astype(np.int32)
    y = ((ypos - ypos.min()) / pitch).astype(np.int32)
    nn = (x, y)

    # Coordinate differences between every couple of valid actuators,
    # in [-(nact-1), nact-1]
    xx = np.tile(np.arange(nact), (nact, 1))
    yy = xx.T
    xv = xx[nn]  # hoisted out of the loop (loop invariant)
    yv = yy[nn]
    dx = np.zeros((x.size, x.size), dtype=np.int32)
    dy = dx.copy()
    for k in range(x.size):
        dx[k, :] = xv[k] - xv
        dy[k, :] = yv[k] - yv

    # Shift the offsets so that they can be used as array indices
    dx += (nact - 1)
    dy += (nact - 1)

    # Turn each (dx, dy) couple into a flat index of 'Map' and average the
    # covariance values that land on the same cell
    ind = dy.flatten() + (nact * 2 - 1) * (dx.flatten())
    return _accumulate_map(ind, covmat.flatten(), nact * 2 - 1)


def SlopesMap(covmat, filename=None, nssp=None, validint=None):
    """ Return a quarter of the spatial representation of a covariance matrix
    expressed in the slopes space.
    Needs to be called 4 times to get the full map (XX, YY, XY, YX).

    Args:
        covmat: (np.ndarray[ndim=2,dtype=np.float32]): part of the covariance matrix
        filename: (str): (optional) path to the ROKET file
        nssp: (int): (optional) Number of ssp in the diameter
        validint: (float): (optional) Central obstruction as a ratio of D
    :return:
        Map: (np.ndarray[ndim=2,dtype=np.float32]): covariance map
    :raises ValueError: if neither filename nor (nssp, validint) is provided
    """
    if filename is not None:
        f = h5py.File(filename, 'r')
        nssp = f.attrs["_Param_wfs__nxsub"][0]
        validint = f.attrs["_Param_tel__cobs"]
        f.close()

    if nssp is None or validint is None:
        raise ValueError("nssp and validint not defined")

    nsub = covmat.shape[0]
    x = np.linspace(-1, 1, nssp)
    x, y = np.meshgrid(x, x)
    r = np.sqrt(x * x + y * y)

    # Select the nsub valid subapertures inside the annular pupil
    rorder = np.sort(r.reshape(nssp * nssp))
    ncentral = nssp * nssp - np.sum(r >= validint, dtype=np.int32)
    validext = rorder[ncentral + nsub]
    valid = (r < validext) & (r >= validint)
    nn = np.where(valid)

    xx = np.tile(np.arange(nssp), (nssp, 1))
    yy = xx.T
    xx = xx[nn]
    yy = yy[nn]
    dx = np.zeros((xx.size, xx.size), dtype=np.int32)
    dy = dx.copy()
    for k in range(xx.size):
        dx[k, :] = xx[k] - xx
        dy[k, :] = yy[k] - yy

    # Shift the offsets so that they can be used as array indices
    dx += (nssp - 1)
    dy += (nssp - 1)

    # Turn each (dx, dy) couple into a flat index of 'Map' and average the
    # covariance values that land on the same cell
    ind = dy.flatten() + (nssp * 2 - 1) * (dx.flatten())
    return _accumulate_map(ind, covmat.flatten(), nssp * 2 - 1)


def covFromMap(Map, nsub, filename=None, nssp=None, validint=None):
    """ Reconstruct a quarter of a covariance matrix expressed in the slopes
    space from its covariance map (inverse operation of SlopesMap).
    Needs to be called 4 times to get the full matrix (XX, YY, XY, YX).

    Args:
        Map: (np.ndarray[ndim=2,dtype=np.float32]): covariance map
        nsub: (int): number of valid subapertures
        filename: (str): (optional) path to the ROKET file
        nssp: (int): (optional) Number of ssp in the diameter
        validint: (float): (optional) Central obstruction as a ratio of D
    :return:
        covmat: (np.ndarray[ndim=2,dtype=np.float32]): part of the covariance matrix
    :raises ValueError: if neither filename nor (nssp, validint) is provided
    """
    if filename is not None:
        f = h5py.File(filename, 'r')
        nssp = f.attrs["_Param_wfs__nxsub"][0]
        validint = f.attrs["_Param_tel__cobs"]
        f.close()

    if nssp is None or validint is None:
        raise ValueError("nssp and validint not defined")

    # Same subaperture geometry as SlopesMap
    x = np.linspace(-1, 1, nssp)
    x, y = np.meshgrid(x, x)
    r = np.sqrt(x * x + y * y)

    rorder = np.sort(r.reshape(nssp * nssp))
    ncentral = nssp * nssp - np.sum(r >= validint, dtype=np.int32)
    validext = rorder[ncentral + nsub]
    valid = (r < validext) & (r >= validint)
    nn = np.where(valid)

    xx = np.tile(np.arange(nssp), (nssp, 1))
    yy = xx.T
    xx = xx[nn]
    yy = yy[nn]
    dx = np.zeros((xx.size, xx.size), dtype=np.int32)
    dy = dx.copy()
    for k in range(xx.size):
        dx[k, :] = xx[k] - xx
        dy[k, :] = yy[k] - yy

    # Shift the offsets so that they can be used as array indices
    dx += (nssp - 1)
    dy += (nssp - 1)

    # Read back, for each (dx, dy) couple, the corresponding map cell
    Cf = np.zeros((nsub, nsub)).flatten()
    ind = dy.flatten() + (nssp * 2 - 1) * (dx.flatten())
    Map = Map.flatten()
    for k in range(ind.size):
        Cf[k] = Map[ind[k]]

    return Cf.reshape((nsub, nsub))


def getCovFromMap(Map, nsub, filename=None, nssp=None, validint=None):
    """ Return the full covariance matrix expressed in the slopes space
    reconstructed from its full covariance map.

    Args:
        Map: (np.ndarray[ndim=2,dtype=np.float32]): covariance map
        nsub: (int): total number of slopes (2 * number of valid subapertures)
        filename: (str): (optional) path to the ROKET file
        nssp: (int): (optional) Number of ssp in the diameter
        validint: (float): (optional) Central obstruction as a ratio of D
    :return:
        covmat: (np.ndarray[ndim=2,dtype=np.float32]): covariance matrix
    """
    if filename is not None:
        f = h5py.File(filename, 'r')
        nssp = f.attrs["_Param_wfs__nxsub"][0]
        f.close()
    mapSize = 2 * nssp - 1
    covmat = np.zeros((nsub, nsub))

    # Bugfix: nssp and validint are now forwarded so that the file-less call
    # path (filename=None) works, consistently with get_slopessMap
    covmat[:nsub // 2, :nsub // 2] = covFromMap(Map[:mapSize, :mapSize], nsub // 2,
                                                filename=filename, nssp=nssp,
                                                validint=validint)
    covmat[nsub // 2:, nsub // 2:] = covFromMap(Map[mapSize:, mapSize:], nsub // 2,
                                                filename=filename, nssp=nssp,
                                                validint=validint)
    covmat[:nsub // 2, nsub // 2:] = covFromMap(Map[:mapSize, mapSize:], nsub // 2,
                                                filename=filename, nssp=nssp,
                                                validint=validint)
    covmat[nsub // 2:, :nsub // 2] = covFromMap(Map[mapSize:, :mapSize], nsub // 2,
                                                filename=filename, nssp=nssp,
                                                validint=validint)

    return covmat


def get_slopessMap(covmat, filename=None, nssp=None, validint=None):
    """ Return the full spatial representation of a covariance matrix
    expressed in the slopes space (XX, XY, YX and YY quadrants).

    Args:
        covmat: (np.ndarray[ndim=2,dtype=np.float32]): covariance matrix
        filename: (str): (optional) path to the ROKET file
        nssp: (int): (optional) Number of ssp in the diameter
        validint: (float): (optional) Central obstruction as a ratio of D
    :return:
        Map: (np.ndarray[ndim=2,dtype=np.float32]): covariance map
    """
    if filename is not None:
        f = h5py.File(filename, 'r')
        nssp = f.attrs["_Param_wfs__nxsub"][0]
        f.close()
    nsub = covmat.shape[0] // 2
    mapSize = 2 * nssp - 1
    Map = np.zeros((2 * mapSize, 2 * mapSize))

    Map[:mapSize, :mapSize] = SlopesMap(covmat[:nsub, :nsub], filename=filename,
                                        nssp=nssp, validint=validint)
    Map[:mapSize, mapSize:] = SlopesMap(covmat[:nsub, nsub:], filename=filename,
                                        nssp=nssp, validint=validint)
    Map[mapSize:, :mapSize] = SlopesMap(covmat[nsub:, :nsub], filename=filename,
                                        nssp=nssp, validint=validint)
    Map[mapSize:, mapSize:] = SlopesMap(covmat[nsub:, nsub:], filename=filename,
                                        nssp=nssp, validint=validint)

    return Map


def ensquare_PSF(filename, psf, N, display=False, cmap="jet"):
    """ Return the ensquared PSF.

    Args:
        filename: (str): path to the ROKET file
        psf: (np.ndarray[ndim=2,dtype=np.float32]): PSF to ensquare
        N: (int): size of the square in units of Lambda/D
        display: (bool): (optional) if True, also displays the ensquared PSF
        cmap: (str): (optional) matplotlib colormap to use
    :return:
        psf: (np.ndarray[ndim=2,dtype=np.float32]): the ensquared psf
    """
    f = h5py.File(filename, 'r')
    Lambda_tar = f.attrs["_Param_target__Lambda"][0]
    RASC = 180 / np.pi * 3600.
    pixsize = Lambda_tar * 1e-6 / (psf.shape[0] * f.attrs["_Param_tel__diam"] / f.attrs[
            "_Param_geom__pupdiam"]) * RASC
    # Half-width of the box, in pixels
    w = int(N * (Lambda_tar * 1e-6 / f.attrs["_Param_tel__diam"] * RASC) / pixsize)
    mid = psf.shape[0] // 2
    if (display):
        psfe = np.abs(psf[mid - w:mid + w, mid - w:mid + w])
        plt.matshow(np.log10(psfe), cmap=cmap)
        plt.colorbar()
        xt = np.linspace(0, psfe.shape[0] - 1, 6).astype(np.int32)
        yt = np.linspace(-N, N, 6).astype(np.int32)
        plt.xticks(xt, yt)
        plt.yticks(xt, yt)

    f.close()
    return psf[mid - w:mid + w, mid - w:mid + w]


def ensquared_energy(filename, psf, N):
    """ Return the ensquared energy in a box width of N * lambda/D.

    Args:
        filename: (str): path to the ROKET file
        psf: (np.ndarray[ndim=2,dtype=np.float32]): PSF
        N: (int): size of the square in units of Lambda/D
    """
    return ensquare_PSF(filename, psf, N).sum() / psf.sum()


def cutsPSF(filename, psf, psfs):
    """ Plot cuts of two PSFs along X and Y axis for comparison.

    Args:
        filename: (str): path to the ROKET file
        psf: (np.ndarray[ndim=2,dtype=np.float32]): first PSF
        psfs: (np.ndarray[ndim=2,dtype=np.float32]): second PSF
    """
    f = h5py.File(filename, 'r')
    Lambda_tar = f.attrs["_Param_target__Lambda"][0]
    RASC = 180 / np.pi * 3600.
    pixsize = Lambda_tar * 1e-6 / (psf.shape[0] * f.attrs["_Param_tel__diam"] / f.attrs[
            "_Param_geom__pupdiam"]) * RASC
    # Angular axis in units of lambda/D
    x = (np.arange(psf.shape[0]) - psf.shape[0] / 2) * pixsize / (
            Lambda_tar * 1e-6 / f.attrs["_Param_tel__diam"] * RASC)
    plt.figure()
    plt.subplot(2, 1, 1)
    plt.semilogy(x, psf[psf.shape[0] // 2, :], color="blue")
    plt.semilogy(x, psfs[psf.shape[0] // 2, :], color="red")
    plt.semilogy(x,
                 np.abs(psf[psf.shape[0] // 2, :] - psfs[psf.shape[0] // 2, :]),
                 color="green")
    plt.xlabel("X-axis angular distance [units of lambda/D]")
    plt.ylabel("Normalized intensity")
    plt.legend(["PSF exp", "PSF model", "Diff"])
    plt.xlim(-20, 20)
    plt.ylim(1e-7, 1)
    plt.subplot(2, 1, 2)
    plt.semilogy(x, psf[:, psf.shape[0] // 2], color="blue")
    plt.semilogy(x, psfs[:, psf.shape[0] // 2], color="red")
    plt.semilogy(x,
                 np.abs(psf[:, psf.shape[0] // 2] - psfs[:, psf.shape[0] // 2]),
                 color="green")
    plt.xlabel("Y-axis angular distance [units of lambda/D]")
    plt.ylabel("Normalized intensity")
    plt.legend(["PSF exp", "PSF model", "Diff"])
    plt.xlim(-20, 20)
    plt.ylim(1e-7, 1)
    plt.savefig('thefig.png')
    f.close()


def compDerivativeCmm(filename=None, slopes=None, dt=1, dd=False, ss=False):
    """ Compute d/dt(slopes)*slopes from a ROKET buffer.

    Args:
        filename: (str): (optional) path to the ROKET file
        slopes: (np.ndarray[ndim=2,dtype=np.float32]): (optional) buffer of slopes
            arranged as (nsub x niter)
        dt: (int): (optional) dt in frames
        dd: (bool): (optional) if True, computes d/dt(slopes)*d/dt(slopes)
        ss: (bool): (optional) if True, computes slopes(t)*slopes(t+dt)
    :return:
        dCmm: (np.ndarray[ndim=2,dtype=np.float32]): covariance matrix of slopes
            with their derivative
    :raises ValueError: if neither filename nor slopes is provided
    """
    if filename is not None:
        f = h5py.File(filename, 'r')
        slopes = f["slopes"][:]
        f.close()
    if slopes is None:
        # Bugfix: dCmm used to be unbound (NameError) in this case
        raise ValueError("Either filename or slopes must be provided")
    if dd:
        dCmm = (slopes[:, dt:] - slopes[:, :-dt]).dot(
                (slopes[:, dt:] - slopes[:, :-dt]).T / 2)
    elif ss:
        dCmm = slopes[:, :-dt].dot(slopes[:, dt:].T)
    else:
        dCmm = (slopes[:, dt:] - slopes[:, :-dt]).dot(
                (slopes[:, dt:] + slopes[:, :-dt]).T / 2)

    return dCmm / slopes[:, dt:].shape[1]


def compProfile(filename, nlayers):
    """ Identify turbulent parameters (wind speed, direction and frac. of r0)
    from a ROKET file.

    Args:
        filename: (str): path to the ROKET file
        nlayers: (int): number of turbulent layers (maybe deduced in the future ?)
    """
    f = h5py.File(filename, "r")
    dt = f.attrs["_Param_loop__ittime"]
    dk = int(2 / 3 * f.attrs["_Param_tel__diam"] / 20 / dt)
    pdiam = f.attrs["_Param_tel__diam"] / f.attrs["_Param_wfs__nxsub"]

    mapC = get_slopessMap(compDerivativeCmm(filename, dt=dk), filename)
    size = mapC.shape[0] // 2
    # Fold the four quadrants onto one another to increase SNR
    minimap = mapC[size:, size:] + mapC[size:, :size] + mapC[:size,
                                                            size:] + mapC[:size, :size]

    ws = np.zeros(nlayers)
    wd = np.zeros(nlayers)
    frac = np.zeros(nlayers)

    for k in range(nlayers):
        plt.matshow(minimap)
        plt.title(str(k))
        x, y = np.where(minimap == minimap.max())
        # Bugfix: int() on a size-1 ndarray raises with NumPy >= 2;
        # keep the first maximum like the original behavior
        x = int(x[0])
        y = int(y[0])
        print("max ", k, ": x=", x, " ; y=", y)
        frac[k] = minimap[x, y]
        r = np.linalg.norm([x - size / 2, y - size / 2]) * pdiam
        ws[k] = r / (dk * dt)
        wd[k] = np.arctan2(x - size / 2, y - size / 2) * 180 / np.pi
        if (wd[k] < 0):
            wd[k] += 360
        # Blank the detected peak and its symmetric before looking for the next layer
        minimap[x - 2:x + 3, y - 2:y + 3] = 0
        minimap[(size - x - 1) - 2:(size - x - 1) + 3, (size - y - 1) - 2:
                (size - y - 1) + 3] = 0
    frac /= frac.sum()

    ind = np.argsort(f.attrs["_Param_atmos__frac"])[::-1]
    print("Real wind speed: ", f.attrs["_Param_atmos__windspeed"][ind].tolist())
    print("Estimated wind speed: ", ws.tolist())
    print("-----------------------------")
    print("Real wind direction: ", f.attrs["_Param_atmos__winddir"][ind].tolist())
    print("Estimated wind direction: ", wd.tolist())
    print("-----------------------------")
    print("Real frac: ", f.attrs["_Param_atmos__frac"][ind].tolist())
    print("Estimated frac: ", frac.tolist())
    print("-----------------------------")
    f.close()
"""
GAMORA (Gpu Accelerated Module fOr psf Reconstruction Algorithms)

Python module for GPU accelerated PSF reconstruction using Vii functions and ROKET files.

Note: GPU devices used are hardcoded here. Change gpudevices if needed.
"""
import numpy as np
import matplotlib.pyplot as plt
import h5py
from shesha.sutra_wrap import carmaWrap_context, Gamora
from scipy.sparse import csr_matrix
from sys import stdout
import time
from guardians import drax

plt.ion()

#gpudevices = np.array([0, 1, 2, 3], dtype=np.int32)
gpudevices = np.array([0], dtype=np.int32)
c = carmaWrap_context.get_instance_ngpu(gpudevices.size, gpudevices)


def psf_rec_Vii(filename, err=None, fitting=True, covmodes=None, cov=None):
    """ PSF reconstruction using Vii functions with GPU acceleration.

    Args:
        filename: (str): path to the ROKET file
        err: (np.ndarray[ndim=2, dtype=np.float32]): (optional) buffers of command error
        fitting: (bool): (optional) Add the fitting error to the PSF or not (True by default)
        covmodes: (np.ndarray[ndim=2, dtype=np.float32]): (optional) error covariance matrix in the modal space
        cov: (np.ndarray[ndim=2, dtype=np.float32]): (optional) error covariance matrix in the DM space

    :return:
        otftel: (np.ndarray[ndim=2, dtype=np.float32]): OTF of the perfect telescope
        otf2: (np.ndarray[ndim=2, dtype=np.float32]): OTF due to residual phase error
        psf: (np.ndarray[ndim=2, dtype=np.float32]): LE PSF
        gpu: (Gamora): Gamora GPU object (for manipulation and debug)
    """
    f = h5py.File(filename, 'r')
    spup = drax.get_pup(filename)
    # Sparse IF matrix
    IF, T = drax.get_IF(filename)
    # Covariance matrix
    P = f["P"][:]
    print("Projecting error buffer into modal space...")
    if ((err is None) and (cov is None)):
        err = drax.get_err(filename)
        err = P.dot(err)
    print("Computing covariance matrix...")
    if (cov is None):
        if (covmodes is None):
            covmodes = err.dot(err.T) / err.shape[1]
        else:
            covmodes = (P.dot(covmodes)).dot(P.T)
    else:
        covmodes = cov
    print("Done")
    Btt = f["Btt"][:]

    # Scale factor (phase in radians at the target wavelength)
    scale = float(2 * np.pi / f.attrs["_Param_target__Lambda"][0])
    # Init GPU
    gpu = Gamora(c, c.active_device, "Vii", Btt.shape[0], covmodes.shape[0],
                 f["noise"][:].shape[1], IF.data, IF.indices, IF.indptr, IF.data.size,
                 T, spup, spup.shape[0],
                 np.where(spup)[0].size, scale, Btt, covmodes)
    # Launch computation
    # gamora.set_eigenvals(e)
    # gamora.set_covmodes(V)
    tic = time.time()
    gpu.psf_rec_Vii()

    otftel = np.array(gpu.d_otftel)
    otf2 = np.array(gpu.d_otfVii)

    otftel /= otftel.max()
    if (list(f.keys()).count("psfortho") and fitting):
        print("\nAdding fitting to PSF...")
        psfortho = f["psfortho"][:]
        otffit = np.real(np.fft.fft2(psfortho))
        otffit /= otffit.max()
        psf = np.fft.fftshift(np.real(np.fft.ifft2(otffit * otf2)))
    else:
        psf = np.fft.fftshift(np.real(np.fft.ifft2(otftel * otf2)))

    # Normalize so that SR = psf.max()
    psf *= (psf.shape[0] * psf.shape[0] / float(np.where(spup)[0].shape[0]))
    f.close()
    tac = time.time()
    print(" ")
    print("PSF reconstruction took ", tac - tic, " seconds")

    return otftel, otf2, psf, gpu


def psf_rec_vii_cpu(filename):
    """ PSF reconstruction using Vii functions (CPU version).

    Args:
        filename: (str): path to the ROKET file

    :return:
        otftel: (np.ndarray[ndim=2, dtype=np.float32]): OTF of the perfect telescope
        otf2: (np.ndarray[ndim=2, dtype=np.float32]): OTF due to residual phase error
        psf: (np.ndarray[ndim=2, dtype=np.float32]): LE PSF
    """
    f = h5py.File(filename, 'r')
    IF, T = drax.get_IF(filename)
    ratio_lambda = 2 * np.pi / f.attrs["_Param_target__Lambda"][0]
    # Telescope OTF
    print("Computing telescope OTF...")
    spup = drax.get_pup(filename)
    mradix = 2
    fft_size = mradix**int((np.log(2 * spup.shape[0]) / np.log(mradix)) + 1)
    pup = np.zeros((fft_size, fft_size))
    pup[:spup.shape[0], :spup.shape[0]] = spup
    pupfft = np.fft.fft2(pup)
    conjpupfft = np.conjugate(pupfft)
    otftel = np.real(np.fft.ifft2(pupfft * conjpupfft))
    den = 1. / otftel
    den[np.where(np.isinf(den))] = 0
    mask = np.ones((fft_size, fft_size))
    mask[np.where(otftel < 1e-5)] = 0
    otftel = otftel / otftel.max()
    print("Done")
    # Covariance matrix
    print("Computing covariance matrix...")
    err = drax.get_err(filename)
    P = f["P"][:]
    err = P.dot(err)
    Btt = f["Btt"][:]
    #modes = IF.T.dot(Btt)
    covmodes = err.dot(err.T) / err.shape[1]
    print("Done")
    # Vii algorithm
    print("Diagonalizing cov matrix...")
    e, V = np.linalg.eig(covmodes)
    print("Done")
    tmp = np.zeros((fft_size, fft_size))
    newmodek = tmp.copy()
    ind = np.where(pup)
    for k in range(err.shape[0]):
        #newmodek[ind] = IF.T.dot(V[:,k])
        #newmodek[ind] = modes.dot(V[:,k])
        tmp2 = Btt.dot(V[:, k])
        newmodek[ind] = IF.T.dot(tmp2[:-2])
        newmodek[ind] += T.T.dot(tmp2[-2:])
        term1 = np.real(np.fft.fft2(newmodek**2) * conjpupfft)
        term2 = np.abs(np.fft.fft2(newmodek))**2
        tmp += ((term1 - term2) * e[k])
        print(" Computing Vii : %d/%d\r" % (k, covmodes.shape[0]), end=' ')
    print("Vii computed")

    dphi = np.real(np.fft.ifft2(2 * tmp)) * den * mask * ratio_lambda**2
    otf2 = np.exp(-0.5 * dphi) * mask
    otf2 = otf2 / otf2.max()

    psf = np.fft.fftshift(np.real(np.fft.ifft2(otftel * otf2)))
    psf *= (fft_size * fft_size / float(np.where(pup)[0].shape[0]))

    f.close()
    return otftel, otf2, psf


def test_Vii(filename):
    """ Test function comparing results and performance of the GPU version
    versus the CPU version of the Vii PSF reconstruction.

    Args:
        filename: (str): path to the ROKET file
    """
    # Locals renamed (t0/t1/t2) so they no longer shadow the module-level
    # CUDA context `c`
    t0 = time.time()
    otftel_cpu, otf2_cpu, psf_cpu = psf_rec_vii_cpu(filename)
    t1 = time.time()
    otftel_gpu, otf2_gpu, psf_gpu, gamora = psf_rec_Vii(filename)
    t2 = time.time()
    cputime = t1 - t0
    gputime = t2 - t1
    print("CPU exec time : ", cputime, " s")
    print("GPU exec time : ", gputime, " s")
    print("Speed up : x", cputime / gputime)
    print("---------------------------------")
    print("precision on psf : ", np.abs(psf_cpu - psf_gpu).max() / psf_cpu.max())


def add_fitting_to_psf(filename, otf, otffit):
    """ Compute the PSF including the fitting OTF.

    Args:
        filename: (str): path to the ROKET file
        otf: (np.ndarray[ndim=2, dtype=np.float32]): OTF
        otffit: (np.ndarray[ndim=2, dtype=np.float32]): fitting error OTF
    :return:
        psf: (np.ndarray[ndim=2, dtype=np.float32]): PSF
    """
    print("\nAdding fitting to PSF...")
    spup = drax.get_pup(filename)
    psf = np.fft.fftshift(np.real(np.fft.ifft2(otffit * otf)))
    psf *= (psf.shape[0] * psf.shape[0] / float(np.where(spup)[0].shape[0]))

    return psf


def intersample(Cvvmap, pupilImage, IFImage, pixscale, dactu, lambdaIR):
    """ Compute the PSF from the covariance map of the tomographic error.

    Cvvmap is the 'map' of the Cvv matrix (cov matrix of the tomographic error
    expressed on volts, see drax.getMap()). The "volts" unit must be used
    together with the influence function funcInflu(x, y, dm.x0) expressed in
    meters: units of IFImage and Cvvmap shall be such that the product of
    Cvvmap values and IFImage^2 is microns^2. The result is then in meter^2.

    Args:
        Cvvmap: (np.ndarray[ndim=2]): output of drax.getMap(Cvv), always odd-sized
        pupilImage: (np.ndarray[ndim=2]): pupil image of size (N, N), properly
            zero-padded, ready for FFT
        IFImage: (np.ndarray[ndim=2]): image of the influence function of 1
            actuator; same support and sampling as pupilImage
        pixscale: (float): pixel size (in pupil space, meters) of pupilImage
            and IFImage
        dactu: (float): inter-actuator pitch in pupil space (meters)
        lambdaIR: (float): imaging wavelength in microns
    :return:
        psf: (np.ndarray[ndim=2]): reconstructed PSF
    :raises ValueError: if Cvvmap does not have an odd number of rows/columns

    SEE ALSO: drax.getMap()
    """
    print("Interpolating Dphi map")

    # image size
    N = pupilImage.shape[0]

    # size of the side of Cvvmap (always an odd number)
    ncov = Cvvmap.shape[0]
    if (ncov % 2) == 0:
        # Bugfix: used to be a deliberate ZeroDivisionError (ncov = 3 / 0)
        raise ValueError("Cvvmap must have an odd number of rows/columns")

    # nber of elements on each side of the center of Cvvmap
    nelem = (ncov - 1) // 2
    # Compute inter-actuator distance in pixels dactupix.
    # dactupix *should* be an integer : pixscale shall be chosen in such a way
    # that dactupix is an integer. However, for safety here, we round the
    # result.
    dactupix = int(np.round(dactu / pixscale))
    # Fill MAP array with values of Cvvmap. Centre of MAP is located at
    # index [ncmap, ncmap] (i.e. Fourier-centred)
    MAP = np.zeros((N, N))
    ncmap = N // 2  # central element of the MAP, in a Fourier-sense
    i = ncmap - nelem * dactupix
    j = ncmap + nelem * dactupix + 1
    MAP[i:j:dactupix, i:j:dactupix] = Cvvmap
    print("done")

    # Computing the phase correlation function.
    # One should have corr(0) = phase_variance. Computing corr(0) is done
    # using the diagonal of Cvv (the average value over the actuators). The
    # average phase variance over the pupil is
    # (1/S_pupil) * $_pupil(fi^2) * Nactu * <Cvv_diagonal>, with S_pupil the
    # surface and $ an integral; (S_pupil/Nactu) is replaced by (dactu^2),
    # expressed in pixels because the integral $(fi^2) is a sum over pixels.
    corr = np.fft.fft2(np.abs(np.fft.fft2(IFImage))**2 * np.fft.fft2(MAP)).real / (
            IFImage.size * dactupix**2)
    # From correlation to Dphi: Dphi(r) = 2*C(0) - 2*C(r).
    # The factor also translates dphi from m^2 into rd^2.
    fact = 2 * (2 * np.pi / lambdaIR)**2
    corr = np.fft.fftshift(corr)
    dphi = fact * corr[0, 0] - fact * corr

    # computation of the PSF
    FTOtel = np.fft.ifft2(np.abs(np.fft.fft2(pupilImage))**2).real
    # FTOtel is normalized with np.sum(FTOtel)=1. This ensures to get a PSF
    # with SR=np.max(psf), when the PSF is computed using just np.fft.fft2()
    # without other normalisation.
    FTOtel /= np.sum(FTOtel)
    # The mask could be omitted because FTOtel should be zero after the
    # telescope cutoff. However, numeric errors lead to FTOtel small but not
    # zero, and multiplying with exp(dphi) with dphi undefined after the
    # cutoff may lead to unexpected results.
    mask = FTOtel > (FTOtel[0, 0] / 1e9)
    psf = np.fft.fftshift(np.fft.fft2(np.exp(-0.5 * dphi * mask) * FTOtel).real)

    return psf

# NOTE(review): the commented-out debug helpers psf_rec_roket_file() and
# psf_rec_roket_file_cpu() (dead code) were removed; recover them from VCS
# history if ever needed again.
+# pup = pup_F.reshape(pup.shape) +# spup = pup[np.where(pup)[0].min():np.where(pup)[0].max() + 1, +# np.where(pup)[1].min():np.where(pup)[1].max() + 1] +# phase = spup.copy() +# mradix = 2 +# fft_size = mradix**int((np.log(2 * spup.shape[0]) / np.log(mradix)) + 1) +# amplipup = np.zeros((fft_size, fft_size), dtype=np.complex) +# psf = amplipup.copy() +# psf = psf + +# # Sparse IF matrix +# IF, T = drax.get_IF(filename) +# # Scale factor +# scale = float(2 * np.pi / f.attrs["_Param_target__Lambda"][0]) + +# for k in range(err.shape[1]): +# amplipup = np.zeros((fft_size, fft_size), dtype=np.complex) +# phase[np.where(spup)] = IF.T.dot(err[:-2, k]) +# phase[np.where(spup)] += T.dot(err[-2:, k]) +# amplipup[:phase.shape[0], :phase.shape[1]] = np.exp(-1j * phase * scale) +# amplipup = np.fft.fft2(amplipup) +# psf += np.fft.fftshift(np.abs(amplipup)**2) / \ +# IF.shape[1] / IF.shape[1] / err.shape[1] +# print(" Computing and stacking PSF : %d/%d\r" % (k, err.shape[1]), end=' ') +# print("PSF computed and stacked") +# f.close() +# return psf diff --git a/guardians/groot.py b/guardians/groot.py new file mode 100644 index 0000000..22a5454 --- /dev/null +++ b/guardians/groot.py @@ -0,0 +1,903 @@ +""" +GROOT (Gpu-based Residual errOr cOvariance maTrix) +Python module for modelization of error covariance matrix +""" +import numpy as np +import h5py +from shesha.sutra_wrap import carmaWrap_context, Groot +import time +import sys +import os +from tqdm import tqdm + +from guardians import gamora +from guardians import drax, starlord +import matplotlib.pyplot as plt +plt.ion() + +#gpudevices = np.array([0, 1, 2, 3], dtype=np.int32) +gpudevices = np.array([0], dtype=np.int32) + +cxt = carmaWrap_context.get_instance_ngpu(gpudevices.size, gpudevices) + + +def compute_Cerr(filename, modal=True, ctype="float", speed=None, H=None, theta=None, + r0=None, L0=None): + """ Returns the residual error covariance matrix using GROOT from a ROKET file + :parameter: + filename : (string) : full 
path to the ROKET file + modal : (bool) : if True (default), Cerr is returned in the Btt modal basis, + in the actuator basis if False + ctype : (string) : "float" or "double" + speed: (np.ndarray(ndim=1, dtype=np.float32)): (optionnal) wind speed for each layer [m/s] + H: (np.ndarray(ndim=1, dtype=np.float32)): (optionnal) altitude of each layer [m] + theta: (np.ndarray(ndim=1, dtype=np.float32)): (optionnal) wind direction for each layer [rad] + r0: (float): (optionnal) Fried parameter @ 0.5 µm [m] + L0: (np.ndarray(ndim=1, dtype=np.float32)): (optionnal) Outer scale [m] + + :return: + Cerr : (np.ndarray(dim=2, dtype=np.float32)) : residual error covariance matrix + """ + f = h5py.File(filename, 'r') + Lambda_tar = f.attrs["_Param_target__Lambda"][0] + Lambda_wfs = f.attrs["_Param_wfs__Lambda"] + dt = f.attrs["_Param_loop__ittime"] + gain = f.attrs["_Param_controller__gain"] + wxpos = f.attrs["_Param_wfs__xpos"][0] + wypos = f.attrs["_Param_wfs__ypos"][0] + if r0 is None: + r0 = f.attrs["_Param_atmos__r0"] + r0 = r0 * (Lambda_tar / Lambda_wfs)**(6. / 5.) + RASC = 180. / np.pi * 3600. + xpos = f["dm.xpos"][:] + ypos = f["dm.ypos"][:] + p2m = f.attrs["_Param_tel__diam"] / f.attrs["_Param_geom__pupdiam"] + pupshape = int(2**np.ceil(np.log2(f.attrs["_Param_geom__pupdiam"]) + 1)) + xactu = (xpos - pupshape / 2) * p2m + yactu = (ypos - pupshape / 2) * p2m + if H is None: + H = f.attrs["_Param_atmos__alt"] + if L0 is None: + L0 = f.attrs["_Param_atmos__L0"] + if speed is None: + speed = f.attrs["_Param_atmos__windspeed"] + if theta is None: + theta = (f.attrs["_Param_atmos__winddir"] * np.pi / 180.) + frac = f.attrs["_Param_atmos__frac"] + + Htheta = np.linalg.norm([wxpos, wypos]) / RASC * H + vdt = speed * dt / gain + angleht = np.arctan2(wypos, wxpos) + fc = 1 / (2 * (xactu[1] - xactu[0])) + scale = (1 / r0)**(5 / 3.) 
* frac * (Lambda_tar / (2 * np.pi))**2 + Nact = f["Nact"][:] + Nact = np.linalg.inv(Nact) + P = f["P"][:] + Btt = f["Btt"][:] + Tf = Btt[:-2, :-2].dot(P[:-2, :-2]) + IF, T = drax.get_IF(filename) + IF = IF.T + T = T.T + N = IF.shape[0] + deltaTT = T.T.dot(T) / N + deltaF = IF.T.dot(T) / N + pzt2tt = np.linalg.inv(deltaTT).dot(deltaF.T) + + if (ctype == "float"): + groot = Groot(cxt, cxt.active_device, Nact.shape[0], + int(f.attrs["_Param_atmos__nscreens"]), angleht, + vdt.astype(np.float32), Htheta.astype(np.float32), L0, theta, + scale.astype(np.float32), pzt2tt.astype(np.float32), + Tf.astype(np.float32), Nact.astype(np.float32), + xactu.astype(np.float32), yactu.astype(np.float32), fc) + else: + raise TypeError("Unknown ctype : must be float") + tic = time.time() + groot.compute_Cerr() + Cerr = np.array(groot.d_Cerr) + cov_err_groot = np.zeros((Nact.shape[0] + 2, Nact.shape[0] + 2)) + cov_err_groot[:-2, :-2] = Cerr + cov_err_groot[-2:, -2:] = np.array(groot.d_TT) + tac = time.time() + print("Cee computed in : %.2f seconds" % (tac - tic)) + if (modal): + cov_err_groot = P.dot(cov_err_groot).dot(P.T) + + f.close() + return cov_err_groot + + +def compute_Cerr_cpu(filename, modal=True): + """ Returns the residual error covariance matrix using CPU version of GROOT + from a ROKET file + :parameter: + filename : (string) : full path to the ROKET file + modal : (bool) : if True (default), Cerr is returned in the Btt modal basis, + in the actuator basis if False + :return: + Cerr : (np.ndarray(dim=2, dtype=np.float32)) : residual error covariance matrix + """ + f = h5py.File(filename, 'r') + + tabx, taby = starlord.tabulateIj0() + Lambda_tar = f.attrs["_Param_target__Lambda"][0] + Lambda_wfs = f.attrs["_Param_wfs__Lambda"] + dt = f.attrs["_Param_loop__ittime"] + gain = f.attrs["_Param_controller__gain"] + wxpos = f.attrs["_Param_wfs__xpos"][0] + wypos = f.attrs["_Param_wfs__ypos"][0] + r0 = f.attrs["_Param_atmos__r0"] * (Lambda_tar / Lambda_wfs)**(6. / 5.) + RASC = 180. 
/ np.pi * 3600. + xpos = f["dm.xpos"][:] + ypos = f["dm.ypos"][:] + p2m = f.attrs["_Param_tel__diam"] / f.attrs["_Param_geom__pupdiam"] + pupshape = int(2**np.ceil(np.log2(f.attrs["_Param_geom__pupdiam"]) + 1)) + xactu = (xpos - pupshape / 2) * p2m + yactu = (ypos - pupshape / 2) * p2m + Ccov = np.zeros((xpos.size, xpos.size)) + Caniso = np.zeros((xpos.size, xpos.size)) + Cbp = np.zeros((xpos.size, xpos.size)) + xx = np.tile(xactu, (xactu.shape[0], 1)) + yy = np.tile(yactu, (yactu.shape[0], 1)) + xij = xx - xx.T + yij = yy - yy.T + + for l in range(f.attrs["_Param_atmos__nscreens"]): + H = f.attrs["_Param_atmos__alt"][l] + L0 = f.attrs["_Param_atmos__L0"][l] + speed = f.attrs["_Param_atmos__windspeed"][l] + theta = f.attrs["_Param_atmos__winddir"][l] * np.pi / 180. + frac = f.attrs["_Param_atmos__frac"][l] + + Htheta = np.linalg.norm([wxpos, wypos]) / RASC * H + vdt = speed * dt / gain + # Covariance matrices models on actuators space + M = np.zeros((xpos.size, xpos.size)) + Mvdt = M.copy() + Mht = M.copy() + Mhvdt = M.copy() + angleht = np.arctan2(wypos, wxpos) + fc = xactu[1] - xactu[0] + + M = np.linalg.norm([xij, yij], axis=0) + Mvdt = np.linalg.norm([xij - vdt * np.cos(theta), yij - vdt * np.sin(theta)], + axis=0) + Mht = np.linalg.norm( + [xij - Htheta * np.cos(angleht), yij - Htheta * np.sin(angleht)], axis=0) + Mhvdt = np.linalg.norm([ + xij - vdt * np.cos(theta) - Htheta * np.cos(angleht), + yij - vdt * np.sin(theta) - Htheta * np.sin(angleht) + ], axis=0) + + Ccov += 0.5 * (starlord.dphi_lowpass(Mhvdt, fc, L0, tabx, taby) - + starlord.dphi_lowpass(Mht, fc, L0, tabx, taby) - starlord. + dphi_lowpass(Mvdt, fc, L0, tabx, taby) + starlord.dphi_lowpass( + M, fc, L0, tabx, taby)) * (1. / r0)**(5. / 3.) * frac + + Caniso += 0.5 * ( + starlord.dphi_lowpass(Mht, fc, L0, tabx, taby) - starlord.dphi_lowpass( + M, fc, L0, tabx, taby)) * (1. / r0)**(5. / 3.) * frac + Cbp += 0.5 * (starlord.dphi_lowpass(Mvdt, fc, L0, tabx, taby) - starlord. 
+ dphi_lowpass(M, fc, L0, tabx, taby)) * (1. / r0)**(5. / 3.) * frac + + Sp = (Lambda_tar / (2 * np.pi))**2 + Ctt = (Caniso + Caniso.T) * Sp + Ctt += ((Cbp + Cbp.T) * Sp) + Ctt += ((Ccov + Ccov.T) * Sp) + + P = f["P"][:] + Btt = f["Btt"][:] + Tf = Btt[:-2, :-2].dot(P[:-2, :-2]) + + IF, T = drax.get_IF(filename) + IF = IF.T + T = T.T + N = IF.shape[0] + deltaTT = T.T.dot(T) / N + deltaF = IF.T.dot(T) / N + pzt2tt = np.linalg.inv(deltaTT).dot(deltaF.T) + + Nact = f["Nact"][:] + N1 = np.linalg.inv(Nact) + Ctt = N1.dot(Ctt).dot(N1) + ttcomp = pzt2tt.dot(Ctt).dot(pzt2tt.T) + Ctt = Tf.dot(Ctt).dot(Tf.T) + cov_err = np.zeros((Ctt.shape[0] + 2, Ctt.shape[0] + 2)) + cov_err[:-2, :-2] = Ctt + cov_err[-2:, -2:] = ttcomp + if (modal): + cov_err = P.dot(cov_err).dot(P.T) + f.close() + + return cov_err + + +def test_Cerr(filename): + """ Compute PSF of aniso and bandwidth from GROOT model and ROKET to compare + + Args: + filename:(str):path to the ROKET file + """ + C = drax.get_covmat_contrib(filename, ["bandwidth", "tomography"]) + Cerr = compute_Cerr(filename) + _, _, psfr, _ = gamora.psf_rec_Vii(filename, covmodes=C.astype(np.float32), + fitting=False) + _, _, psf, _ = gamora.psf_rec_Vii(filename, cov=Cerr.astype(np.float32), + fitting=False) + drax.cutsPSF(filename, psfr, psf) + print("PSFR SR: ", psfr.max()) + print("PSF SR: ", psf.max()) + psf = drax.ensquare_PSF(filename, psf, 20) + psfr = drax.ensquare_PSF(filename, psfr, 20) + plt.matshow(np.log10(np.abs(psfr))) + plt.colorbar() + plt.title("PSF_R") + plt.matshow( + np.log10(np.abs(psf)), vmax=np.log10(np.abs(psfr)).max(), + vmin=np.log10(np.abs(psfr)).min()) + plt.colorbar() + plt.title("PSF") + plt.matshow( + np.log10(np.abs(psfr - psf)), vmax=np.log10(np.abs(psfr)).max(), + vmin=np.log10(np.abs(psfr)).min()) + plt.colorbar() + plt.title("PSF_R - PSF") + + return psf, psfr + + +def compare_GPU_vs_CPU(filename): + """ Compare results of GROOT vs its CPU version in terms of execution time + and precision on the PSF 
reconstruction
+ :parameter:
+ filename : (string) : full path to the ROKET file
+
+ """
+ timer = ch.carmaWrap_timer()
+
+ ch.threadSync()
+ timer.start()
+ ch.threadSync()
+ synctime = timer.stop()
+ timer.reset()
+
+ timer.start()
+ cov_err_gpu_s = compute_Cerr(filename)
+ ch.threadSync()
+ gpu_time_s = timer.stop() - synctime
+ timer.reset()
+
+ timer.start()
+ cov_err_gpu_d = compute_Cerr(filename, ctype="double")
+ ch.threadSync()
+ gpu_time_d = timer.stop() - synctime
+ timer.reset()
+
+ tic = time.time()
+ cov_err_cpu = compute_Cerr_cpu(filename)
+ tac = time.time()
+ cpu_time = tac - tic
+
+ otftel, otf2, psf_cpu, gpu = gamora.psf_rec_Vii(filename, fitting=False,
+ cov=cov_err_cpu.astype(np.float32))
+ otftel, otf2, psf_gpu_s, gpu = gamora.psf_rec_Vii(
+ filename, fitting=False, cov=cov_err_gpu_s.astype(np.float32))
+ otftel, otf2, psf_gpu_d, gpu = gamora.psf_rec_Vii(
+ filename, fitting=False, cov=cov_err_gpu_d.astype(np.float32))
+
+ print("-----------------------------------------")
+ print("CPU time : ", cpu_time, " s ")
+ print("GPU time simple precision : ", gpu_time_s, " s ")
+ print("GPU time double precision : ", gpu_time_d, " s ")
+ print("Max absolute difference in PSFs simple precision : ",
+ np.abs(psf_cpu - psf_gpu_s).max())
+ print("Max absolute difference in PSFs double precision : ",
+ np.abs(psf_cpu - psf_gpu_d).max())
+ gamora.cutsPSF(filename, psf_cpu, psf_gpu_s)
+ gamora.cutsPSF(filename, psf_cpu, psf_gpu_d)
+
+
+def compute_Ca_cpu(filename, modal=True):
+ """ Returns the aliasing error covariance matrix using CPU version of GROOT
+ from a ROKET file
+ :parameter:
+ filename : (string) : full path to the ROKET file
+ modal : (bool) : if True (default), Ca is returned in the Btt modal basis,
+ in the actuator basis if False
+ :return:
+ Ca : (np.ndarray(dim=2, dtype=np.float32)) : aliasing error covariance matrix
+ """
+ f = h5py.File(filename, 'r')
+ nsub = f["R"][:].shape[1] // 2
+ nssp = f.attrs["_Param_wfs__nxsub"][0]
+ validint =
f.attrs["_Param_tel__cobs"] + x = np.linspace(-1, 1, nssp) + x, y = np.meshgrid(x, x) + r = np.sqrt(x * x + y * y) + rorder = np.sort(r.reshape(nssp * nssp)) + ncentral = nssp * nssp - np.sum(r >= validint) + validext = rorder[ncentral + nsub] + valid = (r < validext) & (r >= validint) + ivalid = np.where(valid) + xvalid = ivalid[0] + 1 + yvalid = ivalid[1] + 1 + ivalid = (xvalid, yvalid) + d = f.attrs["_Param_tel__diam"] / (f.attrs["_Param_dm__nact"][0] - 1) + r0 = f.attrs["_Param_atmos__r0"] * (f.attrs["_Param_target__Lambda"] / 0.5)**( + 6. / 5.) + RASC = 180 / np.pi * 3600. + + scale = 0.23 * (d / r0)**(5 / 3.) * \ + (f.attrs["_Param_target__Lambda"] * 1e-6 / (2 * np.pi * d))**2 * RASC**2 + + mask = np.zeros((nssp + 2, nssp + 2)) + Ca = np.identity(nsub * 2) + + for k in range(nsub): + mask *= 0 + mask[xvalid[k], yvalid[k]] = 1 + mask[xvalid[k], yvalid[k] - 1] = -0.5 + mask[xvalid[k], yvalid[k] + 1] = -0.5 + Ca[k, :nsub] = mask[ivalid].flatten() + mask *= 0 + mask[xvalid[k], yvalid[k]] = 1 + mask[xvalid[k] - 1, yvalid[k]] = -0.5 + mask[xvalid[k] + 1, yvalid[k]] = -0.5 + Ca[k + nsub, nsub:] = mask[ivalid].flatten() + + R = f["R"][:] + Ca = R.dot(Ca * scale).dot(R.T) + if (modal): + P = f["P"][:] + Ca = P.dot(Ca).dot(P.T) + f.close() + return Ca + + +def compute_Cn_cpu(filename, model="data", modal=True): + """ Returns the noise error covariance matrix using CPU version of GROOT + from a ROKET file + :parameter: + filename : (string) : full path to the ROKET file + modal : (bool) : if True (default), Cn is returned in the Btt modal basis, + in the actuator basis if False + :return: + Cn : (np.ndarray(dim=2, dtype=np.float32)) : noise error covariance matrix + """ + f = h5py.File(filename, 'r') + if (model == "data"): + N = f["noise"][:] + Cn = N.dot(N.T) / N.shape[1] + if modal: + P = f["P"][:] + Cn = P.dot(Cn).dot(P.T) + else: + nslopes = f["R"][:].shape[1] + Cn = np.zeros(nslopes) + noise = f.attrs["_Param_wfs__noise"][0] + RASC = 180 / np.pi * 3600. 
+ if (noise >= 0): + Nph = f.attrs["_Param_wfs__zerop"] * 10 ** (-0.4 * f.attrs["_Param_wfs__gsmag"]) * \ + f.attrs["_Param_wfs__optthroughput"] * \ + (f.attrs["_Param_tel__diam"] / f.attrs["_Param_wfs__nxsub"] + ) ** 2. * f.attrs["_Param_loop__ittime"] + + r0 = (f.attrs["_Param_wfs__Lambda"] / 0.5)**( + 6.0 / 5.0) * f.attrs["_Param_atmos__r0"] + + sig = (np.pi ** 2 / 2) * (1 / Nph) * \ + (1. / r0) ** 2 # Photon noise in m^-2 + # Noise variance in arcsec^2 + sig = sig * ( + (f.attrs["_Param_wfs__Lambda"] * 1e-6) / (2 * np.pi))**2 * RASC**2 + + Ns = f.attrs["_Param_wfs__npix"] # Number of pixel + Nd = (f.attrs["_Param_wfs__Lambda"] * + 1e-6) * RASC / f.attrs["_Param_wfs__pixsize"] + sigphi = (np.pi ** 2 / 3.0) * (1 / Nph ** 2) * (f.attrs["_Param_wfs__noise"]) ** 2 * \ + Ns ** 2 * (Ns / Nd) ** 2 # Phase variance in m^-2 + # Noise variance in arcsec^2 + sigsh = sigphi * \ + ((f.attrs["_Param_wfs__Lambda"] * 1e-6) / (2 * np.pi)) ** 2 * RASC ** 2 + + Cn[:len(sig)] = sig + sigsh + Cn[len(sig):] = sig + sigsh + + Cn = np.diag(Cn) + R = f["R"][:] + Cn = R.dot(Cn).dot(R.T) + if (modal): + P = f["P"][:] + Cn = P.dot(Cn).dot(P.T) + f.close() + return Cn + + +def compute_OTF_fitting(filename, otftel): + """ + Modelize the OTF due to the fitting using dphi_highpass + + Args: + filename: (str) : ROKET hdf5 file path + otftel: (np.ndarray) : Telescope OTF + :return: + otf_fit: (np.ndarray) : Fitting OTF + psf_fit (np.ndarray) : Fitting PSF + """ + f = h5py.File(filename, 'r') + r0 = f.attrs["_Param_atmos__r0"] * (f.attrs["_Param_target__Lambda"][0] / 0.5)**( + 6. / 5.) 
+ ratio_lambda = 2 * np.pi / f.attrs["_Param_target__Lambda"][0] + # Telescope OTF + spup = drax.get_pup(filename) + mradix = 2 + fft_size = mradix**int((np.log(2 * spup.shape[0]) / np.log(mradix)) + 1) + mask = np.ones((fft_size, fft_size)) + mask[np.where(otftel < 1e-5)] = 0 + + x = np.arange(fft_size) - fft_size / 2 + pixsize = f.attrs["_Param_tel__diam"] / f.attrs["_Param_geom__pupdiam"] + x = x * pixsize + r = np.sqrt(x[:, None] * x[:, None] + x[None, :] * x[None, :]) + tabx, taby = starlord.tabulateIj0() + dphi = np.fft.fftshift( + starlord.dphi_highpass( + r, f.attrs["_Param_tel__diam"] / (f.attrs["_Param_dm__nact"][0] - 1), + tabx, taby) * (1 / r0)**(5 / 3.)) # * den * ratio_lambda**2 * mask + otf_fit = np.exp(-0.5 * dphi) * mask + otf_fit = otf_fit / otf_fit.max() + + psf_fit = np.fft.fftshift(np.real(np.fft.ifft2(otftel * otf_fit))) + psf_fit *= (fft_size * fft_size / float(np.where(spup)[0].shape[0])) + + f.close() + return otf_fit, psf_fit + + +def compute_PSF(filename): + """ + Modelize the PSF using GROOT model for aniso and bandwidth, Gendron model for aliasing, + dphi_highpass for fitting, noise extracted from datas. 
Non linearity not taken into account + + Args: + filename: (str) : ROKET hdf5 file path + :return: + psf: (np.ndarray) : PSF + """ + tic = time.time() + spup = drax.get_pup(filename) + Cab = compute_Cerr(filename) + Cn = compute_Cn_cpu(filename) + Ca = compute_Calias(filename) + Cee = Cab + Cn + Ca + otftel, otf2, psf, gpu = gamora.psf_rec_Vii(filename, fitting=False, + cov=(Cee).astype(np.float32)) + otf_fit, psf_fit = compute_OTF_fitting(filename, otftel) + psf = np.fft.fftshift(np.real(np.fft.ifft2(otf_fit * otf2 * otftel))) + psf *= (psf.shape[0] * psf.shape[0] / float(np.where(spup)[0].shape[0])) + tac = time.time() + print("PSF computed in ", tac - tic, " seconds") + + return psf + + +def compute_Calias_gpu(filename, slopes_space=False, modal=True, npts=3): + f = h5py.File(filename, 'r') + nsub = f["R"][:].shape[1] // 2 + nssp = f.attrs["_Param_wfs__nxsub"][0] + npix = f.attrs["_Param_wfs__npix"][0] + validint = f.attrs["_Param_tel__cobs"] + x = np.linspace(-1, 1, nssp) + x, y = np.meshgrid(x, x) + r = np.sqrt(x * x + y * y) + rorder = np.sort(r.reshape(nssp * nssp)) + ncentral = nssp * nssp - np.sum(r >= validint, dtype=np.int32) + validext = rorder[ncentral + nsub] + valid = (r < validext) & (r >= validint) + ivalid = np.where(valid) + r0 = f.attrs["_Param_atmos__r0"] + Lambda_wfs = f.attrs["_Param_wfs__Lambda"][0] + d = f.attrs["_Param_tel__diam"] / nssp + RASC = 180 / np.pi * 3600 + scale = 0.5 * (1 / r0)**(5 / 3) + c = (RASC * Lambda_wfs * 1e-6 / 2 / np.pi) / d**2 + h = d / (npts - 1) + x = (np.arange(nssp) - nssp / 2) * d + x, y = np.meshgrid(x, x) + x = x[ivalid].astype(np.float32) + y = y[ivalid].astype(np.float32) + fc = 1 / (2 * d) #/ npix + scale = scale * c**2 * (h / 3)**2 + coeff = simpson_coeff(npts) + weights = np.zeros(npts) + for k in range(npts): + weights[k] = (coeff[k:] * coeff[:npts - k]).sum() + groot = Groot(cxt, cxt.active_device, nsub, weights.astype(np.float32), scale, x, y, + fc, d, npts) + groot.compute_Calias() + CaXX = 
np.array(groot.d_CaXX) + Ca = np.zeros((2 * CaXX.shape[0], 2 * CaXX.shape[0])) + Ca[:CaXX.shape[0], :CaXX.shape[0]] = CaXX + Ca[CaXX.shape[0]:, CaXX.shape[0]:] = np.array(groot.d_CaYY) + if not slopes_space: + R = f["R"][:] + Ca = R.dot(Ca).dot(R.T) + if modal: + P = f["P"][:] + Ca = P.dot(Ca).dot(P.T) + f.close() + + return Ca + + +def compute_Calias(filename, slopes_space=False, modal=True, npts=3): + """ Returns the aliasing slopes covariance matrix using CPU version of GROOT + from a ROKET file and a model based on structure function + :parameter: + filename : (string) : full path to the ROKET file + slopes_space: (bool): (optionnal) if True, return the covariance matrix in the slopes space + modal: (bool): (optionnal) if True, return the covariance matrix in the modal space + :return: + Ca : (np.ndarray(dim=2, dtype=np.float32)) : aliasing error covariance matrix + """ + + f = h5py.File(filename, 'r') + tabx, taby = starlord.tabulateIj0() + nsub = f["R"][:].shape[1] // 2 + nssp = f.attrs["_Param_wfs__nxsub"][0] + npix = f.attrs["_Param_wfs__npix"][0] + validint = f.attrs["_Param_tel__cobs"] + x = np.linspace(-1, 1, nssp) + x, y = np.meshgrid(x, x) + r = np.sqrt(x * x + y * y) + rorder = np.sort(r.reshape(nssp * nssp)) + ncentral = nssp * nssp - np.sum(r >= validint, dtype=np.int32) + validext = rorder[ncentral + nsub] + valid = (r < validext) & (r >= validint) + ivalid = np.where(valid) + r0 = f.attrs["_Param_atmos__r0"] + Lambda_wfs = f.attrs["_Param_wfs__Lambda"][0] + d = f.attrs["_Param_tel__diam"] / nssp + RASC = 180 / np.pi * 3600 + scale = 0.5 * (1 / r0)**(5 / 3) + c = (RASC * Lambda_wfs * 1e-6 / 2 / np.pi) / d**2 + x = (np.arange(nssp) - nssp / 2) * d + x, y = np.meshgrid(x, x) + x = x[ivalid] + y = y[ivalid] + fc = d #/ npix + xx = np.tile(x, (nsub, 1)) + yy = np.tile(y, (nsub, 1)) + # Ca = compute_Calias_element(xx, yy, fc, d, nsub, tabx, taby) + # Ca += compute_Calias_element(xx, yy, fc, d, nsub, tabx, taby, xoff=0.5) + # Ca += 
compute_Calias_element(xx, yy, fc, d, nsub, tabx, taby, xoff=-0.5) + # Ca += compute_Calias_element(xx, yy, fc, d, nsub, tabx, taby, yoff=0.5) + # Ca += compute_Calias_element(xx, yy, fc, d, nsub, tabx, taby, yoff=-0.5) + # Ca = Ca * scale / 5 + Ca = np.zeros((2 * nsub, 2 * nsub)) + coeff = simpson_coeff(npts) + # for k in tqdm(range(npts)): + # weight = (coeff[k:] * coeff[:npts - k]).sum() + # Ca += compute_Calias_element_XX(xx, yy, fc, d, nsub, tabx, taby, yoff=k / + # (npts - 1)) * weight + # Ca += compute_Calias_element_YY(xx, yy, fc, d, nsub, tabx, taby, xoff=k / + # (npts - 1)) * weight + # if k > 0: + # Ca += compute_Calias_element_XX(xx, yy, fc, d, nsub, tabx, taby, yoff=-k / + # (npts - 1)) * weight + # Ca += compute_Calias_element_YY(xx, yy, fc, d, nsub, tabx, taby, xoff=-k / + # (npts - 1)) * weight + if (npts > 1): + h = d / (npts - 1) + else: + h = 1 + for k in tqdm(range(npts)): + for p in tqdm(range(npts)): + Ca += (compute_Calias_element_XX(xx, yy, fc, d, nsub, tabx, taby, + yoff=(k - p) * h) * coeff[k] * coeff[p]) + Ca += (compute_Calias_element_YY(xx, yy, fc, d, nsub, tabx, taby, + xoff=(k - p) * h) * coeff[k] * coeff[p]) + + if not slopes_space: + R = f["R"][:] + Ca = R.dot(Ca).dot(R.T) + if modal: + P = f["P"][:] + Ca = P.dot(Ca).dot(P.T) + f.close() + + return Ca * scale * c**2 * (h / 3)**2 + + +def simpson_coeff(n): + """ + Returns the n weights to apply for a Simpson integration on n elements + Args: + n: (int): number of elements, must be odd + :return: + coeff: (np.array[ndims=1,dtype=np.int64]): simpson coefficients + """ + if (n == 1): + coeff = np.ones(n) + else: + if (n % 2): + coeff = np.ones(n) + coeff[1::2] = 4 + coeff[2:-1:2] = 2 + else: + raise ValueError("n must be odd") + + return coeff + + +def compute_Calias_element_XX(xx, yy, fc, d, nsub, tabx, taby, xoff=0, yoff=0): + """ + Compute the element of the aliasing covariance matrix + + Args: + Ca: (np.ndarray(ndim=2, dtype=np.float32)): aliasing covariance matrix to fill + xx: 
(np.ndarray(ndim=2, dtype=np.float32)): X positions of the WFS subap
+ yy: (np.ndarray(ndim=2, dtype=np.float32)): Y positions of the WFS subap
+ fc: (float): cut-off frequency for structure function
+ d: (float): subap diameter
+ nsub: (int): number of subap
+ tabx: (np.ndarray(ndim=1, dtype=np.float32)): X tabulation for dphi
+ taby: (np.ndarray(ndim=1, dtype=np.float32)): Y tabulation for dphi
+ xoff: (float) : (optional) offset to apply on the WFS xpos (units of d)
+ yoff: (float) : (optional) offset to apply on the WFS ypos (units of d)
+ """
+ xx = xx - xx.T #+ xoff * d
+ yy = yy - yy.T #+ yoff * d
+ #xx = np.triu(xx) - np.triu(xx, -1).T
+ #yy = np.triu(yy) - np.triu(yy, -1).T
+ Ca = np.zeros((2 * nsub, 2 * nsub))
+
+ # XX covariance
+ AB = np.linalg.norm([xx, yy + yoff], axis=0)
+ Ab = np.linalg.norm([xx - d, yy + yoff], axis=0)
+ aB = np.linalg.norm([xx + d, yy + yoff], axis=0)
+ ab = AB
+
+ Ca[:nsub, :nsub] += starlord.dphi_highpass(
+ Ab, fc, tabx, taby) + starlord.dphi_highpass(
+ aB, fc, tabx, taby) - 2 * starlord.dphi_highpass(AB, fc, tabx, taby)
+
+ return Ca
+
+
+def compute_Calias_element_YY(xx, yy, fc, d, nsub, tabx, taby, xoff=0, yoff=0):
+ """
+ Compute the element of the aliasing covariance matrix
+
+ Args:
+ Ca: (np.ndarray(ndim=2, dtype=np.float32)): aliasing covariance matrix to fill
+ xx: (np.ndarray(ndim=2, dtype=np.float32)): X positions of the WFS subap
+ yy: (np.ndarray(ndim=2, dtype=np.float32)): Y positions of the WFS subap
+ fc: (float): cut-off frequency for structure function
+ d: (float): subap diameter
+ nsub: (int): number of subap
+ tabx: (np.ndarray(ndim=1, dtype=np.float32)): X tabulation for dphi
+ taby: (np.ndarray(ndim=1, dtype=np.float32)): Y tabulation for dphi
+ xoff: (float) : (optional) offset to apply on the WFS xpos (units of d)
+ yoff: (float) : (optional) offset to apply on the WFS ypos (units of d)
+ """
+ xx = xx - xx.T #+ xoff * d
+ yy = yy - yy.T #+ yoff * d
+ #xx = np.triu(xx) - np.triu(xx, -1).T
+ #yy =
np.triu(yy) - np.triu(yy, -1).T
+ Ca = np.zeros((2 * nsub, 2 * nsub))
+
+ # YY covariance
+ CD = np.linalg.norm([xx + xoff, yy], axis=0)
+ Cd = np.linalg.norm([xx + xoff, yy - d], axis=0)
+ cD = np.linalg.norm([xx + xoff, yy + d], axis=0)
+ cd = CD
+
+ Ca[nsub:, nsub:] += starlord.dphi_highpass(
+ Cd, fc, tabx, taby) + starlord.dphi_highpass(
+ cD, fc, tabx, taby) - 2 * starlord.dphi_highpass(CD, fc, tabx, taby)
+
+ return Ca
+
+
+def compute_Calias_element_XY(xx, yy, fc, d, nsub, tabx, taby, xoff=0, yoff=0):
+ """
+ Compute the element of the aliasing covariance matrix
+
+ Args:
+ Ca: (np.ndarray(ndim=2, dtype=np.float32)): aliasing covariance matrix to fill
+ xx: (np.ndarray(ndim=2, dtype=np.float32)): X positions of the WFS subap
+ yy: (np.ndarray(ndim=2, dtype=np.float32)): Y positions of the WFS subap
+ fc: (float): cut-off frequency for structure function
+ d: (float): subap diameter
+ nsub: (int): number of subap
+ tabx: (np.ndarray(ndim=1, dtype=np.float32)): X tabulation for dphi
+ taby: (np.ndarray(ndim=1, dtype=np.float32)): Y tabulation for dphi
+ xoff: (float) : (optional) offset to apply on the WFS xpos (units of d)
+ yoff: (float) : (optional) offset to apply on the WFS ypos (units of d)
+ """
+ xx = xx - xx.T + xoff * d
+ yy = yy - yy.T + yoff * d
+ Ca = np.zeros((2 * nsub, 2 * nsub))
+
+ # XY covariance
+ aD = np.linalg.norm([xx + d / 2, yy + d / 2], axis=0)
+ ad = np.linalg.norm([xx + d / 2, yy - d / 2], axis=0)
+ Ad = np.linalg.norm([xx - d / 2, yy - d / 2], axis=0)
+ AD = np.linalg.norm([xx - d / 2, yy + d / 2], axis=0)
+
+ Ca[nsub:, :nsub] = 0.25 * (
+ starlord.dphi_highpass(Ad, d, tabx, taby) + starlord.dphi_highpass(
+ aD, d, tabx, taby) - starlord.dphi_highpass(AD, d, tabx, taby) -
+ starlord.dphi_highpass(ad, d, tabx, taby))
+ Ca[:nsub, nsub:] = Ca[nsub:, :nsub].copy()
+ return Ca
+
+
+def compute_Calias_element(xx, yy, fc, d, nsub, tabx, taby, xoff=0, yoff=0):
+ """
+ Compute the element of the aliasing covariance matrix
+
+ Args:
+ Ca:
(np.ndarray(ndim=2, dtype=np.float32)): aliasing covariance matrix to fill + xx: (np.ndarray(ndim=2, dtype=np.float32)): X positions of the WFS subap + yy: (np.ndarray(ndim=2, dtype=np.float32)): Y positions of the WFS subap + fc: (float): cut-off frequency for structure function + d: (float): subap diameter + nsub: (int): number of subap + tabx: (np.ndarray(ndim=1, dtype=np.float32)): X tabulation for dphi + taby: (np.ndarray(ndim=1, dtype=np.float32)): Y tabulation for dphi + xoff: (float) : (optionnal) offset to apply on the WFS xpos (units of d) + yoff: (float) : (optionnal) offset to apply on the WFS ypos (units of d) + """ + xx = xx - xx.T + xoff * d + yy = yy - yy.T + yoff * d + Ca = np.zeros((2 * nsub, 2 * nsub)) + + # XX covariance + AB = np.linalg.norm([xx, yy], axis=0) + Ab = np.linalg.norm([xx - d, yy], axis=0) + aB = np.linalg.norm([xx + d, yy], axis=0) + ab = AB + + Ca[:nsub, :nsub] += starlord.dphi_highpass( + Ab, fc, tabx, taby) + starlord.dphi_highpass( + aB, fc, tabx, taby) - 2 * starlord.dphi_highpass(AB, fc, tabx, taby) + + # YY covariance + CD = AB + Cd = np.linalg.norm([xx, yy - d], axis=0) + cD = np.linalg.norm([xx, yy + d], axis=0) + cd = CD + + Ca[nsub:, nsub:] += starlord.dphi_highpass( + Cd, fc, tabx, taby) + starlord.dphi_highpass( + cD, fc, tabx, taby) - 2 * starlord.dphi_highpass(CD, fc, tabx, taby) + + # XY covariance + + # aD = np.linalg.norm([xx + d/2, yy + d/2], axis=0) + # ad = np.linalg.norm([xx + d/2, yy - d/2], axis=0) + # Ad = np.linalg.norm([xx - d/2, yy - d/2], axis=0) + # AD = np.linalg.norm([xx - d/2, yy + d/2], axis=0) + # + # Ca[nsub:,:nsub] = 0.25 * (starlord.dphi_highpass(Ad, d, tabx, taby) + # + starlord.dphi_highpass(aD, d, tabx, taby) + # - starlord.dphi_highpass(AD, d, tabx, taby) + # - starlord.dphi_highpass(ad, d, tabx, taby)) * (1 / r0)**(5. / 3.) 
+ # Ca[:nsub,nsub:] = Ca[nsub:,:nsub].copy() + return Ca + + +def compute_dCmm(filename, ws=None, wd=None, dk=1): + """ Returns the derivative slopes covariance matrix using CPU version of GROOT + from a ROKET file and a model based on structure function + :parameter: + filename : (string) : full path to the ROKET file + ws: (np.array[ndim=1, dtype=np.float32]): wind speed per layer [m/s] + wd: (np.array[ndim=1, dtype=np.float32]): wind direction per layer [deg] + dk: (int): slopes shift [iterations] + :return: + dCmm : (np.ndarray(dim=2, dtype=np.float32)) : d/dt(slopes)*slopes + """ + + f = h5py.File(filename, 'r') + if ws is None: + ws = f.attrs["_Param_atmos__windspeed"] + if wd is None: + wd = f.attrs["_Param_atmos__winddir"] + dt = f.attrs["_Param_loop__ittime"] * dk + L0 = f.attrs["_Param_atmos__L0"] + frac = f.attrs["_Param_atmos__frac"] + nsub = f["R"][:].shape[1] // 2 + nssp = f.attrs["_Param_wfs__nxsub"][0] + validint = f.attrs["_Param_tel__cobs"] + x = np.linspace(-1, 1, nssp) + x, y = np.meshgrid(x, x) + r = np.sqrt(x * x + y * y) + rorder = np.sort(r.reshape(nssp * nssp)) + ncentral = nssp * nssp - np.sum(r >= validint, dtype=np.int32) + validext = rorder[ncentral + nsub] + valid = (r < validext) & (r >= validint) + ivalid = np.where(valid) + r0 = f.attrs["_Param_atmos__r0"] + Lambda_wfs = f.attrs["_Param_wfs__Lambda"][0] + d = f.attrs["_Param_tel__diam"] / nssp + RASC = 180 / np.pi * 3600 + scale = 0.5 * (1 / r0)**(5 / 3) * (RASC * Lambda_wfs * 1e-6 / 2 / np.pi)**2 / d**2 + x = (np.arange(nssp) - nssp / 2) * d + x, y = np.meshgrid(x, x) + x = x[ivalid] + y = y[ivalid] + xx = np.tile(x, (nsub, 1)) + yy = np.tile(y, (nsub, 1)) + f.close() + dCmm = np.zeros((2 * nsub, 2 * nsub)) + for k in range(ws.size): + dCmm += frac[k] * compute_dCmm_element(xx, yy, d, nsub, ws[k], wd[k], dt, L0[k]) + + return dCmm * scale + + +def compute_dCmm_element(xx, yy, d, nsub, ws, wd, dt, L0): + """ + Compute the element of the derivative slopes covariance matrix + + Args: 
+ xx: (np.ndarray(ndim=2, dtype=np.float32)): X positions of the WFS subap + yy: (np.ndarray(ndim=2, dtype=np.float32)): Y positions of the WFS subap + d: (float): subap diameter + nsub: (int): number of subap + ws: (float): wind speed per layer [m/s] + wd: (float): wind direction per layer [deg] + dt: (float): iteration time [s] + L0: (float): outer scale [m] + """ + xij = xx - xx.T + yij = yy - yy.T + dCmm = np.zeros((2 * nsub, 2 * nsub)) + vdt = ws * dt + wd = wd / 180 * np.pi + + # XX covariance + AB = np.linalg.norm([-xij + vdt * np.cos(wd), -yij + vdt * np.sin(wd)], axis=0) + Ab = np.linalg.norm([-xij - d + vdt * np.cos(wd), -yij + vdt * np.sin(wd)], axis=0) + aB = np.linalg.norm([-xij + d + vdt * np.cos(wd), -yij + vdt * np.sin(wd)], axis=0) + + dCmm[:nsub, :nsub] += starlord.rodconan(Ab, L0) + starlord.rodconan( + aB, L0) - 2 * starlord.rodconan(AB, L0) + + AB = np.linalg.norm([xij + vdt * np.cos(wd), yij + vdt * np.sin(wd)], axis=0) + Ab = np.linalg.norm([xij - d + vdt * np.cos(wd), yij + vdt * np.sin(wd)], axis=0) + aB = np.linalg.norm([xij + d + vdt * np.cos(wd), yij + vdt * np.sin(wd)], axis=0) + + dCmm[:nsub, :nsub] -= (starlord.rodconan(Ab, L0) + starlord.rodconan(aB, L0) - + 2 * starlord.rodconan(AB, L0)) + + # YY covariance + CD = np.linalg.norm([-xij + vdt * np.cos(wd), -yij + vdt * np.sin(wd)], axis=0) + Cd = np.linalg.norm([-xij + vdt * np.cos(wd), -yij - d + vdt * np.sin(wd)], axis=0) + cD = np.linalg.norm([-xij + vdt * np.cos(wd), -yij + d + vdt * np.sin(wd)], axis=0) + + dCmm[nsub:, nsub:] += starlord.rodconan(Cd, L0) + starlord.rodconan( + cD, L0) - 2 * starlord.rodconan(CD, L0) + + CD = np.linalg.norm([xij + vdt * np.cos(wd), yij + vdt * np.sin(wd)], axis=0) + Cd = np.linalg.norm([xij + vdt * np.cos(wd), yij - d + vdt * np.sin(wd)], axis=0) + cD = np.linalg.norm([xij + vdt * np.cos(wd), yij + d + vdt * np.sin(wd)], axis=0) + + dCmm[nsub:, nsub:] -= (starlord.rodconan(Cd, L0) + starlord.rodconan(cD, L0) - + 2 * starlord.rodconan(CD, L0)) + # 
XY covariance + + # aD = np.linalg.norm([xx + d/2, yy + d/2], axis=0) + # ad = np.linalg.norm([xx + d/2, yy - d/2], axis=0) + # Ad = np.linalg.norm([xx - d/2, yy - d/2], axis=0) + # AD = np.linalg.norm([xx - d/2, yy + d/2], axis=0) + # + # dCmm[nsub:,:nsub] = 0.25 * (starlord.rodconan(Ad, d, tabx, taby) + # + starlord.dphi_highpass(aD, d, tabx, taby) + # - starlord.dphi_highpass(AD, d, tabx, taby) + # - starlord.dphi_highpass(ad, d, tabx, taby)) * (1 / r0)**(5. / 3.) + # dCmm[:nsub,nsub:] = dCmm[nsub:,:nsub].copy() + return 0.25 * dCmm diff --git a/guardians/misc/correlations/bash_script.sh b/guardians/misc/correlations/bash_script.sh new file mode 100755 index 0000000..c179c3b --- /dev/null +++ b/guardians/misc/correlations/bash_script.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +WINDDIR="0 45 90 135 180" +WINDSPEED="10 15 20" +GAIN="0.1 0.2 0.3 0.4" +OUTPUT="$SHESHA_ROOT/test/roket/correlations/outputfile" + +echo "writing output in "$OUTPUT + +script="$SHESHA_ROOT/test/roket/correlations/script_roket_cpu.py" + +for D in $WINDDIR +do + for S in $WINDSPEED + do + for G in $GAIN + do + + CMD="ipython $script $D $S $G" + echo "execute $CMD" >> $OUTPUT + $CMD 2>> $OUTPUT >> $OUTPUT + done + done +done diff --git a/guardians/misc/correlations/correlation_bokeh.py b/guardians/misc/correlations/correlation_bokeh.py new file mode 100644 index 0000000..feb6b39 --- /dev/null +++ b/guardians/misc/correlations/correlation_bokeh.py @@ -0,0 +1,340 @@ +""" +Created on Wed Oct 5 14:28:23 2016 + +@author: fferreira +""" + +import numpy as np +import h5py +import pandas +from bokeh.plotting import Figure, figure +from bokeh.models import Range1d, ColumnDataSource, HoverTool +from bokeh.models.widgets import Select, Slider, CheckboxButtonGroup, Panel, Tabs, Button, Dialog, Paragraph, RadioButtonGroup, TextInput +from bokeh.io import curdoc +from bokeh.models.layouts import HBox, VBox +from bokeh.models.widgets import DataTable, DateFormatter, TableColumn +from bokeh.client import 
push_session +import glob +import matplotlib.pyplot as plt +from scipy.special import jv # Bessel function +plt.ion() + + +def dphi_highpass(r, x0, tabx, taby): + return (r** + (5. / 3.)) * (1.1183343328701949 - Ij0t83(r * (np.pi / x0), tabx, taby)) * ( + 2 * (2 * np.pi)**(8 / 3.) * 0.0228956) + + +def dphi_lowpass(r, x0, L0, tabx, taby): + return rodconan(r, L0) - dphi_highpass(r, x0, tabx, taby) + #return (r**(5./3.)) * Ij0t83(r*(np.pi/x0), tabx, taby) * (2*(2*np.pi)**(8/3.)*0.0228956) + + +def Ij0t83(x, tabx, taby): + if (x < np.exp(-3.0)): + return 0.75 * x**(1. / 3) * (1 - x**2 / 112.) + else: + return np.interp(x, tabx, taby) + + +def unMoinsJ0(x): + # if(x<0.1): + # x22 = (x/2.)**2 + # return (1-x22/4.)*x22 + # else: + return 1 - jv(0, x) + + +def tabulateIj0(L0): + n = 10000 + t = np.linspace(-4, 10, n) + dt = (t[-1] - t[0]) / (n - 1) + smallx = np.exp(-4.0) + A = 0.75 * smallx**(1. / 3) * (1 - smallx**2 / 112.) + X = np.exp(t) + #Y = np.exp(-t*(5./3.))*unMoinsJ0(X) + Y = (np.exp(2 * t) + (1. / L0)**2)**(-8. / 6.) * unMoinsJ0(X) * np.exp(t) + Y[1:] = np.cumsum(Y[:-1] + np.diff(Y) / 2.) + Y[0] = 0. + Y = Y * dt + A + + return X, Y + + +def asymp_macdo(x): + k2 = 1.00563491799858928388289314170833 + k3 = 1.25331413731550012081 + a1 = 0.22222222222222222222 + a2 = -0.08641975308641974829 + a3 = 0.08001828989483310284 + + x_1 = 1. / x + res = k2 - k3 * np.exp(-x) * x**(1. / 3.) * (1.0 + x_1 * (a1 + x_1 * + (a2 + x_1 * a3))) + return res + + +def macdo(x): + a = 5. / 6. + x2a = x**(2. * a) + x22 = x * x / 4. 
+ s = 0.0 + + Ga = [ + 0, 12.067619015983075, 5.17183672113560444, 0.795667187867016068, + 0.0628158306210802181, 0.00301515986981185091, 9.72632216068338833e-05, + 2.25320204494595251e-06, 3.93000356676612095e-08, 5.34694362825451923e-10, + 5.83302941264329804e-12 + ] + + Gma = [ + -3.74878707653729304, -2.04479295083852408, -0.360845814853857083, + -0.0313778969438136685, -0.001622994669507603, -5.56455315259749673e-05, + -1.35720808599938951e-06, -2.47515152461894642e-08, -3.50257291219662472e-10, + -3.95770950530691961e-12, -3.65327031259100284e-14 + ] + + x2n = 0.5 + + s = Gma[0] * x2a + s *= x2n + + x2n *= x22 + + for n in np.arange(10) + 1: + + s += (Gma[n] * x2a + Ga[n]) * x2n + x2n *= x22 + + return s + + +def rodconan(r, L0): + res = 0 + k1 = 0.1716613621245709486 + dprf0 = (2 * np.pi / L0) * r + if (dprf0 > 4.71239): + res = asymp_macdo(dprf0) + else: + res = -macdo(dprf0) + + res *= k1 * L0**(5. / 3.) + + return res + + +def variance(f, contributors, method="Default"): + """ Return the error variance of specified contributors + params: + f : (h5py.File) : roket hdf5 file opened with h5py + contributors : (list of string) : list of the contributors + method : (optional, default="Default") : if "Independence", the + function returns ths sum of the contributors variances. + If "Default", it returns the variance of the contributors sum + """ + P = f["P"][:] + nmodes = P.shape[0] + swap = np.arange(nmodes) - 2 + swap[0:2] = [nmodes - 2, nmodes - 1] + if (method == b"Default"): + err = f[contributors[0]][:] * 0. 
+ for c in contributors: + err += f[c][:] + return np.var(P.dot(err), axis=1)[swap], np.var( + P.dot(f["tomography"][:]), + axis=1)[swap], np.var(P.dot(f["bandwidth"][:]), axis=1)[swap] + + elif (method == b"Independence"): + nmodes = P.shape[0] + v = np.zeros(nmodes) + for c in contributors: + v += np.var(P.dot(f[c][:]), axis=1) + return v[swap] + + else: + raise TypeError("Wrong method input") + + +def varianceMultiFiles(fs, frac_per_layer, contributors): + """ Return the variance computed from the sum of contributors of roket + files fs, ponderated by frac + params: + fs : (list) : list of hdf5 files opened with h5py + frac_per_layer : (dict) : frac for each layer + contributors : (list of string) : list of the contributors + return: + v : (np.array(dim=1)) : variance vector + """ + f = fs[0] + P = f["P"][:] + nmodes = P.shape[0] + swap = np.arange(nmodes) - 2 + swap[0:2] = [nmodes - 2, nmodes - 1] + err = f[contributors[0]][:] * 0. + for f in fs: + frac = frac_per_layer[f.attrs["atm.alt"][0]] + for c in contributors: + err += np.sqrt(frac) * f[c][:] + + return np.var(P.dot(err), axis=1)[swap] + + +def cumulativeSR(v, Lambda_tar): + """ Returns the cumulative Strehl ratio over the modes from the variance + on each mode + params: + v : (np.array(dim=1)) : variance vector + return: + s : (np.array(dim=1)) : cumulative SR + """ + s = np.cumsum(v) + s = np.exp(-s * (2 * np.pi / Lambda_tar)**2) + + return s + + +def update(attrs, old, new): + speed = speed_select.value + direction = dir_select.value + g = gain_select.value + xname = xaxis_select.value + yname = yaxis_select.value + + ydata = ymap[yname] + x = xmap[xname] + + ind = np.ones(ydata.shape[0]) + if (direction != "All"): + ind *= (xmap["Winddir"] == float(direction)) + if (speed != "All"): + ind *= (xmap["Windspeed"] == float(speed)) + if (g != "All"): + ind *= (xmap["Gain"] == float(g)) + + ind = np.where(ind) + if (yname == b"Var(t)"): + Hthetak = Htheta / xmap["Gain"] + y_model = np.ones(ind[0].shape[0]) 
+ #y_model = y_model * 6.88 * (Htheta/r0)**(5./3.) * 0.5 + for k in range(ind[0].shape[0]): + y_model[k] = dphi_lowpass(Htheta, 0.2, L0, tabx, taby) * (1 / r0)**( + 5. / 3.) * 0.5 #* xmap["Gain"][ind][k]**2 + if (yname == b"Var(bp)"): + vdt = xmap["Windspeed"] * dt / xmap["Gain"] + y_model = np.zeros(vdt[ind].shape[0]) + for k in range(vdt[ind].shape[0]): + y_model[k] = dphi_lowpass(vdt[ind][k], 0.2, L0, tabx, + taby) * (1. / r0)**(5. / 3.) * 0.5 + if (yname == b"Covar"): + vdt = xmap["Windspeed"] * dt / xmap["Gain"] + Hthetak = Htheta / xmap["Gain"] + gamma = np.arctan2(ypos, xpos) - xmap["Winddir"] * np.pi / 180. + rho = np.sqrt(Htheta**2 + (vdt)**2 - 2 * Htheta * vdt * np.cos(gamma)) + Drho = np.zeros(rho[ind].shape[0]) + Dt = Drho.copy() + for k in range(rho[ind].shape[0]): + Drho[k] = dphi_lowpass(rho[ind][k], 0.2, L0, tabx, + taby) * (1 / r0)**(5. / 3.) + #Drho = 6.88 * (rho[ind]/r0)**(5./3.) + for k in range(Dt.shape[0]): + Dt[k] = dphi_lowpass(Htheta, 0.2, L0, tabx, taby) * (1 / r0)**( + 5. / 3.) # * xmap["Gain"][ind][k]**2 + #Dt = 6.88 * (Htheta/r0)**(5./3.) + Dbp = np.zeros(vdt[ind].shape[0]) + for k in range(vdt[ind].shape[0]): + Dbp[k] = dphi_lowpass(vdt[ind][k], 0.2, L0, tabx, taby) * (1 / r0)**(5. / 3.) + #Dbp = 6.88 * (vdt[ind]/r0) ** (5./3.) 
+ y_model = 0.5 * (Dt + Dbp - Drho) + + source.data = dict(x=x[ind], y=ydata[ind], speed=xmap["Windspeed"][ind], + theta=xmap["Winddir"][ind], gain=xmap["Gain"][ind]) + source_model.data = dict(x=x[ind], y=y_model, speed=xmap["Windspeed"][ind], + theta=xmap["Winddir"][ind], gain=xmap["Gain"][ind]) + + +datapath = "/home/fferreira/Data/correlation/" +filenames = glob.glob(datapath + "roket_8m_1layer_dir*_cpu.h5") + +files = [] +for f in filenames: + ff = h5py.File(f, mode='r') + #if(ff.attrs["validity"]): + files.append(ff) + +nmodes = (files[0])["P"][:].shape[0] +xpos = files[0].attrs["wfs.xpos"][0] +ypos = files[0].attrs["wfs.ypos"][0] +contributors = ["tomography", "bandwidth"] +Lambda_tar = files[0].attrs["target.Lambda"][0] +Lambda_wfs = files[0].attrs["wfs.Lambda"][0] +L0 = files[0].attrs["L0"][0] +dt = files[0].attrs["ittime"] +H = files[0].attrs["atm.alt"][0] +RASC = 180 / np.pi * 3600. +Htheta = np.linalg.norm( + [xpos, ypos] +) / RASC * H # np.sqrt(2)*4/RASC*H # Hardcoded for angular separation of sqrt(2)*4 arcsec +r0 = files[0].attrs["r0"] * (Lambda_tar / Lambda_wfs)**(6. / 5.) +nfiles = len(files) +data = np.zeros((nmodes, 4, nfiles)) +theta = np.zeros(nfiles) +speeds = np.zeros(nfiles) +gain = np.zeros(nfiles) + +tabx, taby = tabulateIj0(L0) + +# data[:,0,i] = var(tomo+bp) for file #i +# data[:,1,i] = var(tomo) for file #i +# data[:,2,i] = var(bp) for file #i +# data[:,3,i] = var(tomo)+var(bp) for file #i +ind = 0 +for f in files: + data[:, 0, ind], data[:, 1, ind], data[:, 2, ind] = variance(f, contributors) + data[:, 3, ind] = variance(f, contributors, method="Independence") + theta[ind] = f.attrs["winddir"][0] + speeds[ind] = f.attrs["windspeed"][0] + gain[ind] = float('%.1f' % f.attrs["gain"][0]) + ind += 1 +data = data * ((2 * np.pi / Lambda_tar)**2) +covar = (data[:, 0, :] - data[:, 3, :]) / 2. 
+ +xaxis_select = Select(title="X-axis", value="Windspeed", + options=["Windspeed", "Winddir", "Gain"]) +yaxis_select = Select( + title="Y-axis", value="Covar", + options=["Covar", "Var(t+bp)", "Var(t)", "Var(bp)", "Var(t)+Var(bp)"]) + +speed_select = Select(title="Windspeeds", value="All", + options=["All"] + [str(s) for s in np.unique(speeds)]) +dir_select = Select(title="Winddirs", value="All", + options=["All"] + [str(s) for s in np.unique(theta)]) +gain_select = Select(title="Gain", value="All", + options=["All"] + [str(s)[:3] for s in np.unique(gain)]) +source = ColumnDataSource(data=dict(x=[], y=[], speed=[], theta=[], gain=[])) +source_model = ColumnDataSource(data=dict(x=[], y=[], speed=[], theta=[], gain=[])) +hover = HoverTool(tooltips=[("Speed", "@speed"), ("Winddir", "@theta"), ("Gain", + "@gain")]) +TOOLS = "resize,save,pan,box_zoom,tap, box_select, wheel_zoom, lasso_select,reset" + +p = figure(plot_height=600, plot_width=700, title="", tools=[hover, TOOLS]) +p.circle(x="x", y="y", source=source, size=7, color="blue") +p.circle(x="x", y="y", source=source_model, size=7, color="red") + +xmap = {"Windspeed": speeds, "Winddir": theta, "Gain": gain} +ymap = { + "Covar": np.sum(covar, axis=0), + "Var(t+bp)": np.sum(data[:, 0, :], axis=0), + "Var(t)": np.sum(data[:, 1, :], axis=0), + "Var(bp)": np.sum(data[:, 2, :], axis=0), + "Var(t)+Var(bp)": np.sum(data[:, 3, :], axis=0) +} + +buttons = [xaxis_select, speed_select, dir_select, yaxis_select, gain_select] +for b in buttons: + b.on_change('value', update) + +curdoc().clear() +update(None, None, None) +curdoc().add_root( + HBox(VBox(xaxis_select, yaxis_select, speed_select, dir_select, gain_select), p)) diff --git a/guardians/misc/correlations/correlation_study.py b/guardians/misc/correlations/correlation_study.py new file mode 100644 index 0000000..20719d3 --- /dev/null +++ b/guardians/misc/correlations/correlation_study.py @@ -0,0 +1,737 @@ +""" +Created on Wed Oct 5 14:28:23 2016 + +@author: fferreira 
+"""
+
+import numpy as np
+import h5py
+import glob
+import sys
+sys.path.append('/home/fferreira/compass/shesha/test/roket/tools/')
+sys.path.append('/home/fferreira/compass/shesha/test/gamora/')
+import Dphi
+import roket_exploitation as rexp
+import gamora
+import shesha as ao
+import matplotlib.pyplot as plt
+plt.ion()
+import matplotlib
+import time
+font = {'family': 'normal', 'weight': 'bold', 'size': 22}
+
+matplotlib.rc('font', **font)
+
+
+def compute_psf(filename):
+    """ Reconstruct the PSF of a roket file with the Vii algorithm. """
+    otftel, otf, psf, gpu = gamora.psf_rec_Vii(filename)
+    return psf
+
+
+def compute_psf_independence(filename):
+    """ Reconstruct the PSF of a roket file under the independence
+    assumption between error contributors. """
+    cov_err = rexp.get_coverr_independence(filename)
+    # fixed: was psf_rec_Vii(filenames[11], ...) — reconstructed the PSF of a
+    # hard-coded global file instead of the requested one (copy-paste bug)
+    otfteli, otf2i, psfi, gpu = gamora.psf_rec_Vii(filename, covmodes=cov_err)
+    return psfi
+
+
+def compute_and_compare_PSFs(filename, plot=False):
+    f = h5py.File(filename, 'r')
+    psf_compass = np.fft.fftshift(f["psf"][:])
+    #psf = compute_psf(filename)
+    #psfi = compute_psf_independence(filename)
+    psfc = 0
+    psfs = 0
+    tic = time.time()
+    Caniso, Cbp, Ccov = compute_covariance_model(filename)
+
+    # project the model covariances through the inverse coupling matrix
+    Nact = f["Nact"][:]
+    N1 = np.linalg.inv(Nact)
+    Caniso = N1.dot(Caniso).dot(N1)
+    Cbp = N1.dot(Cbp).dot(N1)
+    Ccov = N1.dot(Ccov).dot(N1)
+
+    # filter piston + tip-tilt from each term, then re-inject the tip-tilt
+    # part expressed on the dedicated TT mirror modes
+    Ccov_filtered = filter_piston_TT(filename, Ccov)
+    Ctt = add_TT_model(filename, Ccov)
+    Ctt[:-2, :-2] = Ccov_filtered
+    Ctt = Ctt + Ctt.T
+    Caniso_filtered = filter_piston_TT(filename, Caniso)
+    tmp = add_TT_model(filename, Caniso)
+    tmp[:-2, :-2] = Caniso_filtered
+    Ctt += tmp
+    Cbp_filtered = filter_piston_TT(filename, Cbp)
+    tmp = add_TT_model(filename, Cbp)
+    tmp[:-2, :-2] = Cbp_filtered
+    Ctt += tmp
+    #contributors = ["noise","aliasing","non linearity","filtered modes"]
+    #cov_err = rexp.get_coverr_independence_contributors(filename,contributors)
+    P = f["P"][:]
+    cov_err = P.dot(Ctt).dot(P.T)
+    otftels, otf2s, psfs, gpu = gamora.psf_rec_Vii(filename, fitting=False,
+                                                   cov=cov_err.astype(np.float32))
+    tac = time.time()
+    print("PSF estimated in ", tac - tic, " seconds")
+    t =
f["tomography"][:] + b = f["bandwidth"][:] + tb = t + b + tb = tb.dot(tb.T) / float(tb.shape[1]) + cov_err = P.dot(tb).dot(P.T) + otftel, otf2, psf, gpu = gamora.psf_rec_Vii(filename, fitting=False, + cov=cov_err.astype(np.float32)) + if (plot): + Lambda_tar = f.attrs["target.Lambda"][0] + RASC = 180 / np.pi * 3600. + pixsize = Lambda_tar * 1e-6 / ( + psf.shape[0] * f.attrs["tel_diam"] / f.attrs["pupdiam"]) * RASC + x = (np.arange(psf.shape[0]) - psf.shape[0] / 2) * pixsize / ( + Lambda_tar * 1e-6 / f.attrs["tel_diam"] * RASC) + plt.figure() + plt.subplot(2, 1, 1) + plt.semilogy(x, psf[psf.shape[0] / 2, :], color="blue") + plt.semilogy(x, psfs[psf.shape[0] / 2, :], color="red") + plt.xlabel("X-axis angular distance [units of lambda/D]") + plt.ylabel("Normalized intensity") + plt.legend(["PSF exp", "PSF model"]) + + #plt.semilogy(x,psf_compass[psf.shape[0]/2,:],color="red") + plt.legend(["PSF exp", "PSF model"]) + plt.subplot(2, 1, 2) + plt.semilogy(x, psf[:, psf.shape[0] / 2], color="blue") + plt.semilogy(x, psfs[:, psf.shape[0] / 2], color="red") + plt.xlabel("Y-axis angular distance [units of lambda/D]") + plt.ylabel("Normalized intensity") + + #plt.semilogy(x,psf_compass[psf.shape[0]/2,:],color="red") + plt.legend(["PSF exp", "PSF model"]) + ''' + if(correction): + #plt.semilogy(x,psfc[psf.shape[0]/2,:],color="purple") + if(synth): + plt.semilogy(x,psfs[psf.shape[0]/2,:],color="black") + #plt.legend(["PSF rec","PSF ind. assumption", "PSF COMPASS", "PSF corrected", "PSF synth"]) + plt.legend(["PSF rec","PSF ind. assumption", "PSF COMPASS", "PSF synth"]) + else: + plt.legend(["PSF rec","PSF ind. assumption", "PSF COMPASS", "PSF corrected"]) + elif(synth): + plt.semilogy(x,psfs[psf.shape[0]/2,:],color="purple") + plt.legend(["PSF rec","PSF ind. assumption", "PSF COMPASS", "PSF synth"]) + + else: + plt.legend(["PSF rec","PSF ind. 
assumption", "PSF COMPASS"]) + ''' + + f.close() + return psf_compass, psf, psfs + + +def filter_piston_TT(filename, C): + IF, T = rexp.get_IF(filename) + IF = IF.T + T = T.T + N = IF.shape[0] + n = IF.shape[1] + + delta = IF.T.dot(IF).toarray() / N + + # Tip-tilt + piston + Tp = np.ones((T.shape[0], T.shape[1] + 1)) + Tp[:, :2] = T.copy() #.toarray() + deltaT = IF.T.dot(Tp) / N + # Tip tilt projection on the pzt dm + tau = np.linalg.inv(delta).dot(deltaT) + + # Famille generatrice sans tip tilt + G = np.identity(n) + tdt = tau.T.dot(delta).dot(tau) + subTT = tau.dot(np.linalg.inv(tdt)).dot(tau.T).dot(delta) + G -= subTT + + return G.T.dot(C).dot(G) + + +def filter_TT(filename, C): + IF, T = rexp.get_IF(filename) + IF = IF.T + T = T.T + N = IF.shape[0] + n = IF.shape[1] + + delta = IF.T.dot(IF).toarray() / N + + deltaT = IF.T.dot(T) / N + # Tip tilt projection on the pzt dm + tau = np.linalg.inv(delta).dot(deltaT) + + # Famille generatrice sans tip tilt + G = np.identity(n) + tdt = tau.T.dot(delta).dot(tau) + subTT = tau.dot(np.linalg.inv(tdt)).dot(tau.T).dot(delta) + G -= subTT + + return G.T.dot(C).dot(G) + + +def compute_covariance_model(filename): + f = h5py.File(filename, 'r') + Lambda_tar = f.attrs["target.Lambda"][0] + Lambda_wfs = f.attrs["wfs.Lambda"] + dt = f.attrs["ittime"] + gain = f.attrs["gain"] + wxpos = f.attrs["wfs.xpos"][0] + wypos = f.attrs["wfs.ypos"][0] + r0 = f.attrs["r0"] * (Lambda_tar / Lambda_wfs)**(6. / 5.) + RASC = 180. / np.pi * 3600. 
+ xpos = f["dm.xpos"][:] + ypos = f["dm.ypos"][:] + p2m = f.attrs["tel_diam"] / f.attrs["pupdiam"] + pupshape = int(2**np.ceil(np.log2(f.attrs["pupdiam"]) + 1)) + xactu = (xpos - pupshape / 2) * p2m + yactu = (ypos - pupshape / 2) * p2m + Ccov = np.zeros((xpos.size, xpos.size)) + Caniso = np.zeros((xpos.size, xpos.size)) + Cbp = np.zeros((xpos.size, xpos.size)) + xx = np.tile(xactu, (xactu.shape[0], 1)) + yy = np.tile(yactu, (yactu.shape[0], 1)) + xij = xx - xx.T + yij = yy - yy.T + + for l in range(f.attrs["nscreens"]): + H = f.attrs["atm.alt"][l] + L0 = f.attrs["L0"][l] + speed = f.attrs["windspeed"][l] + theta = f.attrs["winddir"][l] * np.pi / 180. + frac = f.attrs["frac"][l] + + Htheta = np.linalg.norm([wxpos, wypos]) / RASC * H + vdt = speed * dt / gain + # Covariance matrices models on actuators space + M = np.zeros((xpos.size, xpos.size)) + Mvdt = M.copy() + Mht = M.copy() + Mhvdt = M.copy() + angleht = np.arctan2(wypos, wxpos) + fc = xactu[1] - xactu[0] + #fc = 0.05 + + M = np.linalg.norm([xij, yij], axis=0) + Mvdt = np.linalg.norm([xij - vdt * np.cos(theta), yij - vdt * np.sin(theta)], + axis=0) + Mht = np.linalg.norm( + [xij - Htheta * np.cos(angleht), yij - Htheta * np.sin(angleht)], axis=0) + Mhvdt = np.linalg.norm([ + xij - vdt * np.cos(theta) - Htheta * np.cos(angleht), + yij - vdt * np.sin(theta) - Htheta * np.sin(angleht) + ], axis=0) + # for i in range(xpos.size): + # for j in range(xpos.size): + # Mvdt[i,j] = (np.sqrt((xactu[i]-(xactu[j]-vdt*np.cos(theta)))**2 + (yactu[i]-(yactu[j]-vdt*np.sin(theta)))**2)) + # M[i,j] = (np.sqrt((xactu[i]-xactu[j])**2 + (yactu[i]-yactu[j])**2)) + # Mht[i,j] = (np.sqrt((xactu[i]-(xactu[j]-Htheta*np.cos(angleht)))**2 + (yactu[i]-(yactu[j]-Htheta*np.sin(angleht)))**2)) + # #Mhvdt[i,j] = (np.sqrt((xactu[i]-(xactu[j]+rho*np.cos(anglehvdt)))**2 + (yactu[i]-(yactu[j]+rho*np.sin(anglehvdt)))**2)) + # Mhvdt[i,j] = (np.sqrt(((xactu[i]+vdt*np.cos(theta))-(xactu[j]-Htheta*np.cos(angleht)))**2 + 
((yactu[i]+vdt*np.sin(theta))-(yactu[j]-Htheta*np.sin(angleht)))**2)) + + Ccov += 0.5 * (Dphi.dphi_lowpass(Mhvdt,fc,L0,tabx,taby) - Dphi.dphi_lowpass(Mht,fc,L0,tabx,taby) \ + - Dphi.dphi_lowpass(Mvdt,fc,L0,tabx,taby) + Dphi.dphi_lowpass(M,fc,L0,tabx,taby)) * (1./r0)**(5./3.) * frac + + Caniso += 0.5 * (Dphi.dphi_lowpass(Mht, fc, L0, tabx, taby) - Dphi.dphi_lowpass( + M, fc, L0, tabx, taby)) * (1. / r0)**(5. / 3.) * frac + Cbp += 0.5 * (Dphi.dphi_lowpass(Mvdt, fc, L0, tabx, taby) - Dphi.dphi_lowpass( + M, fc, L0, tabx, taby)) * (1. / r0)**(5. / 3.) * frac + + #Sp = (f.attrs["tel_diam"]/f.attrs["nxsub"])**2/2. + Sp = (Lambda_tar / (2 * np.pi))**2 #/3.45 + f.close() + return (Caniso + Caniso.T) * Sp, (Cbp + Cbp.T) * Sp, Ccov * Sp + + +def add_TT_model(filename, Ccov): + C = np.zeros((Ccov.shape[0] + 2, Ccov.shape[0] + 2)) + IF, T = rexp.get_IF(filename) + IF = IF.T + T = T.T + N = IF.shape[0] + deltaTT = T.T.dot(T) / N + deltaF = IF.T.dot(T) / N + pzt2tt = np.linalg.inv(deltaTT).dot(deltaF.T) + + CTT = Ccov - filter_TT(filename, Ccov) + C[-2:, -2:] = pzt2tt.dot(CTT).dot(pzt2tt.T) + C[:-2, :-2] = Ccov + + return C + + +def load_datas(files): + nmodes = (files[0])["P"][:].shape[0] + P = (files[0])["P"][:] + xpos = files[0].attrs["wfs.xpos"][0] + ypos = files[0].attrs["wfs.ypos"][0] + contributors = ["tomography", "bandwidth"] + Lambda_tar = files[0].attrs["target.Lambda"][0] + Lambda_wfs = files[0].attrs["wfs.Lambda"][0] + L0 = files[0].attrs["L0"][0] + dt = files[0].attrs["ittime"] + H = files[0].attrs["atm.alt"][0] + RASC = 180 / np.pi * 3600. + Htheta = np.linalg.norm( + [xpos, ypos] + ) / RASC * H # np.sqrt(2)*4/RASC*H # Hardcoded for angular separation of sqrt(2)*4 arcsec + r0 = files[0].attrs["r0"] * (Lambda_tar / Lambda_wfs)**(6. / 5.) 
+    nfiles = len(files)
+    vartomo = np.zeros((nfiles, nmodes))
+    varbp = np.zeros((nfiles, nmodes))
+    vartot = np.zeros((nfiles, nmodes))
+    theta = np.zeros(nfiles)
+    speeds = np.zeros(nfiles)
+    gain = np.zeros(nfiles)
+    # data[:,0,i] = var(tomo+bp) for file #i
+    # data[:,1,i] = var(tomo) for file #i
+    # data[:,2,i] = var(bp) for file #i
+    # data[:,3,i] = var(tomo)+var(bp) for file #i
+    ind = 0
+    print("Loading data...")
+    for f in files:
+        vartot[ind, :] = rexp.variance(f, contributors) * ((2 * np.pi / Lambda_tar)**2)
+        vartomo[ind, :] = rexp.variance(f, ["tomography"]) * (
+                (2 * np.pi / Lambda_tar)**2)
+        varbp[ind, :] = rexp.variance(f, ["bandwidth"]) * ((2 * np.pi / Lambda_tar)**2)
+        theta[ind] = f.attrs["winddir"][0]
+        speeds[ind] = f.attrs["windspeed"][0]
+        gain[ind] = float('%.1f' % f.attrs["gain"][0])
+        ind += 1
+        print(ind, "/", len(files))
+
+    covar = (vartot - (vartomo + varbp)) / 2.
+
+    stot = np.sum(vartot, axis=1)
+    sbp = np.sum(varbp, axis=1)
+    stomo = np.sum(vartomo, axis=1)
+    scov = np.sum(covar, axis=1)
+
+    return stot, sbp, stomo, scov, covar
+
+
+def ensquare_PSF(filename, psf, N, display=False):
+    """ Extract the central N*(lambda/D)-wide square of a PSF. """
+    f = h5py.File(filename, 'r')
+    Lambda_tar = f.attrs["target.Lambda"][0]
+    RASC = 180 / np.pi * 3600.
+    pixsize = Lambda_tar * 1e-6 / (
+            psf.shape[0] * f.attrs["tel_diam"] / f.attrs["pupdiam"]) * RASC
+    x = (np.arange(psf.shape[0]) - psf.shape[0] / 2) * pixsize / (
+            Lambda_tar * 1e-6 / f.attrs["tel_diam"] * RASC)
+    w = int(N * (Lambda_tar * 1e-6 / f.attrs["tel_diam"] * RASC) / pixsize)
+    # fixed: integer division — a float 'mid' makes the slices below raise
+    # TypeError under Python 3
+    mid = psf.shape[0] // 2
+    psfe = psf[mid - w:mid + w, mid - w:mid + w]
+    if (display):
+        plt.matshow(np.log10(psfe))
+        xt = np.linspace(0, psfe.shape[0] - 1, 6).astype(np.int32)
+        yt = np.linspace(-N, N, 6).astype(np.int32)
+        plt.xticks(xt, yt)
+        plt.yticks(xt, yt)
+
+    f.close()
+    return psf[mid - w:mid + w, mid - w:mid + w]
+
+
+def cutsPSF(filename, psf, psfs):
+    f = h5py.File(filename, 'r')
+    Lambda_tar = f.attrs["target.Lambda"][0]
+    RASC = 180 / np.pi * 3600.
+    pixsize = Lambda_tar * 1e-6 / (
+            psf.shape[0] * f.attrs["tel_diam"] / f.attrs["pupdiam"]) * RASC
+    x = (np.arange(psf.shape[0]) - psf.shape[0] / 2) * pixsize / (
+            Lambda_tar * 1e-6 / f.attrs["tel_diam"] * RASC)
+    plt.figure()
+    plt.subplot(2, 1, 1)
+    # fixed: integer division — float array indices raise TypeError under
+    # Python 3
+    plt.semilogy(x, psf[psf.shape[0] // 2, :], color="blue")
+    plt.semilogy(x, psfs[psf.shape[0] // 2, :], color="red")
+    plt.xlabel("X-axis angular distance [units of lambda/D]")
+    plt.ylabel("Normalized intensity")
+    plt.legend(["PSF exp", "PSF model"])
+    plt.xlim(-20, 20)
+    plt.ylim(1e-5, 1)
+    plt.subplot(2, 1, 2)
+    plt.semilogy(x, psf[:, psf.shape[0] // 2], color="blue")
+    plt.semilogy(x, psfs[:, psf.shape[0] // 2], color="red")
+    plt.xlabel("Y-axis angular distance [units of lambda/D]")
+    plt.ylabel("Normalized intensity")
+    plt.legend(["PSF exp", "PSF model"])
+    plt.xlim(-20, 20)
+    plt.ylim(1e-5, 1)
+    f.close()
+
+
+def Hcor(f, Fe, g, dt):
+    """ Squared modulus of the closed-loop rejection transfer function. """
+    p = 1j * 2 * np.pi * f
+    return np.abs(1 / (1 + g * Fe / p * np.exp(-dt * p)))**2
+
+
+def Hretard(f, Fe, g, dt):
+    """ Squared modulus of the pure-delay transfer function. """
+    p = 1j * 2 * np.pi * f
+    return np.abs(1 - np.exp(-p * dt / g))**2
+
+
+def compareTransferFunctions(filename):
+    """ Plot the rejection and delay transfer functions of a roket file. """
+    rfile = h5py.File(filename, 'r')
+    v = rfile.attrs["windspeed"][0]
+    dt = rfile.attrs["ittime"]
+    Fe = 1 / dt
+    g = rfile.attrs["gain"]
+    Lambda_tar = rfile.attrs["target.Lambda"][0]
+    Lambda_wfs = rfile.attrs["wfs.Lambda"][0]
+    r0 = rfile.attrs["r0"] * (Lambda_tar / Lambda_wfs)**(6. / 5.)
+ d = rfile.attrs["tel_diam"] / rfile.attrs["nxsub"] + fc = 0.314 * v / d + f = np.linspace(0.1, fc * 1.5, 1000) + H = Hcor(f, Fe, g, dt) + Hr = Hretard(f, Fe, g, dt) + plt.figure() + plt.plot(f, H) + plt.plot(f, Hr, color="red") + plt.plot([fc, fc], [H.min(), Hr.max()]) + rfile.close() + + +datapath = '/home/fferreira/Data/correlation/' +filenames = glob.glob(datapath + 'roket_8m_1layer_dir*_cpu.h5') +files = [] +for f in filenames: + files.append(h5py.File(f, 'r')) + +tabx, taby = Dphi.tabulateIj0() + +nfiles = len(filenames) +theta = np.zeros(nfiles) +speeds = np.zeros(nfiles) +gain = np.zeros(nfiles) +SRcompass = np.zeros(nfiles) +SRroket = np.zeros(nfiles) +SRi = np.zeros(nfiles) +fROKET = h5py.File('ROKETStudy.h5', 'r') +psfr = fROKET["psf"][:] +psfi = fROKET["psfi"][:] +nrjcompass = np.zeros(nfiles) +nrjroket = np.zeros(nfiles) +nrji = np.zeros(nfiles) + +ind = 0 +for f in files: + theta[ind] = f.attrs["winddir"][0] + speeds[ind] = f.attrs["windspeed"][0] + gain[ind] = float('%.1f' % f.attrs["gain"][0]) + SRcompass[ind] = f["psf"][:].max() + SRroket[ind] = psfr[:, :, ind].max() + SRi[ind] = psfi[:, :, ind].max() + nrjcompass[ind] = np.sum( + ensquare_PSF(filenames[ind], np.fft.fftshift(f["psf"][:]), + 5)) / f["psf"][:].sum() + nrjroket[ind] = np.sum(ensquare_PSF(filenames[ind], psfr[:, :, ind], + 5)) / psfr[:, :, ind].sum() + nrji[ind] = np.sum(ensquare_PSF(filenames[ind], psfi[:, :, ind], + 5)) / psfi[:, :, ind].sum() + ind += 1 +""" +eSR = np.abs(SRroket-SRcompass) / SRcompass +eSRi = np.abs(SRi - SRcompass) / SRcompass +enrj = np.abs(nrjroket-nrjcompass) / nrjcompass +enrji = np.abs(nrji-nrjcompass) / nrjcompass + +plt.figure() +plt.scatter(SRcompass,SRroket,s=200) +plt.plot([SRcompass.min(),SRcompass.max()],[SRcompass.min(),SRcompass.max()],color="red") +plt.xlabel("COMPASS Strehl ratio") +plt.ylabel("ROKET Strehl ratio") +plt.figure() +plt.scatter(nrjcompass,nrjroket,s=200) 
+plt.plot([nrjcompass.min(),nrjcompass.max()],[nrjcompass.min(),nrjcompass.max()],color="red") +plt.xlabel("COMPASS PSF ensquared energy") +plt.ylabel("ROKET PSF ensquared energy") + +colors = ["blue","red","green","black","yellow"] +plt.figure() +indc = 0 +for t in np.unique(theta): + ind = np.where(theta == t) + plt.scatter(SRcompass[ind],SRi[ind],s=200,color=colors[indc]) + indc += 1 +plt.legend(["0 deg","45 deg","90 deg","135 deg","180 deg"]) +plt.plot([SRcompass.min(),SRcompass.max()],[SRcompass.min(),SRcompass.max()],color="red") +plt.xlabel("COMPASS Strehl ratio") +plt.ylabel("ROKET Strehl ratio") + +plt.figure() +indc = 0 +for t in np.unique(theta): + ind = np.where(theta == t) + plt.scatter(nrjcompass[ind],nrji[ind],s=200,color=colors[indc]) + indc += 1 +plt.legend(["0 deg","45 deg","90 deg","135 deg","180 deg"]) +plt.plot([nrjcompass.min(),nrjcompass.max()],[nrjcompass.min(),nrjcompass.max()],color="red") +plt.xlabel("COMPASS PSF ensquared energy") +plt.ylabel("ROKET PSF ensquared energy") +""" +f = h5py.File('corStudy_Nact.h5', 'r') +psf = f["psf"][:] +psfs = f["psfs"][:] +nrj = f["nrj5"][:] +nrjs = f["nrj5s"][:] +SR = np.max(psf, axis=(0, 1)) +SRs = np.max(psfs, axis=(0, 1)) + +colors = ["blue", "red", "green"] +plt.figure() +k = 0 +for g in np.unique(gain): + plt.subplot(2, 2, k + 1) + plt.title("g = %.1f" % (g)) + for i in range(len(colors)): + c = colors[i] + v = np.unique(speeds)[i] + ind = np.where((gain == g) * (speeds == v)) + plt.scatter(SR[ind], SRs[ind], color=c, s=200) + plt.legend(["10 m/s", "15 m/s", "20 m/s"], loc=2) + plt.xlabel("SR ROKET") + plt.ylabel("SR model") + plt.plot([SR.min(), SR.max()], [SR.min(), SR.max()], color="red") + k += 1 +""" +# Illustration du probleme +#psf_compass, psf, psfs = compute_and_compare_PSFs(filenames[13],plot=True) +# psf_compass = np.zeros((2048,2048,len(filenames))) +# psf = np.zeros((2048,2048,len(filenames))) +# psfi = np.zeros((2048,2048,len(filenames))) +# SR = np.zeros(len(filenames)) +# SRi = 
np.zeros(len(filenames)) +# SRcompass = np.zeros(len(filenames)) +# nrj20 = np.zeros(len(filenames)) +# nrj20s = np.zeros(len(filenames)) +# nrj5 = np.zeros(len(filenames)) +# nrj5i = np.zeros(len(filenames)) +# nrj5compass = np.zeros(len(filenames)) +# +# cc = 0 +# for f in filenames: +# ff = h5py.File(f,'r') +# psf_compass[:,:,cc] = np.fft.fftshift(ff["psf"][:]) +# psf[:,:,cc] = compute_psf(f) +# psfi[:,:,cc] = compute_psf_independence(f) +# #psf_compass, psf[:,:,cc], psfs[:,:,cc] = compute_and_compare_PSFs(f) +# SR[cc] = psf[:,:,cc].max() +# SRi[cc] = psfi[:,:,cc].max() +# SRcompass[cc] = psf_compass[:,:,cc].max() +# # nrj20[cc] = ensquare_PSF(f,psf[:,:,cc],20).sum()/psf[:,:,cc].sum() +# # nrj20s[cc] = ensquare_PSF(f,psfs[:,:,cc],20).sum()/psfs[:,:,cc].sum() +# nrj5[cc] = ensquare_PSF(f,psf[:,:,cc],5).sum()/psf[:,:,cc].sum() +# nrj5i[cc] = ensquare_PSF(f,psfi[:,:,cc],5).sum()/psfi[:,:,cc].sum() +# nrj5compass[cc] = ensquare_PSF(f,psf_compass[:,:,cc],5).sum()/psf_compass[:,:,cc].sum() +# +# cc +=1 +# ff.close() +# print(cc) +# +# f = h5py.File("ROKETStudy.h5") +# f["psf"] = psf +# f["filenames"] = filenames +# f["psfi"] = psfi +# f["psf_compass"] = psf_compass +# f["SR"] = SR +# f["SRi"] = SRi +# f["SRcompass"] = SRcompass +# #f["nrj20"] = nrj20 +# #f["nrj20s"] = nrj20s +# f["nrj5"] = nrj5 +# f["nrj5i"] = nrj5i +# f["nrj5compass"] = nrj5compass +# f.close() + + + + + + +files = [] +for f in filenames: + files.append(h5py.File(f,'r')) + + +# Datas + +nmodes = (files[0])["P"][:].shape[0] +P = (files[0])["P"][:] +xpos = files[0].attrs["wfs.xpos"][0] +ypos = files[0].attrs["wfs.ypos"][0] +contributors = ["tomography", "bandwidth"] +Lambda_tar = files[0].attrs["target.Lambda"][0] +Lambda_wfs = files[0].attrs["wfs.Lambda"][0] +L0 = files[0].attrs["L0"][0] +dt = files[0].attrs["ittime"] +H = files[0].attrs["atm.alt"][0] +RASC = 180/np.pi * 3600. 
+Htheta = np.linalg.norm([xpos,ypos])/RASC*H# np.sqrt(2)*4/RASC*H # Hardcoded for angular separation of sqrt(2)*4 arcsec +r0 = files[0].attrs["r0"] * (Lambda_tar/Lambda_wfs)**(6./5.) +nfiles = len(files) +vartomo = np.zeros((nfiles,nmodes)) +varbp = np.zeros((nfiles,nmodes)) +vartot = np.zeros((nfiles,nmodes)) +theta = np.zeros(nfiles) +speeds = np.zeros(nfiles) +gain = np.zeros(nfiles) + +# Illustration du probleme +otftel, otf2, psf, gpu = gamora.psf_rec_Vii(filenames[11]) +cov_err = rexp.get_coverr_independence(filenames[11]) +otfteli, otf2i, psfi, gpu = gamora.psf_rec_Vii(filenames[11],covmodes=cov_err) +psf_compass = np.fft.fftshift(files[11]["psf"][:]) +RASC = 180/np.pi*3600. +pixsize = Lambda_tar*1e-6 / (psf.shape[0] * 8./640) * RASC +x = (np.arange(psf.shape[0]) - psf.shape[0]/2) * pixsize / (Lambda_tar*1e-6/8. * RASC) +plt.semilogy(x,psf[psf.shape[0]/2,:]) +plt.semilogy(x,psfi[psf.shape[0]/2,:],color="green") +plt.semilogy(x,psf_compass[psf.shape[0]/2,:],color="red") +plt.xlabel("Angular distance [units of lambda/D]") +plt.ylabel("Normalized intensity") +plt.legend(["PSF rec","PSF ind. assumption", "PSF COMPASS"]) + + +# data[:,0,i] = var(tomo+bp) for file #i +# data[:,1,i] = var(tomo) for file #i +# data[:,2,i] = var(bp) for file #i +# data[:,3,i] = var(tomo)+var(bp) for file #i +ind = 0 +print("Loading data...") +for f in files: + #vartot[ind,:] = rexp.variance(f, contributors) * ((2*np.pi/Lambda_tar)**2) + #vartomo[ind,:] = rexp.variance(f, ["tomography"]) * ((2*np.pi/Lambda_tar)**2) + #varbp[ind,:] = rexp.variance(f, ["bandwidth"]) * ((2*np.pi/Lambda_tar)**2) + theta[ind] = f.attrs["winddir"][0] + speeds[ind] = f.attrs["windspeed"][0] + gain[ind] = float('%.1f' % f.attrs["gain"][0]) + ind += 1 + print(ind,"/",len(files)) + +covar = (vartot - (vartomo+varbp))/2. 
+ +stot = np.sum(vartot,axis=1) +sbp = np.sum(varbp,axis=1) +stomo = np.sum(vartomo,axis=1) +scov = np.sum(covar,axis=1) + +# Model + +print("Building models...") +vdt = speeds*dt/gain +Htheta = np.ones(nfiles) * Htheta +gamma = np.arctan2(ypos,xpos) - theta*np.pi/180. +rho = np.sqrt(Htheta**2 + (vdt)**2 - 2*Htheta*vdt*np.cos(np.pi-gamma)) +# Covariance matrices models on actuators space +xpos = files[11]["dm.xpos"][:] +ypos = files[11]["dm.ypos"][:] +p2m = files[11].attrs["tel_diam"] / files[11].attrs["pupdiam"] +pupshape = long(2 ** np.ceil(np.log2(files[11].attrs["pupdiam"]) + 1)) +xactu = (xpos - pupshape/2) * p2m +yactu = (ypos - pupshape/2) * p2m +M = np.zeros((1304,1304)) +Mvdt = M.copy() +Mht = M.copy() +Mhvdt = M.copy() +angleht = np.arctan2(files[11].attrs["wfs.ypos"][0],files[11].attrs["wfs.xpos"][0]) +anglehvdt = gamma/2. - theta*np.pi/180. +thetar = theta*np.pi/180. + +for i in range(1304): + for j in range(1304): + Mvdt[i,j] = (np.sqrt((xactu[i]-(xactu[j]+vdt[11]*np.cos(thetar[11])))**2 + (yactu[i]-(yactu[j]+vdt[11]*np.sin(thetar[11])))**2)) + M[i,j] = (np.sqrt((xactu[i]-xactu[j])**2 + (yactu[i]-yactu[j])**2)) + Mht[i,j] = (np.sqrt((xactu[i]-(xactu[j]+Htheta[11]*np.cos(angleht)))**2 + (yactu[i]-(yactu[j]+Htheta[11]*np.sin(angleht)))**2)) + #Mhvdt[i,j] = (np.sqrt((xactu[i]-(xactu[j]+rho[11]*np.cos(anglehvdt[11])))**2 + (yactu[i]-(yactu[j]+rho[11]*np.sin(anglehvdt[11])))**2)) + Mhvdt[i,j] = (np.sqrt(((xactu[i]-vdt[11]*np.cos(thetar[11]))-(xactu[j]+Htheta[11]*np.cos(angleht)))**2 + ((yactu[i]-vdt[11]*np.sin(thetar[11]))-(yactu[j]+Htheta[11]*np.sin(angleht)))**2)) + +Ccov = (Dphi.dphi_lowpass(Mhvdt,0.2,L0,tabx,taby) - Dphi.dphi_lowpass(Mht,0.2,L0,tabx,taby) \ + - Dphi.dphi_lowpass(Mvdt,0.2,L0,tabx,taby) + Dphi.dphi_lowpass(M,0.2,L0,tabx,taby)) * (1/r0)**(5./3.) + + +mtomo = Dphi.dphi_lowpass(Htheta,0.2,L0, tabx, taby) * (1/r0)**(5./3.) +mbp = Dphi.dphi_lowpass(vdt ,0.2, L0, tabx, taby) * (1/r0)**(5./3.) 
+mtot = Dphi.dphi_lowpass(rho,0.2,L0,tabx,taby) * (1/r0)**(5./3.) + +# Piston correction +print("Computing piston correction...") +pup = gamora.get_pup(filenames[11]) +r = np.zeros((8192,8192)) +p2m = files[11].attrs["tel_diam"]/pup.shape[0] +Npts = files[11]["indx_pup"].size +for k in range(r.shape[0]): + for j in range(r.shape[0]): + r[k,j] = np.sqrt((k-r.shape[0]/2+0.5)**2+(j-r.shape[0]/2+0.5)**2) * p2m + +ipup = np.zeros((8192,8192)) +ipup[3776:3776+640,3776:3776+640] = pup +dphi_map = Dphi.dphi_lowpass(r,0.2,L0,tabx,taby) * (1/r0)**(5./3.) +fpup = np.fft.fft2(ipup)#,s=[8192,8192]) +fdphi = np.fft.fft2(dphi_map)#,s=[8192,8192]) +fconv = fpup * fpup * fdphi +dconv = np.fft.ifft2(fconv).real / Npts / Npts +mini = np.where(dconv == dconv.min()) +dutil = dconv[mini[0][0],mini[1][0]:] +#Avdt = dconv[mini[0]+(vdt/p2m).astype(np.int16),mini[1]] - dconv[mini] +Pbp = np.interp(vdt/p2m,np.arange(dutil.shape[0]),dutil) - dutil[0] +Ptomo = (np.interp(Htheta/gain/p2m,np.arange(dutil.shape[0]),dutil) - dutil[0])*gain**2 +Ptot = np.interp(rho/p2m,np.arange(dutil.shape[0]),dutil) - dutil[0] + +mtomo -= Ptomo +mbp -= Pbp +mtot -= Ptot +mcov = (-mtomo - mbp + mtot)*0.5 + + +# Correction on psf +m = (np.arange(nmodes)+1)**(-5/6.) +m /= m.sum() +m = m * (mcov[11] / (2*np.pi/Lambda_tar)**2) + + +cov_err2 = P.dot(cov_err).dot(P.T) + 2*np.diag(m) +otftelc, otf2c, psfc, gpu = gamora.psf_rec_Vii(filenames[11],cov=cov_err2.astype(np.float32)) +plt.figure() +plt.semilogy(x,psf_compass[psf.shape[0]/2,:],color="red") +plt.semilogy(x,psfi[psf.shape[0]/2,:],color="green") +plt.semilogy(x,psfc[psf.shape[0]/2,:],color="blue") +plt.xlabel("Angular distance [units of lambda/D]") +plt.ylabel("Normalized intensity") +plt.legend([ "PSF COMPASS","PSF ind. 
assumption","PSF corrected"]) + + +xpos = files[11]["dm.xpos"][:] +ypos = files[11]["dm.ypos"][:] +dm_dim = files[11]["dm_dim"].value +xpos -= (pupshape-dm_dim)/2 +ypos -= (pupshape-dm_dim)/2 +influ = np.load("influ.npy") +influ2 = np.zeros((dm_dim,dm_dim)) +tmp = influ2.copy() +indx_pup = files[11]["indx_pup"][:] +pup = np.zeros((dm_dim,dm_dim)).flatten() +pup[indx_pup] = 1 +pup = pup.reshape((dm_dim,dm_dim)) +ind2 = np.where(pup) +influshape = influ.shape[0] +A = np.zeros((xpos.size,xpos.size)) +rr = Htheta[11] +xx = np.cos(0*theta[11]*np.pi/180.)*rr/p2m +yy = np.sin(0*theta[11]*np.pi/180.)*rr/p2m +pup2 = pup.copy()*0. +pup2[(ind2[0]+xx).astype(np.int32),(ind2[1]+yy).astype(np.int32)] = 1. + +for i in range(xpos.size): + influ2 *=0 + influ2[xpos[i]-influshape/2+1:xpos[i]+influshape/2+1,ypos[i]-influshape/2+1:ypos[i]+influshape/2+1] = influ + influ2 *= pup + for j in range(xpos.size): + if(tmp[xpos[j]-influshape/2+1+xx:xpos[j]+influshape/2+1+xx,ypos[j]-influshape/2+1+yy:ypos[j]+influshape/2+1+yy].shape == influ.shape): + tmp *=0 + tmp[xpos[j]-influshape/2+1+xx:xpos[j]+influshape/2+1+xx,ypos[j]-influshape/2+1+yy:ypos[j]+influshape/2+1+yy] = influ + tmp *= pup2 + A[i,j] = (influ2*tmp).sum() + else: + A[i,j] = 0. 
+ print(i) +""" diff --git a/guardians/misc/correlations/script_roket.py b/guardians/misc/correlations/script_roket.py new file mode 100644 index 0000000..7458585 --- /dev/null +++ b/guardians/misc/correlations/script_roket.py @@ -0,0 +1,488 @@ +""" +Created on Tue Jul 12 09:28:23 2016 + +@author: fferreira +""" + +import cProfile +import pstats as ps + +import sys, os +import numpy as np +import carmaWrap as ch +import shesha as ao +import time +import matplotlib.pyplot as plt +plt.ion() +import hdf5_util as h5u +import pandas +from scipy.sparse import csr_matrix + +############################################################################ +# _ _ _ +# (_)_ __ (_) |_ ___ +# | | '_ \| | __/ __| +# | | | | | | |_\__ \ +# |_|_| |_|_|\__|___/ +############################################################################ +c = ch.carmaWrap_context(devices=np.array([6, 7], dtype=np.int32)) + + +def init_config(config): + if (hasattr(config, "simul_name")): + if (config.simul_name is None): + simul_name = "" + else: + simul_name = config.simul_name + else: + simul_name = "" + print("simul name is", simul_name) + + matricesToLoad = {} + if (simul_name == b""): + clean = 1 + else: + clean = 0 + param_dict = h5u.params_dictionary(config) + matricesToLoad = h5u.checkMatricesDataBase(os.environ["SHESHA_ROOT"] + "/data/", + config, param_dict) + #initialisation: + # context + #c=ch.carmaWrap_context(device) + c = ch.carmaWrap_context(devices=np.array([6, 7], dtype=np.int32)) + #c.set_active_device(device) + + # wfs + print("->wfs") + wfs, tel = ao.wfs_init(config.p_wfss, config.p_atmos, config.p_tel, config.p_geom, + config.p_target, config.p_loop, config.p_dms) + + # atmos + print("->atmos") + atm = ao.atmos_init(c, config.p_atmos, config.p_tel, config.p_geom, config.p_loop, + config.p_wfss, wfs, config.p_target, rank=0, clean=clean, + load=matricesToLoad) + + # dm + print("->dm") + dms = ao.dm_init(config.p_dms, config.p_wfss, wfs, config.p_geom, config.p_tel) + + # target + 
print("->target") + tar = ao.target_init(c, tel, config.p_target, config.p_atmos, config.p_geom, + config.p_tel, config.p_dms) + + print("->rtc") + # rtc + rtc = ao.rtc_init(tel, wfs, config.p_wfss, dms, config.p_dms, config.p_geom, + config.p_rtc, config.p_atmos, atm, config.p_tel, config.p_loop, + clean=clean, simul_name=simul_name, load=matricesToLoad) + + if not clean: + h5u.validDataBase(os.environ["SHESHA_ROOT"] + "/data/", matricesToLoad) + + print("====================") + print("init done") + print("====================") + print("objects initialzed on GPU:") + print("--------------------------------------------------------") + print(atm) + print(wfs) + print(dms) + print(tar) + print(rtc) + + print("----------------------------------------------------") + print("iter# | SE SR image | LE SR image | Fitting | LE SR phase var") + print("----------------------------------------------------") + + error_flag = True in [w.roket for w in config.p_wfss] + + return atm, wfs, tel, dms, tar, rtc + + +############################################################################## +# _ ___ _ +# /_\ / _ \ | |___ ___ _ __ +# / _ \ (_) | | / _ \/ _ \ '_ \ +# /_/ \_\___/ |_\___/\___/ .__/ +# |_| +############################################################################## +def loop(n): + """ + Performs the main AO loop for n interations. First, initialize buffers + for error breakdown computations. Then, at the end of each iteration, just + before applying the new DM shape, calls the error_breakdown function. 
+ + :param n: (int) : number of iterations + + :return: + com : (np.array((n,nactus))) : full command buffer + + noise_com : (np.array((n,nactus))) : noise contribution for error breakdown + + alias_wfs_com : (np.array((n,nactus))) : aliasing estimation in the WFS direction + + tomo_com : (np.array((n,nactus))) : tomography error estimation + + H_com : (np.array((n,nactus))) : Filtered modes contribution for error breakdown + + trunc_com : (np.array((n,nactus))) : Truncature and sampling error of WFS + + bp_com : (np.array((n,nactus))) : Bandwidth error estimation on target + + mod_com : (np.array((n,nactus))) : Commanded modes expressed on the actuators + + fit : (float) : fitting (mean variance of the residual target phase after projection) + + SR : (float) : final strehl ratio returned by the simulation + """ + if (error_flag): + # Initialize buffers for error breakdown + nactu = rtc.get_command(0).size + nslopes = rtc.get_centroids(0).size + com = np.zeros((n, nactu), dtype=np.float32) + noise_com = np.zeros((n, nactu), dtype=np.float32) + alias_wfs_com = np.copy(noise_com) + wf_com = np.copy(noise_com) + tomo_com = np.copy(noise_com) + trunc_com = np.copy(noise_com) + H_com = np.copy(noise_com) + mod_com = np.copy(noise_com) + bp_com = np.copy(noise_com) + fit = np.zeros(n) + # covm = np.zeros((nslopes,nslopes)) + # covv = np.zeros((nactu,nactu)) + + t0 = time.time() + for i in range(-10, n): + atm.move_atmos() + + if (config.p_controllers[0].type == b"geo"): + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + rtc.docontrol_geo(0, dms, tar, 0) + rtc.applycontrol(0, dms) + tar.dmtrace(0, dms) + else: + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + tar.dmtrace(t, dms) + for w in range(len(config.p_wfss)): + wfs.sensors_trace(w, "all", tel, atm, dms) + wfs.sensors_compimg(w) + rtc.docentroids(0) + rtc.docontrol(0) + #m = np.reshape(rtc.get_centroids(0),(nslopes,1)) + #v = 
np.reshape(rtc.get_command(0),(nactu,1)) + if (error_flag and i > -1): + #compute the error breakdown for this iteration + #covm += m.dot(m.T) + #covv += v.dot(v.T) + roket.computeBreakdown() + rtc.applycontrol(0, dms) + + if ((i + 1) % 100 == 0 and i > -1): + strehltmp = tar.get_strehl(0) + print(i + 1, "\t", strehltmp[0], "\t", strehltmp[1], "\t", + np.exp(-strehltmp[2]), "\t", np.exp(-strehltmp[3])) + t1 = time.time() + print(" loop execution time:", t1 - t0, " (", n, "iterations), ", (t1 - t0) / n, + "(mean) ", n / (t1 - t0), "Hz") + if (error_flag): + #Returns the error breakdown + SR2 = np.exp(-tar.get_strehl(0, comp_strehl=False)[3]) + SR = tar.get_strehl(0, comp_strehl=False)[1] + #bp_com[-1,:] = bp_com[-2,:] + #SR = tar.get_strehl(0,comp_strehl=False)[1] + return SR, SR2 + + +def preloop(n): + """ + Performs the main AO loop for n interations. First, initialize buffers + for error breakdown computations. Then, at the end of each iteration, just + before applying the new DM shape, calls the error_breakdown function. 
+ + :param n: (int) : number of iterations + + :return: + com : (np.array((n,nactus))) : full command buffer + + noise_com : (np.array((n,nactus))) : noise contribution for error breakdown + + alias_wfs_com : (np.array((n,nactus))) : aliasing estimation in the WFS direction + + tomo_com : (np.array((n,nactus))) : tomography error estimation + + H_com : (np.array((n,nactus))) : Filtered modes contribution for error breakdown + + trunc_com : (np.array((n,nactus))) : Truncature and sampling error of WFS + + bp_com : (np.array((n,nactus))) : Bandwidth error estimation on target + + mod_com : (np.array((n,nactus))) : Commanded modes expressed on the actuators + + fit : (float) : fitting (mean variance of the residual target phase after projection) + + SR : (float) : final strehl ratio returned by the simulation + """ + for i in range(0, n): + atm.move_atmos() + + if (config.p_controllers[0].type == b"geo"): + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + rtc.docontrol_geo(0, dms, tar, 0) + rtc.applycontrol(0, dms) + else: + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + for w in range(len(config.p_wfss)): + wfs.sensors_trace(w, "all", tel, atm, dms) + wfs.sensors_compimg(w) + rtc.docentroids(0) + rtc.docontrol(0) + + rtc.applycontrol(0, dms) + + +################################################################################ +# ___ _ +# | _ ) __ _ __(_)___ +# | _ \/ _` (_-< (_-< +# |___/\__,_/__/_/__/ +################################################################################ +def compute_btt(): + IF = rtc.get_IFsparse(1).T + N = IF.shape[0] + n = IF.shape[1] + #T = IF[:,-2:].copy() + T = rtc.get_IFtt(1) + #IF = IF[:,:n-2] + n = IF.shape[1] + + delta = IF.T.dot(IF).toarray() / N + + # Tip-tilt + piston + Tp = np.ones((T.shape[0], T.shape[1] + 1)) + Tp[:, :2] = T.copy() #.toarray() + deltaT = IF.T.dot(Tp) / N + # Tip tilt projection on the pzt dm + tau = np.linalg.inv(delta).dot(deltaT) + + # Famille 
generatrice sans tip tilt + G = np.identity(n) + tdt = tau.T.dot(delta).dot(tau) + subTT = tau.dot(np.linalg.inv(tdt)).dot(tau.T).dot(delta) + G -= subTT + + # Base orthonormee sans TT + gdg = G.T.dot(delta).dot(G) + U, s, V = np.linalg.svd(gdg) + U = U[:, :U.shape[1] - 3] + s = s[:s.size - 3] + L = np.identity(s.size) / np.sqrt(s) + B = G.dot(U).dot(L) + + # Rajout du TT + TT = T.T.dot(T) / N #.toarray()/N + Btt = np.zeros((n + 2, n - 1)) + Btt[:B.shape[0], :B.shape[1]] = B + mini = 1. / np.sqrt(TT) + mini[0, 1] = 0 + mini[1, 0] = 0 + Btt[n:, n - 3:] = mini + + # Calcul du projecteur actus-->modes + delta = np.zeros((n + T.shape[1], n + T.shape[1])) + #IF = rtc.get_IFsparse(1).T + delta[:-2, :-2] = IF.T.dot(IF).toarray() / N + delta[-2:, -2:] = T.T.dot(T) / N + P = Btt.T.dot(delta) + + return Btt.astype(np.float32), P.astype(np.float32) + + +def compute_cmatWithBtt(Btt, nfilt): + D = rtc.get_imat(0) + #D = ao.imat_geom(wfs,config.p_wfss,config.p_controllers[0],dms,config.p_dms,meth=0) + # Filtering on Btt modes + Btt_filt = np.zeros((Btt.shape[0], Btt.shape[1] - nfilt)) + Btt_filt[:, :Btt_filt.shape[1] - 2] = Btt[:, :Btt.shape[1] - (nfilt + 2)] + Btt_filt[:, Btt_filt.shape[1] - 2:] = Btt[:, Btt.shape[1] - 2:] + + # Modal interaction basis + Dm = D.dot(Btt_filt) + # Direct inversion + Dmp = np.linalg.inv(Dm.T.dot(Dm)).dot(Dm.T) + # Command matrix + cmat = Btt_filt.dot(Dmp) + + return Dm.astype(np.float32), cmat.astype(np.float32) + + +def compute_cmatWithBtt2(Btt, nfilt): + D = rtc.get_imat(0) + + # Modal interaction basis + Dm = D.dot(Btt) + # Filtering on modal imat + DmtDm = Dm.T.dot(Dm) + U, s, V = np.linalg.svd(DmtDm) + s = 1. / s + s[s.shape[0] - nfilt - 2:s.shape[0] - 2] = 0. 
+ DmtDm1 = U.dot(np.diag(s)).dot(U.T) + Dmp = DmtDm1.dot(Dm.T) + # Command matrix + cmat = Btt.dot(Dmp) + + return Dm.astype(np.float32), cmat.astype(np.float32) + + +########################################################################################### +# ___ _ __ _ _ _ +# / __|_____ ____ _ _ _(_)__ _ _ _ __ ___ / _|___ __ ___ _ _ _ _ ___| |__ _| |_(_)___ _ _ +# | (__/ _ \ V / _` | '_| / _` | ' \/ _/ -_) > _|_ _| / _/ _ \ '_| '_/ -_) / _` | _| / _ \ ' \ +# \___\___/\_/\__,_|_| |_\__,_|_||_\__\___| \_____| \__\___/_| |_| \___|_\__,_|\__|_\___/_||_| +# +########################################################################################### + + +def cov_cor(P, noise, trunc, alias, H, bp, tomo): + cov = np.zeros((6, 6)) + bufdict = { + "0": noise.T, + "1": trunc.T, + "2": alias.T, + "3": H.T, + "4": bp.T, + "5": tomo.T + } + for i in range(cov.shape[0]): + for j in range(cov.shape[1]): + if (j >= i): + tmpi = P.dot(bufdict[str(i)]) + tmpj = P.dot(bufdict[str(j)]) + cov[i, j] = np.sum( + np.mean(tmpi * tmpj, axis=1) - + np.mean(tmpi, axis=1) * np.mean(tmpj, axis=1)) + else: + cov[i, j] = cov[j, i] + + s = np.reshape(np.diag(cov), (cov.shape[0], 1)) + sst = np.dot(s, s.T) + cor = cov / np.sqrt(sst) + + return cov, cor + + +########################################################################################### +# ___ +# / __| __ ___ _____ +# \__ \/ _` \ V / -_) +# |___/\__,_|\_/\___| +########################################################################################### + + +def save_it(filename): + IF = rtc.get_IFsparse(1) + TT = rtc.get_IFtt(1) + noise_com = roket.getContributor("noise") + trunc_com = roket.getContributor("nonlinear") + alias_wfs_com = roket.getContributor("aliasing") + H_com = roket.getContributor("filtered") + bp_com = roket.getContributor("bandwidth") + tomo_com = roket.getContributor("tomo") + fit = roket.getContributor("fitting") + + tmp = (config.p_geom._ipupil.shape[0] - + (config.p_dms[0]._n2 - config.p_dms[0]._n1 + 1)) / 2 + 
tmp_e0 = config.p_geom._ipupil.shape[0] - tmp + tmp_e1 = config.p_geom._ipupil.shape[1] - tmp + pup = config.p_geom._ipupil[tmp:tmp_e0, tmp:tmp_e1] + indx_pup = np.where(pup.flatten() > 0)[0].astype(np.int32) + dm_dim = config.p_dms[0]._n2 - config.p_dms[0]._n1 + 1 + cov, cor = cov_cor(P, noise_com, trunc_com, alias_wfs_com, H_com, bp_com, tomo_com) + psf = tar.get_image(0, "le", fluxNorm=False) + psfortho = roket.get_tar_imageortho() + covv = roket.get_covv() + covm = roket.get_covm() + + fname = "/home/fferreira/Data/" + filename + pdict = { + "noise": noise_com.T, + "aliasing": alias_wfs_com.T, + "tomography": tomo_com.T, + "filtered modes": H_com.T, + "non linearity": trunc_com.T, + "bandwidth": bp_com.T, + "P": P, + "Btt": Btt, + "IF.data": IF.data, + "IF.indices": IF.indices, + "IF.indptr": IF.indptr, + "TT": TT, + "dm_dim": dm_dim, + "indx_pup": indx_pup, + "fitting": fit, + "SR": SR, + "SR2": SR2, + "cov": cov, + "cor": cor, + "psfortho": psfortho, + "covm": covm, + "covv": covv + } + h5u.save_h5(fname, "psf", config, psf) + #h5u.writeHdf5SingleDataset(fname,com.T,datasetName="com") + for k in list(pdict.keys()): + h5u.save_hdf5(fname, k, pdict[k]) + + +############################################################################################### +# _ _ +# | |_ ___ ___| |_ ___ +# | __/ _ \/ __| __/ __| +# | || __/\__ \ |_\__ \ +# \__\___||___/\__|___/ +############################################################################################### +param_file = "/home/fferreira/compass/trunk/shesha/data/par/par4roket/correlation_study/roket_8m_1layer.py" +error_flag = True +if (param_file.split('.')[-1] == b"py"): + filename = param_file.split('/')[-1] + param_path = param_file.split(filename)[0] + sys.path.insert(0, param_path) + exec("import %s as config" % filename.split(".py")[0]) + #sys.path.remove(param_path) +nfiltered = 20 +niters = config.p_loop.niter + +winddirs = [0, 45, 90, 135, 180] +windspeeds = [5., 10., 15., 20.] 
+ +d = float(sys.argv[1]) +s = float(sys.argv[2]) +g = float(sys.argv[3]) + +savename = "roket_8m_1layer_dir%d_speed%d_g%d.h5" % (d, s, g * 10) +config.p_atmos.set_winddir([d]) +config.p_atmos.set_windspeed([s]) +config.p_controllers[0].set_gain(g) + +atm, wfs, tel, dms, tar, rtc = init_config(config) +#config.p_loop.set_niter(niters) +Btt, P = compute_btt() +rtc.load_Btt(1, Btt.dot(Btt.T)) +Dm, cmat = compute_cmatWithBtt(Btt, nfiltered) +rtc.set_cmat(0, cmat) +R = rtc.get_cmat(0) +imat = rtc.get_imat(0) +RD = np.dot(R, imat).astype(np.float32) +gRD = (np.identity(RD.shape[0]) - config.p_controllers[0].gain * RD).astype(np.float32) +roket = ao.roket_init(rtc, wfs, tar, dms, tel, atm, 0, 1, Btt.shape[0], Btt.shape[1], + nfiltered, niters, Btt, P, gRD, RD) +preloop(1000) +SR, SR2 = loop(niters) + +save_it(savename) diff --git a/guardians/misc/correlations/script_roket_cpu.py b/guardians/misc/correlations/script_roket_cpu.py new file mode 100644 index 0000000..e76afad --- /dev/null +++ b/guardians/misc/correlations/script_roket_cpu.py @@ -0,0 +1,713 @@ +""" +Created on Wed Apr 27 09:28:23 2016 + +@author: fferreira +""" + +import cProfile +import pstats as ps + +import sys, os +import numpy as np +import carmaWrap as ch +import shesha as ao +import time +import matplotlib.pyplot as pl +pl.ion() +import hdf5_util as h5u +import pandas +from scipy.sparse import csr_matrix + +c = ch.carmaWrap_context(devices=np.array([6], dtype=np.int32)) + +############################################################################ +# _ _ _ +# (_)_ __ (_) |_ ___ +# | | '_ \| | __/ __| +# | | | | | | |_\__ \ +# |_|_| |_|_|\__|___/ +############################################################################ + + +def init_config(config): + if (hasattr(config, "simul_name")): + if (config.simul_name is None): + simul_name = "" + else: + simul_name = config.simul_name + else: + simul_name = "" + print("simul name is", simul_name) + + matricesToLoad = {} + if (simul_name == b""): + clean = 
1 + else: + clean = 0 + param_dict = h5u.params_dictionary(config) + matricesToLoad = h5u.checkMatricesDataBase(os.environ["SHESHA_ROOT"] + "/data/", + config, param_dict) + #initialisation: + # context + + #c=ch.carmaWrap_context(devices=np.array([4,5,6,7], dtype=np.int32)) + #c.set_active_device(device) + + # wfs + print("->wfs") + wfs, tel = ao.wfs_init(config.p_wfss, config.p_atmos, config.p_tel, config.p_geom, + config.p_target, config.p_loop, config.p_dms) + + # atmos + print("->atmos") + atm = ao.atmos_init(c, config.p_atmos, config.p_tel, config.p_geom, config.p_loop, + config.p_wfss, wfs, config.p_target, rank=0, clean=clean, + load=matricesToLoad) + + # dm + print("->dm") + dms = ao.dm_init(config.p_dms, config.p_wfss, wfs, config.p_geom, config.p_tel) + + # target + print("->target") + tar = ao.target_init(c, tel, config.p_target, config.p_atmos, config.p_geom, + config.p_tel, config.p_dms) + + print("->rtc") + # rtc + rtc = ao.rtc_init(tel, wfs, config.p_wfss, dms, config.p_dms, config.p_geom, + config.p_rtc, config.p_atmos, atm, config.p_tel, config.p_loop, + clean=clean, simul_name=simul_name, load=matricesToLoad) + + if not clean: + h5u.validDataBase(os.environ["SHESHA_ROOT"] + "/data/", matricesToLoad) + + print("====================") + print("init done") + print("====================") + print("objects initialzed on GPU:") + print("--------------------------------------------------------") + print(atm) + print(wfs) + print(dms) + print(tar) + print(rtc) + + print("----------------------------------------------------") + print("iter# | SE SR image | LE SR image | Fitting | LE SR phase var") + print("----------------------------------------------------") + + error_flag = True in [w.roket for w in config.p_wfss] + + return atm, wfs, tel, dms, tar, rtc + + +############################################################################## +# _ ___ _ +# /_\ / _ \ | |___ ___ _ __ +# / _ \ (_) | | / _ \/ _ \ '_ \ +# /_/ \_\___/ |_\___/\___/ .__/ +# |_| 
+############################################################################## +def loop(n): + """ + Performs the main AO loop for n interations. First, initialize buffers + for error breakdown computations. Then, at the end of each iteration, just + before applying the new DM shape, calls the error_breakdown function. + + :param n: (int) : number of iterations + + :return: + com : (np.array((n,nactus))) : full command buffer + + noise_com : (np.array((n,nactus))) : noise contribution for error breakdown + + alias_wfs_com : (np.array((n,nactus))) : aliasing estimation in the WFS direction + + tomo_com : (np.array((n,nactus))) : tomography error estimation + + H_com : (np.array((n,nactus))) : Filtered modes contribution for error breakdown + + trunc_com : (np.array((n,nactus))) : Truncature and sampling error of WFS + + bp_com : (np.array((n,nactus))) : Bandwidth error estimation on target + + mod_com : (np.array((n,nactus))) : Commanded modes expressed on the actuators + + fit : (float) : fitting (mean variance of the residual target phase after projection) + + SR : (float) : final strehl ratio returned by the simulation + """ + if (error_flag): + # Initialize buffers for error breakdown + nactu = rtc.get_command(0).size + com = np.zeros((n, nactu), dtype=np.float32) + noise_com = np.zeros((n, nactu), dtype=np.float32) + alias_wfs_com = np.copy(noise_com) + wf_com = np.copy(noise_com) + tomo_com = np.copy(noise_com) + trunc_com = np.copy(noise_com) + H_com = np.copy(noise_com) + mod_com = np.copy(noise_com) + bp_com = np.copy(noise_com) + fit = np.zeros(n) + psf_ortho = tar.get_image(0, 'se') * 0. 
+ + t0 = time.time() + for i in range(n): + atm.move_atmos() + + if (config.p_controllers[0].type == b"geo"): + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + rtc.docontrol_geo(0, dms, tar, 0) + rtc.applycontrol(0, dms) + tar.dmtrace(0, dms) + else: + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + tar.dmtrace(t, dms) + for w in range(len(config.p_wfss)): + wfs.sensors_trace(w, "all", tel, atm, dms) + wfs.sensors_compimg(w) + rtc.docentroids(0) + rtc.docontrol(0) + + if (error_flag): + #compute the error breakdown for this iteration + error_breakdown(com, noise_com, alias_wfs_com, tomo_com, H_com, + trunc_com, bp_com, wf_com, mod_com, fit, psf_ortho, i) + + rtc.applycontrol(0, dms) + + if ((i + 1) % 10 == 0 and i > -1): + strehltmp = tar.get_strehl(0) + print(i + 1, "\t", strehltmp[0], "\t", strehltmp[1], "\t", + np.exp(-strehltmp[2]), "\t", np.exp(-strehltmp[3])) + t1 = time.time() + print(" loop execution time:", t1 - t0, " (", n, "iterations), ", (t1 - t0) / n, + "(mean) ", n / (t1 - t0), "Hz") + if (error_flag): + #Returns the error breakdown + SR2 = np.exp(-tar.get_strehl(0, comp_strehl=False)[3]) + SR = tar.get_strehl(0, comp_strehl=False)[1] + #bp_com[-1,:] = bp_com[-2,:] + #SR = tar.get_strehl(0,comp_strehl=False)[1] + return com, noise_com, alias_wfs_com, tomo_com, H_com, trunc_com, bp_com, mod_com, np.mean( + fit[N_preloop:]), SR, SR2, psf_ortho + + +def preloop(n): + """ + Performs the main AO loop for n interations. First, initialize buffers + for error breakdown computations. Then, at the end of each iteration, just + before applying the new DM shape, calls the error_breakdown function. 
+ + :param n: (int) : number of iterations + + :return: + com : (np.array((n,nactus))) : full command buffer + + noise_com : (np.array((n,nactus))) : noise contribution for error breakdown + + alias_wfs_com : (np.array((n,nactus))) : aliasing estimation in the WFS direction + + tomo_com : (np.array((n,nactus))) : tomography error estimation + + H_com : (np.array((n,nactus))) : Filtered modes contribution for error breakdown + + trunc_com : (np.array((n,nactus))) : Truncature and sampling error of WFS + + bp_com : (np.array((n,nactus))) : Bandwidth error estimation on target + + mod_com : (np.array((n,nactus))) : Commanded modes expressed on the actuators + + fit : (float) : fitting (mean variance of the residual target phase after projection) + + SR : (float) : final strehl ratio returned by the simulation + """ + for i in range(0, n): + atm.move_atmos() + + if (config.p_controllers[0].type == b"geo"): + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + rtc.docontrol_geo(0, dms, tar, 0) + rtc.applycontrol(0, dms) + else: + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + for w in range(len(config.p_wfss)): + wfs.sensors_trace(w, "all", tel, atm, dms) + wfs.sensors_compimg(w) + rtc.docentroids(0) + rtc.docontrol(0) + + rtc.applycontrol(0, dms) + + +################################################################################### +# ___ ___ _ _ +# | __|_ _ _ _ ___ _ _ | _ )_ _ ___ __ _| |____| |_____ __ ___ _ +# | _|| '_| '_/ _ \ '_| | _ \ '_/ -_) _` | / / _` / _ \ V V / ' \ +# |___|_| |_| \___/_| |___/_| \___\__,_|_\_\__,_\___/\_/\_/|_||_| +################################################################################### +def error_breakdown(com, noise_com, alias_wfs_com, tomo_com, H_com, trunc_com, bp_com, + wf_com, mod_com, fit, psf_ortho, i): + """ + Compute the error breakdown of the AO simulation. Returns the error commands of + each contributors. 
Suppose no delay (for now) and only 2 controllers : the main one, controller #0, (specified on the parameter file) + and the geometric one, controller #1 (automatically added if roket is asked in the parameter file) + Commands are computed by applying the loop filter on various kind of commands : (see schema_simulation_budget_erreur_v2) + + - Ageom : Aliasing contribution on WFS direction + Obtained by computing commands from DM orthogonal phase (projection + slopes_geom) + + - B : Projection on the target direction + Obtained as the commmands output of the geometric controller + + - C : Wavefront + Obtained by computing commands from DM parallel phase (RD*B) + + - E : Wavefront + aliasing + ech/trunc + tomo + Obtained by performing the AO loop iteration without noise on the WFS + + - F : Wavefront + aliasing + tomo + Obtained by performing the AO loop iteration without noise on the WFS and using phase deriving slopes + + - G : tomo + + Note : rtc.get_err returns to -CMAT.slopes + + Args: + noise_com : np.array((niter,nactu)) : Noise contribution + Computed with com-E + + alias_wfs_com : np.array((niter,nactu)) : Aliasing on WFS direction contribution + Computed with Ageom + + tomo_com : np.array((niter,nactu)) : Tomographic error contribution + Computed with C-B + + H_com : np.array((niter,nactu)) : Filtered modes error + Computed with B + + trunc_com : np.array((niter,nactu)) : sampling/truncature error contribution + Computed with E-F + + bp_com : np.array((niter,nactu)) : Bandwidth error + + wf_com : np.array((niter,nactu)) : Reconstructed wavefront + + mod_com : np.array((niter,nactu)) : commanded modes + + fit : np.array((niter)) : fitting value + + i : (int) : current iteration number + + """ + g = config.p_controllers[0].gain + Dcom = rtc.get_command(0) + Derr = rtc.get_err(0) + com[i, :] = Dcom + tarphase = tar.get_phase(0) + ########################################################################### + ## Noise contribution + 
########################################################################### + if (config.p_wfss[0].type == b"sh"): + ideal_bincube = wfs.get_bincubeNotNoisy(0) + bincube = wfs.get_bincube(0) + if (config.p_centroiders[0].type == b"tcog" + ): # Select the same pixels with or without noise + invalidpix = np.where(bincube <= config.p_centroiders[0].thresh) + ideal_bincube[invalidpix] = 0 + rtc.setthresh(0, -1e16) + wfs.set_bincube(0, ideal_bincube) + elif (config.p_wfss[0].type == b"pyrhr"): + ideal_pyrimg = wfs.get_binimg_notnoisy(0) + wfs.set_pyrimg(0, ideal_pyrimg) + + rtc.docentroids(0) + if (config.p_centroiders[0].type == b"tcog"): + rtc.setthresh(0, config.p_centroiders[0].thresh) + + rtc.docontrol(0) + E = rtc.get_err(0) + # Apply loop filter to get contribution of noise on commands + if (i + 1 < config.p_loop.niter): + noise_com[i + 1, :] = gRD.dot(noise_com[i, :]) + g * (Derr - E) + + ########################################################################### + ## Sampling/truncature contribution + ########################################################################### + rtc.docentroids_geom(0) + rtc.docontrol(0) + F = rtc.get_err(0) + # Apply loop filter to get contribution of sampling/truncature on commands + if (i + 1 < config.p_loop.niter): + trunc_com[i + 1, :] = gRD.dot(trunc_com[i, :]) + g * (E - F) + + ########################################################################### + ## Aliasing contribution on WFS direction + ########################################################################### + rtc.docontrol_geo_onwfs(1, dms, wfs, 0) + rtc.applycontrol(1, dms) + for w in range(len(config.p_wfss)): + wfs.sensors_trace(w, "dm", tel, atm, dms) + """ + wfs.sensors_compimg(0) + if(config.p_wfss[0].type == b"sh"): + ideal_bincube = wfs.get_bincubeNotNoisy(0) + bincube = wfs.get_bincube(0) + if(config.p_centroiders[0].type == b"tcog"): # Select the same pixels with or without noise + invalidpix = np.where(bincube <= config.p_centroiders[0].thresh) + 
ideal_bincube[invalidpix] = 0 + rtc.setthresh(0,-1e16) + wfs.set_bincube(0,ideal_bincube) + elif(config.p_wfss[0].type == b"pyrhr"): + ideal_pyrimg = wfs.get_binimg_notnoisy(0) + wfs.set_pyrimg(0,ideal_pyrimg) + """ + rtc.docentroids_geom(0) + rtc.docontrol(0) + Ageom = rtc.get_err(0) + if (i + 1 < config.p_loop.niter): + alias_wfs_com[i + 1, :] = gRD.dot(alias_wfs_com[i, :]) + g * (Ageom) + + ########################################################################### + ## Wavefront + filtered modes reconstruction + ########################################################################### + tar.atmos_trace(0, atm, tel) + rtc.docontrol_geo(1, dms, tar, 0) + B = rtc.get_command(1) + + ########################################################################### + ## Fitting + ########################################################################### + rtc.applycontrol(1, dms) + tar.dmtrace(0, dms, do_phase_var=0) + fit[i] = tar.get_strehl(0, comp_strehl=False)[2] + if (i >= N_preloop): + psf_ortho += tar.get_image(0, 'se') / niters + + ########################################################################### + ## Filtered modes error & Commanded modes + ########################################################################### + modes = P.dot(B) + modes_filt = modes.copy() * 0. 
+ modes_filt[-nfiltered - 2:-2] = modes[-nfiltered - 2:-2] + H_com[i, :] = Btt.dot(modes_filt) + modes[-nfiltered - 2:-2] = 0 + mod_com[i, :] = Btt.dot(modes) + + ########################################################################### + ## Bandwidth error + ########################################################################### + C = mod_com[i, :] - mod_com[i - 1, :] + + bp_com[i, :] = gRD.dot(bp_com[i - 1, :]) - C + + ########################################################################### + ## Tomographic error + ########################################################################### + #G = F - (mod_com[i,:] + Ageom - np.dot(RDgeom,com[i-1,:])) + for w in range(len(config.p_wfss)): + wfs.sensors_trace(w, "atmos", tel, atm, dms) + rtc.docontrol_geo_onwfs(1, dms, wfs, 0) + G = rtc.get_command(1) + modes = P.dot(G) + modes[-nfiltered - 2:-2] = 0 + wf_com[i, :] = Btt.dot(modes) + + G = mod_com[i, :] - wf_com[i, :] + if (i + 1 < config.p_loop.niter): + tomo_com[i + 1, :] = gRD.dot(tomo_com[i, :]) - g * RD.dot(G) + + # Without anyone noticing... 
+ tar.set_phase(0, tarphase) + rtc.setCom(0, Dcom) + + +################################################################################ +# ___ _ +# | _ ) __ _ __(_)___ +# | _ \/ _` (_-< (_-< +# |___/\__,_/__/_/__/ +################################################################################ +def compute_btt2(): + IF = rtc.get_IFsparse(1).T + N = IF.shape[0] + n = IF.shape[1] + #T = IF[:,-2:].copy() + T = rtc.get_IFtt(1) + #IF = IF[:,:n-2] + n = IF.shape[1] + + delta = IF.T.dot(IF).toarray() / N + + # Tip-tilt + piston + Tp = np.ones((T.shape[0], T.shape[1] + 1)) + Tp[:, :2] = T.copy() #.toarray() + deltaT = IF.T.dot(Tp) / N + # Tip tilt projection on the pzt dm + tau = np.linalg.inv(delta).dot(deltaT) + + # Famille generatrice sans tip tilt + G = np.identity(n) + tdt = tau.T.dot(delta).dot(tau) + subTT = tau.dot(np.linalg.inv(tdt)).dot(tau.T).dot(delta) + G -= subTT + + # Base orthonormee sans TT + gdg = G.T.dot(delta).dot(G) + U, s, V = np.linalg.svd(gdg) + U = U[:, :U.shape[1] - 3] + s = s[:s.size - 3] + L = np.identity(s.size) / np.sqrt(s) + B = G.dot(U).dot(L) + + # Rajout du TT + TT = T.T.dot(T) / N #.toarray()/N + Btt = np.zeros((n + 2, n - 1)) + Btt[:B.shape[0], :B.shape[1]] = B + mini = 1. 
/ np.sqrt(TT) + mini[0, 1] = 0 + mini[1, 0] = 0 + Btt[n:, n - 3:] = mini + + # Calcul du projecteur actus-->modes + delta = np.zeros((n + T.shape[1], n + T.shape[1])) + #IF = rtc.get_IFsparse(1).T + delta[:-2, :-2] = IF.T.dot(IF).toarray() / N + delta[-2:, -2:] = T.T.dot(T) / N + P = Btt.T.dot(delta) + + return Btt.astype(np.float32), P.astype(np.float32) + + +def compute_btt(): + IF = rtc.get_IFsparse(1).T + N = IF.shape[0] + n = IF.shape[1] + T = IF[:, -2:].copy() + IF = IF[:, :n - 2] + n = IF.shape[1] + + delta = IF.T.dot(IF).toarray() / N + + # Tip-tilt + piston + Tp = np.ones((T.shape[0], T.shape[1] + 1)) + Tp[:, :2] = T.toarray() + deltaT = IF.T.dot(Tp) / N + # Tip tilt projection on the pzt dm + tau = np.linalg.inv(delta).dot(deltaT) + + # Famille génératrice sans tip tilt + G = np.identity(n) + tdt = tau.T.dot(delta).dot(tau) + subTT = tau.dot(np.linalg.inv(tdt)).dot(tau.T).dot(delta) + G -= subTT + + # Base orthonormée sans TT + gdg = G.T.dot(delta).dot(G) + U, s, V = np.linalg.svd(gdg) + U = U[:, :U.shape[1] - 3] + s = s[:s.size - 3] + L = np.identity(s.size) / np.sqrt(s) + B = G.dot(U).dot(L) + + # Rajout du TT + TT = T.T.dot(T).toarray() / N + Btt = np.zeros((n + 2, n - 1)) + Btt[:B.shape[0], :B.shape[1]] = B + mini = 1. 
/ np.sqrt(TT) + mini[0, 1] = 0 + mini[1, 0] = 0 + Btt[n:, n - 3:] = mini + + # Calcul du projecteur actus-->modes + IF = rtc.get_IFsparse(1).T + delta = IF.T.dot(IF).toarray() / N + P = Btt.T.dot(delta) + + return Btt.astype(np.float32), P.astype(np.float32) + + +def compute_cmatWithBtt(Btt, nfilt): + D = rtc.get_imat(0) + #D = ao.imat_geom(wfs,config.p_wfss,config.p_controllers[0],dms,config.p_dms,meth=0) + # Filtering on Btt modes + Btt_filt = np.zeros((Btt.shape[0], Btt.shape[1] - nfilt)) + Btt_filt[:, :Btt_filt.shape[1] - 2] = Btt[:, :Btt.shape[1] - (nfilt + 2)] + Btt_filt[:, Btt_filt.shape[1] - 2:] = Btt[:, Btt.shape[1] - 2:] + + # Modal interaction basis + Dm = D.dot(Btt_filt) + # Direct inversion + Dmp = np.linalg.inv(Dm.T.dot(Dm)).dot(Dm.T) + # Command matrix + cmat = Btt_filt.dot(Dmp) + + return Dm.astype(np.float32), cmat.astype(np.float32) + + +def compute_cmatWithBtt2(Btt, nfilt): + D = rtc.get_imat(0) + + # Modal interaction basis + Dm = D.dot(Btt) + # Filtering on modal imat + DmtDm = Dm.T.dot(Dm) + U, s, V = np.linalg.svd(DmtDm) + s = 1. / s + s[s.shape[0] - nfilt - 2:s.shape[0] - 2] = 0. 
+ DmtDm1 = U.dot(np.diag(s)).dot(U.T) + Dmp = DmtDm1.dot(Dm.T) + # Command matrix + cmat = Btt.dot(Dmp) + + return Dm.astype(np.float32), cmat.astype(np.float32) + + +########################################################################################### +# ___ _ __ _ _ _ +# / __|_____ ____ _ _ _(_)__ _ _ _ __ ___ / _|___ __ ___ _ _ _ _ ___| |__ _| |_(_)___ _ _ +# | (__/ _ \ V / _` | '_| / _` | ' \/ _/ -_) > _|_ _| / _/ _ \ '_| '_/ -_) / _` | _| / _ \ ' \ +# \___\___/\_/\__,_|_| |_\__,_|_||_\__\___| \_____| \__\___/_| |_| \___|_\__,_|\__|_\___/_||_| +# +########################################################################################### + + +def cov_cor(P, noise, trunc, alias, H, bp, tomo): + cov = np.zeros((6, 6)) + bufdict = { + "0": noise.T, + "1": trunc.T, + "2": alias.T, + "3": H.T, + "4": bp.T, + "5": tomo.T + } + for i in range(cov.shape[0]): + for j in range(cov.shape[1]): + if (j >= i): + tmpi = P.dot(bufdict[str(i)]) + tmpj = P.dot(bufdict[str(j)]) + cov[i, j] = np.sum( + np.mean(tmpi * tmpj, axis=1) - + np.mean(tmpi, axis=1) * np.mean(tmpj, axis=1)) + else: + cov[i, j] = cov[j, i] + + s = np.reshape(np.diag(cov), (cov.shape[0], 1)) + sst = np.dot(s, s.T) + cor = cov / np.sqrt(sst) + + return cov, cor + + +########################################################################################### +# ___ +# / __| __ ___ _____ +# \__ \/ _` \ V / -_) +# |___/\__,_|\_/\___| +########################################################################################### + + +def save_it(filename): + IF = rtc.get_IFsparse(1) + TT = rtc.get_IFtt(1) + + tmp = (config.p_geom._ipupil.shape[0] - + (config.p_dms[0]._n2 - config.p_dms[0]._n1 + 1)) / 2 + tmp_e0 = config.p_geom._ipupil.shape[0] - tmp + tmp_e1 = config.p_geom._ipupil.shape[1] - tmp + pup = config.p_geom._ipupil[tmp:tmp_e0, tmp:tmp_e1] + indx_pup = np.where(pup.flatten() > 0)[0].astype(np.int32) + dm_dim = config.p_dms[0]._n2 - config.p_dms[0]._n1 + 1 + cov, cor = cov_cor(P, noise_com, trunc_com, 
alias_wfs_com, H_com, bp_com, tomo_com) + psf = tar.get_image(0, "le", fluxNorm=False) + + fname = "/home/fferreira/Data/" + filename + pdict = { + "noise": noise_com.T, + "aliasing": alias_wfs_com.T, + "tomography": tomo_com.T, + "filtered modes": H_com.T, + "non linearity": trunc_com.T, + "bandwidth": bp_com.T, + "wf_com": wf_com.T, + "P": P, + "Btt": Btt, + "IF.data": IF.data, + "IF.indices": IF.indices, + "IF.indptr": IF.indptr, + "TT": TT, + "dm_dim": dm_dim, + "indx_pup": indx_pup, + "fitting": fit, + "SR": SR, + "SR2": SR2, + "cov": cov, + "cor": cor, + "psfortho": np.fft.fftshift(psf_ortho), + "dm.xpos": config.p_dms[0]._xpos, + "dm.ypos": config.p_dms[0]._ypos + } + h5u.save_h5(fname, "psf", config, psf) + #h5u.writeHdf5SingleDataset(fname,com.T,datasetName="com") + for k in list(pdict.keys()): + h5u.save_hdf5(fname, k, pdict[k]) + + +############################################################################################### +# _ _ +# | |_ ___ ___| |_ ___ +# | __/ _ \/ __| __/ __| +# | || __/\__ \ |_\__ \ +# \__\___||___/\__|___/ +############################################################################################### +param_file = "/home/fferreira/compass/trunk/shesha/data/par/par4roket/correlation_study/roket_8m_1layer.py" +error_flag = True +if (param_file.split('.')[-1] == b"py"): + filename = param_file.split('/')[-1] + param_path = param_file.split(filename)[0] + sys.path.insert(0, param_path) + exec("import %s as config" % filename.split(".py")[0]) + #sys.path.remove(param_path) +nfiltered = 20 +N_preloop = 1000 +niters = config.p_loop.niter +config.p_loop.set_niter(niters + N_preloop) +winddirs = [0, 45, 90, 135, 180] +windspeeds = [5., 10., 15., 20.] 
+ +d = float(sys.argv[1]) +s = float(sys.argv[2]) +g = float(sys.argv[3]) + +savename = "roket_8m_1layer_dir%d_speed%d_g%d_cpu.h5" % (d, s, g * 10) +config.p_atmos.set_winddir([d]) +config.p_atmos.set_windspeed([s]) +config.p_controllers[0].set_gain(g) + +atm, wfs, tel, dms, tar, rtc = init_config(config) +#config.p_loop.set_niter(niters) +Btt, P = compute_btt2() +rtc.load_Btt(1, Btt.dot(Btt.T)) +Dm, cmat = compute_cmatWithBtt(Btt, nfiltered) +rtc.set_cmat(0, cmat) +R = rtc.get_cmat(0) +imat = rtc.get_imat(0) +RD = np.dot(R, imat).astype(np.float32) +gRD = (np.identity(RD.shape[0]) - config.p_controllers[0].gain * RD).astype(np.float32) + +com, noise_com, alias_wfs_com, tomo_com, H_com, trunc_com, bp_com, wf_com, fit, SR, SR2, psf_ortho = loop( + niters + N_preloop) +noise_com = noise_com[N_preloop:, :] +trunc_com = trunc_com[N_preloop:, :] +alias_wfs_com = alias_wfs_com[N_preloop:, :] +H_com = H_com[N_preloop:, :] +bp_com = bp_com[N_preloop:, :] +tomo_com = tomo_com[N_preloop:, :] +save_it(savename) diff --git a/guardians/misc/layer_linearity/layers_test.py b/guardians/misc/layer_linearity/layers_test.py new file mode 100644 index 0000000..18ff802 --- /dev/null +++ b/guardians/misc/layer_linearity/layers_test.py @@ -0,0 +1,133 @@ +""" +Created on Wed Oct 5 14:28:23 2016 + +@author: fferreira +""" +import sys, os +import numpy as np +import h5py +import matplotlib.pyplot as plt +import matplotlib +plt.ion() +from guardians import gamora, drax + +datapath = "/home/fferreira/Data/" +fname_layers = "roket_8m_12layers_gamma1.h5" # File with all layers +buferr_ref = drax.get_err(datapath + fname_layers) +f_layers = h5py.File(datapath + fname_layers) +nlayers = f_layers.attrs["_Param_atmos__nscreens"] + +fname_layer_i = [] +name = "roket_8m_12layers" +for i in range(nlayers): + fname_layer_i.append(name + "_%d.h5" % (i)) + +files = [] +for f in fname_layer_i: + files.append(h5py.File(datapath + f)) + +print("--------------------------------------------") +print("file ", 
fname_layers, " :") +print(" nlayers : ", f_layers.attrs["_Param_atmos__nscreens"]) +print(" frac : ", f_layers.attrs["_Param_atmos__frac"]) +print("--------------------------------------------") + +nmodes = f_layers["P"][:].shape[0] +contributors = [ + "tomography", "bandwidth", "non linearity", "noise", "filtered modes", "aliasing" +] +Lambda_tar = f_layers.attrs["_Param_target__Lambda"][0] +fracs = f_layers.attrs["_Param_atmos__frac"] +alts = f_layers.attrs["_Param_atmos__alt"] +frac_per_layer = dict() +i = 0 +for a in alts: + frac_per_layer[a] = fracs[i] + i += 1 + +frac = [] +buferr_layers = drax.get_err(datapath + fname_layer_i[0]) * 0. +for k in range(len(files)): + frac.append(frac_per_layer[files[k].attrs["_Param_atmos__alt"][0]]) + buferr_layers += drax.get_err(datapath + fname_layer_i[k]) * np.sqrt( + frac_per_layer[files[k].attrs["_Param_atmos__alt"][0]]) + +C_layers = np.zeros((buferr_layers.shape[0], buferr_layers.shape[0])) +for k in range(len(files)): + C_layers += ( + frac[k] * drax.get_covmat_contrib(datapath + fname_layer_i[k], contributors)) +print("contributors : ", contributors) + +# Column 1 : with correlation, column 2 : independence assumption +err_layers = np.zeros((nmodes, 2)) + +err_layer_i = np.zeros((nmodes, 2 * nlayers)) + +err_layers[:, 0] = drax.variance(f_layers, contributors, method="Default") +err_layers[:, 1] = drax.variance(f_layers, contributors, method="Independence") +l = 0 +for f in files: + err_layer_i[:, l] = drax.variance(f, contributors, method="Default") + err_layer_i[:, l + 1] = drax.variance(f, contributors, method="Independence") + l += 2 + +#err_layer1p2 = varianceMultiFiles([f_layer1,f_layer2], frac_per_layer, contributors) +inderr = np.zeros(nmodes) +derr = np.zeros(nmodes) +for l in range(nlayers): + inderr += frac[l] * err_layer_i[:, 2 * l + 1] + derr += frac[l] * err_layer_i[:, 2 * l] + +otftel_ref, otf2_ref, psf_ref, gpu = gamora.psf_rec_Vii(datapath + fname_layers) +otftel_sum, otf2_sum, psf_sum, gpu = 
gamora.psf_rec_Vii(datapath + fname_layers, + err=buferr_layers) + +# Plots +plt.figure(1) +plt.subplot(2, 1, 1) +plt.semilogy(err_layers[:, 1]) +plt.semilogy(inderr) +plt.legend(["%d layers" % nlayers, "Layers sum"]) +plt.xlabel("Modes #") +plt.ylabel("Variance [mic^2]") +plt.title("Variance with independence assumption") +plt.subplot(2, 1, 2) +plt.plot(drax.cumulativeSR(err_layers[:, 1], Lambda_tar)) +plt.plot(drax.cumulativeSR(inderr, Lambda_tar)) +plt.legend(["%d layers" % nlayers, "Layers sum"]) +plt.xlabel("Modes #") +plt.ylabel("SR") +plt.title("Resulting SR") + +plt.figure(2) +plt.subplot(2, 1, 1) +plt.semilogy(err_layers[:, 0]) +plt.semilogy(derr) +plt.legend(["%d layers" % nlayers, "Layers sum"]) +plt.xlabel("Modes #") +plt.ylabel("Variance [mic^2]") +plt.title("Variance with correlation") +plt.subplot(2, 1, 2) +plt.plot(drax.cumulativeSR(err_layers[:, 0], Lambda_tar)) +plt.plot(drax.cumulativeSR(derr, Lambda_tar)) +plt.legend(["%d layers" % nlayers, "Layers sum"]) +plt.xlabel("Modes #") +plt.ylabel("SR") +plt.title("Resulting SR") + +RASC = 180 / np.pi * 3600. +#pixsize = (Lambda_tar*1e-6 / 8. * RASC) * 16./64. +#lambda/(Nfft*pupdiam/D) +pixsize = Lambda_tar * 1e-6 / (psf_ref.shape[0] * 8. / 640) * RASC +x = (np.arange(psf_ref.shape[0]) - psf_ref.shape[0] / 2) * pixsize / ( + Lambda_tar * 1e-6 / 8. 
* RASC) +font = {'family': 'normal', 'weight': 'bold', 'size': 22} + +matplotlib.rc('font', **font) + +plt.figure() +plt.semilogy(x, psf_ref[psf_ref.shape[0] / 2, :], color="blue") +plt.semilogy(x, psf_sum[psf_sum.shape[0] / 2, :], color="red") +plt.xlabel("Angle [units of lambda/D]") +plt.ylabel("Normalized intensity") +plt.legend(["12-layers PSF", "Sum of 12 layers PSF"]) diff --git a/guardians/misc/roket_cpu.py b/guardians/misc/roket_cpu.py new file mode 100644 index 0000000..e9e01ca --- /dev/null +++ b/guardians/misc/roket_cpu.py @@ -0,0 +1,702 @@ +""" +Created on Wed Apr 27 09:28:23 2016 + +@author: fferreira +""" + +import cProfile +import pstats as ps + +import sys, os +import numpy as np +import carmaWrap as ch +import shesha as ao +import time +import matplotlib.pyplot as pl +pl.ion() +import hdf5_util as h5u +import pandas +from scipy.sparse import csr_matrix + +if (len(sys.argv) < 2): + error = 'command line should be at least:"python -i test.py parameters_filename"\n with "parameters_filename" the path to the parameters file' + raise Exception(error) + +#get parameters from file +param_file = sys.argv[1] +if (param_file.split('.')[-1] == b"py"): + filename = param_file.split('/')[-1] + param_path = param_file.split(filename)[0] + sys.path.insert(0, param_path) + exec("import %s as config" % filename.split(".py")[0]) + sys.path.remove(param_path) +elif (param_file.split('.')[-1] == b"h5"): + sys.path.insert(0, os.environ["SHESHA_ROOT"] + "/data/par/par4bench/") + import scao_sh_16x16_8pix as config + sys.path.remove(os.environ["SHESHA_ROOT"] + "/data/par/par4bench/") + h5u.configFromH5(param_file, config) +else: + raise ValueError("Parameter file extension must be .py or .h5") + +print("param_file is", param_file) + +if (len(sys.argv) > 2): + savename = sys.argv[2] +else: + savename = "roket_default.h5" + +############################################################################ +# _ _ _ +# (_)_ __ (_) |_ ___ +# | | '_ \| | __/ __| +# | | | | | | 
|_\__ \ +# |_|_| |_|_|\__|___/ +############################################################################ + +if (hasattr(config, "simul_name")): + if (config.simul_name is None): + simul_name = "" + else: + simul_name = config.simul_name +else: + simul_name = "" +print("simul name is", simul_name) + +matricesToLoad = {} +if (simul_name == b""): + clean = 1 +else: + clean = 0 + param_dict = h5u.params_dictionary(config) + matricesToLoad = h5u.checkMatricesDataBase(os.environ["SHESHA_ROOT"] + "/data/", + config, param_dict) +#initialisation: +# context +#c=ch.carmaWrap_context(7) +c = ch.carmaWrap_context(devices=np.array([6], dtype=np.int32)) +#c.set_active_device(device) + +# wfs +print("->wfs") +wfs, tel = ao.wfs_init(config.p_wfss, config.p_atmos, config.p_tel, config.p_geom, + config.p_target, config.p_loop, config.p_dms) + +# atmos +print("->atmos") +atm = ao.atmos_init(c, config.p_atmos, config.p_tel, config.p_geom, config.p_loop, + config.p_wfss, wfs, config.p_target, rank=0, clean=clean, + load=matricesToLoad) + +# dm +print("->dm") +dms = ao.dm_init(config.p_dms, config.p_wfss, wfs, config.p_geom, config.p_tel) + +# target +print("->target") +tar = ao.target_init(c, tel, config.p_target, config.p_atmos, config.p_geom, + config.p_tel, config.p_dms) + +print("->rtc") +# rtc +rtc = ao.rtc_init(tel, wfs, config.p_wfss, dms, config.p_dms, config.p_geom, + config.p_rtc, config.p_atmos, atm, config.p_tel, config.p_loop, + clean=clean, simul_name=simul_name, load=matricesToLoad) + +if not clean: + h5u.validDataBase(os.environ["SHESHA_ROOT"] + "/data/", matricesToLoad) + +print("====================") +print("init done") +print("====================") +print("objects initialzed on GPU:") +print("--------------------------------------------------------") +print(atm) +print(wfs) +print(dms) +print(tar) +print(rtc) + +print("----------------------------------------------------") +print("iter# | SE SR image | LE SR image | Fitting | LE SR phase var") 
+print("----------------------------------------------------") + +error_flag = True in [w.roket for w in config.p_wfss] + + +############################################################################## +# _ ___ _ +# /_\ / _ \ | |___ ___ _ __ +# / _ \ (_) | | / _ \/ _ \ '_ \ +# /_/ \_\___/ |_\___/\___/ .__/ +# |_| +############################################################################## +def loop(n): + """ + Performs the main AO loop for n interations. First, initialize buffers + for error breakdown computations. Then, at the end of each iteration, just + before applying the new DM shape, calls the error_breakdown function. + + :param n: (int) : number of iterations + + :return: + com : (np.array((n,nactus))) : full command buffer + + noise_com : (np.array((n,nactus))) : noise contribution for error breakdown + + alias_wfs_com : (np.array((n,nactus))) : aliasing estimation in the WFS direction + + tomo_com : (np.array((n,nactus))) : tomography error estimation + + H_com : (np.array((n,nactus))) : Filtered modes contribution for error breakdown + + trunc_com : (np.array((n,nactus))) : Truncature and sampling error of WFS + + bp_com : (np.array((n,nactus))) : Bandwidth error estimation on target + + mod_com : (np.array((n,nactus))) : Commanded modes expressed on the actuators + + fit : (float) : fitting (mean variance of the residual target phase after projection) + + SR : (float) : final strehl ratio returned by the simulation + """ + if (error_flag): + # Initialize buffers for error breakdown + nactu = rtc.get_command(0).size + com = np.zeros((n, nactu), dtype=np.float32) + noise_com = np.zeros((n, nactu), dtype=np.float32) + alias_wfs_com = np.copy(noise_com) + wf_com = np.copy(noise_com) + tomo_com = np.copy(noise_com) + trunc_com = np.copy(noise_com) + H_com = np.copy(noise_com) + mod_com = np.copy(noise_com) + bp_com = np.copy(noise_com) + fit = np.zeros(n) + psf_ortho = tar.get_image(0, 'se') * 0. 
+ Ee = np.copy(noise_com) + Ff = np.copy(Ee) + #gamma = 1.0 + gRD = np.identity(RD.shape[0]) - config.p_controllers[0].gain * gamma * RD + t0 = time.time() + for i in range(n): + atm.move_atmos() + + if (config.p_controllers[0].type == b"geo"): + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + rtc.docontrol_geo(0, dms, tar, 0) + rtc.applycontrol(0, dms) + tar.dmtrace(0, dms) + else: + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + tar.dmtrace(t, dms) + for w in range(len(config.p_wfss)): + wfs.sensors_trace(w, "all", tel, atm, dms) + wfs.sensors_compimg(w) + rtc.docentroids(0) + rtc.docontrol(0) + # if( i%500==0 and i>0): + # #gamma = centroid_gain(Ff[i-500:i,:],Ee[i-500:i,:]) + # gRD = np.identity(RD.shape[0])-config.p_controllers[0].gain*gamma*RD + if (error_flag and i > -1): + #compute the error breakdown for this iteration + error_breakdown(com, noise_com, alias_wfs_com, tomo_com, H_com, + trunc_com, bp_com, wf_com, mod_com, fit, psf_ortho, i, + Ee, Ff, gamma, gRD) + + rtc.applycontrol(0, dms) + + if ((i + 1) % 100 == 0 and i > -1): + strehltmp = tar.get_strehl(0) + print(i + 1, "\t", strehltmp[0], "\t", strehltmp[1], "\t", + np.exp(-strehltmp[2]), "\t", np.exp(-strehltmp[3])) + t1 = time.time() + print(" loop execution time:", t1 - t0, " (", n, "iterations), ", (t1 - t0) / n, + "(mean) ", n / (t1 - t0), "Hz") + if (error_flag): + #Returns the error breakdown + SR2 = np.exp(-tar.get_strehl(0, comp_strehl=False)[3]) + SR = tar.get_strehl(0, comp_strehl=False)[1] + #bp_com[-1,:] = bp_com[-2,:] + #SR = tar.get_strehl(0,comp_strehl=False)[1] + return com, noise_com, alias_wfs_com, tomo_com, H_com, trunc_com, bp_com, mod_com, np.mean( + fit[N_preloop:]), SR, SR2, psf_ortho, Ee, Ff + + +def preloop(n): + """ + Performs the main AO loop for n interations. First, initialize buffers + for error breakdown computations. 
Then, at the end of each iteration, just + before applying the new DM shape, calls the error_breakdown function. + + :param n: (int) : number of iterations + + :return: + com : (np.array((n,nactus))) : full command buffer + + noise_com : (np.array((n,nactus))) : noise contribution for error breakdown + + alias_wfs_com : (np.array((n,nactus))) : aliasing estimation in the WFS direction + + tomo_com : (np.array((n,nactus))) : tomography error estimation + + H_com : (np.array((n,nactus))) : Filtered modes contribution for error breakdown + + trunc_com : (np.array((n,nactus))) : Truncature and sampling error of WFS + + bp_com : (np.array((n,nactus))) : Bandwidth error estimation on target + + mod_com : (np.array((n,nactus))) : Commanded modes expressed on the actuators + + fit : (float) : fitting (mean variance of the residual target phase after projection) + + SR : (float) : final strehl ratio returned by the simulation + """ + for i in range(0, n): + atm.move_atmos() + + if (config.p_controllers[0].type == b"geo"): + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + rtc.docontrol_geo(0, dms, tar, 0) + rtc.applycontrol(0, dms) + else: + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + for w in range(len(config.p_wfss)): + wfs.sensors_trace(w, "all", tel, atm, dms) + wfs.sensors_compimg(w) + rtc.docentroids(0) + rtc.docontrol(0) + + rtc.applycontrol(0, dms) + + +################################################################################### +# ___ ___ _ _ +# | __|_ _ _ _ ___ _ _ | _ )_ _ ___ __ _| |____| |_____ __ ___ _ +# | _|| '_| '_/ _ \ '_| | _ \ '_/ -_) _` | / / _` / _ \ V V / ' \ +# |___|_| |_| \___/_| |___/_| \___\__,_|_\_\__,_\___/\_/\_/|_||_| +################################################################################### +def error_breakdown(com, noise_com, alias_wfs_com, tomo_com, H_com, trunc_com, bp_com, + wf_com, mod_com, fit, psf_ortho, i, Ee, Ff, gamma, gRD): + """ + Compute the error breakdown 
of the AO simulation. Returns the error commands of
+    each contributor. Suppose no delay (for now) and only 2 controllers : the main one, controller #0, (specified on the parameter file)
+    and the geometric one, controller #1 (automatically added if roket is asked in the parameter file)
+    Commands are computed by applying the loop filter on various kinds of commands : (see schema_simulation_budget_erreur_v2)
+
+    - Ageom : Aliasing contribution on WFS direction
+        Obtained by computing commands from DM orthogonal phase (projection + slopes_geom)
+
+    - B : Projection on the target direction
+        Obtained as the commands output of the geometric controller
+
+    - C : Wavefront
+        Obtained by computing commands from DM parallel phase (RD*B)
+
+    - E : Wavefront + aliasing + ech/trunc + tomo
+        Obtained by performing the AO loop iteration without noise on the WFS
+
+    - F : Wavefront + aliasing + tomo
+        Obtained by performing the AO loop iteration without noise on the WFS and using phase deriving slopes
+
+    - G : tomo
+
+    Note : rtc.get_err returns -CMAT.slopes
+
+    Args:
+        noise_com : np.array((niter,nactu)) : Noise contribution
+            Computed with com-E
+
+        alias_wfs_com : np.array((niter,nactu)) : Aliasing on WFS direction contribution
+            Computed with Ageom
+
+        tomo_com : np.array((niter,nactu)) : Tomographic error contribution
+            Computed with C-B
+
+        H_com : np.array((niter,nactu)) : Filtered modes error
+            Computed with B
+
+        trunc_com : np.array((niter,nactu)) : sampling/truncature error contribution
+            Computed with E-F
+
+        bp_com : np.array((niter,nactu)) : Bandwidth error
+
+        wf_com : np.array((niter,nactu)) : Reconstructed wavefront
+
+        mod_com : np.array((niter,nactu)) : commanded modes
+
+        fit : np.array((niter)) : fitting value
+
+        i : (int) : current iteration number
+
+    """
+    g = config.p_controllers[0].gain
+    Dcom = rtc.get_command(0)
+    Derr = rtc.get_err(0)
+    com[i, :] = Dcom
+    tarphase = tar.get_phase(0)
+
########################################################################### + ## Noise contribution + ########################################################################### + if (config.p_wfss[0].type == b"sh"): + ideal_bincube = wfs.get_bincubeNotNoisy(0) + bincube = wfs.get_bincube(0) + if (config.p_centroiders[0].type == b"tcog" + ): # Select the same pixels with or without noise + invalidpix = np.where(bincube <= config.p_centroiders[0].thresh) + ideal_bincube[invalidpix] = 0 + rtc.setthresh(0, -1e16) + wfs.set_bincube(0, ideal_bincube) + elif (config.p_wfss[0].type == b"pyrhr"): + ideal_pyrimg = wfs.get_binimg_notnoisy(0) + wfs.set_pyrimg(0, ideal_pyrimg) + + rtc.docentroids(0) + if (config.p_centroiders[0].type == b"tcog"): + rtc.setthresh(0, config.p_centroiders[0].thresh) + + rtc.docontrol(0) + E = rtc.get_err(0) + Ee[i, :] = E + # Apply loop filter to get contribution of noise on commands + if (i + 1 < config.p_loop.niter): + noise_com[i + 1, :] = gRD.dot(noise_com[i, :]) + g * (Derr - E) + + ########################################################################### + ## Sampling/truncature contribution + ########################################################################### + rtc.docentroids_geom(0) + rtc.docontrol(0) + F = rtc.get_err(0) + Ff[i, :] = F + # Apply loop filter to get contribution of sampling/truncature on commands + if (i + 1 < config.p_loop.niter): + trunc_com[i + 1, :] = gRD.dot(trunc_com[i, :]) + g * (E - gamma * F) + + ########################################################################### + ## Aliasing contribution on WFS direction + ########################################################################### + rtc.docontrol_geo_onwfs(1, dms, wfs, 0) + rtc.applycontrol(1, dms) + for w in range(len(config.p_wfss)): + wfs.sensors_trace(w, "dm", tel, atm, dms) + """ + wfs.sensors_compimg(0) + if(config.p_wfss[0].type == b"sh"): + ideal_bincube = wfs.get_bincubeNotNoisy(0) + bincube = wfs.get_bincube(0) + 
if(config.p_centroiders[0].type == b"tcog"): # Select the same pixels with or without noise + invalidpix = np.where(bincube <= config.p_centroiders[0].thresh) + ideal_bincube[invalidpix] = 0 + rtc.setthresh(0,-1e16) + wfs.set_bincube(0,ideal_bincube) + elif(config.p_wfss[0].type == b"pyrhr"): + ideal_pyrimg = wfs.get_binimg_notnoisy(0) + wfs.set_pyrimg(0,ideal_pyrimg) + """ + rtc.docentroids_geom(0) + rtc.docontrol(0) + Ageom = rtc.get_err(0) + if (i + 1 < config.p_loop.niter): + alias_wfs_com[i + 1, :] = gRD.dot( + alias_wfs_com[i, :]) + gamma * g * (Ageom) # - (E-F)) + + ########################################################################### + ## Wavefront + filtered modes reconstruction + ########################################################################### + tar.atmos_trace(0, atm, tel) + rtc.docontrol_geo(1, dms, tar, 0) + B = rtc.get_command(1) + + ########################################################################### + ## Fitting + ########################################################################### + rtc.applycontrol(1, dms) + tar.dmtrace(0, dms, do_phase_var=0) + fit[i] = tar.get_strehl(0, comp_strehl=False)[2] + if (i >= N_preloop): + psf_ortho += tar.get_image(0, 'se') / niters + + ########################################################################### + ## Filtered modes error & Commanded modes + ########################################################################### + modes = P.dot(B) + modes_filt = modes.copy() * 0. 
+    modes_filt[-nfiltered - 2:-2] = modes[-nfiltered - 2:-2]
+    H_com[i, :] = Btt.dot(modes_filt)
+    modes[-nfiltered - 2:-2] = 0
+    mod_com[i, :] = Btt.dot(modes)
+
+    ###########################################################################
+    ## Bandwidth error
+    ###########################################################################
+    C = mod_com[i, :] - mod_com[i - 1, :]
+
+    bp_com[i, :] = gRD.dot(bp_com[i - 1, :]) - C
+
+    ###########################################################################
+    ## Tomographic error
+    ###########################################################################
+    #G = F - (mod_com[i,:] + Ageom - np.dot(RDgeom,com[i-1,:]))
+    for w in range(len(config.p_wfss)):
+        wfs.sensors_trace(w, "atmos", tel, atm, dms)
+    rtc.docontrol_geo_onwfs(1, dms, wfs, 0)
+    G = rtc.get_command(1)
+    modes = P.dot(G)
+    modes[-nfiltered - 2:-2] = 0
+    wf_com[i, :] = Btt.dot(modes)
+
+    G = mod_com[i, :] - wf_com[i, :]
+    if (i + 1 < config.p_loop.niter):
+        tomo_com[i + 1, :] = gRD.dot(tomo_com[i, :]) - g * RD.dot(G)
+
+    # Without anyone noticing...
+ tar.set_phase(0, tarphase) + rtc.setCom(0, Dcom) + + +def centroid_gain(E, F): + + cgains = np.zeros(E.shape[1]) + for k in range(E.shape[1]): + cgains[k] = np.polyfit(E[:, k], F[:, k], 1)[0] + + return np.mean(cgains) + + +################################################################################ +# ___ _ +# | _ ) __ _ __(_)___ +# | _ \/ _` (_-< (_-< +# |___/\__,_/__/_/__/ +################################################################################ +def compute_btt2(): + IF = rtc.get_IFsparse(1).T + N = IF.shape[0] + n = IF.shape[1] + #T = IF[:,-2:].copy() + T = rtc.get_IFtt(1) + #IF = IF[:,:n-2] + n = IF.shape[1] + + delta = IF.T.dot(IF).toarray() / N + + # Tip-tilt + piston + Tp = np.ones((T.shape[0], T.shape[1] + 1)) + Tp[:, :2] = T.copy() #.toarray() + deltaT = IF.T.dot(Tp) / N + # Tip tilt projection on the pzt dm + tau = np.linalg.inv(delta).dot(deltaT) + + # Famille generatrice sans tip tilt + G = np.identity(n) + tdt = tau.T.dot(delta).dot(tau) + subTT = tau.dot(np.linalg.inv(tdt)).dot(tau.T).dot(delta) + G -= subTT + + # Base orthonormee sans TT + gdg = G.T.dot(delta).dot(G) + U, s, V = np.linalg.svd(gdg) + U = U[:, :U.shape[1] - 3] + s = s[:s.size - 3] + L = np.identity(s.size) / np.sqrt(s) + B = G.dot(U).dot(L) + + # Rajout du TT + TT = T.T.dot(T) / N #.toarray()/N + Btt = np.zeros((n + 2, n - 1)) + Btt[:B.shape[0], :B.shape[1]] = B + mini = 1. 
/ np.sqrt(np.abs(TT))
+    mini[0, 1] = 0
+    mini[1, 0] = 0
+    Btt[n:, n - 3:] = mini
+
+    # Calcul du projecteur actus-->modes
+    delta = np.zeros((n + T.shape[1], n + T.shape[1]))
+    #IF = rtc.get_IFsparse(1).T
+    delta[:-2, :-2] = IF.T.dot(IF).toarray() / N
+    delta[-2:, -2:] = T.T.dot(T) / N
+    P = Btt.T.dot(delta)
+
+    return Btt.astype(np.float32), P.astype(np.float32)
+
+
+def compute_cmatWithBtt(Btt, nfilt):
+    D = rtc.get_imat(0)
+    #D = ao.imat_geom(wfs,config.p_wfss,config.p_controllers[0],dms,config.p_dms,meth=0)
+    # Filtering on Btt modes
+    Btt_filt = np.zeros((Btt.shape[0], Btt.shape[1] - nfilt))
+    Btt_filt[:, :Btt_filt.shape[1] - 2] = Btt[:, :Btt.shape[1] - (nfilt + 2)]
+    Btt_filt[:, Btt_filt.shape[1] - 2:] = Btt[:, Btt.shape[1] - 2:]
+
+    # Modal interaction basis
+    Dm = D.dot(Btt_filt)
+    # Direct inversion
+    Dmp = np.linalg.inv(Dm.T.dot(Dm)).dot(Dm.T)
+    # Command matrix
+    cmat = Btt_filt.dot(Dmp)
+
+    return Dm.astype(np.float32), cmat.astype(np.float32)
+
+
+def compute_cmatWithBtt2(Btt, nfilt):
+    D = rtc.get_imat(0)
+
+    # Modal interaction basis
+    Dm = D.dot(Btt)
+    # Filtering on modal imat
+    DmtDm = Dm.T.dot(Dm)
+    U, s, V = np.linalg.svd(DmtDm)
+    s = 1. / s
+    s[s.shape[0] - nfilt - 2:s.shape[0] - 2] = 0.
+ DmtDm1 = U.dot(np.diag(s)).dot(U.T) + Dmp = DmtDm1.dot(Dm.T) + # Command matrix + cmat = Btt.dot(Dmp) + + return Dm.astype(np.float32), cmat.astype(np.float32) + + +########################################################################################### +# ___ _ __ _ _ _ +# / __|_____ ____ _ _ _(_)__ _ _ _ __ ___ / _|___ __ ___ _ _ _ _ ___| |__ _| |_(_)___ _ _ +# | (__/ _ \ V / _` | '_| / _` | ' \/ _/ -_) > _|_ _| / _/ _ \ '_| '_/ -_) / _` | _| / _ \ ' \ +# \___\___/\_/\__,_|_| |_\__,_|_||_\__\___| \_____| \__\___/_| |_| \___|_\__,_|\__|_\___/_||_| +# +########################################################################################### + + +def cov_cor(P, noise, trunc, alias, H, bp, tomo): + cov = np.zeros((6, 6)) + cor = np.zeros((6, 6)) + bufdict = { + "0": noise.T, + "1": trunc.T, + "2": alias.T, + "3": H.T, + "4": bp.T, + "5": tomo.T + } + for i in range(cov.shape[0]): + for j in range(cov.shape[1]): + if (j >= i): + tmpi = P.dot(bufdict[str(i)]) + tmpj = P.dot(bufdict[str(j)]) + cov[i, j] = np.sum( + np.mean(tmpi * tmpj, axis=1) - + np.mean(tmpi, axis=1) * np.mean(tmpj, axis=1)) + else: + cov[i, j] = cov[j, i] + + s = np.reshape(np.diag(cov), (cov.shape[0], 1)) + sst = np.dot(s, s.T) + ok = np.where(sst) + cor[ok] = cov[ok] / np.sqrt(sst[ok]) + + return cov, cor + + +########################################################################################### +# ___ +# / __| __ ___ _____ +# \__ \/ _` \ V / -_) +# |___/\__,_|\_/\___| +########################################################################################### + + +def save_it(filename): + IF = rtc.get_IFsparse(1) + TT = rtc.get_IFtt(1) + + tmp = (config.p_geom._ipupil.shape[0] - + (config.p_dms[0]._n2 - config.p_dms[0]._n1 + 1)) / 2 + tmp_e0 = config.p_geom._ipupil.shape[0] - tmp + tmp_e1 = config.p_geom._ipupil.shape[1] - tmp + pup = config.p_geom._ipupil[tmp:tmp_e0, tmp:tmp_e1] + indx_pup = np.where(pup.flatten() > 0)[0].astype(np.int32) + dm_dim = config.p_dms[0]._n2 - 
config.p_dms[0]._n1 + 1 + cov, cor = cov_cor(P, noise_com, trunc_com, alias_wfs_com, H_com, bp_com, tomo_com) + psf = tar.get_image(0, "le", fluxNorm=False) + + fname = "/home/fferreira/Data/" + filename + pdict = { + "noise": noise_com.T, + "aliasing": alias_wfs_com.T, + "tomography": tomo_com.T, + "filtered modes": H_com.T, + "non linearity": trunc_com.T, + "bandwidth": bp_com.T, + "wf_com": wf_com.T, + "P": P, + "Btt": Btt, + "IF.data": IF.data, + "IF.indices": IF.indices, + "IF.indptr": IF.indptr, + "TT": TT, + "dm_dim": dm_dim, + "indx_pup": indx_pup, + "fitting": fit, + "SR": SR, + "SR2": SR2, + "cov": cov, + "cor": cor, + "psfortho": np.fft.fftshift(psf_ortho), + "E": E, + "F": F, + "dm.xpos": config.p_dms[0]._xpos, + "dm.ypos": config.p_dms[0]._ypos, + "R": cmat, + "Nact": Nact + } + h5u.save_h5(fname, "psf", config, psf) + #h5u.writeHdf5SingleDataset(fname,com.T,datasetName="com") + for k in list(pdict.keys()): + h5u.save_hdf5(fname, k, pdict[k]) + + +############################################################################################### +# _ _ +# | |_ ___ ___| |_ ___ +# | __/ _ \/ __| __/ __| +# | || __/\__ \ |_\__ \ +# \__\___||___/\__|___/ +############################################################################################### +nfiltered = int(config.p_controllers[0].maxcond) +niters = config.p_loop.niter +N_preloop = 1000 +config.p_loop.set_niter(niters + N_preloop) +Btt, P = compute_btt2() +rtc.load_Btt(1, Btt.dot(Btt.T)) +Dm, cmat = compute_cmatWithBtt(Btt, nfiltered) +rtc.set_cmat(0, cmat) +R = rtc.get_cmat(0) +imat = rtc.get_imat(0) +RD = np.dot(R, imat) +Nact = ao.create_nact_geom(config.p_dms, 0) +gamma = 1. 
/ 0.51495 +#gamma = centroid_gain(100) +#print("gamma = ",gamma) + +#gRD = np.identity(RD.shape[0])-config.p_controllers[0].gain*gamma*RD +#diagRD = np.diag(gRD) +#gRD = np.diag(diagRD) +#gRD=np.diag(gRD) + +#imat_geom = ao.imat_geom(wfs,config.p_wfss,config.p_controllers[0],dms,config.p_dms,meth=0) +#RDgeom = np.dot(R,imat_geom) +#preloop(1000) + +com, noise_com, alias_wfs_com, tomo_com, H_com, trunc_com, bp_com, wf_com, fit, SR, SR2, psf_ortho, E, F = loop( + niters + N_preloop) +noise_com = noise_com[N_preloop:, :] +trunc_com = trunc_com[N_preloop:, :] +alias_wfs_com = alias_wfs_com[N_preloop:, :] +H_com = H_com[N_preloop:, :] +bp_com = bp_com[N_preloop:, :] +tomo_com = tomo_com[N_preloop:, :] +E = E[N_preloop:, :] +F = F[N_preloop:, :] +save_it(savename) diff --git a/guardians/misc/roket_gpu.py b/guardians/misc/roket_gpu.py new file mode 100644 index 0000000..821fa09 --- /dev/null +++ b/guardians/misc/roket_gpu.py @@ -0,0 +1,500 @@ +""" +Created on Tue Jul 12 09:28:23 2016 + +@author: fferreira +""" + +import cProfile +import pstats as ps + +import sys, os +import numpy as np +import carmaWrap as ch +import shesha as ao +import time +import matplotlib.pyplot as plt +plt.ion() +import hdf5_util as h5u +import pandas +from scipy.sparse import csr_matrix + +if (len(sys.argv) < 2): + error = 'command line should be at least:"python -i test.py parameters_filename"\n with "parameters_filename" the path to the parameters file' + raise Exception(error) + +#get parameters from file +param_file = sys.argv[1] +if (param_file.split('.')[-1] == b"py"): + filename = param_file.split('/')[-1] + param_path = param_file.split(filename)[0] + sys.path.insert(0, param_path) + exec("import %s as config" % filename.split(".py")[0]) + sys.path.remove(param_path) +elif (param_file.split('.')[-1] == b"h5"): + sys.path.insert(0, os.environ["SHESHA_ROOT"] + "/data/par/par4bench/") + import scao_sh_16x16_8pix as config + sys.path.remove(os.environ["SHESHA_ROOT"] + "/data/par/par4bench/") 
+ h5u.configFromH5(param_file, config) +else: + raise ValueError("Parameter file extension must be .py or .h5") + +print("param_file is", param_file) + +#if(len(sys.argv) > 2): +# device=int(sys.argv[2]) +#else: +# device = 0 +if (len(sys.argv) > 2): + savename = sys.argv[2] +else: + savename = "roket_default.h5" + +print("save file is ", savename) +############################################################################ +# _ _ _ +# (_)_ __ (_) |_ ___ +# | | '_ \| | __/ __| +# | | | | | | |_\__ \ +# |_|_| |_|_|\__|___/ +############################################################################ + +if (hasattr(config, "simul_name")): + if (config.simul_name is None): + simul_name = "" + else: + simul_name = config.simul_name +else: + simul_name = "" +print("simul name is", simul_name) + +matricesToLoad = {} +if (simul_name == b""): + clean = 1 +else: + clean = 0 + param_dict = h5u.params_dictionary(config) + matricesToLoad = h5u.checkMatricesDataBase(os.environ["SHESHA_ROOT"] + "/data/", + config, param_dict) +#initialisation: +# context +#c=ch.carmaWrap_context(device) +c = ch.carmaWrap_context(devices=np.array([0, 1], dtype=np.int32)) +#c.set_active_device(device) + +# wfs +print("->wfs") +wfs, tel = ao.wfs_init(config.p_wfss, config.p_atmos, config.p_tel, config.p_geom, + config.p_target, config.p_loop, config.p_dms) + +# atmos +print("->atmos") +atm = ao.atmos_init(c, config.p_atmos, config.p_tel, config.p_geom, config.p_loop, + config.p_wfss, wfs, config.p_target, rank=0, clean=clean, + load=matricesToLoad) + +# dm +print("->dm") +dms = ao.dm_init(config.p_dms, config.p_wfss, wfs, config.p_geom, config.p_tel) + +# target +print("->target") +tar = ao.target_init(c, tel, config.p_target, config.p_atmos, config.p_geom, + config.p_tel, config.p_dms) + +print("->rtc") +# rtc +rtc = ao.rtc_init(tel, wfs, config.p_wfss, dms, config.p_dms, config.p_geom, + config.p_rtc, config.p_atmos, atm, config.p_tel, config.p_loop, + clean=clean, simul_name=simul_name, 
load=matricesToLoad)
+
+if not clean:
+    h5u.validDataBase(os.environ["SHESHA_ROOT"] + "/data/", matricesToLoad)
+
+print("====================")
+print("init done")
+print("====================")
+print("objects initialzed on GPU:")
+print("--------------------------------------------------------")
+print(atm)
+print(wfs)
+print(dms)
+print(tar)
+print(rtc)
+
+print("----------------------------------------------------")
+print("iter# | SE SR image | LE SR image | Fitting | LE SR phase var")
+print("----------------------------------------------------")
+
+error_flag = True in [w.roket for w in config.p_wfss]
+
+
+##############################################################################
+#          _   ___    _
+#         /_\ / _ \  | |___  ___ _ __
+#        / _ \ (_) | | / _ \/ _ \ '_ \
+#       /_/ \_\___/  |_\___/\___/ .__/
+#                               |_|
+##############################################################################
+def loop(n):
+    """
+    Performs the main AO loop for n iterations. First, initialize buffers
+    for error breakdown computations. Then, at the end of each iteration, just
+    before applying the new DM shape, calls the error_breakdown function.
+ + :param n: (int) : number of iterations + + :return: + com : (np.array((n,nactus))) : full command buffer + + noise_com : (np.array((n,nactus))) : noise contribution for error breakdown + + alias_wfs_com : (np.array((n,nactus))) : aliasing estimation in the WFS direction + + tomo_com : (np.array((n,nactus))) : tomography error estimation + + H_com : (np.array((n,nactus))) : Filtered modes contribution for error breakdown + + trunc_com : (np.array((n,nactus))) : Truncature and sampling error of WFS + + bp_com : (np.array((n,nactus))) : Bandwidth error estimation on target + + mod_com : (np.array((n,nactus))) : Commanded modes expressed on the actuators + + fit : (float) : fitting (mean variance of the residual target phase after projection) + + SR : (float) : final strehl ratio returned by the simulation + """ + if (error_flag): + # Initialize buffers for error breakdown + nactu = rtc.get_command(0).size + nslopes = rtc.get_centroids(0).size + com = np.zeros((n, nactu), dtype=np.float32) + noise_com = np.zeros((n, nactu), dtype=np.float32) + alias_wfs_com = np.copy(noise_com) + wf_com = np.copy(noise_com) + tomo_com = np.copy(noise_com) + trunc_com = np.copy(noise_com) + H_com = np.copy(noise_com) + mod_com = np.copy(noise_com) + bp_com = np.copy(noise_com) + fit = np.zeros(n) + # covm = np.zeros((nslopes,nslopes)) + # covv = np.zeros((nactu,nactu)) + + t0 = time.time() + for i in range(-10, n): + atm.move_atmos() + + if (config.p_controllers[0].type == b"geo"): + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + rtc.docontrol_geo(0, dms, tar, 0) + rtc.applycontrol(0, dms) + tar.dmtrace(0, dms) + else: + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + tar.dmtrace(t, dms) + for w in range(len(config.p_wfss)): + wfs.sensors_trace(w, "all", tel, atm, dms) + wfs.sensors_compimg(w) + rtc.docentroids(0) + rtc.docontrol(0) + #m = np.reshape(rtc.get_centroids(0),(nslopes,1)) + #v = 
np.reshape(rtc.get_command(0),(nactu,1)) + if (error_flag and i > -1): + #compute the error breakdown for this iteration + #covm += m.dot(m.T) + #covv += v.dot(v.T) + roket.computeBreakdown() + rtc.applycontrol(0, dms) + + if ((i + 1) % 100 == 0 and i > -1): + strehltmp = tar.get_strehl(0) + print(i + 1, "\t", strehltmp[0], "\t", strehltmp[1], "\t", + np.exp(-strehltmp[2]), "\t", np.exp(-strehltmp[3])) + t1 = time.time() + print(" loop execution time:", t1 - t0, " (", n, "iterations), ", (t1 - t0) / n, + "(mean) ", n / (t1 - t0), "Hz") + if (error_flag): + #Returns the error breakdown + SR2 = np.exp(-tar.get_strehl(0, comp_strehl=False)[3]) + SR = tar.get_strehl(0, comp_strehl=False)[1] + #bp_com[-1,:] = bp_com[-2,:] + #SR = tar.get_strehl(0,comp_strehl=False)[1] + return SR, SR2 + + +def preloop(n): + """ + Performs the main AO loop for n interations. First, initialize buffers + for error breakdown computations. Then, at the end of each iteration, just + before applying the new DM shape, calls the error_breakdown function. 
+ + :param n: (int) : number of iterations + + :return: + com : (np.array((n,nactus))) : full command buffer + + noise_com : (np.array((n,nactus))) : noise contribution for error breakdown + + alias_wfs_com : (np.array((n,nactus))) : aliasing estimation in the WFS direction + + tomo_com : (np.array((n,nactus))) : tomography error estimation + + H_com : (np.array((n,nactus))) : Filtered modes contribution for error breakdown + + trunc_com : (np.array((n,nactus))) : Truncature and sampling error of WFS + + bp_com : (np.array((n,nactus))) : Bandwidth error estimation on target + + mod_com : (np.array((n,nactus))) : Commanded modes expressed on the actuators + + fit : (float) : fitting (mean variance of the residual target phase after projection) + + SR : (float) : final strehl ratio returned by the simulation + """ + for i in range(0, n): + atm.move_atmos() + + if (config.p_controllers[0].type == b"geo"): + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + rtc.docontrol_geo(0, dms, tar, 0) + rtc.applycontrol(0, dms) + else: + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + for w in range(len(config.p_wfss)): + wfs.sensors_trace(w, "all", tel, atm, dms) + wfs.sensors_compimg(w) + rtc.docentroids(0) + rtc.docontrol(0) + + rtc.applycontrol(0, dms) + + +################################################################################ +# ___ _ +# | _ ) __ _ __(_)___ +# | _ \/ _` (_-< (_-< +# |___/\__,_/__/_/__/ +################################################################################ +def compute_btt(): + IF = rtc.get_IFsparse(1).T + N = IF.shape[0] + n = IF.shape[1] + #T = IF[:,-2:].copy() + T = rtc.get_IFtt(1) + #IF = IF[:,:n-2] + n = IF.shape[1] + + delta = IF.T.dot(IF).toarray() / N + + # Tip-tilt + piston + Tp = np.ones((T.shape[0], T.shape[1] + 1)) + Tp[:, :2] = T.copy() #.toarray() + deltaT = IF.T.dot(Tp) / N + # Tip tilt projection on the pzt dm + tau = np.linalg.inv(delta).dot(deltaT) + + # Famille 
generatrice sans tip tilt + G = np.identity(n) + tdt = tau.T.dot(delta).dot(tau) + subTT = tau.dot(np.linalg.inv(tdt)).dot(tau.T).dot(delta) + G -= subTT + + # Base orthonormee sans TT + gdg = G.T.dot(delta).dot(G) + U, s, V = np.linalg.svd(gdg) + U = U[:, :U.shape[1] - 3] + s = s[:s.size - 3] + L = np.identity(s.size) / np.sqrt(s) + B = G.dot(U).dot(L) + + # Rajout du TT + TT = T.T.dot(T) / N #.toarray()/N + Btt = np.zeros((n + 2, n - 1)) + Btt[:B.shape[0], :B.shape[1]] = B + mini = 1. / np.sqrt(TT) + mini[0, 1] = 0 + mini[1, 0] = 0 + Btt[n:, n - 3:] = mini + + # Calcul du projecteur actus-->modes + delta = np.zeros((n + T.shape[1], n + T.shape[1])) + #IF = rtc.get_IFsparse(1).T + delta[:-2, :-2] = IF.T.dot(IF).toarray() / N + delta[-2:, -2:] = T.T.dot(T) / N + P = Btt.T.dot(delta) + + return Btt.astype(np.float32), P.astype(np.float32) + + +def compute_cmatWithBtt(Btt, nfilt): + D = rtc.get_imat(0) + #D = ao.imat_geom(wfs,config.p_wfss,config.p_controllers[0],dms,config.p_dms,meth=0) + # Filtering on Btt modes + Btt_filt = np.zeros((Btt.shape[0], Btt.shape[1] - nfilt)) + Btt_filt[:, :Btt_filt.shape[1] - 2] = Btt[:, :Btt.shape[1] - (nfilt + 2)] + Btt_filt[:, Btt_filt.shape[1] - 2:] = Btt[:, Btt.shape[1] - 2:] + + # Modal interaction basis + Dm = D.dot(Btt_filt) + # Direct inversion + Dmp = np.linalg.inv(Dm.T.dot(Dm)).dot(Dm.T) + # Command matrix + cmat = Btt_filt.dot(Dmp) + + return Dm.astype(np.float32), cmat.astype(np.float32) + + +def compute_cmatWithBtt2(Btt, nfilt): + D = rtc.get_imat(0) + + # Modal interaction basis + Dm = D.dot(Btt) + # Filtering on modal imat + DmtDm = Dm.T.dot(Dm) + U, s, V = np.linalg.svd(DmtDm) + s = 1. / s + s[s.shape[0] - nfilt - 2:s.shape[0] - 2] = 0. 
+ DmtDm1 = U.dot(np.diag(s)).dot(U.T) + Dmp = DmtDm1.dot(Dm.T) + # Command matrix + cmat = Btt.dot(Dmp) + + return Dm.astype(np.float32), cmat.astype(np.float32) + + +########################################################################################### +# ___ _ __ _ _ _ +# / __|_____ ____ _ _ _(_)__ _ _ _ __ ___ / _|___ __ ___ _ _ _ _ ___| |__ _| |_(_)___ _ _ +# | (__/ _ \ V / _` | '_| / _` | ' \/ _/ -_) > _|_ _| / _/ _ \ '_| '_/ -_) / _` | _| / _ \ ' \ +# \___\___/\_/\__,_|_| |_\__,_|_||_\__\___| \_____| \__\___/_| |_| \___|_\__,_|\__|_\___/_||_| +# +########################################################################################### + + +def cov_cor(P, noise, trunc, alias, H, bp, tomo): + cov = np.zeros((6, 6)) + bufdict = { + "0": noise.T, + "1": trunc.T, + "2": alias.T, + "3": H.T, + "4": bp.T, + "5": tomo.T + } + for i in range(cov.shape[0]): + for j in range(cov.shape[1]): + if (j >= i): + tmpi = P.dot(bufdict[str(i)]) + tmpj = P.dot(bufdict[str(j)]) + cov[i, j] = np.sum( + np.mean(tmpi * tmpj, axis=1) - + np.mean(tmpi, axis=1) * np.mean(tmpj, axis=1)) + else: + cov[i, j] = cov[j, i] + + s = np.reshape(np.diag(cov), (cov.shape[0], 1)) + sst = np.dot(s, s.T) + cor = cov / np.sqrt(sst) + + return cov, cor + + +########################################################################################### +# ___ +# / __| __ ___ _____ +# \__ \/ _` \ V / -_) +# |___/\__,_|\_/\___| +########################################################################################### + + +def save_it(filename): + IF = rtc.get_IFsparse(1) + TT = rtc.get_IFtt(1) + noise_com = roket.getContributor("noise") + trunc_com = roket.getContributor("nonlinear") + alias_wfs_com = roket.getContributor("aliasing") + H_com = roket.getContributor("filtered") + bp_com = roket.getContributor("bandwidth") + tomo_com = roket.getContributor("tomo") + fit = roket.getContributor("fitting") + + tmp = (config.p_geom._ipupil.shape[0] - + (config.p_dms[0]._n2 - config.p_dms[0]._n1 + 1)) / 2 + 
tmp_e0 = config.p_geom._ipupil.shape[0] - tmp + tmp_e1 = config.p_geom._ipupil.shape[1] - tmp + pup = config.p_geom._ipupil[tmp:tmp_e0, tmp:tmp_e1] + indx_pup = np.where(pup.flatten() > 0)[0].astype(np.int32) + dm_dim = config.p_dms[0]._n2 - config.p_dms[0]._n1 + 1 + cov, cor = cov_cor(P, noise_com, trunc_com, alias_wfs_com, H_com, bp_com, tomo_com) + psf = tar.get_image(0, "le", fluxNorm=False) + psfortho = roket.get_tar_imageortho() + covv = roket.get_covv() + covm = roket.get_covm() + + fname = "/home/fferreira/Data/" + filename + pdict = { + "noise": noise_com.T, + "aliasing": alias_wfs_com.T, + "tomography": tomo_com.T, + "filtered modes": H_com.T, + "non linearity": trunc_com.T, + "bandwidth": bp_com.T, + "P": P, + "Btt": Btt, + "IF.data": IF.data, + "IF.indices": IF.indices, + "IF.indptr": IF.indptr, + "TT": TT, + "dm_dim": dm_dim, + "indx_pup": indx_pup, + "fitting": fit, + "SR": SR, + "SR2": SR2, + "cov": cov, + "cor": cor, + "psfortho": psfortho, + "covm": covm, + "covv": covv + } + h5u.save_h5(fname, "psf", config, psf) + #h5u.writeHdf5SingleDataset(fname,com.T,datasetName="com") + for k in list(pdict.keys()): + h5u.save_hdf5(fname, k, pdict[k]) + + +############################################################################################### +# _ _ +# | |_ ___ ___| |_ ___ +# | __/ _ \/ __| __/ __| +# | || __/\__ \ |_\__ \ +# \__\___||___/\__|___/ +############################################################################################### +nfiltered = config.p_controllers[0].maxcond +niters = config.p_loop.niter +#config.p_loop.set_niter(niters) +Btt, P = compute_btt() +rtc.load_Btt(1, Btt.dot(Btt.T)) +Dm, cmat = compute_cmatWithBtt(Btt, nfiltered) +rtc.set_cmat(0, cmat) +R = rtc.get_cmat(0) +imat = rtc.get_imat(0) +RD = np.dot(R, imat).astype(np.float32) + +gRD = (np.identity(RD.shape[0]) - config.p_controllers[0].gain * RD).astype(np.float32) +roket = ao.roket_init(rtc, wfs, tar, dms, tel, atm, 0, 1, Btt.shape[0], Btt.shape[1], + nfiltered, 
niters, Btt, P, gRD, RD) +#diagRD = np.diag(gRD) +#gRD = np.diag(diagRD) +#gRD=np.diag(gRD) + +#imat_geom = ao.imat_geom(wfs,config.p_wfss,config.p_controllers[0],dms,config.p_dms,meth=0) +#RDgeom = np.dot(R,imat_geom) +preloop(1000) +SR, SR2 = loop(niters) + +save_it(savename) diff --git a/guardians/misc/roket_widget.py b/guardians/misc/roket_widget.py new file mode 100644 index 0000000..bdb560b --- /dev/null +++ b/guardians/misc/roket_widget.py @@ -0,0 +1,809 @@ +""" +Created on Tue Feb 2 09:39:35 2016 + +@author: fferreira + +To launch it : + + - locally : + bokeh serve --show bokeh_display.py + - as a server : + bokeh serve --port 8081 --host hippo6.obspm.fr:8081 bokeh_display.py + then, open a web browser and connect to http://hippo6.obspm.fr:8081/bokeh_display.py +""" + +import numpy as np +import glob +import os, sys + +import h5py +import pandas +import datetime + +from bokeh.plotting import Figure, figure +from bokeh.models import Range1d, ColumnDataSource, HoverTool +from bokeh.models.widgets import Select, Slider, CheckboxButtonGroup, Panel, Tabs, Button, Dialog, Paragraph, RadioButtonGroup, TextInput +from bokeh.io import curdoc +from bokeh.models.layouts import HBox, VBox +from bokeh.models.widgets import DataTable, DateFormatter, TableColumn +from bokeh.client import push_session + +import matplotlib.pyplot as plt +import matplotlib as mpl +from scipy.sparse import csr_matrix + +sys.path.insert(0, os.environ["SHESHA_ROOT"] + "/test/gamora/") +import gamora + + +###################################################################################### +# _ _ _ +# (_)_ _ (_) |_ ___ +# | | ' \| | _(_-< +# |_|_||_|_|\__/__/ +###################################################################################### +class html_display: + + def __del__(self): + files = glob.glob("/home/fferreira/public_html/roket_display*") + for f in files: + os.remove(f) + + def __init__(self): + + self.datapath = "/home/fferreira/Data/correlation/" + self.covmat = None + 
self.files = glob.glob(self.datapath + "roket_*.h5") + self.files.sort() + self.f_list = [] + for f in self.files: + self.f_list.append(f.split('/')[-1]) + + self.f = h5py.File(self.files[0], mode='r+') + + self.Lambda_tar = self.f.attrs["target.Lambda"][0] + self.Btt = self.f["Btt"][:] + + self.IF = csr_matrix((self.f["IF.data"][:], self.f["IF.indices"][:], + self.f["IF.indptr"][:])) + self.IF = self.IF.T + #self.TT = self.f["TT"][:] + self.P = self.f["P"][:] #/np.sqrt(self.IF.shape[0]) + + self.indx_pup = self.f["indx_pup"][:] + self.pup = np.zeros((self.f["dm_dim"].value, self.f["dm_dim"].value)) + + self.niter = self.f["noise"][:].shape[1] + self.nactus = self.f["noise"][:].shape[0] + self.nmodes = self.P.shape[0] + self.swap = np.arange(self.nmodes) - 2 + self.swap[0:2] = [self.nmodes - 2, self.nmodes - 1] + + self.plot_type = ["Commands", "Variance"] + self.coms_list = [ + "noise", "aliasing", "tomography", "filtered modes", "bandwidth", + "non linearity" + ] + + self.cov = self.f["cov"][:] + self.cor = self.f["cor"][:] + self.psf_compass = np.fft.fftshift(self.f["psf"][:]) + self.psf_fitting = np.fft.fftshift(self.f["psfortho"][:]) + self.psf = None + self.otftel = None + self.otf2 = None + self.gamora = None + self.basis = ["Actuators", "Btt"] + self.url = "http://hippo6.obspm.fr/~fferreira/roket_display" + self.old = None + + ###################################################################################### + # _ _ _ + # __ __ _(_)__| |__ _ ___| |_ ___ + # \ V V / / _` / _` / -_) _(_-< + # \_/\_/|_\__,_\__, \___|\__/__/ + # |___/ + ###################################################################################### + self.dialog = Dialog(closable=False, visible=False, title="Dialog Box", + content="") + + # Tab 1 + self.comsTags = Paragraph(text="Commands type", height=25) + self.coms = CheckboxButtonGroup(labels=self.coms_list, active=[0]) + self.DB_select = Select(title="Database", value=self.f_list[0], + options=self.f_list) + self.DB_button = 
Button(label="Load DB", type="success") + self.plot_select = Select(title="Plot type", value=self.plot_type[1], + options=self.plot_type) + self.basis_select1 = Select(title="Basis", value=self.basis[0], + options=self.basis) + self.iter_select = Slider(title="Iteration number", start=1, end=self.niter, + step=1) + self.plusTag = Paragraph(text="Add :", height=25) + self.plus_select = CheckboxButtonGroup( + labels=self.coms_list + ["fitting", "CORRECT"], + active=[0, 1, 2, 3, 4, 5, 6]) + self.moinsTag = Paragraph(text="Substract :", height=25) + self.moins_select = CheckboxButtonGroup(labels=self.coms_list + ["fitting"], + active=[]) + self.diff_button = Button(label="Sum !", type="success") + # Tab 2 + self.A = Select(title="Commands A", value=self.coms_list[0], + options=self.coms_list) + self.B = Select(title="Commands B", value=self.coms_list[0], + options=self.coms_list) + self.basis_select2 = Select(title="Basis", value=self.basis[0], + options=self.basis) + self.power = Slider(title="Abs(covmat)**X", start=0.1, end=1., step=0.1, + value=1.) 
+ self.cmin = Slider(title="vmin", start=1, end=10, step=1) + self.cmax = Slider(title="vmax", start=1, end=10, step=1) + self.rescale = Button(label="Rescale !", type="primary") + self.draw = Button(label="Draw !", type="success") + self.diag = Button(label="Plot diag !", type="primary") + self.cut = Button(label="Cut !", type="primary") + self.axiscut = Slider(title="X/Y cut", start=0, end=1, step=1) + self.XY = RadioButtonGroup(labels=["X", "Y"], active=0) + self.DataTableItems = [ + "Type", "Noise", "Truncature", "Aliasing", "FilteredModes", "Bandwidth", + "Tomography" + ] + self.ParamTableItems = list(self.f.attrs.keys()) + + self.table_cov_source = ColumnDataSource( + data=dict(Type=[], Noise=[], Truncature=[], Aliasing=[], + FilteredModes=[], Bandwidth=[], Tomography=[])) + self.table_cor_source = ColumnDataSource( + data=dict(Type=[], Noise=[], Truncature=[], Aliasing=[], + FilteredModes=[], Bandwidth=[], Tomography=[])) + + self.table_param_source = ColumnDataSource(data=dict(Parameter=[], Value=[])) + + self.cov_table, self.cor_table, self.param_table = self.createDataTables() + self.pcov_source = ColumnDataSource( + data=dict(image=[], x=[], y=[], dw=[], dh=[])) + self.pcor_source = ColumnDataSource( + data=dict(image=[], x=[], y=[], dw=[], dh=[])) + self.xdr4 = Range1d(start=0, end=6) + self.ydr4 = Range1d(start=0, end=6) + self.pcov = figure(x_range=self.xdr4, y_range=self.ydr4, x_axis_location="above") + self.pcov.image("image", "x", "y", "dw", "dh", palette="Spectral11", + source=self.pcov_source) + self.pcor = figure(x_range=self.xdr4, y_range=self.ydr4, x_axis_location="above") + self.pcor.image("image", "x", "y", "dw", "dh", palette="Spectral11", + source=self.pcor_source) + + self.updateDataTables() + # Tab 3 + self.basis_select3 = Select(title="Basis", value=self.basis[0], + options=self.basis) + self.modes_select = Slider(title="Mode #", value=0, start=0, end=self.nmodes, + step=1) + #self.modes_select = 
TextInput(value="0:"+str(self.nmodes-1),title="Enter a mode to display") + self.draw_mode = Button(label="Draw !", type="success") + self.inc_mode = Button(label="+", type="primary") + self.desinc_mode = Button(label="-", type="primary") + # Tab 4 + self.independence = CheckboxButtonGroup(labels=["Independence"], active=[]) + self.psf_display_select = Select( + title="PSF display", value="COMPASS", options=[ + "COMPASS", "ROKET", "Vii", "Fitting", "OTF Telescope", "OTF res" + ]) + self.psf_rec_methods_select = Select(title="Reconstruction method", value="Vii", + options=["Vii", "ROKET"]) + self.gamora_tag = Paragraph(text="PSF reconstruction :", height=25) + self.psf_display_tag = Paragraph(text="PSF display :", height=25) + self.error_select = CheckboxButtonGroup(labels=self.coms_list + ["fitting"], + active=[0, 1, 2, 3, 4, 5, 6]) + self.gamora_comp = Button(label="Reconstruct !", type="primary") + self.psf_display = Button(label="Display", type="primary") + self.colors = { + "filtered modes": "green", + "bandwidth": "orange", + "noise": "red", + "tomography": "purple", + "non linearity": "cyan", + "aliasing": "blue" + } + + self.source1 = ColumnDataSource(data=dict(x=[], y=[], color=[], typec=[])) + self.source2 = ColumnDataSource(data=dict(x=[], y=[], color=[])) + self.source3 = ColumnDataSource(data=dict(x=[], y=[], color=[])) + self.sourcepsf = ColumnDataSource(data=dict(x=[], y=[], color=[])) + + self.hover = HoverTool(tooltips=[("x", "@x"), ("y", "@y"), ("type", "@typec")]) + self.hoverlog = HoverTool(tooltips=[("x", "@x"), ("y", "@y"), ("type", + "@typec")]) + TOOLS = "resize,save,pan,box_zoom,tap, box_select, wheel_zoom, lasso_select,reset" + + self.plog = Figure(plot_height=600, plot_width=800, y_range=[1e-6, 10], + y_axis_type="log", tools=[TOOLS, self.hoverlog]) + self.psum = Figure(plot_height=600, plot_width=800) + for c in self.colors: + self.plog.line(legend=c, line_color=self.colors[c]) + + self.plog.multi_line("x", "y", color="color", 
source=self.source1) + self.psum.line(legend="Image SR", line_color="red") + self.psum.line(legend="Phase SR ", line_color="purple") + self.psum.line(legend="Var(X+Y)", line_color="blue") + self.psum.line(legend="Var(X)+var(Y)", line_color="green") + + self.psum.multi_line("x", "y", color="color", source=self.source3) + self.psum.yaxis.axis_label = "Strehl Ratio" + + self.xdr = Range1d(start=0, end=self.nactus) + self.ydr = Range1d(start=self.nactus, end=0) + self.p2 = figure(x_range=self.xdr, y_range=self.ydr, x_axis_location="above") + self.p2.image_url(url=[], x=0, y=0, w=self.nactus, h=self.nactus) + self.p3 = Figure(plot_height=600, plot_width=800) + self.p3.line(x="x", y="y", source=self.source2) + + self.xdr2 = Range1d(start=0, end=self.pup.shape[0]) + self.ydr2 = Range1d(start=self.pup.shape[1], end=0) + self.pmodes = figure(x_range=self.xdr2, y_range=self.ydr2, + x_axis_location="above") + self.pmodes.image_url(url=[], x=0, y=0, w=self.pup.shape[0], h=self.pup.shape[1]) + + self.control_plot = [self.plot_select, self.iter_select, self.basis_select1] + + self.xdr3 = Range1d(start=0, end=self.psf_compass.shape[0]) + self.ydr3 = Range1d(start=self.psf_compass.shape[1], end=0) + self.ppsf = figure(x_range=self.xdr3, y_range=self.ydr3, x_axis_location="above") + self.ppsf.image_url(url=[], x=0, y=0, w=self.psf_compass.shape[0], + h=self.psf_compass.shape[1]) + self.pcutpsf = Figure(plot_height=600, plot_width=800, y_range=[1e-9, 1], + y_axis_type="log") + self.pcutpsf.line(legend="COMPASS", line_color="blue") + self.pcutpsf.line(legend="PSF rec", line_color="red") + self.pcutpsf.multi_line("x", "y", color="color", source=self.sourcepsf) + + self.buttons = [self.coms] + for control in self.control_plot: + control.on_change('value', self.update) + for button in self.buttons: + button.on_change('active', self.update) + + self.draw.on_click(self.update_matrix2) + self.draw_mode.on_click(self.update_mode) + self.rescale.on_click(self.rescale_matrix) + 
self.diag.on_click(self.get_diag) + self.cut.on_click(self.cut_matrix) + self.inc_mode.on_click(self.mode_increment) + self.desinc_mode.on_click(self.mode_desincrement) + self.diff_button.on_click(self.plot_sum) + self.DB_button.on_click(self.loadDB) + self.gamora_comp.on_click(self.gamora_call) + self.psf_display.on_click(self.update_psf) + + self.inputs = HBox( + VBox(self.DB_select, self.DB_button, self.comsTags, self.coms, + self.plot_select, self.basis_select1, self.iter_select, + self.plusTag, self.plus_select, self.moinsTag, self.moins_select, + self.diff_button), width=350) + self.inputs2 = HBox( + VBox(self.DB_select, self.DB_button, self.basis_select2, self.A, self.B, + self.power, self.draw, self.cmax, self.cmin, self.rescale, + self.axiscut, self.XY, self.cut, self.diag)) #, width=350) + self.inputs3 = HBox( + VBox( + self.DB_select, self.DB_button, self.basis_select3, + VBox( + VBox( + HBox( + self.modes_select, + HBox(self.desinc_mode, self.inc_mode, + height=40))), self.draw_mode))) + self.inputs4 = HBox( + VBox( + HBox(self.DB_select, self.DB_button), self.gamora_tag, + self.psf_rec_methods_select, self.error_select, + self.independence, self.gamora_comp, self.psf_display_tag, + self.psf_display_select, self.psf_display), width=350) + self.tab1 = Panel( + child=HBox(self.inputs, VBox(self.plog, self.psum)), title="Breakdown") + self.tab2 = Panel( + child=HBox( + VBox( + HBox(self.inputs2, self.p2, self.p3), + HBox(self.cov_table, self.pcov), + HBox(self.cor_table, self.pcor))), title="Cov/cor") + self.tab3 = Panel(child=HBox(self.inputs3, self.pmodes), title="Basis") + self.tab4 = Panel( + child=HBox(self.inputs4, VBox(self.ppsf, self.pcutpsf)), title="PSF") + self.tab5 = Panel( + child=HBox(VBox(HBox(self.DB_select, self.DB_button), self.param_table)), + title="Parameters") + self.tabs = Tabs(tabs=[self.tab1, self.tab2, self.tab4, self.tab3, self.tab5]) + + curdoc().clear() + self.update(None, None, None) + + curdoc().add_root(self.tabs) 
#hplot(inputs,p))#, p, p2) + curdoc().add_root(self.dialog) + + ###################################################################################### + # ___ _ _ _ _ + # / __|__ _| | | |__ __ _ __| |__ ___ + # | (__/ _` | | | '_ \/ _` / _| / /(_-< + # \___\__,_|_|_|_.__/\__,_\__|_\_\/__/ + # + ###################################################################################### + def loadDB(self): + self.dialog.visible = False + self.dialog.content = "Loading database..." + self.dialog.visible = True + + self.f = h5py.File(self.datapath + str(self.DB_select.value), mode='r+') + self.Lambda_tar = self.f.attrs["target.Lambda"][0] + self.Btt = self.f["Btt"][:] + + self.IF = csr_matrix((self.f["IF.data"][:], self.f["IF.indices"][:], + self.f["IF.indptr"][:])) + self.IF = self.IF.T + #self.TT = self.f["TT"][:] + self.P = self.f["P"][:] #/np.sqrt(self.IF.shape[0]) + #self.modes = self.IF.dot(self.Btt)#np.dot(self.f["IF"][:],self.Btt) + # self.modes = self.modes[:,self.swap] + + self.indx_pup = self.f["indx_pup"][:] + self.pup = np.zeros((self.f["dm_dim"].value, self.f["dm_dim"].value)) + + self.niter = self.f["noise"][:].shape[1] + self.nactus = self.f["noise"][:].shape[0] + self.nmodes = self.P.shape[0] + self.cov = self.f["cov"][:] + self.cor = self.f["cor"][:] + self.psf_compass = np.fft.fftshift(self.f["psf"][:]) + self.psf_fitting = np.fft.fftshift(self.f["psfortho"][:]) + self.psf = None + self.otftel = None + self.otf2 = None + self.gamora = None + + self.plot_type = ["Commands", "Variance"] + + #self.cov_table, self.cor_table = self.createDataTables() + self.updateDataTables() + self.update(None, None, None) + + print("DB loaded") + self.dialog.visible = False + + def update(self, attrname, old, new): + # plot_val = plot_type.value + self.source1.data = dict(x=[], y=[], color=[], typec=[]) + + coms_active = self.coms.active + plot_val = self.plot_select.value + basis_val = self.basis_select1.value + iteration = int(self.iter_select.value) + + yi = [] + xi = [] 
+ typec = [] + coloris = [] + for jj in coms_active: + j = self.coms_list[jj] + data = self.f[j][:] + if (plot_val == b"Commands"): + if (basis_val == b"Actuators"): + yi.append(data[:, iteration].tolist()) + self.plog.xaxis.axis_label = "Actuators" + elif (basis_val == b"Btt"): + yi.append(np.dot(self.P, data[:, iteration])[self.swap].tolist()) + self.plog.xaxis.axis_label = "Modes" + xi.append(list(range(len(data[:, iteration])))) + typec.append([j] * len(data[:, iteration])) + coloris.append(self.colors[j]) + self.plog.yaxis.axis_label = "Volts" + + elif (plot_val == b"Variance"): + if (basis_val == b"Actuators"): + yi.append(np.var(data, axis=1).tolist()) + self.plog.xaxis.axis_label = "Actuators" + elif (basis_val == b"Btt"): + yi.append(np.var(np.dot(self.P, data), axis=1)[self.swap].tolist()) + self.plog.xaxis.axis_label = "Modes" + xi.append(list(range(len(np.var(data, axis=1))))) + typec.append([j] * len(np.var(data, axis=1))) + coloris.append(self.colors[j]) + self.plog.yaxis.axis_label = "Variance" + + self.source1.data = dict(x=xi, y=yi, color=coloris, typec=typec) + + print("Plots updated") + + def gamora_call(self): + self.dialog.visible = False + psf_type = self.psf_rec_methods_select.value + err_active = self.error_select.active + err = self.f["noise"][:] * 0. + covmodes = err.dot(err.T) + independence = self.independence.active + fiterr = False + self.dialog.content = "Computing covariance matrix..." + self.dialog.visible = True + for k in err_active: + if (self.error_select.labels[k] == b"fitting"): + fiterr = True + else: + if (independence): + data = self.f[self.error_select.labels[k]][:] + covmodes += data.dot(data.T) / err.shape[1] + else: + err += self.f[self.error_select.labels[k]][:] + + if (psf_type == b"Vii"): + self.dialog.content = "Reconstructing PSF with Vii (may take a while)..." 
+ + if (independence): + self.otftel, self.otf2, self.psf, self.gamora = gamora.psf_rec_Vii( + self.datapath + str(self.DB_select.value), fitting=fiterr, + covmodes=covmodes) + else: + self.otftel, self.otf2, self.psf, self.gamora = gamora.psf_rec_Vii( + self.datapath + str(self.DB_select.value), err=err, + fitting=fiterr) + if (psf_type == b"ROKET"): + self.dialog.content = "Reconstructing PSF from ROKET file (may take a while)..." + self.dialog.visible = True + self.psf, self.gamora = gamora.psf_rec_roket_file( + self.datapath + str(self.DB_select.value), err=err) + else: + self.dialog.content = "PSF reconstruction is available with Vii or ROKET methods only" + self.dialog.visible = True + + self.update_psf() + self.sourcepsf.data = dict( + x=[ + list(range(self.psf_compass.shape[0])), + list(range(self.psf.shape[0])) + ], y=[ + self.psf_compass[self.psf_compass.shape[0] / 2, :], + self.psf[self.psf.shape[0] / 2, :] + ], color=["blue", "red"]) + self.dialog.visible = False + + def update_psf(self): + self.dialog.visible = False + self.dialog.content = "Updating PSF display..." 
+ self.dialog.visible = True + psf_type = self.psf_display_select.value + image = None + if (psf_type == b"COMPASS"): + image = np.log10(self.psf_compass) + if (psf_type == b"Vii" or psf_type == b"ROKET"): + image = np.log10(self.psf) + if (psf_type == b"Fitting"): + image = np.log10(self.psf_fitting) + if (psf_type == b"OTF Telescope"): + image = np.fft.fftshift(self.otftel) + if (psf_type == b"OTF res"): + image = np.fft.fftshift(self.otf2) + + if (image is not None): + if (self.old): + os.remove(self.old) + + time = str(datetime.datetime.now().strftime('%Y-%m-%d_%H_%M_%f')) + self.old = "/home/fferreira/public_html/roket_display" + time + ".png" + mpl.image.imsave(self.old, image) + self.ppsf.image_url( + url=dict(value=self.url + time + ".png"), x=0, y=0, w=image.shape[0], + h=image.shape[0]) + + self.dialog.visible = False + + def rescale_matrix(self): + self.dialog.visible = False + vmin = self.cmin.value + vmax = self.cmax.value + self.dialog.content = "Updating matrix..." + self.dialog.visible = True + if (self.old): + os.remove(self.old) + time = str(datetime.datetime.now().strftime('%Y-%m-%d_%H_%M_%f')) + self.old = "/home/fferreira/public_html/roket_display" + time + ".png" + mpl.image.imsave(self.old, self.covmat, vmin=vmin, vmax=vmax) + self.p2.image_url( + url=dict(value=self.url + time + ".png"), x=0, y=0, + w=self.covmat.shape[0], h=self.covmat.shape[0]) + self.dialog.visible = False + + def get_diag(self): + x = np.arange(self.covmat.shape[0]) + y = np.diag(self.covmat) + self.source2.data = dict(x=x, y=y) + + def cut_matrix(self): + XorY = self.XY.labels[self.XY.active] + ax = self.axiscut.value + if (XorY == b"X"): + data = self.covmat[ax, :] + else: + data = self.covmat[:, ax] + x = np.arange(data.size) + self.source2.data = dict(x=x, y=data) + + def update_matrix2(self): + self.dialog.visible = False + if (self.old): + os.remove(self.old) + #self.draw.disabled = True + A_val = self.A.value + B_val = self.B.value + basis = 
self.basis_select2.value + powa = self.power.value + self.dialog.content = "Computing and loading matrix..." + self.dialog.visible = True + + A_cov = self.f[A_val][:] + B_cov = self.f[B_val][:] + A_cov -= np.tile(np.mean(A_cov, axis=1), (A_cov.shape[1], 1)).T + B_cov -= np.tile(np.mean(B_cov, axis=1), (B_cov.shape[1], 1)).T + if (basis == b"Btt"): + A_cov = np.dot(self.P, A_cov) + B_cov = np.dot(self.P, B_cov) + print("Values ok") + self.covmat = (np.dot(A_cov, B_cov.T) / B_cov.shape[1]) + print("dot product ok") + if (powa != 1): + self.covmat = np.abs(self.covmat)**powa * np.sign(self.covmat) + print("scale adjusted") + self.cmin.start = self.covmat.min() + self.cmin.end = self.covmat.max() + self.cmin.value = self.cmin.start + self.cmin.step = (self.cmin.end - self.cmin.start) / 100. + self.cmax.start = self.covmat.min() + self.cmax.end = self.covmat.max() + self.cmax.value = self.cmax.end + self.cmax.step = self.cmin.step + self.axiscut.end = self.covmat.shape[0] + time = str(datetime.datetime.now().strftime('%Y-%m-%d_%H_%M_%f')) + self.old = "/home/fferreira/public_html/roket_display" + time + ".png" + mpl.image.imsave(self.old, self.covmat) + self.p2.image_url( + url=dict(value=self.url + time + ".png"), x=0, y=0, + w=self.covmat.shape[0], h=self.covmat.shape[0]) + + #self.sourceC.data = dict(url=[self.url],x=0,y=covmat.shape[0],dw=covmat.shape[0],dh=covmat.shape[0]) + #self.draw.disabled = False + print("Matrix updated2") + self.dialog.visible = False + + def update_mode(self): + self.dialog.visible = False + if (self.old): + os.remove(self.old) + N = self.modes_select.value + if (N >= self.nmodes): + N = self.nmodes - 1 + self.modes_select.value = N + basis = self.basis_select3.value + self.dialog.content = "Loading..." 
+ self.dialog.visible = True + + if (basis == b"Actuators"): + pup = self.pup.flatten() + pup[self.indx_pup] = self.IF[:, N].toarray() #self.f["IF"][:][:,N] + self.pup = pup.reshape(self.pup.shape) + elif (basis == b"Btt"): + pup = self.pup.flatten() + pup[self.indx_pup] = self.IF[:, N - 2].dot(self.Btt) + self.pup = pup.reshape(self.pup.shape) + time = str(datetime.datetime.now().strftime('%Y-%m-%d_%H_%M_%f')) + self.old = "/home/fferreira/public_html/roket_display" + time + ".png" + mpl.image.imsave(self.old, self.pup) + self.pmodes.image_url( + url=dict(value=self.url + time + ".png"), x=0, y=0, w=self.pup.shape[0], + h=self.pup.shape[0]) + + #self.sourceC.data = dict(url=[self.url],x=0,y=covmat.shape[0],dw=covmat.shape[0],dh=covmat.shape[0]) + #self.draw.disabled = False + print("Mode updated") + self.dialog.visible = False + + def mode_increment(self): + if (self.modes_select.value < self.nmodes - 1): + self.modes_select.value = self.modes_select.value + 1 + else: + self.modes_select.value = self.nmodes - 1 + + def mode_desincrement(self): + if (self.modes_select.value > 0): + self.modes_select.value = self.modes_select.value - 1 + else: + self.modes_select.value = 0 + + def plot_sum(self): + + self.dialog.visible = False + self.dialog.content = "Computing..." 
+ self.dialog.visible = True + + plus = self.plus_select.active + moins = self.moins_select.active + basis_val = self.basis_select1.value + plot_val = self.plot_select.value + iteration = int(self.iter_select.value) + + if (plot_val == b"Commands"): + data = np.zeros(self.nactus) + x = list(range(self.nactus)) + elif (plot_val == b"Variance"): + data = np.zeros((self.nmodes, self.niter)) #self.nmodes) + data2 = np.zeros(self.nmodes) + x = list(range(self.nmodes)) + fitp = False + fitm = False + for i in plus: + self.dialog.content = "Computing " + self.plus_select.labels[i] + if (self.plus_select.labels[i] != "CORRECT"): + if (self.plus_select.labels[i] == b"fitting"): + fitp = True + else: + if (plot_val == b"Commands"): + data += np.dot(self.P, + self.f[self.coms_list[i]][:][:, iteration]) + elif (plot_val == b"Variance"): + data += np.dot(self.P, self.f[self.coms_list[i]][:]) + data2 += np.var( + np.dot(self.P, self.f[self.coms_list[i]][:]), axis=1) + else: + theta = np.arctan( + self.f.attrs["wfs.ypos"][0] / self.f.attrs["wfs.xpos"][0]) + theta -= (self.f.attrs["winddir"][0] * np.pi / 180.) + r0 = self.f.attrs["r0"] * (self.f.attrs["target.Lambda"][0] / + self.f.attrs["wfs.Lambda"][0])**(6. / 5.) + RASC = 180 / np.pi * 3600. + Dtomo = 0 + Dbp = 0 + Dcov = 0 + dt = self.f.attrs["ittime"] + g = self.f.attrs["gain"][0] + for k in range(self.f.attrs["nscreens"]): + H = self.f.attrs["atm.alt"][k] + v = self.f.attrs["windspeed"][k] + frac = self.f.attrs["frac"][k] + htheta = np.sqrt(self.f.attrs["wfs.xpos"][0]**2 + + self.f.attrs["wfs.ypos"][0]**2) / RASC * H + Dtomo += 6.88 * (htheta / r0)**(5. / 3.) + Dbp += 6.88 * (v * dt / g / r0)**(5. / 3.) + rho = np.sqrt(htheta**2 + (v * dt / g)**2 - + 2 * htheta * v * dt / g * np.cos(np.pi - theta)) + Dcov += 6.88 * (rho / r0)**(5. / 3.) 
+ covar = (Dbp + Dtomo - Dcov) * 0.5 * frac + + data2 += 2 * covar / (2 * np.pi / self.Lambda_tar)**2 / self.nmodes + #data2 += 2*np.sqrt(np.var(np.dot(self.P,self.f["tomography"]),axis=1))*np.sqrt(np.var(np.dot(self.P,self.f["bandwidth"]),axis=1))*np.cos(theta) + for i in moins: + if (self.plus_select.labels[i] == b"fitting"): + fitm = True + else: + if (plot_val == b"Commands"): + data -= np.dot(self.P, self.f[self.coms_list[i]][:][:, iteration]) + elif (plot_val == b"Variance"): + data -= np.dot(self.P, self.f[self.coms_list[i]][:]) + data2 -= np.var(np.dot(self.P, self.f[self.coms_list[i]][:]), axis=1) + + +# if(basis_val == b"Btt"): +# data = np.dot(self.P,data) +# data2 = np.dot(self.P,data2) + if (plot_val == b"Variance"): + data = np.var(data, axis=1) + data = np.cumsum(data[self.swap]) + # theta = np.arctan(self.f.attrs["wfs.ypos"][0]/self.f.attrs["wfs.xpos"][0]) + # if(np.sign(self.f.attrs["wfs.ypos"][0])<0): + # theta += np.pi*0. + # theta -= (self.f.attrs["winddir"][0] * np.pi/180.) 
+ # data2 += 2*np.sqrt(np.var(np.dot(self.P,self.f["tomography"]),axis=1))*np.sqrt(np.var(np.dot(self.P,self.f["bandwidth"]),axis=1))*np.cos(theta) + data2 = np.cumsum(data2[self.swap]) + data2 = np.exp(-data2 * (2 * np.pi / self.Lambda_tar)**2) + print("data2 : ", data2) + data = np.exp(-data * (2 * np.pi / self.Lambda_tar)**2) + if (fitp and list(self.f.keys()).count("fitting")): + data *= np.exp(-self.f["fitting"].value) + data2 *= np.exp(-self.f["fitting"].value) + print("data2 : ", data2) + if (fitm and list(self.f.keys()).count("fitting")): + data /= np.exp(-self.f["fitting"].value) + data2 /= np.exp(-self.f["fitting"].value) + if (list(self.f.keys()).count("SR2")): + self.source3.data = dict( + x=[x, x, x, x], y=[ + data, + np.ones(len(x)) * self.f["SR"].value, + np.ones(len(x)) * self.f["SR2"].value, data2 + ], color=["blue", "red", "purple", "green"]) + else: + if (list(self.f.keys()).count("SR")): + self.source3.data = dict( + x=[x, x, + x], y=[data, + np.ones(len(x)) * self.f["SR"].value, data2], + color=["blue", "red", "green"]) + else: + self.source3.data = dict(x=x, y=data) + print("Sum plotted") + self.dialog.visible = False + + def cov_cor(self): + cov = np.zeros((6, 6)) + bufdict = { + "0": self.f["noise"][:], + "1": self.f["non linearity"][:], + "2": self.f["aliasing"][:], + "3": self.f["filtered modes"][:], + "4": self.f["bandwidth"][:], + "5": self.f["tomography"][:] + } + for i in range(cov.shape[0]): + for j in range(cov.shape[1]): + if (j >= i): + tmpi = self.P.dot(bufdict[str(i)]) + tmpj = self.P.dot(bufdict[str(j)]) + cov[i, j] = np.sum( + np.mean(tmpi * tmpj, axis=1) - + np.mean(tmpi, axis=1) * np.mean(tmpj, axis=1)) + else: + cov[i, j] = cov[j, i] + + s = np.reshape(np.diag(cov), (cov.shape[0], 1)) + sst = np.dot(s, s.T) + cor = cov / np.sqrt(sst) + + return cov, cor + + def createDataTables(self): + + tmp = [TableColumn(field="Type", title="Covariance")] + for item in self.DataTableItems[1:]: + tmp.append(TableColumn(field=item, 
import h5py
import numpy as np
import glob


def validfile(filename):
    """
    Check the consistency of a ROKET output file and flag it as valid.

    Recomputes the residual error from the error-breakdown buffers stored in
    the file, converts it to a Strehl ratio estimate through the Marechal
    approximation and compares it to the SR / SR2 values saved during the
    simulation. If the estimate matches either one within 0.05, a "validity"
    attribute is written back into the file.

    Args:
        filename: (str): path to the ROKET HDF5 file (opened read/write,
                         since the validity flag is written back)
    """
    # "r+": the file must be writable to store the validity attribute, and an
    # explicit mode is required by recent h5py. The context manager guarantees
    # the handle is closed even on error (the original leaked it).
    with h5py.File(filename, "r+") as f:
        if list(f.attrs.keys()).count("target.Lambda"):
            Lambda = f.attrs["target.Lambda"][0]
        else:
            # Default target wavelength [microns] when not saved in the file
            Lambda = 1.65
        P = f["P"][:]  # projection matrix onto the modal basis
        niter = f["noise"][:].shape[1]
        nmodes = P.shape[0]
        data = np.zeros((nmodes, niter))
        error_list = [
                "noise", "aliasing", "tomography", "filtered modes", "bandwidth",
                "non linearity"
        ]

        # Sum of all error contributors projected onto the modal basis
        for i in error_list:
            data += np.dot(P, f[i][:])

        # Residual variance -> Strehl estimate (Marechal approximation)
        data = np.var(data, axis=1)
        data = np.sum(data)
        data = np.exp(-data * (2 * np.pi / Lambda)**2)
        # NOTE(review): ".value" is the legacy h5py (<3.0) dataset accessor,
        # kept for consistency with the rest of this package
        data *= np.exp(-f["fitting"].value)

        SR2 = f["SR2"].value
        SR = f["SR"].value

        # Flag the file as valid if the reconstructed SR matches either the
        # saved SR or SR2. NOTE(review): files that fail the check keep any
        # pre-existing "validity" attribute -- confirm this is intended.
        if (np.abs(data - SR) < 0.05 or np.abs(data - SR2) < 0.05):
            f.attrs["validity"] = True


datapath = "/home/fferreira/Data/correlation/"
filenames = glob.glob(datapath + "roket_8m*.h5")
nfiles = len(filenames)
for ind, fname in enumerate(filenames, start=1):
    validfile(fname)
    print(" reading : %d/%d\r" % (ind, nfiles), end=' ')
print("read")
(str): (optional) path to a parameter file + N_preloop: (int): (optional) number of iterations before starting error breakdown estimation + gamma: (float): (optional) centroid gain + """ + super().__init__(str) + self.N_preloop = N_preloop + self.gamma = gamma + + def init_config(self): + """ + Initializes the COMPASS simulation and the ROKET buffers + """ + #super().init_config() + self.iter_number = 0 + self.n = self.config.p_loop.niter + self.N_preloop + #self.nfiltered = int(self.config.p_controllers[0].maxcond) + self.nfiltered = 20 + #self.nactus = self.get_command(0).size + self.nactus = self.rtc.get_command(0).size + self.nslopes = self.rtc.get_slopes(0).size + self.com = np.zeros((self.n, self.nactus), dtype=np.float32) + self.noise_com = np.zeros((self.n, self.nactus), dtype=np.float32) + self.alias_wfs_com = np.copy(self.noise_com) + self.alias_meas = np.zeros((self.n, self.nslopes), dtype=np.float32) + self.wf_com = np.copy(self.noise_com) + self.tomo_com = np.copy(self.noise_com) + self.trunc_com = np.copy(self.noise_com) + self.trunc_meas = np.copy(self.alias_meas) + self.H_com = np.copy(self.noise_com) + self.mod_com = np.copy(self.noise_com) + self.bp_com = np.copy(self.noise_com) + self.fit = np.zeros(self.n) + self.psf_ortho = self.target.get_tar_image(0) * 0 + self.centroid_gain = 0 + self.centroid_gain2 = 0 + self.slopes = np.zeros((self.n, self.nslopes), dtype=np.float32) + #gamma = 1.0 + self.config.p_loop.set_niter(self.n) + #self.IFpzt = self.get_influ_basis_sparse(1) + self.IFpzt = self.rtc._rtc.d_control[1].d_IFsparse.get_csr() + #self.IFpzt.P = scipy.sparse.csr_matrix.transpose(self.IFpzt.get_csr()) + #self.TT = self.get_tt_influ_basis(1) + self.TT = np.array(self.rtc._rtc.d_control[1].d_TT) + + self.Btt, self.P = compute_btt(self.IFpzt.T, self.TT) + tmp = self.Btt.dot(self.Btt.T) + self.rtc._rtc.d_control[1].load_Btt(tmp[:-2, :-2], tmp[-2:, -2:]) + compute_cmat_with_Btt(self.rtc._rtc, self.Btt, self.nfiltered) + self.cmat = 
self.rtc.get_command_matrix(0) + self.D = self.rtc.get_interaction_matrix(0) + self.RD = np.dot(self.cmat, self.D) + self.gRD = np.identity( + self.RD. + shape[0]) - self.config.p_controllers[0].gain * self.gamma * self.RD + + self.Nact = create_nact_geom(self.config.p_dms[0]) + + def loop_next(self, **kwargs): + """ + function next + Iterates the AO loop, with optional parameters + + :param move_atmos (bool): move the atmosphere for this iteration, default: True + :param nControl (int): Controller number to use, default 0 (single control configurations) + :param tar_trace (None or list[int]): list of targets to trace. None equivalent to all. + :param wfs_trace (None or list[int]): list of WFS to trace. None equivalent to all. + :param apply_control (bool): (optional) if True (default), apply control on DMs + """ + self.next(apply_control=False) + self.error_breakdown() + self.rtc.apply_control(0) + self.iter_number += 1 + + def loop(self, monitoring_freq=100, **kwargs): + """ + Performs the AO loop for n iterations + + Args: + monitoring_freq: (int): (optional) Loop monitoring frequency [frames] in the terminal + """ + print("-----------------------------------------------------------------") + print("iter# | SE SR | LE SR | FIT SR | PH SR | ETR (s) | Framerate (Hz)") + print("-----------------------------------------------------------------") + t0 = time.time() + for i in range(self.n): + self.loop_next(**kwargs) + if ((i + 1) % monitoring_freq == 0): + framerate = (i + 1) / (time.time() - t0) + self.target.comp_tar_image(0) + self.target.comp_strehl(0) + strehltmp = self.target.get_strehl(0) + etr = (self.n - i) / framerate + print("%d \t %.2f \t %.2f\t %.2f \t %.2f \t %.1f \t %.1f" % + (i + 1, strehltmp[0], strehltmp[1], np.exp(-strehltmp[2]), + np.exp(-strehltmp[3]), etr, framerate)) + t1 = time.time() + + print(" loop execution time:", t1 - t0, " (", self.n, "iterations), ", + (t1 - t0) / self.n, "(mean) ", self.n / (t1 - t0), "Hz") + + 
#self.tar.comp_image(0) + SRs = self.target.get_strehl(0) + self.SR2 = np.exp(SRs[3]) + self.SR = SRs[1] + + def error_breakdown(self): + """ + Compute the error breakdown of the AO simulation. Returns the error commands of + each contributors. Suppose no delay (for now) and only 2 controllers : the main one, controller #0, (specified on the parameter file) + and the geometric one, controller #1 (automatically added if roket is asked in the parameter file) + Commands are computed by applying the loop filter on various kind of commands : (see schema_simulation_budget_erreur_v2) + + - Ageom : Aliasing contribution on WFS direction + Obtained by computing commands from DM orthogonal phase (projection + slopes_geom) + + - B : Projection on the target direction + Obtained as the commmands output of the geometric controller + + - C : Wavefront + Obtained by computing commands from DM parallel phase (RD*B) + + - E : Wavefront + aliasing + ech/trunc + tomo + Obtained by performing the AO loop iteration without noise on the WFS + + - F : Wavefront + aliasing + tomo + Obtained by performing the AO loop iteration without noise on the WFS and using phase deriving slopes + + - G : tomo + + Note : rtc.get_err returns to -CMAT.slopes + """ + g = self.config.p_controllers[0].gain + Dcom = self.rtc.get_command(0) + Derr = self.rtc.get_err(0) + self.com[self.iter_number, :] = Dcom + tarphase = self.target.get_tar_phase(0) + self.slopes[self.iter_number, :] = self.rtc.get_slopes(0) + + ########################################################################### + ## Noise contribution + ########################################################################### + if (self.config.p_wfss[0].type == scons.WFSType.SH): + ideal_img = np.array(self.wfs._wfs.d_wfs[0].d_binimg_notnoisy) + binimg = np.array(self.wfs._wfs.d_wfs[0].d_binimg) + if (self.config.p_centroiders[0].type == scons.CentroiderType.TCOG + ): # Select the same pixels with or without noise + invalidpix = np.where(binimg <= 
self.config.p_centroiders[0].thresh) + ideal_img[invalidpix] = 0 + self.rtc.set_centroider_threshold(0, -1e16) + self.wfs._wfs.d_wfs[0].set_binimg(ideal_img, ideal_img.size) + elif (self.config.p_wfss[0].type == scons.centroiderType.PYRHR): + ideal_pyrimg = np.array(self.wfs._wfs.d_wfs[0].d_binimg_notnoisy) + self.wfs._wfs.d_wfs[0].set_binimg(ideal_pyrimg, ideal_pyrimg.size) + + self.rtc.do_centroids(0) + if (self.config.p_centroiders[0].type == scons.CentroiderType.TCOG): + self.rtc.set_centroider_threshold(0, config.p_centroiders[0].thresh) + + self.rtc.do_control(0) + E = self.rtc.get_err(0) + E_meas = self.rtc.get_slopes(0) + # Apply loop filter to get contribution of noise on commands + if (self.iter_number + 1 < self.config.p_loop.niter): + self.noise_com[self.iter_number + 1, :] = self.gRD.dot( + self.noise_com[self.iter_number, :]) + g * (Derr - E) + ########################################################################### + ## Sampling/truncature contribution + ########################################################################### + self.rtc.do_centroids_geom(0) + self.rtc.do_control(0) + F = self.rtc.get_err(0) + F_meas = self.rtc.get_slopes(0) + self.trunc_meas[self.iter_number, :] = E_meas - F_meas + # Apply loop filter to get contribution of sampling/truncature on commands + if (self.iter_number + 1 < self.config.p_loop.niter): + self.trunc_com[self.iter_number + 1, :] = self.gRD.dot( + self.trunc_com[self.iter_number, :]) + g * (E - self.gamma * F) + self.centroid_gain += centroid_gain(E, F) + #Derr = np.ones(Derr) + #print(Derr) + #print(F) + self.centroid_gain2 += centroid_gain(Derr, F) + ########################################################################### + ## Aliasing contribution on WFS direction + ########################################################################### + #self.rtc.do_control(1, 0, wfs_direction=True) + self.rtc.do_control(1, sources=self.wfs.sources, is_wfs_phase=True) + #self.rtc.do_control(0) + 
#self.rtc.do_control(1) + self.rtc.apply_control(1) + for w in range(len(self.config.p_wfss)): + self.wfs.raytrace(w, dms=self.dms, reset=False) + """ + wfs.sensors_compimg(0) + if(config.p_wfss[0].type == scons.WFSType.SH): + ideal_img = wfs.get_binimgNotNoisy(0) + binimg = wfs.get_binimg(0) + if(config.p_centroiders[0].type == scons.CentroiderType.TCOG): # Select the same pixels with or without noise + invalidpix = np.where(binimg <= config.p_centroiders[0].thresh) + ideal_img[self.iter_numbernvalidpix] = 0 + rtc.setthresh(0,-1e16) + wfs.set_binimg(0,ideal_img) + elif(config.p_wfss[0].type == scons.centroiderType.PYRHR): + ideal_pyrimg = wfs.get_binimg_notnoisy(0) + wfs.set_pyrimg(0,ideal_pyrimg) + """ + self.rtc.do_centroids_geom(0) + self.rtc.do_control(0) + Ageom = self.rtc.get_err(0) + self.alias_meas[self.iter_number, :] = self.rtc.get_slopes(0) + if (self.iter_number + 1 < self.config.p_loop.niter): + self.alias_wfs_com[self.iter_number + 1, :] = self.gRD.dot( + self.alias_wfs_com[self.iter_number, :]) + self.gamma * g * ( + Ageom) # - (E-F)) + + ########################################################################### + ## Wavefront + filtered modes reconstruction + ########################################################################### + self.target.raytrace(0, atm=self.atmos) + #self.rtc.do_control(1, 0, wfs_direction=False) + self.rtc.do_control(1, sources=self.target.sources, is_wfs_phase=False) + #self.rtc.do_control(0) + #self.rtc.do_control(1) + B = self.rtc.get_command(1) + + ########################################################################### + ## Fitting + ########################################################################### + self.rtc.apply_control(1) + self.target.raytrace(0, dms=self.dms, reset=False) + self.target.comp_tar_image(0, compLE=False) + self.target.comp_strehl(0) + self.fit[self.iter_number] = self.target.get_strehl(0)[2] + if (self.iter_number >= self.N_preloop): + self.psf_ortho += self.target.get_tar_image(0, 
expo_type='se') + + ########################################################################### + ## Filtered modes error & Commanded modes + ########################################################################### + modes = self.P.dot(B) + modes_filt = modes.copy() * 0. + modes_filt[-self.nfiltered - 2:-2] = modes[-self.nfiltered - 2:-2] + self.H_com[self.iter_number, :] = self.Btt.dot(modes_filt) + modes[-self.nfiltered - 2:-2] = 0 + self.mod_com[self.iter_number, :] = self.Btt.dot(modes) + + ########################################################################### + ## Bandwidth error + ########################################################################### + C = self.mod_com[self.iter_number, :] - self.mod_com[self.iter_number - 1, :] + + self.bp_com[self.iter_number, :] = self.gRD.dot( + self.bp_com[self.iter_number - 1, :]) - C + + ########################################################################### + ## Tomographic error + ########################################################################### + #G = F - (mod_com[self.iter_number,:] + Ageom - np.dot(RDgeom,com[self.iter_number-1,:])) + for w in range(len(self.config.p_wfss)): + self.wfs.raytrace(w, atm=self.atmos) + + #self.rtc.do_control(1, 0, wfs_direction=True) + self.rtc.do_control(1, sources=self.wfs.sources, is_wfs_phase=True) + #self.rtc.do_control(0) + #self.rtc.do_control(1) + G = self.rtc.get_command(1) + modes = self.P.dot(G) + modes[-self.nfiltered - 2:-2] = 0 + self.wf_com[self.iter_number, :] = self.Btt.dot(modes) + + G = self.mod_com[self.iter_number, :] - self.wf_com[self.iter_number, :] + if (self.iter_number + 1 < self.config.p_loop.niter): + self.tomo_com[self.iter_number + 1, :] = self.gRD.dot( + self.tomo_com[self.iter_number, :]) - g * self.gamma * self.RD.dot(G) + + # Without anyone noticing... 
+ #self._sim.tar.d_targets[0].set_phase(tarphase) + self.target.set_tar_phase(0, tarphase) + #self._sim.rtc.d_control[0].set_com(Dcom, Dcom.size) + self.rtc.set_command(0, Dcom) + + def save_in_hdf5(self, savename): + """ + Saves all the ROKET buffers + simuation parameters in a HDF5 file + + Args: + savename: (str): name of the output file + """ + tmp = (self.config.p_geom._ipupil.shape[0] - + (self.config.p_dms[0]._n2 - self.config.p_dms[0]._n1 + 1)) // 2 + tmp_e0 = self.config.p_geom._ipupil.shape[0] - tmp + tmp_e1 = self.config.p_geom._ipupil.shape[1] - tmp + pup = self.config.p_geom._ipupil[tmp:tmp_e0, tmp:tmp_e1] + indx_pup = np.where(pup.flatten() > 0)[0].astype(np.int32) + dm_dim = self.config.p_dms[0]._n2 - self.config.p_dms[0]._n1 + 1 + self.cov_cor() + psf = self.target.get_tar_image(0, expo_type='le') + if(os.getenv("DATA_GUARDIAN") is not None): + fname = os.getenv("DATA_GUARDIAN") + "/" + savename + else: + fname = savename + #fname = "test" + pdict = { + "noise": + self.noise_com[self.N_preloop:, :].T, + "aliasing": + self.alias_wfs_com[self.N_preloop:, :].T, + "tomography": + self.tomo_com[self.N_preloop:, :].T, + "filtered modes": + self.H_com[self.N_preloop:, :].T, + "non linearity": + self.trunc_com[self.N_preloop:, :].T, + "bandwidth": + self.bp_com[self.N_preloop:, :].T, + "wf_com": + self.wf_com[self.N_preloop:, :].T, + "P": + self.P, + "Btt": + self.Btt, + "IF.data": + self.IFpzt.data, + "IF.indices": + self.IFpzt.indices, + "IF.indptr": + self.IFpzt.indptr, + "TT": + self.TT, + "dm_dim": + dm_dim, + "indx_pup": + indx_pup, + "fitting": + np.mean(self.fit[self.N_preloop:]), + "SR": + self.SR, + "SR2": + self.SR2, + "cov": + self.cov, + "cor": + self.cor, + "psfortho": + np.fft.fftshift(self.psf_ortho) / + (self.config.p_loop.niter - self.N_preloop), + "centroid_gain": + self.centroid_gain / (self.config.p_loop.niter - self.N_preloop), + "centroid_gain2": + self.centroid_gain2 / + (self.config.p_loop.niter - self.N_preloop), + "dm.xpos": + 
self.config.p_dms[0]._xpos, + "dm.ypos": + self.config.p_dms[0]._ypos, + "R": + self.rtc.get_command_matrix(0), + "D": + self.rtc.get_interaction_matrix(0), + "Nact": + self.Nact, + "com": + self.com[self.N_preloop:, :].T, + "slopes": + self.slopes[self.N_preloop:, :].T, + "alias_meas": + self.alias_meas[self.N_preloop:, :].T, + "trunc_meas": + self.trunc_meas[self.N_preloop:, :].T + } + h5u.save_h5(fname, "psf", self.config, psf) + for k in list(pdict.keys()): + h5u.save_hdf5(fname, k, pdict[k]) + + def cov_cor(self): + """ + Computes covariance matrix and correlation matrix between all the contributors + """ + self.cov = np.zeros((6, 6)) + self.cor = np.zeros((6, 6)) + bufdict = { + "0": self.noise_com.T, + "1": self.trunc_com.T, + "2": self.alias_wfs_com.T, + "3": self.H_com.T, + "4": self.bp_com.T, + "5": self.tomo_com.T + } + for i in range(self.cov.shape[0]): + for j in range(self.cov.shape[1]): + if (j >= i): + tmpi = self.P.dot(bufdict[str(i)]) + tmpj = self.P.dot(bufdict[str(j)]) + self.cov[i, j] = np.sum( + np.mean(tmpi * tmpj, axis=1) - + np.mean(tmpi, axis=1) * np.mean(tmpj, axis=1)) + else: + self.cov[i, j] = self.cov[j, i] + + s = np.reshape(np.diag(self.cov), (self.cov.shape[0], 1)) + sst = np.dot(s, s.T) + ok = np.where(sst) + self.cor[ok] = self.cov[ok] / np.sqrt(sst[ok]) + + +############################################################################### +# +# MAIN +# +############################################################################### +if __name__ == "__main__": + from shesha.config import ParamConfig + + if (len(sys.argv) < 2): + error = 'command line should be at least:"python -i test.py parameters_filename"\n with "parameters_filename" the path to the parameters file' + raise Exception(error) + + #get parameters from file + param_file = sys.argv[1] + + if (len(sys.argv) > 2): + savename = sys.argv[2] + else: + savename = "roket_default.h5" + config = ParamConfig(param_file) + roket = Roket(config) + roket.init_config() + 
roket.loop() + roket.save_in_hdf5(savename) diff --git a/guardians/scripts/bash_roket.sh b/guardians/scripts/bash_roket.sh new file mode 100755 index 0000000..9a97346 --- /dev/null +++ b/guardians/scripts/bash_roket.sh @@ -0,0 +1,24 @@ +#!/bin/bash +DIAM="8" +NSSP="40" +NFILT="20" +NPIX="4" +PIXSIZE="0.6" +WINDSPEED="10 20" +WINDDIR="0" # 45 90 135 180 20" +DATE=`date +%F_%Hh%M` +OUTPUT="$SHESHA_ROOT/guardians/scripts/outputfile_$DATE" + +echo "writing output in "$OUTPUT + +script="$SHESHA_ROOT/guardians/scripts/script_roket_fix.py" + +for s in $WINDSPEED +do + for dd in $WINDDIR + do + CMD="ipython $script $SHESHA_ROOT/guardians/script/Sim_param_r0_012.py -- --niter 200 --diam $DIAM --npix $NPIX --pixsize $PIXSIZE --nfilt $NFILT --nssp $NSSP --winddir $dd --windspeed $s -s roket_"$DIAM"m_nssp"$NSSP"_dir"$dd"_speed"$s".h5" + echo "execute $CMD" >> $OUTPUT + $CMD 2>> $OUTPUT >> $OUTPUT + done +done diff --git a/guardians/scripts/convergence.py b/guardians/scripts/convergence.py new file mode 100644 index 0000000..f1b19d8 --- /dev/null +++ b/guardians/scripts/convergence.py @@ -0,0 +1,24 @@ +import numpy as np +import matplotlib.pyplot as plt +plt.ion() +import h5py +from guardians import groot, gamora +import os + +filename = os.getenv("DATA_GUARDIAN") + "roket_8m_LE.h5" +Cab = groot.compute_Cerr(filename) +_, _, psfModel, _ = gamora.psf_rec_Vii(filename, fitting=False, + cov=Cab.astype(np.float32)) + +f = h5py.File(filename, 'r') +tb = f["tomography"][:] + f["bandwidth"][:] + +for k in range(10000, 201000, 10000): + C = tb[:, :k].dot(tb[:, :k].T) / k + _, _, psfC, _ = gamora.psf_rec_Vii(filename, fitting=False, + covmodes=C.astype(np.float32)) + plt.matshow( + np.log10(np.abs(psfC - psfModel)), vmin=np.log10(np.abs(psfModel)).min(), + vmax=np.log10(np.abs(psfModel)).max()) + plt.title("niter = %d" % k) + plt.colorbar() diff --git a/guardians/scripts/old2new_attrs.py b/guardians/scripts/old2new_attrs.py new file mode 100644 index 0000000..af4453a --- /dev/null +++ 
import h5py
from glob import glob

# Translation table from the legacy attribute names saved in old ROKET files
# to the new "_Param_<class>__<attr>" naming scheme.
# NOTE(review): the original literal declared the key "type" four times (in
# the WFS, DM, centroider and controller sections). In a Python dict literal
# the last duplicate wins, so "type" has always been translated to
# "_Param_controller__type"; only that entry is kept here to make the
# effective behavior explicit. The legacy "type" attribute is ambiguous --
# confirm which parameter class it was meant for before relying on it.
old2new_dict = {
        # Loop params
        "niter": "_Param_loop__niter",
        "ittime": "_Param_loop__ittime",
        # Geom params
        "zenithangle": "_Param_geom__zenithangle",
        "pupdiam": "_Param_geom__pupdiam",
        # Telescope params
        "tel_diam": "_Param_tel__diam",
        "cobs": "_Param_tel__cobs",
        "t_spiders": "_Param_tel__t_spiders",
        "spiders_type": "_Param_tel__spiders_type",
        "type_ap": "_Param_tel__type_ap",
        "referr": "_Param_tel__referr",
        "pupangle": "_Param_tel__pupangle",
        "nbrmissing": "_Param_tel__nbrmissing",
        "std_piston": "_Param_tel__std_piston",
        "std_tt": "_Param_tel__std_tt",
        # Atmos params
        "r0": "_Param_atmos__r0",
        "nscreens": "_Param_atmos__nscreens",
        "frac": "_Param_atmos__frac",
        "atm.alt": "_Param_atmos__alt",
        "windspeed": "_Param_atmos__windspeed",
        "winddir": "_Param_atmos__winddir",
        "L0": "_Param_atmos__L0",
        "seeds": "_Param_atmos__seeds",
        # Target params
        "ntargets": "_Param_target__ntargets",
        "target.xpos": "_Param_target__xpos",
        "target.ypos": "_Param_target__ypos",
        "target.Lambda": "_Param_target__Lambda",
        "target.mag": "_Param_target__mag",
        "target.dms_seen": "_Param_target__dms_seen",
        # WFS params
        "nxsub": "_Param_wfs__nxsub",
        "npix": "_Param_wfs__npix",
        "pixsize": "_Param_wfs__pixsize",
        "fracsub": "_Param_wfs__fracsub",
        "wfs.xpos": "_Param_wfs__xpos",
        "wfs.ypos": "_Param_wfs__ypos",
        "wfs.Lambda": "_Param_wfs__Lambda",
        "gsmag": "_Param_wfs__gsmag",
        "optthroughput": "_Param_wfs__optthroughput",
        "zerop": "_Param_wfs__zerop",
        "noise": "_Param_wfs__noise",
        "atmos_seen": "_Param_wfs__atmos_seen",
        "dms_seen": "_Param_wfs__dms_seen",
        "beamsize": "_Param_wfs__beamsize",
        "fssize": "_Param_wfs__fssize",
        "fstop": "_Param_wfs__fstop",
        "gsalt": "_Param_wfs__gsalt",
        "laserpower": "_Param_wfs__laserpower",
        "lgsreturnperwatt": "_Param_wfs__lgsreturnperwatt",
        "lltx": "_Param_wfs__lltx",
        "llty": "_Param_wfs__llty",
        "open_loop": "_Param_wfs__open_loop",
        "proftype": "_Param_wfs__proftype",
        "pyr_ampl": "_Param_wfs__pyr_ampl",
        "pyr_loc": "_Param_wfs__pyr_loc",
        "pyr_npts": "_Param_wfs__pyr_npts",
        "pyr_pup_sep": "_Param_wfs__pyr_pup_sep",
        "pyrtype": "_Param_wfs__pyrtype",
        # DM params
        "dm.alt": "_Param_dm__alt",
        "coupling": "_Param_dm__coupling",
        "nkl": "_Param_dm__nkl",
        "kl_type": "_Param_dm__type_kl",
        "pupoffset": "_Param_dm__pupoffset",
        "nact": "_Param_dm__nact",
        "push4imat": "_Param_dm__push4imat",
        "dm.thresh": "_Param_dm__thresh",
        "unitpervolt": "_Param_dm__unitpervolt",
        # Centroider params
        "nmax": "_Param_centroider__nmax",
        "centro.nwfs": "_Param_centroider__nwfs",
        "sizex": "_Param_centroider__sizex",
        "sizey": "_Param_centroider__sizey",
        "centroider.thresh": "_Param_centroider__thresh",
        "type_fct": "_Param_centroider__type_fct",
        "weights": "_Param_centroider__weights",
        "width": "_Param_centroider__width",
        # Control params ("type": the last duplicate won in the original)
        "type": "_Param_controller__type",
        "TTcond": "_Param_controller__TTcond",
        "cured_ndivs": "_Param_controller__cured_ndivs",
        "delay": "_Param_controller__delay",
        "gain": "_Param_controller__gain",
        "maxcond": "_Param_controller__maxcond",
        "modopti": "_Param_controller__modopti",
        "ndm": "_Param_controller__ndm",
        "nmodes": "_Param_controller__nmodes",
        "nrec": "_Param_controller__nrec",
        "gmin": "_Param_controller__gmin",
        "gmax": "_Param_controller__gmax",
        "ngain": "_Param_controller__ngain",
        "control.nwfs": "_Param_controller__nwfs",
        # Attributes kept under their original name
        "ndms": "ndms",
        "nwfs": "nwfs",
        "ncontrollers": "ncontrollers",
        "simulname": "simulname",
        "revision": "revision",
        "ncentroiders": "ncentroiders",
        "hyst": "hyst",
        "margin": "margin",
        "validity": "validity"
}

files = glob("/home/fferreira/Data/correlation/*.h5")

for ff in files:
    # Context manager closes the file even if an attribute rename fails
    # (the original only closed it on the happy path)
    with h5py.File(ff, 'r+') as f:
        # Already-converted files are left untouched
        if "_Param_atmos__r0" not in f.attrs:
            # Snapshot the key list: deleting attributes while iterating the
            # live attrs view is unsafe
            for k in list(f.attrs.keys()):
                try:
                    f.attrs[old2new_dict[k]] = f.attrs[k]
                    del f.attrs[k]
                except KeyError:
                    # FIX: was a bare "except:" which also swallowed I/O and
                    # interrupt errors; only a missing translation is expected
                    # here -- report it and keep the attribute as-is
                    print(ff)
                    print(k)
+plt.plot(speed, drax.ensquared_energy(filename, psf_compass, 10) - EE10) +plt.plot(speed, drax.ensquared_energy(filename, psf_compass, 20) - EE20) diff --git a/guardians/scripts/script_roket.py b/guardians/scripts/script_roket.py new file mode 100644 index 0000000..e3efa23 --- /dev/null +++ b/guardians/scripts/script_roket.py @@ -0,0 +1,87 @@ +"""script for ROKET + +Usage: + script_roket.py [options] + +with 'parameters_filename' the path to the parameters file + +Options: + -h --help Show this help message and exit + -s, --savefile savename Set the name of the ouput h5 file that will be saved in $DATA_GUARDIAN + -d, --diam diam Set the telescope diameter [m] + --niter niter Set the number of iterations + --nssp nxsub Set the number of subapertures of the WFS. Number of actuators is actualized to nxsub+1 + --npix npix Set the number of pixels per subap. + --pixsize pixsize Set the WFS pixel size [arcsec] + --nfilt nfilt Set the number of filtered modes + --winddir winddir Set the wind direction + --windspeed windspeed Set the wind speed + --noise noise Set the noise value + --gain gain Set the loop gain + --devices devices Specify the devices to use + --gamma gamma Set the value of the centroid gain + --seeds seeds Set the turbulence seeds + --alt alt Set the layer altitude + +Usage with Ipython: ipython [-i] script_roket.py -- [options] +""" + +from docopt import docopt + +import sys +import os +#from guardian.roket import Roket +from roket import Roket +from shesha.util.utilities import load_config_from_file + +arguments = docopt(__doc__) +param_file = arguments[""] +print(arguments) +# Get parameters from file +if arguments["--savefile"]: + savefile = arguments["--savefile"] +else: + savefile = "roket_default.h5" + +gamma = 1.0 +if arguments["--gamma"]: + gamma = 1 / float(arguments["--gamma"]) + +config = load_config_from_file(param_file) +roket = Roket(config, gamma=gamma) + +if arguments["--diam"]: + roket.config.p_tel.set_diam(float(arguments["--diam"])) 
+if arguments["--niter"]: + roket.config.p_loop.set_niter(int(arguments["--niter"])) +if arguments["--nssp"]: + roket.config.p_wfss[0].set_nxsub(int(arguments["--nssp"])) + roket.config.p_dms[0].set_nact(int(arguments["--nssp"]) + 1) +if arguments["--npix"]: + roket.config.p_wfss[0].set_npix(int(arguments["--npix"])) +if arguments["--pixsize"]: + roket.config.p_wfss[0].set_pixsize(float(arguments["--pixsize"])) +if arguments["--nfilt"]: + roket.config.p_controllers[0].set_maxcond(float(arguments["--nfilt"])) +if arguments["--windspeed"]: + roket.config.p_atmos.set_windspeed([float(arguments["--windspeed"])]) +if arguments["--winddir"]: + roket.config.p_atmos.set_winddir([float(arguments["--winddir"])]) +if arguments["--noise"]: + roket.config.p_wfss[0].set_noise(float(arguments["--noise"])) +if arguments["--gain"]: + roket.config.p_controllers[0].set_gain(float(arguments["--gain"])) +if arguments["--seeds"]: + roket.config.p_atmos.set_seeds([int(arguments["--seeds"])]) +if arguments["--alt"]: + roket.config.p_atmos.set_alt([float(arguments["--alt"])]) + +#if arguments["--devices"]: +# devices = [] +# for k in range(len(arguments["--devices"])): +# devices.append(int(arguments["--devices"][k])) +# roket.config.p_loop.set_devices(devices) + +roket.init_config() +roket.loop() +roket.save_in_hdf5(savefile) diff --git a/guardians/starlord.py b/guardians/starlord.py new file mode 100644 index 0000000..2805d9b --- /dev/null +++ b/guardians/starlord.py @@ -0,0 +1,140 @@ +""" +STARLORD (SeT of Algorithms foR mOdified stRucture function computation) +Set of functions for structure function computation +""" + +import numpy as np +from scipy.special import jv # Bessel function + + +def dphi_highpass(r, x0, tabx, taby): + """ + Fonction de structure de phase "haute frequence" + A renormalise en fonction du r0 + :params: + r : distance [m] + x0 : distance interactionneur [m] + tabx, taby : integrale tabulee obtenue avec la fonction tabulateIj0 + """ + return (r** + (5. 
/ 3.)) * (1.1183343328701949 - Ij0t83(r * (np.pi / x0), tabx, taby)) * ( + 2 * (2 * np.pi)**(8 / 3.) * 0.0228956) + + +def dphi_lowpass(r, x0, L0, tabx, taby): + """ + Fonction de structure de phase "basse frequence" + A renormalise en fonction du r0 + :params: + r : distance [m] + x0 : distance interactionneur [m] + tabx, taby : integrale tabulee obtenue avec la fonction tabulateIj0 + """ + return rodconan(r, L0) - dphi_highpass(r, x0, tabx, taby) + + +def Ij0t83(x, tabx, taby): + """ + Calcul de l'integrale tabulee + x + $ t^(-8/3) (1-bessel_j(0,t)) dt + 0 + + Pres de 0, le resultat s'approxime par (3/4.)*x^(1./3)*(1-x^2/112.+...) + """ + res = x.copy() + ismall = np.where(res < np.exp(-3.0)) + ilarge = np.where(res >= np.exp(-3.0)) + if (ismall[0].size > 0): + res[ismall] = 0.75 * x[ismall]**(1. / 3) * (1 - x[ismall]**2 / 112.) + if (ilarge[0].size > 0): + res[ilarge] = np.interp(x[ilarge], tabx, taby) + + return res + + +def tabulateIj0(): + """ + Tabulation de l'intesgrale + Necessaire avant utilisation des fonction dphi_lowpass et dphi_highpass + """ + n = 10000 + t = np.linspace(-4, 10, n) + dt = (t[-1] - t[0]) / (n - 1) + smallx = np.exp(-4.0) + A = 0.75 * smallx**(1. / 3) * (1 - smallx**2 / 112.) + X = np.exp(t) + Y = np.exp(-t * (5. / 3.)) * (1 - jv(0, X)) + Y[1:] = np.cumsum(Y[:-1] + np.diff(Y) / 2.) + Y[0] = 0. + Y = Y * dt + A + + return X, Y + + +def asymp_macdo(x): + k2 = 1.00563491799858928388289314170833 + k3 = 1.25331413731550012081 + a1 = 0.22222222222222222222 + a2 = -0.08641975308641974829 + a3 = 0.08001828989483310284 + + x_1 = 1. / x + res = k2 - k3 * np.exp(-x) * x**(1. / 3.) * (1.0 + x_1 * (a1 + x_1 * + (a2 + x_1 * a3))) + return res + + +def macdo(x): + a = 5. / 6. + x2a = x**(2. * a) + x22 = x * x / 4. 
+ s = 0.0 + + Ga = [ + 0, 12.067619015983075, 5.17183672113560444, 0.795667187867016068, + 0.0628158306210802181, 0.00301515986981185091, 9.72632216068338833e-05, + 2.25320204494595251e-06, 3.93000356676612095e-08, 5.34694362825451923e-10, + 5.83302941264329804e-12 + ] + + Gma = [ + -3.74878707653729304, -2.04479295083852408, -0.360845814853857083, + -0.0313778969438136685, -0.001622994669507603, -5.56455315259749673e-05, + -1.35720808599938951e-06, -2.47515152461894642e-08, -3.50257291219662472e-10, + -3.95770950530691961e-12, -3.65327031259100284e-14 + ] + + x2n = 0.5 + + s = Gma[0] * x2a + s *= x2n + + x2n *= x22 + + for n in np.arange(10) + 1: + + s += (Gma[n] * x2a + Ga[n]) * x2n + x2n *= x22 + + return s + + +def rodconan(r, L0): + """ + Fonction de structure de phase avec prise en compte de l'echelle externe + A renormalise en fonction du r0 + """ + res = r * 0. + k1 = 0.1716613621245709486 + dprf0 = (2 * np.pi / L0) * r + ilarge = np.where(dprf0 > 4.71239) + ismall = np.where(dprf0 <= 4.71239) + if (ilarge[0].size > 0): + res[ilarge] = asymp_macdo(dprf0[ilarge]) + if (ismall[0].size > 0): + res[ismall] = -macdo(dprf0[ismall]) + + res *= k1 * L0**(5. / 3.) 
+ + return res diff --git a/guardians/widgets/bokeh_gamora.py b/guardians/widgets/bokeh_gamora.py new file mode 100644 index 0000000..be493d8 --- /dev/null +++ b/guardians/widgets/bokeh_gamora.py @@ -0,0 +1,28 @@ +""" +To launch it : + + - locally : + bokeh serve --show bokeh_display.py + - as a server : + bokeh serve --port 8081 --allow-websocket-origin hippo6.obspm.fr:8081 bokeh_roket.py + then, open a web browser and connect to http://hippo6.obspm.fr:8081/bokeh_roket +""" +from widget_gamora import Bokeh_gamora +from bokeh.io import curdoc, output_file, show +import glob, os, atexit + + +def remove_files(): + files = glob.glob("/home/fferreira/public_html/roket_display*") + for f in files: + os.remove(f) + + +widget = Bokeh_gamora() +curdoc().clear() +#widget.update() +#output_file("roket.html") +#show(widget.tab) +curdoc().add_root(widget.tab) + +atexit.register(remove_files) diff --git a/guardians/widgets/bokeh_groot.py b/guardians/widgets/bokeh_groot.py new file mode 100644 index 0000000..153c76b --- /dev/null +++ b/guardians/widgets/bokeh_groot.py @@ -0,0 +1,28 @@ +""" +To launch it : + + - locally : + bokeh serve --show bokeh_display.py + - as a server : + bokeh serve --port 8081 --allow-websocket-origin hippo6.obspm.fr:8081 bokeh_roket.py + then, open a web browser and connect to http://hippo6.obspm.fr:8081/bokeh_roket +""" +from widget_groot import Bokeh_groot +from bokeh.io import curdoc, output_file, show +import glob, os, atexit + + +def remove_files(): + files = glob.glob("/home/fferreira/public_html/roket_display*") + for f in files: + os.remove(f) + + +widget = Bokeh_groot() +curdoc().clear() +#widget.update() +#output_file("roket.html") +#show(widget.tab) +curdoc().add_root(widget.tab) + +atexit.register(remove_files) diff --git a/guardians/widgets/bokeh_guardian.py b/guardians/widgets/bokeh_guardian.py new file mode 100644 index 0000000..faf4925 --- /dev/null +++ b/guardians/widgets/bokeh_guardian.py @@ -0,0 +1,25 @@ +""" +To launch it : + + - 
locally : + bokeh serve --show bokeh_display.py + - as a server : + bokeh serve --port 8081 --allow-websocket-origin hippo6.obspm.fr:8081 bokeh_roket.py + then, open a web browser and connect to http://hippo6.obspm.fr:8081/bokeh_roket +""" +from widget_guardian import Bokeh_guardian +from bokeh.io import curdoc, output_file, show +import glob, os, atexit + + +def remove_files(): + files = glob.glob("/home/fferreira/public_html/roket_display*") + for f in files: + os.remove(f) + + +widget = Bokeh_guardian() +curdoc().clear() +curdoc().add_root(widget.tab) + +atexit.register(remove_files) diff --git a/guardians/widgets/bokeh_roket.py b/guardians/widgets/bokeh_roket.py new file mode 100644 index 0000000..6edaf3e --- /dev/null +++ b/guardians/widgets/bokeh_roket.py @@ -0,0 +1,28 @@ +""" +To launch it : + + - locally : + bokeh serve --show bokeh_display.py + - as a server : + bokeh serve --port 8081 --allow-websocket-origin hippo6.obspm.fr:8081 bokeh_roket.py + then, open a web browser and connect to http://hippo6.obspm.fr:8081/bokeh_roket +""" +from widget_roket import Bokeh_roket +from bokeh.io import curdoc, output_file, show +import glob, os, atexit + + +def remove_files(): + files = glob.glob("/home/fferreira/public_html/roket_display*") + for f in files: + os.remove(f) + + +widget = Bokeh_roket() +curdoc().clear() +widget.update() +#output_file("roket.html") +#show(widget.tab) +curdoc().add_root(widget.tab) + +atexit.register(remove_files) diff --git a/guardians/widgets/widget_gamora.py b/guardians/widgets/widget_gamora.py new file mode 100644 index 0000000..9d2fc44 --- /dev/null +++ b/guardians/widgets/widget_gamora.py @@ -0,0 +1,174 @@ +import numpy as np +from glob import glob +import os +import datetime + +import h5py +import matplotlib as mpl + +from bokeh.plotting import figure +from bokeh.models import ColumnDataSource, Range1d +from bokeh.models.widgets import Panel, TextInput, Slider, CheckboxButtonGroup, DataTable, TableColumn, Tabs, Button, 
RadioButtonGroup, Select, DataTable, DateFormatter, TableColumn, PreText +from bokeh.layouts import layout, widgetbox +from bokeh.io import curdoc, output_file, show + +from guardians import gamora, groot + + +class Bokeh_gamora: + + def __init__(self): + + self.dataroot = os.getenv("DATA_GUARDIAN") + self.datapath = self.dataroot + self.files = [f.split('/')[-1] for f in glob(self.datapath + "roket_*.h5")] + if self.files == []: + self.files = ["No hdf5 files"] + + self.f = None + self.Btt = None + self.P = None + + self.url = "http://" + os.uname()[1] + ".obspm.fr/~" + os.getlogin( + ) + "/roket_display" + self.old = None + self.psf_compass = None + self.psf_Vii = None + + # Widgets Elements + self.pretext = PreText(text=""" """, width=500, height=75) + self.SRcompass = TextInput(value=" ", title="SR compass:") + self.SRVii = TextInput(value=" ", title="SR Vii:") + + self.button_psf = Button(label="PSF !", button_type="success") + self.button_roll = Button(label="Roll", button_type="primary") + + self.select_datapath = Select( + title="Datapath", value=self.dataroot, + options=[self.dataroot] + glob(self.dataroot + "*/")) + self.select_files = Select(title="File", value=self.files[0], options=self.files) + + self.xdr = Range1d(start=0, end=1024) + self.ydr = Range1d(start=1024, end=0) + self.image_compass = figure(x_range=self.xdr, y_range=self.ydr, + x_axis_location="above", title="PSF COMPASS") + self.image_Vii = figure(x_range=self.image_compass.x_range, + y_range=self.image_compass.y_range, + x_axis_location="above", title="PSF ROKET") + self.plot_psf_cuts = figure(plot_height=600, plot_width=800, y_range=[1e-9, 1], + x_range=self.image_compass.x_range, + y_axis_type="log") + self.source_psf_compass = ColumnDataSource(data=dict(x=[], y=[])) + self.source_psf_Vii = ColumnDataSource(data=dict(x=[], y=[])) + + self.image_compass.image_url(url=[], x=0, y=0, w=1024, h=1024) + self.image_Vii.image_url(url=[], x=0, y=0, w=1024, h=1024) + 
self.plot_psf_cuts.line(x="x", y="y", legend="COMPASS", color="red", + muted_alpha=0.1, source=self.source_psf_compass) + self.plot_psf_cuts.line(x="x", y="y", legend="Vii", color="blue", + muted_alpha=0.1, source=self.source_psf_Vii) + self.plot_psf_cuts.legend.click_policy = "mute" + + # Callback functions + self.select_datapath.on_change( + "value", lambda attr, old, new: self.update_files()) + self.select_files.on_change("value", lambda attr, old, new: self.update()) + self.button_psf.on_click(self.comp_psf) + self.button_roll.on_click(self.roll_psf) + + self.update() + + #layouts + self.control_box = widgetbox(self.select_datapath, self.select_files, + self.button_psf, self.button_roll, self.SRcompass, + self.SRVii, self.pretext) + self.tab = Panel( + child=layout([[self.control_box, self.image_compass, self.image_Vii], + [self.plot_psf_cuts]]), title="GAMORA") + + def update(self): + """ + Update the attributes based on the new selected filename + """ + if os.path.exists(self.datapath + str(self.select_files.value)): + self.f = h5py.File(self.datapath + str(self.select_files.value), mode='r+') + self.psf_compass = self.f["psf"][:] + self.psf_Vii = None + self.Btt = self.f["Btt"][:] + self.P = self.f["P"][:] + + def update_files(self): + """ + Update the select_files options following the current datapath + """ + self.datapath = str(self.select_datapath.value) + self.files = self.files = [ + f.split('/')[-1] for f in glob(self.datapath + "roket_*.h5") + ] + if self.files == []: + self.files = ["No hdf5 files"] + + self.select_files.options = self.files + self.select_files.value = self.files[0] + + def update_psf(self): + """ + Update the PSF by ensquaring them + """ + + psfc = self.psf_compass + time = str(datetime.datetime.now().strftime('%Y-%m-%d_%H_%M_%f')) + self.old = "/home/" + os.getlogin() + "/public_html/roket_display" + time + ".png" + mpl.image.imsave(self.old, np.log10(np.abs(psfc))) + self.image_compass.image_url( + url=dict(value=self.url + time 
+ ".png"), x=0, y=0, w=psfc.shape[0], + h=psfc.shape[0]) + self.image_compass.x_range.update(start=0, end=psfc.shape[0]) + self.image_compass.y_range.update(start=psfc.shape[0], end=0) + + self.SRcompass.value = "%.2f" % (self.psf_compass.max()) + + if self.psf_Vii is not None: + + psfv = self.psf_Vii + time = str(datetime.datetime.now().strftime('%Y-%m-%d_%H_%M_%f')) + self.old = "/home/" + os.getlogin( + ) + "/public_html/roket_display" + time + ".png" + mpl.image.imsave(self.old, np.log10(np.abs(psfv))) + self.image_Vii.image_url( + url=dict(value=self.url + time + ".png"), x=0, y=0, w=psfc.shape[0], + h=psfc.shape[0]) + self.SRVii.value = "%.2f" % (self.psf_Vii.max()) + + self.update_cuts() + + def update_cuts(self): + """ + Update the PSF cuts + """ + x = np.arange(self.psf_compass.shape[0]) + self.source_psf_compass.data = dict( + x=x, y=self.psf_compass[:, self.psf_compass.shape[0] // 2]) + if self.psf_Vii is not None: + self.source_psf_Vii.data = dict( + x=x, y=self.psf_Vii[:, self.psf_Vii.shape[0] // 2]) + + def comp_psf(self): + """ + Compute the PSF using the Vii functions and display it + """ + self.pretext.text = """ Computing PSF using Vii... 
Please wait""" + self.button_psf.button_type = "danger" + _, _, self.psf_Vii, _ = gamora.psf_rec_Vii(self.datapath + + str(self.select_files.value)) + self.psf_compass = self.f["psf"][:] + self.update_psf() + self.pretext.text = """ """ + self.button_psf.button_type = "success" + + def roll_psf(self): + """ + Roll the COMPASS PSF (for retro-compatibility with old ROKET files) + """ + self.psf_compass = np.fft.fftshift(self.psf_compass) + self.update_psf() diff --git a/guardians/widgets/widget_groot.py b/guardians/widgets/widget_groot.py new file mode 100644 index 0000000..bd9b8db --- /dev/null +++ b/guardians/widgets/widget_groot.py @@ -0,0 +1,295 @@ +import numpy as np +from glob import glob +import os +import datetime + +import h5py +import matplotlib as mpl + +from bokeh.plotting import figure +from bokeh.models import ColumnDataSource, Range1d +from bokeh.models.widgets import Panel, Toggle, TextInput, Slider, CheckboxButtonGroup, DataTable, TableColumn, Tabs, Button, RadioButtonGroup, Select, DataTable, DateFormatter, TableColumn, PreText +from bokeh.layouts import layout, widgetbox +from bokeh.io import curdoc, output_file, show + +from guardians import gamora, groot, drax + + +class Bokeh_groot: + + def __init__(self): + + self.dataroot = os.getenv("DATA_GUARDIAN") + self.datapath = self.dataroot + self.files = [f.split('/')[-1] for f in glob(self.datapath + "roket_*.h5")] + if self.files == []: + self.files = ["No hdf5 files"] + self.f = None + self.Btt = None + self.P = None + self.nactus = None + self.nmodes = None + self.nslopes = None + + self.url = "http://" + os.uname()[1] + ".obspm.fr/~" + os.getlogin( + ) + "/roket_display" + self.old = None + self.psf_compass = None + self.psf_roket = None + self.psf_groot = None + self.covmat_groot = None + self.covmat_roket = None + + # Widgets Elements + self.pretext = PreText(text=""" """, width=500, height=75) + self.SRcompass = TextInput(value=" ", title="SR compass:") + self.SRroket = TextInput(value=" ", 
title="SR roket:") + self.SRgroot = TextInput(value=" ", title="SR groot:") + + self.button_covmat = Button(label="Covmat", button_type="success") + self.button_psf = Button(label="PSF !", button_type="success") + self.toggle_fit = Toggle(label="Fitting", button_type="primary") + + self.select_datapath = Select( + title="Datapath", value=self.dataroot, + options=[self.dataroot] + glob(self.dataroot + "*/")) + self.select_files = Select(title="File", value=self.files[0], options=self.files) + + self.contributors = ["noise", "bandwidth & tomography", "aliasing"] + self.checkboxButtonGroup_contributors = CheckboxButtonGroup( + labels=self.contributors, active=[]) + self.radioButton_basis = RadioButtonGroup(labels=["Actus", "Btt", "Slopes"], + active=0) + + self.xdr = Range1d(start=0, end=1024) + self.ydr = Range1d(start=1024, end=0) + self.xdr2 = Range1d(start=0, end=1024) + self.ydr2 = Range1d(start=1024, end=0) + self.image_roket = figure(x_range=self.xdr, y_range=self.ydr, + x_axis_location="above", title="PSF ROKET") + self.image_groot = figure(x_range=self.image_roket.x_range, + y_range=self.image_roket.y_range, + x_axis_location="above", title="PSF GROOT") + self.im_covmat_roket = figure(x_range=self.xdr2, y_range=self.ydr2, + x_axis_location="above", title="Covmat ROKET") + self.im_covmat_groot = figure(x_range=self.im_covmat_roket.x_range, + y_range=self.im_covmat_roket.y_range, + x_axis_location="above", title="Covmat GROOT") + self.plot_psf_cuts = figure(plot_height=600, plot_width=800, y_range=[1e-9, 1], + x_range=self.image_roket.x_range, y_axis_type="log") + self.source_psf_roket = ColumnDataSource(data=dict(x=[], y=[])) + self.source_psf_groot = ColumnDataSource(data=dict(x=[], y=[])) + self.source_psf_compass = ColumnDataSource(data=dict(x=[], y=[])) + self.source_covmat_roket = ColumnDataSource(data=dict(x=[], y=[])) + self.source_covmat_groot = ColumnDataSource(data=dict(x=[], y=[])) + + self.image_roket.image_url(url=[], x=0, y=0, w=1024, h=1024) + 
self.image_groot.image_url(url=[], x=0, y=0, w=1024, h=1024) + self.im_covmat_roket.image_url(url=[], x=0, y=0, w=1024, h=1024) + self.im_covmat_groot.image_url(url=[], x=0, y=0, w=1024, h=1024) + self.plot_psf_cuts.line(x="x", y="y", legend="ROKET", color="blue", + muted_alpha=0.1, source=self.source_psf_roket) + self.plot_psf_cuts.line(x="x", y="y", legend="COMPASS", color="red", + muted_alpha=0.1, source=self.source_psf_compass) + self.plot_psf_cuts.line(x="x", y="y", legend="GROOT", color="green", + muted_alpha=0.1, source=self.source_psf_groot) + self.plot_psf_cuts.legend.click_policy = "mute" + + # Callback functions + self.select_datapath.on_change( + "value", lambda attr, old, new: self.update_files()) + self.select_files.on_change("value", lambda attr, old, new: self.update()) + self.button_psf.on_click(self.comp_psf) + self.button_covmat.on_click(self.comp_covmats) + self.update() + + #layouts + self.control_box = widgetbox(self.select_datapath, self.select_files, + self.checkboxButtonGroup_contributors, + self.radioButton_basis, self.button_covmat, + self.button_psf, self.toggle_fit, self.SRcompass, + self.SRroket, self.SRgroot, self.pretext) + self.tab = Panel( + child=layout([[ + self.control_box, self.im_covmat_roket, self.im_covmat_groot + ], [self.image_roket, self.image_groot], [self.plot_psf_cuts]]), + title="GROOT") + + def update(self): + """ + Update the attributes based on the new selected filename + """ + if os.path.exists(self.datapath + str(self.select_files.value)): + self.f = h5py.File(self.datapath + str(self.select_files.value), mode='r+') + self.psf_compass = self.f["psf"][:] + self.SRcompass.value = "%.2f" % (self.psf_compass.max()) + self.psf_groot = None + self.psf_roket = None + self.covmat_groot = None + self.covmat_roket = None + self.Btt = self.f["Btt"][:] + self.P = self.f["P"][:] + self.nactus = self.P.shape[1] + self.nmodes = self.P.shape[0] + self.nslopes = self.f["R"][:].shape[1] + + def update_files(self): + """ + Update 
the select_files options following the current datapath + """ + self.datapath = str(self.select_datapath.value) + self.files = self.files = [ + f.split('/')[-1] for f in glob(self.datapath + "roket_*.h5") + ] + if self.files == []: + self.files = ["No hdf5 files"] + self.select_files.options = self.files + self.select_files.value = self.files[0] + + def update_psf(self): + """ + Update the PSF display + """ + if self.psf_roket is not None: + time = str(datetime.datetime.now().strftime('%Y-%m-%d_%H_%M_%f')) + self.old = "/home/" + os.getlogin( + ) + "/public_html/roket_display" + time + ".png" + mpl.image.imsave(self.old, np.log10(np.abs(self.psf_roket))) + self.image_roket.image_url( + url=dict(value=self.url + time + ".png"), x=0, y=0, + w=self.psf_roket.shape[0], h=self.psf_roket.shape[0]) + self.image_roket.x_range.update(start=0, end=self.psf_roket.shape[0]) + self.image_roket.y_range.update(start=self.psf_roket.shape[0], end=0) + self.SRroket.value = "%.2f" % (self.psf_roket.max()) + + if self.psf_groot is not None: + time = str(datetime.datetime.now().strftime('%Y-%m-%d_%H_%M_%f')) + self.old = "/home/" + os.getlogin( + ) + "/public_html/roket_display" + time + ".png" + mpl.image.imsave(self.old, np.log10(np.abs(self.psf_groot))) + self.image_groot.image_url( + url=dict(value=self.url + time + ".png"), x=0, y=0, + w=self.psf_groot.shape[0], h=self.psf_groot.shape[0]) + self.SRgroot.value = "%.2f" % (self.psf_groot.max()) + + self.update_cuts() + + def update_covmats(self): + """ + Update the covmats + """ + if self.covmat_roket is not None: + time = str(datetime.datetime.now().strftime('%Y-%m-%d_%H_%M_%f')) + self.old = "/home/" + os.getlogin( + ) + "/public_html/roket_display" + time + ".png" + mpl.image.imsave(self.old, self.covmat_roket) + self.im_covmat_roket.image_url( + url=dict(value=self.url + time + ".png"), x=0, y=0, + w=self.covmat_roket.shape[0], h=self.covmat_roket.shape[0]) + self.im_covmat_roket.x_range.update(start=0, 
end=self.covmat_roket.shape[0]) + self.im_covmat_roket.y_range.update(start=self.covmat_roket.shape[0], end=0) + + if self.covmat_groot is not None: + time = str(datetime.datetime.now().strftime('%Y-%m-%d_%H_%M_%f')) + self.old = "/home/" + os.getlogin( + ) + "/public_html/roket_display" + time + ".png" + mpl.image.imsave(self.old, self.covmat_groot) + self.im_covmat_groot.image_url( + url=dict(value=self.url + time + ".png"), x=0, y=0, + w=self.covmat_groot.shape[0], h=self.covmat_groot.shape[0]) + + def update_cuts(self): + """ + Update the PSF cuts + """ + if self.psf_roket is not None: + x = np.arange(self.psf_roket.shape[0]) + self.source_psf_roket.data = dict( + x=x, y=self.psf_roket[:, self.psf_roket.shape[0] // 2]) + if self.psf_groot is not None: + x = np.arange(self.psf_groot.shape[0]) + self.source_psf_groot.data = dict( + x=x, y=self.psf_groot[:, self.psf_groot.shape[0] // 2]) + if self.psf_compass is not None: + x = np.arange(self.psf_compass.shape[0]) + self.source_psf_compass.data = dict( + x=x, y=self.psf_compass[:, self.psf_compass.shape[0] // 2]) + + def comp_covmats(self): + """ + Compute the covmats using GROOT model and display it + """ + self.pretext.text = """ Computing covmats... 
Please wait""" + self.button_covmat.button_type = "danger" + contrib = [ + self.contributors[c] + for c in self.checkboxButtonGroup_contributors.active + ] + if contrib == []: + contrib = self.contributors + if "bandwidth & tomography" in contrib: + contrib.remove("bandwidth & tomography") + contrib.append("bandwidth") + contrib.append("tomography") + modal = self.radioButton_basis.active + if modal == 1: + self.covmat_groot = np.zeros((self.nmodes, self.nmodes)) + elif modal == 2: + self.covmat_groot = np.zeros((self.nslopes, self.nslopes)) + else: + self.covmat_groot = np.zeros((self.nactus, self.nactus)) + + if modal != 2: + if "noise" in contrib: + self.covmat_groot += groot.compute_Cn_cpu( + self.datapath + str(self.select_files.value), modal=modal) + if "aliasing" in contrib: + self.covmat_groot += groot.compute_Ca_cpu( + self.datapath + str(self.select_files.value), modal=modal) + if "tomography" in contrib or "bandwidth" in contrib: + self.covmat_groot += groot.compute_Cerr( + self.datapath + str(self.select_files.value), modal=modal) + + err = drax.get_err_contributors(self.datapath + str(self.select_files.value), + contrib) + self.covmat_roket = err.dot(err.T) / err.shape[1] + if modal: + self.covmat_roket = self.P.dot(self.covmat_roket).dot(self.P.T) + else: + if "aliasing" in contrib: + self.covmat_groot, self.covmat_roket = groot.compute_Calias( + self.datapath + str(self.select_files.value)) + + self.update_covmats() + + self.pretext.text = """ """ + self.button_covmat.button_type = "success" + + def comp_psf(self): + """ + Compute the PSF from the covmats + """ + self.pretext.text = """ Computing PSFs... 
Please wait""" + self.button_psf.button_type = "danger" + + fit = self.toggle_fit.active + if self.covmat_groot.shape[0] != self.nmodes: + self.covmat_groot = self.P.dot(self.covmat_groot).dot(self.P.T) + self.covmat_roket = self.P.dot(self.covmat_roket).dot(self.P.T) + + otftel, otf2, self.psf_groot, _ = gamora.psf_rec_Vii( + self.datapath + str(self.select_files.value), + cov=self.covmat_groot.astype(np.float32), fitting=False) + if fit: + otffit, _ = groot.compute_OTF_fitting( + self.datapath + str(self.select_files.value), otftel) + self.psf_groot = gamora.add_fitting_to_psf( + self.datapath + str(self.select_files.value), otf2 * otftel, otffit) + + _, _, self.psf_roket, _ = gamora.psf_rec_Vii( + self.datapath + str(self.select_files.value), + cov=self.covmat_roket.astype(np.float32), fitting=fit) + + self.update_psf() + self.pretext.text = """ """ + self.button_psf.button_type = "success" diff --git a/guardians/widgets/widget_guardian.py b/guardians/widgets/widget_guardian.py new file mode 100644 index 0000000..3ebf642 --- /dev/null +++ b/guardians/widgets/widget_guardian.py @@ -0,0 +1,21 @@ +import numpy as np + +from widget_roket import Bokeh_roket +from widget_gamora import Bokeh_gamora +from widget_groot import Bokeh_groot + +from bokeh.models.widgets import Tabs + + +class Bokeh_guardian: + """ + Class that defines a bokeh layout for all the guardians package + Usage: see bokeh_roket.py which is the executable + """ + + def __init__(self): + self.roket = Bokeh_roket() + self.gamora = Bokeh_gamora() + self.groot = Bokeh_groot() + + self.tab = Tabs(tabs=[self.roket.tab, self.gamora.tab, self.groot.tab]) diff --git a/guardians/widgets/widget_roket.py b/guardians/widgets/widget_roket.py new file mode 100644 index 0000000..97452ab --- /dev/null +++ b/guardians/widgets/widget_roket.py @@ -0,0 +1,240 @@ +import numpy as np +from glob import glob +import os + +import h5py + +from bokeh.plotting import figure +from bokeh.models import ColumnDataSource +from 
bokeh.models.widgets import Panel, DataTable, TableColumn, Tabs, Button, RadioButtonGroup, Select, DataTable, DateFormatter, TableColumn, PreText +from bokeh.layouts import layout, widgetbox +from bokeh.io import curdoc, output_file, show + +from guardians import drax + + +class Bokeh_roket: + """ + Class that defines a bokeh layout and callback functions for ROKET + Usage: see bokeh_roket.py which is the executable + """ + + def __init__(self): + + self.dataroot = os.getenv("DATA_GUARDIAN") + self.datapath = self.dataroot + self.files = [f.split('/')[-1] for f in glob(self.datapath + "roket_*.h5")] + if self.files == []: + self.files = ["No hdf5 files"] + self.f = None + self.Btt = None + self.P = None + self.cov = None + self.cor = None + self.url = "http://hippo6.obspm.fr/~fferreira/roket_display" + self.old = None + + # Widgets Elements + self.pretext = PreText(text=""" """, width=500, height=75) + self.button_load = Button(label="Load", button_type="success") + self.select_datapath = Select( + title="Datapath", value=self.dataroot, + options=[self.dataroot] + glob(self.dataroot + "*/")) + self.select_files = Select(title="File", value=self.files[0], options=self.files) + self.radioButton_basis = RadioButtonGroup(labels=["Actuators", "Btt"], active=1) + + self.colors = { + "filtered modes": "green", + "bandwidth": "orange", + "noise": "red", + "tomography": "purple", + "non linearity": "cyan", + "aliasing": "blue" + } + self.contributors = [c for c in self.colors.keys()] + self.source_breakdown = ColumnDataSource( + data=dict(n=[], a=[], b=[], t=[], nl=[], f=[], fm=[])) + self.source_cov = ColumnDataSource( + data=dict(Type=[], Noise=[], Trunc=[], Aliasing=[], FilteredModes=[], + Bandwidth=[], Tomography=[])) + self.source_cor = ColumnDataSource( + data=dict(Type=[], Noise=[], Trunc=[], Aliasing=[], FilteredModes=[], + Bandwidth=[], Tomography=[])) + self.source_params = ColumnDataSource(data=dict(Parameter=[], Value=[])) + columns = [ + TableColumn(field="n", 
title="noise"), + TableColumn(field="a", title="aliasing"), + TableColumn(field="b", title="bandwidth"), + TableColumn(field="t", title="tomography"), + TableColumn(field="nl", title="non lin."), + TableColumn(field="f", title="fitting"), + TableColumn(field="fm", title="filt. modes") + ] + + self.table_breakdown = DataTable(source=self.source_breakdown, columns=columns, + width=400, height=75) + #self.update_breakdown() + + columns2 = [ + TableColumn(field="Type", title="Cov."), + TableColumn(field="Noise", title="Noise"), + TableColumn(field="Trunc", title="Non lin."), + TableColumn(field="Aliasing", title="Alias."), + TableColumn(field="FilteredModes", title="Filt."), + TableColumn(field="Bandwidth", title="Band."), + TableColumn(field="Tomography", title="Tomo"), + ] + self.table_cov = DataTable(source=self.source_cov, columns=columns2, width=400, + height=200) + columns2[0] = TableColumn(field="Type", title="Cor.") + self.table_cor = DataTable(source=self.source_cor, columns=columns2, width=400, + height=250) + #self.update_covcor() + + tmp = [ + TableColumn(field="Parameter", title="Parameter"), + TableColumn(field="Value", title="Value") + ] + self.table_params = DataTable(source=self.source_params, columns=tmp, width=600, + height=500) + #self.update_params() + self.source_variances = {} + for c in self.contributors: + self.source_variances[c] = ColumnDataSource(data=dict(x=[], y=[])) + self.p = figure(plot_height=600, plot_width=800, y_axis_type="log", + y_range=[1e-9, 1], title="Contibutors variance") + for c in self.contributors: + self.p.line(x="x", y="y", legend=c, color=self.colors[c], + muted_color=self.colors[c], muted_alpha=0.1, + source=self.source_variances[c]) + + self.p.legend.click_policy = "mute" + + # Callback functions + self.select_datapath.on_change( + "value", lambda attr, old, new: self.update_files()) + self.button_load.on_click(self.load_file) + self.radioButton_basis.on_change("active", lambda attr, old, new: self.update()) + + # 
Layouts + self.control_box = widgetbox(self.select_datapath, self.select_files, + self.button_load, self.radioButton_basis) + self.tab = Panel( + child=layout([[ + self.control_box, self.p, + widgetbox(self.pretext, self.table_breakdown, self.table_cov, + self.table_cor) + ], [self.table_params]]), title="ROKET") + + def update_files(self): + """ + Update the select_files options following the current datapath + """ + self.datapath = str(self.select_datapath.value) + self.files = self.files = [ + f.split('/')[-1] for f in glob(self.datapath + "roket_*.h5") + ] + if self.files == []: + self.files = ["No hdf5 files"] + self.select_files.options = self.files + self.select_files.value = self.files[0] + + def load_file(self): + """ + Load the selected file and update the display + """ + self.button_load.button_type = "danger" + self.f = h5py.File(self.datapath + str(self.select_files.value), mode='r+') + self.Btt = self.f["Btt"][:] + self.P = self.f["P"][:] #/np.sqrt(self.IF.shape[0]) + self.cov = self.f["cov"][:] + self.cor = self.f["cor"][:] + + self.update() + self.update_breakdown() + self.update_covcor() + self.update_params() + self.button_load.button_type = "success" + + print("DB loaded") + + def update_breakdown(self): + """ + Update the values of the error breakdown tables + """ + self.pretext.text = """ Updating error breakdown... Please wait""" + + breakdown = drax.get_breakdown(self.datapath + str(self.select_files.value)) + self.source_breakdown.data = dict( + n=[int(np.round(breakdown["noise"])) + ], a=[int(np.round(breakdown["aliasing"])) + ], b=[int(np.round(breakdown["bandwidth"])) + ], t=[int(np.round(breakdown["tomography"])) + ], nl=[int(np.round(breakdown["non linearity"])) + ], f=[int(np.round(breakdown["fitting"]))], + fm=[int(np.round(breakdown["filtered modes"]))]) + self.pretext.text = """ """ + + def update_covcor(self): + """ + Update tables of covariances and correlations + """ + self.pretext.text = """ Updating cov cor tables... 
Please wait""" + + self.source_cov.data = dict( + Type=["Noise", "Trunc", "Alias.", "Filt.", "Band.", + "Tomo"], Noise=["%.2E" % v for v in self.cov[:, 0] + ], Trunc=["%.2E" % v for v in self.cov[:, 1]], + Aliasing=["%.2E" % v for v in self.cov[:, 2] + ], FilteredModes=["%.2E" % v for v in self.cov[:, 3]], + Bandwidth=["%.2E" % v for v in self.cov[:, 4] + ], Tomography=["%.2E" % v for v in self.cov[:, 5]]) + self.source_cor.data = dict( + Type=["Noise", "Trunc", "Alias.", "Filt.", "Band.", + "Tomo"], Noise=["%.2f" % v for v in self.cor[:, 0] + ], Trunc=["%.2f" % v for v in self.cor[:, 1]], + Aliasing=["%.2f" % v for v in self.cor[:, 2] + ], FilteredModes=["%.2f" % v for v in self.cor[:, 3]], + Bandwidth=["%.2f" % v for v in self.cor[:, 4] + ], Tomography=["%.2f" % v for v in self.cor[:, 5]]) + + self.pretext.text = """ """ + + def update_params(self): + """ + Update the simulation parameters table + """ + self.pretext.text = """ Updating parameters table... Please wait""" + params = list(self.f.attrs.keys()) + params.sort() + values = [] + for k in params: + values.append(str(self.f.attrs[k])) + self.source_params.data = dict(Parameter=params, Value=values) + self.pretext.text = """ """ + + def update(self): + """ + Main callback function that update the bokeh display + """ + tmp = self.button_load.button_type + self.button_load.button_type = "danger" + self.pretext.text = """ Updating plot... 
Please wait""" + + basis_active = self.radioButton_basis.active + xi = [] + yi = [] + coloris = [] + + for c in self.contributors: + self.source_variances[c].data = dict(x=[], y=[], color=[], legend=[]) + data = self.f[c][:] + self.p.xaxis.axis_label = "Actuators" + if (basis_active): + data = self.P.dot(data) + self.p.xaxis.axis_label = "Modes" + self.source_variances[c].data = dict( + x=np.arange(len(data)).tolist(), y=np.var(data, axis=1).tolist()) + + self.pretext.text = """ """ + self.button_load.button_type = tmp diff --git a/shesha/__init__.py b/shesha/__init__.py index 5c21f43..b431510 100644 --- a/shesha/__init__.py +++ b/shesha/__init__.py @@ -6,7 +6,7 @@ import subprocess, sys -__version__ = "5.0.0" +__version__ = "5.1.0" def check_shesha_compass_versions(): compass_package = subprocess.check_output('conda list compass | tail -n1',shell=True).decode( diff --git a/shesha/ao/__init__.py b/shesha/ao/__init__.py index 63ec922..40e10d9 100644 --- a/shesha/ao/__init__.py +++ b/shesha/ao/__init__.py @@ -1,7 +1,7 @@ ## @package shesha.ao ## @brief Python package for AO operations on COMPASS simulation ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # diff --git a/shesha/ao/basis.py b/shesha/ao/basis.py index b2374cf..5ca9f5e 100644 --- a/shesha/ao/basis.py +++ b/shesha/ao/basis.py @@ -1,7 +1,7 @@ ## @package shesha.ao.basis ## @brief Functions for modal basis (DM basis, KL, Btt, etc...) 
## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -54,7 +54,7 @@ def compute_KL2V(p_controller: conf.Param_controller, dms: Dms, p_dms: list, """ Compute the Karhunen-Loeve to Volt matrix (transfer matrix between the KL space and volt space for a pzt dm) - :parameters: + Args: p_controller: (Param_controller) : p_controller settings @@ -118,7 +118,7 @@ def compute_dm_basis(g_dm, p_dm: conf.Param_dm, p_geom: conf.Param_geom): - get the corresponding dm shape - apply pupil mask and store in a column - :parameters: + Args: g_dm: (Dm) : Dm object p_dm: (Param_dm) : dm settings @@ -160,7 +160,7 @@ def compute_IFsparse(g_dm: Dms, p_dms: list, p_geom: conf.Param_geom): - get the corresponding dm shape - apply pupil mask and store in a column - :parameters: + Args: g_dm: (Dms) : Dms object @@ -191,7 +191,7 @@ def command_on_Btt(rtc: Rtc, dms: Dms, p_dms: list, p_geom: conf.Param_geom, nfi """ Compute a command matrix in Btt modal basis (see error breakdown) and set it on the sutra_rtc. It computes by itself the volts to Btt matrix. - :parameters: + Args: rtc: (Rtc) : rtc object @@ -216,7 +216,7 @@ def command_on_Btt(rtc: Rtc, dms: Dms, p_dms: list, p_geom: conf.Param_geom, nfi def compute_cmat_with_Btt(rtc: Rtc, Btt: np.ndarray, nfilt: int): """ Compute a command matrix on the Btt basis and load it in the GPU - :parameters: + Args: rtc: (Rtc): rtc object @@ -248,7 +248,7 @@ def command_on_KL(rtc: Rtc, dms: Dms, p_controller: conf.Param_controller, """ Compute a command matrix in KL modal basis and set it on the sutra_rtc. It computes by itself the volts to KL matrix. 
- :parameters: + Args: rtc: (Rtc) : rtc object @@ -271,7 +271,7 @@ def command_on_KL(rtc: Rtc, dms: Dms, p_controller: conf.Param_controller, def compute_cmat_with_KL(rtc: Rtc, KL2V: np.ndarray, nfilt: int): """ Compute a command matrix on the KL basis and load it in the GPU - :parameters: + Args: rtc: (Rtc): rtc object @@ -299,10 +299,10 @@ def compute_fourier(nActu: int, pitch: float, actu_x_pos: np.ndarray, actu_y_pos: np.ndarray, periodic='n'): ''' Values you are looking for are: - config.p_dm0.nact - config.p_dm0._pitch - config.p_dm0._i1 - config.p_dm0._j1 + config.p_dms[0].nact + config.p_dms[0]._pitch + config.p_dms[0]._i1 + config.p_dms[0]._j1 ''' # Offset xpos and ypos to get integer indices. # Compute nact x nact x nact x nact Fourier basis # Periodic condition n / n-1 as option @@ -348,7 +348,7 @@ def compute_fourier(nActu: int, pitch: float, actu_x_pos: np.ndarray, def compute_btt(IFpzt, IFtt, influ_petal=None, return_delta=False): """ Returns Btt to Volts and Volts to Btt matrices - :parameters: + Args: IFpzt : (csr_matrix) : influence function matrix of pzt DM, sparse and arrange as (Npts in pup x nactus) diff --git a/shesha/ao/cmats.py b/shesha/ao/cmats.py index cecb88d..545ad63 100644 --- a/shesha/ao/cmats.py +++ b/shesha/ao/cmats.py @@ -1,7 +1,7 @@ ## @package shesha.ao.cmats ## @brief Computation implementations of command matrix ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -57,7 +57,7 @@ def generic_imat_inversion( ) -> np.ndarray: """ Generic numpy modal interaction matrix inversion function - :parameters: + Args: M2V: (nActu x nModes) : modal basis matrix @@ -85,7 +85,7 @@ def cmat_init(ncontrol: int, rtc: Rtc, p_controller: conf.Param_controller, nmodes: int = 0) -> None: """ Compute the command matrix on the GPU - :parameters: + Args: ncontrol: (int) : @@ -165,7 +165,7 @@ def Btt_for_cmat(rtc, dms, p_dms, p_geom): """ Compute a command matrix in Btt 
modal basis (see error breakdown) and set it on the sutra_rtc. It computes by itself the volts to Btt matrix. - :parameters: + Args: rtc: (Rtc) : rtc object @@ -194,7 +194,7 @@ def get_cmat(D, nfilt, Btt=None, rtc=None, svd=None): get_cmat(D,nfilt,Btt=BTT,rtc=RTC) get_cmat(D,nfilt,svd=SVD) - :parameters: + Args: D: (np.ndarray[ndim=2, dtype=np.float32]): interaction matrix nfilt: (int): number of element to filter diff --git a/shesha/ao/imats.py b/shesha/ao/imats.py index c96e454..a69c79b 100644 --- a/shesha/ao/imats.py +++ b/shesha/ao/imats.py @@ -1,7 +1,7 @@ ## @package shesha.ao.imats ## @brief Computation implementations of interaction matrix ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -56,7 +56,7 @@ def imat_geom(wfs: Sensors, dms: Dms, p_wfss: List[conf.Param_wfs], meth: int = 0) -> np.ndarray: """ Compute the interaction matrix with a geometric method - :parameters: + Args: wfs: (Sensors) : Sensors object @@ -117,7 +117,7 @@ def imat_init(ncontrol: int, rtc: Rtc, dms: Dms, p_dms: list, wfs: Sensors, p_wf dataBase: dict = {}, use_DB: bool = False) -> None: """ Initialize and compute the interaction matrix on the GPU - :parameters: + Args: ncontrol: (int) : controller's index @@ -179,21 +179,20 @@ def imat_init(ncontrol: int, rtc: Rtc, dms: Dms, p_dms: list, wfs: Sensors, p_wf # do imat geom -def imat_geom_ts_multiple_direction(wfs: Sensors, dms: Dms, p_wfss: List[conf.Param_wfs], +def imat_geom_ts_multiple_direction(wfs: Sensors, dms: Dms, p_ts: conf.Param_wfs, p_dms: List[conf.Param_dm], p_geom: conf.Param_geom, ind_TS: int, ind_dmseen: List, p_tel: conf.Param_tel, x, y, meth: int = 0) -> np.ndarray: """ Compute the interaction matrix with a geometric method for multiple truth sensors (with different direction) - :parameters: - + Args: wfs: (Sensors) : Sensors object dms: (Dms) : Dms object - p_wfss: (list of Param_wfs) : wfs settings + p_ts: (Param_wfs) : truth sensor 
settings - ind_TS: (int) : index of the truth sensor in the wfs settings list + ind_TS: (int) : index of the truth sensor in Sensors (wfs) p_dms: (list of Param_dm) : dms settings @@ -201,10 +200,9 @@ def imat_geom_ts_multiple_direction(wfs: Sensors, dms: Dms, p_wfss: List[conf.Pa p_controller: (Param_controller) : controller settings + Kwargs: meth: (int) : (optional) method type (0 or 1) """ - if (ind_TS < 0): - ind_TS = len(p_wfss) - 1 imat_size2 = 0 print("DMS_SEEN: ", ind_dmseen) for nm in ind_dmseen: @@ -228,26 +226,25 @@ def imat_geom_ts_multiple_direction(wfs: Sensors, dms: Dms, p_wfss: List[conf.Pa wfs.d_wfs[ind_TS].d_gs.remove_layer(p_dms[k].type, k) wfs.d_wfs[ind_TS].d_gs.add_layer(p_dms[k].type, k, xoff, yoff) imat_cpu = np.concatenate( - (imat_cpu, imat_geom_ts(wfs, dms, p_wfss, ind_TS, p_dms, ind_dmseen, + (imat_cpu, imat_geom_ts(wfs, dms, p_ts, ind_TS, p_dms, ind_dmseen, meth)), axis=0) return imat_cpu -def imat_geom_ts(wfs: Sensors, dms: Dms, p_wfss: conf.Param_wfs, ind_TS: int, +def imat_geom_ts(wfs: Sensors, dms: Dms, p_ts: conf.Param_wfs, ind_TS: int, p_dms: List[conf.Param_dm], ind_DMs: List[int], meth: int = 0) -> np.ndarray: """ Compute the interaction matrix with a geometric method for a single truth sensor - :parameters: - + Args: wfs: (Sensors) : Sensors object dms: (Dms) : Dms object - p_wfss: (list of Param_wfs) : wfs settings + p_ts: (Param_wfs) : truth sensor settings - ind_TS: (int) : index of the truth sensor in the wfs settings list + ind_TS: (int) : index of the truth sensor in Sensors (wfs) p_dms: (list of Param_dm) : dms settings @@ -255,12 +252,13 @@ def imat_geom_ts(wfs: Sensors, dms: Dms, p_wfss: conf.Param_wfs, ind_TS: int, p_controller: (Param_controller) : controller settings + Kwargs: meth: (int) : (optional) method type (0 or 1) """ #nwfs = 1 #p_controller.nwfs.size # as parameter list of indices for wfs if several ts (only 1 ts for now) ndm = len(ind_DMs) #p_controller.ndm.size # as parameter list of indices of used dms - 
imat_size1 = p_wfss[ind_TS]._nvalid * 2 # as parameter (nvalid) + imat_size1 = p_ts._nvalid * 2 # as parameter (nvalid) imat_size2 = 0 # for nw in range(nwfs): @@ -292,13 +290,22 @@ def imat_geom_ts(wfs: Sensors, dms: Dms, p_wfss: conf.Param_wfs, ind_TS: int, return imat_cpu -def get_metaD(sup, TS_xpos=None, TS_ypos=None, ind_TS=-1, save_metaD=False, nControl=0): +def get_metaD(sup, p_wfs, TS_xpos=None, TS_ypos=None, ind_TS=-1, n_control=0): """Create an interaction matrix for the current simulation given TS position - :parameters: - sim : : current COMPASS simulation + + Args: + sup : (CompassSupervisor) : current COMPASS simulation + + p_ts: (Param_wfs) : truth sensor settings + TS_xpos : np.ndarray : TS position (x axis) + TS_ypos : np.ndarray : TS position (y axis) + ind_TS: (int) : index of the truth sensor in Sensors (wfs) + + n_control : (int) : index of the controller + :return: metaD : np.ndarray :interaction matrix """ @@ -320,7 +327,7 @@ def get_metaD(sup, TS_xpos=None, TS_ypos=None, ind_TS=-1, save_metaD=False, nCon if (TS_ypos.size < 1): TS_ypos = np.zeros((1)) - return imat_geom_ts_multiple_direction(sup.wfs._wfs, sup.dms._dms, sup.config.p_wfss, + return imat_geom_ts_multiple_direction(sup.wfs._wfs, sup.dms._dms, p_wfs, sup.config.p_dms, sup.config.p_geom, ind_TS, - sup.config.p_controllers[nControl].ndm, + sup.config.p_controllers[n_control].ndm, sup.config.p_tel, TS_xpos, TS_ypos) diff --git a/shesha/ao/modopti.py b/shesha/ao/modopti.py index 7f0f7e7..36fbbb9 100644 --- a/shesha/ao/modopti.py +++ b/shesha/ao/modopti.py @@ -1,7 +1,7 @@ ## @package shesha.ao.modopti ## @brief Functions used for modal optimization control ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -43,7 +43,7 @@ def open_loopSlp(tel: Telescope, atmos: Atmos, wfs: Sensors, rtc: Rtc, nrec: int ncontrol: int, p_wfss: list): """ Return a set of recorded open-loop slopes, usefull for initialize 
modal control optimization - :parameters: + Args: tel: (Telescope) : Telescope object diff --git a/shesha/ao/tomo.py b/shesha/ao/tomo.py index d5d46c5..72f349c 100644 --- a/shesha/ao/tomo.py +++ b/shesha/ao/tomo.py @@ -1,7 +1,7 @@ ## @package shesha.ao.tomo ## @brief Computation of tomographic reconstructor ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -52,7 +52,7 @@ def do_tomo_matrices(ncontrol: int, rtc: Rtc, p_wfss: List[conf.Param_wfs], dms: p_atmos: conf.Param_atmos): """ Compute Cmm and Cphim matrices for the MV controller on GPU - :parameters: + Args: ncontrol: (int): controller index @@ -190,7 +190,7 @@ def selectDMforLayers(p_atmos: conf.Param_atmos, p_controller: conf.Param_contro p_dms: list): """ For each atmos layer, select the DM which have to handle it in the Cphim computation for MV controller - :parameters: + Args: p_atmos : (Param_atmos) : atmos parameters @@ -262,7 +262,7 @@ def create_nact_geom(p_dm: conf.Param_dm): def create_piston_filter(p_dm: conf.Param_dm): """ Create the piston filter matrix - :parameters: + Args: p_dm: (Param_dm): dm settings """ diff --git a/shesha/ao/wfs.py b/shesha/ao/wfs.py index c9de51a..da2f4c2 100644 --- a/shesha/ao/wfs.py +++ b/shesha/ao/wfs.py @@ -1,7 +1,7 @@ ## @package shesha.ao.wfs ## @brief On the fly modification of the WFS ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -49,7 +49,7 @@ def comp_new_pyr_ampl(nwfs: int, ampli: float, p_wfss: list, p_tel: conf.Param_t npts_force: int = None): """ Set the pyramid modulation amplitude - :parameters: + Args: nwfs : (int): WFS index @@ -92,7 +92,7 @@ def noise_cov(nw: int, p_wfs: conf.Param_wfs, p_atmos: conf.Param_atmos, Photon noise: (pi^2/2)*(1/Nphotons)*(d/r0)^2 / (2*pi*d/lambda)^2 Electronic noise: (pi^2/3)*(wfs.noise^2/N^2photons)*wfs.npix^2*(wfs.npix*wfs.pixsize*d/lambda)^2 / 
(2*pi*d/lambda)^2 - :parameters: + Args: nw: wfs number @@ -138,7 +138,7 @@ def comp_new_fstop(wfs: Sensors, n: int, p_wfs: conf.Param_wfs, fssize: float, fstop: bytes): """ Compute a new field stop for pyrhr WFS - :parameters: + Args: n : (int) : WFS index diff --git a/shesha/config/PATMOS.py b/shesha/config/PATMOS.py index d738423..a4129e8 100644 --- a/shesha/config/PATMOS.py +++ b/shesha/config/PATMOS.py @@ -1,7 +1,7 @@ ## @package shesha.config.PATMOS ## @brief Param_atmos class definition ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # diff --git a/shesha/config/PCENTROIDER.py b/shesha/config/PCENTROIDER.py index 70e05b8..ddcc242 100644 --- a/shesha/config/PCENTROIDER.py +++ b/shesha/config/PCENTROIDER.py @@ -1,7 +1,7 @@ ## @package shesha.config.PCENTROIDER ## @brief Param_centroider class definition ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # diff --git a/shesha/config/PCONTROLLER.py b/shesha/config/PCONTROLLER.py index 98f87b5..671ed9e 100644 --- a/shesha/config/PCONTROLLER.py +++ b/shesha/config/PCONTROLLER.py @@ -1,7 +1,7 @@ ## @package shesha.config.PCONTROLLER ## @brief Param_controller class definition ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -49,6 +49,8 @@ class Param_controller: def __init__(self): self.__type = None """ type of controller""" + self.__command_law = None + """ type of command law type for generic controller only""" self.__nwfs = None """ index of wfss in controller""" self.__nvalid = 0 @@ -96,6 +98,17 @@ def __init__(self): """ Gain applied to modes at cMat inversion """ self.__nstates = 0 """ Number of states""" + ''' MODAL OPTIMIZATION CLOSE''' + self.__close_opti = False + """ Flag for modal optimization with close """ + self.__mgain_init = 1.0 + """ Initial values of 
the modal gains """ + self.__lfdownup = (0.01, 0.01) + """ Modal gain correction learning factor """ + self.__close_learning_factor = 0.3 + """ Autocorrelation learning factor """ + self.__close_target = 0.0 + """ Target value """ def get_type(self): """ Get the controller type @@ -113,6 +126,22 @@ def set_type(self, t): type = property(get_type, set_type) + def get_command_law(self): + """ Get the command law type for generic controller only + + :return: (string) : Command law type + """ + return self.__command_law + + def set_command_law(self, t): + """ Set the command law type for generic controller only + + :param t: (string) : Command law type + """ + self.__command_law = scons.check_enum(scons.CommandLawType, t) + + command_law = property(get_command_law, set_command_law) + def get_do_kl_imat(self): """Get type imat, for imat on kl set at 1 @@ -484,3 +513,84 @@ def set_nstates(self, l): self.__nstates = csu.enforce_int(l) nstates = property(get_nstates, set_nstates) + + def get_close_opti(self): + """ Get flag for CLOSE modal optimization + + :return: (bool) : CLOSE flag + """ + return self.__close_opti + + def set_close_opti(self, close_opti): + """ Set the flag for CLOSE modal optimization + + :param close_opti: (bool) : CLOSE flag + """ + self.__close_opti = close_opti + + close_opti = property(get_close_opti, set_close_opti) + + def get_mgain_init(self): + """ Get the initial value of modal gains + + :return: (float) : initial value for modal gains + """ + return self.__mgain_init + + def set_mgain_init(self, mgain_init): + """ Set the initial value of modal gains + + :param mgain_init: (float) : init valuo of modal gain + """ + self.__mgain_init = csu.enforce_float(mgain_init) + + mgain_init = property(get_mgain_init, set_mgain_init) + + def get_lfdownup(self): + """ Get the autocorrelation learning factors + + :return: (tuple) : learning factors for autocorrelation + """ + return self.__lfdownup + + def set_lfdownup(self, qplus, qminus): + """ Set the 
autocorrelation learning factor + + :param qplus: (float) : learning factor when higher than target + :param qminus: (float) : learning factor when lower than target + """ + self.__lfdownup = (csu.enforce_float(qplus), csu.enforce_float(qminus)) + + lfdownup = property(get_lfdownup, set_lfdownup) + + def get_close_learning_factor(self): + """ Get the modal gain learning factor + + :return: (float) : learning factor for modal gain + """ + return self.__close_learning_factor + + def set_close_learning_factor(self, p): + """ Set the modal gain optimization learning factor + + :param p: (float) : learning factor + """ + self.__close_learning_factor = csu.enforce_float(p) + + lf = property(get_close_learning_factor, set_close_learning_factor) + + def get_close_target(self): + """ Get the autocorrelation target + + :return: (float) : CLOSE autocorrelation target + """ + return self.__close_target + + def set_close_target(self, t): + """ Set the autocorrelation target + + :param t: (float) : close target + """ + self.__close_target = csu.enforce_float(t) + + close_target = property(get_close_target, set_close_target) \ No newline at end of file diff --git a/shesha/config/PDMS.py b/shesha/config/PDMS.py index 125a3bd..dee706e 100644 --- a/shesha/config/PDMS.py +++ b/shesha/config/PDMS.py @@ -1,7 +1,7 @@ ## @package shesha.config.PDMS ## @brief Param_dm class definition ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -51,6 +51,7 @@ def __init__(self): self.__nact = 0 # linear number of actuators across the pupil diameter self.__alt = 0.0 # DM conjugation altitude self.__thresh = 0.0 # Threshold on response for selection + self.__keep_all_actu = False # if True, don't mask actu by pupil self.__coupling = 0.2 # Actuator coupling (< .3) self.__gain = 1.0 # Actuator gains self.__pupoffset = np.array([0, 0]) @@ -570,6 +571,22 @@ def set_thresh(self, t): thresh = property(get_thresh, 
set_thresh) + def get_keep_all_actu(self): + """ Get the flag for keeping all actuators + + :return: (bool) : keep all actuator flag (boolean) + """ + return self.__keep_all_actu + + def set_keep_all_actu(self, k): + """ set the flag for keeping all actuators + + :param k: (f) : keep all actuator flag (boolean) + """ + self.__keep_all_actu = csu.enforce_or_cast_bool(k) + + keep_all_actu = property(get_keep_all_actu, set_keep_all_actu) + def get_coupling(self): """ Get the actuators coupling diff --git a/shesha/config/PGEOM.py b/shesha/config/PGEOM.py index ace9ce0..822118b 100644 --- a/shesha/config/PGEOM.py +++ b/shesha/config/PGEOM.py @@ -1,7 +1,7 @@ ## @package shesha.config.PGEOM ## @brief Param_geom class definition ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # diff --git a/shesha/config/PLOOP.py b/shesha/config/PLOOP.py index 296331a..241cae7 100644 --- a/shesha/config/PLOOP.py +++ b/shesha/config/PLOOP.py @@ -1,7 +1,7 @@ ## @package shesha.config.PLOOP ## @brief Param_loop class definition ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -59,7 +59,7 @@ def get_devices(self): def set_devices(self, devices): """ Set the list of GPU devices used - :parameters: + Args: devices: (np.ndarray[ndim=1, dtype=np.int32_t]) : list of GPU devices """ self.__devices = csu.enforce_array(devices, len(devices), dtype=np.int32, @@ -77,7 +77,7 @@ def get_niter(self): def set_niter(self, n): """ Set the number of iteration - :parameters: + Args: n: (long) : number of iteration """ self.__niter = csu.enforce_int(n) @@ -94,7 +94,7 @@ def get_ittime(self): def set_ittime(self, t): """ Set iteration time - :parameters: + Args: t: (float) :iteration time """ self.__ittime = csu.enforce_float(t) diff --git a/shesha/config/PTARGET.py b/shesha/config/PTARGET.py index bf59de0..62391ca 100644 --- 
a/shesha/config/PTARGET.py +++ b/shesha/config/PTARGET.py @@ -1,7 +1,7 @@ ## @package shesha.config.PTARGET ## @brief Param_target class definition ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # diff --git a/shesha/config/PTEL.py b/shesha/config/PTEL.py index 3f33cc7..710f60f 100644 --- a/shesha/config/PTEL.py +++ b/shesha/config/PTEL.py @@ -1,7 +1,7 @@ ## @package shesha.config.PTEL ## @brief Param_tel class definition ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # diff --git a/shesha/config/PWFS.py b/shesha/config/PWFS.py index c220c19..30c2371 100644 --- a/shesha/config/PWFS.py +++ b/shesha/config/PWFS.py @@ -1,7 +1,7 @@ ## @package shesha.config.PWFS ## @brief Param_wfs class definition ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -121,9 +121,9 @@ def __init__(self, roket=False): # Fakecam mode (uint16) self.__fakecam = False """ uint16 computation flag for WFS image """ - self.__maxFluxPerPix = 0 + self.__max_flux_per_pix = 0 """ Maximum number of photons allowed before pixel computation (only used if fakecam is True) """ - self.__maxPixValue = 0 + self.__max_pix_value = 0 """ Maximum number of ADU photons allowed in the uint16 image (only used if fakecam is True) """ # internal kwrd self.__pdiam = 0 @@ -546,32 +546,32 @@ def get_maxFluxPerPix(self): :return: (int) : max_flux_per_pix """ - return self.__maxFluxPerPix + return self.__max_flux_per_pix def set_max_flux_per_pix(self, max_flux_per_pix): """ Set the max_flux_per_pix :return: (int) : max_flux_per_pix """ - self.__maxFluxPerPix = csu.enforce_int(max_flux_per_pix) + self.__max_flux_per_pix = csu.enforce_int(max_flux_per_pix) max_flux_per_pix = property(get_maxFluxPerPix, set_max_flux_per_pix) - def get_maxPixValue(self): + def 
get_max_pix_value(self): """ Get the max_pix_value :return: (int) : max_pix_value """ - return self.__maxPixValue + return self.__max_pix_value def set_max_pix_value(self, max_pix_value): """ Set the max_pix_value :return: (int) : max_pix_value """ - self.__maxPixValue = csu.enforce_int(max_pix_value) + self.__max_pix_value = csu.enforce_int(max_pix_value) - max_pix_value = property(get_maxPixValue, set_max_pix_value) + max_pix_value = property(get_max_pix_value, set_max_pix_value) def get_gsalt(self): """ Get the altitude of guide star diff --git a/shesha/config/__init__.py b/shesha/config/__init__.py index 5242ab9..f7da8aa 100644 --- a/shesha/config/__init__.py +++ b/shesha/config/__init__.py @@ -1,7 +1,7 @@ ## @package shesha.config ## @brief Parameter classes for COMPASS ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -37,7 +37,7 @@ __all__ = [ 'PATMOS', 'PDMS', 'PGEOM', 'PLOOP', 'PTEL', 'PWFS', 'PTARGET', 'PCONTROLLER', - 'PCENTROIDER', 'config_setter_utils' + 'PCENTROIDER', 'config_setter_utils', 'pconfig' ] from .PATMOS import Param_atmos @@ -49,3 +49,4 @@ from .PTARGET import Param_target from .PCENTROIDER import Param_centroider from .PCONTROLLER import Param_controller +from .pconfig import ParamConfig diff --git a/shesha/config/config_setter_utils.py b/shesha/config/config_setter_utils.py index 621cf7b..32523df 100644 --- a/shesha/config/config_setter_utils.py +++ b/shesha/config/config_setter_utils.py @@ -1,7 +1,7 @@ ## @package shesha.config.config_setter ## @brief Utility functions for enforcing types in a property setter ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # diff --git a/shesha/config/pconfig.py b/shesha/config/pconfig.py new file mode 100644 index 0000000..bd4d2b3 --- /dev/null +++ b/shesha/config/pconfig.py @@ -0,0 +1,407 @@ +## @package shesha.config +## @brief 
Parameters configuration class +## @author COMPASS Team +## @version 5.1.0 +## @date 2020/05/18 +## @copyright GNU Lesser General Public License +# +# This file is part of COMPASS +# +# Copyright (C) 2011-2019 COMPASS Team +# All rights reserved. +# Distributed under GNU - LGPL +# +# COMPASS is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser +# General Public License as published by the Free Software Foundation, either version 3 of the License, +# or any later version. +# +# COMPASS: End-to-end AO simulation tool using GPU acceleration +# The COMPASS platform was designed to meet the need of high-performance for the simulation of AO systems. +# +# The final product includes a software package for simulating all the critical subcomponents of AO, +# particularly in the context of the ELT and a real-time core based on several control approaches, +# with performances consistent with its integration into an instrument. Taking advantage of the specific +# hardware architecture of the GPU, the COMPASS tool allows to achieve adequate execution speeds to +# conduct large simulation campaigns called to the ELT. +# +# The COMPASS platform can be used to carry a wide variety of simulations to both testspecific components +# of AO of the E-ELT (such as wavefront analysis device with a pyramid or elongated Laser star), and +# various systems configurations such as multi-conjugate AO. +# +# COMPASS is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the +# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License along with COMPASS. 
import importlib
import importlib.util  # `import importlib` alone does not bind the .util submodule
import os
import sys
from collections import OrderedDict
from typing import Dict, NoReturn, Tuple

import numpy as np

import shesha.constants as scons


class ParamConfig(object):
    """ Shesha parameters configuration class. It embeds all the
    parameters classes instances needed for the simulation run.

    This class also exposes most useful getters from its components
    to allow an easier access and exposition through Pyro

    Attributes:
        simul_name : (str) : Simulation run name

        p_atmos : (Param_atmos) : A Param_atmos instance

        p_geom : (Param_geom) : A Param_geom instance

        p_tel : (Param_tel) : A Param_tel instance

        p_dms : (List of Param_dm) : List of Param_dm instance

        p_wfss : (List of Param_wfs) : List of Param_wfs instance

        p_targets : (List of Param_target) : List of Param_target instance

        p_loop : (Param_loop) : A Param_loop instance

        p_centroiders : (List of Param_centroider) : List of Param_centroider instance

        p_controllers : (List of Param_controller) : List of Param_controller instance

        _config : (configuration module from parfile) : Raw parameter file module
    """

    # Attributes that _load_config_from_module guarantees to exist on the
    # raw config module (set to None when absent from the parameter file).
    _PARAM_ATTRS = ("p_loop", "p_geom", "p_tel", "p_atmos", "p_dms",
                    "p_targets", "p_wfss", "p_centroiders", "p_controllers",
                    "simul_name")

    def __init__(self, param_file: str):
        self._load_config_from_file(param_file)
        self.simul_name = self._config.simul_name
        self.p_atmos = self._config.p_atmos
        self.p_tel = self._config.p_tel
        self.p_geom = self._config.p_geom
        self.p_wfss = self._config.p_wfss
        self.p_dms = self._config.p_dms
        self.p_targets = self._config.p_targets
        self.p_loop = self._config.p_loop
        self.p_centroiders = self._config.p_centroiders
        self.p_controllers = self._config.p_controllers

    def _load_config_from_file(self, filename_path: str) -> NoReturn:
        """ Load the parameters from the parameters file

        Args:
            filename_path: (str): path to the parameters file (.py file
                                  or importable module path)

        Raises:
            ValueError: if filename_path is neither a .py file nor a module
        """
        path = os.path.dirname(os.path.abspath(filename_path))
        filename = os.path.basename(filename_path)
        name, ext = os.path.splitext(filename)

        if ext == ".py":
            # Make the parameter file importable, then restore sys.path.
            # (fixed: the original sys.path.remove() call was placed after
            # the return statement and was unreachable dead code)
            inserted = path not in sys.path
            if inserted:
                sys.path.insert(0, path)
            try:
                return self._load_config_from_module(name)
            finally:
                if inserted:
                    sys.path.remove(path)
        elif importlib.util.find_spec(filename_path) is not None:
            return self._load_config_from_module(filename_path)
        else:
            raise ValueError("Config file must be .py or a module")

    def _load_config_from_module(self, filepath: str) -> NoReturn:
        """ Load the parameters from the parameters module

        Sets self._config to the freshly (re-)imported parameter module and
        fills any missing parameter attribute with None.

        Args:
            filepath: (str): import path of the parameters module
        """
        filename = filepath.split('.')[-1]
        print("loading: %s" % filename)

        config = importlib.import_module(filepath)
        del sys.modules[config.__name__]  # Forced reload
        self._config = importlib.import_module(filepath)

        if hasattr(config, 'par'):
            # Bench parameter files live in the config.par.par4bench package:
            # import the actual module from there.
            # (fixed: the original called getattr() on the *string*
            # "config.par.par4bench", which always raised AttributeError)
            # NOTE(review): package path assumed from the original string —
            # confirm against the bench configuration layout.
            self._config = importlib.import_module(
                    "config.par.par4bench." + filename)

        # Any parameter attribute missing from the module defaults to None
        for attr in self._PARAM_ATTRS:
            if not hasattr(self._config, attr):
                setattr(self._config, attr, None)

    def get_pupil(self, pupil_type) -> np.ndarray:
        """ Returns the specified pupil of COMPASS.

        Possible args value are :
            - "s" or "spupil" : returns the smallest pupil of size (p_geom.pupdiam x p_geom.pupdiam)
            - "m" or "mpupil" : returns the medium pupil, used for WFS computation
            - "i" or "ipupil" : returns the biggest pupil of size (Nfft x Nfft)

        Args:
            pupil_type: (str or PupilType): pupil to return

        Returns:
            pupil : (np.ndarray) : pupil

        Raises:
            ValueError: if pupil_type is not a valid PupilType value
        """
        ptype = scons.PupilType(pupil_type)
        if ptype is scons.PupilType.SPUPIL:
            return self.p_geom.get_spupil()
        if ptype is scons.PupilType.MPUPIL:
            return self.p_geom.get_mpupil()
        return self.p_geom.get_ipupil()  # only IPUPIL remains

    def export_config(self) -> Tuple[Dict, Dict]:
        """ Extract and convert compass supervisor configuration parameters
        into 2 dictionaries containing relevant AO parameters

        (fixed: the return annotation was the invalid type `[Dict, Dict]`)

        Returns:
            aodict : (OrderedDict) : scalar and list AO parameters

            dataDict : (dict) : numpy arrays (valid subaperture maps,
                                actuator positions, ...)
        """
        aodict = OrderedDict()
        dataDict = {}

        # Telescope / turbulence / loop (only available in simulation case)
        if (self.p_tel is not None):
            aodict.update({"teldiam": self.p_tel.diam})
            aodict.update({"telobs": self.p_tel.cobs})
            aodict.update({"pixsize": self.p_geom._pixsize})
            # TURBU
            aodict.update({"r0": self.p_atmos.r0})
            aodict.update({"Fe": 1 / self.p_loop.ittime})
            aodict.update({"nbTargets": len(self.p_targets)})
        else:
            aodict.update({"nbTargets": 1})

        # WFS
        aodict.update({"nbWfs": len(self.p_wfss)})
        aodict.update({"nbCam": aodict["nbWfs"]})
        aodict.update({"nbOffaxis": 0})
        aodict.update({"nbNgsWFS": 1})
        aodict.update({"nbLgsWFS": 0})
        aodict.update({"nbFigSensor": 0})
        aodict.update({"nbSkyWfs": aodict["nbWfs"]})
        aodict.update({"nbOffNgs": 0})

        # DMS
        aodict.update({"nbDms": len(self.p_dms)})
        aodict.update({"Nactu": self.p_controllers[0].nactu})
        # List of things
        aodict.update({"list_NgsOffAxis": []})
        aodict.update({"list_Fig": []})
        aodict.update({"list_Cam": [0]})
        aodict.update({"list_SkyWfs": [0]})
        aodict.update({"list_ITS": []})
        aodict.update({"list_Woofer": []})
        aodict.update({"list_Tweeter": []})
        aodict.update({"list_Steering": []})

        aodict.update({
                "list_nstatesPerController": [c.nstates for c in self.p_controllers]
        })
        aodict.update({
                "list_controllerType": [c.type for c in self.p_controllers]
        })

        # Per-WFS quantities
        NslopesList = []
        NsubapList = []
        listWfsType = []
        listCentroType = []
        pyrModulationList = []
        pyr_npts = []
        pyr_pupsep = []
        pixsize = []
        xPosList = []
        yPosList = []
        fstopsize = []
        fstoptype = []
        npixPerSub = []
        nxsubList = []
        nysubList = []
        lambdaList = []
        dms_seen = []
        noise = []
        for i in range(aodict["nbWfs"]):
            p_wfs = self.p_wfss[i]
            dataDict["wfsValid_" + str(i)] = p_wfs._isvalid  # valid subap map

            # valid subap positions in X / Y on the detector
            xytab = np.zeros((2, p_wfs._validsubsx.shape[0]))
            xytab[0, :] = p_wfs._validsubsx
            xytab[1, :] = p_wfs._validsubsy
            dataDict["wfsValidXY_" + str(i)] = xytab

            pixsize.append(p_wfs.pixsize)
            # assumes that there is the same number of centroiders and wfs
            listCentroType.append(self.p_centroiders[i].type)
            NsubapList.append(p_wfs._nvalid)  # subap per wfs
            listWfsType.append(p_wfs.type)
            xPosList.append(p_wfs.xpos)
            yPosList.append(p_wfs.ypos)
            fstopsize.append(p_wfs.fssize)
            fstoptype.append(p_wfs.fstop)
            nxsubList.append(p_wfs.nxsub)
            nysubList.append(p_wfs.nxsub)  # NOTE(review): nxsub on purpose? square grid assumed — confirm
            lambdaList.append(p_wfs.Lambda)
            if (p_wfs.dms_seen is not None):
                dms_seen.append(list(p_wfs.dms_seen))
            noise.append(p_wfs.noise)

            # maskedpix centroiders produce 4 measures per valid subap, others 2
            if (self.p_centroiders[i].type == scons.CentroiderType.MASKEDPIX):
                NslopesList.append(p_wfs._nvalid * 4)  # slopes per wfs
            else:
                NslopesList.append(p_wfs._nvalid * 2)  # slopes per wfs

            if (p_wfs.type == "pyrhr"):
                pyrModulationList.append(p_wfs.pyr_ampl)
                pyr_npts.append(p_wfs.pyr_npts)
                pyr_pupsep.append(p_wfs.pyr_pup_sep)
                npixPerSub.append(1)
            else:
                pyrModulationList.append(0)
                pyr_npts.append(0)
                pyr_pupsep.append(0)
                npixPerSub.append(p_wfs.npix)

        if (len(dms_seen) != 0):
            aodict.update({"listWFS_dms_seen": dms_seen})

        aodict.update({"listWFS_NslopesList": NslopesList})
        aodict.update({"listWFS_NsubapList": NsubapList})
        aodict.update({"listWFS_CentroType": listCentroType})
        aodict.update({"listWFS_WfsType": listWfsType})
        aodict.update({"listWFS_pixarc": pixsize})
        aodict.update({"listWFS_pyrModRadius": pyrModulationList})
        aodict.update({"listWFS_pyrModNPts": pyr_npts})
        aodict.update({"listWFS_pyrPupSep": pyr_pupsep})
        aodict.update({"listWFS_fstopsize": fstopsize})
        aodict.update({"listWFS_fstoptype": fstoptype})
        aodict.update({"listWFS_NsubX": nxsubList})
        aodict.update({"listWFS_NsubY": nysubList})
        aodict.update({"listWFS_Nsub": nysubList})
        aodict.update({"listWFS_NpixPerSub": npixPerSub})
        aodict.update({"listWFS_Lambda": lambdaList})
        if (len(noise) != 0):
            aodict.update({"listWFS_noise": noise})

        # Per-DM quantities
        listDmsType = []
        NactuX = []
        Nactu = []
        unitPerVolt = []
        push4imat = []
        coupling = []
        push4iMatArcSec = []
        for j in range(aodict["nbDms"]):
            p_dm = self.p_dms[j]
            listDmsType.append(p_dm.type)
            NactuX.append(p_dm.nact)  # nb of actuators across the diameter !!
            Nactu.append(p_dm._ntotact)  # nb of actuators in total
            unitPerVolt.append(p_dm.unitpervolt)
            push4imat.append(p_dm.push4imat)
            coupling.append(p_dm.coupling)
            if (p_dm._i1 is not None):  # Simu case where i1 j1 is known (simulated)
                if (p_dm.type != 'tt'):
                    tmpdata = np.zeros((4, len(p_dm._i1)))
                    tmpdata[0, :] = p_dm._j1
                    tmpdata[1, :] = p_dm._i1
                    tmpdata[2, :] = p_dm._xpos
                    tmpdata[3, :] = p_dm._ypos
                else:
                    tmpdata = np.zeros((4, 2))
                dataDict["dmData" + str(j)] = tmpdata
            # placeholder: per-DM dynamic range not computed here
            push4iMatArcSec.append([])

        if (len(push4iMatArcSec) != 0):
            aodict.update({"listDMS_push4iMat": push4imat})
            aodict.update({"listDMS_unitPerVolt": unitPerVolt})
            aodict.update({"listDMS_Nxactu": NactuX})
            aodict.update({"listDMS_Nyactu": NactuX})
            aodict.update({"listDMS_Nactu": Nactu})

        aodict.update({"listDMS_type": listDmsType})
        aodict.update({"listDMS_coupling": coupling})

        # Per-target quantities (simulation case only)
        if (self.p_targets is not None):
            listTargetsLambda = []
            listTargetsXpos = []
            listTargetsYpos = []
            listTargetsDmsSeen = []
            listTargetsMag = []
            listTARGETS_pixsize = []
            for k in range(aodict["nbTargets"]):
                p_target = self.p_targets[k]
                listTargetsLambda.append(p_target.Lambda)
                listTargetsXpos.append(p_target.xpos)
                listTargetsYpos.append(p_target.ypos)
                listTargetsMag.append(p_target.mag)
                listTargetsDmsSeen.append(list(p_target.dms_seen))
                # PSF pixel scale in arcsec (206265 arcsec per radian)
                PSFPixsize = (p_target.Lambda * 1e-6) / (
                        self.p_geom._pixsize *
                        self.p_geom.get_ipupil().shape[0]) * 206265.
                listTARGETS_pixsize.append(PSFPixsize)

            aodict.update({"listTARGETS_Lambda": listTargetsLambda})
            aodict.update({"listTARGETS_Xpos": listTargetsXpos})
            aodict.update({"listTARGETS_Ypos": listTargetsYpos})
            aodict.update({"listTARGETS_Mag": listTargetsMag})
            aodict.update({"listTARGETS_DmsSeen": listTargetsDmsSeen})
            aodict.update({"listTARGETS_pixsize": listTARGETS_pixsize})

        aodict.update({"Nslopes": sum(NslopesList)})
        aodict.update({"Nsubap": sum(NsubapList)})
        return aodict, dataDict
import numpy as np +from aenum import MultiValueEnum class CONST: @@ -111,6 +112,16 @@ class ControllerType: GEO = 'geo' +class CommandLawType: + """ + Command law types for generic controller only + """ + + INTEGRATOR = 'integrator' + MODAL_INTEGRATOR = 'modal_integrator' + TWO_MATRICES = '2matrices' + + class CentroiderType: """ Centroider types @@ -194,11 +205,13 @@ class ProfType: GAUSS2 = 'Gauss2' GAUSS3 = 'Gauss3' EXP = 'Exp' + MULTIPEAK = 'Multipeak' FILES = dict({ GAUSS1: "allProfileNa_withAltitude_1Gaussian.npy", GAUSS2: "allProfileNa_withAltitude_2Gaussian.npy", GAUSS3: "allProfileNa_withAltitude_3Gaussian.npy", - EXP: "allProfileNa_withAltitude.npy" + EXP: "allProfileNa_withAltitude.npy", + MULTIPEAK: "multipeakProfileNa_withAltitude.npy" }) @@ -208,3 +221,10 @@ class FieldStopType: """ SQUARE = 'square' ROUND = 'round' + +class PupilType(MultiValueEnum): + """Compass pupil enumeration + """ + SPUPIL = "spupil", "s" + MPUPIL = "mpupil", "m" + IPUPIL = "ipupil", "i" diff --git a/shesha/init/__init__.py b/shesha/init/__init__.py index c35561b..5daad64 100644 --- a/shesha/init/__init__.py +++ b/shesha/init/__init__.py @@ -1,7 +1,7 @@ ## @package shesha.init ## @brief Python package for COMPASS simulation initialization ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # diff --git a/shesha/init/atmos_init.py b/shesha/init/atmos_init.py index fd80874..8103e96 100644 --- a/shesha/init/atmos_init.py +++ b/shesha/init/atmos_init.py @@ -1,7 +1,7 @@ ## @package shesha.init.atmos_init ## @brief Initialization of a Atmos object ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -50,16 +50,26 @@ def atmos_init(context: carmaWrap_context, p_atmos: conf.Param_atmos, """ Initializes an Atmos object - :parameters: + Args: context: (carmaWrap_context): GPU device context + p_atmos: (Param_atmos): Atmosphere 
parameters + p_tel: (Param_tel): Telescope parameters + p_geom: (Param_geom): Geometry parameters - ittime: (float): (optional) exposition time [s] - p_wfss: (list of Param_wfs): (optional) WFS parameters - p_targets: (list of Param_target): (optional) target parameters - dataBase: (dict): (optional) dictionary for data base - use_DB: (bool): (optional) flag for using the dataBase system + + Kwargs: + ittime: (float): exposition time [s] + + p_wfss: (list of Param_wfs): WFS parameters + + p_targets: (list of Param_target): target parameters + + dataBase: (dict): dictionary for data base + + use_DB: (bool): flag for using the dataBase system + :return: atm : (Atmos): Atmos object """ diff --git a/shesha/init/dm_init.py b/shesha/init/dm_init.py index e5a5167..3a5a97c 100644 --- a/shesha/init/dm_init.py +++ b/shesha/init/dm_init.py @@ -1,7 +1,7 @@ ## @package shesha.init.dm_init ## @brief Initialization of a Dms object ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -62,10 +62,10 @@ def dm_init(context: carmaWrap_context, p_dms: List[conf.Param_dm], p_tel: conf.Param_tel, p_geom: conf.Param_geom, - p_wfss: List[conf.Param_wfs] = None, keepAllActu: bool = False) -> Dms: + p_wfss: List[conf.Param_wfs] = None) -> Dms: """Create and initialize a Dms object on the gpu - :parameters: + Args: context: (carmaWrap_context): context p_dms: (list of Param_dms) : dms settings p_tel: (Param_tel) : telescope settings @@ -94,18 +94,17 @@ def dm_init(context: carmaWrap_context, p_dms: List[conf.Param_dm], for i in range(len(p_dms)): max_extent = _dm_init(context, dms, p_dms[i], xpos_wfs, ypos_wfs, p_geom, - p_tel.diam, p_tel.cobs, p_tel.pupangle, max_extent, - keepAllActu=keepAllActu) + p_tel.diam, p_tel.cobs, p_tel.pupangle, max_extent) return dms def _dm_init(context: carmaWrap_context, dms: Dms, p_dm: conf.Param_dm, xpos_wfs: list, ypos_wfs: list, p_geom: conf.Param_geom, diam: float, cobs: 
float, - pupAngle: float, max_extent: int, keepAllActu: bool = False): + pupAngle: float, max_extent: int): """ inits a Dms object on the gpu - :parameters: + Args: context: (carmaWrap_context): context dms: (Dms) : dm object @@ -136,14 +135,17 @@ def _dm_init(context: carmaWrap_context, dms: Dms, p_dm: conf.Param_dm, xpos_wfs if (p_dm.type == scons.DmType.PZT): if p_dm.file_influ_fits == None: - p_dm._pitch = patchDiam / float(p_dm.nact - 1) + if p_dm._pitch is None: + p_dm._pitch = patchDiam / float(p_dm.nact - 1) + print(f"DM pitch = {p_dm._pitch:8.5f} pix = {p_dm._pitch*diam/p_geom.pupdiam:8.5f} m", + flush=True) # + 2.5 pitch each side extent = p_dm._pitch * (p_dm.nact + p_dm.pzt_extent) p_dm._n1, p_dm._n2 = dm_util.dim_dm_support(p_geom.cent, extent, p_geom.ssize) # calcul defaut influsize - make_pzt_dm(p_dm, p_geom, cobs, pupAngle, keepAllActu=keepAllActu) + make_pzt_dm(p_dm, p_geom, cobs, pupAngle) else: init_custom_dm(p_dm, p_geom, diam) @@ -211,12 +213,11 @@ def _dm_init(context: carmaWrap_context, dms: Dms, p_dm: conf.Param_dm, xpos_wfs def _dm_init_factorized(context: carmaWrap_context, dms: Dms, p_dm: conf.Param_dm, xpos_wfs: list, ypos_wfs: list, p_geom: conf.Param_geom, - diam: float, cobs: float, pupAngle: float, max_extent: int, - keepAllActu: bool = False): + diam: float, cobs: float, pupAngle: float, max_extent: int): """ inits a Dms object on the gpu NOTE: This is the - :parameters: + Args: context: (carmaWrap_context): context dms: (Dms) : dm object @@ -254,7 +255,7 @@ def _dm_init_factorized(context: carmaWrap_context, dms: Dms, p_dm: conf.Param_d extent = p_dm._pitch * (p_dm.nact + p_dm.pzt_extent) # calcul defaut influsize - make_pzt_dm(p_dm, p_geom, cobs, pupAngle, keepAllActu=keepAllActu) + make_pzt_dm(p_dm, p_geom, cobs, pupAngle) elif (p_dm.type == scons.DmType.TT): if (p_dm.alt == 0) and (max_extent != 0): @@ -309,7 +310,7 @@ def dm_init_standalone(context: carmaWrap_context, p_dms: list, p_geom: conf.Par diam=1., cobs=0., pupAngle=0., 
wfs_xpos=[0], wfs_ypos=[0]): """Create and initialize a Dms object on the gpu - :parameters: + Args: p_dms: (list of Param_dms) : dms settings p_geom: (Param_geom) : geom settings @@ -335,11 +336,11 @@ def dm_init_standalone(context: carmaWrap_context, p_dms: list, p_geom: conf.Par def make_pzt_dm(p_dm: conf.Param_dm, p_geom: conf.Param_geom, cobs: float, - pupAngle: float, keepAllActu: bool = False): + pupAngle: float): """Compute the actuators positions and the influence functions for a pzt DM. NOTE: if the DM is in altitude, central obstruction is forced to 0 - :parameters: + Args: p_dm: (Param_dm) : dm parameters p_geom: (Param_geom) : geometry parameters @@ -388,10 +389,10 @@ def make_pzt_dm(p_dm: conf.Param_dm, p_geom: conf.Param_geom, cobs: float, if p_dm.type_pattern == scons.PatternType.HEXA: print("Pattern type : hexa") cub = dm_util.createHexaPattern(pitch, p_geom.pupdiam * 1.1) - keepAllActu = True + p_dm.keep_all_actu = True elif p_dm.type_pattern == scons.PatternType.HEXAM4: print("Pattern type : hexaM4") - keepAllActu = True + p_dm.keep_all_actu = True cub = dm_util.createDoubleHexaPattern(pitch, p_geom.pupdiam * 1.1, pupAngle) if p_dm.margin_out is not None: print(f'p_dm.margin_out={p_dm.margin_out} is being ' @@ -408,7 +409,7 @@ def make_pzt_dm(p_dm: conf.Param_dm, p_geom: conf.Param_geom, cobs: float, else: raise ValueError("This pattern does not exist for pzt dm") - if keepAllActu: + if p_dm.keep_all_actu: inbigcirc = np.arange(cub.shape[1]) else: if (p_dm.alt > 0): @@ -518,7 +519,7 @@ def make_pzt_dm(p_dm: conf.Param_dm, p_geom: conf.Param_geom, cobs: float, def init_custom_dm(p_dm: conf.Param_dm, p_geom: conf.Param_geom, diam: float): """Read Fits for influence pzt fonction and form - :parameters: + Args: p_dm: (Param_dm) : dm settings p_geom: (Param_geom) : geom settings @@ -669,7 +670,7 @@ def make_tiptilt_dm(p_dm: conf.Param_dm, patchDiam: int, p_geom: conf.Param_geom diam: float): """Compute the influence functions for a tip-tilt DM - 
:parameters: + Args: p_dm: (Param_dm) : dm settings patchDiam: (int) : patchDiam for dm size @@ -705,7 +706,7 @@ def make_kl_dm(p_dm: conf.Param_dm, patchDiam: int, p_geom: conf.Param_geom, cobs: float) -> None: """Compute the influence function for a Karhunen-Loeve DM - :parameters: + Args: p_dm: (Param_dm) : dm settings patchDiam: (int) : patchDiam for dm size @@ -757,7 +758,7 @@ def make_kl_dm(p_dm: conf.Param_dm, patchDiam: int, p_geom: conf.Param_geom, def comp_dmgeom(p_dm: conf.Param_dm, p_geom: conf.Param_geom): """Compute the geometry of a DM : positions of actuators and influence functions - :parameters: + Args: dm: (Param_dm) : dm settings geom: (Param_geom) : geom settings @@ -826,7 +827,7 @@ def correct_dm(context, dms: Dms, p_dms: list, p_controller: conf.Param_controll use_DB: bool = False): """Correct the geometry of the DMs using the imat (filter unseen actuators) - :parameters: + Args: context: (carmaWrap_context): context dms: (Dms) : Dms object p_dms: (list of Param_dm) : dms settings diff --git a/shesha/init/geom_init.py b/shesha/init/geom_init.py index 2edb8ca..827c8e8 100644 --- a/shesha/init/geom_init.py +++ b/shesha/init/geom_init.py @@ -1,7 +1,7 @@ ## @package shesha.init.geom_init ## @brief Initialization of the system geometry and of the Telescope object ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -51,14 +51,22 @@ def tel_init(context: carmaWrap_context, p_geom: conf.Param_geom, p_tel: conf.Pa """ Initialize the overall geometry of the AO system, including pupil and WFS - :parameters: + Args: context: (carmaWrap_context) : context + p_geom: (Param_geom) : geom settings + p_tel: (Param_tel) : telescope settings + r0: (float) : atmos r0 @ 0.5 microns + ittime: (float) : 1/loop frequency [s] + p_wfss: (list of Param_wfs) : wfs settings - dm: (list of Param_dm) : (optional) dms settings [=None] + + Kwargs: + dm: (list of Param_dm) : dms settings [=None] 
+ :return: telescope: (Telescope): Telescope object @@ -113,7 +121,7 @@ def init_wfs_geom(p_wfs: conf.Param_wfs, r0: float, p_tel: conf.Param_tel, """Compute the geometry of WFSs: valid subaps, positions of the subaps, flux per subap, etc... - :parameters: + Args: p_wfs: (Param_wfs) : wfs settings r0: (float) : atmos r0 @ 0.5 microns @@ -168,7 +176,7 @@ def init_wfs_geom(p_wfs: conf.Param_wfs, r0: float, p_tel: conf.Param_tel, def init_wfs_size(p_wfs: conf.Param_wfs, r0: float, p_tel: conf.Param_tel, verbose=1): """Compute all the parameters usefull for further WFS image computation (array sizes) - :parameters: + Args: p_wfs: (Param_wfs) : wfs settings r0: (float) : atmos r0 @ 0.5 microns @@ -344,7 +352,7 @@ def compute_nphotons(wfs_type, ittime, optthroughput, diam, cobs=0, nxsub=0, zer gsmag=0, lgsreturnperwatt=0, laserpower=0, verbose=1): ''' Determines the number of photons TBC - :parameters: + Args: wfs_type: (scons.WFSType) : wfs type: SH or PYRHR. ittime: (float) : 1/loop frequency [s]. @@ -412,7 +420,7 @@ def init_pyrhr_geom(p_wfs: conf.Param_wfs, r0: float, p_tel: conf.Param_tel, """Compute the geometry of PYRHR WFSs: valid subaps, positions of the subaps, flux per subap, etc... - :parameters: + Args: p_wfs: (Param_wfs) : wfs settings r0: (float) : atmos r0 @ 0.5 microns @@ -624,7 +632,7 @@ def init_sh_geom(p_wfs: conf.Param_wfs, r0: float, p_tel: conf.Param_tel, """Compute the geometry of SH WFSs: valid subaps, positions of the subaps, flux per subap, etc... 
- :parameters: + Args: p_wfs: (Param_wfs) : wfs settings r0: (float) : atmos r0 @ 0.5 microns @@ -814,7 +822,7 @@ def geom_init(p_geom: conf.Param_geom, p_tel: conf.Param_tel, padding=2): """ Initialize the system geometry - :parameters: + Args: p_geom: (Param_geom) : geometry settings p_tel: (Param_tel) : telescope settings padding: (optional) : padding factor for PYRHR geometry @@ -872,7 +880,7 @@ def geom_init_generic(p_geom, pupdiam, t_spiders=0.01, spiders_type="six", xc=0, real=0, cobs=0): """Initialize the system geometry - :parameters: + Args: pupdiam: (long) : linear size of total pupil t_spiders: (float) : secondary supports ratio. diff --git a/shesha/init/lgs_init.py b/shesha/init/lgs_init.py index d6c2ffc..fa1ab6f 100644 --- a/shesha/init/lgs_init.py +++ b/shesha/init/lgs_init.py @@ -1,7 +1,7 @@ ## @package shesha.init.lgs_init ## @brief Initialization of a LGS in a Wfs object ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -59,7 +59,7 @@ def make_lgs_prof1d(p_wfs: conf.Param_wfs, p_tel: conf.Param_tel, prof: np.ndarr h: np.ndarray, beam: float, center=""): """same as prep_lgs_prof but cpu only. original routine from rico - :parameters: + Args: p_tel: (Param_tel) : telescope settings prof: (np.ndarray[dtype=np.float32]) : Na profile intensity, in arbitrary units @@ -204,7 +204,7 @@ def prep_lgs_prof(p_wfs: conf.Param_wfs, nsensors: int, p_tel: conf.Param_tel, effect. It is obtaind by convolution of a gaussian of width "lgsWidth" arcseconds, with the line of the sodium profile "prof". The altitude of the profile is the array "h". 
- :parameters: + Args: p_wfs: (Param_wfs) : WFS settings nsensors: (int) : wfs index diff --git a/shesha/init/rtc_init.py b/shesha/init/rtc_init.py index 9b7fff2..5fda487 100644 --- a/shesha/init/rtc_init.py +++ b/shesha/init/rtc_init.py @@ -1,7 +1,7 @@ ## @package shesha.init.rtc_init ## @brief Initialization of a Rtc object ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -58,27 +58,47 @@ def rtc_init(context: carmaWrap_context, tel: Telescope, wfs: Sensors, dms: Dms, tar=None, dataBase={}, use_DB=False): """Initialize all the SutraRtc objects : centroiders and controllers - :parameters: + Args: context: (carmaWrap_context): context + tel: (Telescope) : Telescope object + wfs: (Sensors) : Sensors object + dms: (Dms) : Dms object + atmos: (Atmos) : Atmos object + p_wfss: (list of Param_wfs) : wfs settings + p_tel: (Param_tel) : telescope settings + p_geom: (Param_geom) : geom settings + p_atmos: (Param_atmos) : atmos settings + ittime: (float) : iteration time [s] - p_centroiders : (list of Param_centroider): (optional) centroiders settings - p_controllers : (list of Param_controller): (optional) controllers settings - p_dms: (list of Param_dms) : (optional) dms settings - do_refslp : (bool): (optional) do ref slopes flag, default=False - brahma: (bool) : (optional) brahma flag - cacao: (bool) : (optional) cacao flag - tar: (Target) : (optional) - dataBase: (dict): (optional) dict containig paths to files to load + + Kwargs: + p_centroiders : (list of Param_centroider): centroiders settings + + p_controllers : (list of Param_controller): controllers settings + + p_dms: (list of Param_dms) : dms settings + + do_refslp : (bool): do ref slopes flag, default=False + + brahma: (bool) : brahma flag + + cacao: (bool) : cacao flag + + tar: (Target) : Target object + + dataBase: (dict): dict containig paths to files to load + use_DB: (bool): use dataBase flag - :return: + + Returns: Rtc : 
(Rtc) : Rtc object """ # initialisation var @@ -153,12 +173,37 @@ def rtc_init(context: carmaWrap_context, tel: Telescope, wfs: Sensors, dms: Dms, return rtc -# MODBY J def rtc_standalone(context: carmaWrap_context, nwfs: int, nvalid: list, nactu: int, centroider_type: list, delay: list, offset: list, scale: list, brahma: bool = False, fp16: bool = False, cacao: bool = False) -> Rtc: - """ - TODO docstring + """Initialize all the SutraRtc objects : centroiders and controllers + + Args: + context: (carmaWrap_context): context + + nwfs: (int): number of wavefront sensors + + nvalid: (int): number of valid measures as input + + nactu: (int): number of actuators as output + + centroider_type: (list): type of centroiders + + delay: (list): delay of each controller + + offset: (list): offset added in the cog computation of each WFS + + scale: (list): scale factor used in the cog computation of each WFS + + Kwargs: + brahma: (bool) : brahma flag (default=False) + + fp16: (bool) : fp16 flag (default=False) + + cacao: (bool) : cacao flag (default=False) + + Returns: + Rtc : (Rtc) : Rtc object """ print("start rtc_standalone") if brahma: @@ -182,7 +227,7 @@ def rtc_standalone(context: carmaWrap_context, nwfs: int, nvalid: list, nactu: i context.active_device, centroider_type[k]) nslopes = sum([c.nslopes for c in rtc.d_centro]) - rtc.add_controller(context, sum(nvalid), nslopes, nactu, delay[k], + rtc.add_controller(context, sum(nvalid), nslopes, nactu, delay[0], context.active_device, "generic", idx_centro=np.arange(nwfs), ncentro=nwfs) @@ -195,12 +240,17 @@ def init_centroider(context, nwfs: int, p_wfs: conf.Param_wfs, p_atmos: conf.Param_atmos, wfs: Sensors, rtc: Rtc): """ Initialize a centroider object in Rtc - :parameters: + Args: context: (carmaWrap_context): context + nwfs : (int) : index of wfs + p_wfs : (Param_wfs): wfs settings + p_centroider : (Param_centroider) : centroider settings + wfs: (Sensors): Sensor object + rtc : (Rtc) : Rtc object """ if (p_wfs.type == 
scons.WFSType.SH): @@ -269,12 +319,13 @@ def init_centroider(context, nwfs: int, p_wfs: conf.Param_wfs, def comp_weights(p_centroider: conf.Param_centroider, p_wfs: conf.Param_wfs, npix: int): - """ - Compute the weights used by centroider wcog and corr + """ Compute the weights used by centroider wcog and corr - :parameters: + Args: p_centroider : (Param_centroider) : centroider settings + p_wfs : (Param_wfs) : wfs settings + npix: (int): """ if (p_centroider.type_fct == scons.CentroiderFctType.MODEL): @@ -329,25 +380,45 @@ def init_controller(context, i: int, p_controller: conf.Param_controller, p_wfss wfs: Sensors, tel: Telescope, atmos: Atmos, p_centroiders: List[conf.Param_centroider], do_refslp=False, dataBase={}, use_DB=False): - """ - Initialize the controller part of rtc + """ Initialize the controller part of rtc - :parameters: + Args: context: (carmaWrap_context): context + i : (int) : controller index + p_controller: (Param_controller) : controller settings + p_wfss: (list of Param_wfs) : wfs settings + p_geom: (Param_geom) : geom settings + p_dms: (list of Param_dms) : dms settings + p_atmos: (Param_atmos) : atmos settings + ittime: (float) : iteration time [s] + p_tel: (Param_tel) : telescope settings + rtc: (Rtc) : Rtc objet + dms: (Dms) : Dms object + wfs: (Sensors) : Sensors object + tel: (Telescope) : Telescope object + atmos: (Atmos) : Atmos object + p_centroiders: (list of Param_centroider): centroiders settings + + Kwargs: + do_refslp: (bool): do the reference slopes at startup, + + dataBase: (dict): database used + + use_DB: (bool): use database or not """ if (p_controller.type != scons.ControllerType.GEO): nwfs = p_controller.nwfs @@ -417,16 +488,22 @@ def init_controller(context, i: int, p_controller: conf.Param_controller, p_wfss def init_controller_geo(i: int, rtc: Rtc, dms: Dms, p_geom: conf.Param_geom, p_controller: conf.Param_controller, p_dms: list, roket=False): - """ - Initialize geometric controller + """ Initialize geometric 
controller - :parameters: + Args: i: (int): controller index + rtc: (Rtc): rtc object + dms: (Dms): Dms object + p_geom: (Param_geom): geometry settings + p_controller: (Param_controller): controller settings + p_dms: (list of Param_dms): dms settings + + Kwargs roket: (bool): Flag to initialize ROKET """ indx_pup = np.where(p_geom._spupil.flatten('F'))[0].astype(np.int32) @@ -453,22 +530,39 @@ def init_controller_ls(i: int, p_controller: conf.Param_controller, p_wfss: list ittime: float, p_tel: conf.Param_tel, rtc: Rtc, dms: Dms, wfs: Sensors, tel: Telescope, atmos: Atmos, dataBase: dict = {}, use_DB: bool = False): - """ - Initialize the least square controller - :parameters: + """ Initialize the least square controller + + Args: i : (int) : controller index + p_controller: (Param_controller) : controller settings + p_wfss: (list of Param_wfs) : wfs settings + p_geom: (Param_geom) : geom settings + p_dms: (list of Param_dms) : dms settings + p_atmos: (Param_atmos) : atmos settings + ittime: (float) : iteration time [s] + p_tel: (Param_tel) : telescope settings + rtc: (Rtc) : Rtc objet + dms: (Dms) : Dms object + wfs: (Sensors) : Sensors object + tel: (Telescope) : Telescope object + atmos: (Atmos) : Atmos object + + Kwargs: + dataBase: (dict): database used + + use_DB: (bool): use database or not """ M2V = None if p_controller.do_kl_imat: @@ -515,13 +609,17 @@ def init_controller_ls(i: int, p_controller: conf.Param_controller, p_wfss: list def init_controller_cured(i: int, rtc: Rtc, p_controller: conf.Param_controller, p_dms: list, p_wfss: list): - """ - Initialize the CURED controller - :parameters: + """ Initialize the CURED controller + + Args: i : (int) : controller index + rtc: (Rtc) : Rtc objet + p_controller: (Param_controller) : controller settings + p_dms: (list of Param_dms) : dms settings + p_wfss: (list of Param_wfs) : wfs settings """ @@ -539,20 +637,29 @@ def init_controller_mv(i: int, p_controller: conf.Param_controller, p_wfss: list p_geom: 
conf.Param_geom, p_dms: list, p_atmos: conf.Param_atmos, p_tel: conf.Param_tel, rtc: Rtc, dms: Dms, wfs: Sensors, atmos: Atmos): - """ - Initialize the MV controller + """ Initialize the MV controller - :parameters: + Args: i : (int) : controller index + p_controller: (Param_controller) : controller settings + p_wfss: (list of Param_wfs) : wfs settings + p_geom: (Param_geom) : geom settings + p_dms: (list of Param_dms) : dms settings + p_atmos: (Param_atmos) : atmos settings + p_tel: (Param_tel) : telescope settings + rtc: (Rtc) : Rtc objet + dms: (Dms) : Dms object + wfs: (Sensors) : Sensors object + atmos: (Atmos) : Atmos object """ p_controller._imat = imats.imat_geom(wfs, dms, p_wfss, p_dms, p_controller) @@ -569,13 +676,15 @@ def init_controller_mv(i: int, p_controller: conf.Param_controller, p_wfss: list def init_controller_generic(i: int, p_controller: conf.Param_controller, p_dms: list, rtc: Rtc): - """ - Initialize the generic controller + """ Initialize the generic controller - :parameters: + Args: i: (int): controller index + p_controller: (Param_controller): controller settings + p_dms: (list of Param_dm): dms settings + rtc: (Rtc): Rtc object """ size = sum([p_dms[j]._ntotact for j in range(len(p_dms))]) @@ -584,6 +693,9 @@ def init_controller_generic(i: int, p_controller: conf.Param_controller, p_dms: matE = np.identity(size, dtype=np.float32) cmat = np.zeros((size, p_controller.nslope), dtype=np.float32) + if p_controller.command_law is not None: + rtc.d_control[i].set_commandlaw(p_controller.command_law) + rtc.d_control[i].set_decayFactor(decayFactor) rtc.d_control[i].set_modal_gains(mgain) rtc.d_control[i].set_cmat(cmat) diff --git a/shesha/init/target_init.py b/shesha/init/target_init.py index 35f37d9..d8d9800 100644 --- a/shesha/init/target_init.py +++ b/shesha/init/target_init.py @@ -1,7 +1,7 @@ ## @package shesha.init.target_init ## @brief Initialization of a Target object ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 
2020/05/18 ## @copyright GNU Lesser General Public License # @@ -49,7 +49,7 @@ def target_init(ctxt: carmaWrap_context, telescope: Telescope, p_targets: list, p_geom: conf.Param_geom, dm=None, brahma=False): """Create a cython target from parametres structures - :parameters: + Args: ctxt: (carmaWrap_context) : telescope: (Telescope): Telescope object p_targets: (lis of Param_target) : target_settings diff --git a/shesha/init/wfs_init.py b/shesha/init/wfs_init.py index d7d16d5..f20e1f7 100644 --- a/shesha/init/wfs_init.py +++ b/shesha/init/wfs_init.py @@ -1,7 +1,7 @@ ## @package shesha.init.wfs_init ## @brief Initialization of a Sensors object ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -49,7 +49,7 @@ def wfs_init(context: carmaWrap_context, telescope: Telescope, p_wfss: list, """ Create and initialise a Sensors object - :parameters: + Args: context : (carmaWrap_context) telescope: (Telescope) : Telescope object p_wfss: (list of Param_wfs) : wfs settings diff --git a/shesha/scripts/__init__.py b/shesha/scripts/__init__.py index 8b438e7..94af787 100644 --- a/shesha/scripts/__init__.py +++ b/shesha/scripts/__init__.py @@ -1,7 +1,7 @@ -## @package shesha.script +## @package shesha.scripts ## @brief Python package for COMPASS simulation scripts ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # diff --git a/shesha/scripts/closed_loop.py b/shesha/scripts/closed_loop.py index 20827d9..86eae7f 100755 --- a/shesha/scripts/closed_loop.py +++ b/shesha/scripts/closed_loop.py @@ -1,9 +1,9 @@ #!/usr/bin/env python -## @package shesha.script.closed_loop +## @package shesha.scripts.closed_loop ## @brief script test to simulate a closed loop ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -54,16 +54,16 @@ -g, --generic Use 
generic controller -f, --fast Compute PSF only during monitoring """ -from shesha.util.utilities import load_config_from_file +from shesha.config import ParamConfig from docopt import docopt - if __name__ == "__main__": arguments = docopt(__doc__) + param_file = arguments[""] compute_tar_psf = not arguments["--fast"] - config = load_config_from_file(param_file) + config = ParamConfig(param_file) # Get parameters from file if arguments["--bench"]: diff --git a/shesha/scripts/dm_standalone.py b/shesha/scripts/dm_standalone.py index e4822ed..f055d24 100644 --- a/shesha/scripts/dm_standalone.py +++ b/shesha/scripts/dm_standalone.py @@ -1,7 +1,7 @@ -## @package shesha.script.dm_standalone +## @package shesha.scripts.dm_standalone ## @brief Python dm standalone script ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # diff --git a/shesha/supervisor/__init__.py b/shesha/supervisor/__init__.py index 2edbf71..adaa0bb 100644 --- a/shesha/supervisor/__init__.py +++ b/shesha/supervisor/__init__.py @@ -1,7 +1,7 @@ ## @package shesha.supervisor ## @brief User layer for initialization and execution of a COMPASS simulation ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # diff --git a/shesha/supervisor/benchSupervisor.py b/shesha/supervisor/benchSupervisor.py index 37502b5..6c8e378 100644 --- a/shesha/supervisor/benchSupervisor.py +++ b/shesha/supervisor/benchSupervisor.py @@ -1,7 +1,7 @@ ## @package shesha.supervisor.benchSupervisor ## @brief Initialization and execution of a Bench supervisor ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -42,34 +42,165 @@ from shesha.init.rtc_init import rtc_standalone from shesha.sutra_wrap import carmaWrap_context -from shesha.supervisor.aoSupervisor import AoSupervisor +from 
shesha.supervisor.components import RtcStandalone + +from shesha.supervisor.genericSupervisor import GenericSupervisor + +from shesha.util.utilities import load_config_from_file from typing import Callable -class BenchSupervisor(AoSupervisor): +class BenchSupervisor(GenericSupervisor): + """ This class implements generic supervisor to handle compass simulation + + Attributes inherited from GenericSupervisor: + context : (CarmaContext) : a CarmaContext instance + + config : (config) : Parameters structure + + is_init : (bool) : Flag equals to True if the supervisor has already been initialized + + iter : (int) : Frame counter + + Attributes: + rtc : (RtcComponent) : A Rtc component instance + + cacao : (bool) : CACAO features enabled in the RTC + + basis : (ModalBasis) : a ModalBasis instance (optimizer) - def __init__(self, config_file: str = None, brahma: bool = False, - cacao: bool = False): + calibration : (Calibration) : a Calibration instance (optimizer) + """ + + def __init__(self, config_file: str = None, cacao: bool = False): """ Init the COMPASS wih the config_file Kwargs: config_file : (str) : path to the configuration file. Default is None - brahma : (bool) : Flag to use brahma. Default is False - cacao : (bool) : Flag to use cacao rtc. 
Default is False """ self.pause_loop = None self.rtc = None self.frame = None - self.brahma = brahma self.cacao = cacao self.iter = 0 self.slopes_index = None + config = load_config_from_file(config_file) - if config_file is not None: - self.load_config(config_file=config_file) + GenericSupervisor.__init__(self, config) + + def _init_rtc(self): + """Initialize the rtc component of the supervisor as a RtcCompass + """ + print("->RTC") + self.number_of_wfs = len(self.config.p_wfss) + print("Configuration of", self.number_of_wfs, "wfs ...") + + if (hasattr(self.config, 'p_loop') and self.config.p_loop.devices.size > 1): + self.context = carmaWrap_context.get_instance_ngpu( + self.config.p_loop.devices.size, self.config.p_loop.devices) + else: + self.context = carmaWrap_context.get_instance_1gpu( + self.config.p_loop.devices[0]) + nact = self.config.p_controllers[0].nactu + + nvalid = [] + centroider_type = [] + delay = [] + offset = [] + scale = [] + gain = [] + cmat_size = [] + npix = [] + + # Get parameters + for wfs in range(self.number_of_wfs): + + if self.config.p_wfss[wfs].type == WFSType.SH: + npix.append(self.config.p_wfss[wfs].npix) + if self.config.p_wfss[wfs]._validsubsx is None or \ + self.config.p_wfss[wfs]._validsubsy is None: + + from hraa.tools.doit import makessp + roiTab = makessp(self.config.p_wfss[wfs].nxsub, obs=0., rmax=0.98) + self.config.p_wfss[wfs]._nvalid = roiTab[0].size + self.config.p_wfss[ + wfs]._validsubsx = roiTab[0] * self.config.p_wfss[wfs].npix + self.config.p_wfss[ + wfs]._validsubsy = roiTab[1] * self.config.p_wfss[wfs].npix + else: + self.config.p_wfss[wfs]._nvalid = self.config.p_wfss[ + wfs]._validsubsx.size + + nvalid.append( + np.array([self.config.p_wfss[wfs]._nvalid], dtype=np.int32)) + # print("nvalid : %d" % nvalid[wfs]) + centroider_type.append(self.config.p_centroiders[wfs].type) + delay.append(self.config.p_controllers[0].delay) # ??? 
+ offset.append((self.config.p_wfss[wfs].npix - 1) / 2) + scale.append(1) + gain.append(1) + cmat_size.append(2 * nvalid[wfs][0]) + + elif self.config.p_wfss[wfs].type == WFSType.PYRHR or self.config.p_wfss[ + wfs].type == WFSType.PYRLR: + nvalid.append( + np.array([self.config.p_wfss[wfs]._nvalid], + dtype=np.int32)) # Number of valid SUBAPERTURES + centroider_type.append(self.config.p_centroiders[wfs].type) + delay.append(self.config.p_controllers[0].delay) # ??? + offset.append(0) + scale.append(1) + gain.append(1) + cmat_size.append(self.config.p_wfss[wfs].nPupils * nvalid[wfs][0]) + npix.append(0) + else: + raise ValueError('WFS type not supported') + + # Create RTC + self.rtc = RtcStandalone(self.context, self.config, self.number_of_wfs, nvalid, + nact, centroider_type, delay, offset, scale, + cacao=self.cacao) + + self.slopes_index = np.cumsum([0] + + [wfs.nslopes for wfs in self.rtc._rtc.d_centro]) + + # Create centroiders + for wfs in range(self.number_of_wfs): + self.rtc._rtc.d_centro[wfs].load_validpos( + self.config.p_wfss[wfs]._validsubsx, + self.config.p_wfss[wfs]._validsubsy, + self.config.p_wfss[wfs]._validsubsx.size) + if self.config.p_centroiders[wfs].type is CentroiderType.BPCOG: + self.rtc._rtc.d_centro[wfs].set_nmax(self.config.p_centroiders[wfs].nmax) + self.rtc._rtc.d_centro[wfs].set_npix(npix[wfs]) + # finally + self.config.p_centroiders[wfs]._nslope = self.rtc._rtc.d_centro[wfs].nslopes + print("wfs ", wfs, " set as ", centroider_type[wfs]) + size = sum(cmat_size) + cMat = np.zeros((nact, size), dtype=np.float32) + print("Size of cMat:", cMat.shape) + + # Initiate RTC + self.rtc._rtc.d_control[0].set_cmat(cMat) + self.rtc._rtc.d_control[0].set_decayFactor( + np.ones(nact, dtype=np.float32) * (gain[0] - 1)) + self.rtc._rtc.d_control[0].set_matE(np.identity(nact, dtype=np.float32)) + self.rtc._rtc.d_control[0].set_modal_gains( + np.ones(nact, dtype=np.float32) * -gain[0]) + + print("RTC initialized") + self.is_init = True + + def 
_init_components(self) -> None: + """ Initialize all the components + """ + if self.config.p_controllers is not None or self.config.p_centroiders is not None: + self._init_rtc() + + GenericSupervisor._init_components(self) # _ _ _ _ # / \ | |__ ___| |_ _ __ __ _ ___| |_ @@ -83,14 +214,15 @@ def __init__(self, config_file: str = None, brahma: bool = False, # | | | | __/ |_| | | | (_) | (_| \__ \ # |_| |_|\___|\__|_| |_|\___/ \__,_|___/ - def single_next(self) -> None: + def next(self) -> None: """ Performs a single loop iteration """ self.load_new_wfs_frame() + if (self.pause_loop is not True): self.compute_wfs_frame() - self.set_command(0, np.array(self.rtc.d_control[0].d_voltage)) - if self.brahma or self.cacao: + self.set_command(0, np.array(self.rtc._rtc.d_control[0].d_voltage)) + if self.cacao: self.rtc.publish() self.iter += 1 @@ -110,18 +242,18 @@ def set_command(self, nctrl: int, command: np.ndarray) -> None: # Do stuff self.dm_set_callback(command) # Btw, update the RTC state with the information - # self.rtc.d_control[nctrl].set_com(command, command.size) + # self.rtc._rtc.d_control[nctrl].set_com(command, command.size) def get_command(self) -> np.ndarray: """ Get command from DM - Return: + Returns: command : (np.ndarray) : Command vector """ # Do something command = self.dm_get_callback() # Btw, update the RTC state with the information - # self.rtc.d_control[nControl].set_com(command, command.size) + # self.rtc._rtc.d_control[nControl].set_com(command, command.size) return command @@ -136,9 +268,9 @@ def __repr__(self): s = '--- BenchSupervisor ---\nRTC: ' + repr(self.rtc) if hasattr(self, '_cam'): - s += '\nCAM: ' + repr(self._cam) + s += '\nCAM: ' + repr(cam) if hasattr(self, '_dm'): - s += '\nDM: ' + repr(self._dm) + s += '\nDM: ' + repr(dm) return s def load_new_wfs_frame(self, centro_index: int = 0) -> None: @@ -151,22 +283,23 @@ def load_new_wfs_frame(self, centro_index: int = 0) -> None: if (type(self.frame) is tuple): centro_index = 
len(self.frame) for i in range(centro_index): - self.rtc.d_centro[i].load_img(self.frame[i], self.frame[i].shape[0], - self.frame[i].shape[1]) + self.rtc._rtc.d_centro[i].load_img(self.frame[i], self.frame[i].shape[0], + self.frame[i].shape[1], -1) else: - self.rtc.d_centro[centro_index].load_img(self.frame, self.frame.shape[0], - self.frame.shape[1]) + self.rtc._rtc.d_centro[centro_index].load_img(self.frame, + self.frame.shape[0], + self.frame.shape[1], -1) def compute_wfs_frame(self): """ Compute the WFS frame: calibrate, centroid, commands. """ - # for index, centro in enumerate(self.rtc.d_centro): - for centro in self.rtc.d_centro: + # for index, centro in enumerate(self.rtc._rtc.d_centro): + for centro in self.rtc._rtc.d_centro: centro.calibrate_img() self.rtc.do_centroids(0) self.rtc.do_control(0) self.rtc.do_clipping(0) - self.rtc.comp_voltage(0) + self.rtc._rtc.comp_voltage(0) def set_one_actu(self, nctrl: int, nactu: int, *, ampli: float = 1, reset: bool = True) -> None: @@ -193,7 +326,7 @@ def force_context(self) -> None: Required for using with widgets, due to multithreaded init and in case GPU 0 is not used by the simu """ - if self.is_init() and self.context is not None: + if self.is_init and self.context is not None: current_device = self.context.active_device for device in range(len(self.config.p_loop.devices)): self.context.set_active_device_force(device) @@ -203,7 +336,7 @@ def reset_dm(self) -> None: """ Reset the DM """ if hasattr(self, '_dm'): - self._dm.reset_dm() + dm.reset_dm() def reset_command(self, nctrl: int = -1) -> None: """ Reset the nctrl Controller command buffer, reset all controllers if nctrl == -1 @@ -212,10 +345,10 @@ def reset_command(self, nctrl: int = -1) -> None: nctrl : (int) : Controller index. 
If -1 (default), all controllers commands are reset """ if (nctrl == -1): # All Dms reset - for control in self.rtc.d_control: + for control in self.rtc._rtc.d_control: control.d_com.reset() else: - self.rtc.d_control[nctrl].d_com.reset() + self.rtc._rtc.d_control[nctrl].d_com.reset() def load_config(self, config_file: str = None) -> None: """ Init the COMPASS with the config_file @@ -223,8 +356,8 @@ def load_config(self, config_file: str = None) -> None: Args: config_file : (str) : path to the configuration file """ - from shesha.util.utilities import load_config_from_file - load_config_from_file(self, config_file) + from shesha.config import ParamConfig + self.config = ParamConfig(config_file) def set_cam_callback(self, cam_callback: Callable): """ Set the externally defined function that allows to grab frames @@ -244,107 +377,6 @@ def set_dm_callback(self, dm_get_callback: Callable, dm_set_callback: Callable): self.dm_get_callback = dm_get_callback self.dm_set_callback = dm_set_callback - def init(self) -> None: - """ Initialize the bench - """ - print("->RTC") - self.number_of_wfs = len(self.config.p_wfss) - print("Configuration of", self.number_of_wfs, "wfs ...") - - if (hasattr(self.config, 'p_loop') and self.config.p_loop.devices.size > 1): - self.context = carmaWrap_context.get_instance_ngpu( - self.config.p_loop.devices.size, self.config.p_loop.devices) - else: - self.context = carmaWrap_context.get_instance_1gpu( - self.config.p_loop.devices[0]) - nact = self.config.p_controllers[0].nactu - self._nvalid = [] - self._centroider_type = [] - self._delay = [] - self._offset = [] - self._scale = [] - self._gain = [] - self._cmat_size = [] - self._npix = [] - - # Get parameters - for wfs in range(self.number_of_wfs): - - if self.config.p_wfss[wfs].type == WFSType.SH: - self._npix.append(self.config.p_wfss[wfs].npix) - if self.config.p_wfss[wfs]._validsubsx is None or \ - self.config.p_wfss[wfs]._validsubsy is None: - - from hraa.tools.doit import makessp - 
roiTab = makessp(self.config.p_wfss[wfs].nxsub, obs=0., rmax=0.98) - self.config.p_wfss[wfs]._nvalid = roiTab[0].size - self.config.p_wfss[ - wfs]._validsubsx = roiTab[0] * self.config.p_wfss[wfs].npix - self.config.p_wfss[ - wfs]._validsubsy = roiTab[1] * self.config.p_wfss[wfs].npix - else: - self.config.p_wfss[wfs]._nvalid = self.config.p_wfss[ - wfs]._validsubsx.size - - self._nvalid.append( - np.array([self.config.p_wfss[wfs]._nvalid], dtype=np.int32)) - # print("nvalid : %d" % self._nvalid[wfs]) - self._centroider_type.append(self.config.p_centroiders[wfs].type) - self._delay.append(self.config.p_controllers[0].delay) # ??? - self._offset.append((self.config.p_wfss[wfs].npix - 1) / 2) - self._scale.append(1) - self._gain.append(1) - self._cmat_size.append(2 * self._nvalid[wfs][0]) - - elif self.config.p_wfss[wfs].type == WFSType.PYRHR or self.config.p_wfss[ - wfs].type == WFSType.PYRLR: - self._nvalid.append( - np.array([self.config.p_wfss[wfs]._nvalid], - dtype=np.int32)) # Number of valid SUBAPERTURES - self._centroider_type.append(self.config.p_centroiders[wfs].type) - self._delay.append(self.config.p_controllers[0].delay) # ??? 
- self._offset.append(0) - self._scale.append(1) - self._gain.append(1) - self._cmat_size.append( - self.config.p_wfss[wfs].nPupils * self._nvalid[wfs][0]) - self._npix.append(0) - else: - raise ValueError('WFS type not supported') - - # Create RTC - self.rtc = rtc_standalone(self.context, self.number_of_wfs, self._nvalid, nact, - self._centroider_type, self._delay, self._offset, - self._scale, brahma=self.brahma, cacao=self.cacao) - - self.slopes_index = np.cumsum([0] + [wfs.nslopes for wfs in self.rtc.d_centro]) - - # Create centroiders - for wfs in range(self.number_of_wfs): - self.rtc.d_centro[wfs].load_validpos( - self.config.p_wfss[wfs]._validsubsx, - self.config.p_wfss[wfs]._validsubsy, - self.config.p_wfss[wfs]._validsubsx.size) - if self.config.p_centroiders[wfs].type is CentroiderType.BPCOG: - self.rtc.d_centro[wfs].set_nmax(self.config.p_centroiders[wfs].nmax) - self.rtc.d_centro[wfs].set_npix(self._npix[wfs]) - # finally - self.config.p_centroiders[wfs]._nslope = self.rtc.d_centro[wfs].nslopes - print("wfs ", wfs, " set as ", self._centroider_type[wfs]) - size = sum(self._cmat_size) - cMat = np.zeros((nact, size), dtype=np.float32) - print("Size of cMat:", cMat.shape) - - # Initiate RTC - self.rtc.d_control[0].set_cmat(cMat) - self.rtc.d_control[0].set_decayFactor( - np.ones(nact, dtype=np.float32) * (self._gain[0] - 1)) - self.rtc.d_control[0].set_matE(np.identity(nact, dtype=np.float32)) - self.rtc.d_control[0].set_modal_gains( - np.ones(nact, dtype=np.float32) * -self._gain[0]) - self.is_init = True - print("RTC initialized") - def adaptive_windows(self, init_config=False, centro_index: int = 0): """ Re-centre the centroiding boxes around the spots, and loads the new box coordinates in the slopes computation supervisor @@ -359,8 +391,8 @@ def adaptive_windows(self, init_config=False, centro_index: int = 0): # reset de la configuration initiale ij_subap = self.config.p_wfss[centro_index].get_validsub() nsubap = ij_subap.shape[1] - 
self.rtc.d_centro[centro_index].load_validpos(ij_subap[0], ij_subap[1], - nsubap) + self.rtc._rtc.d_centro[centro_index].load_validpos( + ij_subap[0], ij_subap[1], nsubap) else: # acquire slopes first nslopes = 10 @@ -373,8 +405,8 @@ def adaptive_windows(self, init_config=False, centro_index: int = 0): s /= nslopes # get coordinates of valid sub-apertures #ij_subap = self.config.p_wfss[centro_index].get_validsub() - i_subap = np.array(self.rtc.d_centro[centro_index].d_validx) - j_subap = np.array(self.rtc.d_centro[centro_index].d_validy) + i_subap = np.array(self.rtc._rtc.d_centro[centro_index].d_validx) + j_subap = np.array(self.rtc._rtc.d_centro[centro_index].d_validy) # get number of subaps nsubap = i_subap.shape[0] # reshape the array to be conformable with @@ -383,8 +415,8 @@ def adaptive_windows(self, init_config=False, centro_index: int = 0): new_i_subap = (i_subap + s[0, :].round()).astype(int) new_j_subap = (j_subap + s[1, :].round()).astype(int) # load the new positions of boxes - self.rtc.d_centro[centro_index].load_validpos(new_i_subap, new_j_subap, - nsubap) + self.rtc._rtc.d_centro[centro_index].load_validpos( + new_i_subap, new_j_subap, nsubap) def get_current_windows_pos(self, centro_index: int = 0): """ Returns the currently used subapertures positions @@ -392,18 +424,30 @@ def get_current_windows_pos(self, centro_index: int = 0): Args: centro_index : (int) : Index of the centroider - Return: + Returns: current_pos : (tuple) : (i_subap, j_subap) """ - i_subap = np.array(self.rtc.d_centro[centro_index].d_validx) - j_subap = np.array(self.rtc.d_centro[centro_index].d_validy) + i_subap = np.array(self.rtc._rtc.d_centro[centro_index].d_validx) + j_subap = np.array(self.rtc._rtc.d_centro[centro_index].d_validy) return i_subap, j_subap def get_slopes_index(self): """ Return the index of the first position of each WFS slopes vector inside the global RTC slopes vector - Return: + Returns: slopes_index : (np.ndarray) : Slopes index """ return 
self.slopes_index + + def export_config(self): + """ + Extract and convert compass supervisor configuration parameters + into 2 dictionnaries containing relevant AO parameters + + Args: + root: (object), COMPASS supervisor object to be parsed + + Returns : 2 dictionnaries + """ + return self.config.export_config() diff --git a/shesha/supervisor/canapassSupervisor.py b/shesha/supervisor/canapassSupervisor.py index 1853bc0..542db26 100644 --- a/shesha/supervisor/canapassSupervisor.py +++ b/shesha/supervisor/canapassSupervisor.py @@ -1,7 +1,7 @@ ## @package shesha.supervisor.canapassSupervisor ## @brief Initialization and execution of a CANAPASS supervisor ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -107,48 +107,61 @@ def __init__(self, config, cacao: bool = True) -> None: # ctrl = self._sim.rtc.d_control[control] # ctrl.set_commandlaw('integrator') +class loopHandler: + + def __init__(self): + pass + + def start(self): + pass + + def stop(self): + pass + + def alive(self): + return "alive" + if __name__ == '__main__': from docopt import docopt - from shesha.util.utilities import load_config_from_file + from shesha.config import ParamConfig arguments = docopt(__doc__) - config = load_config_from_file(arguments[""]) + config = ParamConfig(arguments[""]) supervisor = CanapassSupervisor(config, cacao=True) if (arguments["--freq"]): print("Warning changed frequency loop to: ", arguments["--freq"]) - supervisor.config.p_loop.set_ittime(1 / float(arguments["--freq"])) + config.p_loop.set_ittime(1 / float(arguments["--freq"])) if (arguments["--delay"]): print("Warning changed delay loop to: ", arguments["--delay"]) - supervisor.config.p_controllers[0].set_delay(float(arguments["--delay"])) + config.p_controllers[0].set_delay(float(arguments["--delay"])) + supervisor = CanapassSupervisor(config, cacao=True) try: from subprocess import Popen, PIPE from hraa.server.pyroServer import 
PyroServer + # Init looper + wao_loop = loopHandler() + # Find username p = Popen("whoami", shell=True, stdout=PIPE, stderr=PIPE) out, err = p.communicate() if (err != b''): print(err) - raise ValueError("ERROR CANNOT RECOGNIZE USER") + raise Exception("ERROR CANNOT RECOGNIZE USER") else: user = out.split(b"\n")[0].decode("utf-8") print("User is " + user) - devices = [ - supervisor, supervisor.rtc, supervisor.wfs, supervisor.target, - supervisor.tel, supervisor.basis, supervisor.calibration, - supervisor.atmos, supervisor.dms - ] - - names = [ - "supervisor", "supervisor_rtc", "supervisor_wfs", "supervisor_target", - "supervisor_tel", "supervisor_basis", "supervisor_calibration", - "supervisor_atmos", "supervisor_dms" - ] - nname = [] + devices = [supervisor, supervisor.rtc, supervisor.wfs, + supervisor.target, supervisor.tel,supervisor.basis, supervisor.calibration, + supervisor.atmos, supervisor.dms, supervisor.config, supervisor.modalgains, wao_loop] + + names = ["supervisor", "supervisor_rtc", "supervisor_wfs", + "supervisor_target", "supervisor_tel", "supervisor_basis", "supervisor_calibration", + "supervisor_atmos", "supervisor_dms", "supervisor_config", "supervisor_modalgains", "wao_loop"] + nname = []; for name in names: - nname.append(name + "_" + user) - server = PyroServer(listDevices=devices, listNames=names) - #server.add_device(supervisor, "waoconfig_" + user) + nname.append(name+"_"+user) + server = PyroServer(listDevices=devices, listNames=nname) server.start() except: raise EnvironmentError( diff --git a/shesha/supervisor/compassSupervisor.py b/shesha/supervisor/compassSupervisor.py index 4105962..c7dd59c 100644 --- a/shesha/supervisor/compassSupervisor.py +++ b/shesha/supervisor/compassSupervisor.py @@ -1,7 +1,7 @@ ## @package shesha.supervisor.compassSupervisor ## @brief Initialization and execution of a COMPASS supervisor ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public 
License # @@ -37,8 +37,9 @@ from shesha.supervisor.genericSupervisor import GenericSupervisor from shesha.supervisor.components import AtmosCompass, DmCompass, RtcCompass, TargetCompass, TelescopeCompass, WfsCompass -from shesha.supervisor.optimizers import ModalBasis, Calibration +from shesha.supervisor.optimizers import ModalBasis, Calibration, ModalGains import numpy as np +import time import shesha.constants as scons @@ -48,12 +49,17 @@ class CompassSupervisor(GenericSupervisor): """ This class implements generic supervisor to handle compass simulation - Attributes: + Attributes inherited from GenericSupervisor: context : (CarmaContext) : a CarmaContext instance config : (config) : Parameters structure - tel : (TelescopeComponent) : a TelescopeComponent instance + is_init : (bool) : Flag equals to True if the supervisor has already been initialized + + iter : (int) : Frame counter + + Attributes: + telescope : (TelescopeComponent) : a TelescopeComponent instance atmos : (AtmosComponent) : An AtmosComponent instance @@ -65,15 +71,15 @@ class CompassSupervisor(GenericSupervisor): rtc : (RtcComponent) : A Rtc component instance - is_init : (bool) : Flag equals to True if the supervisor has already been initialized - - iter : (int) : Frame counter - cacao : (bool) : CACAO features enabled in the RTC basis : (ModalBasis) : a ModalBasis instance (optimizer) calibration : (Calibration) : a Calibration instance (optimizer) + + modalgains : (ModalGains) : a ModalGain instance (optimizer) using CLOSE algorithm + + close_modal_gains : (list of floats) : list of the previous values of the modal gains """ def __init__(self, config, *, cacao: bool = False): @@ -84,13 +90,23 @@ def __init__(self, config, *, cacao: bool = False): Kwargs: cacao : (bool) : If True, enables CACAO features in RTC (Default is False) - /!\ Requires OCTOPUS to be installed + Requires OCTOPUS to be installed """ self.cacao = cacao + self.telescope = None + self.atmos = None + self.target = None + 
self.wfs = None + self.dms = None + self.rtc = None GenericSupervisor.__init__(self, config) self.basis = ModalBasis(self.config, self.dms, self.target) self.calibration = Calibration(self.config, self.tel, self.atmos, self.dms, self.target, self.rtc, self.wfs) + self.modalgains = ModalGains(self.config, self.rtc) + self.close_modal_gains = [] + + # ___ _ __ __ _ _ _ # / __|___ _ _ ___ _ _(_)__ | \/ |___| |_| |_ ___ __| |___ # | (_ / -_) ' \/ -_) '_| / _| | |\/| / -_) _| ' \/ _ \/ _` (_-< @@ -136,6 +152,27 @@ def _init_rtc(self): else: raise ValueError("Configuration not loaded or Telescope not initilaized") + def _init_components(self) -> None: + """ Initialize all the components + """ + + if self.config.p_tel is None or self.config.p_geom is None: + raise ValueError("Telescope geometry must be defined (p_geom and p_tel)") + self._init_tel() + + if self.config.p_atmos is not None: + self._init_atmos() + if self.config.p_dms is not None: + self._init_dms() + if self.config.p_targets is not None: + self._init_target() + if self.config.p_wfss is not None: + self._init_wfs() + if self.config.p_controllers is not None or self.config.p_centroiders is not None: + self._init_rtc() + + GenericSupervisor._init_components(self) + def next(self, *, move_atmos: bool = True, nControl: int = 0, tar_trace: Iterable[int] = None, wfs_trace: Iterable[int] = None, do_control: bool = True, apply_control: bool = True, @@ -160,17 +197,26 @@ def next(self, *, move_atmos: bool = True, nControl: int = 0, compute_tar_psf : (bool) : If True (default), computes the PSF at the end of the iteration """ - if ( - self.config.p_controllers is not None and - self.config.p_controllers[nControl].type == scons.ControllerType.GEO): - if tar_trace is None and self.target is not None: - tar_trace = range(len(self.config.p_targets)) - if wfs_trace is None and self.wfs is not None: - wfs_trace = range(len(self.config.p_wfss)) - - if move_atmos and self.atmos is not None: - self.atmos.move_atmos() - + try: 
+ iter(nControl) + except TypeError: + # nControl is not an iterable creating a list + nControl = [nControl] + + #get the index of the first GEO controller (-1 if there is no GEO controller) + geo_index = next(( i for i,c in enumerate(self.config.p_controllers) + if c.type== scons.ControllerType.GEO ), -1) + + if tar_trace is None and self.target is not None: + tar_trace = range(len(self.config.p_targets)) + if wfs_trace is None and self.wfs is not None: + wfs_trace = range(len(self.config.p_wfss)) + + if move_atmos and self.atmos is not None: + self.atmos.move_atmos() + # in case there is at least 1 controller GEO in the controller list : use this one only + if ( geo_index > -1): + nControl = geo_index if tar_trace is not None: for t in tar_trace: if self.atmos.is_enable: @@ -185,18 +231,124 @@ def next(self, *, move_atmos: bool = True, nControl: int = 0, self.rtc.apply_control(nControl) if self.cacao: self.rtc.publish() - if compute_tar_psf: - for tar_index in tar_trace: - self.target.comp_tar_image(tar_index) - self.target.comp_strehl(tar_index) + else: + if tar_trace is not None: # already checked at line 213? + for t in tar_trace: + if self.atmos.is_enable: + self.target.raytrace(t, tel=self.tel, atm=self.atmos, + dms=self.dms) + else: + self.target.raytrace(t, tel=self.tel, dms=self.dms) - self.iter += 1 + if wfs_trace is not None: # already checked at line 215? 
+ for w in wfs_trace: + if self.atmos.is_enable: + self.wfs.raytrace(w, tel=self.tel, atm=self.atmos) + else: + self.wfs.raytrace(w, tel=self.tel) - else: - GenericSupervisor.next(self, move_atmos=move_atmos, nControl=nControl, - tar_trace=tar_trace, wfs_trace=wfs_trace, - do_control=do_control, apply_control=apply_control, - compute_tar_psf=compute_tar_psf) + if not self.config.p_wfss[w].open_loop and self.dms is not None: + self.wfs.raytrace(w, dms=self.dms, reset=False) + self.wfs.compute_wfs_image(w) + if do_control and self.rtc is not None: + for ncontrol in nControl : # range(len(self.config.p_controllers)): + self.rtc.do_centroids(ncontrol) + self.rtc.do_control(ncontrol) + self.rtc.do_clipping(ncontrol) + + if apply_control: + for ncontrol in nControl : + self.rtc.apply_control(ncontrol) + + if self.cacao: + self.rtc.publish() + + if compute_tar_psf: + for tar_index in tar_trace: + self.target.comp_tar_image(tar_index) + self.target.comp_strehl(tar_index) + + if self.config.p_controllers[0].close_opti: + self.modalgains.update_mgains() + self.close_modal_gains.append(self.modalgains.get_modal_gains()) + + self.iter += 1 + + def _print_strehl(self, monitoring_freq: int, iters_time: float, total_iters: int, *, + tar_index: int = 0): + """ Print the Strehl ratio SE and LE from a target on the terminal, the estimated remaining time and framerate + + Args: + monitoring_freq : (int) : Number of frames between two prints + + iters_time : (float) : time elapsed between two prints + + total_iters : (int) : Total number of iterations + + Kwargs: + tar_index : (int) : Index of the target. 
Default is 0 + """ + framerate = monitoring_freq / iters_time + strehl = self.target.get_strehl(tar_index) + etr = (total_iters - self.iter) / framerate + print("%d \t %.3f \t %.3f\t %.1f \t %.1f" % (self.iter + 1, strehl[0], + strehl[1], etr, framerate)) + + def loop(self, number_of_iter: int, *, monitoring_freq: int = 100, + compute_tar_psf: bool = True, **kwargs): + """ Perform the AO loop for iterations + + Args: + number_of_iter: (int) : Number of iteration that will be done + + Kwargs: + monitoring_freq: (int) : Monitoring frequency [frames]. Default is 100 + + compute_tar_psf : (bool) : If True (default), computes the PSF at each iteration + Else, only computes it each frames + """ + if not compute_tar_psf: + print("WARNING: Target PSF will be computed (& accumulated) only during monitoring" + ) + + print("----------------------------------------------------") + print("iter# | S.E. SR | L.E. SR | ETR (s) | Framerate (Hz)") + print("----------------------------------------------------") + # self.next(**kwargs) + t0 = time.time() + t1 = time.time() + if number_of_iter == -1: # Infinite loop + while (True): + self.next(compute_tar_psf=compute_tar_psf, **kwargs) + if ((self.iter + 1) % monitoring_freq == 0): + if not compute_tar_psf: + self.target.comp_tar_image(0) + self.target.comp_strehl(0) + self._print_strehl(monitoring_freq, time.time() - t1, number_of_iter) + t1 = time.time() + + for _ in range(number_of_iter): + self.next(compute_tar_psf=compute_tar_psf, **kwargs) + if ((self.iter + 1) % monitoring_freq == 0): + if not compute_tar_psf: + self.target.comp_tar_image(0) + self.target.comp_strehl(0) + self._print_strehl(monitoring_freq, time.time() - t1, number_of_iter) + t1 = time.time() + t1 = time.time() + print(" loop execution time:", t1 - t0, " (", number_of_iter, "iterations), ", + (t1 - t0) / number_of_iter, "(mean) ", number_of_iter / (t1 - t0), "Hz") + + def reset(self): + """ Reset the simulation to return to its original state + """ + 
self.atmos.reset_turbu() + self.wfs.reset_noise() + for tar_index in range(len(self.config.p_targets)): + self.target.reset_strehl(tar_index) + self.dms.reset_dm() + self.rtc.open_loop() + self.rtc.close_loop() # ___ _ __ _ __ __ _ _ _ @@ -239,7 +391,7 @@ def record_ao_circular_buffer( projection_matrix : (np.ndarray) : projection matrix on modal basis to compute residual coefficients - Return: + Returns: slopes: (int) : the slopes CB volts: (int) : the volts applied to the DM(s) CB @@ -248,9 +400,9 @@ def record_ao_circular_buffer( psf_le: (int) : Long exposure PSF over the iterations (I.e SR is reset at the begining of the CB if ditch_strehl=True) - sthrel_se_list: (int) : The SR short exposure evolution during CB recording + strehl_se_list: (int) : The SR short exposure evolution during CB recording - sthrel_le_list: (int) : The SR long exposure evolution during CB recording + strehl_le_list: (int) : The SR long exposure evolution during CB recording g_ncpa_list: (int) : the gain applied to the NCPA (PYRWFS CASE) if NCPA is set to True @@ -347,25 +499,29 @@ def export_config(self): Extract and convert compass supervisor configuration parameters into 2 dictionnaries containing relevant AO parameters - Returns : 2 dictionaries + Args: + root: (object), COMPASS supervisor object to be parsed + + Returns : + 2 dictionaries... See F. 
Vidal :) """ - from shesha.util.exportConfig import export_config - return export_config(self) + return self.config.export_config() def get_s_pupil(self): """ Returns the so called S Pupil of COMPASS - Return np.array + Return: + s_pupil: (np.array) : S Pupil of COMPASS """ return self.config.p_geom.get_spupil() - def get_i_pupil(self): """ Returns the so called I Pupil of COMPASS - Return np.array + Return: + i_pupil: (np.array) : I Pupil of COMPASS """ return self.config.p_geom.get_ipupil() @@ -373,6 +529,7 @@ def get_m_pupil(self): """ Returns the so called M Pupil of COMPASS - Return np.array + Return: + m_pupil: (np.array) : M Pupil of COMPASS """ - return self.config.p_geom.get_mpupil() \ No newline at end of file + return self.config.p_geom.get_mpupil() diff --git a/shesha/supervisor/components/__init__.py b/shesha/supervisor/components/__init__.py index a1053fd..30b78ec 100644 --- a/shesha/supervisor/components/__init__.py +++ b/shesha/supervisor/components/__init__.py @@ -1,7 +1,7 @@ ## @package shesha.supervisor ## @brief User layer for initialization and execution of a COMPASS simulation ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -34,18 +34,16 @@ # # You should have received a copy of the GNU Lesser General Public License along with COMPASS. # If not, see . 
-__all__ = ["atmosCompass", - "dmCompass", - "rtcCompass", - "targetCompass", - "sourceCompass", - "telescopeCompass", - "wfsCompass"] +__all__ = [ + "atmosCompass", "dmCompass", "rtc", "targetCompass", "sourceCompass", + "telescopeCompass", "wfsCompass" +] from .atmosCompass import AtmosCompass from .dmCompass import DmCompass -from .rtcCompass import RtcCompass +from .rtc.rtcCompass import RtcCompass +from .rtc.rtcStandalone import RtcStandalone from .targetCompass import TargetCompass from .sourceCompass import SourceCompass from .telescopeCompass import TelescopeCompass -from .wfsCompass import WfsCompass \ No newline at end of file +from .wfsCompass import WfsCompass diff --git a/shesha/supervisor/components/atmosCompass.py b/shesha/supervisor/components/atmosCompass.py index 74d68f8..646cb36 100644 --- a/shesha/supervisor/components/atmosCompass.py +++ b/shesha/supervisor/components/atmosCompass.py @@ -1,7 +1,7 @@ ## @package shesha.supervisor ## @brief User layer for initialization and execution of a COMPASS simulation ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -66,7 +66,7 @@ def __init__(self, context, config): self._config.p_geom, self._config.p_loop.ittime, p_wfss=self._config.p_wfss, p_targets=self._config.p_targets) - + def enable_atmos(self, enable : bool) -> None: """ Set or unset whether atmos is enabled when running loop @@ -93,7 +93,7 @@ def set_r0(self, r0 : float, *, reset_seed : int=-1) -> None: else: ilayer = reset_seed for k in range(self._atmos.nscreens): - self._atmos.set_seed(k, 1234 + ilayer) + self._atmos.set_seed(k, self._config.p_atmos.seeds[ilayer]) self._atmos.refresh_screen(k) ilayer += 1 self._config.p_atmos.set_r0(r0) @@ -113,7 +113,7 @@ def set_wind(self, screen_index : int, *, windspeed : float = None, winddir : fl self._config.p_atmos.windspeed[screen_index] = windspeed if winddir is not None: self._config.p_atmos.winddir[screen_index] = 
winddir - + lin_delta = self._config.p_geom.pupdiam / self._config.p_tel.diam * self._config.p_atmos.windspeed[screen_index] * \ np.cos(CONST.DEG2RAD * self._config.p_geom.zenithangle) * self._config.p_loop.ittime oldx = self._config.p_atmos._deltax[screen_index] @@ -138,7 +138,7 @@ def reset_turbu(self) -> None: """ ilayer = 0 for k in range(self._atmos.nscreens): - self._atmos.set_seed(k, 1234 + ilayer) + self._atmos.set_seed(k, self._config.p_atmos.seeds[ilayer]) self._atmos.refresh_screen(k) ilayer += 1 @@ -148,7 +148,7 @@ def get_atmos_layer(self, indx: int) -> np.ndarray: Args: indx : (int) : Index of the turbulent layer to return - Return: + Returns: layer : (np.ndarray) : turbulent layer phase screen """ return np.array(self._atmos.d_screens[indx].d_screen) diff --git a/shesha/supervisor/components/dmCompass.py b/shesha/supervisor/components/dmCompass.py index 138a810..8dbbcaa 100644 --- a/shesha/supervisor/components/dmCompass.py +++ b/shesha/supervisor/components/dmCompass.py @@ -1,7 +1,7 @@ ## @package shesha.supervisor ## @brief User layer for initialization and execution of a COMPASS simulation ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -100,7 +100,7 @@ def get_influ_function(self, dm_index : int) -> np.ndarray: Args: dm_index : (int) : index of the DM - Return: + Returns: influ : (np.ndarray) : Influence functions of the DM dm_index """ return self._config.p_dms[dm_index]._influ @@ -111,13 +111,13 @@ def get_influ_function_ipupil_coords(self, dm_index : int) -> np.ndarray: Args: dm_index : (int) : index of the DM - Return: + Returns: coords : (tuple) : (i, j) """ - i1 = self._config.p_dm0._i1 # i1 is in the dmshape support coords - j1 = self._config.p_dm0._j1 # j1 is in the dmshape support coords - ii1 = i1 + self._config.p_dm0._n1 # in ipupil coords - jj1 = j1 + self._config.p_dm0._n1 # in ipupil coords + i1 = self._config.p_dms[0]._i1 # i1 is in the dmshape 
support coords + j1 = self._config.p_dms[0]._j1 # j1 is in the dmshape support coords + ii1 = i1 + self._config.p_dms[0]._n1 # in ipupil coords + jj1 = j1 + self._config.p_dms[0]._n1 # in ipupil coords return ii1, jj1 def reset_dm(self, dm_index: int = -1) -> None: @@ -139,7 +139,7 @@ def get_dm_shape(self, indx : int) -> np.ndarray: Args: indx : (int) : Index of the DM - Return: + Returns: dm_shape : (np.ndarray) : DM phase screen """ diff --git a/shesha/supervisor/components/rtc/__init__.py b/shesha/supervisor/components/rtc/__init__.py new file mode 100644 index 0000000..fa62bc6 --- /dev/null +++ b/shesha/supervisor/components/rtc/__init__.py @@ -0,0 +1,40 @@ +## @package shesha.supervisor +## @brief User layer for initialization and execution of a COMPASS simulation +## @author COMPASS Team +## @version 5.1.0 +## @date 2020/05/18 +## @copyright GNU Lesser General Public License +# +# This file is part of COMPASS +# +# Copyright (C) 2011-2019 COMPASS Team +# All rights reserved. +# Distributed under GNU - LGPL +# +# COMPASS is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser +# General Public License as published by the Free Software Foundation, either version 3 of the License, +# or any later version. +# +# COMPASS: End-to-end AO simulation tool using GPU acceleration +# The COMPASS platform was designed to meet the need of high-performance for the simulation of AO systems. +# +# The final product includes a software package for simulating all the critical subcomponents of AO, +# particularly in the context of the ELT and a real-time core based on several control approaches, +# with performances consistent with its integration into an instrument. Taking advantage of the specific +# hardware architecture of the GPU, the COMPASS tool allows to achieve adequate execution speeds to +# conduct large simulation campaigns called to the ELT. 
+# +# The COMPASS platform can be used to carry a wide variety of simulations to both testspecific components +# of AO of the E-ELT (such as wavefront analysis device with a pyramid or elongated Laser star), and +# various systems configurations such as multi-conjugate AO. +# +# COMPASS is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the +# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License along with COMPASS. +# If not, see . +__all__ = ["rtcCompass", "rtcStandalone"] + +from .rtcCompass import RtcCompass +from .rtcStandalone import RtcStandalone diff --git a/shesha/supervisor/components/rtcCompass.py b/shesha/supervisor/components/rtc/rtcAbstract.py similarity index 86% rename from shesha/supervisor/components/rtcCompass.py rename to shesha/supervisor/components/rtc/rtcAbstract.py index fa3006f..32613da 100644 --- a/shesha/supervisor/components/rtcCompass.py +++ b/shesha/supervisor/components/rtc/rtcAbstract.py @@ -1,7 +1,7 @@ ## @package shesha.supervisor ## @brief User layer for initialization and execution of a COMPASS simulation ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -34,13 +34,18 @@ # # You should have received a copy of the GNU Lesser General Public License along with COMPASS. # If not, see . 
-from shesha.init.rtc_init import rtc_init + +from shesha.sutra_wrap import carmaWrap_context + from shesha.supervisor.components.sourceCompass import SourceCompass import shesha.constants as scons import numpy as np from typing import Union -class RtcCompass(object): +from abc import ABC, abstractmethod + + +class RtcAbstract(ABC): """ RTC handler for compass simulation Attributes: @@ -48,11 +53,17 @@ class RtcCompass(object): _context : (carmaContext) : CarmaContext instance - _config : (config module) : Parameters configuration structure module + _config : (config module) : Parameters configuration structure module + + brahma : (bool) : BRAHMA features enabled in the RTC - cacao : (bool) : CACAO features enabled in the RTC + fp16 : (bool) : FP16 features enabled in the RTC + + cacao : (bool) : CACAO features enabled in the RTC """ - def __init__(self, context, config, tel, wfs, dms, atm, *, cacao=False): + + def __init__(self, context: carmaWrap_context, config, *, brahma: bool = False, + fp16: bool = False, cacao: bool = False): """ Initialize a RtcCompass component for rtc related supervision Args: @@ -60,27 +71,26 @@ def __init__(self, context, config, tel, wfs, dms, atm, *, cacao=False): config : (config module) : Parameters configuration structure module - tel : (TelescopeCompass) : A TelescopeCompass instance - - wfs : (WfsCompass) : A WfsCompass instance - - dms : (DmCompass) : A DmCompass instance + Kwargs: + brahma : (bool, optional) : If True, enables BRAHMA features in RTC (Default is False) + Requires BRAHMA to be installed - atm : (AtmosCompass) : An AtmosCompass instance + fp16 : (bool, optional) : If True, enables FP16 features in RTC (Default is False) + Requires CUDA_SM>60 to be installed - Kwargs: cacao : (bool) : If True, enables CACAO features in RTC (Default is False) - /!\ Requires OCTOPUS to be installed + Requires OCTOPUS to be installed """ + self.brahma = brahma + self.fp16 = fp16 self.cacao = cacao self._context = context - 
self._config = config # Parameters configuration coming from supervisor init - print("->rtc init") - self._rtc = rtc_init(self._context, tel._tel, wfs._wfs, dms._dms, atm._atmos, - self._config.p_wfss, self._config.p_tel, - self._config.p_geom, self._config.p_atmos, self._config.p_loop.ittime, - self._config.p_centroiders, self._config.p_controllers, - self._config.p_dms, cacao=cacao) + self._config = config # Parameters configuration coming from supervisor init + self._rtc = None + + @abstractmethod + def rtc_init(self): + pass def set_perturbation_voltage(self, controller_index: int, name: str, command: np.ndarray) -> None: @@ -107,17 +117,17 @@ def get_slopes(self, controller_index: int) -> np.ndarray: Args: controller_index : (int) : controller index handling the slopes - Return: + Returns: slopes : (np.ndarray) : Current slopes vector containing slopes of all the WFS handled by the specified controller """ return np.array(self._rtc.d_control[controller_index].d_centroids) - def close_loop(self, controller_index: int=None) -> None: + def close_loop(self, controller_index: int = None) -> None: """ DM receives controller output + pertuVoltage Kwargs: - controller_index: (int): controller index. + controller_index: (int): controller index. 
If None (default), apply on all controllers """ if controller_index is None: @@ -126,7 +136,7 @@ def close_loop(self, controller_index: int=None) -> None: else: self._rtc.d_control[controller_index].set_open_loop(0) # close_loop - def open_loop(self, controller_index: int=None, reset=True) -> None: + def open_loop(self, controller_index: int = None, reset=True) -> None: """ Integrator computation goes to /dev/null but pertuVoltage still applied Kwargs: @@ -141,7 +151,8 @@ def open_loop(self, controller_index: int=None, reset=True) -> None: else: self._rtc.d_control[controller_index].set_open_loop(1, reset) # open_loop - def set_ref_slopes(self, ref_slopes: np.ndarray, *, centro_index : int=None) -> None: + def set_ref_slopes(self, ref_slopes: np.ndarray, *, + centro_index: int = None) -> None: """ Set given ref slopes in centroider Args: @@ -167,7 +178,7 @@ def get_ref_slopes(self, centro_index=None) -> np.ndarray: slopes vector returned is a concatenation of all the reference slopes used for by centroiders in the RTC - Return: + Returns: ref_slopes : (np.ndarray) : Reference slopes vector """ ref_slopes = np.empty(0) @@ -194,7 +205,7 @@ def get_interaction_matrix(self, controller_index: int): Args: controller_index: (int): controller index - Return: + Returns: imat : (np.ndarray) : Interaction matrix currently set in the controller """ return np.array(self._rtc.d_control[controller_index].d_imat) @@ -205,7 +216,7 @@ def get_command_matrix(self, controller_index: int): Args: controller_index: (int): controller index - Return: + Returns: cmat : (np.ndarray) : Command matrix currently used by the controller """ return np.array(self._rtc.d_control[controller_index].d_cmat) @@ -225,7 +236,11 @@ def get_intensities(self) -> np.ndarray: """ raise NotImplementedError("Not implemented") - def set_flat(self, centro_index: int, flat: np.ndarray,): + def set_flat( + self, + centro_index: int, + flat: np.ndarray, + ): """ Load flat field for the given wfs Args: @@ -251,7 
+266,7 @@ def compute_slopes(self, controller_index: int): Args: controller_index : (int) : Controller index that will compute its slopes - Return: + Returns: slopes : (np.ndarray) : Slopes vector """ self._rtc.do_centroids(controller_index) @@ -278,7 +293,8 @@ def remove_perturbation_voltage(self, controller_index: int, name: str) -> None: """ self._rtc.d_control[controller_index].remove_perturb_voltage(name) - def get_perturbation_voltage(self, controller_index: int, *, name: str=None) -> Union[dict, tuple]: + def get_perturbation_voltage(self, controller_index: int, *, + name: str = None) -> Union[dict, tuple]: """ Get a perturbation voltage buffer Args: @@ -290,11 +306,12 @@ def get_perturbation_voltage(self, controller_index: int, *, name: str=None) -> Returns: pertu : (dict or tuple) : If name is None, returns a dictionnary with the buffers names as keys and a tuple (buffer, circular_counter, is_enabled) - """ + """ pertu_map = self._rtc.d_control[controller_index].d_perturb_map if name is None: for key in pertu_map.keys(): - pertu_map[key] = (np.array(pertu_map[key][0]), pertu_map[key][1], pertu_map[key][2]) + pertu_map[key] = (np.array(pertu_map[key][0]), pertu_map[key][1], + pertu_map[key][2]) return pertu_map else: pertu = pertu_map[name] @@ -315,14 +332,14 @@ def get_voltages(self, controller_index: int) -> np.ndarray: Args: controller_index : (int) : controller index - Return: + Returns: voltages : (np.ndarray) : current voltages vector """ return np.array(self._rtc.d_control[controller_index].d_voltage) def set_integrator_law(self, controller_index: int) -> None: - """ Set the control law to integrator (controller generic only) + """ Set the command law to integrator (controller generic only) v[k] = v[k-1] + g.R.s[k] Args: @@ -331,7 +348,7 @@ def set_integrator_law(self, controller_index: int) -> None: self._rtc.d_control[controller_index].set_commandlaw("integrator") def set_2matrices_law(self, controller_index: int) -> None: - """ Set the control law 
to 2matrices (controller generic only) + """ Set the command law to 2matrices (controller generic only) v[k] = decayFactor.E.v[k-1] + g.R.s[k] Args: @@ -340,7 +357,7 @@ def set_2matrices_law(self, controller_index: int) -> None: self._rtc.d_control[controller_index].set_commandlaw("2matrices") def set_modal_integrator_law(self, controller_index: int) -> None: - """ Set the control law to 2matrices (controller generic only) + """ Set the command law to 2matrices (controller generic only) v[k] = v[k-1] + E.g.R.s[k] Args: @@ -348,7 +365,7 @@ def set_modal_integrator_law(self, controller_index: int) -> None: """ self._rtc.d_control[controller_index].set_commandlaw("modal_integrator") - def set_decay_factor(self, controller_index: int, decay : np.ndarray) -> None: + def set_decay_factor(self, controller_index: int, decay: np.ndarray) -> None: """ Set the decay factor used in 2matrices command law (controller generic only) Args: @@ -358,7 +375,7 @@ def set_decay_factor(self, controller_index: int, decay : np.ndarray) -> None: """ self._rtc.d_control[controller_index].set_decayFactor(decay) - def set_E_matrix(self, controller_index: int, e_matrix : np.ndarray) -> None: + def set_E_matrix(self, controller_index: int, e_matrix: np.ndarray) -> None: """ Set the E matrix used in 2matrices or modal command law (controller generic only) Args: @@ -387,18 +404,18 @@ def set_centroider_threshold(self, centro_index: int, thresh: float) -> None: """ self._rtc.d_centro[centro_index].set_threshold(thresh) - def get_pyr_method(self, centro_index : int) -> str: + def get_pyr_method(self, centro_index: int) -> str: """ Get pyramid compute method currently used Args: centro_index: (int): centroider index - Return: + Returns: method : (str) : Pyramid compute method currently used """ return self._rtc.d_centro[centro_index].pyr_method - def set_pyr_method(self, centro_index: int, pyr_method : int) -> None: + def set_pyr_method(self, centro_index: int, pyr_method: int) -> None: """ Set the 
pyramid method for slopes computation Args: @@ -409,28 +426,28 @@ def set_pyr_method(self, centro_index: int, pyr_method : int) -> None: 2: nosinus local 3: sinus local) """ - self._rtc.d_centro[centro_index].set_pyr_method(pyr_method) # Sets the pyr method + self._rtc.d_centro[centro_index].set_pyr_method( + pyr_method) # Sets the pyr method self._rtc.do_centroids(0) # To be ready for the next get_slopess print("PYR method set to " + self._rtc.d_centro[centro_index].pyr_method) - def set_modal_gains(self, controller_index: int, mgain: np.ndarray) -> None: - """ Sets the modal gain (when using modal integrator control law) + """ Sets the modal gain (when using modal integrator command law) Args: controller_index : (int) : Controller index to modify - + mgain : (np.ndarray) : Modal gains to set """ self._rtc.d_control[controller_index].set_modal_gains(mgain) def get_modal_gains(self, controller_index: int) -> np.ndarray: - """ Returns the modal gains (when using modal integrator control law) + """ Returns the modal gains (when using modal integrator command law) Args: controller_index : (int) : Controller index to modify - Return: + Returns: mgain : (np.ndarray) : Modal gains vector currently used """ return np.array(self._rtc.d_control[controller_index].d_gain) @@ -441,7 +458,7 @@ def get_masked_pix(self, centro_index: int) -> np.ndarray: Args: centro_index : (int): Centroider index. 
Must be a maskedpix centroider - Return: + Returns: mask : (np.ndarray) : Mask used """ if (self._rtc.d_centro[centro_index].type != scons.CentroiderType.MASKEDPIX): @@ -455,12 +472,12 @@ def get_command(self, controller_index: int) -> np.ndarray: Args: controller_index : (int) : Controller index - Return: + Returns: com : (np.ndarray) : Command vector """ return np.array(self._rtc.d_control[controller_index].d_com) - def set_command(self, controller_index: int, com : np.ndarray) -> np.ndarray: + def set_command(self, controller_index: int, com: np.ndarray) -> np.ndarray: """ Returns the last computed command before conversion to voltages Args: @@ -468,7 +485,7 @@ def set_command(self, controller_index: int, com : np.ndarray) -> np.ndarray: com : (np.ndarray) : Command vector to set """ - if(com.size != self._config.p_controllers[controller_index].nactu): + if (com.size != self._config.p_controllers[controller_index].nactu): raise ValueError("Dimension mismatch") self._rtc.d_control[controller_index].set_com(com, com.size) @@ -491,7 +508,7 @@ def get_slopes_geom(self, controller_index: int) -> np.ndarray: Args: controller_index : (int) : controller index - Return: + Returns: slopes_geom : (np.ndarray) : geometrically computed slopes """ self._rtc.do_centroids_geom(controller_index) @@ -502,7 +519,7 @@ def get_slopes_geom(self, controller_index: int) -> np.ndarray: def get_selected_pix(self) -> np.ndarray: """ Return the pyramid image with only the selected pixels used by the full pixels centroider - Return: + Returns: selected_pix : (np.ndarray) : PWFS image with only selected pixels """ if (self._config.p_centroiders[0].type != scons.CentroiderType.MASKEDPIX): @@ -524,7 +541,8 @@ def do_ref_slopes(self, controller_index: int) -> None: self._rtc.do_centroids_ref(controller_index) print("Reference slopes done") - def do_control(self, controller_index: int, *, sources : SourceCompass = None, source_index : int = 0, is_wfs_phase : bool = False) -> None: + def 
do_control(self, controller_index: int, *, sources: SourceCompass = None, + source_index: int = 0, is_wfs_phase: bool = False) -> None: """Computes the command from the Wfs slopes Args: @@ -534,7 +552,7 @@ def do_control(self, controller_index: int, *, sources : SourceCompass = None, s sources : (SourceCompass) : List of phase screens of a wfs or target sutra object If the controller is a GEO one, specify a SourceCompass instance from WfsCompass or TargetCompass to project the corresponding phase - + source_index : (int) : Index of the phase screen to consider inside . Default is 0 is_wfs_phase : (bool) : If True, sources[source_index] is a WFS phase screen. @@ -542,8 +560,8 @@ def do_control(self, controller_index: int, *, sources : SourceCompass = None, s """ if (self._rtc.d_control[controller_index].type == scons.ControllerType.GEO): if (sources is not None): - self._rtc.d_control[controller_index].comp_dphi(sources[source_index], - is_wfs_phase) + self._rtc.d_control[controller_index].comp_dphi( + sources[source_index], is_wfs_phase) self._rtc.do_control(controller_index) def do_calibrate_img(self, controller_index: int) -> None: @@ -589,7 +607,7 @@ def do_clipping(self, controller_index: int) -> None: """ self._rtc.do_clipping(controller_index) - def set_scale(self, centroider_index : int, scale : float) -> None: + def set_scale(self, centroider_index: int, scale: float) -> None: """ Update the scale factor of the centroider Args: @@ -602,31 +620,35 @@ def set_scale(self, centroider_index : int, scale : float) -> None: def publish(self) -> None: """ Publish loop data on DDS topics - /!\ only with cacao enabled, requires OCTOPUS + only with cacao enabled, requires OCTOPUS """ if self.cacao: self._rtc.publish() else: raise AttributeError("CACAO must be enabled") - - def get_image_raw(self, centroider_index : int) -> np.ndarray: + + def get_image_raw(self, centroider_index: int) -> np.ndarray: """ Return the raw image currently loaded on the specified centroider 
Args: centroider_index : (int) : Index of the centroider - - Return: + + Returns: image_raw : (np.ndarray) : Raw WFS image loaded in the centroider """ return np.array(self._rtc.d_centro[centroider_index].d_img_raw) - def get_image_calibrated(self, centroider_index : int) -> np.ndarray: + def get_image_calibrated(self, centroider_index: int) -> np.ndarray: """ Return the last image calibrated by the specified centroider Args: centroider_index : (int) : Index of the centroider - - Return: + + Returns: image_cal : (np.ndarray) : Calibrated WFS image loaded in the centroider """ - return np.array(self._rtc.d_centro[centroider_index].d_img) \ No newline at end of file + img = np.array(self._rtc.d_centro[centroider_index].d_img) + if self._config.p_centroiders[ + centroider_index].type == scons.CentroiderType.MASKEDPIX: # Full pixel case + img *= self.get_masked_pix(centroider_index) + return img diff --git a/shesha/supervisor/components/rtc/rtcCompass.py b/shesha/supervisor/components/rtc/rtcCompass.py new file mode 100644 index 0000000..3f16c9e --- /dev/null +++ b/shesha/supervisor/components/rtc/rtcCompass.py @@ -0,0 +1,99 @@ +## @package shesha.supervisor +## @brief User layer for initialization and execution of a COMPASS simulation +## @author COMPASS Team +## @version 5.1.0 +## @date 2020/05/18 +## @copyright GNU Lesser General Public License +# +# This file is part of COMPASS +# +# Copyright (C) 2011-2019 COMPASS Team +# All rights reserved. +# Distributed under GNU - LGPL +# +# COMPASS is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser +# General Public License as published by the Free Software Foundation, either version 3 of the License, +# or any later version. +# +# COMPASS: End-to-end AO simulation tool using GPU acceleration +# The COMPASS platform was designed to meet the need of high-performance for the simulation of AO systems. 
+# +# The final product includes a software package for simulating all the critical subcomponents of AO, +# particularly in the context of the ELT and a real-time core based on several control approaches, +# with performances consistent with its integration into an instrument. Taking advantage of the specific +# hardware architecture of the GPU, the COMPASS tool allows to achieve adequate execution speeds to +# conduct large simulation campaigns called to the ELT. +# +# The COMPASS platform can be used to carry a wide variety of simulations to both testspecific components +# of AO of the E-ELT (such as wavefront analysis device with a pyramid or elongated Laser star), and +# various systems configurations such as multi-conjugate AO. +# +# COMPASS is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the +# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License along with COMPASS. +# If not, see . 
+ +from shesha.init.rtc_init import rtc_init +from shesha.supervisor.components.sourceCompass import SourceCompass +import shesha.constants as scons +import numpy as np +from typing import Union + +from shesha.supervisor.components.rtc.rtcAbstract import RtcAbstract, carmaWrap_context + + +class RtcCompass(RtcAbstract): + """ RTC handler for compass simulation + """ + + def __init__(self, context: carmaWrap_context, config, tel, wfs, dms, atm, *, + brahma: bool = False, fp16: bool = False, cacao: bool = False): + """ Initialize a RtcCompass component for rtc related supervision + + Args: + context : (carmaContext) : CarmaContext instance + + config : (config module) : Parameters configuration structure module + + tel: (Telescope) : Telescope object + + wfs: (Sensors) : Sensors object + + dms: (Dms) : Dms object + + atm: (Atmos) : Atmos object + + Kwargs: + brahma : (bool, optional) : If True, enables BRAHMA features in RTC (Default is False) + Requires BRAHMA to be installed + + fp16 : (bool, optional) : If True, enables FP16 features in RTC (Default is False) + Requires CUDA_SM>60 to be installed + + cacao : (bool) : If True, enables CACAO features in RTC (Default is False) + Requires OCTOPUS to be installed + """ + RtcAbstract.__init__(self, context, config, brahma=brahma, fp16=fp16, + cacao=cacao) + self.rtc_init(tel, wfs, dms, atm) + + def rtc_init(self, tel, wfs, dms, atm): + """ Initialize a RtcCompass component for rtc related supervision + + Args: + tel: (Telescope) : Telescope object + + wfs: (Sensors) : Sensors object + + dms: (Dms) : Dms object + + atm: (Atmos) : Atmos object + """ + self._rtc = rtc_init(self._context, tel._tel, wfs._wfs, dms._dms, atm._atmos, + self._config.p_wfss, self._config.p_tel, + self._config.p_geom, self._config.p_atmos, + self._config.p_loop.ittime, self._config.p_centroiders, + self._config.p_controllers, self._config.p_dms, + cacao=self.cacao) diff --git a/shesha/supervisor/components/rtc/rtcStandalone.py 
b/shesha/supervisor/components/rtc/rtcStandalone.py new file mode 100644 index 0000000..17da5bd --- /dev/null +++ b/shesha/supervisor/components/rtc/rtcStandalone.py @@ -0,0 +1,111 @@ +## @package shesha.supervisor +## @brief User layer for initialization and execution of a COMPASS simulation +## @author COMPASS Team +## @version 5.1.0 +## @date 2020/05/18 +## @copyright GNU Lesser General Public License +# +# This file is part of COMPASS +# +# Copyright (C) 2011-2019 COMPASS Team +# All rights reserved. +# Distributed under GNU - LGPL +# +# COMPASS is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser +# General Public License as published by the Free Software Foundation, either version 3 of the License, +# or any later version. +# +# COMPASS: End-to-end AO simulation tool using GPU acceleration +# The COMPASS platform was designed to meet the need of high-performance for the simulation of AO systems. +# +# The final product includes a software package for simulating all the critical subcomponents of AO, +# particularly in the context of the ELT and a real-time core based on several control approaches, +# with performances consistent with its integration into an instrument. Taking advantage of the specific +# hardware architecture of the GPU, the COMPASS tool allows to achieve adequate execution speeds to +# conduct large simulation campaigns called to the ELT. +# +# The COMPASS platform can be used to carry a wide variety of simulations to both testspecific components +# of AO of the E-ELT (such as wavefront analysis device with a pyramid or elongated Laser star), and +# various systems configurations such as multi-conjugate AO. +# +# COMPASS is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the +# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License along with COMPASS. +# If not, see . +from shesha.init.rtc_init import rtc_standalone +from shesha.supervisor.components.sourceCompass import SourceCompass +import shesha.constants as scons +import numpy as np +from typing import Union + +from shesha.supervisor.components.rtc.rtcAbstract import RtcAbstract, carmaWrap_context + + +class RtcStandalone(RtcAbstract): + """ RTC handler for compass standalone + """ + + def __init__(self, context: carmaWrap_context, config, nwfs: int, nvalid: list, + nactu: int, centroider_type: list, delay: list, offset: list, + scale: list, *, brahma: bool = False, fp16: bool = False, + cacao: bool = False): + """ Initialize a RtcStandalone component for rtc related supervision + + Args: + context : (carmaContext) : CarmaContext instance + + config : (config module) : Parameters configuration structure module + + nwfs: (int): number of wavefront sensors + + nvalid: (int): number of valid measures as input + + nactu: (int): number of actuators as output + + centroider_type: (list): type of centroiders + + delay: (list): delay of each controller + + offset: (list): offset added in the cog computation of each WFS + + scale: (list): scale factor used in the cog computation of each WFS + + Kwargs: + brahma : (bool, optional) : If True, enables BRAHMA features in RTC (Default is False) + Requires BRAHMA to be installed + + fp16 : (bool, optional) : If True, enables FP16 features in RTC (Default is False) + Requires CUDA_SM>60 to be installed + + cacao : (bool) : If True, enables CACAO features in RTC (Default is False) + Requires OCTOPUS to be installed + """ + RtcAbstract.__init__(self, context, config, brahma=brahma, fp16=fp16, + cacao=cacao) + + self.rtc_init(nwfs, nvalid, nactu, centroider_type, delay, offset, scale) + + def rtc_init(self, nwfs: int, nvalid: list, nactu: int, centroider_type: list, + delay: list, offset: list, scale: list): + """ Initialize a 
RtcStandalone component for rtc related supervision + + Args: + nwfs: (int): number of wavefront sensors + + nvalid: (int): number of valid measures as input + + nactu: (int): number of actuators as output + + centroider_type: (list): type of centroiders + + delay: (list): delay of each controller + + offset: (list): offset added in the cog computation of each WFS + + scale: (list): scale factor used in the cog computation of each WFS + """ + self._rtc = rtc_standalone(self._context, nwfs, nvalid, nactu, centroider_type, + delay, offset, scale, brahma=self.brahma, + fp16=self.fp16, cacao=self.cacao) diff --git a/shesha/supervisor/components/sourceCompass.py b/shesha/supervisor/components/sourceCompass.py index 1efcc43..833e264 100644 --- a/shesha/supervisor/components/sourceCompass.py +++ b/shesha/supervisor/components/sourceCompass.py @@ -1,7 +1,7 @@ ## @package shesha.supervisor ## @brief User layer for initialization and execution of a COMPASS simulation ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -51,23 +51,23 @@ def __init__(self, sources : List): self.sources = sources def raytrace(self, index, *, tel=None, atm=None, dms=None, ncpa : bool=True, reset : bool = True) -> None: - """ Performs the raytracing operation through provided object phase screens + """ Performs the raytracing operation through provided object phase screens to obtain the phase screen of the SutraSource Args: - index : (int) : Index of the source to raytrace in self.sources list + index : (int) : Index of the source to raytrace in self.sources list Kwargs: tel : (TelescopeCompass) : TelescopeCompass instance. If provided, raytrace through the telescope aberration phase in the pupil atm : (AtmosCompass) : AtmosCompass instance. - If provided, raytrace through the layers phase screens + If provided, raytrace through the layers phase screens dms : (dmsCompass) : DmCompass instance. 
If provided, raytrace through the DM shapes - ncpa : (bool) : If True (default), raytrace through NCPA phase screen of the source (default is array of 0, i.e. no impact) + ncpa : (bool) : If True (default), raytrace through NCPA phase screen of the source (default is array of 0, i.e. no impact) reset: (bool): reset the phase screen before raytracing. Default is True """ @@ -82,4 +82,3 @@ def raytrace(self, index, *, tel=None, atm=None, dms=None, ncpa : bool=True, res self.sources[index].raytrace(dms._dms) if ncpa: self.sources[index].raytrace() - diff --git a/shesha/supervisor/components/targetCompass.py b/shesha/supervisor/components/targetCompass.py index 2f56db8..1186b96 100644 --- a/shesha/supervisor/components/targetCompass.py +++ b/shesha/supervisor/components/targetCompass.py @@ -1,7 +1,7 @@ ## @package shesha.supervisor ## @brief User layer for initialization and execution of a COMPASS simulation ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -78,16 +78,16 @@ def get_tar_image(self, tar_index : int, *, expo_type: str = "se") -> np.ndarray expo_type : (str) : "se" for short exposure (default) "le" for long exposure - Return: + Returns: psf : (np.ndarray) : PSF """ if (expo_type == "se"): return np.fft.fftshift( np.array(self._target.d_targets[tar_index].d_image_se)) elif (expo_type == "le"): - return np.fft.fftshift( - np.array(self._target.d_targets[tar_index].d_image_le) - ) / self._target.d_targets[tar_index].strehl_counter + nb = self._target.d_targets[tar_index].strehl_counter + if nb == 0: nb = 1 + return np.fft.fftshift(np.array(self._target.d_targets[tar_index].d_image_le)) / nb else: raise ValueError("Unknown exposure type") @@ -111,7 +111,7 @@ def get_tar_phase(self, tar_index: int, *, pupil: bool = False) -> np.ndarray: pupil : (bool) : If True, applies the pupil on top of the phase screen Default is False - Return: + Returns: tar_phase : (np.ndarray) : Target 
phase screen """ tar_phase = np.array(self._target.d_targets[tar_index].d_phase) @@ -148,7 +148,7 @@ def get_strehl(self, tar_index: int, *, do_fit: bool = True) -> np.ndarray: do_fit : (bool) : If True (default), fit the PSF with a sinc before computing SR - Return: + Returns: strehl : (np.ndarray) : Strehl ratios and phase variances """ src = self._target.d_targets[tar_index] @@ -164,7 +164,7 @@ def get_ncpa_tar(self, tar_index : int) -> np.ndarray: Args: tar_index : (int) : Index of the target - Return: + Returns: ncpa : (np.ndarray) : NCPA phase screen """ return np.array(self._target.d_targets[tar_index].d_ncpa_phase) diff --git a/shesha/supervisor/components/telescopeCompass.py b/shesha/supervisor/components/telescopeCompass.py index abaffef..6701aa7 100644 --- a/shesha/supervisor/components/telescopeCompass.py +++ b/shesha/supervisor/components/telescopeCompass.py @@ -1,7 +1,7 @@ ## @package shesha.supervisor ## @brief User layer for initialization and execution of a COMPASS simulation ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # diff --git a/shesha/supervisor/components/wfsCompass.py b/shesha/supervisor/components/wfsCompass.py index e403712..7adae3f 100644 --- a/shesha/supervisor/components/wfsCompass.py +++ b/shesha/supervisor/components/wfsCompass.py @@ -1,7 +1,7 @@ ## @package shesha.supervisor ## @brief User layer for initialization and execution of a COMPASS simulation ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -70,7 +70,7 @@ def __init__(self, context, config, tel): self._config.p_tel, self._config.p_geom, self._config.p_dms, self._config.p_atmos) self.sources = [wfs.d_gs for wfs in self._wfs.d_wfs] - + def get_wfs_image(self, wfs_index : int) -> np.ndarray: """ Get an image from the WFS (wfs[0] by default), or from the centroider handling the WFS to get the calibrated image 
@@ -78,7 +78,7 @@ def get_wfs_image(self, wfs_index : int) -> np.ndarray: Args: wfs_index : (int) : index of the WFS (or the centroider) to request an image - Return: + Returns: image : (np.ndarray) : WFS image """ if self._config.p_wfss[wfs_index].fakecam: @@ -115,8 +115,8 @@ def set_pyr_modulation_ampli(self, wfs_index: int, pyr_mod: float) -> float: """ Set pyramid circular modulation amplitude value - in lambda/D units. Compute new modulation points corresponding to the new amplitude value - and upload them. - /!\ WARNING : if you are using slopes-based centroider with the PWFS, + and upload them. + WARNING : if you are using slopes-based centroider with the PWFS, also update the centroider scale (rtc.set_scale) with the returned value @@ -124,8 +124,8 @@ def set_pyr_modulation_ampli(self, wfs_index: int, pyr_mod: float) -> float: wfs_index : (int) : WFS index pyr_mod : (float) : new pyramid modulation amplitude value - - Return: + + Returns: scale : (float) : scale factor """ p_wfs = self._config.p_wfss[wfs_index] @@ -195,7 +195,7 @@ def set_pyr_disk_source_hexa(self, wfs_index: int, radius: float) -> None: """ Create disk object by packing PSF in a given radius, using hexagonal packing and set it as modulation pattern - /!\ There is no modulation + There is no modulation Args: wfs_index : (int) : WFS index @@ -222,7 +222,7 @@ def set_pyr_disk_source(self, wfs_index: int, radius: float, *, density: float = """ Create disk object by packing PSF in a given radius, using square packing and set it as modulation pattern - /!\ There is no modulation + There is no modulation Args: wfs_index : (int) : WFS index @@ -242,7 +242,7 @@ def set_pyr_square_source(self, wfs_index: int, radius: float, *, density: float """ Create a square object by packing PSF in a given radius, using square packing and set it as modulation pattern - /!\ There is no modulation + There is no modulation Args: wfs_index : (int) : WFS index @@ -318,7 +318,7 @@ def set_gs_mag(self, wfs_index : int, 
mag : float) -> None: mag : (float) : New magnitude of the guide star """ wfs = self._wfs.d_wfs[wfs_index] - if (self._config.p_wfs0.type == "pyrhr"): + if (self._config.p_wfss[0].type == "pyrhr"): r = wfs.comp_nphot(self._config.p_loop.ittime, self._config.p_wfss[wfs_index].optthroughput, self._config.p_tel.diam, self._config.p_tel.cobs, @@ -354,7 +354,7 @@ def get_ncpa_wfs(self, wfs_index : int) -> np.ndarray: Args: wfs_index : (int) : Index of the WFS - Return: + Returns: ncpa : (np.ndarray) : NCPA phase screen """ return np.array(self._wfs.d_wfs[wfs_index].d_gs.d_ncpa_phase) @@ -365,7 +365,7 @@ def get_wfs_phase(self, wfs_index : int) -> np.ndarray: Args: wfs_index : (int) : Index of the WFS - Return: + Returns: phase : (np.ndarray) : WFS phase screen """ return np.array(self._wfs.d_wfs[wfs_index].d_gs.d_phase) @@ -376,7 +376,7 @@ def get_pyrhr_image(self, wfs_index : int) -> np.ndarray: Args: wfs_index : (int) : Index of the WFS - Return: + Returns: image : (np.ndarray) : PWFS high resolution image """ @@ -430,7 +430,7 @@ def get_pyr_focal_plane(self, wfs_index : int) -> np.ndarray: Args: wfs_index : (int) : WFS index - Return: + Returns: focal_plane : (np.ndarray) : psf on the top of the pyramid """ return np.fft.fftshift(np.array(self._wfs.d_wfs[wfs_index].d_pyrfocalplane)) diff --git a/shesha/supervisor/genericSupervisor.py b/shesha/supervisor/genericSupervisor.py index 95fac99..706aaf5 100644 --- a/shesha/supervisor/genericSupervisor.py +++ b/shesha/supervisor/genericSupervisor.py @@ -1,7 +1,7 @@ ## @package shesha.supervisor.aoSupervisor ## @brief Abstract layer for initialization and execution of a AO supervisor ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -35,13 +35,11 @@ # You should have received a copy of the GNU Lesser General Public License along with COMPASS. # If not, see . 
-from abc import abstractmethod -import time +from abc import ABC, abstractmethod from shesha.sutra_wrap import carmaWrap_context -from typing import Iterable -class GenericSupervisor(object): +class GenericSupervisor(ABC): """ This class defines generic methods and behavior of a supervisor It is not intended to be instantiated as it is : prefer to build a supervisor class inheriting from it. This approach allows to build multiple @@ -52,18 +50,6 @@ class GenericSupervisor(object): config : (config) : Parameters structure - tel : (TelescopeComponent) : a TelescopeComponent instance - - atmos : (AtmosComponent) : An AtmosComponent instance - - target : (targetComponent) : A TargetComponent instance - - wfs : (WfsComponent) : A WfsComponent instance - - dms : (DmComponent) : A DmComponent instance - - rtc : (RtcComponent) : A Rtc component instance - is_init : (bool) : Flag equals to True if the supervisor has already been initialized iter : (int) : Frame counter @@ -74,23 +60,27 @@ def __init__(self, config): Args: config : (config module) : Configuration module + """ self.context = None self.config = config - self.tel = None - self.atmos = None - self.target = None - self.wfs = None - self.dms = None - self.rtc = None self.is_init = False self.iter = 0 + + if (self.config.p_loop.devices.size > 1): + self.context = carmaWrap_context.get_instance_ngpu( + self.config.p_loop.devices.size, self.config.p_loop.devices) + else: + self.context = carmaWrap_context.get_instance_1gpu( + self.config.p_loop.devices[0]) + self.force_context() + self._init_components() def get_config(self): """ Returns the configuration in use, in a supervisor specific format ? 
- Return: + Returns: config : (config module) : Current supervisor configuration """ return self.config @@ -98,7 +88,7 @@ def get_config(self): def get_frame_counter(self) -> int: """Return the current iteration number of the loop - Return: + Returns: framecounter : (int) : Number of iteration already performed """ return self.iter @@ -112,207 +102,8 @@ def force_context(self) -> None: self.context.set_active_device_force(device) self.context.set_active_device(current_device) + @abstractmethod def _init_components(self) -> None: """ Initialize all the components """ - if (self.config.p_loop.devices.size > 1): - self.context = carmaWrap_context.get_instance_ngpu( - self.config.p_loop.devices.size, self.config.p_loop.devices) - else: - self.context = carmaWrap_context.get_instance_1gpu( - self.config.p_loop.devices[0]) - self.force_context() - - if self.config.p_tel is None or self.config.p_geom is None: - raise ValueError("Telescope geometry must be defined (p_geom and p_tel)") - self._init_tel() - - if self.config.p_atmos is not None: - self._init_atmos() - if self.config.p_dms is not None: - self._init_dms() - if self.config.p_targets is not None: - self._init_target() - if self.config.p_wfss is not None: - self._init_wfs() - if self.config.p_controllers is not None or self.config.p_centroiders is not None: - self._init_rtc() - self.is_init = True - - @abstractmethod - def _init_tel(self): - """ Initialize the telescope component of the supervisor - """ - pass - - @abstractmethod - def _init_atmos(self): - """ Initialize the atmos component of the supervisor - """ - pass - - @abstractmethod - def _init_dms(self): - """ Initialize the dms component of the supervisor - """ - pass - - @abstractmethod - def _init_target(self): - """ Initialize the target component of the supervisor - """ - pass - - @abstractmethod - def _init_wfs(self): - """ Initialize the wfs component of the supervisor - """ - pass - - @abstractmethod - def _init_rtc(self): - """ Initialize the rtc 
component of the supervisor - """ - pass - - def next(self, *, move_atmos: bool = True, nControl: int = 0, - tar_trace: Iterable[int] = None, wfs_trace: Iterable[int] = None, - do_control: bool = True, apply_control: bool = True, - compute_tar_psf: bool = True) -> None: - """Iterates the AO loop, with optional parameters - - Kwargs: - move_atmos: (bool): move the atmosphere for this iteration. Default is True - - nControl: (int): Controller number to use. Default is 0 (single control configuration) - - tar_trace: (List): list of targets to trace. None is equivalent to all (default) - - wfs_trace: (List): list of WFS to trace. None is equivalent to all (default) - - do_control : (bool) : Performs RTC operations if True (Default) - - apply_control: (bool): if True (default), apply control on DMs - - compute_tar_psf : (bool) : If True (default), computes the PSF at the end of the iteration - """ - if tar_trace is None and self.target is not None: - tar_trace = range(len(self.config.p_targets)) - if wfs_trace is None and self.wfs is not None: - wfs_trace = range(len(self.config.p_wfss)) - - if move_atmos and self.atmos is not None: - self.atmos.move_atmos() - - if tar_trace is not None: - for t in tar_trace: - if self.atmos.is_enable: - self.target.raytrace(t, tel=self.tel, atm=self.atmos, dms=self.dms) - else: - self.target.raytrace(t, tel=self.tel, dms=self.dms) - - if wfs_trace is not None: - for w in wfs_trace: - if self.atmos.is_enable: - self.wfs.raytrace(w, tel=self.tel, atm=self.atmos) - else: - self.wfs.raytrace(w, tel=self.tel) - - if not self.config.p_wfss[w].open_loop and self.dms is not None: - self.wfs.raytrace(w, dms=self.dms, ncpa=False, reset=False) - self.wfs.compute_wfs_image(w) - if do_control and self.rtc is not None: - for ncontrol in range(len(self.config.p_controllers)): - self.rtc.do_centroids(ncontrol) - self.rtc.do_control(ncontrol) - self.rtc.do_clipping(ncontrol) - - if apply_control: - self.rtc.apply_control(ncontrol) - - if self.cacao: - 
self.rtc.publish() - - if compute_tar_psf: - for tar_index in tar_trace: - self.target.comp_tar_image(tar_index) - self.target.comp_strehl(tar_index) - - self.iter += 1 - - def _print_strehl(self, monitoring_freq: int, iters_time: float, total_iters: int, *, - tar_index: int = 0): - """ Print the Strehl ratio SE and LE from a target on the terminal, the estimated remaining time and framerate - - Args: - monitoring_freq : (int) : Number of frames between two prints - - iters_time : (float) : time elapsed between two prints - - total_iters : (int) : Total number of iterations - - Kwargs: - tar_index : (int) : Index of the target. Default is 0 - """ - framerate = monitoring_freq / iters_time - strehl = self.target.get_strehl(tar_index) - etr = (total_iters - self.iter) / framerate - print("%d \t %.3f \t %.3f\t %.1f \t %.1f" % (self.iter + 1, strehl[0], - strehl[1], etr, framerate)) - - def loop(self, number_of_iter: int, *, monitoring_freq: int = 100, - compute_tar_psf: bool = True, **kwargs): - """ Perform the AO loop for iterations - - Args: - number_of_iter: (int) : Number of iteration that will be done - - Kwargs: - monitoring_freq: (int) : Monitoring frequency [frames]. Default is 100 - - compute_tar_psf : (bool) : If True (default), computes the PSF at each iteration - Else, only computes it each frames - """ - if not compute_tar_psf: - print("WARNING: Target PSF will be computed (& accumulated) only during monitoring" - ) - - print("----------------------------------------------------") - print("iter# | S.E. SR | L.E. 
SR | ETR (s) | Framerate (Hz)") - print("----------------------------------------------------") - # self.next(**kwargs) - t0 = time.time() - t1 = time.time() - if number_of_iter == -1: # Infinite loop - while (True): - self.next(compute_tar_psf=compute_tar_psf, **kwargs) - if ((self.iter + 1) % monitoring_freq == 0): - if not compute_tar_psf: - self.target.comp_tar_image(0) - self.target.comp_strehl(0) - self._print_strehl(monitoring_freq, time.time() - t1, number_of_iter) - t1 = time.time() - - for _ in range(number_of_iter): - self.next(compute_tar_psf=compute_tar_psf, **kwargs) - if ((self.iter + 1) % monitoring_freq == 0): - if not compute_tar_psf: - self.target.comp_tar_image(0) - self.target.comp_strehl(0) - self._print_strehl(monitoring_freq, time.time() - t1, number_of_iter) - t1 = time.time() - t1 = time.time() - print(" loop execution time:", t1 - t0, " (", number_of_iter, "iterations), ", - (t1 - t0) / number_of_iter, "(mean) ", number_of_iter / (t1 - t0), "Hz") - - def reset(self): - """ Reset the simulation to return to its original state - """ - self.atmos.reset_turbu() - self.wfs.reset_noise() - for tar_index in range(len(self.config.p_targets)): - self.target.reset_strehl(tar_index) - self.dms.reset_dm() - self.rtc.open_loop() - self.rtc.close_loop() diff --git a/shesha/supervisor/optimizers/__init__.py b/shesha/supervisor/optimizers/__init__.py index fd71cf2..3344a67 100644 --- a/shesha/supervisor/optimizers/__init__.py +++ b/shesha/supervisor/optimizers/__init__.py @@ -1,7 +1,7 @@ ## @package shesha.supervisor.optimizers ## @brief User layer for optimizing AO supervisor loop ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -34,7 +34,8 @@ # # You should have received a copy of the GNU Lesser General Public License along with COMPASS. # If not, see . 
-__all__ = ["modalBasis", "calibration"] +__all__ = ["modalBasis", "calibration", "modalGains"] from .modalBasis import ModalBasis from .calibration import Calibration +from .modalGains import ModalGains diff --git a/shesha/supervisor/optimizers/calibration.py b/shesha/supervisor/optimizers/calibration.py index 6676c6f..eb68492 100644 --- a/shesha/supervisor/optimizers/calibration.py +++ b/shesha/supervisor/optimizers/calibration.py @@ -1,7 +1,7 @@ ## @package shesha.supervisor.optimizers ## @brief User layer for optimizing AO supervisor loop ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -106,7 +106,7 @@ def apply_volts_and_get_slopes(self, controller_index: int, *, noise: bool = Fal self._wfs.compute_wfs_image(w, noise=noise) return self._rtc.compute_slopes(controller_index) - def do_imat_modal(self, controller_index : int, ampli : np.ndarray, modal_basis : np.ndarray, + def do_imat_modal(self, controller_index : int, ampli : np.ndarray, modal_basis : np.ndarray, *, noise : bool=False, nmodes_max : int=0, with_turbu : bool=False, push_pull : bool=False) -> np.ndarray: """ Computes an interaction matrix from provided modal basis @@ -128,7 +128,7 @@ def do_imat_modal(self, controller_index : int, ampli : np.ndarray, modal_basis push_pull : (bool) : If True, imat is computed as an average of push and pull ampli on each mode - Return: + Returns: modal_imat : (np.ndarray) : Modal interaction matrix """ modal_imat = np.zeros((self._config.p_controllers[controller_index].nslope, modal_basis.shape[1])) @@ -181,7 +181,7 @@ def do_imat_phase(self, controller_index: int, cube_phase: np.ndarray, *, noise wfs_index : (int) : WFS index. 
Default is 0 - Return: + Returns: phase_imat : (np.ndarray) : Phase interaction matrix """ imat_phase = np.zeros((cube_phase.shape[0], self._config.p_controllers[controller_index].nslope)) @@ -207,11 +207,11 @@ def do_imat_phase(self, controller_index: int, cube_phase: np.ndarray, *, noise return imat_phase - def compute_modal_residuals(self, projection_matrix : np.ndarray, + def compute_modal_residuals(self, projection_matrix : np.ndarray, *, selected_actus : np.ndarray=None) -> np.ndarray: """ Computes the modal residual coefficients of the residual phase. - /!\ It supposed that roket is enabled, and the associated GEO controller is index 1. + It is supposed that roket is enabled, and the associated GEO controller is index 1. Uses the projection matrix computed from compute_modes_to_volts_basis (modalBasis module) @@ -221,7 +221,7 @@ def compute_modal_residuals(self, projection_matrix : np.ndarray, Kwargs: selected_actus : (np.ndarray) : TODO : description - Return: + Returns: ai : (np.ndarray) : Modal coefficients """ try: @@ -237,4 +237,3 @@ def compute_modal_residuals(self, projection_matrix : np.ndarray, v3 = v[-2:] ai = projection_matrix.dot(np.concatenate((v2, v3))) * 1000.
return ai - diff --git a/shesha/supervisor/optimizers/modalBasis.py b/shesha/supervisor/optimizers/modalBasis.py index 7b1e088..cf99502 100644 --- a/shesha/supervisor/optimizers/modalBasis.py +++ b/shesha/supervisor/optimizers/modalBasis.py @@ -1,7 +1,7 @@ ## @package shesha.supervisor.optimizers ## @brief User layer for optimizing AO supervisor loop ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -92,7 +92,7 @@ def compute_influ_basis(self, dm_index: int) -> csr_matrix: Args: dm_index : (int) : Index of the DM - Return: + Returns: influ_sparse : (csr_matrix) : influence function phases """ return basis.compute_dm_basis(self._dms._dms.d_dms[dm_index], @@ -111,7 +111,7 @@ def compute_modes_to_volts_basis(self, modal_basis_type: str, *, merged: bool = nbpairs : (int) : TODO description - Return: + Returns: modal_basis : (np.ndarray) : modes to volts matrix projection_matrix : (np.ndarray) : volts to modes matrix (None if "KL") @@ -168,7 +168,7 @@ def compute_btt_basis(self, *, merged: bool = False, nbpairs: int = None, If True, returns delta = IF.T.dot(IF) / N instead of P - Return: + Returns: Btt : (np.ndarray) : Btt modes to volts matrix projection_matrix : (np.ndarray) : volts to Btt modes matrix @@ -221,7 +221,7 @@ def compute_merged_influ(self, dm_index : int, *, nbpairs: int = None) -> np.nda Kwargs: nbpairs : (int) : Default is None. 
TODO : description - Return: + Returns: pairs : (np.ndarray) : TODO description discard : (list) : TODO description @@ -328,7 +328,7 @@ def compute_merged_influ(self, dm_index : int, *, nbpairs: int = None) -> np.nda def compute_btt_petal(self) -> np.ndarray: """ Computes a Btt modal basis with Pistons filtered - Return: + Returns: Btt : (np.ndarray) : Btt modes to volts matrix P : (np.ndarray) : volts to Btt modes matrix @@ -351,7 +351,7 @@ def compute_phase_to_modes(self, modal_basis: np.ndarray) -> np.ndarray: Args: modal_basis : (np.ndarray) : Modal basis matrix - Return: + Returns: phase_to_modes : (np.ndarray) : phase to modes matrix """ nbmode = modal_basis.shape[1] @@ -365,5 +365,6 @@ def compute_phase_to_modes(self, modal_basis: np.ndarray) -> np.ndarray: phase = self._target.get_tar_phase(0, pupil=True) # Normalisation pour les unites rms en microns !!! norm = np.sqrt(np.sum((phase)**2) / S) + if norm == 0: norm = 1 phase_to_modes[i] = phase / norm return phase_to_modes diff --git a/shesha/supervisor/optimizers/modalGains.py b/shesha/supervisor/optimizers/modalGains.py new file mode 100644 index 0000000..8228299 --- /dev/null +++ b/shesha/supervisor/optimizers/modalGains.py @@ -0,0 +1,215 @@ +import numpy as np + +class ModalGains(object): + """ This optimizer class handles the modal gain optimization related operations + using the CLOSE algorithm. Should be used with a modal integrator command law. 
+ + Attributes: + _config : (config) : Configuration parameters module + + _rtc : (RtcCompass) : RtcCompass instance + + _ntotact : (int) : total number of actuators used in the simulation + + modal_basis : (np.ndarray) : KL2V modal basis + + cmat_modal : (np.ndarray) : modal command matrix + + _mask : (np.ndarray) : mask array (containig 0 or 1) filtering the modes + + _ac_idx : (int) : autocorrelation index + + _lf : (float) : learning factor of the autocorrelation computation + + _lfdownup : (float) : learning factors for modal gain update + + _trgt : (float) : target value for the autocorrelation optimization + + _initial_gain : (float) : initial value for the modal gains (same for all modes) + + _modal_meas : (list) : list containing previous modal measurements to + be used for CLOSE optimization + + _ac_est_0 : (np.ndarray) : autocorrelation estimation for no frames delay + + _ac_est_dt : (np.ndarray) : autocorrelation estimation for _ac_idx delay + + mgains : (np.ndarray) : modal gains that will be updated + + close_iter : (int) : number of iteration of CLOSE optimizations + """ + + def __init__(self, config, rtc): + """ Instantiate a ModalGains optimizer object. + + Args: + config : (config module) : Parameters configuration structure module + + rtc : (sutraWrap.Rtc) : Sutra rtc instance + """ + self._config = config + self._rtc = rtc + self._ntotact = config.p_controllers[0].nactu + # parameters of the CLOSE optimization + self.modal_basis = None # carrée !! 
+ self.cmat_modal = None + self._mask = np.ones(self._ntotact) + self._ac_idx = int(config.p_controllers[0].delay * 2 + 1) + self._lf = config.p_controllers[0].get_close_learning_factor() + self._lfdownup = np.array(config.p_controllers[0].get_lfdownup()) + self._trgt = config.p_controllers[0].get_close_target() + self._initial_gain = config.p_controllers[0].get_mgain_init() + # computation intermediaries + self._modal_meas = [] + self._ac_est_0 = np.zeros((self._ntotact), dtype=np.float32) + self._ac_est_dt = np.zeros((self._ntotact), dtype=np.float32) + # out variables + self.mgains = np.ones(self._ntotact) * self._initial_gain + self.close_iter = 0 + if (self._config.p_controllers[0].close_opti): + self._rtc.set_modal_gains(0, self.mgains) + print(f"total number of actuators {self._ntotact}") + print(f"Autocorrelation index for CLOSE optimization is {self._ac_idx}") + + def update_modal_meas(self): + """Save the modal measurement of the current iter""" + if self.cmat_modal is None or self.modal_basis is None : + raise Exception("Modal basis and cmat modal should be not None") + slp = self._rtc.get_slopes(0) + self._modal_meas.append(self.cmat_modal.dot(slp)) + if len(self._modal_meas) == self._ac_idx + 1: + self._modal_meas.pop(0) + + def update_mgains(self): + """Compute a new modal gains + This function computes and updates the modal gains according to the + CLOSE algorithm. 
+ """ + ctrl_modes = self._mask != 0 # where modes are controlled + if self.cmat_modal is None or self.modal_basis is None : + raise Exception("Modal basis and cmat modal should be not None") + # get new measurement + slp = self._rtc.get_slopes(0) + temp_modal_meas = self.cmat_modal.dot(slp) + self._modal_meas.append(temp_modal_meas) + # estimate autocorrelation + if np.all(self._ac_est_0 == 0): + self._ac_est_0[ctrl_modes] = self._modal_meas[-1][ctrl_modes] ** 2 + else: + self._ac_est_0[ctrl_modes] = self._ac_est_0[ctrl_modes] * (1 - self._lf) + self._modal_meas[-1][ctrl_modes] ** 2 * self._lf + if len(self._modal_meas) == self._ac_idx + 1: + if np.all(self._ac_est_dt == 0): + self._ac_est_dt[ctrl_modes] = self._modal_meas[0][ctrl_modes] * self._modal_meas[-1][ctrl_modes] + else: + self._ac_est_dt[ctrl_modes] = self._ac_est_dt[ctrl_modes] * (1 - self._lf) \ + + self._modal_meas[0][ctrl_modes] * self._modal_meas[-1][ctrl_modes] * self._lf + # compute new modal gains + x = self._ac_est_dt[ctrl_modes] / self._ac_est_0[ctrl_modes] - self._trgt + sign_ac = (x > 0).astype(np.int8) + self.mgains[ctrl_modes] = self.mgains[ctrl_modes] * (1 + self._lfdownup[sign_ac] * x) + self.mgains[ctrl_modes] += 0.01 + self._modal_meas.pop(0) + # set modal gains + self._rtc.set_modal_gains(0, self.mgains) + self.close_iter +=1 + + def reset_close(self): + """Reset modal gain and computation variables""" + self.mgains = np.ones(self._ntotact) * self._mask * self._initial_gain + self._ac_est_0 = np.zeros((self._ntotact), dtype=np.float32) + self._ac_est_dt = np.zeros((self._ntotact), dtype=np.float32) + self._modal_meas = [] + self.close_iter = 0 + self._rtc.set_modal_gains(0, self.mgains) + self.adapt_modal_gains(False) + + + def reset_mgains(self): + """Reset the modal gains only""" + self.mgains = np.ones(self._ntotact) * self._mask * self._initial_gain + + def adapt_modal_gains(self, flag): + """Set the flag indicating to use CLOSE optimization. 
+ + Args: + flag : (bool) : If true, update the modal gains value according to CLOSE algo + """ + self._config.p_controllers[0].set_close_opti(flag) + + def set_modal_basis(self, modal_basis): + """Set the modal basis to be used in CLOSE calculation. + + Args: + modal_basis : (np.ndarray) : modal basis (KL2V) to be used (square) + """ + if (modal_basis.shape[0] != modal_basis.shape[1]): + raise Exception("Modal basis should be square matrix") + self.modal_basis = modal_basis + self._rtc.set_E_matrix(0, modal_basis) + print("Modal basis is set") + + def get_modal_basis(self): + """Get the modal basis + + Returns: + self.modal_basis : (np.ndarray) : modal basis (KL2V) used in the optimizer + """ + return self.modal_basis + + def set_cmat_modal(self, cmat_modal): + """Set cmat modal + + Args: + cmat_modal : (np.ndarray) : modal command matrix + """ + self.cmat_modal = cmat_modal + self._rtc.set_command_matrix(0, cmat_modal) + print("cmat_modal is set") + + def get_modal_gains(self): + """Get the modal gains + + Returns: + self.mgains : (np.ndarray) : modal gains + """ + return self.mgains + + def set_mask(self, mask): + """Set the mode mask + + Args: + mask : (np.ndarray) : mask array (containing 0 or 1) filtering the modes + """ + self._mask = mask + self.mgains[mask == 0] = 0 + + def set_initial_gain(self, gain): + """Set the initial value for modal gains. This function reinitializes the modal gains.
+ + Args: + gain: (float) : initial value for modal gains + """ + self._initial_gain = gain + self.mgains = np.ones(self._ntotact) * self._initial_gain + self.mgains[self._mask == 0] = 0 + self._rtc.set_modal_gains(0, self.mgains) + self._config.p_controllers[0].set_mgain_init(gain) + + def set_config(self, p, qplus, qminus, target): + """Set the 4 parameters for the CLOSE optimization loop + + Args: + p: (float) : learning factor for autocorrelation + + qplus: (float) : learning factor for mgain optimization when higher than target + + qminus: (float) : learning factor for mgain optimization when lower than target + + target: (float) : autocorrelation target for optimization + """ + self._lf = p + self._config.p_controllers[0].set_close_learning_factor(p) + self._lfdownup = np.array([qplus, qminus]) + self._config.p_controllers[0].set_lfdownup(qplus, qminus) + self._trgt = target + self._config.p_controllers[0].set_close_target(target) \ No newline at end of file diff --git a/shesha/sutra_wrap.py b/shesha/sutra_wrap.py index c1f2d71..dcd081b 100644 --- a/shesha/sutra_wrap.py +++ b/shesha/sutra_wrap.py @@ -1,40 +1,3 @@ -## @package shesha.sutra_wrap -## @brief Sutra import handler -## @author COMPASS Team -## @version 5.0.0 -## @date 2020/05/18 -## @copyright GNU Lesser General Public License -# -# This file is part of COMPASS -# -# Copyright (C) 2011-2019 COMPASS Team -# All rights reserved. -# Distributed under GNU - LGPL -# -# COMPASS is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser -# General Public License as published by the Free Software Foundation, either version 3 of the License, -# or any later version. -# -# COMPASS: End-to-end AO simulation tool using GPU acceleration -# The COMPASS platform was designed to meet the need of high-performance for the simulation of AO systems. 
-# -# The final product includes a software package for simulating all the critical subcomponents of AO, -# particularly in the context of the ELT and a real-time core based on several control approaches, -# with performances consistent with its integration into an instrument. Taking advantage of the specific -# hardware architecture of the GPU, the COMPASS tool allows to achieve adequate execution speeds to -# conduct large simulation campaigns called to the ELT. -# -# The COMPASS platform can be used to carry a wide variety of simulations to both testspecific components -# of AO of the E-ELT (such as wavefront analysis device with a pyramid or elongated Laser star), and -# various systems configurations such as multi-conjugate AO. -# -# COMPASS is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the -# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. -# See the GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License along with COMPASS. -# If not, see . 
- import importlib diff --git a/shesha/util/__init__.py b/shesha/util/__init__.py index e9a6d38..a869484 100644 --- a/shesha/util/__init__.py +++ b/shesha/util/__init__.py @@ -1,7 +1,7 @@ ## @package shesha.util ## @brief Utilities functions ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # diff --git a/shesha/util/dm_util.py b/shesha/util/dm_util.py index a8b71fe..4b4d4e0 100644 --- a/shesha/util/dm_util.py +++ b/shesha/util/dm_util.py @@ -1,7 +1,7 @@ ## @package shesha.util.dm_util ## @brief Utilities function for DM geometry initialization ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -48,7 +48,7 @@ def dim_dm_support(cent: float, extent: int, ssize: int): """ Compute the DM support dimensions - :parameters: + Args: cent : (float): center of the pupil @@ -70,7 +70,7 @@ def dim_dm_patch(pupdiam: int, diam: float, type: bytes, alt: float, xpos_wfs: List[float], ypos_wfs: List[float]): """ compute patchDiam for DM - :parameters: + Args: pupdiam: (int) : pupil diameter @@ -107,7 +107,7 @@ def createSquarePattern(pitch: float, nxact: int): Creates a list of M=nxact^2 actuator positions spread over an square grid. Coordinates are centred around (0,0). - :parameters: + Args: pitch: (float) : distance in pixels between 2 adjacent actus @@ -132,7 +132,7 @@ def createHexaPattern(pitch: float, supportSize: int): Coordinates are centred around (0,0). The support that limits the grid is a square [-supportSize/2, supportSize/2]. - :parameters: + Args: pitch: (float) : distance in pixels between 2 adjacent actus @@ -166,7 +166,7 @@ def createDoubleHexaPattern(pitch: float, supportSize: int, pupAngleDegree: floa Coordinates are centred around (0,0). The support of the grid is a square [-supportSize/2,vsupportSize/2]. 
- :parameters: + Args: pitch: (float) : distance in pixels between 2 adjacent actus supportSize: (int) : size in pixels of the support over which the coordinate list @@ -250,7 +250,7 @@ def select_actuators(xc: np.ndarray, yc: np.ndarray, nxact: int, pitch: int, cob """ Select the "valid" actuators according to the system geometry - :parameters: + Args: xc: actuators x positions (origine in center of mirror) @@ -303,7 +303,7 @@ def select_actuators(xc: np.ndarray, yc: np.ndarray, nxact: int, pitch: int, cob def make_zernike(nzer: int, size: int, diameter: int, xc=-1., yc=-1., ext=0): """Compute the zernike modes - :parameters: + Args: nzer: (int) : number of modes @@ -388,7 +388,7 @@ def zernumero(zn: int): Returns the radial degree and the azimuthal number of zernike number zn, according to Noll numbering (Noll, JOSA, 1976) - :parameters: + Args: zn: (int) : zernike number diff --git a/shesha/util/hdf5_util.py b/shesha/util/hdf5_util.py index 1e9f088..9849442 100644 --- a/shesha/util/hdf5_util.py +++ b/shesha/util/hdf5_util.py @@ -1,7 +1,7 @@ ## @package shesha.util.hdf5_util ## @brief Functions for handling the database system ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -48,32 +48,24 @@ def updateParamDict(pdict, pClass, prefix): Prefix must be set to define the key value of the new dict entries """ if (isinstance(pClass, list)): - params = [ - i for i in dir(pClass[0]) - if (not i.startswith('_') and not i.startswith('set_') and - not i.startswith('get_')) - ] + params = pClass[0].__dict__.keys() for k in params: pdict.update({ - prefix + k: [ - p.__dict__[prefix + k].encode("utf8") if isinstance( - p.__dict__[prefix + k], str) else - p.__dict__[prefix + k] for p in pClass + k: [ + p.__dict__[k].encode("utf8") if isinstance( + p.__dict__[k], str) else + p.__dict__[k] for p in pClass ] }) else: - params = [ - i for i in dir(pClass) - if (not i.startswith('_') and not 
i.startswith('set_') and - not i.startswith('get_')) - ] + params = pClass.__dict__.keys() for k in params: - if isinstance(pClass.__dict__[prefix + k], str): - pdict.update({prefix + k: pClass.__dict__[prefix + k].encode("utf8")}) + if isinstance(pClass.__dict__[k], str): + pdict.update({k: pClass.__dict__[k].encode("utf8")}) else: - pdict.update({prefix + k: pClass.__dict__[prefix + k]}) + pdict.update({k: pClass.__dict__[k]}) def params_dictionary(config): @@ -94,7 +86,7 @@ def params_dictionary(config): updateParamDict(param_dict, config.p_tel, "_Param_tel__") if config.p_atmos is not None: updateParamDict(param_dict, config.p_atmos, "_Param_atmos__") - if config.p_target is not None: + if config.p_targets is not None: updateParamDict(param_dict, config.p_targets, "_Param_target__") param_dict.update({"ntargets": len(config.p_targets)}) if config.p_wfss is not None: @@ -139,7 +131,16 @@ def create_file_attributes(filename, param_dict): ] else: attr = param_dict[i] - f.attrs.create(i, attr) + if(isinstance(attr, np.ndarray)): + save_hdf5(filename, i, attr) + elif(isinstance(attr, list)): + if(isinstance(attr[0], np.ndarray)): + for k,data in enumerate(attr): + save_hdf5(filename, i + str(k), data) + else: + f.attrs.create(i, attr) + else: + f.attrs.create(i, attr) f.attrs.create("validity", False) print(filename, "initialized") f.close() @@ -176,7 +177,7 @@ def initDataBase(savepath, param_dict): """ Initialize and create the database for all the saved matrices. This database will be placed on the top of the savepath and be named matricesDataBase.h5. - :parameters: + Args: savepath : (str) : path to the data repertory @@ -197,7 +198,7 @@ def initDataBase(savepath, param_dict): def updateDataBase(h5file, savepath, matrix_type): """ Update the database adding a new row to the matrix_type database. 
- :parameters: + Args: h5file : (str) : path to the new h5 file to add @@ -268,7 +269,7 @@ def checkMatricesDataBase(savepath, config, param_dict): to the database during the simulation. If the database doesn't exist, this function creates it. - :parameters: + Args: savepath : (str) : path to the data repertory @@ -301,7 +302,7 @@ def checkTurbuParams(savepath, config, pdict, matricesToLoad): Since all the turbulence matrices are computed together, we only check the parameters for the A matrix : if we load A, we load B, istx and isty too. - :parameters: + Args: config : (module) : simulation parameters @@ -358,7 +359,7 @@ def checkControlParams(savepath, config, pdict, matricesToLoad): Since all the controller matrices are computed together, we only check the parameters for the imat matrix : if we load imat, we load eigenv and U too. - :parameters: + Args: config : (module) : simulation parameters @@ -422,7 +423,7 @@ def checkDmsParams(savepath, config, pdict, matricesToLoad): Since all the dms matrices are computed together, we only check the parameters for the pztok matrix : if we load pztok, we load pztnok too. 
- :parameters: + Args: config : (module) : simulation parameters @@ -664,7 +665,7 @@ def writeHdf5SingleDataset(filename, data, datasetName="dataset"): def readHdf5SingleDataset(filename, datasetName="dataset"): """ Read a single dataset from an hdf5 file - :parameters: + Args: filename: (str) : name of the file to read from @@ -680,7 +681,7 @@ def readHdf5SingleDataset(filename, datasetName="dataset"): def load_AB_from_dataBase(database, ind): """ Read and return A, B, istx and isty from the database - :parameters: + Args: database: (dict): dictionary containing paths to matrices to load @@ -700,7 +701,7 @@ def load_AB_from_dataBase(database, ind): def save_AB_in_database(k, A, B, istx, isty): """ Save A, B, istx and isty in the database - :parameters: + Args: ind: @@ -728,7 +729,7 @@ def save_AB_in_database(k, A, B, istx, isty): def load_dm_geom_from_dataBase(database, ndm): """ Read and return the DM geometry - :parameters: + Args: database: (dict): dictionary containing paths to matrices to load @@ -750,7 +751,7 @@ def load_dm_geom_from_dataBase(database, ndm): def save_dm_geom_in_dataBase(ndm, influpos, ninflu, influstart, i1, j1, ok): """ Save the DM geometry in the database - :parameters: + Args: ndm: @@ -782,7 +783,7 @@ def save_dm_geom_in_dataBase(ndm, influpos, ninflu, influstart, i1, j1, ok): def load_imat_from_dataBase(database): """ Read and return the imat - :parameters: + Args: database: (dict): dictionary containing paths to matrices to load """ @@ -797,7 +798,7 @@ def load_imat_from_dataBase(database): def save_imat_in_dataBase(imat): """ Save the DM geometry in the database - :parameters: + Args: imat: (np.ndarray): imat to save """ diff --git a/shesha/util/influ_util.py b/shesha/util/influ_util.py index bed27e0..161e0dc 100644 --- a/shesha/util/influ_util.py +++ b/shesha/util/influ_util.py @@ -1,7 +1,7 @@ ## @package shesha.util.influ_util ## @brief Computation of the influence functions used by the DM ## @author COMPASS Team -## @version 5.0.0 
+## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -44,7 +44,7 @@ def besel_orth(m, n, phi, r): """ TODO: docstring - :parameters: + Args: m: @@ -72,7 +72,7 @@ def besel_orth(m, n, phi, r): def bessel_influence(xx, yy, type_i=PatternType.SQUARE): """ TODO: docstring - :parameters: + Args: xx: @@ -142,7 +142,7 @@ def bessel_influence(xx, yy, type_i=PatternType.SQUARE): def makeRigaut(pitch: float, coupling: float, x=None, y=None): """ Compute 'Rigaut-like' influence function - :parameters: + Args: pitch: (float) : pitch of the DM expressed in pixels @@ -189,7 +189,7 @@ def makeRigaut(pitch: float, coupling: float, x=None, y=None): def makeRadialSchwartz(pitch: float, coupling: float, x=None, y=None): """ Compute radial Schwartz influence function - :parameters: + Args: pitch: (float) : pitch of the DM expressed in pixels @@ -221,7 +221,7 @@ def makeRadialSchwartz(pitch: float, coupling: float, x=None, y=None): def makeSquareSchwartz(pitch: float, coupling: float, x=None, y=None): """ Compute Square Schwartz influence function - :parameters: + Args: pitch: (float) : pitch of the DM expressed in pixels @@ -258,7 +258,7 @@ def makeBlacknutt(pitch: float, coupling: float, x=None, y=None): La variable a ete laissee dans le code juste pour compatibilité avec les autres fonctions, mais elle n'est pas utilisee. - :parameters: + Args: pitch: (float): pitch of the DM expressed in pixels @@ -293,7 +293,7 @@ def makeBlacknutt(pitch: float, coupling: float, x=None, y=None): def makeGaussian(pitch: float, coupling: float, x=None, y=None): """ Compute Gaussian influence function. 
Coupling parameter is not taken into account - :parameters: + Args: pitch: (float) : pitch of the DM expressed in pixels @@ -337,7 +337,7 @@ def makeBessel(pitch: float, coupling: float, x: np.ndarray = None, y: np.ndarra patternType: bytes = PatternType.SQUARE): """ Compute Bessel influence function - :parameters: + Args: pitch: (float) : pitch of the DM expressed in pixels diff --git a/shesha/util/iterkolmo.py b/shesha/util/iterkolmo.py index 161421f..da6d327 100644 --- a/shesha/util/iterkolmo.py +++ b/shesha/util/iterkolmo.py @@ -1,7 +1,7 @@ ## @package shesha.util.iterkolmo ## @brief Stencil and matrices computation for the creation of a turbulent screen ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -101,7 +101,7 @@ def stencil_size_array(size): Compute the size of a stencil, given the screen size - :parameters: + Args: size: (np.ndarray[ndim=1,dtype=np.int64_t]) :screen size """ @@ -367,8 +367,8 @@ def macdo_x56(x, k=10): """ Computation of the function f(x) = x^(5/6)*K_{5/6}(x) using a series for the esimation of K_{5/6}, taken from Rod Conan thesis : - K_a(x)=1/2 \sum_{n=0}^\infty \frac{(-1)^n}{n!} - \left(\Gamma(-n-a) (x/2)^{2n+a} + \Gamma(-n+a) (x/2)^{2n-a} \right) , + K_a(x)=1/2 \\sum_{n=0}^\\infty \\frac{(-1)^n}{n!} + \\left(\\Gamma(-n-a) (x/2)^{2n+a} + \\Gamma(-n+a) (x/2)^{2n-a} \\right) , with a = 5/6. 
Setting x22 = (x/2)^2, setting uda = (1/2)^a, and multiplying by x^a, diff --git a/shesha/util/kl_util.py b/shesha/util/kl_util.py index 35eba7e..33167f7 100644 --- a/shesha/util/kl_util.py +++ b/shesha/util/kl_util.py @@ -1,7 +1,7 @@ ## @package shesha.util.kl_util ## @brief Functions for DM KL initialization ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -45,7 +45,7 @@ def make_radii(cobs: float, nr: int) -> float: """ TODO: docstring - :parameters: + Args: cobs: (float) : central obstruction @@ -68,7 +68,7 @@ def make_kernels(cobs: float, nr: int, radp: np.ndarray, kl_type: bytes, TODO: - :parameters: + Args: cobs : (float): central obstruction @@ -117,7 +117,7 @@ def make_kernels(cobs: float, nr: int, radp: np.ndarray, kl_type: bytes, def piston_orth(nr: int) -> np.ndarray: """ TODO: docstring - :parameters: + Args: nr: @@ -139,7 +139,7 @@ def piston_orth(nr: int) -> np.ndarray: def make_azimuth(nord: int, npp: int) -> np.ndarray: """ TODO: docstring - :parameters: + Args: nord: @@ -173,7 +173,7 @@ def radii(nr: int, npp: int, cobs: float) -> np.ndarray: TODO: - :parameters: + Args: nr: @@ -204,7 +204,7 @@ def polang(r: np.ndarray) -> np.ndarray: TODO: - :parameters: + Args: r: @@ -236,7 +236,7 @@ def setpincs(ax: np.ndarray, ay: np.ndarray, px: np.ndarray, py: np.ndarray, TODO: - :parameters: + Args: ax: @@ -312,7 +312,7 @@ def pcgeom(nr, npp, cobs, ncp, ncmar): TODO: - :parameters: + Args: nr: @@ -389,7 +389,7 @@ def set_pctr(dim: int, nr, npp, nkl: int, cobs: float, nord, ncmar=None, ncp=Non the gkl_bas routine. TODO: - :parameters: + Args: dim: @@ -448,7 +448,7 @@ def gkl_fcom(kers: np.ndarray, cobs: float, nf: int): a bunch of indices used to recover the modes in cartesian coordinates (nord, npo and ordd). 
- :parameters: + Args: kerns : (np.ndarray[ndim= ,dtype=np.float32]) : diff --git a/shesha/util/make_apodizer.py b/shesha/util/make_apodizer.py index 02d4a01..f304a62 100644 --- a/shesha/util/make_apodizer.py +++ b/shesha/util/make_apodizer.py @@ -1,7 +1,7 @@ ## @package shesha.util.make_apodizer ## @brief make_apodizer function ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -44,7 +44,7 @@ def make_apodizer(dim, pupd, filename, angle): """TODO doc - :parameters: + Args: (int) : im: diff --git a/shesha/util/make_pupil.py b/shesha/util/make_pupil.py index c7e448b..568f3c4 100644 --- a/shesha/util/make_pupil.py +++ b/shesha/util/make_pupil.py @@ -1,7 +1,7 @@ ## @package shesha.util.make_pupil ## @brief Pupil creation functions ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -50,7 +50,7 @@ def make_pupil(dim, pupd, tel, xc=-1, yc=-1, real=0, halfSpider=False): """Initialize the system pupil - :parameters: + Args: dim: (long) : = p_geom.pupdiam @@ -122,7 +122,7 @@ def make_pupil_generic(dim, pupd, t_spiders=0.01, spiders_type=SpiderType.SIX, x """ Initialize the system pupil - :parameters: + Args: dim: (long) : linear size of ??? @@ -204,7 +204,7 @@ def make_VLT(dim, pupd, tel): """ Initialize the VLT pupil - :parameters: + Args: dim: (long) : linear size of ??? @@ -246,7 +246,7 @@ def make_EELT(dim, pupd, tel, N_seg=-1): """ Initialize the EELT pupil - :parameters: + Args: dim: (long) : linear size of ??? @@ -368,7 +368,7 @@ def make_EELT(dim, pupd, tel, N_seg=-1): def make_phase_ab(dim, pupd, tel, pup=None, xc=-1, yc=-1, real=0, halfSpider=False): """Compute the EELT M1 phase aberration - :parameters: + Args: dim: (long) : linear size of ??? 
@@ -494,14 +494,11 @@ def make_phase_ab(dim, pupd, tel, pup=None, xc=-1, yc=-1, real=0, halfSpider=Fal """ - - _____ _ _____ ____ ___ ____ ___ -| ____| | |_ _| | _ \|_ _/ ___/ _ \ -| _| | | | | | |_) || | | | | | | -| |___| |___| | | _ < | | |__| |_| | -|_____|_____|_| |_| \_\___\____\___/ - - +ooooooooooo ooooo ooooooooooo oooooooooo ooooo oooooooo8 ooooooo + 888 88 888 88 888 88 888 888 888 o888 88 o888 888o + 888ooo8 888 888 888oooo88 888 888 888 888 + 888 oo 888 o 888 888 88o 888 888o oo 888o o888 +o888ooo8888 o888ooooo88 o888o o888o 88o8 o888o 888oooo88 88ooo88 """ @@ -843,16 +840,22 @@ def generateCoordSegments(D, rot, pitch=1.244683637214, nseg=33, inner_rad=4.1, segments of M1. Result is a tuple of arrays(6, 798). - Parameters - ----------------------------------------- - D: (float) : pupil diameter in meters (it must be set to 40.0 m for the ELT) - rot: (float) : pupil rotation angle in radians - pitch: (float): Segment pitch [meters] - nseg: (int) : number of segments across the diameter - inner_rad : (float): Inner radius [meters] - outer_rad : (float): Outer radius [meters] - R : (float): Curvature radius of the M1 - nominalD: (float): diameter for nominal pupil + Args: + D: (float) : pupil diameter in meters (it must be set to 40.0 m for the ELT) + + rot: (float) : pupil rotation angle in radians + + pitch: (float): Segment pitch [meters] + + nseg: (int) : number of segments across the diameter + + inner_rad : (float): Inner radius [meters] + + outer_rad : (float): Outer radius [meters] + + R : (float): Curvature radius of the M1 + + nominalD: (float): diameter for nominal pupil """ V3 = np.sqrt(3) @@ -918,11 +921,12 @@ def gendron(): """ mymsg = [ - "\n\n\n\n", "__ ___ ____ _ _ ___ _ _ ___ _", - "\ \ / / \ | _ \| \ | |_ _| \ | |/ ___|", - " \ \ /\ / / _ \ | |_) | \| || || \| | | _ ", - " \ V V / ___ \| _ <| |\ || || |\ | |_| |", - " \_/\_/_/ \_\_| \_\_| \_|___|_| \_|\____|", " \n", + "\n\n\n\n", + # "__ ___ ____ _ _ ___ _ _ ___ _", + # "\ \ / / \ | _ 
\| \ | |_ _| \ | |/ ___|", + # " \ \ /\ / / _ \ | |_) | \| || || \| | | _ ", + # " \ V V / ___ \| _ <| |\ || || |\ | |_| |", + # " \_/\_/_/ \_\_| \_\_| \_|___|_| \_|\____|", " \n", "Vous utilisez un telescope de type ELT. Ce telescope", "est fait pour etre utilise avec un diametre de 40 m.", " ", "Or, vous utilisez un diametre different. Cela signifie", diff --git a/shesha/util/psfMap.py b/shesha/util/psfMap.py index df0f397..e5add20 100644 --- a/shesha/util/psfMap.py +++ b/shesha/util/psfMap.py @@ -1,7 +1,7 @@ ## @package shesha.util.psfMap ## @brief class PSF_map ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # diff --git a/shesha/util/rtc_util.py b/shesha/util/rtc_util.py index 1e6701c..e513e85 100644 --- a/shesha/util/rtc_util.py +++ b/shesha/util/rtc_util.py @@ -1,7 +1,7 @@ ## @package shesha.util.rtc_util ## @brief Some utilities functions for RTC ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -41,7 +41,7 @@ def create_interp_mat(dimx: int, dimy: int): """TODO doc - :parameters: + Args: dimx: (int) : @@ -66,7 +66,7 @@ def create_interp_mat(dimx: int, dimy: int): def centroid_gain(E, F): """ Returns the mean centroid gain - :parameters: + Args: E : (np.array(dtype=np.float32)) : measurements from WFS diff --git a/shesha/util/tao/__init__.py b/shesha/util/tao/__init__.py new file mode 100644 index 0000000..7d02a2b --- /dev/null +++ b/shesha/util/tao/__init__.py @@ -0,0 +1,149 @@ +from importlib import reload +import numpy as np +from astropy.io import fits + +from shesha.ao import imats +from shesha.ao import cmats + +from shesha.util.tao import writer +from shesha.util.tao import ltao +from shesha.util.tao import mcao +reload(ltao) +reload(mcao) + +TILE_SIZE="1000" + +STARPU_FLAGS="" + +#variable necessary to run TAO +TAO_SETTINGS={"SCHED":"dmdas", + "STARPU_FLAGS":"", + "GPU_IDS":0, + 
"TILE_SIZE":TILE_SIZE, + "INPUT_PATH":0, + "TAO_PATH":0 + } + + +def check(): + """Checks that variable are initialized + """ + stop=0 + try : + if (not isinstance(TAO_SETTINGS["SCHED"], str)): + print("you must select a scheduler (dmda,dmdas,dmdar...)\n\tex: TAO_SETTINGS[\"SCHED\"]=\"dmdas\"") + stop=1 + except: + print("you must select a scheduler (dmda,dmdas,dmdar...)\n\tex: TAO_SETTINGS[\"SCHED\"]=\"dmdas\"") + stop=1 + try : + if( not isinstance(TAO_SETTINGS["GPU_IDS"], str)): + print("you must define the GPUs to use as a string \n\tex:TAO_SETTINGS[\"GPU_IDS\"]=\"1,2\"") + stop=1 + except: + print("you must define the GPUs to use as a string \n\tex:TAO_SETTINGS[\"GPU_IDS\"]=\"1,2\"") + stop=1 + try : + if( not isinstance(TAO_SETTINGS["INPUT_PATH"], str)): + print("you must define the location of the system parameters \n\tex: TAO_SETTINGS[\"INPUT_PATH\"]=\"~/workspace/compass/params\"") + stop=1 + except: + print("you must define the location of the system parameters \n\tex: TAO_SETTINGS[\"INPUTPATH\"]=\"~/workspace/compass/params\"") + stop=1 + try : + if( not isinstance(TAO_SETTINGS["TAO_PATH"], str)): + print("you must define the location of the tao executables \n\tex: TAO_SETTINGS[\"TAO_PATH\"]=\"~/workspace/tao/install/bin\"") + stop=1 + except: + print("you must define the location of the tao executables \n\tex: TAO_SETTINGS[\"TAOPATH\"]=\"~/workspace/tao/install/bin\"") + stop=1 + try : + STARPU_FLAGS + except: + STARPU_FLAGS="" + + return stop + + +def init(sup, mod, *,wfs="all", dm_use_tt=False, n_filt=None): + """ Set up the compass loop + + set the interaction matrix, loop gain and write parameter files for TAO + + Args: + + sup : (CompassSupervisor) : current supervisor + + mod : (module) : AO mode requested (among: ltao , mcao) + + Kwargs: + wfs : (str) : (optional), default "all" wfs used by tao ( among "all", "lgs", "ngs") + + dm_use_tt : (bool) :(optional), default False using a TT DM + + n_filt : (int) : (optional), default None number of meta 
interaction matrix singular values filtered out + """ + + #setting open loop + sup.rtc._rtc.d_control[0].set_polc(True) + + if n_filt is None: + mod.init(TAO_SETTINGS, sup, dm_use_tt=dm_use_tt, wfs=wfs) + else: + mod.init(TAO_SETTINGS, sup, dm_use_tt=dm_use_tt, wfs=wfs, n_filt=n_filt) + +def reconstructor(mod): + """ Compute the TAO reconstructor for a given AO mode + + Args: + mod : (module) : AO mode requested (among: ltao , mcao) + """ + return mod.reconstructor(TAO_SETTINGS) + + +def run(sup, mod, *, n_iter=1000, initialisation=True, reset=True, wfs="all", + dm_use_tt=False, n_filt=None): + """ Computes a tao reconstructor and run a compass loop with it + + Args: + sup : (CompassSupervisor) : current supervisor + + mod : (module) : AO mode requested (among: ltao , mcao) + + Kwargs + n_iter : (int) : (optional), default 1000 number of iteration of the ao loop + + initialisation : (bool) : (optional), default True initialise tao (include comptation of meta matrices of interaction/command) + + reset : (bool) : (optional), default True reset the supervisor before the loop + + wfs : (str) : (optional), default "all" wfs used by tao ( among "all", "lgs", "ngs") + + dm_use_tt : (bool) :(optional), default False using a TT DM + + n_filt : (int) : (optional), default None number of meta interaction matrix singular values filtered out + """ + check() + + #setting open loop + sup.rtc._rtc.d_control[0].set_polc(True) + + #if generic: need to update imat in controller + if(np.abs(np.array(sup.rtc._rtc.d_control[0].d_imat)).max()==0): + #imat not set yet for controller + sup.rtc._rtc.d_control[0].set_imat(sup.config.p_controllers[0]._imat) + #update gain + sup.rtc._rtc.set_gain(0,sup.config.p_controllers[0].gain) + + if(initialisation): + init(sup, mod, wfs=wfs, dm_use_tt=dm_use_tt, n_filt=n_filt) + M=reconstructor(mod) + if(reset): + sup.reset() + cmat_shape=sup.rtc.get_command_matrix(0).shape + if(M.shape[0] != cmat_shape[0] or M.shape[1] != cmat_shape[1]): + print("ToR 
shape is not valid:\n\twaiting for:",cmat_shape,"\n\tgot :",M.shape) + else: + sup.rtc.set_command_matrix(0,M) + if(n_iter>0): + sup.loop(n_iter) diff --git a/shesha/util/tao/ltao.py b/shesha/util/tao/ltao.py new file mode 100644 index 0000000..5e7f341 --- /dev/null +++ b/shesha/util/tao/ltao.py @@ -0,0 +1,70 @@ +import os +import numpy as np +from astropy.io import fits + +from shesha.ao import imats +from shesha.ao import cmats + +from shesha.util.tao import writer + +def init(tao_settings,sup,*,n_filt=10, wfs="all", dm_use_tt=False): + """Initialize the LTAO mode + + compute meta matrix of interaction / command and write parameter files + + Args: + tao_settings : (dict) : tao settings variables + + sup : CompassSupervisor : compass supervisor + + Kwargs: + wfs : (str) : (optional), default "all" wfs used by tao ( among "all", "lgs", "ngs") + + n_filt : (int) : number of Imat eigenvalues to filter out + + dm_use_tt : (bool) : (optional), default False using a TT DM + """ + + #compute meta imat + meta_D = imats.get_metaD(sup,0,0) + #get svd of (D.T*D) + SVD = cmats.svd_for_cmat(meta_D) + #plt.plot(SVD[1]) + meta_Dx = cmats.get_cmat(meta_D,nfilt=n_filt,svd=SVD) + + #write MOAO pipeline inputs + data_path = tao_settings["INPUT_PATH"] + lgs_filter_cst=0.1 + if(dm_use_tt): + lgs_filter_cst=0. 
+ writer.generate_files(sup, path=data_path, single_file=True, + dm_use_tt=dm_use_tt, wfs=wfs, lgs_filter_cst=lgs_filter_cst) + writer.write_meta_Dx(meta_Dx,nTS=sup.config.NTS,path=data_path) + + +def reconstructor(tao_settings,*,apply_log="./log"): + """Initialize the LTAO mode + + compute meta matrix of interaction / command and write parameter files + + Args: + tao_settings : (dict) : tao settings variables + + Kwargs: + apply_log : (str) : tao log file name + + Returns: + tor : () : tomographic reconstructor + """ + + flags = tao_settings["STARPU_FLAGS"] + tao_path = tao_settings["TAO_PATH"] + data_path = tao_settings["INPUT_PATH"] + gpus = tao_settings["GPU_IDS"] + tile_size = str( tao_settings["TILE_SIZE"]) + apply_cmd = flags + " " + tao_path + "/ltao_reconstructor --sys_path=" \ + + data_path + " --atm_path=" + data_path + " --ncores=1 --gpuIds=" \ + + gpus + " --ts=" + tile_size + " --sync=1 --warmup=0 >" + apply_log \ + +" 2>&1" + os.system(apply_cmd) + return fits.open("M_ltao_0.fits")[0].data.T diff --git a/shesha/util/tao/mcao.py b/shesha/util/tao/mcao.py new file mode 100644 index 0000000..3ba441f --- /dev/null +++ b/shesha/util/tao/mcao.py @@ -0,0 +1,69 @@ +import os +import numpy as np +from astropy.io import fits + +from shesha.ao import imats +from shesha.ao import cmats + +from shesha.util.tao import writer + + +def init(tao_settings ,sup,*,n_filt=0,wfs="all",dm_use_tt=False): + """Initialize the MOAO mode + + compute meta matrix of interaction / command and write parameter files + + Args: + tao_settings : (dict) : tao settings variables + + sup : (CompassSupervisor) : compass supervisor + + Kwargs + n_filt : (int) : number of Imat eigenvalues to filter out + + wfs : (str) : (optional), default "all" wfs used by tao ( among "all", "lgs", "ngs") + + dm_use_tt : (bool) :(optional), default False DM compensating TT + """ + + + #compute meta imat + meta_D = imats.get_metaD(sup) + #get svd of (D.T*D) + SVD = cmats.svd_for_cmat(meta_D) + 
#plt.plot(SVD[1]) + meta_Dx = cmats.get_cmat(meta_D, nfilt=n_filt, svd=SVD) + + #write MOAO pipeline inputs + data_path = tao_settings["INPUT_PATH"] + + lgs_filter_cst = 0.1 + if(dm_use_tt): + lgs_filter_cst = 0. + writer.generate_files(sup, path=data_path, single_file=True, + dm_use_tt=dm_use_tt,wfs=wfs, lgs_filter_cst=lgs_filter_cst) + writer.write_meta_Dx(meta_Dx, nTS=sup.config.NTS, path=data_path) + + +def reconstructor(tao_settings, *,apply_log="./log"): + """Initialize the LTAO mode + + compute meta matrix of interaction / command and write parameter files + + Args: + tao_settings : (dict) : tao settings variables + + Kwargs: + apply_log : (str) : (optional), default "./log" tao log file name + """ + + flags = tao_settings["STARPU_FLAGS"] + tao_path = tao_settings["TAO_PATH"] + data_path = tao_settings["INPUT_PATH"] + gpus = tao_settings["GPU_IDS"] + tile_size = str(tao_settings["TILE_SIZE"]) + + apply_cmd=flags+" "+tao_path+"/mcao_reconstructor --sys_path="+data_path+" --atm_path="+data_path+" --ncores=1 --gpuIds="+gpus+" --ts="+tile_size+" --sync=1 --warmup=0 >"+apply_log+" 2>&1" + + os.system(apply_cmd) + return fits.open("./M_mcao.fits")[0].data.T diff --git a/shesha/util/tao/writer.py b/shesha/util/tao/writer.py new file mode 100644 index 0000000..16d3bda --- /dev/null +++ b/shesha/util/tao/writer.py @@ -0,0 +1,513 @@ +import numpy as np +import astropy.io.fits as fits +import shutil + +#filtering +#for: +#mavis_ltao keep 1320 values +#mavis_mcao keep 5400 values +#mavis_moao keep 1320 values +# + + +def used_actu(xpos, ypos,*, Np=-1): + """return the indices of the used actuators + + Args: + xpos: (np.ndarray[ndim=1, dtype=np.int32]): (optional) actuator position in x + + ypos: (np.ndarray[ndim=1, dtype=np.int32]): (optional) actuator position in y + + Kwargs: + Np: (int): (optional) number of actuators along the diameter + """ + + u = np.unique(xpos) + pMin = u.min() + u -= pMin + if (Np > 0 and Np != u.size): + raise ValueError("Number of actuator 
along the diameter unconsistent") + else: + Np = u.size + #if(not np.testing.assert_array_almost_equal(np.arange(Np)*u[1],u,1)): + # print((np.arange(Np)*u[1]-u).max()) + # raise ValueError("non uniform positions") + X = (xpos - pMin) / u[1] + Y = (ypos - pMin) / u[1] + return (Y * Np + X).astype(np.int32) + + +def get_idx(p_dm, *, xpos=None, ypos=None): + """return a correspondance between the covariance matrix indices and the covariance map indices + + Args: + p_dm: (Param_dm): dm settings + + Kwargs: + xpos: (np.ndarray[ndim=1, dtype=np.int32]): (optional) actuator position in x + + ypos: (np.ndarray[ndim=1, dtype=np.int32]): (optional) actuator position in y + + Returns: + index_map : (np.ndarray[ndim=1, dtype=np.int32]) : correspondance between the covariance matrix indices and the covariance map indices + """ + + if (xpos is not None and ypos is not None): + csI = used_actu(xpos, ypos) + else: + csI = p_dm.csI + + Np = p_dm.nact + # dm.csI: valid actuators + xx = np.tile(np.arange(Np), (Np, 1)).flatten('C')[csI] + xx = -np.tile(xx, (xx.size, 1)) + dx = xx - xx.T + + yy = np.tile(np.arange(Np), (Np, 1)).flatten('F')[csI] + yy = -np.tile(yy, (yy.size, 1)) + dy = yy - yy.T + # // transformation des decalages en indice de tableau + dx += Np + dy += Np + + return dx.flatten("F") + (p_dm.nact * 2 - 1) * (dy.flatten("F") - 1) + + +def get_abs2fi(sup, *,dm=0): + size = sup.config.p_geom.pupdiam + N = 2**(int(np.log(2 * size) / np.log(2) + 1)) #SutraTarget:138, + + supportFi = np.zeros((N, N), dtype=np.float32) + fi = sup.config.p_dms[0]._influ[:, :, 0] * 1e-6 + supportFi[:fi.shape[0], :fi.shape[1]] = fi + + abs2fi = np.abs(np.fft.fft2(supportFi.T))**2 + + return abs2fi.T + + +def OTF_telescope(sup): + """otf = OTF_telescope(fourier) + + Computes the OTF of the telescope, so that + > fft(OTF_telescope()).re + produces a PSF normalized with max(psf)=SR=1.0 + + """ + size = sup.config.p_geom.pupdiam + N = 2**(int(np.log(2 * size) / np.log(2) + 1)) #SutraTarget:138, + ud 
= sup.config.p_tel.diam / sup.config.p_geom.pupdiam + # computation of pupil + x = ud / (sup.config.p_tel.diam / 2.) * (np.arange(N) + 1 - + (N / 2 + 1)) # x exprime en rayon pupille + x2 = np.tile(x * x, (x.size, 1)) + r = np.sqrt(x2 + x2.T) + + #pup=(r<=1.0)*1 * (r>sup.config.p_tel.cobs)*1 + pup = sup.config.p_geom._ipupil + # factor that will normalize the psf + # with PSF(0)=1.00 when diffraction-limited + # surface_pup_m2 = tomo.tel.diam^2*(1-tomo.tel.obs^2)*pi/4; + surface_pup_m2 = sup.config.p_tel.diam**2 * ( + 1 - sup.config.p_tel.cobs**2) * np.pi / 4. + surface_pup_pix = surface_pup_m2 / ud**2 + factnorm = surface_pup_pix**2 + # compute FTO of telescope. Computing the psf using + # just fft(FTO).re produces a psf with max(psf)=SR + # In fact, FTOtel is normalized so that sum(FTOtel)=1. + # FTOtel = autocorrelation(pup) / factnorm; + #FTOtel=autocorrelation(pup)/factnorm + FTOtel = autocorrelation(pup) / np.sum(pup)**2 + return FTOtel + + +def get_subaps(sup, *, wfs="all"): + """ Return the number of valid subaps (per wfs) as well as their position + + Args: + sup : (CompassSupervisor) : current supervisor + + Kwargs: + wfs : (str) : (optional), default "all" wfs used by tao ( among "all", "lgs", "ngs") + + """ + #X=(sup.config.p_wfss[0]._validpuppixx-mpup.shape[0]/2-1)*(sup.config.p_tel.diam/sup.config.p_wfss[0].nxsub/sup.config.p_wfss[0]._pdiam ) + nsubap = [] + X = [] + Y = [] + if (wfs == "ngs"): + p_wfss = sup.config.p_wfs_ngs + elif (wfs == "lgs"): + p_wfss = sup.config.p_wfs_lgs + [sup.config.p_wfs_ngs[-1]] + else: # case all + p_wfss = sup.config.p_wfs_lgs + sup.config.p_wfs_ngs + for wfs in p_wfss: + validX = wfs._validpuppixx + validY = wfs._validpuppixy + toMeter = (sup.config.p_tel.diam / wfs.nxsub / wfs._pdiam) + validX = (validX - validX.max() / 2) * toMeter + validY = (validY - validY.max() / 2) * toMeter + X += list(validX) + Y += list(validY) + nsubap.append(len(validX)) + return nsubap, X, Y + + +def autocorrelation(a): + """ computes the 
autocorrelation so that + + max(aa) == sum(a^2) + + Args: + a: (np.ndarray[ndim=2, dtype=np.float32]): matrix to compute the autocorrelation on + + """ + if (a.ndim == 2): + b = np.abs(np.fft.fft2(a)) + b = np.fft.ifft2(b * b).real * a.size + elif (a.ndim == 1): + b = np.abs(np.fft.fft(a)) + b = np.fft.ifft(b * b).real + else: + print("error: autocorrelation: expect dim 1 or 2") + return + n2 = a.size # N*N + b /= n2 + return b + + +def func_influ(x, y, x0): + #/* DOCUMENT opd_metres = func_influ(x,y,x0) + # + # The arguments , and must all be in the same units. + # + # In the particular case where is set to x0=dm.x0, and + # must be expressed with no unit, they are variables defined over the + # unitary circle of R=1. + # + # In any case, the function returns the influence function (=OPD, + # optical path difference) of the actuators expressed in METERS. + # + # + # // allez on va dire que ce sont des metres ! + return 1.e-6 * np.exp(-(x * x + y * y) / (2 * x0 * x0)) + + +def generate_files(sup, *,path=".", single_file=False, dm_use_tt=False, wfs="all", + lgs_filter_cst=0.1, tar=-1): + """write inputs parameters + + sys-params.txt: contains the system parameters + idx.fits : + otftel.fits : + abs2fi.fits : + subaps.fits : number and position of the subapertures + + Args: + sup : (CompassSupervisor) : current supervisor + + Kwargs: + path : (str): (optional), default './' path where the files are written + + single_file : (bool): (optional), default=False write a single fits File + + wfs : (str) : (optional), default "all" wfs used by tao ( among "all", "lgs", "ngs") + + lgs_filter_cst : (float) : (optional) add constant to filter out lgs tt + + tar : (list) : (optional), default -1 index of the target + """ + p_dm = sup.config.p_dms[0] + if (p_dm.type == 'tt'): + print("ERROR: first dm must not be a 'tip-tilt") + return + nact = p_dm.nact + ntotact = p_dm._ntotact + + if (dm_use_tt): + p_dm_tt = sup.config.p_dms[-1] + if (p_dm_tt.type != 'tt'): + print("ERROR: 
tip-tilt dm must be the last one") + return + ntotact += 2 + + write_sys_param(sup, path=path, wfs=wfs, lgs_filter_cst=lgs_filter_cst, tar=tar) + write_atm_param(sup, path=path) + idx = get_idx(p_dm, xpos=p_dm._xpos, ypos=p_dm._ypos) + otf = OTF_telescope(sup) + abs2fi = get_abs2fi(sup) + nsubaps, X, Y = get_subaps(sup, wfs=wfs) + if (not single_file): + hdu_idx = fits.PrimaryHDU(idx) + hdu_idx.header["NACT"] = nact + hdu_idx.header["NTOTACT"] = ntotact + hdul = fits.HDUList([hdu_idx]) + hdul.writeto(path + "/idx.fits", overwrite=1) + fits.writeto(path + "/otftel.fits", otf, overwrite=1) + fits.writeto(path + "/abs2fi.fits", abs2fi, overwrite=1) + + hdu_prime = fits.PrimaryHDU(np.zeros(0)) + hdu_nsubap = fits.ImageHDU(nsubaps, name="NSUBAP") + hdu_Xpos = fits.ImageHDU(X, name="XPOS") + hdu_Ypos = fits.ImageHDU(Y, name="YPOS") + hdul = fits.HDUList([hdu_prime, hdu_nsubap, hdu_Xpos, hdu_Ypos]) + hdul.writeto(path + "/subaps.fits", overwrite=1) + else: + hdu_prime = fits.PrimaryHDU(np.zeros(0)) + hdu_nsubap = fits.ImageHDU(nsubaps, name="NSUBAP") + hdu_Xpos = fits.ImageHDU(X, name="XPOS") + hdu_Ypos = fits.ImageHDU(Y, name="YPOS") + hdu_idx = fits.ImageHDU(idx, name="IDX") + hdu_idx.header["NACT"] = nact + hdu_idx.header["NTOTACT"] = ntotact + hdu_abs2fi = fits.ImageHDU(abs2fi, name="ABS2FI") + hdu_otf = fits.ImageHDU(otf, name="OTF") + hdul = fits.HDUList([ + hdu_prime, hdu_nsubap, hdu_Xpos, hdu_Ypos, hdu_idx, hdu_abs2fi, hdu_otf + ]) + hdul.writeto(path + "/sys-inputs.fits", overwrite=1) + + +def to_str(a=""): + """ transform a np.array into a string + + Kwargs: + a : (np.ndarray[ndim=1, dtype=np.int32]) : input array + """ + string = "" + if (type(a) is np.ndarray): + for i in range(a.size): + string += str(a[i]) + " " + if (type(a) is list): + for i in range(len(a)): + string += str(a[i]) + " " + else: + string = str(a) + + return string + +def write_sys_param(sup, path=".", wfs="all", lgs_filter_cst=0.1, tar=-1): + """ Write a sysParam file for tao based on the 
compass configuration + + Args: + sup : (CompassSupervisor) : current supervisor + + Kwargs: + path : (str) : (optional), "./" path to the sysParam file + + wfs : (str) : (optional), default "all" wfs used by tao ( among "all", "lgs", "ngs") + + lgs_filter_cst : (float) : (optional) add constant to filter out lgs tt + + tar : (list) : (optional), default -1 index of the target + """ + bdw = 3.3e-7 + lgs_depth = 5000. + through_atm = 1. + p_wfs_ngs = sup.config.p_wfs_ngs + p_wfs_lgs = sup.config.p_wfs_lgs + if (wfs == "ngs"): + p_wfss = p_wfs_ngs + elif (wfs == "lgs"): + p_wfss = p_wfs_lgs + [p_wfs_ngs[-1]] + else: # case all + p_wfss = p_wfs_lgs + p_wfs_ngs + p_wfs_ts = sup.config.p_wfs_ts + p_targets = sup.config.p_targets + p_tel = sup.config.p_tel + p_loop = sup.config.p_loop + + if (len(p_wfs_lgs) > 0): + lgs_flux = p_wfs_lgs[0].lgsreturnperwatt * p_wfs_lgs[0].laserpower * p_wfs_lgs[ + 0].optthroughput * 10**4 + lgs_pix_size = p_wfs_lgs[0].pixsize + lambda_lgs = p_wfs_lgs[0].Lambda * 1e-6 + through_lgs = p_wfs_lgs[0].optthroughput + spot_width = p_wfs_lgs[0].beamsize + lgs_alt = p_wfs_lgs[0].gsalt + else: + lgs_flux = 7.e6 + lgs_pix_size = 0.7 + lambda_lgs = 5.89e-07 + through_lgs = 0.382 + spot_width = 0.8 + lgs_alt = 90000 + + if (len(p_wfs_ts) > 0): + ts_xpos = [w.xpos for w in p_wfs_ts] + ts_ypos = [w.ypos for w in p_wfs_ts] + else: + ts_xpos = [] + ts_ypos = [] + + f = open(path + "/sys-params.txt", "w") + f.write("diam : meter : Telescope diameter\n") + f.write(to_str(p_tel.diam)) + f.write("\nobs : percent : Central obscuration\n") + f.write(to_str(p_tel.cobs)) + f.write("\ntFrame : second : frame rate\n") + f.write(to_str(p_loop.ittime)) + f.write("\nnW : : number of WFS\n") + f.write(to_str(len(p_wfss))) + f.write("\nnLgs : : number of LGS\n") + f.write(to_str(len(p_wfs_lgs))) + f.write("\nnTS : : number of Truth Sensor\n") + f.write(to_str(len(p_wfs_ts))) + f.write("\nnTarget : : number of Target\n") + if(tar==-1): + f.write(to_str(len(p_targets))) + 
else: + f.write("1") + f.write("\nNssp : : number of subaperture per wfs along the diameter\n") + f.write(to_str([wfs.nxsub for wfs in p_wfss])) + f.write("\nfracsub : % : Minimal illumination fraction for valid subap\n") + f.write("-1") #to_str(p_wfss[0].fracsub)) + f.write("\ngsAlt : meter^-1 : inverse of lazer altitude\n") + f.write(to_str([1 / w.gsalt for w in p_wfs_lgs] + [0 for w in p_wfs_ngs])) + f.write("\ntype : : guide star type (1:NGS, 2:LGS)\n") + f.write(to_str([2 for w in p_wfs_lgs] + [1 for w in p_wfs_ngs])) + f.write("\nalphaX_as : arcsec : pointing direction of the wfs on x axis\n") + f.write(to_str([w.xpos for w in p_wfss])) + f.write("\nalphaY_as : arcsec : pointing direction of the wfs on y axis\n") + f.write(to_str([w.ypos for w in p_wfss])) + f.write("\nXPup : meter : pupil shift of the WFS\n") + f.write(to_str([0 for i in range(len(p_wfss))])) + f.write("\nYPup : meter : pupil shift of the WFS\n") + f.write(to_str([0 for i in range(len(p_wfss))])) + f.write("\nthetaML : : rotation of the microlenses\n") + f.write(to_str([0 for i in range(len(p_wfss))])) + f.write("\nthetaCam : : rotation of the camera\n") + f.write(to_str([0 for i in range(len(p_wfss))])) + f.write("\nsensibility: : sensitivity coeff of this WFS\n") + f.write(to_str([1 for i in range(len(p_wfss))])) + f.write("\ntracking : arcsec^2 : telescope tracking error parameters (x^2, y^2 and xy)\n" + ) + f.write(to_str("1 1 1")) + f.write("\npasDPHI : : Precision of DPHI precomputation. 
//deprecated\n" + ) + f.write(to_str(0.0001)) + f.write("\nncpu : : Number of CPU used (only with openMP)\n") + f.write(to_str(1)) + f.write("\nmrNGS : : magnitude of NGS\n") + if (len(p_wfs_ngs) > 0): + f.write(to_str([w.gsmag for w in p_wfs_ngs])) + else: + f.write(to_str([0.0])) + f.write("\nlgsFlux : (ph/m2/s) : LGS photon return at M1\n") + f.write(to_str(lgs_flux)) + f.write("\nngsPixSize : arcsec : NGS pixel size\n") + if (len(p_wfs_ngs) > 0): + f.write(to_str(p_wfs_ngs[0].pixsize)) + else: + f.write(to_str(0.0)) + f.write("\nlgsPixSize : arcsec : LGS pixel size\n") + f.write(to_str(lgs_pix_size)) + f.write("\nlambdaNGS : meter : wave length for NGS\n") + if (len(p_wfs_ngs) > 0): + f.write(to_str(p_wfs_ngs[0].Lambda * 1e-6)) + else: + f.write(to_str(0.0)) + f.write("\nlambdaLGS : meter : wave length for LGS\n") + f.write(to_str(lambda_lgs)) + f.write("\nbdw_m : meter : bandwidth\n") + f.write(to_str(bdw)) + f.write("\nthroughNGS : percent : transmission for NGS\n") + if (len(p_wfs_ngs) > 0): + f.write(to_str(p_wfs_ngs[0].optthroughput)) + else: + f.write(to_str(0.0)) + f.write("\nthroughLGS : percent : transmission for LGS\n") + f.write(to_str(through_lgs)) + f.write("\nthroughAtm : percent : atmosphere transmission\n") + f.write(to_str(through_atm)) + f.write("\nRON : nb of e- : Read Out Noise \n") + f.write(to_str(int(np.ceil(p_wfss[0].noise)))) + f.write("\nlgsCst : : constant on lgs (simulate that LGS cannot measure tip-tilt and focus)\n") + f.write(to_str(lgs_filter_cst)) + f.write("\nspotWidth : arcsec : lazer width\n") + f.write(to_str(spot_width)) + f.write("\nlgsAlt : meter : sodium layer altitude\n") + f.write(to_str(lgs_alt)) + f.write("\nlgsDepth : meter : depth of the sodium layer\n") + f.write(to_str(lgs_depth)) + f.write("\ntargetX_as : arcsec : taget direction on x axis\n") + if(tar==-1): + f.write(to_str(ts_xpos + [t.xpos for t in p_targets])) + elif(isinstance(tar,(list,np.ndarray))): + f.write(to_str(ts_xpos + [tar[0]])) + else: + 
f.write(to_str(ts_xpos + [p_targets[tar].xpos])) + f.write("\ntargetY_as : arcsec : taget direction on y axis\n") + if(tar==-1): + f.write(to_str(ts_ypos + [t.ypos for t in p_targets])) + elif(isinstance(tar,(list,np.ndarray))): + f.write(to_str(ts_ypos + [tar[1]])) + else: + f.write(to_str(ts_ypos + [p_targets[tar].ypos])) + + +def write_atm_param(sup, *,path="."): + """ Write a atmParam file for tao based on the compass configuration + + Args: + sup : (CompassSupervisor) : current supervisor + + Kwargs: + path : (str) : (optional), default "./" path to the atmParam file + """ + f = open(path + "/prof-1-atmos-night0.txt", "w") + f.write("Nlayer\n") + f.write(to_str(sup.config.p_atmos.nscreens)) + f.write("\nr0 @ wfs lambda\n") + f.write(to_str(sup.config.p_atmos.r0)) + f.write("\ncn2 ESO units\n") + f.write(to_str(sup.config.p_atmos.get_frac().tolist())) + f.write("\nh in meters\n") + f.write(to_str(sup.config.p_atmos.get_alt().tolist())) + f.write("\nl0 in meters\n") + f.write(to_str(sup.config.p_atmos.get_L0().tolist())) + f.write("\nwind direction \n") + f.write(to_str(sup.config.p_atmos.get_winddir().tolist())) + f.write("\nwind speed meter/s^-1\n") + f.write(to_str(sup.config.p_atmos.get_windspeed().tolist())) + f.close() + shutil.copyfile(path + "/prof-1-atmos-night0.txt", path + "/prof0-atmos-night0.txt") + + +def write_meta_Dx(meta_Dx, *,nTS=0, nmeas=None, trans=True, path="."): + """Write command matrices + + split the meta command matrix + + Args: + meta_Dx: (np.ndarray[ndim=2, dtype=np.float32]): "meta" command matrix + + Kwargs: + nTS: (int): (optional), default=0. Number of truth sensors, command matrices are written as Di.fits where 'i' belongs to [0,nTS[ , if nTS<1 write the whole matrix as Dx.fits + + nmeas: (np.ndarray[ndim=1, dtype=np.int32]): (optional) if set, must contains the number of measurements for each TS, the matrix is split according to theses numbers. 
By default, the matrix is split evenly between the nTS truth sensors + + trans: (bool): (optional), default=True. Transpose the matrix if true + + path: (str): (optional), default './' path where the files are written + """ + if (nTS < 1): + if (trans): + fits.writeto(path + "/Dx.fits", meta_Dx.T, overwrite=True) + else: + fits.writeto(path + "/Dx.fits", meta_Dx, overwrite=True) + return + + if (nmeas is None): + n = meta_Dx.shape[1] // nTS + nmeas = np.arange(0, meta_Dx.shape[1] + n, n) + else: + nmeas = np.append(0, nmeas.cumsum()) + + for i in range(nTS): + print(i + 1, "out of", nTS, end='\r') + Dx = meta_Dx[:, nmeas[i]:nmeas[i + 1]] + if (trans): + fits.writeto(path + "/Dx" + str(i) + ".fits", Dx.T, overwrite=True) + else: + fits.writeto(path + "/Dx" + str(i) + ".fits", Dx, overwrite=True) diff --git a/shesha/util/tools.py b/shesha/util/tools.py index 42c0d32..1f0fedf 100644 --- a/shesha/util/tools.py +++ b/shesha/util/tools.py @@ -1,7 +1,7 @@ ## @package shesha.util.tools ## @brief Imported from CANARY ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -113,7 +113,7 @@ def pli(data, color='gist_earth', cmin=9998, cmax=9998, win=1, origin=None, color = 'gist_earth' if (origin is None): origin = "" - if (aspect is not 'auto'): + if (aspect != 'auto'): aspect = "\'" + aspect + "\'" else: aspect = "\'auto\'" @@ -254,7 +254,7 @@ def plsh(slopesvector, nssp=14, rmax=0.98, obs=0, win=1, invertxy=False): def plpyr(slopesvector, validArray): """ - wao.config.p_wfs0._isvalid + wao.config.p_wfss[0]._isvalid """ nslopes = slopesvector.shape[0] / 2 x, y = np.where(validArray.T) diff --git a/shesha/util/utilities.py b/shesha/util/utilities.py index 94ea5a0..f1fb2b8 100644 --- a/shesha/util/utilities.py +++ b/shesha/util/utilities.py @@ -1,7 +1,7 @@ ## @package shesha.util.utilities ## @brief Basic utilities function ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 
2020/05/18 ## @copyright GNU Lesser General Public License # @@ -54,7 +54,7 @@ def rebin(a, shape): def fft_goodsize(s): """find best size for a fft from size s - :parameters: + Args: s: (int) size """ @@ -156,80 +156,6 @@ def makegaussian(size, fwhm, xc=-1, yc=-1, norm=0): return tmp -def load_config_from_file(filename_path: str): - """ - Load the parameters from the parameters file - - Args: - filename_path: (str): path to the parameters file - - Return: - config : (config) : a config module - """ - path = os.path.dirname(os.path.abspath(filename_path)) - filename = os.path.basename(filename_path) - name, ext = os.path.splitext(filename) - - if (ext == ".py"): - if (path not in sys.path): - sys.path.insert(0, path) - - return load_config_from_module(name) - - # exec("import %s as wao_config" % filename) - sys.path.remove(path) - elif importlib.util.find_spec(filename_path) is not None: - return load_config_from_module(filename_path) - else: - raise ValueError("Config file must be .py or a module") - - -def load_config_from_module(filepath: str): - """ - Load the parameters from the parameters module - - Args: - filename_path: (str): path to the parameters file - - Return: - config : (config) : a config module - """ - filename = filepath.split('.')[-1] - print("loading: %s" % filename) - - config = importlib.import_module(filepath) - del sys.modules[config.__name__] # Forced reload - config = importlib.import_module(filepath) - - if hasattr(config, 'par'): - config = getattr("config.par.par4bench", filename) - - # Set missing config attributes to None - if not hasattr(config, 'p_loop'): - config.p_loop = None - if not hasattr(config, 'p_geom'): - config.p_geom = None - if not hasattr(config, 'p_tel'): - config.p_tel = None - if not hasattr(config, 'p_atmos'): - config.p_atmos = None - if not hasattr(config, 'p_dms'): - config.p_dms = None - if not hasattr(config, 'p_targets'): - config.p_targets = None - if not hasattr(config, 'p_wfss'): - config.p_wfss = None - 
if not hasattr(config, 'p_centroiders'): - config.p_centroiders = None - if not hasattr(config, 'p_controllers'): - config.p_controllers = None - - if not hasattr(config, 'simul_name'): - config.simul_name = None - - return config - - def generate_square(radius: float, density: float = 1.): """ Generate modulation points positions following a square pattern @@ -238,7 +164,7 @@ def generate_square(radius: float, density: float = 1.): density : (float), optional) : number of psf per lambda/D. Default is 1 - Return: + Returns: cx : (np.ndarray) : X-positions of the modulation points cy : (np.ndarray) : Y-positions of the modulation points @@ -258,7 +184,7 @@ def generate_circle(radius: float, density: float = 1.): density : (float), optional) : number of psf per lambda/D. Default is 1 - Return: + Returns: cx : (np.ndarray) : X-positions of the modulation points cy : (np.ndarray) : Y-positions of the modulation points @@ -277,7 +203,7 @@ def generate_pseudo_source(radius: float, additional_psf=0, density=1.): density : (float, optional) :TODO description - Return: + Returns: ox : TODO description & explicit naming oy : TODO description & explicit naming @@ -352,7 +278,7 @@ def first_non_zero(array: np.ndarray, axis: int, invalid_val: int = -1) -> np.nd invalid_val : (int, optional) : Default is -1 - Return: + Returns: non_zeros_pos : (np.ndarray) : Index of the first non-zero element for each line or column following the axis """ @@ -372,7 +298,7 @@ def first_non_zero(array: np.ndarray, axis: int, invalid_val: int = -1) -> np.nd # modif dg : allow to rotate a cube of images with one angle per image -# :parameters: +# Args: # im: (np.ndarray[ndim=3,dtype=np.float32_t]) : array to rotate @@ -461,7 +387,7 @@ def first_non_zero(array: np.ndarray, axis: int, invalid_val: int = -1) -> np.nd # center of the image. # If zoom is not specified, the default value of 1.0 is taken. 
-# :parameters: +# Args: # im: (np.ndarray[ndim=3,dtype=np.float32_t]) : array to rotate diff --git a/shesha/util/writers/common/__init__.py b/shesha/util/writers/common/__init__.py new file mode 100644 index 0000000..2c06e6a --- /dev/null +++ b/shesha/util/writers/common/__init__.py @@ -0,0 +1,5 @@ +from shesha.util.writers.common.dm import * +from shesha.util.writers.common.wfs import * +from shesha.util.writers.common.atmos import * +from shesha.util.writers.common.imat import * +from shesha.util.writers.common.fits import * \ No newline at end of file diff --git a/shesha/util/writers/common/atmos.py b/shesha/util/writers/common/atmos.py new file mode 100644 index 0000000..d3ae9c4 --- /dev/null +++ b/shesha/util/writers/common/atmos.py @@ -0,0 +1,35 @@ +import numpy as np +import json + +def atmos_to_json(atmos, name=""): + """return a json description of a the atmos + + Args: + atmos : (Param_atmos) : compass atmospheric parameters + """ + json_atm = { + "nLayer" : atmos.get_nscreens(), + "r0" : atmos.get_r0(), + "h" : atmos.get_alt().tolist(), + "fracCn2" : atmos.get_frac().tolist(), + "L0" : atmos.get_L0().tolist(), + "windDir" : atmos.get_winddir().tolist(), + "windSpeed" : atmos.get_windspeed().tolist() + } + if(name != ""): + json_atm["name"] = name + return json_atm + + +def atmos_json_notice(): + notice = { + "name": " : profile name", + "nLayer": " : number of layers in the turbulent profile", + "r0": "r0 at 500 nm (fried parameter)", + "h": " meter : (list) altitude of each layer", + "fracCn2": " percent : (list) cn2 fraction of each layer", + "L0": "meter : (list) outer scale of each layer", + "windDir": " degree : (list) wind sirection of each layer", + "windSpeed": " meter/s : (list) wind speed of each layer" + } + return notice \ No newline at end of file diff --git a/shesha/util/writers/common/dm.py b/shesha/util/writers/common/dm.py new file mode 100644 index 0000000..cf8e816 --- /dev/null +++ b/shesha/util/writers/common/dm.py @@ -0,0 +1,55 @@ 
+import numpy as np +import json + +def get_actu_pos_pixel(dm): + """return the coordinates in pixel of a given DM actuators + + Args: + dm : (Param_dm) : Dm to get the actuators position from + + Returns: + xpos : (np.ndarray[ndim=1, dtype=np.float32]) : actuators positions along axis x + + ypos : (np.ndarray[ndim=1, dtype=np.float32]) : actuators positions along axis y + """ + + return dm._xpos+1, dm._ypos+1 + + +def get_actu_pos_meter(sup, dm_id): + """return the coordinates in meters of a given DM actuators + + Args: + sup : (compasSSupervisor) : supervisor + + dm_id : (int) : index of the DM + + Returns: + xpos : (np.ndarray[ndim=1, dtype=np.float32]) : actuators positions along axis x + + ypos : (np.ndarray[ndim=1, dtype=np.float32]) : actuators positions along axis y + """ + + config = sup.config + dm=config.p_dms[dm_id] + geom = config.p_geom + valid_X = ( dm._xpos - geom.get_cent() ) * geom.get_pixsize() + valid_Y = ( dm._ypos - geom.get_cent() ) * geom.get_pixsize() + return valid_X, valid_Y + + +def dm_to_json(dm, geom): + """return a json description of a dm + + Args: + dm : (Param_dm) : dm to represent as json + """ + dm_json = { + "n_actu" : dm.get_nact(), + "h" : dm.get_alt(), + "coupling" : dm.get_coupling(), + "shift_x" : dm.get_dx() * geom.get_pixsize(), + "shift_y" : dm.get_dy() * geom.get_pixsize(), + "theta" : dm.get_theta() + } + return dm_json \ No newline at end of file diff --git a/shesha/util/writers/common/fits.py b/shesha/util/writers/common/fits.py new file mode 100644 index 0000000..e42b62b --- /dev/null +++ b/shesha/util/writers/common/fits.py @@ -0,0 +1,154 @@ +import numpy as np +from shesha.util.writers.common import dm +from shesha.util.writers.common import wfs +from shesha.util.writers.common import imat +from astropy.io import fits + +def wfs_to_fits_hdu(sup, wfs_id): + """Return a fits Header Data Unit (HDU) representation of a single WFS + + Args: + sup : (compasSSupervisor) : supervisor + + wfs_id : (int) : index of the WFS 
in the supervisor + + Returns: + hdu : (ImageHDU) : fits representation of the WFS + """ + hdu_name = "WFS" + str(wfs_id) + X,Y = wfs.get_subap_pos_meter(sup, wfs_id) + valid_subap = np.array([X,Y],dtype=np.float64) + hdu = fits.ImageHDU( valid_subap, name=hdu_name) + hdu.header["NSSP"] = sup.config.p_wfss[wfs_id].get_nxsub() + hdu.header["SSPSIZE"] = sup.config.p_wfss[wfs_id].get_subapd() + return hdu + +def dm_to_fits_hdu(sup, dm_id): + """Return a fits Header Data Unit (HDU) representation of a single DM + + Args: + sup : (compasSSupervisor) : supervisor + + wfs_id : (int) : index of the DM in the supervisor + + Returns: + hdu : (ImageHDU) : fits representation of the DM + """ + hdu_name = "DM" + str(dm_id) + X,Y = dm.get_actu_pos_meter(sup, dm_id) + valid_subap = np.array([X,Y],dtype=np.float64) + hdu = fits.ImageHDU( valid_subap, name=hdu_name) + hdu.header["NACTU"] = sup.config.p_dms[dm_id].get_nact() + hdu.header["PITCH"] = sup.config.p_dms[dm_id].get_pitch() + hdu.header["COUPLING"] = sup.config.p_dms[dm_id].get_coupling() + hdu.header["ALT"] = sup.config.p_dms[dm_id].get_alt() + return hdu + +def dm_influ_to_fits_hdu(sup, dm_id, *, influ_index=-1): + """Return a fits Header Data Unit (HDU) holding the influence functions of a specific DM + + Args: + sup : (compasSSupervisor) : supervisor + + wfs_id : (int) : index of the DM in the supervisor + + Kwargs: + influ_index : (int) : (optional) default -1, index of the actuator to get the influence function from. 
def dm_influ_to_fits_hdu(sup, dm_id, *, influ_index=-1):
    """ Return a fits ImageHDU holding influence function(s) of a DM

    NOTE(review): the head of this function lies before the visible window;
    the signature and docstring were reconstructed from its visible body and
    from the call site in write_data — confirm against the original file.

    Args:
        sup : (compassSupervisor) : supervisor

        dm_id : (int) : index of the DM

    Kargs:
        influ_index : (int) : optional, index of the influence function to write,
            -1 : get all influence functions

    Returns:
        hdu : (ImageHDU) : hdu holding the DM influence functions
    """
    hdu_name = "INFLU_DM" + str(dm_id)
    if influ_index < 0:
        # all influence functions at once
        influ_fct = sup.config.p_dms[dm_id].get_influ().astype(np.float64)
    else:
        # a single influence function slice
        influ_fct = sup.config.p_dms[dm_id].get_influ()[:, :, influ_index].astype(np.float64)
    return fits.ImageHDU(influ_fct, name=hdu_name)


def write_data(file_name, sup, *, wfss_indices=None, dms_indices=None,
               controller_id=0, influ=0, compose_type="controller"):
    """ Write data for yao compatibility

    write into a single fits:
        * number of valid subapertures
        * number of actuators
        * subaperture positions (2-dim array x,y) in meters centered
        * actuator positions (2-dim array x,y) in pixels starting from 0
        * interaction matrix (2*nSubap, nactu)
        * command matrix (nactu, 2*nSubap)

    Args:
        file_name : (str) : data file name

        sup : (compassSupervisor) : supervisor

    Kargs:
        wfss_indices : (list[int]) : optional, default all, list of the wfs indices to include

        dms_indices : (list[int]) : optional, default all (but tip-tilt), list of the DM indices to include

        controller_id : (int) : optional, index of the controller passed to yao
            (-1 : skip the interaction / command matrices)

        influ : (int) : optional, actuator index for the influence function

        compose_type : (str) : optional, possibility to specify split tomography case ("controller" or "splitTomo")
    """
    print("writing data to " + file_name)
    hdul = fits.HDUList([])

    # setting list of wfs and dm
    conf = sup.config
    if wfss_indices is None:
        wfss_indices = np.arange(len(conf.p_wfss))
    if dms_indices is None:
        # by default keep every DM except the tip-tilt mirrors
        dms_indices = [i for i in range(len(conf.p_dms))
                       if conf.p_dms[i].type != "tt"]

    # count the number of lgs
    n_lgs = sum(1 for i in wfss_indices if conf.p_wfss[i].get_gsalt() > 0)

    # primary hdu contains only keywords for sanity check
    hdu = fits.PrimaryHDU(np.zeros(1, dtype=np.int32))
    hdu.header["DIAM"] = conf.p_tel.get_diam()
    hdu.header["COBS"] = conf.p_tel.get_cobs()
    hdu.header["NLGS"] = n_lgs
    hdu.header["NNGS"] = len(wfss_indices) - n_lgs
    hdu.header["NDM"] = len(dms_indices)
    hdu.header["PIXSIZE"] = conf.p_geom.get_pixsize()

    # add primary hdu to list
    hdul.append(hdu)

    # add wfss
    for i in wfss_indices:
        hdul.append(wfs_to_fits_hdu(sup, i))

    # add dm
    for i in dms_indices:
        hdul.append(dm_to_fits_hdu(sup, i))
        hdul.append(dm_influ_to_fits_hdu(sup, i, influ_index=influ))

    if controller_id > -1:
        # IMAT
        interaction_mat = imat.compose_imat(sup, compose_type=compose_type,
                                            controller_id=controller_id)
        hdu_imat = fits.ImageHDU(interaction_mat, name="IMAT")
        # BUGFIX: the IMAT/CMAT HDUs were built (and announced by the prints
        # below) but never appended to the HDU list, so they were silently
        # missing from the output file.
        hdul.append(hdu_imat)

        # CMAT
        hdu_cmat = fits.ImageHDU(sup.rtc.get_command_matrix(controller_id),
                                 name="CMAT")
        hdul.append(hdu_cmat)

    print("\t* number of subaperture per WFS")
    print("\t* subapertures position")
    print("\t* number of actuator per DM")
    print("\t* actuators position")
    print("\t* Imat")
    print("\t* Cmat")

    hdul.writeto(file_name, overwrite=True)


def compose_imat(sup, *, compose_type="controller", controller_id=0):
    """ Return an interaction matrix

    return either the specified controller interaction matrix (if compose_type="controller")
    or an imat composed of all controller interaction matrices placed on the
    block diagonal (if compose_type="splitTomo")

    Args:
        sup : (compassSupervisor) : supervisor

    Kargs:
        compose_type : (str) : (optional), default "controller" possibility to specify split tomography case ("controller" or "splitTomo")

        controller_id : (int) : (optional), default 0 controller index

    Returns:
        imat : (np.ndarray[ndim=2, dtype=np.float32]) : interaction matrix

    Raises:
        ValueError : if compose_type is neither "controller" nor "splitTomo"
    """
    if compose_type == "controller":
        return sup.rtc.get_interaction_matrix(controller_id)
    elif compose_type == "splitTomo":
        # first pass : accumulate the dimensions of the composed imat
        n_actu = 0
        n_meas = 0
        for c in range(len(sup.config.p_controllers)):
            im_shape = sup.rtc.get_interaction_matrix(c).shape
            n_meas += im_shape[0]
            n_actu += im_shape[1]
        imat = np.zeros((n_meas, n_actu))
        # second pass : copy each controller imat onto the block diagonal
        n_meas = 0
        n_actu = 0
        for c in range(len(sup.config.p_controllers)):
            im = sup.rtc.get_interaction_matrix(c)
            imat[n_meas:n_meas + im.shape[0], n_actu:n_actu + im.shape[1]] = np.copy(im)
            n_meas += im.shape[0]
            n_actu += im.shape[1]
        return imat
    # BUGFIX: used to print "Unknown composition type" and silently return None
    raise ValueError("Unknown composition type")


def get_subap_pos_pixel(wfs):
    """Return the coordinates of the valid subapertures of a given WFS

    these coordinates are given in pixels

    Args:
        wfs : Param_wfs : wfs to get the subapertures position from

    Return:
        valid_X : (np.ndarray[ndim=1, dtype=np.float64]) : subapertures positions along axis x

        valid_Y : (np.ndarray[ndim=1, dtype=np.float64]) : subapertures positions along axis y
    """
    # -2 : presumably a guard-pixel offset of the valid-pixel maps — TODO confirm
    return wfs._validpuppixx - 2, wfs._validpuppixy - 2


def get_subap_pos_meter(sup, wfs_id):
    """Return the coordinates of the valid subapertures of a given WFS

    these coordinates are given in meters and centered

    Args:
        sup : (compassSupervisor) : supervisor

        wfs_id : (int) : index of the WFS

    Return:
        valid_X : (np.ndarray[ndim=1, dtype=np.float64]) : subapertures positions along axis x

        valid_Y : (np.ndarray[ndim=1, dtype=np.float64]) : subapertures positions along axis y
    """
    config = sup.config
    wfs = config.p_wfss[wfs_id]
    geom = config.p_geom
    valid_X, valid_Y = get_subap_pos_pixel(wfs)
    # span (in pixels) of the subaperture grid, used to center the coordinates
    total = geom.pupdiam / wfs.nxsub * (wfs.nxsub - 1)
    valid_X = (valid_X - total / 2) * geom.get_pixsize()
    valid_Y = (valid_Y - total / 2) * geom.get_pixsize()
    return valid_X, valid_Y
def wfs_to_json(wfs, geom, type, *, x_pos=None, y_pos=None):
    """return a json representation of a wfs

    Args:
        wfs : (Param_wfs) : wfs to represent as json (a list of Param_wfs for type "ts")

        geom : (Param_geom) : geom settings

        type : (string) : wfs type ("lgs", "ngs", "target" or "ts")

    Kargs:
        x_pos : (list(float)) : x coordinates of the targets (type "target" only)

        y_pos : (list(float)) : y coordinates of the targets (type "target" only)

    Returns:
        wfs_json : (dict) : json-serializable description of the wfs

    Raises:
        ValueError : if type is unknown, if target positions are missing or
            mismatched, or if the wfs gsalt is inconsistent with type
    """
    types = ["lgs", "ngs", "target", "ts"]
    if type not in types:
        # BUGFIX: the ValueError used to be instantiated without `raise`,
        # silently accepting invalid types (5 such sites in this block)
        raise ValueError("type must be one of " + str(types))

    wfs_json = {}

    if type == "ts":
        # truth sensors: wfs is a list of Param_wfs
        wfs_json = {
                "nssp": wfs[0].get_nxsub(),
                "alphaX_as": [w.get_xpos() for w in wfs],
                "alphaY_as": [w.get_ypos() for w in wfs]
        }

    elif type == "target":
        # robustness: also reject y_pos=None (previously raised TypeError in len())
        if x_pos is None or y_pos is None or len(x_pos) != len(y_pos):
            raise ValueError("pointing direction of WFS target must be provided (x_pos, y_pos)")
        wfs_json = {
                "nssp": wfs.get_nxsub(),
                "alphaX_as": x_pos,
                "alphaY_as": y_pos
        }

    else:
        # values not provided by compass — defaults for the json output
        bdw = 3.3e-7
        lgs_depth = 5000.
        lgs_cst = 0.1
        wfs_json = {
                "nssp": wfs.get_nxsub(),
                "alphaX_as": wfs.get_xpos(),
                "alphaY_as": wfs.get_ypos(),
                "XPup": wfs.get_dx() * geom.get_pixsize(),
                "YPup": wfs.get_dy() * geom.get_pixsize(),
                "thetaML": wfs.get_thetaML(),
                "thetaCam": 0,
                "sensitivity": 0,
                "pixSize": wfs.get_pixsize(),
                "lambdaWFS": wfs.get_Lambda(),
                "bandwidth": bdw,
                "throughput": wfs.get_optthroughput(),
                "RON": wfs.get_noise()
        }

        if wfs.get_gsalt() > 0:
            if type == "ngs":
                raise ValueError("wfs is not a NGS (gsalt > 0)")

            wfs_json["lgsAlt"] = wfs.get_gsalt()
            wfs_json["lgsDepth"] = lgs_depth
            wfs_json["lgsFlux"] = wfs.lgsreturnperwatt * wfs.laserpower * \
                wfs.optthroughput * 10**4
            wfs_json["spotWidth"] = wfs.get_beamsize()
            wfs_json["lgsCst"] = lgs_cst

        else:
            if type == "lgs":
                raise ValueError("wfs is not a LGS (gsalt == 0) ")
            wfs_json["magnitude"] = wfs.get_gsmag()

    return wfs_json


def wfs_json_notice(type):
    """Return the notice of the wfs json representation

    Args:
        type : (string) : wfs type ("lgs", "ngs" or "target")

    Returns:
        notice : (dict) : field-by-field description of the json keys

    Raises:
        ValueError : if type is not "lgs", "ngs" or "target"
    """
    if type != "lgs" and type != "ngs" and type != "target":
        # BUGFIX: the ValueError used to be instantiated without `raise`
        raise ValueError("type must be either \"lgs\", \"ngs\" or \"target\"")
    if type == "target":
        notice = {
                "nssp": " : number of subapertures along the diameter",
                "alphaX_as": " arcsec : list of pointing direction of the wfs (on x axis)",
                "alphaY_as": " arcsec : list of pointing direction of the wfs (on y axis)",
        }
    else:
        # NOTE(review): the thetaML/thetaCam descriptions look swapped
        # w.r.t. wfs_to_json (thetaML holds the microlens rotation there) —
        # confirm against the original; left unchanged here.
        notice = {
                "nssp": " : number of subapertures along the diameter",
                "alphaX_as": " arcsec : pointing direction of the wfs (on x axis)",
                "alphaY_as": " arcsec : pointing direction of the wfs (on y axis)",
                "XPup": " meter : pupil shift of the WFS (on axis x)",
                "YPup": " meter : pupil shift of the WFS (on axis y)",
                "thetaML": " radian : rotation of the camera",
                "thetaCam": " radian : rotation of the microlenses",
                "sensitivity": " : sensitivity coeff of this WFS",
                "pixSize": " arcsec : WFS pixel size",
                "lambdaWFS": " meter : WFS wavelength",
                "bandwidth": " meter : WFS bandwidth",
                "throughput": " percent : transmission for the GS",
                "RON": " nb of e- : Read Out Noise",
        }
        if type == "lgs":
            notice["lgsAlt"] = " meter : laser guide star altitude"
            notice["lgsDepth"] = " meter : laser guide star depth"
            notice["lgsFlux"] = " (ph/m2/s) : LGS photon return at M1"
            notice["spotWidth"] = " arcsec : lazer width"
            notice["lgsCst"] = " : constant on lgs (simulate that LGS cannot measure tip-tilt and focus, for Linear Algebra purpose)"
        if type == "ngs":
            notice["magnitude"] = " : guide stars magnitude"

    return notice
(default : all) + + influ_index : (int) : actuator index to get the influence function from + """ + + write_json_sys_param(sup, wfss_indices=wfss_indices, ts=ts, + dms_indices=dms_indices,file_name=file_name_sys) + + write_json_atm_param(sup, file_name=file_name_atm) + + common.write_data(file_name_data, sup, wfss_indices=wfss_indices, + dms_indices=dms_indices, controller_id=controller_id, + influ=influ_index, compose_type="controller") \ No newline at end of file diff --git a/shesha/util/writers/tao/atmParams.py b/shesha/util/writers/tao/atmParams.py new file mode 100644 index 0000000..9584177 --- /dev/null +++ b/shesha/util/writers/tao/atmParams.py @@ -0,0 +1,18 @@ +import json +import numpy as np +from shesha.util.writers import common + +def write_json_atm_param(sup, *, file_name="./atm-params.json"): + + """Return a json representation of the atmospheric parameters + + Args: + sup : (CompassSupervisor) : supervisor to get the json representation from + """ + atm_json={ + "notice" : common.atmos_json_notice(), + "profiles" : [ common.atmos_to_json(sup.config.p_atmos)] + } + f = open(file_name,"w") + f.write(json.dumps(atm_json,indent=4)) + f.close() \ No newline at end of file diff --git a/shesha/util/writers/tao/sysParams.py b/shesha/util/writers/tao/sysParams.py new file mode 100644 index 0000000..48d7923 --- /dev/null +++ b/shesha/util/writers/tao/sysParams.py @@ -0,0 +1,106 @@ +import json +import numpy as np +from shesha.util.writers import common + +def write_json_sys_param(sup, *, wfss_indices=None, ts=False, dms_indices=None, file_name="./sys-params.json"): + """Return a json representation of the AO system + + Args: + sup : (CompassSupervisor) : supervisor to get the json representation from + + Kargs: + wfss_indices : (list(int)) : list of wfs indices added into the json + + dms_indices : (list(int)) : list of dm indices added into the json + + file_name : (str) : output file name + """ + + if(wfss_indices is None): + wfss_indices = 
list(range(len(sup.config.p_wfss))) + elif(isinstance(wfss_indices,int)): + wfss_indices = list(range(wfss_indices)) + + if(dms_indices is None): + dms_indices = list(range(len(sup.config.p_dms))) + elif(isinstance(dms_indices,int)): + dms_indices = list(range(dms_indices)) + + # general + sys_json={ + "diam" : { + "comment": " meter : telescope diameter", + "value" : sup.config.p_tel.get_diam() + }, + "cobs" : { + "comment": " percent : central obscuration", + "value" : sup.config.p_tel.get_cobs() + }, + "tFrame": { + "comment": " second : frame rate", + "value": sup.config.p_loop.ittime + }, + "fracsub": { + "comment": "Minimal illumination fraction for valid subap", + "value": sup.config.p_wfss[0].get_fracsub() + }, + "throughAtm": { + "comment": "percent : atmosphere transmission", + "value": 1.0 + }, + "tracking": { + "comment": "arcsec^2 : telescope tracking error parameters (x^2, y^2 and xy)", + "value": [ + 1.0, + 1.0, + 1.0 + ] + } + } + + diam = sup.config.p_tel.get_diam() + geom = sup.config.p_geom + #WFSs + lgs_json = [] + ngs_json = [] + target_json = None + ts_json = None + for i in wfss_indices: + w = sup.config.p_wfss[i] + if w in sup.config.p_wfs_lgs: + lgs_json.append(common.wfs_to_json(w,geom,"lgs")) + elif w in sup.config.p_wfs_ngs: + if( i == (len(sup.config.p_wfss) - 1) ): + target_json = common.wfs_to_json(w,geom,"target", + x_pos = [t.xpos for t in sup.config.p_targets], + y_pos = [t.ypos for t in sup.config.p_targets] ) + else: + ngs_json.append(common.wfs_to_json(w,geom,"ngs")) + if ts : + w = sup.config.p_wfs_ts + if(w[0].nxsub == 0): + argmax = np.array([sup.config.p_wfss[i].nxsub for i in wfss_indices]).argmax() + w[0].set_nxsub(sup.config.p_wfss[argmax].nxsub) + w[0].set_pdiam(sup.config.p_wfss[argmax]._pdiam) + ts_json = common.wfs_to_json(w,geom,"ts") + + wfs_json = { + "notice_lgs" : common.wfs_json_notice("lgs"), + "notice_ngs" : common.wfs_json_notice("ngs"), + "lgs" : lgs_json, + "ngs" : ngs_json, + "ts" : ts_json, + 
"target":target_json} + + sys_json["wfs"] = wfs_json + + #DMs + dm_json = [] + for i in dms_indices: + d = sup.config.p_dms[i] + dm_json.append(common.dm_to_json(d, geom)) + sys_json["dm"] = dm_json + + f = open(file_name, "w") + f.write(json.dumps({"instrument":sys_json},indent=4)) + f.close() diff --git a/shesha/util/writers/yao/__init__.py b/shesha/util/writers/yao/__init__.py new file mode 100644 index 0000000..0a89a87 --- /dev/null +++ b/shesha/util/writers/yao/__init__.py @@ -0,0 +1,70 @@ + +from shesha.util.writers.yao.general import * +from shesha.util.writers.yao.wfs import * +from shesha.util.writers.yao.dm import * +from shesha.util.writers.yao.targets import * +from shesha.util.writers.yao.atmos import * +from shesha.util.writers.yao.loop import * +from shesha.util.writers.yao.gs import * +from shesha.util.writers import common + +def write_parfiles(sup, *, param_file_name="./yao.par", + fits_file_name="./yao.fits", + screen_dir="\"./yao_screen\"", + n_wfs=None, + controller_id=-1, + influ_index=0, + imat_type="controller"): + """Write parameter files for YAO simulations + + Args: + sup : (CompassSupervisor) : supervisor + + Kwargs: + param_file_name : (str) : (optional), default "./yao.par" name of the yao parameter file + + fits_file_name : (str) : (optional), default "./yao.fits" name of fits file containing sub-apertures and actuator position etc + + screen_dir : (str) : (optional), default "./yao_screen" path to the yao turbulent screen files + + n_wfs : (int) : (optional), number of WFS (default: all wfs) + + controller_id : (int) : index of te controller (default : all) + + influ_index : (int) : actuator index to get the influence function from + + imat_type : (str) : (optional), default "controller" use of regular controller or split tomography (among "controller", "splitTomo") + """ + conf = sup.config + if(n_wfs is None): + n_wfs = len(conf.p_wfss) + zerop = conf.p_wfss[0].zerop + lgs_return_per_watt = max([w.lgsreturnperwatt for w in 
conf.p_wfss]) + + print("writing parameter file to " + param_file_name) + write_general(param_file_name, conf.p_geom, conf.p_controllers, + conf.p_tel, conf.simul_name) + wfs_offset = 0 + dm_offset = 0 + ndm = init_dm(param_file_name) + for sub_system, c in enumerate(conf.p_controllers): + dms = [ conf.p_dms[i] for i in c.get_ndm() ] + ndm += write_dms (param_file_name, dms ,sub_system=sub_system + 1, + offset=dm_offset) + dm_offset = dm_offset+len(dms) + finish_dm(param_file_name, ndm) + gs = init_wfs(param_file_name) + for sub_system, c in enumerate(conf.p_controllers): + wfss = [ conf.p_wfss[i] for i in c.get_nwfs()] + n_ngs, n_lgs = write_wfss(param_file_name, wfss, sub_system=sub_system + 1, + n_wfs=n_wfs, offset=wfs_offset) + gs = (gs[0] + n_ngs, gs[1] + n_lgs) + wfs_offset = wfs_offset + len(wfss) + finish_wfs(param_file_name, gs[0], gs[1]) + write_targets(param_file_name, conf.p_targets) + write_gs(param_file_name, zerop, lgs_return_per_watt, + conf.p_geom.zenithangle) + write_atm(param_file_name, conf.p_atmos, screen_dir,conf.p_geom.zenithangle) + write_loop(param_file_name, conf.p_loop, conf.p_controllers[0]) + common.write_data(fits_file_name, sup, wfss_indices=np.arange(n_wfs), + controller_id=controller_id, influ=influ_index, compose_type=imat_type) diff --git a/shesha/util/writers/yao/atmos.py b/shesha/util/writers/yao/atmos.py new file mode 100644 index 0000000..a9ee0f8 --- /dev/null +++ b/shesha/util/writers/yao/atmos.py @@ -0,0 +1,37 @@ +import numpy as np + +def write_atm(file_name, atm, screen_file, zenithangle): + """Write (append) atmospheric parameters to file for YAO use + + Args: + file_name : (str) : name of the file to append the parameter to + + atm : (Param_atmos) : compass atmospheric parameters. Note that + atm.winddir is transformed + + screen_file : (str) : path to the yao turbulent screen files. Note + that the string is passed through raw (without quotes around it) + in order to use yorick variables in the path name (e.g., Y_USER). 
+ """ + f = open(file_name,"a+") + f.write("\n\n//------------------------------") + f.write("\n//ATM parameters") + f.write("\n//------------------------------") + + f.write("\nr0 =" + str(atm.r0) + "; //qt 500 nm") + f.write("\natm.dr0at05mic = tel.diam/r0;") + + indexList = '"1"' + for i in range(2, atm.nscreens + 1): + indexList += ',"' + str(i) + '"' + f.write("\natm.screen = &(" + screen_file + "+["+indexList + \ + "]+\".fits\")") + f.write("\natm.layerspeed = &(" + np.array2string(atm.windspeed / np.cos(np.pi*zenithangle/180), \ + separator=',', max_line_width=300) + ");") + f.write("\natm.layeralt = &(" + np.array2string(atm.alt * np.cos(np.pi*zenithangle/180), \ + separator=',', max_line_width=300) + ");") + f.write("\natm.layerfrac = &(" + np.array2string(atm.frac, \ + separator=',', max_line_width=300) + ");") + f.write("\natm.winddir = &(" + np.array2string(-(atm.winddir+90)%360, \ + separator=',', max_line_width=300) + ");") + f.close() diff --git a/shesha/util/writers/yao/dm.py b/shesha/util/writers/yao/dm.py new file mode 100644 index 0000000..30774bc --- /dev/null +++ b/shesha/util/writers/yao/dm.py @@ -0,0 +1,86 @@ +YAO_DMTYPE={"pzt":"\"stackarray\"", + "tt" :"\"tiptilt\""} + +def init_dm(file_name): + """ Initialise dm entry in yao parameter file + + Args: + file_name : (str) : yao parameter file name + """ + f = open(file_name, "a+") + f.write("\n\n//------------------------------") + f.write("\n//DM parameters") + f.write("\n//------------------------------") + f.close() + return 0 + +def write_dm(file_name, dm, index, *, sub_system=1): + """Write (append) dm parameter to file for YAO use for a single dm + + Args: + file_name : (str) : name of the file to append the parameter to + + dm : (Param_dm) : compass dm parameters + + index : (int) : YAO index for dm + + sub_system : (int) : (optional), default 1 index of yao sub-system + """ + obj = "dm(" + str(index) + ")" + f = open(file_name,"a+") + f.write("\ngrow,dm,dms;") + f.write("\n" + obj + 
".type = " + YAO_DMTYPE[dm.type] + ";") + f.write("\n" + obj + ".subsystem = " + str(sub_system) + ";") + f.write("\n" + obj + ".iffile = \"\"; // not set by compass") + f.write("\n" + obj + ".alt = " + str(dm.alt) + ";") + f.write("\n" + obj + ".unitpervolt = " + str(dm.unitpervolt) + ";") + f.write("\n" + obj + ".push4imat = " + str(dm.push4imat) + ";") + + if(dm.type != "tt"): + f.write("\n" + obj + ".nxact = " + str(dm.nact) + ";") + f.write("\n" + obj + ".pitch = " + str(dm._pitch) + ";") + f.write("\n" + obj + ".thresholdresp = " + str(dm.thresh) + ";") + f.write("\n" + obj + ".pitchMargin = " + str(2.2) + "; // not set by compass") + f.write("\n" + obj + ".elt = " + str(1) + "; // not set by compass") + f.write("\n" + obj + ".coupling = " + str(dm.coupling) + ";") + f.close() + +def write_dms(file_name, dms, *, sub_system=1, offset=0): + """Write (append) dm parameter to file for YAO + + Args: + file_name : str : name of the file to append the parameter to + + dms : list[Param_dm] : compass dm parameters list + + Kwargs: + sub_system : (int) : (optional), default 1 index of yao sub-system + + offset : (int) : (optional), default 0 yao dm index offset + + Returns: + n_dm : (int) : number of dm passed to yao + """ + f = open(file_name,"a+") + + i = 1 + for d in dms: + f.write("\n\n//DM " + str(i + offset)) + f.flush() + write_dm(file_name, d, i + offset, sub_system=sub_system) + i += 1 + + f.close() + return len(dms) + +def finish_dm(file_name, n_dm): + """ Finalize wfs section in yao parameter file + + Args: + file_name : (str) : yao parameter file name + + n_dm : (int) : number of ngs written to yao parameter file + """ + f=open(file_name, "a+") + f.write("\n\nndm = " + str(n_dm) + ";") + f.close() diff --git a/shesha/util/writers/yao/general.py b/shesha/util/writers/yao/general.py new file mode 100644 index 0000000..c0d9a45 --- /dev/null +++ b/shesha/util/writers/yao/general.py @@ -0,0 +1,38 @@ +import numpy as np + +def write_general(file_name, geom, 
controllers, tel, simul_name): + """Write (append) general simulation parameter to file for YAO use + + Args: + file_name : (str) : name of the file to append the parameter to + + geom : (Param_geom) : compass AO geometry parameters + + controllers : ([Param_controller]) : list of compass controller parameters + + tel : (Param_tel) : compass telescope parameters + + simul_name : (str) : simulation name + """ + f = open(file_name,"w") + f.write("\n\n//------------------------------") + f.write("\n//general parameters") + f.write("\n//------------------------------") + f.write("\nsim.name = \"" + simul_name + "\";") + f.write("\nsim.pupildiam = " + str(geom.pupdiam) + ";") + f.write("\nsim.debug = 0;") + f.write("\nsim.verbose = 1;") + + f.write("\nmat.file = \"\";") + f.write("\nmat.condition = &(" + np.array2string( \ + np.array([np.sqrt(c.maxcond) for c in controllers]), \ + separator=',',max_line_width=300) + ");") + + f.write("\nmat.method = \"none\";") + #f.write("\nhfield = 15") + f.write("\nYAO_SAVEPATH = \"\"; // where to save the output to the simulations") + + f.write("\ntel.diam = " + str(tel.diam) + ";") + f.write("\ntel.cobs = " + str(tel.cobs) + ";") + f.write("\ndm = [];") + f.write("\nwfs = [];") diff --git a/shesha/util/writers/yao/gs.py b/shesha/util/writers/yao/gs.py new file mode 100644 index 0000000..ac4440e --- /dev/null +++ b/shesha/util/writers/yao/gs.py @@ -0,0 +1,23 @@ + +def write_gs(file_name, zero_point, lgs_return_per_watt, zenith_angle): + """Write (append) guide stars parameters to file for YAO + + Args: + file_name : (str) : name of the file to append the parameter to + + zero_point : (float) : flux for magnitude 0 (ph/m²/s) + + lgs_return_per_watt : (float) : return per watt factor (ph/cm²/s/W) + + zenith_angle : (float) : zenithal angle (degree) + """ + f=open(file_name,"a+") + f.write("\n\n//------------------------------") + f.write("\n//GS parameters") + f.write("\n//------------------------------") + + f.write("\ngs.zeropoint = 
" + str(zero_point)+"; //TODO get ") + # Consider later (ngs intensity) + f.write("\ngs.lgsreturnperwatt = " + str(lgs_return_per_watt) + \ + "; //TODO check lgs case") + f.write("\ngs.zenithangle = " + str(zenith_angle) + ";") diff --git a/shesha/util/writers/yao/loop.py b/shesha/util/writers/yao/loop.py new file mode 100644 index 0000000..9c1c0ff --- /dev/null +++ b/shesha/util/writers/yao/loop.py @@ -0,0 +1,26 @@ + +def write_loop(file_name, loop, controller): + """Write (append) AO loop parameters to file for YAO + + Args: + file_name (str) : yao parameter file name + + loop : (Param_loop) : compass loop parameters + + controller : (Param_controller) : compass controller parameters + """ + f=open(file_name,"a+") + f.write("\n\n//------------------------------") + f.write("\n//LOOP parameters") + f.write("\n//------------------------------") + f.write("\nloop.method = " + "\"none\"" + ";") + f.write("\nloop.leak = " + str(0.001) + ";") + f.write("\nloop.gain = " + str(controller.gain) + ";") + f.write("\nloop.framedelay = " + str(controller.delay+1) + ";") # delay_yao = delay_compass + 1 + f.write("\nloop.niter = " + str(loop.niter) + ";") + f.write("\nloop.ittime = " + str(loop.ittime) + ";") + f.write("\nloop.skipevery = " + str(100000) + ";") + f.write("\nloop.startskip = " + str(30) + ";") + f.write("\nloop.skipby = " + str(5000) + ";") + + f.close() diff --git a/shesha/util/writers/yao/targets.py b/shesha/util/writers/yao/targets.py new file mode 100644 index 0000000..1dae56b --- /dev/null +++ b/shesha/util/writers/yao/targets.py @@ -0,0 +1,30 @@ +import numpy as np +def write_targets(file_name, tars, *, sub_system=1): + """Write (append) target parameter to file for YAO use for a single dm + + Args: + file_name : (str) : name of the file to append the parameter to + + tars : (list[Param_target]) : compass target parameters list + + Kwargs: + sub_system : (int) : (optional), default 1 yao sub system index + """ + f=open(file_name,"a+") + 
f.write("\n\n//------------------------------") + f.write("\n//TAR parameters") + f.write("\n//------------------------------") + + f.write("\ntarget.lambda = &(" + np.array2string(np.array( \ + [t.Lambda for t in tars]), separator=',', max_line_width=300) + \ + ");") #&([0.55]); + f.write("\ntarget.xposition = &(" + np.array2string(np.array(\ + [t.xpos for t in tars]), separator=',', max_line_width=300) + \ + ");") # &mmsepos_asec1; + f.write("\ntarget.yposition = &(" + np.array2string(np.array( \ + [t.ypos for t in tars]), separator=',', max_line_width=300) + \ + ");") # &mmsepos_asec2; + dispzoom = np.ones((len(tars))) + f.write("\ntarget.dispzoom = &(" + np.array2string(dispzoom, \ + separator=',',max_line_width=300) + ") ; // not set by compass") + #+ np.array2string(np.array([t.mag for t in tars]),separator=',',max_line_width=300)+";)") # &array(5.0,numberof(mmsepos_asec1)); diff --git a/shesha/util/writers/yao/wfs.py b/shesha/util/writers/yao/wfs.py new file mode 100644 index 0000000..90340d5 --- /dev/null +++ b/shesha/util/writers/yao/wfs.py @@ -0,0 +1,127 @@ +import numpy as np + +YAO_WFSTYPE={"sh":"\"hartmann\"", "pyrhr":"\"pyramid\""} + +def init_wfs(file_name): + """ Initialise wfs entry in yao parameter file + + Args: + file_name : (str) : yao parameter file name + """ + f = open(file_name,"a+") + f.write("\n\n//------------------------------") + f.write("\n//WFS parameters") + f.write("\n//------------------------------") + return (0,0) + +def write_wfs(file_name, wfs, index, *, sub_system=1): + """Write (append) wfs parameter to file for YAO use for a single wfs + + Args: + file_name : (str) : name of the file to append the parameter to + + wfs : (Param_wfs) : compass wfs parameters + + index :(int) : wfs index in ayo parameter file + + Kwargs: + sub_system : (int) : (optional), default 1 sub_system in yao + """ + obj = "wfs(" + str(index) + ")" + f = open(file_name, "a+") + f.write("\ngrow,wfs,wfss;") + f.write("\n" + obj + ".type = " + 
YAO_WFSTYPE[wfs.type] + ";") + f.write("\n" + obj + ".subsystem = " + str(sub_system) + ";") + f.write("\n" + obj + ".shmethod = 2" + ";") + f.write("\n" + obj + ".shnxsub = " + str(wfs.nxsub) + ";") + f.write("\n" + obj + ".lambda = " + str(wfs.Lambda) + ";") + f.write("\n" + obj + ".pixsize = " + str(wfs.pixsize) + ";") + f.write("\n" + obj + ".npixels = " + str(wfs.npix) + ";") + f.write("\n" + obj + ".shthreshold = 0; // not set by compass") + f.write("\n" + obj + ".dispzoom = 1.0; // not set by compass") + f.write("\n" + obj + ".fracIllum = " + str(wfs.fracsub) + ";") + f.write("\n" + obj + ".rotation = " + str(wfs.thetaML) + ";") + f.write("\n" + obj + ".shift = [ " + str(wfs.dx) + " , " + \ + str(wfs.dy) + " ];") + f.write("\n" + obj + ".LLTxy = [ " + str(wfs.lltx) + " , " + \ + str(wfs.llty) + " ];") + f.write("\n" + obj + ".gspos = [ " + str(wfs.xpos) + " , " + \ + str(wfs.ypos) + " ];") + if(wfs.noise<0): + f.write("\n" +obj + ".noise = 1;") + f.write("\n" +obj + ".ron = 0;") + else: + f.write("\n" + obj + ".noise = 1;") + f.write("\n" + obj + ".ron = " + str(wfs.noise) + ";") + f.write("\n" + obj + ".darkcurrent = 0 ; // not set by compass ") + if(wfs.gsalt > 0): + f.write("\n" + obj + ".gsalt = " + str(wfs.gsalt) + ";") + f.write("\n" + obj + ".gsdepth = " + str(1) + ";") + f.write("\n" + obj + ".optthroughput = " + str(wfs.optthroughput) +\ + ";") + f.write("\n" + obj + ".laserpower = " + str(wfs.laserpower) + ";") + f.write("\n" + obj + ".filtertilt = " + str(1) + ";") + f.write("\n" + obj + ".correctUpTT = " + str(1) + ";") + f.write("\n" + obj + ".uplinkgain = " + str(0.2) + ";") + f.close() + + +def write_wfss(file_name, wfss, *, n_wfs=-1, sub_system=1, offset=0): + """Write (append) wfs parameter to file for YAO use for a wfs list + + Args: + file_name : (str) : name of the file to append the parameter to + + wfss : (list[ Param_wfs]) : compass wfs parameters list + + Kwargs: + n_wfs : (int) : (optional), default -1 number of wfs passed to yao (-1 
: all wfs) + + sub_system : (int) : (optional), default 1 yao sub system index + + offset : (int) : (optional), default 0 yao wfs index offset + + Returns: + n_ngs : (int) : number of ngs passed to yao + n_lgs : (int) : number of lgs passed to yao + """ + #counting nb of lgs and ngs + n_ngs=0 + n_lgs=0 + if(n_wfs<0): + n_wfs = len(wfss) + for w in wfss[:n_wfs]: + if(w.gsalt>0): + n_lgs += 1 + else: + n_ngs += 1 + n_wfs = n_ngs + n_lgs + f=open(file_name, "a+") + + i = 1 + for w in wfss[:n_wfs] : + f.write("\n\n//WFS" + str(i + offset)) + f.flush() + write_wfs(file_name, w, i + offset, sub_system=sub_system) + i += 1 + + f.close() + return (n_ngs , n_lgs) + +################################ + +def finish_wfs(file_name, n_ngs, n_lgs): + """ Finalize wfs section in yao parameter file + + Args: + file_name : (str) : yao parameter file name + + n_ngs : (int) : number of ngs written to yao parameter file + + n_lgs : (int) : number of lgs written to yao parameter file + """ + f=open(file_name,"a+") + f.write("\n\nnngs = "+str(n_ngs)+";") + f.write("\nnlgs = "+str(n_lgs)+";") + f.write("\nnwfs = "+str(n_ngs+n_lgs)+";") + f.close() diff --git a/shesha/widgets/__init__.py b/shesha/widgets/__init__.py index b01ba43..6c069a7 100644 --- a/shesha/widgets/__init__.py +++ b/shesha/widgets/__init__.py @@ -1,7 +1,7 @@ ## @package shesha.widgets ## @brief Widget features ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # diff --git a/shesha/widgets/widget_ao.py b/shesha/widgets/widget_ao.py index 780d02d..0ff50c0 100755 --- a/shesha/widgets/widget_ao.py +++ b/shesha/widgets/widget_ao.py @@ -2,7 +2,7 @@ ## @package shesha.widgets.widget_ao ## @brief Widget to simulate a closed loop ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # @@ -60,7 +60,7 @@ from pyqtgraph.dockarea import Dock, DockArea from shesha.util.tools import 
plsh, plpyr -from shesha.util.utilities import load_config_from_file +from shesha.config import ParamConfig import warnings @@ -257,7 +257,7 @@ def load_config(self, *args, config_file=None, supervisor=None, **kwargs) -> Non sys.path.insert(0, self.defaultParPath) if supervisor is None: - self.config = load_config_from_file(config_file) + self.config = ParamConfig(config_file) else: self.config = supervisor.get_config() @@ -340,8 +340,8 @@ def load_config(self, *args, config_file=None, supervisor=None, **kwargs) -> Non self.expertWidget.setSupervisor(self.supervisor) self.expertWidget.updatePanels() - if (hasattr(self.config, "layout")): - area_filename = self.defaultAreaPath + "/" + self.config.layout + ".area" + if (hasattr(self.config._config, "layout")): + area_filename = self.defaultAreaPath + "/" + self.config._config.layout + ".area" self.loadArea(filename=area_filename) self.adjustSize() diff --git a/shesha/widgets/widget_base.py b/shesha/widgets/widget_base.py index 83cc25f..41aba34 100644 --- a/shesha/widgets/widget_base.py +++ b/shesha/widgets/widget_base.py @@ -1,7 +1,7 @@ ## @package shesha.widgets.widget_base ## @brief Abstract Widget base ## @author COMPASS Team -## @version 5.0.0 +## @version 5.1.0 ## @date 2020/05/18 ## @copyright GNU Lesser General Public License # diff --git a/tests/check.py b/tests/check.py index 249755e..e4ad55e 100644 --- a/tests/check.py +++ b/tests/check.py @@ -1,4 +1,41 @@ #!/usr/bin/env python +## @package shesha.tests +## @brief Runs a set of tests +## @author COMPASS Team +## @version 5.1.0 +## @date 2020/05/18 +## @copyright GNU Lesser General Public License +# +# This file is part of COMPASS +# +# Copyright (C) 2011-2019 COMPASS Team +# All rights reserved. 
+# Distributed under GNU - LGPL +# +# COMPASS is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser +# General Public License as published by the Free Software Foundation, either version 3 of the License, +# or any later version. +# +# COMPASS: End-to-end AO simulation tool using GPU acceleration +# The COMPASS platform was designed to meet the need of high-performance for the simulation of AO systems. +# +# The final product includes a software package for simulating all the critical subcomponents of AO, +# particularly in the context of the ELT and a real-time core based on several control approaches, +# with performances consistent with its integration into an instrument. Taking advantage of the specific +# hardware architecture of the GPU, the COMPASS tool allows to achieve adequate execution speeds to +# conduct large simulation campaigns called to the ELT. +# +# The COMPASS platform can be used to carry a wide variety of simulations to both testspecific components +# of AO of the E-ELT (such as wavefront analysis device with a pyramid or elongated Laser star), and +# various systems configurations such as multi-conjugate AO. +# +# COMPASS is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the +# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License along with COMPASS. +# If not, see . 
+ """script test to simulate a closed loop Usage: @@ -14,11 +51,12 @@ """ from docopt import docopt +import time if __name__ == "__main__": import pandas from shesha.supervisor.compassSupervisor import CompassSupervisor - from shesha.util.utilities import load_config_from_file + from shesha.config import ParamConfig arguments = docopt(__doc__) @@ -44,7 +82,7 @@ else: # Get parameters from file param_file = arguments[""] - config = load_config_from_file(param_file) + config = ParamConfig(param_file) if arguments["--devices"]: config.p_loop.set_devices([ @@ -52,14 +90,20 @@ ]) try: + t0 = time.perf_counter() supervisor = CompassSupervisor(config) + t_init = time.perf_counter() - t0 is_init = supervisor.is_init except: supervisor = None is_init = False + t_init = 0 SR = "N/A" try: + t0 = time.perf_counter() supervisor.loop(supervisor.config.p_loop.niter) + t_loop = time.perf_counter() - t0 + t_init = 0 SR = supervisor.target.get_strehl(0)[1] except: SR = "N/A" @@ -72,6 +116,8 @@ idx = len(df.index) df.loc[idx, "Test name"] = param_file.split('/')[-1] df.loc[idx, "Init"] = str(is_init) + df.loc[idx, "T Init"] = str(t_init) df.loc[idx, "SR@100iter"] = str(SR) + df.loc[idx, "T Loop"] = str(t_loop) df.to_hdf("check.h5", "check") diff --git a/tests/checkCompass.sh b/tests/checkCompass.sh index 4155edb..8d67898 100755 --- a/tests/checkCompass.sh +++ b/tests/checkCompass.sh @@ -2,18 +2,18 @@ # script="$SHESHA_ROOT/shesha/tests/check.py" rm -f check.h5 -script="tests.check" +script="$SHESHA_ROOT/tests/check.py" conf_path="$SHESHA_ROOT/data/par/par4tests" nb_test=$(ls -1 $conf_path/*.py | wc -l) current_test=1 for file in $conf_path/*.py do name=$(basename $file ".py") - CMD="python -m $script $file" + CMD="python $script $file" echo "[$current_test/$nb_test] running $name" $CMD > /dev/null # let "current_test++" current_test=$(expr $current_test + 1) done -CMD="python -m $script osef --displayResult --repportResult=report_E2E.md" +CMD="python $script osef --displayResult 
--repportResult=report_E2E.md" $CMD diff --git a/tests/old_scripts/benchmark.sh b/tests/old_scripts/benchmark.sh new file mode 100755 index 0000000..f73312d --- /dev/null +++ b/tests/old_scripts/benchmark.sh @@ -0,0 +1,79 @@ +#!/bin/bash +#FILES="scao_sh_16x16_8pix.py " +# scao_sh_40x40_8pix.py scao_sh_64x64_8pix.py +FILES_SCAO="scao_sh_16x16_8pix.py scao_sh_40x40_8pix.py scao_sh_80x80_8pix.py" +FILES_PYR="scao_pyrhr_16x16.py scao_pyrhr_40x40.py scao_pyrhr_80x80.py" +#FILES_MCAO="mcao_8m.py mcao_40m.py" +#FILES+="scao_sh_16x16_16pix.py" +# scao_sh_40x40_16pix.py scao_sh_64x64_16pix.py scao_sh_80x80_16pix.py" +#FILES+="scao_16x16_8pix_noisy.par scao_40x40_8pix_noisy.par scao_64x64_8pix_noisy.par scao_80x80_8pix_noisy.par +#FILES+="scao_16x16_16pix_noisy.par scao_40x40_16pix_noisy.par scao_64x64_16pix_noisy.par scao_80x80_16pix_noisy.par" +#FILES="scao_pyr_80x80_8pix.py" + +DATE=`date +%F_%Hh%M` +OUTPUT="$SHESHA_ROOT/data/bench-results/outputfile_$DATE\_$HOSTNAME" +DEVICE=$1 + +echo "writing output in "$OUTPUT + +script="$SHESHA_ROOT/test/benchmark_script.py" +DEVICE="0" +for f in $FILES_SCAO +do + for CTR in "ls" "modopti" "mv" "geo" + do + for COG in "cog" "tcog" "wcog" "bpcog" + do + CMD="python $script $f $COG $CTR $DEVICE" + echo "execute $CMD" >> $OUTPUT + $CMD 2>> $OUTPUT >> $OUTPUT + done + done +done + +# for f in $FILES_MCAO +# do +# for CTR in "mv" +# do +# for COG in "cog" +# do +# CMD="python $script $f $COG $CTR $DEVICE" +# echo "execute $CMD" >> $OUTPUT +# $CMD 2>> $OUTPUT >> $OUTPUT +# done +# done +# done + +DEVICE="0123" +for f in $FILES_PYR +do + for CTR in "ls" + do + for COG in "pyr" #"pyr" # + do + CMD="python $script $f $COG $CTR $DEVICE" + echo "execute $CMD" >> $OUTPUT + $CMD 2>> $OUTPUT >> $OUTPUT + done + done +done + + + +FILES_LGS="scao_sh_16x16_8pix_lgs.py" +#FILES_LGS+="scao_sh_40x40_10pix_lgs.par" +#FILES_LGS+="scao_sh_64x64_16pix_lgs.par" +#FILES_LGS+="scao_sh_80x80_20pix_lgs.par" +DEVICE="0" +for f in $FILES_LGS +do + for CTR in "ls" 
"modopti" "mv" "geo" + do + for COG in "wcog" "corr" + do + CMD="python $script $f $COG $CTR $DEVICE" + echo "execute $CMD" >> $OUTPUT + $CMD 2>> $OUTPUT >> $OUTPUT + done + done +done diff --git a/tests/old_scripts/benchmark_script.py b/tests/old_scripts/benchmark_script.py new file mode 100644 index 0000000..e1b28eb --- /dev/null +++ b/tests/old_scripts/benchmark_script.py @@ -0,0 +1,420 @@ +import datetime +#import hdf5_util as h5u +import os +import platform +import re +import sys +import time +from subprocess import check_output + +import numpy as np +from pandas import DataFrame, HDFStore + +import shesha.constants as scons +import shesha.init as init +from carmaWrap.context import context as carmaWrap_context +from carmaWrap.timer import timer as carmaWrap_timer +from carmaWrap.timer import threadSync + + +def get_processor_name(): + command = "cat /proc/cpuinfo" + all_info = check_output(command, shell=True).strip().decode("utf-8") + nb_cpu = 0 + cpu = [] + for line in all_info.split("\n"): + if "model name" in line: + cpu.append(re.sub(".*model name.*:", "", line, 1)) + nb_cpu += 1 + return nb_cpu, cpu + + +def script4bench(param_file, centroider, controller, devices, fwrite=True): + """ + + Args: + param_file: (str) : parameters filename + + centroider: (str) : centroider type + + controller: (str) : controller type + """ + + c = carmaWrap_context(devices=np.array(devices, dtype=np.int32)) + # c.set_active_device(device) + + timer = carmaWrap_timer() + + # times measured + synctime = 0. + move_atmos_time = 0. + t_raytrace_atmos_time = 0. + t_raytrace_dm_time = 0. + s_raytrace_atmos_time = 0. + s_raytrace_dm_time = 0. + comp_img_time = 0. + docentroids_time = 0. + docontrol_time = 0. + applycontrol_time = 0. 
+ + # reading parfile + filename = param_file.split('/')[-1] + param_path = param_file.split(filename)[0] + sys.path.insert(0, param_path) + #exec("import %s as config" % filename.split(".py")[0]) + config = __import__(filename.split(".py")[0]) + sys.path.remove(param_path) + + config.p_centroiders[0].set_type(centroider) + + if (centroider == "tcog"): + config.p_centroiders[0].set_thresh(0.) + elif (centroider == "bpcog"): + config.p_centroiders[0].set_nmax(16) + elif (centroider == "geom"): + config.p_centroiders[0].set_type("cog") + elif (centroider == "wcog"): + config.p_centroiders[0].set_type_fct("gauss") + config.p_centroiders[0].set_width(2.0) + elif (centroider == "corr"): + config.p_centroiders[0].set_type_fct("gauss") + config.p_centroiders[0].set_width(2.0) + + if (controller == "modopti"): + config.p_controllers[0].set_type("ls") + config.p_controllers[0].set_modopti(1) + else: + config.p_controllers[0].set_type(controller) + + config.p_loop.set_niter(2000) + + threadSync() + timer.start() + threadSync() + synctime = timer.stop() + timer.reset() + + # init system + timer.start() + tel = init.tel_init(c, config.p_geom, config.p_tel, config.p_atmos.r0, + config.p_loop.ittime, config.p_wfss) + threadSync() + tel_init_time = timer.stop() - synctime + timer.reset() + + timer.start() + atm = init.atmos_init(c, config.p_atmos, config.p_tel, config.p_geom, + config.p_loop.ittime) + threadSync() + atmos_init_time = timer.stop() - synctime + timer.reset() + + timer.start() + dms = init.dm_init(c, config.p_dms, config.p_tel, config.p_geom, config.p_wfss) + threadSync() + dm_init_time = timer.stop() - synctime + timer.reset() + + timer.start() + target = init.target_init(c, tel, config.p_target, config.p_atmos, config.p_tel, + config.p_geom, config.p_dms) + threadSync() + target_init_time = timer.stop() - synctime + timer.reset() + + timer.start() + wfs = init.wfs_init(c, tel, config.p_wfss, config.p_tel, config.p_geom, config.p_dms, + config.p_atmos) + 
threadSync() + wfs_init_time = timer.stop() - synctime + timer.reset() + + timer.start() + rtc = init.rtc_init(c, tel, wfs, dms, atm, config.p_wfss, config.p_tel, + config.p_geom, config.p_atmos, config.p_loop.ittime, + config.p_centroiders, config.p_controllers, config.p_dms) + threadSync() + rtc_init_time = timer.stop() - synctime + timer.reset() + + print("... Done with inits !") + # h5u.validDataBase(os.environ["SHESHA_ROOT"]+"/data/",matricesToLoad) + + strehllp = [] + strehlsp = [] + ############################################################ + # _ _ + # (_) | | + # _ __ ___ __ _ _ _ __ | | ___ ___ _ __ + # | '_ ` _ \ / _` | | '_ \ | |/ _ \ / _ \| '_ \ + # | | | | | | (_| | | | | | | | (_) | (_) | |_) | + # |_| |_| |_|\__,_|_|_| |_| |_|\___/ \___/| .__/ + # | | + # |_| + ########################################################### + if (controller == "modopti"): + for zz in range(2048): + atm.move_atmos() + + for cc in range(config.p_loop.niter): + threadSync() + timer.start() + atm.move_atmos() + threadSync() + move_atmos_time += timer.stop() - synctime + timer.reset() + + if (config.p_controllers[0].type != b"geo"): + if ((config.p_target is not None) and (rtc is not None)): + for i in range(config.p_target.ntargets): + timer.start() + target.raytrace(i, b"atmos", tel, atm) + threadSync() + t_raytrace_atmos_time += timer.stop() - synctime + timer.reset() + + if (dms is not None): + timer.start() + target.raytrace(i, b"dm", tel, dms=dms) + threadSync() + t_raytrace_dm_time += timer.stop() - synctime + timer.reset() + + if (config.p_wfss is not None and wfs is not None): + for i in range(len(config.p_wfss)): + timer.start() + wfs.raytrace(i, b"atmos", tel, atm) + threadSync() + s_raytrace_atmos_time += timer.stop() - synctime + timer.reset() + + if (not config.p_wfss[i].open_loop and dms is not None): + timer.start() + wfs.raytrace(i, b"dm", tel, atm, dms) + threadSync() + s_raytrace_dm_time += timer.stop() - synctime + timer.reset() + + timer.start() + 
wfs.comp_img(i) + threadSync() + comp_img_time += timer.stop() - synctime + timer.reset() + + if (rtc is not None and config.p_wfss is not None and wfs is not None): + if (centroider == "geom"): + timer.start() + rtc.do_centroids_geom(0) + threadSync() + docentroids_time += timer.stop() - synctime + timer.reset() + else: + timer.start() + rtc.do_centroids(0) + threadSync() + docentroids_time += timer.stop() - synctime + timer.reset() + + if (dms is not None): + timer.start() + rtc.do_control(0) + threadSync() + docontrol_time += timer.stop() - synctime + timer.reset() + + timer.start() + rtc.apply_control(0) + threadSync() + applycontrol_time += timer.stop() - synctime + timer.reset() + + else: + if (config.p_target is not None and target is not None): + for i in range(config.p_target.ntargets): + timer.start() + target.raytrace(i, b"atmos", tel, atm) + threadSync() + t_raytrace_atmos_time += timer.stop() - synctime + timer.reset() + + if (dms is not None): + timer.start() + rtc.do_control_geo(0, dms, target, i) + threadSync() + docontrol_time += timer.stop() - synctime + timer.reset() + + timer.start() + rtc.apply_control(0) + threadSync() + applycontrol_time += timer.stop() - synctime + timer.reset() + + timer.start() + target.raytrace(i, b"dm", tel, atm, dms) + threadSync() + t_raytrace_dm_time += timer.stop() - synctime + timer.reset() + target.comp_image(0) + strehltmp = target.get_strehl(0) + strehlsp.append(strehltmp[0]) + if (cc > 50): + strehllp.append(strehltmp[1]) + + print("\n done with simulation \n") + print("\n Final strehl : \n", strehllp[len(strehllp) - 1]) + ################################################################### + # _ _ + # | | (_) + # | |_ _ _ __ ___ ___ _ __ ___ + # | __| | '_ ` _ \ / _ \ '__/ __| + # | |_| | | | | | | __/ | \__ \ + # \__|_|_| |_| |_|\___|_| |___/ + ################################################################### + + move_atmos_time /= config.p_loop.niter / 1000. 
+ t_raytrace_atmos_time /= config.p_loop.niter / 1000. + t_raytrace_dm_time /= config.p_loop.niter / 1000. + s_raytrace_atmos_time /= config.p_loop.niter / 1000. + s_raytrace_dm_time /= config.p_loop.niter / 1000. + comp_img_time /= config.p_loop.niter / 1000. + docentroids_time /= config.p_loop.niter / 1000. + docontrol_time /= config.p_loop.niter / 1000. + applycontrol_time /= config.p_loop.niter / 1000. + + time_per_iter = move_atmos_time + t_raytrace_atmos_time +\ + t_raytrace_dm_time + s_raytrace_atmos_time +\ + s_raytrace_dm_time + comp_img_time +\ + docentroids_time + docontrol_time +\ + applycontrol_time + + ########################################################################### + # _ _ __ _____ + # | | | |/ _| ____| + # | |__ __| | |_| |__ ___ __ ___ _____ + # | '_ \ / _` | _|___ \ / __|/ _` \ \ / / _ \ + # | | | | (_| | | ___) | \__ \ (_| |\ V / __/ + # |_| |_|\__,_|_| |____/ |___/\__,_| \_/ \___| + ############################################################################### + + if (config.p_wfss[0].gsalt > 0): + stype = "lgs " + else: + stype = "ngs " + + if (config.p_wfss[0].gsmag > 3): + stype += "noisy " + + stype += str(config.p_wfss[0].type) + + if (controller == "modopti"): + G = np.mean(rtc.get_modal_gains(0)) + else: + G = 0. 
+ + date = datetime.datetime.now() + date = [date.year, date.month, date.day] + + version = check_output(["git", "rev-parse", "--short", "HEAD"]).decode('utf8') + + # version=str(check_output(["svnversion",os.getenv("COMPASS_ROOT")]).replace("\n","")) + hostname = check_output("hostname").replace(b"\n", b"").decode('UTF-8') + nb_cpu, cpu = get_processor_name() + keys_dict = { + "date": date, + "simulname": config.simul_name, + "hostname": hostname, + "ndevices": c.get_ndevice(), + "device": c.get_device_names()[0], + "cuda_version": c.get_cuda_runtime_get_version(), + "magma_version": c.get_magma_info(), + "platform": platform.platform(), + "ncpu": nb_cpu, + "processor": cpu[0], + "tel.diam": config.p_tel.diam, + "sensor_type": config.p_wfss[0].type.decode('UTF-8'), + "LGS": config.p_wfss[0].gsalt > 0, + "noisy": config.p_wfss[0].gsmag > 3, + "nxsub": config.p_wfss[0].nxsub, + "npix": config.p_wfss[0].npix, + "nphotons": config.p_wfss[0]._nphotons, + "controller": controller, + "centroider": centroider, + "finalSRLE": strehllp[len(strehllp) - 1], + "rmsSRLE": np.std(strehllp), + "wfs_init": wfs_init_time, + "atmos_init": atmos_init_time, + "dm_init": dm_init_time, + "target_init": target_init_time, + "rtc_init": rtc_init_time, + "move_atmos": move_atmos_time, + "target_trace_atmos": t_raytrace_atmos_time, + "target_trace_dm": t_raytrace_dm_time, + "sensor_trace_atmos": s_raytrace_atmos_time, + "sensor_trace_dm": s_raytrace_dm_time, + "comp_img": comp_img_time, + "docentroids": docentroids_time, + "docontrol": docontrol_time, + "applycontrol": applycontrol_time, + "iter_time": time_per_iter, + "Avg.gain": G, + "residualPhase": target.get_phase(0) + } + + store = HDFStore(BENCH_SAVEPATH + "/benchmarks.h5") + try: + df = store.get(version) + except KeyError: + df = DataFrame(columns=list(keys_dict.keys()), dtype=object) + + ix = len(df.index) + + if (fwrite): + print("writing files") + for i in list(keys_dict.keys()): + df.loc[ix, i] = keys_dict[i] + 
store.put(version, df) + store.close() + + +############################################################# +# _ +# | | +# ___ _ __ __| | +# / _ \ '_ \ / _` | +# | __/ | | | (_| | +# \___|_| |_|\__,_| +############################################################# +if __name__ == '__main__': + + if (len(sys.argv) < 4 or len(sys.argv) > 6): + error = "wrong number of argument. Got %d (expect 4)\ncommande line should be: 'python benchmark_script.py " % len( + sys.argv) + raise Exception(error) + + SHESHA = os.environ.get('SHESHA_ROOT') + if (SHESHA is None): + raise EnvironmentError("Environment variable 'SHESHA_ROOT' must be define") + + SHESHA_SAVEPATH = SHESHA + "/data" + PARPATH = SHESHA_SAVEPATH + "/par/par4bench" + BENCH_SAVEPATH = SHESHA_SAVEPATH + "/bench-results" + + store = HDFStore(BENCH_SAVEPATH + "/benchmarks.h5") + + filename = PARPATH + "/" + sys.argv[1] + centroider = sys.argv[2] + controller = sys.argv[3] + device = 5 + fwrite = True + if (len(sys.argv) > 4): + devices = [] + if (len(sys.argv[4]) > 1): + for k in range(len(sys.argv[4])): + devices.append(int(sys.argv[4][k])) + else: + devices.append(int(sys.argv[4])) + if (len(sys.argv) == 6): + fwrite = int(sys.argv[5]) + + script4bench(filename, centroider, controller, devices, fwrite) diff --git a/tests/old_scripts/closed_loop_fake_wfs.py b/tests/old_scripts/closed_loop_fake_wfs.py new file mode 100644 index 0000000..7330e87 --- /dev/null +++ b/tests/old_scripts/closed_loop_fake_wfs.py @@ -0,0 +1,405 @@ +# -*- coding: utf-8 -*- +""" +Created on Wed Oct 9 14:03:29 2017 + +@author: sdurand +""" +# import cProfile +# import pstats as ps +#@profile +import sys +import os +# import numpy as np +import carmaWrap as ch +import shesha as ao +import time +import matplotlib.pyplot as plt +import hdf5_util as h5u +import numpy as np +plt.ion() +sys.path.append('/home/sdurand/hracode/codes/PYRCADO/Python') +import PYRCADOCALIB as pyrcalib +from astropy.io import fits +from numba import autojit +#import gnumpy as 
gpu + +print("TEST SHESHA\n closed loop: call loop(int niter)") + +if (len(sys.argv) != 2): + error = 'command line should be:"python -i test.py parameters_filename"\n with "parameters_filename" the path to the parameters file' + raise StandardError(error) + +# get parameters from file +param_file = sys.argv[1] +if (param_file.split('.')[-1] == "py"): + filename = param_file.split('/')[-1] + param_path = param_file.split(filename)[0] + sys.path.insert(0, param_path) + exec("import %s as config" % filename.split(".py")[0]) + sys.path.remove(param_path) +# elif (param_file.split('.')[-1] == "h5"): +# sys.path.insert(0, os.environ["SHESHA_ROOT"] + "/data/par/par4bench/") +# import scao_16x16_8pix as config +# sys.path.remove(os.environ["SHESHA_ROOT"] + "/data/par/par4bench/") +# h5u.configFromH5(param_file, config) +else: + raise ValueError("Parameter file extension must be .py or .h5") + +print("param_file is", param_file) + +if (hasattr(config, "simul_name")): + if (config.simul_name is None): + simul_name = "" + else: + simul_name = config.simul_name + print("simul name is", simul_name) +else: + simul_name = "" + +clean = 1 +matricesToLoad = {} +if (simul_name != ""): + clean = 0 + param_dict = h5u.params_dictionary(config) + matricesToLoad = h5u.checkMatricesDataBase(os.environ["SHESHA_ROOT"] + "/data/", + config, param_dict) + +# initialisation: + +# context +# c = ch.carmaWrap_context(0) +# c = ch.carmaWrap_context(devices=np.array([0,1], dtype=np.int32)) +# c.set_active_device(0) #useful only if you use ch.carmaWrap_context() +c = ch.carmaWrap_context(devices=config.p_loop.devices) +# wfs +print("->wfs") +wfs, tel = ao.wfs_init(config.p_wfss, config.p_atmos, config.p_tel, config.p_geom, + config.p_target, config.p_loop, config.p_dms) + +# atmos +print("->atmos") +atm = ao.atmos_init(c, config.p_atmos, config.p_tel, config.p_geom, config.p_loop, + config.p_wfss, wfs, config.p_target, clean=clean, + load=matricesToLoad) + +# dm +print("->dm") +dms = 
ao.dm_init(config.p_dms, config.p_wfss, wfs, config.p_geom, config.p_tel) + +# target +print("->target") +tar = ao.target_init(c, tel, config.p_target, config.p_atmos, config.p_geom, + config.p_tel, config.p_dms, config.p_wfss) + +print("->rtc") +# rtc +rtc = ao.rtc_init(tel, wfs, config.p_wfss, dms, config.p_dms, config.p_geom, + config.p_rtc, config.p_atmos, atm, config.p_tel, config.p_loop, + clean=clean, simul_name=simul_name, load=matricesToLoad) + +if not clean: + h5u.validDataBase(os.environ["SHESHA_ROOT"] + "/data/", matricesToLoad) + +print("====================") +print("init done") +print("====================") +print("objects initialzed on GPU:") +print("--------------------------------------------------------") +print(atm) +print(wfs) +print(dms) +print(tar) +print(rtc) + + +def import_im(nb_im, path): + im = fits.open(path) + size = im[0].data.shape[0] + pyr_im_cube = np.zeros((nb_im, size, size), dtype=np.float32) + for i in range(nb_im): + pyr_im_cube[i] = im[i].data + im.close() + return pyr_im_cube + + +def create_P(bin_factor, size): + return np.repeat( + np.identity(size / bin_factor, dtype=np.float32), bin_factor, axis=0) + + +def calib_pyr(centers, wfs_numbers, bin_factor=1, crop_factor=0): + + #initialisation + #offset 4 roi : + offset = np.zeros((2, 4)) + j = [2, 1, 0, 3] + npup = config.p_wfss[wfs_numbers]._validsubsx.shape[0] + #decoupage 4 roi + for i in range(4): + #decoupage coordonnee + #x : + subx = config.p_wfss[wfs_numbers]._validsubsx[npup * (i) / 4:npup * (i + 1) / 4] + #y : + suby = config.p_wfss[wfs_numbers]._validsubsy[npup * (i) / 4:npup * (i + 1) / 4] + # calcul des 4 centres + center_compass = [((np.max(subx) - np.min(subx)) / 2.) + np.min(subx), + ((np.max(suby) - np.min(suby)) / 2.) + np.min(suby)] + # calcul des offsets + offset[:, i] = [ + np.int32((centers[j[i]][0] - crop_factor / 2.) / bin_factor) - + center_compass[0], + np.int32((centers[j[i]][1] - crop_factor / 2.) 
/ bin_factor) - + center_compass[1] + ] + + return offset + + +def pyr_aquisition(n=0): + + #fonction d'aquisition d'image pour la pyramide + #lib sesame python + # cam 10gbit.py + # ten gb class + # get_image(1, num_cam) --> + #im_path = ['pyrimgforSeb1.fits','pyrimgforSeb2.fits','pyrimgforSeb3.fits','pyrimgforSeb4.fits','pyrimgforSeb5.fits','pyrimgforSeb6.fits'] + #im = fits.open('/home/sdurand/im_pyr_banc/'+ im_path[n]) + #pyr_im = im[0].data + path = '/home/sdurand/RecordPyrImages_2017_06_06_07h49/pyrImageCube.fits' + im = fits.open(path) + pyr_im = im[n].data + im.close() + return pyr_im + + +def get_slope_pyrhr(npup, valid_pixel): + + pup = np.zeros((npup / 4, 4)) + j = [0, 2, 3, 1] + for i in range(4): + pup[:, i] = valid_pixel[(npup / 4) * j[i]:(npup / 4) * (j[i] + 1)] + tot = np.sum(pup, axis=1) + t = np.average(tot) + + gx = (pup[:, 0] + pup[:, 2] - (pup[:, 1] + pup[:, 3])) / t + gy = (pup[:, 0] + pup[:, 1] - (pup[:, 2] + pup[:, 3])) / t + #gz = (pup[:,0] - pup[:,1] - pup[:,2] + pup[:,3]) / t + + slope = np.append(gx, gy) * ( + (config.p_wfss[0].pyr_ampl * config.p_wfss[0].Lambda * 1e-6) / + config.p_tel.diam) * (180 / np.pi) * 3600 + + return slope + + +def crop_im(im, taille_sortie): + + #im_crop = np.zeros((taille_sortie,taille_sortie),dtype=np.float32) + size = im.shape[0] + im_crop = im[np.int32((size / 2.) - (taille_sortie / 2.)):np.int32( + (size / 2.) + (taille_sortie / 2)), + np.int32((size / 2.) - (taille_sortie / 2.)):np.int32( + (size / 2.) 
+ (taille_sortie / 2.))] + + return im_crop + + +@autojit +def binning_im(im, bin_factor): + + bin_factor = np.int32(bin_factor) + size = im.shape[0] + size_bin = size / bin_factor + binimage = np.zeros((size_bin, size_bin), dtype=np.float32) + + a = np.arange(size) + xx, yy = np.meshgrid(a, a) + xx = xx / bin_factor + yy = yy / bin_factor + for i in range(size): + for j in range(size): + binimage[xx[i, j], yy[i, j]] += im[i, j] + return binimage / (bin_factor**2) + + +#@autojit +def binning_im_2(im, bin_factor): + size = im.shape[0] + bin_factor = np.int32(bin_factor) + P = create_P(bin_factor, size) # + #GP = gpu.garray(P) + #Gim = gpu.garray(im) + binimage = ((P.T).dot(im)).dot(P) + #binimage = ((GP.T).dot(Gim)).dot(GP) + + return binimage / (bin_factor**2) + + +def loop(n, d_valid_pix=[], d_P=[], offset=[], + bool_fake_wfs=np.zeros(len(config.p_wfss)), bin_factor=[], crop_factor=[], + cube_im=[]): + print("----------------------------------------------------") + print("iter# | S.E. SR | L.E. SR | Est. Rem. 
| framerate") + print("----------------------------------------------------") + t0 = time.time() + #fake_pos = np.where(bool_fake_wfs==1) + + for i in range(n): + atm.move_atmos() + if (config.p_controllers[0].type_control == "geo"): + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + rtc.docontrol_geo(0, dms, tar, 0) + rtc.applycontrol(0, dms) + tar.dmtrace(0, dms) + + else: + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + tar.dmtrace(t, dms) + + fake_it = 0 + for w in range(len(config.p_wfss)): + wfs.sensors_trace(w, "all", tel, atm, dms) + if bool_fake_wfs[w]: #verif fake_wfs + if (config.p_wfss[w].type_wfs == 'pyrhr'): # verif type_wfs = pyrhr + if (bin_factor[fake_it] > 1): # verif bining + if (cube_im == []): # verif bincube not here + pyr_im = pyr_aquisition(i) # aquistion image + pyr_im_crop = crop_im( + pyr_im, + pyr_im.shape[0] - crop_factor[w]) # crop image + + else: + pyr_im_crop = crop_im(cube_im[i], + cube_im[i].shape[0] - 2).astype( + np.float32) # crop image + + d_imhr = ch.carmaWrap_obj_Float2D( + ch.carmaWrap_context(), data=pyr_im_crop / + (bin_factor[fake_it]**2)) # inject pyr_image in GPU + d_imlr = d_P[fake_it].gemm(d_imhr, 't', 'n').gemm( + d_P[fake_it]) # bining GPU + else: + if (cube_im == []): + pyr_im = pyr_aquisition(i) # aquistion image + d_imlr = ch.carmaWrap_obj_Float2D( + ch.carmaWrap_context(), + data=pyr_im) # inject pyr_image in GPU + else: + d_imlr = ch.carmaWrap_obj_Float2D( + ch.carmaWrap_context(), + data=cube_im[i]) # inject pyr_image in GPU + # valable seulmement pour wf0 : + wfs.copy_pyrimg( + w, d_imlr, d_valid_pix[fake_it][0], + d_valid_pix[fake_it][1]) # envoie de l image pyramide + + elif (config.p_wfss[w].type_wfs == 'sh'): # verif type_wfs = pyrhr + print("TODO SH") + else: + print("error") + fake_it += 1 # increment for fake_wfs + else: + wfs.sensors_compimg(w) # normal wfs + + rtc.docentroids(0) + #slope_compass_0[:,i] = rtc.get_centroids(0) + rtc.docontrol(0) + + 
rtc.applycontrol(0, dms) + + if ((i + 1) % 100 == 0): + strehltmp = tar.get_strehl(0) + print(i + 1, "\t", strehltmp[0], "\t", strehltmp[1]) + t1 = time.time() + print(" loop execution time:", t1 - t0, " (", n, "iterations), ", (t1 - t0) / n, + "(mean) ", n / (t1 - t0), "Hz") + + +#____________________________________________________________ +# lib sesam +# sesam_class +# init vector fake_wfs --> +bool_fake_wfs = np.zeros(len(config.p_wfss), dtype=np.int32) + +bool_fake_wfs[0] = 1 + +# init wfs +crop_factor = np.zeros(sum(bool_fake_wfs)) +size_c = np.zeros(sum(bool_fake_wfs)) +centers_fake_wfs = [] +d_P = [] +offset = np.zeros((2, 4, sum(bool_fake_wfs))) +size = np.zeros(sum(bool_fake_wfs)) +bin_factor = np.zeros(sum(bool_fake_wfs)) +d_valid_pix = [] +#____________________________________________________________ + +# pour le wfs_fake = 0 +w = 0 +# rebin param +bin_factor[w] = 3 +#fake_wfs_param +bool_fake_wfs[w] = 1 +size[w] = 800 +#____________________________________________________________ + +# import calibration and image +if (bool_fake_wfs[w] == 1): + if (config.p_wfss[w].type_wfs == 'pyrhr'): + centers_fake_wfs.append(pyrcalib.giveMeTheCalibs()[1]['centers']) + + nb_im = 100 # nombre d'image + path = '/home/sdurand/RecordPyrImages_2017_06_06_07h49/pyrImageCube.fits' + pyr_im_cube = import_im(nb_im, path) +#_____________________________________________________________ + +# initialisation fake wfs +fake_pos = np.where(bool_fake_wfs == 1) +for f in range(sum(bool_fake_wfs)): + crop_factor[f] = size[f] - ((size[f] / bin_factor[f]) * bin_factor[f]) + size_c[f] = size[f] - crop_factor[f] + + if (config.p_wfss[fake_pos[f]].type_wfs == 'pyrhr'): + offset[:, :, f] = calib_pyr(centers_fake_wfs[f], fake_pos[f], + bin_factor=bin_factor[f], + crop_factor=crop_factor[f]) # calcul offset for wfs + d_P.append( + ch.carmaWrap_obj_Float2D(ch.carmaWrap_context(), data=create_P( + bin_factor[f], size_c[f]))) # add wfs offset on GPU + else: + d_P.append([]) + 
+#_____________________________________________________________ +# valable seulmement pour wf0_fake : +w = 0 + +if (bool_fake_wfs[w] == 1): # verif fake_wfs + if (config.p_wfss[w].type_wfs == 'pyrhr'): # verif fake_pyrhr + npup = config.p_wfss[w]._validsubsx.shape[0] + valid_pix = np.zeros((2, npup), dtype=np.int32) + d_P.append( + ch.carmaWrap_obj_Float2D(ch.carmaWrap_context(), + data=create_P(bin_factor[w], size_c[w]))) + valid_pix[0, :] = np.int32(config.p_wfss[w]._validsubsx + offset[0, :, w].repeat( + config.p_wfss[w]._nvalid)) # cacul new X new validsubx + valid_pix[1, :] = np.int32(config.p_wfss[w]._validsubsy + offset[1, :, w].repeat( + config.p_wfss[w]._nvalid)) # cacul new Y new validsuby + + #d_valid_pix = ch.carmaWrap_obj_Float2D(ch.carmaWrap_context(), data=valid_pix) + d_valid_pix.append([ + ch.carmaWrap_obj_Int1D(ch.carmaWrap_context(), data=valid_pix[0, :]), + ch.carmaWrap_obj_Int1D(ch.carmaWrap_context(), data=valid_pix[1, :]) + ]) # add valid subpix coord in GPU + loop(100, d_valid_pix, d_P, offset=offset, bool_fake_wfs=bool_fake_wfs, + cube_im=pyr_im_cube, bin_factor=bin_factor, + crop_factor=crop_factor) # Run loop +#_______________________________________________________________ + elif (config.p_wfss[w].type_wfs == 'sh'): + + print("TODO SH") + else: + print("Error") +else: + loop(100) diff --git a/tests/old_scripts/closed_loop_mpi.py b/tests/old_scripts/closed_loop_mpi.py new file mode 100644 index 0000000..7c472b7 --- /dev/null +++ b/tests/old_scripts/closed_loop_mpi.py @@ -0,0 +1,197 @@ +import os + +import cProfile +import pstats as ps + +import sys +import numpy as np +import carmaWrap as ch +import shesha as ao +import time + +rank = int(os.environ['OMPI_COMM_WORLD_RANK']) +c = ch.carmaWrap_context() +c.set_active_device(rank % c.get_ndevice()) + +# Delay import because of cuda_aware +# mpi_init called during the import +import mpi4py +from mpi4py import MPI +import hdf5_util as h5u + +comm = MPI.COMM_WORLD +comm_size = comm.Get_size() 
+rank = comm.Get_rank() + +print("TEST SHESHA\n closed loop with MPI") + +if (len(sys.argv) != 2): + error = 'command line should be:"python test.py parameters_filename"\n with "parameters_filename" the path to the parameters file' + raise Exception(error) + +# get parameters from file +param_file = sys.argv[1] +if (param_file.split('.')[-1] == b"py"): + filename = param_file.split('/')[-1] + param_path = param_file.split(filename)[0] + sys.path.insert(0, param_path) + exec("import %s as config" % filename.split(".py")[0]) + sys.path.remove(param_path) +elif (param_file.split('.')[-1] == b"h5"): + sys.path.insert(0, os.environ["SHESHA_ROOT"] + "/data/par/par4bench/") + import scao_sh_16x16_8pix as config + sys.path.remove(os.environ["SHESHA_ROOT"] + "/data/par/par4bench/") + h5u.configFromH5(param_file, config) +else: + raise ValueError("Parameter file extension must be .py or .h5") + +print("param_file is", param_file) + +if (hasattr(config, "simul_name")): + if (config.simul_name is None): + simul_name = "" + else: + simul_name = config.simul_name +else: + simul_name = "" +print("simul name is", simul_name) + +matricesToLoad = {} +if (simul_name == b""): + clean = 1 +else: + clean = 0 + param_dict = h5u.params_dictionary(config) + matricesToLoad = h5u.checkMatricesDataBase(os.environ["SHESHA_ROOT"] + "/data/", + config, param_dict) + +# initialisation: +# wfs +print("->wfs") +wfs, tel = ao.wfs_init(config.p_wfss, config.p_atmos, config.p_tel, config.p_geom, + config.p_target, config.p_loop, comm_size, rank, config.p_dms) + +# atmos +print("->atmos") +atm = ao.atmos_init(c, config.p_atmos, config.p_tel, config.p_geom, config.p_loop, + rank=rank, load=matricesToLoad) + +# dm +print("->dm") +dms = ao.dm_init(config.p_dms, config.p_wfss, wfs, config.p_geom, config.p_tel) + +# target +print("->target") +tar = ao.target_init(c, tel, config.p_target, config.p_atmos, config.p_geom, + config.p_tel, config.p_dms) + +# rtc +print("->rtc") +rtc = ao.rtc_init(tel, wfs, 
config.p_wfss, dms, config.p_dms, config.p_geom, + config.p_rtc, config.p_atmos, atm, config.p_tel, config.p_loop, + clean=clean, simul_name=simul_name, load=matricesToLoad) + +if not clean and rank == 0: + h5u.validDataBase(os.environ["SHESHA_ROOT"] + "/data/", matricesToLoad) + +comm.Barrier() +if (rank == 0): + print("====================") + print("init done") + print("====================") + print("objects initialzed on GPU:") + print("--------------------------------------------------------") + print(atm) + print(wfs) + print(dms) + print(tar) + print(rtc) + + print("----------------------------------------------------") + print("iter# | S.E. SR | L.E. SR | Est. Rem. | framerate") + print("----------------------------------------------------") +comm.Barrier() + +mimg = 0. # initializing average image + +#import matplotlib.pyplot as pl + + +def loop(n): + # if(rank==0): + #fig,((turbu,image),(shak,defMir))=pl.subplots(2,2, figsize=(15,15)) + # pl.ion() + # pl.show() + + t0 = time.time() + for i in range(n): + if (rank == 0): + atm.move_atmos() + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + tar.dmtrace(t, dms) + for w in range(len(config.p_wfss)): + wfs.sensors_trace(w, "all", tel, atm, dms) + wfs.Bcast_dscreen() + for w in range(len(config.p_wfss)): + wfs.sensors_compimg(w) + wfs.gather_bincube(w) + if (rank == 0): + rtc.docentroids(0) + rtc.docontrol(0) + rtc.applycontrol(0, dms) + + if ((i + 1) % 50 == 0): + # s=rtc.get_centroids(0) + if (rank == 0): + """ FOR DEBUG PURPOSE + turbu.clear() + image.clear() + shak.clear() + defMir.clear() + + screen=atm.get_screen(0.) + + im=tar.get_image(0,"se") + im=np.roll(im,im.shape[0]/2,axis=0) + im=np.roll(im,im.shape[1]/2,axis=1) + + #sh=wfs.get_binimg(0) + + dm=dms.get_dm("pzt",0.) 
+ + f1=turbu.matshow(screen,cmap='Blues_r') + f2=image.matshow(im,cmap='Blues_r') + #f3=shak.matshow(sh,cmap='Blues_r') + f4=defMir.matshow(dm) + pl.draw() + + + c=rtc.get_command(0) + v=rtc.get_voltages(0) + + sh_file="dbg/shak_"+str(i)+"_np_"+str(comm.Get_size())+".npy" + im_file="dbg/imag_"+str(i)+"_np_"+str(comm.Get_size())+".npy" + dm_file="dbg/DM_"+str(i)+"_np_"+str(comm.Get_size())+".npy" + s_file="dbg/cent_"+str(i)+"_np_"+str(comm.Get_size())+".npy" + c_file="dbg/comm_"+str(i)+"_np_"+str(comm.Get_size())+".npy" + v_file="dbg/volt_"+str(i)+"_np_"+str(comm.Get_size())+".npy" + + np.save(sh_file,sh) + np.save(im_file,im) + np.save(dm_file,dm) + np.save(s_file,s) + np.save(c_file,c) + np.save(v_file,v) + """ + + strehltmp = tar.get_strehl(0) + print("%5d" % (i + 1), " %1.5f" % strehltmp[0], + " %1.5f" % strehltmp[1]) + + t1 = time.time() + print(rank, "| loop execution time:", t1 - t0, " (", n, "iterations), ", + (t1 - t0) / n, "(mean) ", n / (t1 - t0), "Hz") + + +loop(config.p_loop.niter) diff --git a/tests/old_scripts/debug_pyr.py b/tests/old_scripts/debug_pyr.py new file mode 100644 index 0000000..7e3df33 --- /dev/null +++ b/tests/old_scripts/debug_pyr.py @@ -0,0 +1,117 @@ +import cProfile +import pstats as ps + +import sys +import os +import numpy as np +import carmaWrap as ch +import shesha as ao +import time +import matplotlib.pyplot as pl +import hdf5_util as h5u + +print("TEST SHESHA\n closed loop: call loop(int niter)") + +if (len(sys.argv) != 2): + error = 'command line should be:"python -i test.py parameters_filename"\n with "parameters_filename" the path to the parameters file' + raise Exception(error) + +# get parameters from file +param_file = sys.argv[1] +if (param_file.split('.')[-1] == b"py"): + filename = param_file.split('/')[-1] + param_path = param_file.split(filename)[0] + sys.path.insert(0, param_path) + exec("import %s as config" % filename.split(".py")[0]) + sys.path.remove(param_path) +elif (param_file.split('.')[-1] == b"h5"): + 
sys.path.insert(0, os.environ["SHESHA_ROOT"] + "/data/par/par4bench/") + import scao_sh_16x16_8pix as config + sys.path.remove(os.environ["SHESHA_ROOT"] + "/data/par/par4bench/") + h5u.configFromH5(param_file, config) +else: + raise ValueError("Parameter file extension must be .py or .h5") + +print("param_file is", param_file) + +if (hasattr(config, "simul_name")): + if (config.simul_name is None): + simul_name = "" + else: + simul_name = config.simul_name + print("simul name is", simul_name) +else: + simul_name = "" + +matricesToLoad = {} +if (simul_name == b""): + clean = 1 +else: + clean = 0 + param_dict = h5u.params_dictionary(config) + matricesToLoad = h5u.checkMatricesDataBase(os.environ["SHESHA_ROOT"] + "/data/", + config, param_dict) +# initialisation: +# context +c = ch.carmaWrap_context(0) +# c.set_active_device(0) #useful only if you use ch.carmaWrap_context() + +# wfs +config.p_wfs0.set_atmos_seen(0) +config.p_wfs0.set_pyr_ampl(3) + +# dm +config.p_dm1 = ao.Param_dm() +config.p_dms = [config.p_dm1] +config.p_dm1.set_type("tt") +config.p_dm1.set_alt(0.) +config.p_dm1.set_unitpervolt(1.) +lambda_d = 1. #config.p_wfs0.Lambda / config.p_tel.diam * 180 / np.pi * 3600 +config.p_dm1.set_push4imat(2. 
* lambda_d) + +# controllers +config.p_controller0.set_ndm([0]) + +import matplotlib.pyplot as plt +plt.ion() +npts = 16 +index = 1 +while npts <= 512: + config.p_wfs0.set_pyr_npts(npts) + # wfs + print("->wfs") + wfs, tel = ao.wfs_init(config.p_wfss, config.p_atmos, config.p_tel, config.p_geom, + config.p_target, config.p_loop, config.p_dms) + + # dm + print("->dm") + dms = ao.dm_init(config.p_dms, config.p_wfss, wfs, config.p_geom, config.p_tel) + + print("->rtc") + # rtc + rtc = ao.rtc_init(tel, wfs, config.p_wfss, dms, config.p_dms, config.p_geom, + config.p_rtc, None, None, config.p_tel, config.p_loop, clean=clean, + simul_name=simul_name, load=matricesToLoad) + + if not clean: + h5u.validDataBase(os.environ["SHESHA_ROOT"] + "/data/", matricesToLoad) + + print("====================") + print("init done") + print("====================") + print("objects initialzed on GPU:") + print("--------------------------------------------------------") + print(wfs) + print(dms) + print(rtc) + + imat = rtc.get_imat(0) + + plt.subplot(2, 3, index) + plt.plot(imat[:, -1], label="Tip") + plt.plot(imat[:, -2], label="Tilt") + plt.legend() + plt.title("%s_npts%d_ampl%.2f" % (param_file, config.p_wfs0.pyr_npts, + config.p_wfs0.pyr_ampl)) + npts <<= 1 + index += 1 diff --git a/tests/old_scripts/dm_scripts/par_dm.py b/tests/old_scripts/dm_scripts/par_dm.py new file mode 100644 index 0000000..8f54572 --- /dev/null +++ b/tests/old_scripts/dm_scripts/par_dm.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +""" +Created on Wed Nov 9 14:03:29 2016 + +@author: sdurand +""" + +#import min : +import shesha as ao +import numpy as np + +simul_name = "dm_init" + +nact = 17 # number of actuator +pupdiam = 500 # size of DM + +# note available for norms != 0 : +alt = 0. 
+ + +def calcul_size_support_dmpzt(nact, pupdiam): + """ + This fonction is available just for alt =0 or/and norms = 0 + """ + + ssize = int(2**np.ceil(np.log2(pupdiam) + 1)) + cent = ssize / 2 + 0.5 + pitch = int(pupdiam / (nact - 1)) #--> for wfs_xpos and ypos = 0 + extent = pitch * (nact + 5) + n1 = np.floor(cent - extent / 2) + n2 = np.ceil(cent + extent / 2) + taille = n2 - n1 + 1 + + return taille + + +#geometry param : +p_geom = ao.Param_geom() + +p_geom.set_pupdiam(pupdiam) # size of dm in support (pixel) +p_geom.set_apod(0) #booleen 1 = actif 0 = inactif + +#telescope param : + +p_tel = ao.Param_tel() + +# These values are mandatory +# for alt = 0 or/and norm = 0 this value is not use for compute dm support size + +# diam is not use for pzt dm if alt=0 or/and norms=0 +# diam is not use for kl dm +p_tel.set_diam(8.0) #--> use for tiptilt and dm_h5 (and dm support size) + +# Cobs is not use for tiptilt dm +# cobs is not use for pzt if have no filter +p_tel.set_cobs(0.12) #--> use for kl_miror and PZT filter + +#dm param: + +p_dm0 = ao.Param_dm() +p_dms = [p_dm0] +p_dm0.set_type("pzt") +p_dm0.set_nact(nact) +p_dm0.set_alt(alt) +p_dm0.set_thresh(0.3) +p_dm0.set_coupling(0.2) +p_dm0.set_unitpervolt(1.) +p_dm0.set_push4imat(1.) 
+p_dm0.set_margin_out(0) diff --git a/tests/old_scripts/dm_scripts/script_dminit.py b/tests/old_scripts/dm_scripts/script_dminit.py new file mode 100644 index 0000000..b30d15b --- /dev/null +++ b/tests/old_scripts/dm_scripts/script_dminit.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +""" +Created on Wed Nov 9 14:03:29 2016 + +@author: sdurand +""" + +# import min +import sys +import os +sys.path.insert(0, os.environ["SHESHA_ROOT"] + "/widgets/") +import carmaWrap as ch +import shesha as ao + +print("TEST SHESHA_dm\n") + +# read param file : +param_file = sys.argv[1] +if (param_file.split('.')[-1] == b"py"): + filename = param_file.split('/')[-1] + param_path = param_file.split(filename)[0] + sys.path.insert(0, param_path) + exec("import %s as config" % filename.split(".py")[0]) +else: + raise ValueError("Parameter file extension must be .py or .h5") + +print("param_file is", param_file) + +#initialisation: +# context : gpu 0 +gpudevice = 0 +c = ch.carmaWrap_context(gpudevice) + + +# fonction for init dm and geometry +def initSimuDM(config): + + # init geom + print("->geom") + ao.Param_geom.geom_init(config.p_geom, config.p_tel, config.p_geom.pupdiam, + config.p_geom.apod) #apod = apodizer + # init dm + print("->dm") + dms = ao.dm_init_standalone(config.p_dms, config.p_geom, config.p_tel.diam, + config.p_tel.cobs) + + # Print DM information + print("====================") + print("init done") + print("====================") + print("objects initialzed on GPU:") + print("--------------------------------------------------------") + + print(dms) + + return dms + + +# use init function +dms = initSimuDM(config) # Init Simu diff --git a/tests/old_scripts/nono_loop.py b/tests/old_scripts/nono_loop.py new file mode 100644 index 0000000..5808fa4 --- /dev/null +++ b/tests/old_scripts/nono_loop.py @@ -0,0 +1,41 @@ +"""script test to simulate a closed loop + +Usage: + closed_loop.py [options] + +with 'parameters_filename' the path to the parameters file + +Options: + -h --help 
Show this help message and exit + --brahma Distribute data with brahma + --bench For a timed call + -d, --devices devices Specify the devices +""" + +from docopt import docopt + +import shesha.sim +from shesha.constants import ControllerType + +arguments = docopt(__doc__) +param_file = arguments[""] + +# Get parameters from file +# if arguments["--bench"]: +# sim = shesha_sim.Bench(param_file) +# elif arguments["--brahma"]: +# sim = shesha_sim.SimulatorBrahma(param_file) +# else: +# sim = shesha_sim.Simulator(param_file) + +sim = shesha_sim.BenchBrahma(param_file) +sim.config.p_controller0.set_type(ControllerType.GENERIC) + +if arguments["--devices"]: + devices = [] + for k in range(len(arguments["--devices"])): + devices.append(int(arguments["--devices"][k])) + sim.config.p_loop.set_devices(devices) + +sim.init_sim() +sim.loop(monitoring_freq=100) diff --git a/tests/old_scripts/open_loop.py b/tests/old_scripts/open_loop.py new file mode 100644 index 0000000..3824be0 --- /dev/null +++ b/tests/old_scripts/open_loop.py @@ -0,0 +1,107 @@ +# import cProfile +# import pstats as ps + +import sys +import os +# import numpy as np +import carmaWrap as ch +import shesha as ao +import time +import hdf5_util as h5u +import numpy as np + +print("TEST SHESHA\n closed loop: call loop(int niter)") + +if (len(sys.argv) != 2): + error = 'command line should be:"python -i test.py parameters_filename"\n with "parameters_filename" the path to the parameters file' + raise Exception(error) + +# get parameters from file +param_file = sys.argv[1] +if (param_file.split('.')[-1] == "py"): + filename = param_file.split('/')[-1] + param_path = param_file.split(filename)[0] + sys.path.insert(0, param_path) + exec("import %s as config" % filename.split(".py")[0]) + sys.path.remove(param_path) +elif (param_file.split('.')[-1] == "h5"): + sys.path.insert(0, os.environ["SHESHA_ROOT"] + "/data/par/par4bench/") + import scao_sh_16x16_8pix as config + sys.path.remove(os.environ["SHESHA_ROOT"] + 
"/data/par/par4bench/") + h5u.configFromH5(param_file, config) +else: + raise ValueError("Parameter file extension must be .py or .h5") + +print("param_file is", param_file) + +if (hasattr(config, "simul_name")): + if (config.simul_name is None): + simul_name = "" + else: + simul_name = config.simul_name + print("simul name is", simul_name) +else: + simul_name = "" + +clean = 1 +matricesToLoad = {} + +config.p_geom.set_pupdiam(500) + +# initialisation: + +# context +# c = ch.carmaWrap_context(0) +# c = ch.carmaWrap_context(devices=np.array([0,1], dtype=np.int32)) +# c.set_active_device(0) #useful only if you use ch.carmaWrap_context() +c = ch.carmaWrap_context(devices=config.p_loop.devices) + +# wfs +print("->wfs") +wfs, tel = ao.wfs_init(config.p_wfss, config.p_atmos, config.p_tel, config.p_geom, None, + config.p_loop, config.p_dms) + +# atmos +print("->atmos") +atm = ao.atmos_init(c, config.p_atmos, config.p_tel, config.p_geom, config.p_loop, + config.p_wfss, wfs, None, clean=clean, load=matricesToLoad) + +# dm +print("->dm") +dms = ao.dm_init(config.p_dms, config.p_wfss, wfs, config.p_geom, config.p_tel) +ao.correct_dm(config.p_dms, dms, config.p_controller0, config.p_geom, + np.ones((config.p_wfs0._nvalid, config.p_dm0._ntotact), dtype=np.float32), + b'', {}, 1) + +if not clean: + h5u.validDataBase(os.environ["SHESHA_ROOT"] + "/data/", matricesToLoad) + +print("====================") +print("init done") +print("====================") +print("objects initialzed on GPU:") +print("--------------------------------------------------------") +print(atm) +print(wfs) +print(dms) + +mimg = 0. # initializing average image + + +def loop(n): + print("----------------------------------------------------") + print("iter# | S.E. SR | L.E. SR | Est. Rem. 
| framerate") + print("----------------------------------------------------") + t0 = time.time() + for i in range(n): + atm.move_atmos() + + for w in range(len(config.p_wfss)): + wfs.sensors_trace(w, b"all", tel, atm, dms) + + t1 = time.time() + print(" loop execution time:", t1 - t0, " (", n, "iterations), ", (t1 - t0) / n, + "(mean) ", n / (t1 - t0), "Hz") + + +# loop(config.p_loop.niter) diff --git a/tests/old_scripts/scriptBashCompass_PYR39m.sh b/tests/old_scripts/scriptBashCompass_PYR39m.sh new file mode 100755 index 0000000..5799d0c --- /dev/null +++ b/tests/old_scripts/scriptBashCompass_PYR39m.sh @@ -0,0 +1,65 @@ +#!/bin/bash + +PARFILE="$SHESHA_ROOT/data/par/MICADO/micado_39m_PYR_ELTPupil.py" +OUTPUT="$SHESHA_ROOT/test/scripts/resultatsScripts/script39mPYRLog3.txt" +rm $OUTPUT +echo "writing output in "$OUTPUT +echo "To monitor process: tail -f" $OUTPUT +#script="$SHESHA_ROOT/test/scripts/script_PYR39m.py" +script="$SHESHA_ROOT/test/scripts/script_PYR39m_optimGain.py" +# Relevant parameters for pyramid: +# REQ, MODU, GAIN, MAG, NKL +SIMULNAME="AO4ELT5_20ArcSecOffset" +GPU="5" +for FREQ in "500" +do + for RONS in "0.1" + do + for MODU in "5" + do + for MAG in "11" "15" "17" + #for MAG in "18" + do + for GAIN in "0.5" + #for GAIN in "1.0" + do + for KLFILT in "100" + do + for NSSP in "92" + #for NSSP in "72" "108" + do + CMD="python $script $PARFILE $FREQ $RONS $MODU $GAIN $MAG $KLFILT $SIMULNAME $NSSP $GPU" + echo "execute $CMD" >> $OUTPUT + $CMD 2>> $OUTPUT >> $OUTPUT + done + done + done + done + done + done +done +CMD="python sendMail.py" +echo "execute $CMD">> $OUTPUT +$CMD 2>> $OUTPUT >> $OUTPUT +# To monitor the script log: +# tail -f resultatsScripts/script39mPYRLog.txt + + +#for f in $FILES +#do +# for CTR in "ls" "modopti" "mv" "geo" +# do +# for COG in "cog" "tcog" "bpcog" "geom" #"pyr" # +# do +# CMD="python -i $script $f $COG $CTR $DEVICE" +# echo "execute $CMD" >> $OUTPUT +# $CMD 2>> $OUTPUT >> $OUTPUT +# done +# done +#done +echo "Script 39mPYR 
Done" + +#FILES_LGS="scao_16x16_8pix_lgs.py" +#FILES_LGS+="scao_40x40_10pix_lgs.par" +#FILES_LGS+="scao_64x64_16pix_lgs.par" +#FILES_LGS+="scao_80x80_20pix_lgs.par" diff --git a/tests/old_scripts/scriptBashCompass_SH39m.sh b/tests/old_scripts/scriptBashCompass_SH39m.sh new file mode 100755 index 0000000..a5d3450 --- /dev/null +++ b/tests/old_scripts/scriptBashCompass_SH39m.sh @@ -0,0 +1,97 @@ +#!/bin/bash + +PARFILE="$SHESHA_ROOT/data/par/MICADO/micado_39m_SH.py" # scao_sh_40x40_8pix.py scao_sh_64x64_8pix.py scao_sh_80x80_8pix.py " + +DATE=`date +%F_%Hh%M` +SVN=`svnversion` +OUTPUT="$SHESHA_ROOT/test/scripts/resultatsScripts/scriptLog.txt" +rm $OUTPUT +echo "writing output in "$OUTPUT +echo "To monitor process: tail -f" $OUTPUT +script="$SHESHA_ROOT/test/scripts/script_SH39m.py" +for FREQ in "500" #"1000" +do + for NPIX in "8" "6" #"8" + do + for PIXSIZE in "1." "1.5" #"1.5" + do + #for GAIN in "0.1" "0.2" "0.3" "0.4" "0.5" #"pyr" # + for GAIN in "0.1" "0.2" "0.3" "0.4" "0.5" #"pyr" # + #for GAIN in "0.5" #"pyr" # + do + for BP in "10" # nb of Brightest pixels + #for TH in "1" #"pyr" # + do + for RON in "3" "10" # RON + do + #for MAG in "11" "12" "13" "14" "15" "16" "17" #"pyr" # + for MAG in "11" "12" "13" "13.5" "14" "14.5" "15" "15.5" "16" #"pyr" # + #for MAG in "11" "12" #"pyr" # + do + #for KLFILT in "500" "1000" "1500" + for KLFILT in "1000" + do + CMD="python $script $PARFILE $FREQ $NPIX $PIXSIZE $GAIN $BP $MAG $RON $KLFILT" + echo "execute $CMD" >> $OUTPUT + $CMD 2>> $OUTPUT >> $OUTPUT + done + done + done + done + done + done + done +done + +#FREQ="500" +#NPIX="6" +#PIXSIZE="1" +#CMD="python $script $PARFILE $FREQ $NPIX $PIXSIZE 0.5 0 16 1000" +#echo "execute $CMD" >> $OUTPUT +#$CMD 2>> $OUTPUT >> $OUTPUT +#sleep 5 + +#CMD="python $script $PARFILE $FREQ $NPIX $PIXSIZE 0.5 0 16 1500" +#echo "execute $CMD" >> $OUTPUT +#$CMD 2>> $OUTPUT >> $OUTPUT +#sleep 5 + +#CMD="python $script $PARFILE $FREQ $NPIX $PIXSIZE 0.5 0 17 500" +#echo "execute $CMD" >> $OUTPUT 
+#$CMD 2>> $OUTPUT >> $OUTPUT +#sleep 5 + +#CMD="python $script $PARFILE $FREQ $NPIX $PIXSIZE 0.5 0 17 1000" +#echo "execute $CMD" >> $OUTPUT +#$CMD 2>> $OUTPUT >> $OUTPUT +#sleep 5 + +#CMD="python $script $PARFILE $FREQ $NPIX $PIXSIZE 0.5 0 17 1500" +#echo "execute $CMD" >> $OUTPUT +#$CMD 2>> $OUTPUT >> $OUTPUT +#sleep 5 + + + +# To monitor the script log: +# tail -f resultatsScripts/scriptLog.txt + + +#for f in $FILES +#do +# for CTR in "ls" "modopti" "mv" "geo" +# do +# for COG in "cog" "tcog" "bpcog" "geom" #"pyr" # +# do +# CMD="python -i $script $f $COG $CTR $DEVICE" +# echo "execute $CMD" >> $OUTPUT +# $CMD 2>> $OUTPUT >> $OUTPUT +# done +# done +#done +echo "Script Done" + +#FILES_LGS="scao_sh_16x16_8pix_lgs.py" +#FILES_LGS+="scao_sh_40x40_10pix_lgs.par" +#FILES_LGS+="scao_sh_64x64_16pix_lgs.par" +#FILES_LGS+="scao_sh_80x80_20pix_lgs.par" diff --git a/tests/old_scripts/script_PYR39m.py b/tests/old_scripts/script_PYR39m.py new file mode 100644 index 0000000..c2c13e0 --- /dev/null +++ b/tests/old_scripts/script_PYR39m.py @@ -0,0 +1,392 @@ +import cProfile +import pstats as ps +import sys, os +sys.path.insert(0, os.environ["SHESHA_ROOT"] + "/widgets/") +sys.path.insert(0, os.environ["SHESHA_ROOT"] + "/lib/") + +#from adoptLib import computeKLModesImat, computeCmatModal +from shesha.util import tools +import numpy as np +import carmaWrap as ch +import shesha as ao +import time +import matplotlib.pyplot as plt +import hdf5_util as h5u +import resDataBase as db +import astropy.io.fits as pf +import glob +import pandas as pd + +print("TEST SHESHA\n closed loop: call loop(int niter)") +simulName = "PYR_39m" +pathResults = "/volumes/hra/micado/PYR39m_RoundPupil_RUN2/" +dBResult = "/volumes/hra/micado/PYR_39m_RoundPupil_RUN2.h5" +#GPUS = np.array([0, 1, 2, 3]) + +if (len(sys.argv) == 1): + error = 'command line should be:"python -i test.py parameters_filename"\n with "parameters_filename" the path to the parameters file' + raise Exception(error) +if (len(sys.argv) == 
2): + print("Using Internal parameters...") + """ + ----------------- + INPUTS + ----------------- + """ + freqs = [500.] + gainslist = [1] + #magnitudes=[11, 12, 13, 14, 15, 16] + magnitudes = [11, 15] + nKL_Filt = 1000 + MODU = [5] + RONS = [0.5] +else: + print("DETECTED BASH SCRIPT") + #python $script $PARFILE $FREQ $MODU $GAIN $MAG $KLFILT + print(sys.argv) + freqs = [float(sys.argv[2])] # frequency + RONS = [float(sys.argv[3])] # RONS + MODU = [float(sys.argv[4])] # MODU + gainslist = [float(sys.argv[5])] # frequency + magnitudes = [float(sys.argv[6])] # frequency + nKL_Filt = float(sys.argv[7]) # frequency + +#$FREQ $NPIX $PIXSIZE $GAIN $TH $MAG $KLFILT +Nsimutot = len(gainslist) * len(freqs) * len(RONS) * len(MODU) * len(magnitudes) + +if (not glob.glob(pathResults)): + print("Results folder not found. Creating it now:") + tools.system("mkdir " + pathResults) +if (not glob.glob(pathResults + "PSFs/")): + print("PSFs folder not found. Creating it now:") + + tools.system("mkdir " + pathResults + "PSFs/") + +#get parameters from file +param_file = sys.argv[1] # par filename +if (param_file.split('.')[-1] == b"py"): + filename = param_file.split('/')[-1] + param_path = param_file.split(filename)[0] + sys.path.insert(0, param_path) + exec("import %s as config" % filename.split(".py")[0]) + #sys.path.remove(param_path) +elif (param_file.split('.')[-1] == b"h5"): + sys.path.insert(0, os.environ["SHESHA_ROOT"] + "/data/par/par4bench/") + import scao_sh_16x16_8pix as config + #sys.path.remove(os.environ["SHESHA_ROOT"]+"/data/par/par4bench/") + h5u.configFromH5(param_file, config) +else: + raise ValueError("Parameter file extension must be .py or .h5") + +print("param_file is", param_file) + +if (hasattr(config, "simul_name")): + if (config.simul_name is None): + simul_name = "" + else: + simul_name = config.simul_name +else: + simul_name = "" +print("simul name is", simul_name) + +matricesToLoad = {} +if (simul_name == b""): + clean = 1 +else: + clean = 0 + 
param_dict = h5u.params_dictionary(config) + matricesToLoad = h5u.checkMatricesDataBase(os.environ["SHESHA_ROOT"] + "/data/", + config, param_dict) +#initialisation: +# context +c = ch.carmaWrap_context(devices=np.array([0, 1, 2, 3], dtype=np.int32)) + +#c.set_active_device(6) + + +def makeFITSHeader(filepath, df): + hdulist = pf.open(filepath) # read file + header = hdulist[0].header + names = np.sort(list(set(df))).tolist() + for name in names: + val = df[name][0] + if (type(val) is list): + value = "" + for v in val: + value += (str(v) + " ") + elif (type(val) is np.ndarray): + value = "" + for v in val: + value += (str(v) + " ") + else: + value = val + header.set(name, value, '') + hdulist.writeto(filepath, clobber=True) # Save changes to file + + +def initSimu(config, c): + # wfs + param_dict = h5u.params_dictionary(config) + matricesToLoad = h5u.checkMatricesDataBase(os.environ["SHESHA_ROOT"] + "/data/", + config, param_dict) + print("->wfs") + wfs, tel = ao.wfs_init(config.p_wfss, config.p_atmos, config.p_tel, config.p_geom, + config.p_target, config.p_loop, config.p_dms) + # atmos + print("->atmos") + atm = ao.atmos_init(c, config.p_atmos, config.p_tel, config.p_geom, config.p_loop, + config.p_wfss, wfs, config.p_target, rank=0, clean=clean, + load=matricesToLoad) + + # dm + print("->dm") + dms = ao.dm_init(config.p_dms, config.p_wfss, wfs, config.p_geom, config.p_tel) + + # target + print("->target") + tar = ao.target_init(c, tel, config.p_target, config.p_atmos, config.p_geom, + config.p_tel, config.p_dms) + + print("->rtc") + # rtc + rtc = ao.rtc_init(tel, wfs, config.p_wfss, dms, config.p_dms, config.p_geom, + config.p_rtc, config.p_atmos, atm, config.p_tel, config.p_loop, + do_refslp=False, clean=clean, simul_name=simul_name, + load=matricesToLoad, doimat=0) + + h5u.validDataBase(os.environ["SHESHA_ROOT"] + "/data/", matricesToLoad) + + print("====================") + print("init done") + print("====================") + print("objects initialzed on 
GPU:") + print("--------------------------------------------------------") + print(atm) + print(wfs) + print(dms) + print(tar) + print(rtc) + return wfs, tel, atm, dms, tar, rtc + + +def loop(n, wfs, tel, atm, dms, tar, rtc): + t0 = time.time() + print("----------------------------------------------------") + print("iter# | S.E. SR | L.E. SR | Est. Rem. | framerate") + print("----------------------------------------------------") + sr_se = [] + sr_le = [] + numiter = [] + for i in range(n): + atm.move_atmos() + + if (config.p_controllers[0].type == b"geo"): + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + rtc.do_control_geo(0, dms, tar, 0) + rtc.apply_control(0) + tar.dmtrace(0, dms) + else: + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + tar.dmtrace(t, dms) + for w in range(len(config.p_wfss)): + wfs.raytrace(w, "all", tel, atm, dms) + wfs.sensors_compimg(w) + + rtc.do_centroids(0) + rtc.docontrol(0) + rtc.apply_control(0) + + if ((i + 1) % 100 == 0): + print("Iter#:", i + 1) + #for t in range(config.p_target.ntargets): + t = 1 + SR = tar.get_strehl(t) + print("Tar %d at %3.2fMicrons:" % (t + 1, tar.Lambda[t])) + signal_se = "SR S.E: %1.2f " % SR[0] + signal_le = "SR L.E: %1.2f " % SR[1] + + print(signal_se + signal_le) + #print(i+1,"\t",,SR[0],"\t",SR[1]) + sr_le.append(SR[1]) + sr_se.append(SR[0]) + numiter.append(i + 1) + + +# +# plt.pause(0.01) +# plt.scatter(numiter, sr_le, color="green", label="Long Exposure") +# plt.plot(numiter, sr_le, color="green") +# plt.scatter(numiter, sr_se, color="red", label="Short Exposure") +# plt.plot(numiter, sr_se, color="red") + + t1 = time.time() + print(" loop execution time:", t1 - t0, " (", n, "iterations), ", (t1 - t0) / n, + "(mean) ", n / (t1 - t0), "Hz") + SRList = [] + for t in range(config.p_target.ntargets): + SR = tar.get_strehl(t) + SRList.append(SR[1]) # Saving Long Exp SR + return SRList, tar.Lambda.tolist(), sr_le, sr_se, numiter + +mimg = 0. 
# initializing average image + +SR = [] +""" +dictProcess, dictplot = getDataFrameColumns() +colnames = h5u.params_dictionary(config) +resAll = pd.DataFrame( columns=colnames.keys()) # res is the local dataframe for THIS data set +resAll = resAll.append(colnames, ignore_index=True) #Fill dataframe +resAll.srir = None +""" + +colnames = h5u.params_dictionary(config) # config values internal to compass +#simunames = {"PSFFilenames":None, "srir":None, "lambdaTarget":None, "threshold":None, "sr_le":None, "sr_se":None, "numiter":None, "NklFilt":None, "NklTot":None, "Nkl":None, "eigenvals":None, "Nphotons":None, "Nactu":None, "Nslopes":None}# Added values computed by the simu.. +simunames = { + "PSFFilenames": None, + "srir": None, + "lambdaTarget": None, + "nbBrightest": None, + "sr_le": None, + "sr_se": None, + "numiter": None, + "NklFilt": None, + "NklTot": None, + "Nkl": None, + "eigenvals": None, + "Nphotons": None, + "Nactu": None, + "RON": None, + "Nslopes": None +} # Added values computed by the simu.. + +resAll = db.readDataBase(fullpath=dBResult) # Reads all the database if exists +if (not (type(resAll) == pd.core.frame.DataFrame)): + print("Creating compass database") + resAll = db.createDf(list(colnames.keys()) + list( + simunames.keys())) # Creates the global compass Db + +#res = db.addcolumn(res,simunames) + +#freqs = [100.,300., 500., 1000.] +#npixs = [4,6,8] +#pixsizes = [0.5,1,1.5] # in lambda/dssp +#gainslist = [0.1, 0.3, 0.5] +#ths=[0,1,2,3] +#magnitudes=[11.5,12.5,13.5,14.5] +## + +#res500 = pf.get_data("/home/fvidal/res_500.fits") +#fig = plt.figure(num = 1) +#fig.show() +NCurrSim = 0 + +for freq in freqs: + config.p_loop.set_ittime(1 / freq) + for RON in RONS: + config.p_wfs0.set_noise(RON) + for modulation in MODU: + rMod = modulation + config.p_wfs0.set_pyr_npts( + int(np.ceil(int(rMod * 2 * 3.141592653589793) / 4.) 
* 4)) + config.p_wfs0.set_pyr_ampl(rMod) + for gain in gainslist: + config.p_controller0.set_gain(gain) # Change Gain + for magnitude in magnitudes: + NCurrSim += 1 + config.p_wfs0.set_gsmag(magnitude) + res = pd.DataFrame( + columns=list(colnames.keys()) + list( + simunames.keys())) # Create Db for last result + print("Freq = %3.2f Hz" % (1. / config.p_loop.ittime)) + print("Magnitude = %3.2f" % config.p_wfs0.gsmag) + print("Gain = %3.2f" % config.p_controller0.gain) + + wfs, tel, atm, dms, tar, rtc = initSimu(config, c) # Init Simu + nfilt = nKL_Filt + #cmat = ao.compute_cmatWithKL(rtc, config.p_controllers[0], dms, config.p_dms, config.p_geom, config.p_atmos, config.p_tel, nfilt) + print("Reading cMat") + print(1 / 0.) + + #cmat = pf.get_data(os.environ["SHESHA_ROOT"]+"/test/scripts/cmatKLGood.fits").byteswap().newbyteorder() + print("Setting cMat") + #rtc.set_cmat(0, cmat.copy().astype(np.float32)) + rtc.set_cmat(0, cmat.copy()) + print("Starting Loop") + SR, lambdaTargetList, sr_le, sr_se, numiter = loop( + config.p_loop.niter, wfs, tel, atm, dms, tar, rtc) + dfparams = h5u.params_dictionary( + config) # get the current compass config + dfparams.update(simunames) # Add the simunames params + + res = db.fillDf(res, dfparams) # Saving dictionnary config + res.loc[0, "NklFilt"] = nKL_Filt + res.loc[0, "Nkl"] = cmat.shape[0] - 2 - nKL_Filt + res.loc[0, "NklTot"] = cmat.shape[0] - 2 + res.loc[0, "Nactu"] = cmat.shape[0] + res.loc[0, "Nslopes"] = cmat.shape[1] + res.loc[0, "Nphotons"] = config.p_wfs0._nphotons + res.loc[0, "RON"] = RON + res.loc[0, "Nphotons"] = config.p_wfs0._nphotons + #res.eigenvals.values[0] = rtc.getEigenvals(0) + res.srir.values[0] = SR # Saving computed values + res.lambdaTarget.values[0] = lambdaTargetList + res.loc[0, "gsmag"] = config.p_wfs0.gsmag + res.loc[0, "gain"] = config.p_controller0.gain + #res.sr_le.values[0] = sr_le + #res.sr_se.values[0] = sr_se + #res.numiter.values[0] = numiter + res.loc[0, "simulname"] = simulName + 
print("Saving PSFs...") + PSFNameList = [] + for t in range(config.p_target.ntargets): + PSFtarget = tar.get_image(t, "le") + date = time.strftime("_%d-%m-%Y_%H:%M:%S_") + lam = "%3.2f" % tar.Lambda.tolist()[t] + lam = lam.replace(".", "_") + PSFName = "PYR_" + lam + "_" + date + ".fits" + PSFNameList.append(PSFName) + #PSFNameList.append("NOT SAVED") + pf.writeto(pathResults + "PSFs/" + PSFName, PSFtarget.copy(), + clobber=True) + lam2 = "%3.2f" % tar.Lambda.tolist()[t] + res.loc[0, "SR_%s" % lam2] = SR[t] + filepath = pathResults + "PSFs/" + PSFName + + #"Add the SR and wavelegth value at the top of the PSF header file" + hdulist = pf.open(filepath) # read file + header = hdulist[0].header + header["SR"] = SR[t] + header["wavelength"] = tar.Lambda.tolist()[t] + hdulist.writeto(filepath, clobber=True) # Save changes to file + # Adding all the parameters to the header + makeFITSHeader(filepath, res) + print("Done") + res.loc[0, "type_ap"] = str(res.loc[0, "type_ap"][0]) + res.loc[0, "type"] = str(res.loc[0, "type"][0]) + res.loc[0, "type"] = "pzt, tt" + res.loc[0, "npix"] = res.loc[0, "npix"][0] + #res.loc[0, "nbBrightest"] = res.loc[0, "nbBrightest"][0] + res.loc[0, "pixsize"] = res.loc[0, "pixsize"][0] + res.PSFFilenames.values[0] = PSFNameList + resAll = db.fillDf(resAll, res) # Saving in global DB + #resAll.to_hdf("/home/fvidal/compass/trunk/shesha/test/scripts/resultatsScripts/SH39m.h5", "resAll", complevel=9,complib='blosc') + resAll.to_hdf(dBResult, "resAll", complevel=9, complib='blosc') + #db.saveDataBase(resAll) + +print("Simulation Done...") +""" +Sauver PSF dans le bon nom + directory + ranger... 
+ params dans le header + + SR = np.zeros((3, len(set(resAll.gsmag)))) + i=0 + for mag in list(set(resAll.gsmag)): + SR[0,i] = resAll[resAll.gsmag == mag]["SR_1.20"].max() + SR[1,i] = resAll[resAll.gsmag == mag]["SR_1.65"].max() + SR[2,i] = resAll[resAll.gsmag == mag]["SR_2.20"].max() + i+=1 + + +""" diff --git a/tests/old_scripts/script_PYR39m_optimGain.py b/tests/old_scripts/script_PYR39m_optimGain.py new file mode 100644 index 0000000..5ab11e1 --- /dev/null +++ b/tests/old_scripts/script_PYR39m_optimGain.py @@ -0,0 +1,715 @@ +# +""" +Test Line + +ipython -i /home/fvidal/compass/shesha/test/scripts/script_PYR39m_optimGain.py /home/fvidal/compass/shesha/data/par/MICADO/micado_39m_PYR_ELTPupil.py 500 0.1 5 0.5 17 100 py3Test 92 5 +""" + + +def sendMail(message, title): + import smtplib + from email.mime.text import MIMEText + smtp = smtplib.SMTP('smtp.obspm.fr') + msg = MIMEText(title) + msg['From'] = 'micmac' + msg['To'] = 'Script micmac' + msg['Subject'] = message + smtp.sendmail('micmac@obspm.fr', ["fabrice.vidal@obspm.fr"], msg.as_string()) + + +#try: +import cProfile +import pstats as ps +import sys, os +sys.path.insert(0, os.environ["SHESHA_ROOT"] + "/widgets/") +sys.path.insert(0, os.environ["SHESHA_ROOT"] + "/lib/") +sys.path.insert(0, os.environ["SHESHA_ROOT"] + "/AOlib/") +sys.path.insert(0, os.environ["SHESHA_ROOT"] + "/src/shesha_util/") + +#from adoptLib import computeKLModesImat, computeCmatModal +from shesha.util import tools +import numpy as np +import carmaWrap as ch +import shesha.config as ao +import shesha.sim +import shesha.constants as scons +from shesha.constants import CONST + +import time +import matplotlib.pyplot as plt +import hdf5_util as h5u +import resDataBase as db +import astropy.io.fits as pf +import glob +import pandas as pd +import compassConfigToFile as cf +import make_pupil as mkP + +if (len(sys.argv) == 1): + error = 'command line should be:"python -i test.py parameters_filename"\n with "parameters_filename" the path to the 
parameters file' + raise Exception(error) +elif (len(sys.argv) == 2): + print("Using Internal parameters...") + """ + ----------------- + INPUTS + ----------------- + """ + freq = 500 + gain = 1 + magnitude = 11 + nKL_Filt = 450 + MODU = 5 + RON = 0.1 + NSSP = 92 + simulName = "PYR_39m_RoundPupil_FromHippo6" +else: + print("-------------------------------------") + print("DETECTED BASH SCRIPT with parameters:") + print((sys.argv)) + print("-------------------------------------") + + freq = float(sys.argv[2]) # AO Loop frequency + RON = float(sys.argv[3]) # noise on the WFS measurement in electrons + MODU = float(sys.argv[4]) # modulation radius + gain = float(sys.argv[5]) # global loop gain + magnitude = float(sys.argv[6]) # gs magnitude + nKL_Filt = int(float(sys.argv[7])) # Nb KL filtered + simulName = sys.argv[8] # Nb KL filtered + NSSP = int(sys.argv[9]) # Number of ssp (pixels of pyramid) + GPU = int(sys.argv[10]) # GPU number + comment = "SRVsGSVsNControlledModes" + +print(("Freq=", freq)) +print(("RON=", RON)) +print(("MODU=", MODU)) +print(("gain=", gain)) +print(("magnitude=", magnitude)) +print(("nKL_Filt=", nKL_Filt)) +print(("GPU=", GPU)) +print(("simulName=", simulName)) + +pathResults = "/volumes/hra/micado/" + simulName + +dBResult = pathResults + "/" + simulName + ".h5" +savePSFs = True +PYR = True + +imatFromFile = False +#iMatName = "iMat39mPYR_MODU_"+str(int(MODU))+".fits" +#KL2VName = "KL2VNorm39mPYR_MODU_"+str(int(MODU))+".fits" +#gainModalName = "gains4K_MODU_"+str(int(MODU))+".fits" + +#iMatName = "iMat_MODU_5_ELTPUPIL.fits" +#KL2VName = "KL2VNorm_MODU_5_ELTPUPIL.fits" +#gainModalName = "gains4K_MODU_5_ELTPUPIL.fits" + +imat0_PATH = "/home/fvidal/dataSimus" +iMatName = "imatDiffraction_ELTPYR_35Layers.fits" +gainModalName = "gains4K_ELTPYR_35Layers.fits" +KL2VName = "KL2VNorm_ELTPYR_35Layers.fits" +""" +iMatName = "iMat_MODU_2_ELTPUPIL.fits" +KL2VName = "KL2VNorm_MODU_2_ELTPUPIL.fits" +gainModalName = "gains4K_MODU_2_ELTPUPIL.fits" +""" + 
+ModalBasisType = "Btt" + +PSFWithOtherPupil = True + +niter = 8096 +saveCBData = True +nbLoopData = 512 +""" +simulName = "PYR_39m" +pathResults="/home/fvidal/dataSimus/PYR_39m_RoundPupil_RUN1/" +dBResult = "/home/fvidal/dataSimus/PYR_39m_RoundPupil_RUN1.h5" +imat0_PATH = "/home/fvidal/compass/shesha/test/scripts" +savePSFs = False +imatFromFile = False +""" + +if (GPU == 1): + GPUs = np.array([4, 5, 6, 7], dtype=np.int32) +else: + GPUs = np.array([GPU], dtype=np.int32) +print(("Using GPUs: ", GPUs)) + +GPUs = np.array([4, 5, 6, 7], dtype=np.int32) +#GPUs = np.array([0,1,2,3], dtype=np.int32) + +if (not glob.glob(pathResults)): + print("Results folder not found. Creating it now:") + tools.system("mkdir " + pathResults) +if (not glob.glob(pathResults + "/PSFs/")): + print("PSFs folder not found. Creating it now:") + tools.system("mkdir " + pathResults + "/PSFs/") +if (not glob.glob(pathResults + "/AODATA/")): + print("AODATA folder not found. Creating it now:") + tools.system("mkdir " + pathResults + "/AODATA/") +if (not glob.glob(pathResults + "/CircularBuffers/")): + print("CircularBuffers folder not found. 
Creating it now:") + tools.system("mkdir " + pathResults + "/CircularBuffers/") + +#get parameters from file +param_file = sys.argv[1] # par filename +if (param_file.split('.')[-1] == "py"): + filename = param_file.split('/')[-1] + param_path = param_file.split(filename)[0] + sys.path.insert(0, param_path) + exec("import %s as config" % filename.split(".py")[0]) + #sys.path.remove(param_path) +elif (param_file.split('.')[-1] == "h5"): + sys.path.insert(0, os.environ["SHESHA_ROOT"] + "/data/par/par4bench/") + import scao_sh_16x16_8pix as config + #sys.path.remove(os.environ["SHESHA_ROOT"]+"/data/par/par4bench/") + h5u.configFromH5(param_file, config) +else: + raise ValueError("Parameter file extension must be .py or .h5") + +print(("param_file is", param_file)) + +if (hasattr(config, "simul_name")): + if (config.simul_name is None): + simul_name = "" + else: + simul_name = config.simul_name +else: + simul_name = "" +print(("simul name is", simul_name)) + +matricesToLoad = {} +if (simul_name == ""): + clean = 1 +else: + clean = 0 + param_dict = h5u.params_dictionary(config) + matricesToLoad = h5u.checkMatricesDataBase(os.environ["SHESHA_ROOT"] + "/data/", + config, param_dict) +c = ch.carmaWrap_context(devices=GPUs) + + +class wao_class(): + + def __init__(self, config, wfs, tel, atm, dms, tar, rtc): + self.config = config + self.wfs = wfs + self.tel = tel + self.atm = atm + self.dms = dms + self.tar = tar + self.rtc = rtc + + +def makeFITSHeader(filepath, df): + hdulist = pf.open(filepath) # read file + header = hdulist[0].header + names = np.sort(list(set(df))).tolist() + + for name in names: + if (name != "centroider.thresh"): + val = df[name][0] + if (type(val) is list): + value = "" + for v in val: + value += (str(v) + " ") + value = value.replace("\n", "") + elif (type(val) is np.ndarray): + value = "" + for v in val: + value += (str(v) + " ") + value = value.replace("\n", "") + + else: + value = val + + if ((type(value) is str)): + if (len(value) > 50): + 
print(("warning", name, "keyword has been cut to 100 characters")) + #header.set(name, value[:50],'') + else: + header.set(name, value, '') + + hdulist.writeto(filepath, clobber=True) # Save changes to file + + +def initSimu(config, c): + param_dict = h5u.params_dictionary(config) + matricesToLoad = h5u.checkMatricesDataBase(os.environ["SHESHA_ROOT"] + "/data/", + config, param_dict) + print("->wfs") + wfs, tel = ao.wfs_init(config.p_wfss, config.p_atmos, config.p_tel, config.p_geom, + config.p_target, config.p_loop, config.p_dms) + print("->atmos") + atm = ao.atmos_init(c, config.p_atmos, config.p_tel, config.p_geom, config.p_loop, + config.p_wfss, wfs, config.p_target, rank=0) + print("->dm") + dms = ao.dm_init(config.p_dms, config.p_wfss, wfs, config.p_geom, config.p_tel) + print("->target") + tar = ao.target_init(c, tel, config.p_target, config.p_atmos, config.p_geom, + config.p_tel, config.p_dms) + print("->rtc") + rtc = ao.rtc_init(tel, wfs, config.p_wfss, dms, config.p_dms, config.p_geom, + config.p_rtc, config.p_atmos, atm, config.p_tel, config.p_loop, + do_refslp=False, clean=clean, simul_name=simul_name, + load=matricesToLoad, doimat=0) + + h5u.validDataBase(os.environ["SHESHA_ROOT"] + "/data/", matricesToLoad) + + print("====================") + print("init done") + print("====================") + print("objects initialzed on GPU:") + print("--------------------------------------------------------") + print(atm) + print(wfs) + print(dms) + print(tar) + print(rtc) + return wfs, tel, atm, dms, tar, rtc + + +def compute_modal_residuals(P, rtc, dms, tar): + rtc.do_control_geo(1, dms, tar, 0) + #self.rtc.do_control_geo_on(1, self.dms,self.tar, 0) + v = rtc.get_command(1) + ai = P.dot(v) * 1000. # np rms units + return ai + + +def loop(n, wfs, tel, atm, dms, tar, rtc, move_atmos=True, noise=True, loopData=0, + P=None): + t0 = time.time() + print("----------------------------------------------------") + print("iter# | S.E. SR | L.E. SR | Est. Rem. 
| framerate") + print("----------------------------------------------------") + """ + ph = tar.get_image(0, "se") + pupBig = ph*0. + + phsize = pup.shape[0] + npup = pupBig.shape[0] # wao.wfs.get_pyrimghr(0).shape + + pupBig[(npup-phsize)/2:(npup+phsize)/2, (npup-phsize)/2:(npup+phsize)/2] = pup + PSFLEArray = np.zeros((config.p_target.ntargets, ph.shape[0],ph.shape[1])) + PSFSEArray = np.zeros((config.p_target.ntargets, ph.shape[0],ph.shape[1])) + + """ + RmsErrorTot = [] + ph = tar.get_image(0, "se") + PSFtarget = np.zeros((config.p_target.ntargets, ph.shape[0], ph.shape[1])) + sr_se = [] + numiter = [] + if (loopData): + if (loopData > n): + loopData = n + slopes = np.zeros((loopData, rtc.get_centroids(0).shape[0])) + volts = np.zeros((loopData, rtc.get_voltage(0).shape[0])) + else: + slopes = volts = None + ii = 0 + jj = 0 + sr_se = np.zeros((n / 10, config.p_target.ntargets)) + sr_le = np.zeros((n / 10, config.p_target.ntargets)) + + for i in range(n): + if (move_atmos): + atm.move_atmos() + + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + tar.dmtrace(t, dms) + for w in range(len(config.p_wfss)): + wfs.raytrace(w, "all", tel, atm, dms) + wfs.sensors_compimg(w, noise=noise) + + rtc.do_centroids(0) + if (loopData): + if (i >= (n - loopData)): + #print("Recording loop Data") + s = rtc.get_centroids(0) + v = rtc.get_voltage(0) + volts[ii, :] = v.copy() + slopes[ii, :] = s.copy() + ii += 1 + rtc.docontrol(0) + rtc.doclipping(0, -1e5, 1e5) + rtc.apply_control(0) + + signal_le = "" + signal_se = "" + if (P is not None): + ai = compute_modal_residuals(P, rtc, dms, tar) + tarPhaseError = np.sqrt(np.sum(ai**2)) + else: + tarPhaseError = 0. 
+ RmsErrorTot.append(tarPhaseError) + print("tarPhaseError =", tarPhaseError, "nm rms") + if ((i + 1) % 10 == 0): + print(("Iter#:", i + 1, "/", n)) + t = 0 + SRTmp = np.zeros(config.p_target.ntargets) + SRTmp2 = np.zeros(config.p_target.ntargets) + + for t in range(config.p_target.ntargets): + if (PSFWithOtherPupil): + SR = list([0, 0]) + SR[0] = PSFSEArray[t, :, :].max() # SE SR + SR[1] = (PSFLEArray[t, :, :] / (i + 1)).max() # LE SR + else: + SR = tar.get_strehl(t) + #print("Tar %d at %3.2fMicrons:" % (t+1, tar.Lambda[t])) + signal_se += "SR S.E %3.2fMicrons:: %1.2f " % (tar.Lambda[t], SR[0]) + signal_le += "SR L.E %3.2fMicrons:: %1.2f " % (tar.Lambda[t], SR[1]) + SRTmp[t] = SR[0] * 100 + SRTmp2[t] = SR[1] * 100 + print((signal_se + signal_le)) + sr_se[jj, :] = SRTmp.copy() + sr_le[jj, :] = SRTmp2.copy() + + #sr_se.append() + #sr_se.append(SR[0]) + numiter.append(i + 1) + jj += 1 + + t1 = time.time() + print((" loop execution time:", t1 - t0, " (", n, "iterations), ", (t1 - t0) / n, + "(mean) ", n / (t1 - t0), "Hz")) + SRList = [] + for t in range(config.p_target.ntargets): + SR = tar.get_strehl(t) + PSFtarget[t, :, :] = tar.get_image(t, "le") + SRList.append(SR[1]) # Saving Last Long Exp SR + + return SRList, tar.Lambda.tolist(), sr_se.astype(int), sr_le.astype( + int), numiter, slopes, volts, PSFtarget, RmsErrorTot + + +SR = [] +colnames = h5u.params_dictionary(config) # config values internal to compass +simunames = { + "PSFFilenames": None, + "rmsError": None, + "rmsErrorList": None, + "comment": None, + "NCPA": None, + "NCPAList": None, + "ModalType": None, + "srir": None, + "gainModal": None, + "lambdaTarget": None, + "nbBrightest": None, + "sr_le": None, + "sr_se": None, + "numiter": None, + "NklFilt": None, + "NklTot": None, + "Nkl": None, + "eigenvals": None, + "Nphotons": None, + "Nactu": None, + "RON": None, + "Nslopes": None +} # Added values computed by the simu.. 
+ +resAll = db.readDataBase(fullpath=dBResult) # Reads all the database if exists +if (not (type(resAll) == pd.core.frame.DataFrame)): + print("Creating compass database") + resAll = db.createDf(list(colnames.keys()) + list( + simunames.keys())) # Creates the global compass Db + +# ----------------------------------------------------------------------------- +# ----------- Replacing values from user defined variables------------------- +# ----------------------------------------------------------------------------- +config.p_wfs0.set_nxsub(NSSP) +if (PYR): + decalage = int((240 - 4 - (NSSP * 2)) / 2. + NSSP / 2) + config.p_wfs0.set_pyr_pup_sep(decalage) + rMod = MODU + config.p_wfs0.set_pyr_npts(int(np.ceil(int(rMod * 2 * 3.141592653589793) / 4.) * 4)) + config.p_wfs0.set_pyr_ampl(rMod) + +config.p_loop.set_ittime(1 / freq) +config.p_wfs0.set_noise(RON) +config.p_loop.set_niter(niter) + +config.p_wfs0.set_gsmag(magnitude) + +res = pd.DataFrame(columns=list(colnames.keys()) + list(simunames.keys())) # Create Db +wfs, tel, atm, dms, tar, rtc = initSimu(config, c) # Init COMPASS Simu! + +if (PSFWithOtherPupil): + #pp = config.p_geom.get_spupil().shape[0] + cent = config.p_geom.pupdiam / 2. + 0.5 + oldsetting = int(config.p_tel.t_spiders) + config.p_tel.set_t_spiders(-1) # Enabling spiders for pupil computation + pupELTSpiders = mkP.make_pupil(config.p_geom.pupdiam, config.p_geom.pupdiam, + config.p_tel, cent, cent).astype(np.float32) + config.p_tel.set_t_spiders( + oldsetting) # Disabling spiders in case of it is used else where in GPU... 
+ #PUPILPATH = "/home/fvidal/dataSimus/pupELTwithSpiders_368.fits" + #PUPILPATH = "/home/fvidal/dataSimus/pupELTwithSpiders.fits" + #PUPILPATH = "/home/fvidal/dataSimus/pupELTwithSpiders_1472.fits" + for target in range(config.p_target.ntargets): # Apply E-ELT pupil for each target + tar.set_pupil(target, pupELTSpiders.astype(np.float32)) + +# ------------ ADOPT ---------------- +ADOPTPATH = os.getenv("ADOPTPATH") +sys.path.insert(0, ADOPTPATH) +import adoptCompass as adoptComm +import adoptVariables as adoptVar +import aoCalib as cal +config_fileName = ADOPTPATH + "/config/ADOPT.conf" +wao = wao_class(config, wfs, tel, atm, dms, tar, rtc) +cf.returnConfigfromWao(wao, filepath=config_fileName) +com = adoptComm.command_class(wao, ao) +aoAd = adoptVar.ao_class(adoptVar.ao_attributes, adoptVar.wfs_attributes, + adoptVar.dm_attributes, config_fileName) +com.initComm(aoAd) +com.do_ref_slopes() + +#KL2V = com.getKL2V() +# +nfilt = nKL_Filt + +# Computing imat on diffraction limited source. +if (imatFromFile): + print("Reloading imat KL2V and gains4K from files...") + #print(imat0_PATH+"/"+iMatName) + #print(imat0_PATH+"/gains4K_MODU_"+str(int(MODU))+".fits") + imat = pf.get_data(imat0_PATH + "/" + iMatName) + modal_basis = pf.get_data(imat0_PATH + "/" + KL2VName) + gains4KRAW = pf.get_data(imat0_PATH + "/" + gainModalName) + gains4K = np.zeros(imat.shape[0] - nfilt) + gains4K[:-2] = gains4KRAW[:imat.shape[0] - nfilt - 2] + gains4K[-2:] = gains4KRAW[-2:] + gainopt = gains4K.copy() +else: + KL2V = com.getKL2V() + KL2VNorm = cal.normalizeKL2V(KL2V) + print("Computing Imat Diffraction Limited") + imat = cal.computeImatModal(com, KL2VNorm, aoAd.dm0.push4iMat, aoAd.dm1.push4iMat, + withTurbu=False, noise=False) + gains = np.linspace(1., 1., aoAd.Nactu - 2 - nfilt) + gains[-2:] = 1.0 + cmat0, cmatKL0 = cal.computeCmatModal(imat, KL2VNorm, nfilt, gains) + com.set_command_matrix(cmat0) + com.close_loop() + print("Closing Loop with Imat Diffraction Limited") + + # Closing loop 
until we reach the fitting error for the given ao config + turbulence conditions (seeing ect...) but without noise and bandwidth (screen is frozen) + SR, lambdaTargetList, sr_se, numiter, _, _, _ = loop(200, wfs, tel, atm, dms, tar, + rtc, move_atmos=True, + noise=True) + print("SR After 200 iterations of closed loop:") + + if (PYR): + cmat0, cmatModal0 = cal.computeCmatModal(imat, modal_basis, nfilt, gains) + com.set_command_matrix(cmat0) + com.close_loop() + print("Closing Loop with Imat Diffraction Limited") + + # Closing loop until we reach the fitting error for the given ao config + turbulence conditions (seeing ect...) but without noise and bandwidth (screen is frozen) + SR, lambdaTargetList, sr_se, sr_le, numiter, _, _, _, _ = loop( + 100, wfs, tel, atm, dms, tar, rtc, move_atmos=True, noise=True, P=P) + print("SR After 100 iterations of closed loop:") + + # Computing 2nd imat with this best conditions (no noise + limited by fitting) + imatTurbu = cal.computeImatModal(com, modal_basis, aoAd.dm0.push4iMat, + aoAd.dm1.push4iMat, withTurbu=True, noise=False) + gains4K = cal.computeOptimGainK(imat, imatTurbu, nfilt) + + date = time.strftime("_%d-%m-%Y_%H:%M:%S_") + gainModalName = "gains4K_" + date + ".fits" + iMatName = "imat_" + date + ".fits" + KL2VName = "modal_basis_" + date + ".fits" + # saving imat, modal basis, and gains... + pf.writeto(pathResults + "/AODATA/" + iMatName, imat) + pf.writeto(pathResults + "/AODATA/" + KL2VName, modal_basis) + pf.writeto(pathResults + "/AODATA/" + gainModalName, gains4K) + gainopt = gains4K.copy() + else: + gainopt = np.linspace(1., 1., aoAd.Nactu - 2 - nfilt) + gainopt[-2:] = 1.0 +cmatT, cmatKLT = cal.computeCmatModal(imat, modal_basis, nfilt, gainopt * gain) +cmat = cmatT +com.set_command_matrix(cmatT) +com.close_loop() +com.resetSR() + +# ------------------------------------------------------------------------------ +# --------------------- Modal Optim. 
---------------------------------------- +# ------------------------------------------------------------------------------ +# Taking 2048 loop data for Optim Modal gain optim ("a la" Gendron & Lena) +# closing loop by adding noise + bandwidth and wait a bit that loop converge... +""" +SR, lambdaTargetList, sr_le, sr_se, numiter, _, _ = loop(200,wfs,tel,atm,dms,tar,rtc, noise=True) +com.resetSR() +SR, lambdaTargetList, sr_le, sr_se, numiter, slopes, volts = loop(2048,wfs,tel,atm,dms,tar,rtc, noise=True, loopData=True) +V2KL =np.linalg.pinv(KL2VNorm) +sol = cal.recPseudoopen_loop(slopes, volts, imat, V2KL, gains4K, nfilt, 1/aoAd.Fe, aoAd.Fe) +gainoptCorr = cal.modalControlOptimizationopen_loopData(sol.T, cmatKL0, KL2VNorm, gmax = 1.0, Fs = aoAd.Fe, latency = 1/aoAd.Fe, BP = 1e12,ngain=200) +gainopt = gainopt*gainoptCorr +cmatOptim,_ = cal.computeCmatModal(imat, KL2VNorm, nfilt, gainopt); +com.set_command_matrix(cmatOptim) + +com.close_loop() +""" +# ------------------------------------------------------------------------------ + +#cmat = pf.get_data(os.environ["SHESHA_ROOT"]+"/test/scripts/cmatKLGood.fits").byteswap().newbyteorder() +#rtc.set_cmat(0, cmat.copy().astype(np.float32)) + +# ----------------------------------------------------------------------------- +# ----------- !!!!!! 
Starting real loop !!!!!!------------------- +# ----------------------------------------------------------------------------- + +print("Starting Real Loop") +com.resetSR() +# com.resetDm() +SR, lambdaTargetList, sr_se, sr_le, numiter, slopesCB, voltsCB, PSFtarget, rmsErrorList = loop( + config.p_loop.niter, wfs, tel, atm, dms, tar, rtc, loopData=nbLoopData, P=P) + +if (saveCBData): + PYRImage = wfs.get_pyrimg(0) + date = time.strftime("_%d-%m-%Y_%H:%M:%S_") + slopesCBName = "slopesCB_" + date + ".fits" + voltsCBName = "voltsCB_" + date + ".fits" + PYRIMAGEName = "pyrImageCB_" + date + ".fits" + SRHistorySEName = "SRHistorySE_" + date + ".fits" + SRHistoryLEName = "SRHistoryLE_" + date + ".fits" + pf.writeto(pathResults + "/CircularBuffers/" + slopesCBName, slopesCB.copy()) + pf.writeto(pathResults + "/CircularBuffers/" + voltsCBName, voltsCB.copy()) + pf.writeto(pathResults + "/CircularBuffers/" + PYRIMAGEName, PYRImage.copy()) + pf.writeto(pathResults + "/CircularBuffers/" + SRHistorySEName, sr_se.copy()) + pf.writeto(pathResults + "/CircularBuffers/" + SRHistoryLEName, sr_le.copy()) + +else: + slopesCBName = "" + voltsCBName = "" + PYRIMAGEName = "" + SRHistorySEName = "" + SRHistoryLEName = "" + +# ------------- Saving config and results in data frame ----- +dfparams = h5u.params_dictionary(config) # get the current compass config +for i in range(len(dfparams)): + if (type(dfparams[list(dfparams.keys())[i]]) is list): # If list + if (len(dfparams[list(dfparams.keys())[i]]) == 1): # If list has only 1 element: + dfparams[list(dfparams.keys())[i]] = dfparams[list(dfparams.keys())[i]][0] + +dfparams.update(simunames) # Add the simunames params + +res = db.fillDf(res, dfparams) # Saving dictionnary config +res.loc[0, "iMatName"] = iMatName +res.loc[0, "ModalName"] = KL2VName +res.loc[0, "gainModalName"] = gainModalName +res.loc[0, "slopesCBName"] = slopesCBName +res.loc[0, "voltsCBName"] = voltsCBName +res.loc[0, "SRHistoryLEName"] = SRHistorySEName +res.loc[0, 
"SRHistorySEName"] = SRHistoryLEName +res.loc[0, "PYRIMAGEName"] = PYRIMAGEName +res.loc[0, "comment"] = comment +res.loc[0, "NklFilt"] = nKL_Filt +res.loc[0, "Nkl"] = imat.shape[0] - nfilt - 2 +res.loc[0, "NklTot"] = cmat.shape[1] - 2 +res.loc[0, "Nactu"] = cmat.shape[1] +res.loc[0, "Nslopes"] = cmat.shape[0] +res.loc[0, "Nphotons"] = config.p_wfs0._nphotons +res.loc[0, "RON"] = RON +#res.eigenvals.values[0] = rtc.getEigenvals(0) +res.srir.values[0] = SR # Saving computed values +res.lambdaTarget.values[0] = lambdaTargetList +res.loc[0, "gsmag"] = config.p_wfs0.gsmag +res.loc[0, "gain"] = gain +#res.loc[0, "type_ap"] = str(res.loc[0, "type_ap"][0]) +#res.loc[0, "type_wfs"] = str(res.loc[0, "type_wfs"][0]) +res.loc[0, "type_dm"] = "pzt, tt" +#res.loc[0, "npix"] = res.loc[0, "npix"][0] +#res.loc[0, "nbBrightest"] = res.loc[0, "nbBrightest"][0] +res.loc[0, "pixsizeInMeters"] = (config.p_tel.diam / config.p_geom.get_spupil().shape[0]) +# PSF pixsize = +#res.sr_le.values[0] = sr_le +#res.sr_se.values[0] = sr_se +#res.numiter.values[0] = numiter +res.loc[0, "simulname"] = simulName +res.rmsErrorList.values[0] = rmsErrorList +res.rmsError.values[0] = np.average(np.array(rmsErrorList)) + +# --------------- PSF Stuff ---------------------- +print("Saving PSFs...") +PSFNameList = [] +for t in range(config.p_target.ntargets): + + date = time.strftime("_%d-%m-%Y_%H:%M:%S_") + lam = "%3.2f" % tar.Lambda.tolist()[t] + lam = lam.replace(".", "_") + PSFName = "PYR_TARGET_" + str(t + 1) + "_Lambda_" + lam + "_" + date + ".fits" + PSFNameList.append(PSFName) + #PSFNameList.append("NOT SAVED") + if (savePSFs): + pf.writeto(pathResults + "/PSFs/" + PSFName, PSFtarget[t, :, :].copy(), + clobber=True) + lam2 = "%3.2f" % tar.Lambda.tolist()[t] + res.loc[0, "SR_%s" % lam2] = SR[t] + PSFPixsize = (tar.Lambda.tolist()[t] * 1e-6) / ( + wao.config.p_tel.diam / wao.config.p_geom.get_spupil().shape[0] * + wao.config.p_geom.get_ipupil().shape[0]) * 206265. 
+ res.loc[0, "pixsizeArcSec_%s" % lam2] = PSFPixsize + filepath = pathResults + "/PSFs/" + PSFName + if (savePSFs): + #"Add the SR and wavelegth value at the top of the PSF header file" + hdulist = pf.open(filepath) # read file + header = hdulist[0].header + header["SR"] = SR[t] + header["wavelengthMic"] = tar.Lambda.tolist()[t] + header["pixsizeArcSec"] = PSFPixsize + hdulist.writeto(filepath, clobber=True) # Save changes to file + # Adding all the parameters to the header + makeFITSHeader(filepath, res) + else: + res.PSFFilenames.values[0] = ["PSF NOT SAVED"] +print("Done") +res.PSFFilenames.values[0] = PSFNameList + +resAll = db.fillDf(resAll, res) # Saving in global DB +#resAll.to_hdf("/home/fvidal/compass/trunk/shesha/test/scripts/resultatsScripts/SH39m.h5", "resAll", complevel=9,complib='blosc') +resAll.to_hdf(dBResult, "resAll", complevel=9, complib='blosc') +#db.saveDataBase(resAll) + +print("Simulation Done...") +""" +except Exception as e: + if(e): + print "Simu failed" + print e + sendMail("Script failed!" + str(sys.argv), str(e)+ str("\nline: ") +str(sys.exc_info()[-1].tb_lineno)) +""" +"""Convert resAll not well formatted: + + + +for i in range(len(dfparams)): + if(type(dfparams[dfparams.keys()[i]]) is list): # If list + if(len(dfparams[dfparams.keys()[i]]) == 1): # If list has only 1 element: + dfparams[dfparams.keys()[i]] = dfparams[dfparams.keys()[i]][0] + + + + +for i in range(len(list(set(resAll)))): + if(type(resAll[list(set(resAll))[i]][0]) is list): # If list + if(len(resAll[list(set(resAll))[i]][0]) == 1): # If list has only 1 element: + key = list(set(resAll))[i] + for j in range(len(resAll)): + try: + resAll.loc[j, key] = resAll.loc[j, key][0] + except: + print "Warning! 
Could not convert list "+key+" with 1 element to 1 element" + pass + + + + +""" diff --git a/tests/old_scripts/script_SH39m.py b/tests/old_scripts/script_SH39m.py new file mode 100644 index 0000000..3a0d640 --- /dev/null +++ b/tests/old_scripts/script_SH39m.py @@ -0,0 +1,392 @@ +import cProfile +import pstats as ps +import sys, os +sys.path.insert(0, os.environ["SHESHA_ROOT"] + "/widgets/") + +from shesha.util import tools +import numpy as np +import carmaWrap as ch +import shesha as ao +import time +import matplotlib.pyplot as plt +import hdf5_util as h5u +import resDataBase as db +import astropy.io.fits as pf +import glob +import pandas as pd + +print("TEST SHESHA\n closed loop: call loop(int niter)") +simulName = "SH_39m" +#pathResults="/home/fvidal/compass/shesha/test/scripts/resultatsScripts/RunSH39m/" +#pathResults="/opt/public/fvidal/data/RunSH39m/" +pathResults = "/volumes/hra/micado/RunSH39m_RoundPupil/" +dBResult = "SH39m_RoundPupil.h5" +#GPUS = np.array([0, 1, 2, 3]) + +if (len(sys.argv) == 1): + error = 'command line should be:"python -i test.py parameters_filename"\n with "parameters_filename" the path to the parameters file' + raise Exception(error) +if (len(sys.argv) == 2): + print("Using Internal parameters...") + """ + ----------------- + INPUTS + ----------------- + """ + freqs = [500.] 
+ npixs = [8] + pixsizes = [1] # in lambda/dssp + gainslist = [0.3] + bps = [10] + magnitudes = [11, 12, 13, 14, 15, 16] + RONS = [2, 10] # noises + nKL_Filt = 450 +else: + print("DETECTED BASH SCRIPT") + print(sys.argv) + freqs = [float(sys.argv[2])] # frequency + npixs = [float(sys.argv[3])] # npixs + pixsizes = [float(sys.argv[4])] # pixsizes + gainslist = [float(sys.argv[5])] # Gains + bps = [float(sys.argv[6])] # nb Brightests pixels + magnitudes = [float(sys.argv[7])] # magnitudes + RONS = [float(sys.argv[8])] # noises + nKL_Filt = float(sys.argv[9]) # nb of KL +#$FREQ $NPIX $PIXSIZE $GAIN $BP $MAG $KLFILT + +if (not glob.glob(pathResults)): + print("Results folder not found. Creating it now:") + tools.system("mkdir " + pathResults) +if (not glob.glob(pathResults + "PSFs/")): + print("PSFs folder not found. Creating it now:") + + tools.system("mkdir " + pathResults + "PSFs/") + +#get parameters from file +param_file = sys.argv[1] # par filename +if (param_file.split('.')[-1] == b"py"): + filename = param_file.split('/')[-1] + param_path = param_file.split(filename)[0] + sys.path.insert(0, param_path) + exec("import %s as config" % filename.split(".py")[0]) + #sys.path.remove(param_path) +elif (param_file.split('.')[-1] == b"h5"): + sys.path.insert(0, os.environ["SHESHA_ROOT"] + "/data/par/par4bench/") + import scao_sh_16x16_8pix as config + #sys.path.remove(os.environ["SHESHA_ROOT"]+"/data/par/par4bench/") + h5u.configFromH5(param_file, config) +else: + raise ValueError("Parameter file extension must be .py or .h5") + +print("param_file is", param_file) + +if (hasattr(config, "simul_name")): + if (config.simul_name is None): + simul_name = "" + else: + simul_name = config.simul_name +else: + simul_name = "" +print("simul name is", simul_name) + +matricesToLoad = {} +if (simul_name == b""): + clean = 1 +else: + clean = 0 + param_dict = h5u.params_dictionary(config) + matricesToLoad = h5u.checkMatricesDataBase(os.environ["SHESHA_ROOT"] + "/data/", + config, 
param_dict) +#initialisation: +# context +c = ch.carmaWrap_context(devices=np.array([0, 1, 2, 3], dtype=np.int32)) + +#c.set_active_device(6) + + +def makeFITSHeader(filepath, df): + hdulist = pf.open(filepath) # read file + header = hdulist[0].header + names = np.sort(list(set(df))).tolist() + for name in names: + val = df[name][0] + if (type(val) is list): + value = "" + for v in val: + value += (str(v) + " ") + elif (type(val) is np.ndarray): + value = "" + for v in val: + value += (str(v) + " ") + else: + value = val + header.set(name, value, '') + hdulist.writeto(filepath, clobber=True) # Save changes to file + + +def initSimu(config, c): + # wfs + param_dict = h5u.params_dictionary(config) + matricesToLoad = h5u.checkMatricesDataBase(os.environ["SHESHA_ROOT"] + "/data/", + config, param_dict) + print("->wfs") + wfs, tel = ao.wfs_init(config.p_wfss, config.p_atmos, config.p_tel, config.p_geom, + config.p_target, config.p_loop, config.p_dms) + # atmos + print("->atmos") + atm = ao.atmos_init(c, config.p_atmos, config.p_tel, config.p_geom, config.p_loop, + config.p_wfss, wfs, config.p_target, rank=0, clean=clean, + load=matricesToLoad) + + # dm + print("->dm") + dms = ao.dm_init(config.p_dms, config.p_wfss, wfs, config.p_geom, config.p_tel) + + # target + print("->target") + tar = ao.target_init(c, tel, config.p_target, config.p_atmos, config.p_geom, + config.p_tel, config.p_dms) + + print("->rtc") + # rtc + rtc = ao.rtc_init(tel, wfs, config.p_wfss, dms, config.p_dms, config.p_geom, + config.p_rtc, config.p_atmos, atm, config.p_tel, config.p_loop, + do_refslp=False, clean=clean, simul_name=simul_name, + load=matricesToLoad) + + h5u.validDataBase(os.environ["SHESHA_ROOT"] + "/data/", matricesToLoad) + + print("====================") + print("init done") + print("====================") + print("objects initialzed on GPU:") + print("--------------------------------------------------------") + print(atm) + print(wfs) + print(dms) + print(tar) + print(rtc) + return 
wfs, tel, atm, dms, tar, rtc + + +def loop(n, wfs, tel, atm, dms, tar, rtc): + t0 = time.time() + print("----------------------------------------------------") + print("iter# | S.E. SR | L.E. SR | Est. Rem. | framerate") + print("----------------------------------------------------") + sr_se = [] + sr_le = [] + numiter = [] + for i in range(n): + atm.move_atmos() + + if (config.p_controllers[0].type == b"geo"): + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + rtc.do_control_geo(0, dms, tar, 0) + rtc.apply_control(0) + tar.dmtrace(0, dms) + else: + for t in range(config.p_target.ntargets): + tar.atmos_trace(t, atm, tel) + tar.dmtrace(t, dms) + for w in range(len(config.p_wfss)): + wfs.raytrace(w, "all", tel, atm, dms) + wfs.sensors_compimg(w) + + rtc.do_centroids(0) + rtc.docontrol(0) + rtc.apply_control(0) + + if ((i + 1) % 100 == 0): + print("Iter#:", i + 1) + #for t in range(config.p_target.ntargets): + t = 1 + SR = tar.get_strehl(t) + print("Tar %d at %3.2fMicrons:" % (t + 1, tar.Lambda[t])) + signal_se = "SR S.E: %1.2f " % SR[0] + signal_le = "SR L.E: %1.2f " % SR[1] + + print(signal_se + signal_le) + #print(i+1,"\t",,SR[0],"\t",SR[1]) + sr_le.append(SR[1]) + sr_se.append(SR[0]) + numiter.append(i + 1) + + +# +# plt.pause(0.01) +# plt.scatter(numiter, sr_le, color="green", label="Long Exposure") +# plt.plot(numiter, sr_le, color="green") +# plt.scatter(numiter, sr_se, color="red", label="Short Exposure") +# plt.plot(numiter, sr_se, color="red") + + t1 = time.time() + print(" loop execution time:", t1 - t0, " (", n, "iterations), ", (t1 - t0) / n, + "(mean) ", n / (t1 - t0), "Hz") + SRList = [] + for t in range(config.p_target.ntargets): + SR = tar.get_strehl(t) + SRList.append(SR[1]) # Saving Long Exp SR + return SRList, tar.Lambda.tolist(), sr_le, sr_se, numiter + +mimg = 0. 
# initializing average image + +SR = [] +""" +dictProcess, dictplot = getDataFrameColumns() +colnames = h5u.params_dictionary(config) +resAll = pd.DataFrame( columns=colnames.keys()) # res is the local dataframe for THIS data set +resAll = resAll.append(colnames, ignore_index=True) #Fill dataframe +resAll.srir = None +""" + +colnames = h5u.params_dictionary(config) # config values internal to compass +simunames = { + "PSFFilenames": None, + "srir": None, + "lambdaTarget": None, + "nbBrightest": None, + "sr_le": None, + "sr_se": None, + "numiter": None, + "NklFilt": None, + "NklTot": None, + "Nkl": None, + "eigenvals": None, + "Nphotons": None, + "Nactu": None, + "RON": None, + "Nslopes": None +} # Added values computed by the simu.. + +resAll = db.readDataBase( + fullpath=pathResults + dBResult) # Reads all the database if exists +if (not (type(resAll) == pd.core.frame.DataFrame)): + print("Creating compass database") + resAll = db.createDf(list(colnames.keys()) + list( + simunames.keys())) # Creates the global compass Db + +#res = db.addcolumn(res,simunames) + +#freqs = [100.,300., 500., 1000.] 
+#npixs = [4,6,8] +#pixsizes = [0.5,1,1.5] # in lambda/dssp +#gainslist = [0.1, 0.3, 0.5] +#magnitudes=[11.5,12.5,13.5,14.5] +## + +#res500 = pf.get_data("/home/fvidal/res_500.fits") +#fig = plt.figure(num = 1) +#fig.show() +Nsimutot = len(gainslist) * len(magnitudes) * len(bps) * len(RONS) * len(pixsizes) * len( + npixs) * len(freqs) +NCurrSim = 0 +for freq in freqs: + config.p_loop.set_ittime(1 / freq) + for npix in npixs: + config.p_wfs0.set_npix(npix) + for pixsize in pixsizes: + pxsize = pixsize * config.p_wfs0.Lambda / ( + config.p_tel.diam / config.p_wfs0.nxsub) * 0.206265 + config.p_wfs0.set_pixsize(pxsize) + for gain in gainslist: + config.p_controller0.set_gain(gain) # Change Gain + for bp in bps: + config.p_centroider0.set_nmax(bp) + for RON in RONS: + config.p_wfs0.set_noise(RON) + for magnitude in magnitudes: + NCurrSim += 1 + config.p_wfs0.set_gsmag(magnitude) + res = pd.DataFrame( + columns=list(colnames.keys()) + + list(simunames.keys())) # Create Db for last result + print("Simu #%d/%d" % (NCurrSim, Nsimutot)) + print("Freq = %3.2f Hz" % (1. 
/ config.p_loop.ittime)) + print("npix = %d pixels" % config.p_wfs0.npix) + print("nb of Brightest pixels= %d " % bp) + print("%3.2f'' pixel size " % config.p_wfs0.pixsize) + print("Magnitude = %3.2f" % config.p_wfs0.gsmag) + print("RON = %3.1f e-" % RON) + print("Gain = %3.2f" % config.p_controller0.gain) + + wfs, tel, atm, dms, tar, rtc = initSimu(config, + c) # Init Simu + nfilt = nKL_Filt + cmat = ao.compute_cmatWithKL(rtc, config.p_controllers[0], + dms, config.p_dms, + config.p_geom, config.p_atmos, + config.p_tel, nfilt) + + rtc.set_cmat(0, cmat.copy().astype(np.float32)) + + SR, lambdaTargetList, sr_le, sr_se, numiter = loop( + config.p_loop.niter, wfs, tel, atm, dms, tar, rtc) + dfparams = h5u.params_dictionary( + config) # get the current compass config + dfparams.update(simunames) # Add the simunames params + + res = db.fillDf(res, dfparams) # Saving dictionnary config + res.loc[0, "NklFilt"] = nKL_Filt + res.loc[0, "Nkl"] = cmat.shape[0] - 2 - nKL_Filt + res.loc[0, "NklTot"] = cmat.shape[0] - 2 + res.loc[0, "Nactu"] = cmat.shape[0] + res.loc[0, "Nslopes"] = cmat.shape[1] + res.loc[0, "Nphotons"] = config.p_wfs0._nphotons + res.loc[0, "RON"] = RON + #res.eigenvals.values[0] = rtc.getEigenvals(0) + res.srir.values[0] = SR # Saving computed values + res.lambdaTarget.values[0] = lambdaTargetList + res.loc[0, "gsmag"] = config.p_wfs0.gsmag + res.loc[0, "gain"] = config.p_controller0.gain + res.loc[0, "pixsize"] = config.p_wfs0.pixsize + res.loc[0, "npix"] = config.p_wfs0.npix + res.loc[0, "nbBrightest"] = bp + #res.sr_le.values[0] = sr_le + #res.sr_se.values[0] = sr_se + #res.numiter.values[0] = numiter + res.loc[0, "simulname"] = simulName + print("Saving PSFs...") + PSFNameList = [] + for t in range(config.p_target.ntargets): + PSFtarget = tar.get_image(t, "le") + date = time.strftime("_%d-%m-%Y_%H:%M:%S_") + lam = "%3.2f" % tar.Lambda.tolist()[t] + lam = lam.replace(".", "_") + PSFName = "SH_" + lam + "_" + date + ".fits" + PSFNameList.append(PSFName) + 
#PSFNameList.append("NOT SAVED") + pf.writeto(pathResults + "PSFs/" + PSFName, + PSFtarget.copy(), clobber=True) + lam2 = "%3.2f" % tar.Lambda.tolist()[t] + res.loc[0, "SR_%s" % lam2] = SR[t] + filepath = pathResults + "PSFs/" + PSFName + + #"Add the SR and wavelegth value at the top of the PSF header file" + hdulist = pf.open(filepath) # read file + header = hdulist[0].header + header["SR"] = SR[t] + header["wavelength"] = tar.Lambda.tolist()[t] + hdulist.writeto(filepath, + clobber=True) # Save changes to file + # Adding all the parameters to the header + makeFITSHeader(filepath, res) + print("Done") + res.loc[0, "type_ap"] = str(res.loc[0, "type_ap"][0]) + res.loc[0, "type"] = str(res.loc[0, "type"][0]) + res.loc[0, "type"] = "pzt, tt" + res.PSFFilenames.values[0] = PSFNameList + resAll = db.fillDf(resAll, + res) # Saving res in global resAll DB + #resAll.to_hdf("/home/fvidal/compass/trunk/shesha/test/scripts/resultatsScripts/SH39m.h5", "resAll", complevel=9,complib='blosc') + resAll.to_hdf(pathResults + dBResult, "resAll", complevel=9, + complib='blosc') + #db.saveDataBase(resAll) + +print("Simulation Done...") +""" +Sauver PSF dans le bon nom + directory + ranger... 
+ params dans le header + +""" diff --git a/tests/old_scripts/test_rtc_standalone.py b/tests/old_scripts/test_rtc_standalone.py new file mode 100644 index 0000000..d3575fe --- /dev/null +++ b/tests/old_scripts/test_rtc_standalone.py @@ -0,0 +1,79 @@ +"""script to test rtc_standalone feature + +Usage: + test_rtc_standalone [options] + +with 'parameters_filename' the path to the parameters file + +Options: + -h --help Show this help message and exit + -d, --devices devices Specify the devices +""" + +from docopt import docopt +from tqdm import tqdm + +from carmaWrap import threadSync +import numpy as np +import shesha.sim +import shesha.init +import time + +arguments = docopt(__doc__) +param_file = arguments[""] + +# Get parameters from file +sim = shesha_sim.Simulator(param_file) + +if arguments["--devices"]: + devices = [] + for k in range(len(arguments["--devices"])): + devices.append(int(arguments["--devices"][k])) + sim.config.p_loop.set_devices(devices) + +sim.init_sim() +nactu = sim.config.p_controller0.nactu +nvalid = sim.config.p_controller0.nvalid +rtc_standalone = shesha_init.rtc_standalone( + sim.c, len(sim.config.p_wfss), nvalid, nactu, sim.config.p_centroider0.type, + sim.config.p_controller0.delay, sim.config.p_wfs0.npix // 2 - 0.5, + sim.config.p_wfs0.pixsize) +rtc_standalone.set_cmat(0, sim.rtc.get_cmat(0)) +rtc_standalone.set_decayFactor(0, np.ones(nactu, dtype=np.float32)) +rtc_standalone.set_matE(0, np.identity(nactu, dtype=np.float32)) +rtc_standalone.set_modal_gains( + 0, + np.ones(nactu, dtype=np.float32) * sim.config.p_controller0.gain) + +s_ref = np.zeros((sim.config.p_loop.niter, 2 * nvalid.sum()), dtype=np.float32) +s = s_ref.copy() +c = np.zeros((sim.config.p_loop.niter, nactu), dtype=np.float32) +c_ref = c.copy() +img = sim.wfs.get_binimg(0) +img = np.zeros((sim.config.p_loop.niter, img.shape[0], img.shape[1]), dtype=np.float32) + +for k in tqdm(range(sim.config.p_loop.niter)): + sim.next() + img[k, :, :] = sim.wfs.get_binimg(0) + 
s_ref[k, :] = sim.rtc.get_centroids(0) + c_ref[k, :] = sim.rtc.get_com(0) + +rtc_standalone.load_rtc_validpos(0, sim.config.p_wfs0._validsubsx, + sim.config.p_wfs0._validsubsy) +rtc_standalone.set_open_loop(0, 1) +rtc_standalone.set_open_loop(0, 0) + +rtc_time = 0 +for k in tqdm(range(sim.config.p_loop.niter)): + rtc_standalone.load_rtc_img(0, img[k, :, :].copy()) + a = time.time() + rtc_standalone.fill_rtc_bincube(0, sim.config.p_wfs0.npix) + rtc_standalone.do_centroids(0) + rtc_standalone.do_control(0) + rtc_standalone.save_com(0) + threadSync() + rtc_time += time.time() - a + s[k, :] = rtc_standalone.get_centroids(0) + c[k, :] = rtc_standalone.get_com(0) + +print("RTC speed : ", 1 / (rtc_time / sim.config.p_loop.niter), " fps") diff --git a/tests/old_scripts/test_rtc_standalone_pyr.py b/tests/old_scripts/test_rtc_standalone_pyr.py new file mode 100644 index 0000000..14323fd --- /dev/null +++ b/tests/old_scripts/test_rtc_standalone_pyr.py @@ -0,0 +1,89 @@ +"""script to test rtc_standalone feature + +Usage: + test_rtc_standalone [options] + +with 'parameters_filename' the path to the parameters file + +Options: + -h --help Show this help message and exit + -d, --devices devices Specify the devices +""" + +from docopt import docopt +from tqdm import tqdm + +from carmaWrap import threadSync +import numpy as np +import shesha.sim +import shesha.init +from shesha.constants import CONST +import time +import matplotlib.pyplot as plt +plt.ion() + +arguments = docopt(__doc__) +param_file = arguments[""] + +# Get parameters from file +sim = shesha_sim.Simulator(param_file) +sim.config.p_loop.set_niter(100) + +if arguments["--devices"]: + devices = [] + for k in range(len(arguments["--devices"])): + devices.append(int(arguments["--devices"][k])) + sim.config.p_loop.set_devices(devices) + +sim.init_sim() +nactu = sim.config.p_controller0.nactu +nvalid = sim.config.p_controller0.nvalid +offset = 0 +p_wfs = sim.config.p_wfs0 +p_centroider = sim.config.p_centroider0 +scale = 
(p_wfs.Lambda * 1e-6 / sim.config.p_tel.diam) * \ + p_wfs.pyr_ampl * CONST.RAD2ARCSEC + +rtc_standalone = shesha_init.rtc_standalone(sim.c, len(sim.config.p_wfss), nvalid, nactu, + sim.config.p_centroider0.type, + sim.config.p_controller0.delay, offset, + scale) +rtc_standalone.set_cmat(0, sim.rtc.get_cmat(0)) +rtc_standalone.set_decayFactor(0, np.ones(nactu, dtype=np.float32)) +rtc_standalone.set_matE(0, np.identity(nactu, dtype=np.float32)) +rtc_standalone.set_modal_gains( + 0, + np.ones(nactu, dtype=np.float32) * sim.config.p_controller0.gain) + +s_ref = np.zeros((sim.config.p_loop.niter, 2 * nvalid.sum()), dtype=np.float32) +s = s_ref.copy() +c = np.zeros((sim.config.p_loop.niter, nactu), dtype=np.float32) +c_ref = c.copy() +img = sim.wfs.get_pyrimg(0) +img = np.zeros((sim.config.p_loop.niter, img.shape[0], img.shape[1]), dtype=np.float32) + +for k in tqdm(range(sim.config.p_loop.niter)): + sim.next() + img[k, :, :] = sim.wfs.get_pyrimg(0) + s_ref[k, :] = sim.rtc.get_centroids(0) + c_ref[k, :] = sim.rtc.get_com(0) + +rtc_standalone.load_rtc_validpos(0, sim.config.p_wfs0._validsubsy, + sim.config.p_wfs0._validsubsx) +rtc_standalone.set_open_loop(0, 1) +rtc_standalone.set_open_loop(0, 0) + +rtc_time = 0 +for k in tqdm(range(sim.config.p_loop.niter)): + rtc_standalone.load_rtc_pyrimg(0, img[k, :, :].copy()) + a = time.time() + rtc_standalone.do_centroids(0) + threadSync() + rtc_standalone.do_control(0) + threadSync() + rtc_standalone.save_com(0) + rtc_time += (time.time() - a) + s[k, :] = rtc_standalone.get_centroids(0) + c[k, :] = rtc_standalone.get_com(0) + +print("RTC speed : ", 1 / (rtc_time / sim.config.p_loop.niter), " fps") diff --git a/tests/pytest/rtc/FP32/test_rtcFFF_pyramid.py b/tests/pytest/rtc/FP32/test_rtcFFF_pyramid.py deleted file mode 100644 index bbdd52d..0000000 --- a/tests/pytest/rtc/FP32/test_rtcFFF_pyramid.py +++ /dev/null @@ -1,60 +0,0 @@ -import numpy as np -import naga as ng -import os -from shesha.sutra_wrap import Rtc_FFF as Rtc -from 
shesha.supervisor.compassSupervisor import CompassSupervisor as Supervisor -from scipy.ndimage.measurements import center_of_mass -from shesha.util.utilities import load_config_from_file - -precision = 1e-2 - -config = load_config_from_file(os.getenv("COMPASS_ROOT") + - "/shesha/tests/pytest/par/test_pyrhr.py") -sup = Supervisor(config) -sup.next() -sup.rtc.open_loop(0) -sup.rtc.close_loop(0) -sup.rtc.do_control(0) -rtc = Rtc() -rtc.add_centroider(sup.context, sup.config.p_wfs0._nvalid, 0, sup.config.p_wfs0.pixsize, - False, 0, "maskedpix") -rtc.add_controller(sup.context, sup.config.p_wfs0._nvalid, - sup.config.p_controller0.nslope, sup.config.p_controller0.nactu, - sup.config.p_controller0.delay, 0, "generic", idx_centro=np.zeros(1), ncentro=1) -centro = rtc.d_centro[0] -control = rtc.d_control[0] -rtc.d_centro[0].set_npix(sup.config.p_wfs0.npix) -xvalid = np.array(sup.rtc._rtc.d_centro[0].d_validx) -yvalid = np.array(sup.rtc._rtc.d_centro[0].d_validy) -rtc.d_centro[0].load_validpos(xvalid, yvalid, xvalid.size) -cmat = sup.rtc.get_command_matrix(0) -rtc.d_control[0].set_cmat(cmat) -rtc.d_control[0].set_gain(sup.config.p_controller0.gain) -frame = sup.wfs.get_wfs_image(0) -frame /= frame.max() -rtc.d_centro[0].load_img(frame, frame.shape[0]) -rtc.d_centro[0].calibrate_img() - -rtc.do_centroids(0) -slp = ng.array(rtc.d_control[0].d_centroids) -rtc.do_control(0) -com = ng.array(rtc.d_control[0].d_com) - -dark = np.random.random(frame.shape) -flat = np.random.random(frame.shape) -centro.set_dark(dark, frame.shape[0]) -centro.set_flat(flat, frame.shape[0]) - - -def relative_array_error(array1, array2): - return np.abs((array1 - array2) / array2.max()).max() - - -def test_doCentroids_maskedPix(): - binimg = np.array(centro.d_img) - slopes = np.zeros(xvalid.size) - psum = binimg[xvalid, yvalid].sum() / slopes.size - for k in range(slopes.size): - slopes[k] = binimg[xvalid[k], yvalid[k]] / psum - assert (relative_array_error(ng.array(control.d_centroids).toarray(), 
slopes) < - precision) diff --git a/tests/pytest/rtc/FP32/test_rtcFFF.py b/tests/pytest/rtc/test_rtcFFF.py similarity index 61% rename from tests/pytest/rtc/FP32/test_rtcFFF.py rename to tests/pytest/rtc/test_rtcFFF.py index 372fe50..b1644ea 100644 --- a/tests/pytest/rtc/FP32/test_rtcFFF.py +++ b/tests/pytest/rtc/test_rtcFFF.py @@ -1,14 +1,51 @@ +## @package shesha.tests +## @brief Tests the RTC module +## @author COMPASS Team +## @version 5.1.0 +## @date 2020/05/18 +## @copyright GNU Lesser General Public License +# +# This file is part of COMPASS +# +# Copyright (C) 2011-2019 COMPASS Team +# All rights reserved. +# Distributed under GNU - LGPL +# +# COMPASS is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser +# General Public License as published by the Free Software Foundation, either version 3 of the License, +# or any later version. +# +# COMPASS: End-to-end AO simulation tool using GPU acceleration +# The COMPASS platform was designed to meet the need of high-performance for the simulation of AO systems. +# +# The final product includes a software package for simulating all the critical subcomponents of AO, +# particularly in the context of the ELT and a real-time core based on several control approaches, +# with performances consistent with its integration into an instrument. Taking advantage of the specific +# hardware architecture of the GPU, the COMPASS tool allows to achieve adequate execution speeds to +# conduct large simulation campaigns called to the ELT. +# +# The COMPASS platform can be used to carry a wide variety of simulations to both testspecific components +# of AO of the E-ELT (such as wavefront analysis device with a pyramid or elongated Laser star), and +# various systems configurations such as multi-conjugate AO. +# +# COMPASS is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the +# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
+# See the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License along with COMPASS. +# If not, see . + import numpy as np import naga as ng import os from shesha.sutra_wrap import Rtc_FFF as Rtc from shesha.supervisor.compassSupervisor import CompassSupervisor as Supervisor from scipy.ndimage.measurements import center_of_mass -from shesha.util.utilities import load_config_from_file +from shesha.config import ParamConfig precision = 1e-2 -config = load_config_from_file(os.getenv("COMPASS_ROOT") + +config = ParamConfig(os.getenv("COMPASS_ROOT") + "/shesha/tests/pytest/par/test_sh.py") sup = Supervisor(config) @@ -17,22 +54,22 @@ sup.rtc.close_loop(0) sup.rtc.do_control(0) rtc = Rtc() -rtc.add_centroider(sup.context, sup.config.p_wfs0._nvalid, - sup.config.p_wfs0.npix / 2 - 0.5, sup.config.p_wfs0.pixsize, False, 0, +rtc.add_centroider(sup.context, sup.config.p_wfss[0]._nvalid, + sup.config.p_wfss[0].npix / 2 - 0.5, sup.config.p_wfss[0].pixsize, False, 0, "cog") -rtc.add_controller(sup.context, sup.config.p_wfs0._nvalid, - sup.config.p_wfs0._nvalid * 2, sup.config.p_controller0.nactu, - sup.config.p_controller0.delay, 0, "generic", idx_centro=np.zeros(1), +rtc.add_controller(sup.context, sup.config.p_wfss[0]._nvalid, + sup.config.p_wfss[0]._nvalid * 2, sup.config.p_controllers[0].nactu, + sup.config.p_controllers[0].delay, 0, "generic", idx_centro=np.zeros(1), ncentro=1) centro = rtc.d_centro[0] control = rtc.d_control[0] -rtc.d_centro[0].set_npix(sup.config.p_wfs0.npix) +rtc.d_centro[0].set_npix(sup.config.p_wfss[0].npix) xvalid = np.array(sup.rtc._rtc.d_centro[0].d_validx) yvalid = np.array(sup.rtc._rtc.d_centro[0].d_validy) rtc.d_centro[0].load_validpos(xvalid, yvalid, xvalid.size) cmat = sup.rtc.get_command_matrix(0) rtc.d_control[0].set_cmat(cmat) -rtc.d_control[0].set_gain(sup.config.p_controller0.gain) +rtc.d_control[0].set_gain(sup.config.p_controllers[0].gain) frame = 
sup.wfs.get_wfs_image(0) frame /= frame.max() rtc.d_centro[0].load_img(frame, frame.shape[0]) @@ -54,15 +91,15 @@ def relative_array_error(array1, array2): def test_initCentro_nvalid(): - assert (centro.nvalid - sup.config.p_wfs0._nvalid < precision) + assert (centro.nvalid - sup.config.p_wfss[0]._nvalid < precision) def test_initCentro_offset(): - assert (centro.offset - (sup.config.p_wfs0.npix / 2 - 0.5) < precision) + assert (centro.offset - (sup.config.p_wfss[0].npix / 2 - 0.5) < precision) def test_initCentro_scale(): - assert (centro.scale - sup.config.p_wfs0.pixsize < precision) + assert (centro.scale - sup.config.p_wfss[0].pixsize < precision) def test_initCentro_type(): @@ -70,11 +107,11 @@ def test_initCentro_type(): def test_initControl_nslope(): - assert (control.nslope - sup.config.p_wfs0._nvalid * 2 < precision) + assert (control.nslope - sup.config.p_wfss[0]._nvalid * 2 < precision) def test_initControl_nactu(): - assert (control.nactu - sup.config.p_controller0.nactu < precision) + assert (control.nactu - sup.config.p_controllers[0].nactu < precision) def test_initControl_type(): @@ -82,11 +119,11 @@ def test_initControl_type(): def test_initControl_delay(): - assert (control.delay - sup.config.p_controller0.delay < precision) + assert (control.delay - sup.config.p_controllers[0].delay < precision) def test_set_npix(): - assert (centro.npix - sup.config.p_wfs0.npix < precision) + assert (centro.npix - sup.config.p_wfss[0].npix < precision) def test_load_validposX(): @@ -102,7 +139,7 @@ def test_set_cmat(): def test_set_gain(): - assert (control.gain - sup.config.p_controller0.gain < precision) + assert (control.gain - sup.config.p_controllers[0].gain < precision) def test_load_img(): @@ -125,13 +162,13 @@ def test_calibrate_img(): def test_doCentroids_cog(): bincube = np.array(sup.wfs._wfs.d_wfs[0].d_bincube) - slopes = np.zeros(sup.config.p_wfs0._nvalid * 2) + slopes = np.zeros(sup.config.p_wfss[0]._nvalid * 2) offset = centro.offset scale = 
centro.scale - for k in range(sup.config.p_wfs0._nvalid): + for k in range(sup.config.p_wfss[0]._nvalid): tmp = center_of_mass(bincube[:, :, k]) slopes[k] = (tmp[0] - offset) * scale - slopes[k + sup.config.p_wfs0._nvalid] = (tmp[1] - offset) * scale + slopes[k + sup.config.p_wfss[0]._nvalid] = (tmp[1] - offset) * scale assert (relative_array_error(np.array(control.d_centroids), slopes) < precision) @@ -150,7 +187,7 @@ def test_set_comRange(): def test_clipping(): control.set_comRange(-1, 1) - C = (np.random.random(sup.config.p_controller0.nactu) - 0.5) * 4 + C = (np.random.random(sup.config.p_controllers[0].nactu) - 0.5) * 4 control.set_com(C, C.size) rtc.do_clipping(0) C_clipped = C.copy() @@ -161,7 +198,7 @@ def test_clipping(): def test_add_perturb_voltage(): - C = np.random.random(sup.config.p_controller0.nactu) + C = np.random.random(sup.config.p_controllers[0].nactu) control.add_perturb_voltage("test", C, 1) assert (relative_array_error( ng.array(control.d_perturb_map["test"][0]).toarray(), C) < precision) @@ -173,7 +210,7 @@ def test_remove_perturb_voltage(): def test_add_perturb(): - C = np.random.random(sup.config.p_controller0.nactu) + C = np.random.random(sup.config.p_controllers[0].nactu) control.add_perturb_voltage("test", C, 1) com = ng.array(control.d_com_clipped).toarray() control.add_perturb() @@ -207,13 +244,13 @@ def test_comp_voltage(): volt_max = 1 control.set_comRange(volt_min, volt_max) control.comp_voltage() - C = np.random.random(sup.config.p_controller0.nactu) + C = np.random.random(sup.config.p_controllers[0].nactu) control.add_perturb_voltage("test", C, 1) control.set_com(C, C.size) com0 = ng.array(control.d_circularComs0).toarray() com1 = ng.array(control.d_circularComs1).toarray() control.comp_voltage() - delay = sup.config.p_controller0.delay + delay = sup.config.p_controllers[0].delay a = delay - int(delay) b = 1 - a commands = a * com0 + b * com1 @@ -230,57 +267,57 @@ def test_remove_centroider(): def test_doCentroids_tcog(): - 
rtc.add_centroider(sup.context, sup.config.p_wfs0._nvalid, - sup.config.p_wfs0.npix / 2 - 0.5, sup.config.p_wfs0.pixsize, + rtc.add_centroider(sup.context, sup.config.p_wfss[0]._nvalid, + sup.config.p_wfss[0].npix / 2 - 0.5, sup.config.p_wfss[0].pixsize, False, 0, "tcog") centro = rtc.d_centro[-1] threshold = 0.1 centro.set_threshold(threshold) - centro.set_npix(sup.config.p_wfs0.npix) + centro.set_npix(sup.config.p_wfss[0].npix) centro.load_validpos(xvalid, yvalid, xvalid.size) centro.load_img(frame, frame.shape[0]) centro.calibrate_img() rtc.do_centroids(0) bincube = np.array(sup.wfs._wfs.d_wfs[0].d_bincube) bincube /= bincube.max() - slopes = np.zeros(sup.config.p_wfs0._nvalid * 2) + slopes = np.zeros(sup.config.p_wfss[0]._nvalid * 2) offset = centro.offset scale = centro.scale bincube = bincube - threshold bincube[np.where(bincube < 0)] = 0 - for k in range(sup.config.p_wfs0._nvalid): + for k in range(sup.config.p_wfss[0]._nvalid): tmp = center_of_mass(bincube[:, :, k]) slopes[k] = (tmp[0] - offset) * scale - slopes[k + sup.config.p_wfs0._nvalid] = (tmp[1] - offset) * scale + slopes[k + sup.config.p_wfss[0]._nvalid] = (tmp[1] - offset) * scale assert (relative_array_error(np.array(control.d_centroids), slopes) < precision) def test_doCentroids_bpcog(): rtc.remove_centroider(0) - rtc.add_centroider(sup.context, sup.config.p_wfs0._nvalid, - sup.config.p_wfs0.npix / 2 - 0.5, sup.config.p_wfs0.pixsize, + rtc.add_centroider(sup.context, sup.config.p_wfss[0]._nvalid, + sup.config.p_wfss[0].npix / 2 - 0.5, sup.config.p_wfss[0].pixsize, False, 0, "bpcog") centro = rtc.d_centro[-1] bpix = 8 centro.set_nmax(8) - centro.set_npix(sup.config.p_wfs0.npix) + centro.set_npix(sup.config.p_wfss[0].npix) centro.load_validpos(xvalid, yvalid, xvalid.size) centro.load_img(frame, frame.shape[0]) centro.calibrate_img() rtc.do_centroids(0) bincube = np.array(sup.wfs._wfs.d_wfs[0].d_bincube) bincube /= bincube.max() - slopes = np.zeros(sup.config.p_wfs0._nvalid * 2) + slopes = 
np.zeros(sup.config.p_wfss[0]._nvalid * 2) offset = centro.offset scale = centro.scale - for k in range(sup.config.p_wfs0._nvalid): + for k in range(sup.config.p_wfss[0]._nvalid): imagette = bincube[:, :, k] threshold = np.sort(imagette, axis=None)[-(bpix + 1)] imagette -= threshold imagette[np.where(imagette < 0)] = 0 tmp = center_of_mass(imagette) slopes[k] = (tmp[0] - offset) * scale - slopes[k + sup.config.p_wfs0._nvalid] = (tmp[1] - offset) * scale + slopes[k + sup.config.p_wfss[0]._nvalid] = (tmp[1] - offset) * scale assert (relative_array_error(np.array(control.d_centroids), slopes) < precision) diff --git a/tests/pytest/rtc/test_rtcFFF_pyramid.py b/tests/pytest/rtc/test_rtcFFF_pyramid.py new file mode 100644 index 0000000..1ce2d19 --- /dev/null +++ b/tests/pytest/rtc/test_rtcFFF_pyramid.py @@ -0,0 +1,97 @@ +## @package shesha.tests +## @brief Tests the RTC module +## @author COMPASS Team +## @version 5.1.0 +## @date 2020/05/18 +## @copyright GNU Lesser General Public License +# +# This file is part of COMPASS +# +# Copyright (C) 2011-2019 COMPASS Team +# All rights reserved. +# Distributed under GNU - LGPL +# +# COMPASS is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser +# General Public License as published by the Free Software Foundation, either version 3 of the License, +# or any later version. +# +# COMPASS: End-to-end AO simulation tool using GPU acceleration +# The COMPASS platform was designed to meet the need of high-performance for the simulation of AO systems. +# +# The final product includes a software package for simulating all the critical subcomponents of AO, +# particularly in the context of the ELT and a real-time core based on several control approaches, +# with performances consistent with its integration into an instrument. 
Taking advantage of the specific +# hardware architecture of the GPU, the COMPASS tool allows to achieve adequate execution speeds to +# conduct large simulation campaigns called to the ELT. +# +# The COMPASS platform can be used to carry a wide variety of simulations to both testspecific components +# of AO of the E-ELT (such as wavefront analysis device with a pyramid or elongated Laser star), and +# various systems configurations such as multi-conjugate AO. +# +# COMPASS is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the +# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License along with COMPASS. +# If not, see . + +import numpy as np +import naga as ng +import os +from shesha.sutra_wrap import Rtc_FFF as Rtc +from shesha.supervisor.compassSupervisor import CompassSupervisor as Supervisor +from scipy.ndimage.measurements import center_of_mass +from shesha.config import ParamConfig + +precision = 1e-2 + +config = ParamConfig(os.getenv("COMPASS_ROOT") + + "/shesha/tests/pytest/par/test_pyrhr.py") +sup = Supervisor(config) +sup.next() +sup.rtc.open_loop(0) +sup.rtc.close_loop(0) +sup.rtc.do_control(0) +rtc = Rtc() +rtc.add_centroider(sup.context, sup.config.p_wfss[0]._nvalid * sup.config.p_wfss[0].nPupils, 0, sup.config.p_wfss[0].pixsize, + False, 0, "maskedpix") +rtc.add_controller(sup.context, sup.config.p_wfss[0]._nvalid, + sup.config.p_controllers[0].nslope, sup.config.p_controllers[0].nactu, + sup.config.p_controllers[0].delay, 0, "generic", idx_centro=np.zeros(1), ncentro=1) +centro = rtc.d_centro[0] +control = rtc.d_control[0] +rtc.d_centro[0].set_npix(sup.config.p_wfss[0].npix) +xvalid = np.array(sup.rtc._rtc.d_centro[0].d_validx) +yvalid = np.array(sup.rtc._rtc.d_centro[0].d_validy) +rtc.d_centro[0].load_validpos(xvalid, yvalid, xvalid.size) +cmat = 
sup.rtc.get_command_matrix(0) +rtc.d_control[0].set_cmat(cmat) +rtc.d_control[0].set_gain(sup.config.p_controllers[0].gain) +frame = sup.wfs.get_wfs_image(0) +frame /= frame.max() +rtc.d_centro[0].load_img(frame, frame.shape[0]) +rtc.d_centro[0].calibrate_img() + +rtc.do_centroids(0) +slp = ng.array(rtc.d_control[0].d_centroids) +rtc.do_control(0) +com = ng.array(rtc.d_control[0].d_com) + +dark = np.random.random(frame.shape) +flat = np.random.random(frame.shape) +centro.set_dark(dark, frame.shape[0]) +centro.set_flat(flat, frame.shape[0]) + + +def relative_array_error(array1, array2): + return np.abs((array1 - array2) / array2.max()).max() + + +def test_doCentroids_maskedPix(): + binimg = np.array(centro.d_img) + slopes = np.zeros(xvalid.size) + psum = binimg[xvalid, yvalid].sum() / slopes.size + for k in range(slopes.size): + slopes[k] = binimg[xvalid[k], yvalid[k]] / psum - 1 # -1 for ref slopes + assert (relative_array_error(ng.array(control.d_centroids).toarray(), slopes) < + precision) diff --git a/tests/pytest/rtc/FP32/test_rtcFFU.py b/tests/pytest/rtc/test_rtcFFU.py similarity index 61% rename from tests/pytest/rtc/FP32/test_rtcFFU.py rename to tests/pytest/rtc/test_rtcFFU.py index 6903748..d23bbee 100644 --- a/tests/pytest/rtc/FP32/test_rtcFFU.py +++ b/tests/pytest/rtc/test_rtcFFU.py @@ -1,14 +1,51 @@ +## @package shesha.tests +## @brief Tests the RTC module +## @author COMPASS Team +## @version 5.1.0 +## @date 2020/05/18 +## @copyright GNU Lesser General Public License +# +# This file is part of COMPASS +# +# Copyright (C) 2011-2019 COMPASS Team +# All rights reserved. +# Distributed under GNU - LGPL +# +# COMPASS is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser +# General Public License as published by the Free Software Foundation, either version 3 of the License, +# or any later version. 
+# +# COMPASS: End-to-end AO simulation tool using GPU acceleration +# The COMPASS platform was designed to meet the need of high-performance for the simulation of AO systems. +# +# The final product includes a software package for simulating all the critical subcomponents of AO, +# particularly in the context of the ELT and a real-time core based on several control approaches, +# with performances consistent with its integration into an instrument. Taking advantage of the specific +# hardware architecture of the GPU, the COMPASS tool allows to achieve adequate execution speeds to +# conduct large simulation campaigns called to the ELT. +# +# The COMPASS platform can be used to carry a wide variety of simulations to both testspecific components +# of AO of the E-ELT (such as wavefront analysis device with a pyramid or elongated Laser star), and +# various systems configurations such as multi-conjugate AO. +# +# COMPASS is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the +# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License along with COMPASS. +# If not, see . 
+ import numpy as np import naga as ng import os from shesha.sutra_wrap import Rtc_FFU as Rtc from shesha.supervisor.compassSupervisor import CompassSupervisor as Supervisor from scipy.ndimage.measurements import center_of_mass -from shesha.util.utilities import load_config_from_file +from shesha.config import ParamConfig precision = 1e-2 -config = load_config_from_file(os.getenv("COMPASS_ROOT") + +config = ParamConfig(os.getenv("COMPASS_ROOT") + "/shesha/tests/pytest/par/test_sh.py") config.p_dms[0].push4imat = 0.5 config.p_dms[1].unitpervolt = 500 @@ -16,29 +53,29 @@ config.p_dms[0].unitpervolt = 500 sup = Supervisor(config) sup.wfs._wfs.d_wfs[0].set_fakecam(True) -sup.wfs._wfs.d_wfs[0].set_max_flux_per_pix(int(sup.config.p_wfs0._nphotons // 2)) +sup.wfs._wfs.d_wfs[0].set_max_flux_per_pix(int(sup.config.p_wfss[0]._nphotons // 2)) sup.wfs._wfs.d_wfs[0].set_max_pix_value(2**16 - 1) sup.next() sup.rtc.open_loop(0) sup.rtc.close_loop(0) sup.rtc.do_control(0) rtc = Rtc() -rtc.add_centroider(sup.context, sup.config.p_wfs0._nvalid, - sup.config.p_wfs0.npix / 2 - 0.5, sup.config.p_wfs0.pixsize, False, 0, +rtc.add_centroider(sup.context, sup.config.p_wfss[0]._nvalid, + sup.config.p_wfss[0].npix / 2 - 0.5, sup.config.p_wfss[0].pixsize, False, 0, "cog") -rtc.add_controller(sup.context, sup.config.p_wfs0._nvalid, - sup.config.p_wfs0._nvalid * 2, sup.config.p_controller0.nactu, - sup.config.p_controller0.delay, 0, "generic", idx_centro=np.zeros(1), +rtc.add_controller(sup.context, sup.config.p_wfss[0]._nvalid, + sup.config.p_wfss[0]._nvalid * 2, sup.config.p_controllers[0].nactu, + sup.config.p_controllers[0].delay, 0, "generic", idx_centro=np.zeros(1), ncentro=1) centro = rtc.d_centro[0] control = rtc.d_control[0] -rtc.d_centro[0].set_npix(sup.config.p_wfs0.npix) +rtc.d_centro[0].set_npix(sup.config.p_wfss[0].npix) xvalid = np.array(sup.rtc._rtc.d_centro[0].d_validx) yvalid = np.array(sup.rtc._rtc.d_centro[0].d_validy) rtc.d_centro[0].load_validpos(xvalid, yvalid, 
xvalid.size) cmat = sup.rtc.get_command_matrix(0) rtc.d_control[0].set_cmat(cmat) -rtc.d_control[0].set_gain(sup.config.p_controller0.gain) +rtc.d_control[0].set_gain(sup.config.p_controllers[0].gain) frame = np.array(sup.wfs._wfs.d_wfs[0].d_camimg) rtc.d_centro[0].load_img(frame, frame.shape[0]) rtc.d_centro[0].calibrate_img() @@ -59,15 +96,15 @@ def relative_array_error(array1, array2): def test_initCentro_nvalid(): - assert (centro.nvalid - sup.config.p_wfs0._nvalid < precision) + assert (centro.nvalid - sup.config.p_wfss[0]._nvalid < precision) def test_initCentro_offset(): - assert (centro.offset - (sup.config.p_wfs0.npix / 2 - 0.5) < precision) + assert (centro.offset - (sup.config.p_wfss[0].npix / 2 - 0.5) < precision) def test_initCentro_scale(): - assert (centro.scale - sup.config.p_wfs0.pixsize < precision) + assert (centro.scale - sup.config.p_wfss[0].pixsize < precision) def test_initCentro_type(): @@ -75,11 +112,11 @@ def test_initCentro_type(): def test_initControl_nslope(): - assert (control.nslope - sup.config.p_wfs0._nvalid * 2 < precision) + assert (control.nslope - sup.config.p_wfss[0]._nvalid * 2 < precision) def test_initControl_nactu(): - assert (control.nactu - sup.config.p_controller0.nactu < precision) + assert (control.nactu - sup.config.p_controllers[0].nactu < precision) def test_initControl_type(): @@ -87,11 +124,11 @@ def test_initControl_type(): def test_initControl_delay(): - assert (control.delay - sup.config.p_controller0.delay < precision) + assert (control.delay - sup.config.p_controllers[0].delay < precision) def test_set_npix(): - assert (centro.npix - sup.config.p_wfs0.npix < precision) + assert (centro.npix - sup.config.p_wfss[0].npix < precision) def test_load_validposX(): @@ -107,7 +144,7 @@ def test_set_cmat(): def test_set_gain(): - assert (control.gain - sup.config.p_controller0.gain < precision) + assert (control.gain - sup.config.p_controllers[0].gain < precision) def test_load_img(): @@ -130,13 +167,13 @@ def 
test_calibrate_img(): def test_doCentroids_cog(): bincube = np.array(sup.wfs._wfs.d_wfs[0].d_bincube) - slopes = np.zeros(sup.config.p_wfs0._nvalid * 2) + slopes = np.zeros(sup.config.p_wfss[0]._nvalid * 2) offset = centro.offset scale = centro.scale - for k in range(sup.config.p_wfs0._nvalid): + for k in range(sup.config.p_wfss[0]._nvalid): tmp = center_of_mass(bincube[:, :, k]) slopes[k] = (tmp[0] - offset) * scale - slopes[k + sup.config.p_wfs0._nvalid] = (tmp[1] - offset) * scale + slopes[k + sup.config.p_wfss[0]._nvalid] = (tmp[1] - offset) * scale assert (relative_array_error(ng.array(control.d_centroids).toarray(), slopes) < precision) @@ -157,7 +194,7 @@ def test_set_comRange(): def test_clipping(): control.set_comRange(-1, 1) - C = (np.random.random(sup.config.p_controller0.nactu) - 0.5) * 4 + C = (np.random.random(sup.config.p_controllers[0].nactu) - 0.5) * 4 control.set_com(C, C.size) rtc.do_clipping(0) C_clipped = C.copy() @@ -168,7 +205,7 @@ def test_clipping(): def test_add_perturb_voltage(): - C = np.random.random(sup.config.p_controller0.nactu) + C = np.random.random(sup.config.p_controllers[0].nactu) control.add_perturb_voltage("test", C, 1) assert (relative_array_error( ng.array(control.d_perturb_map["test"][0]).toarray(), C) < precision) @@ -180,7 +217,7 @@ def test_remove_perturb_voltage(): def test_add_perturb(): - C = np.random.random(sup.config.p_controller0.nactu) + C = np.random.random(sup.config.p_controllers[0].nactu) control.add_perturb_voltage("test", C, 1) com = ng.array(control.d_com_clipped).toarray() control.add_perturb() @@ -214,13 +251,13 @@ def test_comp_voltage(): volt_max = 1 control.set_comRange(volt_min, volt_max) control.comp_voltage() - C = np.random.random(sup.config.p_controller0.nactu) + C = np.random.random(sup.config.p_controllers[0].nactu) control.add_perturb_voltage("test", C, 1) control.set_com(C, C.size) com0 = ng.array(control.d_circularComs0).toarray() com1 = ng.array(control.d_circularComs1).toarray() 
control.comp_voltage() - delay = sup.config.p_controller0.delay + delay = sup.config.p_controllers[0].delay a = delay - int(delay) b = 1 - a commands = a * com0 + b * com1 @@ -239,64 +276,64 @@ def test_remove_centroider(): def test_doCentroids_tcog(): - rtc.add_centroider(sup.context, sup.config.p_wfs0._nvalid, - sup.config.p_wfs0.npix / 2 - 0.5, sup.config.p_wfs0.pixsize, + rtc.add_centroider(sup.context, sup.config.p_wfss[0]._nvalid, + sup.config.p_wfss[0].npix / 2 - 0.5, sup.config.p_wfss[0].pixsize, False, 0, "tcog") centro = rtc.d_centro[-1] threshold = 500 centro.set_threshold(threshold) - centro.set_npix(sup.config.p_wfs0.npix) + centro.set_npix(sup.config.p_wfss[0].npix) centro.load_validpos(xvalid, yvalid, xvalid.size) centro.load_img(frame, frame.shape[0]) centro.calibrate_img() rtc.do_centroids(0) - slopes = np.zeros(sup.config.p_wfs0._nvalid * 2) + slopes = np.zeros(sup.config.p_wfss[0]._nvalid * 2) offset = centro.offset scale = centro.scale - vx = sup.config.p_wfs0._validsubsx - vy = sup.config.p_wfs0._validsubsy - npix = sup.config.p_wfs0.npix - for k in range(sup.config.p_wfs0._nvalid): + vx = sup.config.p_wfss[0]._validsubsx + vy = sup.config.p_wfss[0]._validsubsy + npix = sup.config.p_wfss[0].npix + for k in range(sup.config.p_wfss[0]._nvalid): imagette = frame[vx[k]:vx[k] + npix, vy[k]:vy[k] + npix].astype( np.float32) - threshold imagette[np.where(imagette < 0)] = 0 tmp = center_of_mass(imagette) slopes[k] = (tmp[0] - offset) * scale - slopes[k + sup.config.p_wfs0._nvalid] = (tmp[1] - offset) * scale + slopes[k + sup.config.p_wfss[0]._nvalid] = (tmp[1] - offset) * scale assert (relative_array_error(ng.array(control.d_centroids).toarray(), slopes) < precision) def test_doCentroids_bpcog(): rtc.remove_centroider(0) - rtc.add_centroider(sup.context, sup.config.p_wfs0._nvalid, - sup.config.p_wfs0.npix / 2 - 0.5, sup.config.p_wfs0.pixsize, + rtc.add_centroider(sup.context, sup.config.p_wfss[0]._nvalid, + sup.config.p_wfss[0].npix / 2 - 0.5, 
sup.config.p_wfss[0].pixsize, False, 0, "bpcog") centro = rtc.d_centro[-1] bpix = 8 centro.set_nmax(8) - centro.set_npix(sup.config.p_wfs0.npix) + centro.set_npix(sup.config.p_wfss[0].npix) centro.load_validpos(xvalid, yvalid, xvalid.size) centro.load_img(frame, frame.shape[0]) centro.calibrate_img() rtc.do_centroids(0) bincube = np.array(sup.wfs._wfs.d_wfs[0].d_bincube) bincube /= bincube.max() - slopes = np.zeros(sup.config.p_wfs0._nvalid * 2) + slopes = np.zeros(sup.config.p_wfss[0]._nvalid * 2) offset = centro.offset scale = centro.scale - vx = sup.config.p_wfs0._validsubsx - vy = sup.config.p_wfs0._validsubsy - npix = sup.config.p_wfs0.npix - for k in range(sup.config.p_wfs0._nvalid): + vx = sup.config.p_wfss[0]._validsubsx + vy = sup.config.p_wfss[0]._validsubsy + npix = sup.config.p_wfss[0].npix + for k in range(sup.config.p_wfss[0]._nvalid): imagette = frame[vx[k]:vx[k] + npix, vy[k]:vy[k] + npix].astype(np.float32) threshold = np.sort(imagette, axis=None)[-(bpix + 1)] imagette -= threshold imagette[np.where(imagette < 0)] = 0 tmp = center_of_mass(imagette) slopes[k] = (tmp[0] - offset) * scale - slopes[k + sup.config.p_wfs0._nvalid] = (tmp[1] - offset) * scale + slopes[k + sup.config.p_wfss[0]._nvalid] = (tmp[1] - offset) * scale assert (relative_array_error(ng.array(control.d_centroids).toarray(), slopes) < precision) diff --git a/tests/pytest/rtc/FP32/test_rtcUFF.py b/tests/pytest/rtc/test_rtcUFF.py similarity index 60% rename from tests/pytest/rtc/FP32/test_rtcUFF.py rename to tests/pytest/rtc/test_rtcUFF.py index e6a61c6..ffb6b6a 100644 --- a/tests/pytest/rtc/FP32/test_rtcUFF.py +++ b/tests/pytest/rtc/test_rtcUFF.py @@ -1,41 +1,78 @@ +## @package shesha.tests +## @brief Tests the RTC module +## @author COMPASS Team +## @version 5.1.0 +## @date 2020/05/18 +## @copyright GNU Lesser General Public License +# +# This file is part of COMPASS +# +# Copyright (C) 2011-2019 COMPASS Team +# All rights reserved. 
+# Distributed under GNU - LGPL +# +# COMPASS is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser +# General Public License as published by the Free Software Foundation, either version 3 of the License, +# or any later version. +# +# COMPASS: End-to-end AO simulation tool using GPU acceleration +# The COMPASS platform was designed to meet the need of high-performance for the simulation of AO systems. +# +# The final product includes a software package for simulating all the critical subcomponents of AO, +# particularly in the context of the ELT and a real-time core based on several control approaches, +# with performances consistent with its integration into an instrument. Taking advantage of the specific +# hardware architecture of the GPU, the COMPASS tool allows to achieve adequate execution speeds to +# conduct large simulation campaigns called to the ELT. +# +# The COMPASS platform can be used to carry a wide variety of simulations to both testspecific components +# of AO of the E-ELT (such as wavefront analysis device with a pyramid or elongated Laser star), and +# various systems configurations such as multi-conjugate AO. +# +# COMPASS is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the +# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License along with COMPASS. +# If not, see . 
+ import numpy as np import naga as ng import os from shesha.sutra_wrap import Rtc_UFF as Rtc from shesha.supervisor.compassSupervisor import CompassSupervisor as Supervisor from scipy.ndimage.measurements import center_of_mass -from shesha.util.utilities import load_config_from_file +from shesha.config import ParamConfig precision = 1e-2 -config = load_config_from_file(os.getenv("COMPASS_ROOT") + +config = ParamConfig(os.getenv("COMPASS_ROOT") + "/shesha/tests/pytest/par/test_sh.py") sup = Supervisor(config) sup.wfs._wfs.d_wfs[0].set_fakecam(True) -sup.wfs._wfs.d_wfs[0].set_max_flux_per_pix(int(sup.config.p_wfs0._nphotons // 2)) +sup.wfs._wfs.d_wfs[0].set_max_flux_per_pix(int(sup.config.p_wfss[0]._nphotons // 2)) sup.wfs._wfs.d_wfs[0].set_max_pix_value(2**16 - 1) sup.next() sup.rtc.open_loop(0) sup.rtc.close_loop(0) sup.rtc.do_control(0) rtc = Rtc() -rtc.add_centroider(sup.context, sup.config.p_wfs0._nvalid, - sup.config.p_wfs0.npix / 2 - 0.5, sup.config.p_wfs0.pixsize, False, 0, +rtc.add_centroider(sup.context, sup.config.p_wfss[0]._nvalid, + sup.config.p_wfss[0].npix / 2 - 0.5, sup.config.p_wfss[0].pixsize, False, 0, "cog") -rtc.add_controller(sup.context, sup.config.p_wfs0._nvalid, - sup.config.p_wfs0._nvalid * 2, sup.config.p_controller0.nactu, - sup.config.p_controller0.delay, 0, "generic", idx_centro=np.zeros(1), +rtc.add_controller(sup.context, sup.config.p_wfss[0]._nvalid, + sup.config.p_wfss[0]._nvalid * 2, sup.config.p_controllers[0].nactu, + sup.config.p_controllers[0].delay, 0, "generic", idx_centro=np.zeros(1), ncentro=1) centro = rtc.d_centro[0] control = rtc.d_control[0] -rtc.d_centro[0].set_npix(sup.config.p_wfs0.npix) +rtc.d_centro[0].set_npix(sup.config.p_wfss[0].npix) xvalid = np.array(sup.rtc._rtc.d_centro[0].d_validx) yvalid = np.array(sup.rtc._rtc.d_centro[0].d_validy) rtc.d_centro[0].load_validpos(xvalid, yvalid, xvalid.size) cmat = sup.rtc.get_command_matrix(0) rtc.d_control[0].set_cmat(cmat) 
-rtc.d_control[0].set_gain(sup.config.p_controller0.gain) +rtc.d_control[0].set_gain(sup.config.p_controllers[0].gain) frame = np.array(sup.wfs._wfs.d_wfs[0].d_camimg) rtc.d_centro[0].load_img(frame, frame.shape[0]) rtc.d_centro[0].calibrate_img() @@ -56,15 +93,15 @@ def relative_array_error(array1, array2): def test_initCentro_nvalid(): - assert (centro.nvalid - sup.config.p_wfs0._nvalid < precision) + assert (centro.nvalid - sup.config.p_wfss[0]._nvalid < precision) def test_initCentro_offset(): - assert (centro.offset - (sup.config.p_wfs0.npix / 2 - 0.5) < precision) + assert (centro.offset - (sup.config.p_wfss[0].npix / 2 - 0.5) < precision) def test_initCentro_scale(): - assert (centro.scale - sup.config.p_wfs0.pixsize < precision) + assert (centro.scale - sup.config.p_wfss[0].pixsize < precision) def test_initCentro_type(): @@ -72,11 +109,11 @@ def test_initCentro_type(): def test_initControl_nslope(): - assert (control.nslope - sup.config.p_wfs0._nvalid * 2 < precision) + assert (control.nslope - sup.config.p_wfss[0]._nvalid * 2 < precision) def test_initControl_nactu(): - assert (control.nactu - sup.config.p_controller0.nactu < precision) + assert (control.nactu - sup.config.p_controllers[0].nactu < precision) def test_initControl_type(): @@ -84,11 +121,11 @@ def test_initControl_type(): def test_initControl_delay(): - assert (control.delay - sup.config.p_controller0.delay < precision) + assert (control.delay - sup.config.p_controllers[0].delay < precision) def test_set_npix(): - assert (centro.npix - sup.config.p_wfs0.npix < precision) + assert (centro.npix - sup.config.p_wfss[0].npix < precision) def test_load_validposX(): @@ -104,7 +141,7 @@ def test_set_cmat(): def test_set_gain(): - assert (control.gain - sup.config.p_controller0.gain < precision) + assert (control.gain - sup.config.p_controllers[0].gain < precision) def test_load_img(): @@ -127,13 +164,13 @@ def test_calibrate_img(): def test_doCentroids_cog(): bincube = 
np.array(sup.wfs._wfs.d_wfs[0].d_bincube) - slopes = np.zeros(sup.config.p_wfs0._nvalid * 2) + slopes = np.zeros(sup.config.p_wfss[0]._nvalid * 2) offset = centro.offset scale = centro.scale - for k in range(sup.config.p_wfs0._nvalid): + for k in range(sup.config.p_wfss[0]._nvalid): tmp = center_of_mass(bincube[:, :, k]) slopes[k] = (tmp[0] - offset) * scale - slopes[k + sup.config.p_wfs0._nvalid] = (tmp[1] - offset) * scale + slopes[k + sup.config.p_wfss[0]._nvalid] = (tmp[1] - offset) * scale assert (relative_array_error(ng.array(control.d_centroids).toarray(), slopes) < precision) @@ -154,7 +191,7 @@ def test_set_comRange(): def test_clipping(): control.set_comRange(-1, 1) - C = (np.random.random(sup.config.p_controller0.nactu) - 0.5) * 4 + C = (np.random.random(sup.config.p_controllers[0].nactu) - 0.5) * 4 control.set_com(C, C.size) rtc.do_clipping(0) C_clipped = C.copy() @@ -165,7 +202,7 @@ def test_clipping(): def test_add_perturb_voltage(): - C = np.random.random(sup.config.p_controller0.nactu) + C = np.random.random(sup.config.p_controllers[0].nactu) control.add_perturb_voltage("test", C, 1) assert (relative_array_error( ng.array(control.d_perturb_map["test"][0]).toarray(), C) < precision) @@ -177,7 +214,7 @@ def test_remove_perturb_voltage(): def test_add_perturb(): - C = np.random.random(sup.config.p_controller0.nactu) + C = np.random.random(sup.config.p_controllers[0].nactu) control.add_perturb_voltage("test", C, 1) com = ng.array(control.d_com_clipped).toarray() control.add_perturb() @@ -211,13 +248,13 @@ def test_comp_voltage(): volt_max = 1 control.set_comRange(volt_min, volt_max) control.comp_voltage() - C = np.random.random(sup.config.p_controller0.nactu) + C = np.random.random(sup.config.p_controllers[0].nactu) control.add_perturb_voltage("test", C, 1) control.set_com(C, C.size) com0 = ng.array(control.d_circularComs0).toarray() com1 = ng.array(control.d_circularComs1).toarray() control.comp_voltage() - delay = sup.config.p_controller0.delay + 
delay = sup.config.p_controllers[0].delay a = delay - int(delay) b = 1 - a commands = a * com0 + b * com1 @@ -234,64 +271,64 @@ def test_remove_centroider(): def test_doCentroids_tcog(): - rtc.add_centroider(sup.context, sup.config.p_wfs0._nvalid, - sup.config.p_wfs0.npix / 2 - 0.5, sup.config.p_wfs0.pixsize, + rtc.add_centroider(sup.context, sup.config.p_wfss[0]._nvalid, + sup.config.p_wfss[0].npix / 2 - 0.5, sup.config.p_wfss[0].pixsize, False, 0, "tcog") centro = rtc.d_centro[-1] threshold = 5000 centro.set_threshold(threshold) - centro.set_npix(sup.config.p_wfs0.npix) + centro.set_npix(sup.config.p_wfss[0].npix) centro.load_validpos(xvalid, yvalid, xvalid.size) centro.load_img(frame, frame.shape[0]) centro.calibrate_img() rtc.do_centroids(0) - slopes = np.zeros(sup.config.p_wfs0._nvalid * 2) + slopes = np.zeros(sup.config.p_wfss[0]._nvalid * 2) offset = centro.offset scale = centro.scale - vx = sup.config.p_wfs0._validsubsx - vy = sup.config.p_wfs0._validsubsy - npix = sup.config.p_wfs0.npix - for k in range(sup.config.p_wfs0._nvalid): + vx = sup.config.p_wfss[0]._validsubsx + vy = sup.config.p_wfss[0]._validsubsy + npix = sup.config.p_wfss[0].npix + for k in range(sup.config.p_wfss[0]._nvalid): imagette = frame[vx[k]:vx[k] + npix, vy[k]:vy[k] + npix].astype( np.float32) - threshold imagette[np.where(imagette < 0)] = 0 tmp = center_of_mass(imagette) slopes[k] = (tmp[0] - offset) * scale - slopes[k + sup.config.p_wfs0._nvalid] = (tmp[1] - offset) * scale + slopes[k + sup.config.p_wfss[0]._nvalid] = (tmp[1] - offset) * scale assert (relative_array_error(ng.array(control.d_centroids).toarray(), slopes) < precision) def test_doCentroids_bpcog(): rtc.remove_centroider(0) - rtc.add_centroider(sup.context, sup.config.p_wfs0._nvalid, - sup.config.p_wfs0.npix / 2 - 0.5, sup.config.p_wfs0.pixsize, + rtc.add_centroider(sup.context, sup.config.p_wfss[0]._nvalid, + sup.config.p_wfss[0].npix / 2 - 0.5, sup.config.p_wfss[0].pixsize, False, 0, "bpcog") centro = 
rtc.d_centro[-1] bpix = 8 centro.set_nmax(8) - centro.set_npix(sup.config.p_wfs0.npix) + centro.set_npix(sup.config.p_wfss[0].npix) centro.load_validpos(xvalid, yvalid, xvalid.size) centro.load_img(frame, frame.shape[0]) centro.calibrate_img() rtc.do_centroids(0) bincube = np.array(sup.wfs._wfs.d_wfs[0].d_bincube) bincube /= bincube.max() - slopes = np.zeros(sup.config.p_wfs0._nvalid * 2) + slopes = np.zeros(sup.config.p_wfss[0]._nvalid * 2) offset = centro.offset scale = centro.scale - vx = sup.config.p_wfs0._validsubsx - vy = sup.config.p_wfs0._validsubsy - npix = sup.config.p_wfs0.npix - for k in range(sup.config.p_wfs0._nvalid): + vx = sup.config.p_wfss[0]._validsubsx + vy = sup.config.p_wfss[0]._validsubsy + npix = sup.config.p_wfss[0].npix + for k in range(sup.config.p_wfss[0]._nvalid): imagette = frame[vx[k]:vx[k] + npix, vy[k]:vy[k] + npix].astype(np.float32) threshold = np.sort(imagette, axis=None)[-(bpix + 1)] imagette -= threshold imagette[np.where(imagette < 0)] = 0 tmp = center_of_mass(imagette) slopes[k] = (tmp[0] - offset) * scale - slopes[k + sup.config.p_wfs0._nvalid] = (tmp[1] - offset) * scale + slopes[k + sup.config.p_wfss[0]._nvalid] = (tmp[1] - offset) * scale assert (relative_array_error(ng.array(control.d_centroids).toarray(), slopes) < precision) diff --git a/tests/pytest/rtc/FP32/test_rtcUFU.py b/tests/pytest/rtc/test_rtcUFU.py similarity index 61% rename from tests/pytest/rtc/FP32/test_rtcUFU.py rename to tests/pytest/rtc/test_rtcUFU.py index 5762fc8..b2f65b9 100644 --- a/tests/pytest/rtc/FP32/test_rtcUFU.py +++ b/tests/pytest/rtc/test_rtcUFU.py @@ -1,14 +1,51 @@ +## @package shesha.tests +## @brief Tests the RTC module +## @author COMPASS Team +## @version 5.1.0 +## @date 2020/05/18 +## @copyright GNU Lesser General Public License +# +# This file is part of COMPASS +# +# Copyright (C) 2011-2019 COMPASS Team +# All rights reserved. 
+# Distributed under GNU - LGPL +# +# COMPASS is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser +# General Public License as published by the Free Software Foundation, either version 3 of the License, +# or any later version. +# +# COMPASS: End-to-end AO simulation tool using GPU acceleration +# The COMPASS platform was designed to meet the need of high-performance for the simulation of AO systems. +# +# The final product includes a software package for simulating all the critical subcomponents of AO, +# particularly in the context of the ELT and a real-time core based on several control approaches, +# with performances consistent with its integration into an instrument. Taking advantage of the specific +# hardware architecture of the GPU, the COMPASS tool allows to achieve adequate execution speeds to +# conduct large simulation campaigns called to the ELT. +# +# The COMPASS platform can be used to carry a wide variety of simulations to both testspecific components +# of AO of the E-ELT (such as wavefront analysis device with a pyramid or elongated Laser star), and +# various systems configurations such as multi-conjugate AO. +# +# COMPASS is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the +# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License along with COMPASS. +# If not, see . 
+ import numpy as np import naga as ng import os from shesha.sutra_wrap import Rtc_UFU as Rtc from shesha.supervisor.compassSupervisor import CompassSupervisor as Supervisor from scipy.ndimage.measurements import center_of_mass -from shesha.util.utilities import load_config_from_file +from shesha.config import ParamConfig precision = 1e-2 -config = load_config_from_file(os.getenv("COMPASS_ROOT") + +config = ParamConfig(os.getenv("COMPASS_ROOT") + "/shesha/tests/pytest/par/test_sh.py") config.p_dms[0].unitpervolt = 500 config.p_dms[0].push4imat = 0.5 @@ -16,29 +53,29 @@ config.p_dms[1].push4imat = 0.5 sup = Supervisor(config) sup.wfs._wfs.d_wfs[0].set_fakecam(True) -sup.wfs._wfs.d_wfs[0].set_max_flux_per_pix(int(sup.config.p_wfs0._nphotons // 2)) +sup.wfs._wfs.d_wfs[0].set_max_flux_per_pix(int(sup.config.p_wfss[0]._nphotons // 2)) sup.wfs._wfs.d_wfs[0].set_max_pix_value(2**16 - 1) sup.next() sup.rtc.open_loop(0) sup.rtc.close_loop(0) sup.rtc.do_control(0) rtc = Rtc() -rtc.add_centroider(sup.context, sup.config.p_wfs0._nvalid, - sup.config.p_wfs0.npix / 2 - 0.5, sup.config.p_wfs0.pixsize, False, 0, +rtc.add_centroider(sup.context, sup.config.p_wfss[0]._nvalid, + sup.config.p_wfss[0].npix / 2 - 0.5, sup.config.p_wfss[0].pixsize, False, 0, "cog") -rtc.add_controller(sup.context, sup.config.p_wfs0._nvalid, - sup.config.p_wfs0._nvalid * 2, sup.config.p_controller0.nactu, - sup.config.p_controller0.delay, 0, "generic", idx_centro=np.zeros(1), +rtc.add_controller(sup.context, sup.config.p_wfss[0]._nvalid, + sup.config.p_wfss[0]._nvalid * 2, sup.config.p_controllers[0].nactu, + sup.config.p_controllers[0].delay, 0, "generic", idx_centro=np.zeros(1), ncentro=1) centro = rtc.d_centro[0] control = rtc.d_control[0] -rtc.d_centro[0].set_npix(sup.config.p_wfs0.npix) +rtc.d_centro[0].set_npix(sup.config.p_wfss[0].npix) xvalid = np.array(sup.rtc._rtc.d_centro[0].d_validx) yvalid = np.array(sup.rtc._rtc.d_centro[0].d_validy) rtc.d_centro[0].load_validpos(xvalid, yvalid, xvalid.size) 
cmat = sup.rtc.get_command_matrix(0) rtc.d_control[0].set_cmat(cmat) -rtc.d_control[0].set_gain(sup.config.p_controller0.gain) +rtc.d_control[0].set_gain(sup.config.p_controllers[0].gain) frame = np.array(sup.wfs._wfs.d_wfs[0].d_camimg) rtc.d_centro[0].load_img(frame, frame.shape[0]) rtc.d_centro[0].calibrate_img() @@ -59,15 +96,15 @@ def relative_array_error(array1, array2): def test_initCentro_nvalid(): - assert (centro.nvalid - sup.config.p_wfs0._nvalid < precision) + assert (centro.nvalid - sup.config.p_wfss[0]._nvalid < precision) def test_initCentro_offset(): - assert (centro.offset - (sup.config.p_wfs0.npix / 2 - 0.5) < precision) + assert (centro.offset - (sup.config.p_wfss[0].npix / 2 - 0.5) < precision) def test_initCentro_scale(): - assert (centro.scale - sup.config.p_wfs0.pixsize < precision) + assert (centro.scale - sup.config.p_wfss[0].pixsize < precision) def test_initCentro_type(): @@ -75,11 +112,11 @@ def test_initCentro_type(): def test_initControl_nslope(): - assert (control.nslope - sup.config.p_wfs0._nvalid * 2 < precision) + assert (control.nslope - sup.config.p_wfss[0]._nvalid * 2 < precision) def test_initControl_nactu(): - assert (control.nactu - sup.config.p_controller0.nactu < precision) + assert (control.nactu - sup.config.p_controllers[0].nactu < precision) def test_initControl_type(): @@ -87,11 +124,11 @@ def test_initControl_type(): def test_initControl_delay(): - assert (control.delay - sup.config.p_controller0.delay < precision) + assert (control.delay - sup.config.p_controllers[0].delay < precision) def test_set_npix(): - assert (centro.npix - sup.config.p_wfs0.npix < precision) + assert (centro.npix - sup.config.p_wfss[0].npix < precision) def test_load_validposX(): @@ -107,7 +144,7 @@ def test_set_cmat(): def test_set_gain(): - assert (control.gain - sup.config.p_controller0.gain < precision) + assert (control.gain - sup.config.p_controllers[0].gain < precision) def test_load_img(): @@ -130,13 +167,13 @@ def test_calibrate_img(): 
def test_doCentroids_cog(): bincube = np.array(sup.wfs._wfs.d_wfs[0].d_bincube) - slopes = np.zeros(sup.config.p_wfs0._nvalid * 2) + slopes = np.zeros(sup.config.p_wfss[0]._nvalid * 2) offset = centro.offset scale = centro.scale - for k in range(sup.config.p_wfs0._nvalid): + for k in range(sup.config.p_wfss[0]._nvalid): tmp = center_of_mass(bincube[:, :, k]) slopes[k] = (tmp[0] - offset) * scale - slopes[k + sup.config.p_wfs0._nvalid] = (tmp[1] - offset) * scale + slopes[k + sup.config.p_wfss[0]._nvalid] = (tmp[1] - offset) * scale assert (relative_array_error(ng.array(control.d_centroids).toarray(), slopes) < precision) @@ -157,7 +194,7 @@ def test_set_comRange(): def test_clipping(): control.set_comRange(-1, 1) - C = (np.random.random(sup.config.p_controller0.nactu) - 0.5) * 4 + C = (np.random.random(sup.config.p_controllers[0].nactu) - 0.5) * 4 control.set_com(C, C.size) rtc.do_clipping(0) C_clipped = C.copy() @@ -168,7 +205,7 @@ def test_clipping(): def test_add_perturb_voltage(): - C = np.random.random(sup.config.p_controller0.nactu) + C = np.random.random(sup.config.p_controllers[0].nactu) control.add_perturb_voltage("test", C, 1) assert (relative_array_error( ng.array(control.d_perturb_map["test"][0]).toarray(), C) < precision) @@ -180,7 +217,7 @@ def test_remove_perturb_voltage(): def test_add_perturb(): - C = np.random.random(sup.config.p_controller0.nactu) + C = np.random.random(sup.config.p_controllers[0].nactu) control.add_perturb_voltage("test", C, 1) com = ng.array(control.d_com_clipped).toarray() control.add_perturb() @@ -214,13 +251,13 @@ def test_comp_voltage(): volt_max = 1 control.set_comRange(volt_min, volt_max) control.comp_voltage() - C = np.random.random(sup.config.p_controller0.nactu) + C = np.random.random(sup.config.p_controllers[0].nactu) control.add_perturb_voltage("test", C, 1) control.set_com(C, C.size) com0 = ng.array(control.d_circularComs0).toarray() com1 = ng.array(control.d_circularComs1).toarray() control.comp_voltage() - delay = 
sup.config.p_controller0.delay + delay = sup.config.p_controllers[0].delay a = delay - int(delay) b = 1 - a commands = a * com0 + b * com1 @@ -240,64 +277,64 @@ def test_remove_centroider(): def test_doCentroids_tcog(): - rtc.add_centroider(sup.context, sup.config.p_wfs0._nvalid, - sup.config.p_wfs0.npix / 2 - 0.5, sup.config.p_wfs0.pixsize, + rtc.add_centroider(sup.context, sup.config.p_wfss[0]._nvalid, + sup.config.p_wfss[0].npix / 2 - 0.5, sup.config.p_wfss[0].pixsize, False, 0, "tcog") centro = rtc.d_centro[-1] threshold = 500 centro.set_threshold(threshold) - centro.set_npix(sup.config.p_wfs0.npix) + centro.set_npix(sup.config.p_wfss[0].npix) centro.load_validpos(xvalid, yvalid, xvalid.size) centro.load_img(frame, frame.shape[0]) centro.calibrate_img() rtc.do_centroids(0) - slopes = np.zeros(sup.config.p_wfs0._nvalid * 2) + slopes = np.zeros(sup.config.p_wfss[0]._nvalid * 2) offset = centro.offset scale = centro.scale - vx = sup.config.p_wfs0._validsubsx - vy = sup.config.p_wfs0._validsubsy - npix = sup.config.p_wfs0.npix - for k in range(sup.config.p_wfs0._nvalid): + vx = sup.config.p_wfss[0]._validsubsx + vy = sup.config.p_wfss[0]._validsubsy + npix = sup.config.p_wfss[0].npix + for k in range(sup.config.p_wfss[0]._nvalid): imagette = frame[vx[k]:vx[k] + npix, vy[k]:vy[k] + npix].astype( np.float32) - threshold imagette[np.where(imagette < 0)] = 0 tmp = center_of_mass(imagette) slopes[k] = (tmp[0] - offset) * scale - slopes[k + sup.config.p_wfs0._nvalid] = (tmp[1] - offset) * scale + slopes[k + sup.config.p_wfss[0]._nvalid] = (tmp[1] - offset) * scale assert (relative_array_error(ng.array(control.d_centroids).toarray(), slopes) < precision) def test_doCentroids_bpcog(): rtc.remove_centroider(0) - rtc.add_centroider(sup.context, sup.config.p_wfs0._nvalid, - sup.config.p_wfs0.npix / 2 - 0.5, sup.config.p_wfs0.pixsize, + rtc.add_centroider(sup.context, sup.config.p_wfss[0]._nvalid, + sup.config.p_wfss[0].npix / 2 - 0.5, sup.config.p_wfss[0].pixsize, False, 0, 
"bpcog") centro = rtc.d_centro[-1] bpix = 8 centro.set_nmax(8) - centro.set_npix(sup.config.p_wfs0.npix) + centro.set_npix(sup.config.p_wfss[0].npix) centro.load_validpos(xvalid, yvalid, xvalid.size) centro.load_img(frame, frame.shape[0]) centro.calibrate_img() rtc.do_centroids(0) bincube = np.array(sup.wfs._wfs.d_wfs[0].d_bincube) bincube /= bincube.max() - slopes = np.zeros(sup.config.p_wfs0._nvalid * 2) + slopes = np.zeros(sup.config.p_wfss[0]._nvalid * 2) offset = centro.offset scale = centro.scale - vx = sup.config.p_wfs0._validsubsx - vy = sup.config.p_wfs0._validsubsy - npix = sup.config.p_wfs0.npix - for k in range(sup.config.p_wfs0._nvalid): + vx = sup.config.p_wfss[0]._validsubsx + vy = sup.config.p_wfss[0]._validsubsy + npix = sup.config.p_wfss[0].npix + for k in range(sup.config.p_wfss[0]._nvalid): imagette = frame[vx[k]:vx[k] + npix, vy[k]:vy[k] + npix].astype(np.float32) threshold = np.sort(imagette, axis=None)[-(bpix + 1)] imagette -= threshold imagette[np.where(imagette < 0)] = 0 tmp = center_of_mass(imagette) slopes[k] = (tmp[0] - offset) * scale - slopes[k + sup.config.p_wfs0._nvalid] = (tmp[1] - offset) * scale + slopes[k + sup.config.p_wfss[0]._nvalid] = (tmp[1] - offset) * scale assert (relative_array_error(ng.array(control.d_centroids).toarray(), slopes) < precision) diff --git a/tests/pytest/rtc_standalone/FP32/test_rtcstandalone_maskedpix.py b/tests/pytest/rtc_standalone/FP32/test_rtcstandalone_maskedpix.py new file mode 100644 index 0000000..687548c --- /dev/null +++ b/tests/pytest/rtc_standalone/FP32/test_rtcstandalone_maskedpix.py @@ -0,0 +1,108 @@ +## @package shesha.tests +## @brief Tests the RTC standalone module +## @author COMPASS Team +## @version 5.1.0 +## @date 2020/05/18 +## @copyright GNU Lesser General Public License +# +# This file is part of COMPASS +# +# Copyright (C) 2011-2019 COMPASS Team +# All rights reserved. 
+# Distributed under GNU - LGPL +# +# COMPASS is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser +# General Public License as published by the Free Software Foundation, either version 3 of the License, +# or any later version. +# +# COMPASS: End-to-end AO simulation tool using GPU acceleration +# The COMPASS platform was designed to meet the need of high-performance for the simulation of AO systems. +# +# The final product includes a software package for simulating all the critical subcomponents of AO, +# particularly in the context of the ELT and a real-time core based on several control approaches, +# with performances consistent with its integration into an instrument. Taking advantage of the specific +# hardware architecture of the GPU, the COMPASS tool allows to achieve adequate execution speeds to +# conduct large simulation campaigns called to the ELT. +# +# The COMPASS platform can be used to carry a wide variety of simulations to both testspecific components +# of AO of the E-ELT (such as wavefront analysis device with a pyramid or elongated Laser star), and +# various systems configurations such as multi-conjugate AO. +# +# COMPASS is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the +# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License along with COMPASS. +# If not, see . 
+ +import numpy as np +import naga as ng +import os +from shesha.supervisor.components import RtcStandalone +from shesha.supervisor.compassSupervisor import CompassSupervisor as Supervisor +from scipy.ndimage.measurements import center_of_mass +from shesha.config import ParamConfig + +precision = 1e-2 + +config = ParamConfig(os.getenv("COMPASS_ROOT") + + "/shesha/tests/pytest/par/test_pyrhr.py") +sup = Supervisor(config) +sup.next() +sup.rtc.open_loop(0) +sup.rtc.close_loop(0) +sup.rtc._rtc.do_control(0) +rtc = RtcStandalone(sup.context, sup.config, 1, [sup.config.p_wfss[0]._nvalid], sup.config.p_controllers[0].nactu, + ["maskedpix"], [sup.config.p_controllers[0].delay], [0], [1]) +centro = rtc._rtc.d_centro[0] +control = rtc._rtc.d_control[0] +rtc._rtc.d_centro[0].set_npix(sup.config.p_wfss[0].npix) +xvalid = np.array(sup.rtc._rtc.d_centro[0].d_validx) +yvalid = np.array(sup.rtc._rtc.d_centro[0].d_validy) +rtc._rtc.d_centro[0].load_validpos(xvalid, yvalid, xvalid.size) +cmat = sup.rtc.get_command_matrix(0) +rtc._rtc.d_control[0].set_cmat(cmat) +rtc._rtc.d_control[0].set_gain(sup.config.p_controllers[0].gain) +frame = sup.wfs.get_wfs_image(0) +frame /= frame.max() +rtc._rtc.d_centro[0].load_img(frame, frame.shape[0]) +rtc._rtc.d_centro[0].calibrate_img() + +rtc._rtc.do_centroids(0) +slp = ng.array(rtc._rtc.d_control[0].d_centroids) +rtc._rtc.do_control(0) +com = ng.array(rtc._rtc.d_control[0].d_com) + +dark = np.random.random(frame.shape) +flat = np.random.random(frame.shape) +centro.set_dark(dark, frame.shape[0]) +centro.set_flat(flat, frame.shape[0]) + + +def relative_array_error(array1, array2): + return np.abs((array1 - array2) / array2.max()).max() + +def test_doCentroids_maskedPix(): + binimg = np.array(centro.d_img) + slopes = np.zeros(xvalid.size) + psum = binimg[xvalid, yvalid].sum() / slopes.size + for k in range(slopes.size): + slopes[k] = binimg[xvalid[k], yvalid[k]] / psum - 1 + assert (relative_array_error(ng.array(control.d_centroids).toarray(), 
slopes) < + precision) + +def test_calibrate_img_validPix(): + centro.calibrate_img_validPix() + validx = np.array(centro.d_validx) + validy = np.array(centro.d_validy) + valid_mask = frame*0 + valid_mask[validx, validy] = 1 + imgCal = (frame - dark) * flat * valid_mask + assert (relative_array_error(np.array(centro.d_img), imgCal) < precision) + +def test_do_control_generic(): + slopes = np.array(control.d_centroids) + gain = control.gain + cmat = np.array(control.d_cmat) + commands = cmat.dot(slopes) * gain * (-1) + assert (relative_array_error(np.array(control.d_com), commands) < precision) diff --git a/tests/pytest/rtc_standalone/FP32/test_rtcstandalone_sh.py b/tests/pytest/rtc_standalone/FP32/test_rtcstandalone_sh.py new file mode 100644 index 0000000..256a120 --- /dev/null +++ b/tests/pytest/rtc_standalone/FP32/test_rtcstandalone_sh.py @@ -0,0 +1,267 @@ +## @package shesha.tests +## @brief Tests the RTC standalone module +## @author COMPASS Team +## @version 5.1.0 +## @date 2020/05/18 +## @copyright GNU Lesser General Public License +# +# This file is part of COMPASS +# +# Copyright (C) 2011-2019 COMPASS Team +# All rights reserved. +# Distributed under GNU - LGPL +# +# COMPASS is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser +# General Public License as published by the Free Software Foundation, either version 3 of the License, +# or any later version. +# +# COMPASS: End-to-end AO simulation tool using GPU acceleration +# The COMPASS platform was designed to meet the need of high-performance for the simulation of AO systems. +# +# The final product includes a software package for simulating all the critical subcomponents of AO, +# particularly in the context of the ELT and a real-time core based on several control approaches, +# with performances consistent with its integration into an instrument. 
Taking advantage of the specific +# hardware architecture of the GPU, the COMPASS tool allows to achieve adequate execution speeds to +# conduct large simulation campaigns called to the ELT. +# +# The COMPASS platform can be used to carry a wide variety of simulations to both testspecific components +# of AO of the E-ELT (such as wavefront analysis device with a pyramid or elongated Laser star), and +# various systems configurations such as multi-conjugate AO. +# +# COMPASS is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the +# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License along with COMPASS. +# If not, see . + +import numpy as np +import naga as ng +import os +from shesha.supervisor.components import RtcStandalone +from shesha.supervisor.compassSupervisor import CompassSupervisor as Supervisor +from scipy.ndimage.measurements import center_of_mass +from shesha.config import ParamConfig + +precision = 1e-2 + +config = ParamConfig(os.getenv("COMPASS_ROOT") + + "/shesha/tests/pytest/par/test_sh.py") +sup = Supervisor(config) +sup.next() +sup.rtc.open_loop(0) +sup.rtc.close_loop(0) +sup.rtc._rtc.do_control(0) +rtc = RtcStandalone(sup.context, sup.config, 1, [sup.config.p_wfss[0]._nvalid], sup.config.p_controllers[0].nactu, + ["cog"], [sup.config.p_controllers[0].delay], [0], [0.29005988378497927]) +centro = rtc._rtc.d_centro[0] +control = rtc._rtc.d_control[0] +rtc._rtc.d_centro[0].set_npix(sup.config.p_wfss[0].npix) +xvalid = np.array(sup.rtc._rtc.d_centro[0].d_validx) +yvalid = np.array(sup.rtc._rtc.d_centro[0].d_validy) +rtc._rtc.d_centro[0].load_validpos(xvalid, yvalid, xvalid.size) +cmat = sup.rtc.get_command_matrix(0) +rtc._rtc.d_control[0].set_cmat(cmat) +rtc._rtc.d_control[0].set_gain(sup.config.p_controllers[0].gain) +frame = sup.wfs.get_wfs_image(0) 
+frame /= frame.max() +rtc._rtc.d_centro[0].load_img(frame, frame.shape[0]) +rtc._rtc.d_centro[0].calibrate_img() + +rtc._rtc.do_centroids(0) +slp = ng.array(rtc._rtc.d_control[0].d_centroids) +rtc._rtc.do_control(0) +com = ng.array(rtc._rtc.d_control[0].d_com) + +dark = np.random.random(frame.shape) +flat = np.random.random(frame.shape) +centro.set_dark(dark, frame.shape[0]) +centro.set_flat(flat, frame.shape[0]) + + + +def relative_array_error(array1, array2): + return np.abs((array1 - array2) / array2.max()).max() + + +def test_initCentro_nvalid(): + assert (centro.nvalid - sup.config.p_wfss[0]._nvalid < precision) + + +def test_initCentro_offset(): + assert (centro.offset - (sup.config.p_wfss[0].npix / 2 - 0.5) < precision) + + +def test_initCentro_scale(): + assert (centro.scale - sup.config.p_wfss[0].pixsize < precision) + + +def test_initCentro_type(): + assert (centro.type == "cog") + + +def test_initControl_nslope(): + assert (control.nslope - sup.config.p_wfss[0]._nvalid * 2 < precision) + + +def test_initControl_nactu(): + assert (control.nactu - sup.config.p_controllers[0].nactu < precision) + + +def test_initControl_type(): + assert (control.type == "generic") + + +def test_initControl_delay(): + assert (control.delay - sup.config.p_controllers[0].delay < precision) + + +def test_set_npix(): + assert (centro.npix - sup.config.p_wfss[0].npix < precision) + + +def test_load_validposX(): + assert (relative_array_error(np.array(centro.d_validx), xvalid) < precision) + + +def test_load_validposY(): + assert (relative_array_error(np.array(centro.d_validy), yvalid) < precision) + + +def test_set_cmat(): + assert (relative_array_error(np.array(control.d_cmat), cmat) < precision) + + +def test_set_gain(): + assert (control.gain - sup.config.p_controllers[0].gain < precision) + + +def test_load_img(): + assert (relative_array_error(np.array(centro.d_img_raw), frame) < precision) + + +def test_set_dark(): + assert (relative_array_error(np.array(centro.d_dark), 
dark) < precision) + + +def test_set_flat(): + assert (relative_array_error(np.array(centro.d_flat), flat) < precision) + + +def test_calibrate_img(): + centro.calibrate_img() + imgCal = (frame - dark) * flat + assert (relative_array_error(np.array(centro.d_img), imgCal) < precision) + +def test_calibrate_img_validPix(): + centro.calibrate_img_validPix() + valid_mask = np.array(centro.d_validMask) + valid_mask[np.where(valid_mask)] = 1 + imgCal = (frame - dark) * flat * valid_mask + assert (relative_array_error(np.array(centro.d_img), imgCal) < precision) + + +def test_doCentroids_cog(): + bincube = np.array(sup.wfs._wfs.d_wfs[0].d_bincube) + slopes = np.zeros(sup.config.p_wfss[0]._nvalid * 2) + offset = centro.offset + scale = centro.scale + for k in range(sup.config.p_wfss[0]._nvalid): + tmp = center_of_mass(bincube[:, :, k]) + slopes[k] = (tmp[0] - offset) * scale + slopes[k + sup.config.p_wfss[0]._nvalid] = (tmp[1] - offset) * scale + assert (relative_array_error(np.array(control.d_centroids), slopes) < precision) + + +def test_do_control_generic(): + slopes = np.array(control.d_centroids) + gain = control.gain + cmat = np.array(control.d_cmat) + commands = cmat.dot(slopes) * gain * (-1) + assert (relative_array_error(np.array(control.d_com), commands) < precision) + + +def test_set_comRange(): + control.set_comRange(-1, 1) + assert (control.comRange == (-1, 1)) + + +def test_clipping(): + control.set_comRange(-1, 1) + C = (np.random.random(sup.config.p_controllers[0].nactu) - 0.5) * 4 + control.set_com(C, C.size) + rtc.do_clipping(0) + C_clipped = C.copy() + C_clipped[np.where(C > 1)] = 1 + C_clipped[np.where(C < -1)] = -1 + assert (relative_array_error(ng.array(control.d_com_clipped).toarray(), C_clipped) < + precision) + + +def test_add_perturb_voltage(): + C = np.random.random(sup.config.p_controllers[0].nactu) + control.add_perturb_voltage("test", C, 1) + assert (relative_array_error( + ng.array(control.d_perturb_map["test"][0]).toarray(), C) < precision) 
+ + +def test_remove_perturb_voltage(): + control.remove_perturb_voltage("test") + assert (control.d_perturb_map == {}) + + +def test_add_perturb(): + C = np.random.random(sup.config.p_controllers[0].nactu) + control.add_perturb_voltage("test", C, 1) + com = ng.array(control.d_com_clipped).toarray() + control.add_perturb() + assert (relative_array_error(ng.array(control.d_com_clipped).toarray(), com + C) < + precision) + + +def test_disable_perturb_voltage(): + control.disable_perturb_voltage("test") + com = np.array(control.d_com) + control.add_perturb() + assert (relative_array_error(np.array(control.d_com), com) < precision) + + +def test_enable_perturb_voltage(): + control.enable_perturb_voltage("test") + com = ng.array(control.d_com_clipped).toarray() + C = ng.array(control.d_perturb_map["test"][0]).toarray() + control.add_perturb() + assert (relative_array_error(ng.array(control.d_com_clipped).toarray(), com + C) < + precision) + + +def test_reset_perturb_voltage(): + control.reset_perturb_voltage() + assert (control.d_perturb_map == {}) + + +def test_comp_voltage(): + volt_min = -1 + volt_max = 1 + control.set_comRange(volt_min, volt_max) + control.comp_voltage() + C = np.random.random(sup.config.p_controllers[0].nactu) + control.add_perturb_voltage("test", C, 1) + control.set_com(C, C.size) + com0 = ng.array(control.d_circularComs0).toarray() + com1 = ng.array(control.d_circularComs1).toarray() + control.comp_voltage() + delay = sup.config.p_controllers[0].delay + a = delay - int(delay) + b = 1 - a + commands = a * com0 + b * com1 + comPertu = commands + C + comPertu[np.where(comPertu > volt_max)] = volt_max + comPertu[np.where(comPertu < volt_min)] = volt_min + assert (relative_array_error(ng.array(control.d_voltage).toarray(), comPertu) < + precision) + + +def test_remove_centroider(): + rtc._rtc.remove_centroider(0) + assert (rtc._rtc.d_centro == []) diff --git a/tests/pytest/supervisor/test_compassSupervisor.py 
b/tests/pytest/supervisor/test_compassSupervisor.py index 6859c38..d63a670 100644 --- a/tests/pytest/supervisor/test_compassSupervisor.py +++ b/tests/pytest/supervisor/test_compassSupervisor.py @@ -1,11 +1,48 @@ +## @package shesha.tests +## @brief Tests the supervisor module +## @author COMPASS Team +## @version 5.1.0 +## @date 2020/05/18 +## @copyright GNU Lesser General Public License +# +# This file is part of COMPASS +# +# Copyright (C) 2011-2019 COMPASS Team +# All rights reserved. +# Distributed under GNU - LGPL +# +# COMPASS is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser +# General Public License as published by the Free Software Foundation, either version 3 of the License, +# or any later version. +# +# COMPASS: End-to-end AO simulation tool using GPU acceleration +# The COMPASS platform was designed to meet the need of high-performance for the simulation of AO systems. +# +# The final product includes a software package for simulating all the critical subcomponents of AO, +# particularly in the context of the ELT and a real-time core based on several control approaches, +# with performances consistent with its integration into an instrument. Taking advantage of the specific +# hardware architecture of the GPU, the COMPASS tool allows to achieve adequate execution speeds to +# conduct large simulation campaigns called to the ELT. +# +# The COMPASS platform can be used to carry a wide variety of simulations to both testspecific components +# of AO of the E-ELT (such as wavefront analysis device with a pyramid or elongated Laser star), and +# various systems configurations such as multi-conjugate AO. +# +# COMPASS is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the +# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License along with COMPASS. +# If not, see . + from shesha.supervisor.compassSupervisor import CompassSupervisor -from shesha.util.utilities import load_config_from_file +from shesha.config import ParamConfig import os import numpy as np import pytest -config = load_config_from_file(os.getenv("COMPASS_ROOT") + "/shesha/tests/pytest/par/test_pyrhr.py") +config = ParamConfig(os.getenv("COMPASS_ROOT") + "/shesha/tests/pytest/par/test_pyrhr.py") config.p_controllers[0].set_type("generic") sup = CompassSupervisor(config) @@ -21,6 +58,32 @@ def test_loop(): def test_reset(): sup.reset() assert(True) + +# __ _ +# __ ___ _ _ / _(_)__ _ +# / _/ _ \ ' \| _| / _` | +# \__\___/_||_|_| |_\__, | +# |___/ + +def test_get_ipupil(): + ipupil = sup.config.get_pupil("i") + ipupil = sup.config.get_pupil("ipupil") + assert(ipupil is sup.config.p_geom._ipupil) + +def test_get_mpupil(): + mpupil = sup.config.get_pupil("m") + mpupil = sup.config.get_pupil("mpupil") + assert(mpupil is sup.config.p_geom._mpupil) + +def test_get_spupil(): + spupil = sup.config.get_pupil("spupil") + spupil = sup.config.get_pupil("s") + assert(spupil is sup.config.p_geom._spupil) + +def test_export_config(): + aodict, datadict = sup.config.export_config() + assert(True) + # _ ___ # __ _| |_ _ __ ___ ___/ __|___ _ __ _ __ __ _ ______ # / _` | _| ' \/ _ (_-< (__/ _ \ ' \| '_ \/ _` (_-<_-< @@ -494,3 +557,58 @@ def test_do_imat_phase(): def test_compute_modal_residuals(): sup.calibration.compute_modal_residuals(sup.basis.projection_matrix) assert(True) + +# +# ModalGains +# + +def test_set_modal_basis(): + nactu = sup.config.p_controllers[0].nactu + sup.modalgains.set_modal_basis(np.ones((nactu, nactu))) + assert(True) + +def test_get_modal_basis(): + sup.modalgains.get_modal_basis() + assert(True) + +def test_set_cmat_modal(): + nslope = sup.config.p_controllers[0].nslope + nactu = sup.config.p_controllers[0].nactu + 
sup.modalgains.set_cmat_modal(np.ones((nactu, nslope))) + assert(True) + +def test_get_modal_gains(): + sup.modalgains.get_modal_gains() + assert(True) + +def test_set_mask(): + sup.modalgains.set_mask(np.zeros(sup.config.p_controllers[0].nactu)) + assert(True) + +def test_set_initial_gain(): + sup.modalgains.set_initial_gain(1) + assert(True) + +def test_set_config(): + sup.modalgains.set_config(0.1, 0.05, 0.05, 0.0) + assert(True) + +def test_adapt_modal_gains(): + sup.modalgains.adapt_modal_gains(False) + assert(True) + +def test_reset_mgains(): + sup.modalgains.reset_mgains() + assert(True) + +def test_reset_close(): + sup.modalgains.reset_close() + assert(True) + +def test_update_modal_meas(): + sup.modalgains.update_modal_meas() + assert(True) + +def test_update_mgains(): + sup.modalgains.update_mgains() + assert(True) \ No newline at end of file