Format the whole codebase
Tools authored and franzpoeschel committed Jul 8, 2021
1 parent df8b0fb commit ffbb227
Showing 170 changed files with 21,200 additions and 19,545 deletions.
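
The formatting configuration itself is not part of the excerpt below. Judging only from the visible changes (spaces inside parentheses, square brackets and template angle brackets, 4-space indents, an 80-column limit, sorted includes, collapsed double blank lines), a .clang-format along the following lines would reproduce the same style. This is an inferred sketch, not the project's actual settings:

# Hypothetical .clang-format sketch, inferred from the hunks below.
# The real configuration used for this commit may differ.
BasedOnStyle: LLVM
ColumnLimit: 80                  # long statements wrap at 80 columns
IndentWidth: 4                   # 4-space indentation
AlwaysBreakAfterReturnType: None # "int main()" stays on one line
BreakBeforeBraces: Allman        # function/loop braces on their own line;
                                 # lambda braces stay attached
PointerAlignment: Middle         # "char * argv[]", "auto const & i"
SpaceBeforeParens: Never         # "Series( ... )", "for( ... )"
SpacesInParentheses: true        # spaces inside ( )
SpacesInSquareBrackets: true     # spaces inside [ ]: iterations[ 1 ]
SpacesInAngles: true             # spaces inside < >: loadChunk< double >
SortIncludes: true               # e.g. <cstddef> moves before <iostream>
MaxEmptyLinesToKeep: 1           # double blank lines collapse to one

In the hunks shown here only whitespace, line breaks, and include order change; no statement or string literal changes its meaning.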
3 changes: 1 addition & 2 deletions examples/10_streaming_read.cpp
@@ -8,8 +8,7 @@
 using std::cout;
 using namespace openPMD;
 
-int
-main()
+int main()
 {
 #if openPMD_HAVE_ADIOS2
     using position_t = double;
3 changes: 1 addition & 2 deletions examples/10_streaming_write.cpp
@@ -8,8 +8,7 @@
 using std::cout;
 using namespace openPMD;
 
-int
-main()
+int main()
 {
 #if openPMD_HAVE_ADIOS2
     using position_t = double;
3 changes: 1 addition & 2 deletions examples/12_span_write.cpp
@@ -48,8 +48,7 @@ void span_write( std::string const & filename )
         auto dynamicMemoryView = pos.storeChunk< position_t >(
             Offset{ 0 },
             extent,
-            [ &fallbackBuffer, &fallbackBufferIsUsed ]( size_t size )
-            {
+            [ &fallbackBuffer, &fallbackBufferIsUsed ]( size_t size ) {
                 fallbackBufferIsUsed = true;
                 fallbackBuffer.resize( size );
                 return std::shared_ptr< position_t >(
45 changes: 25 additions & 20 deletions examples/1_structure.cpp
@@ -20,39 +20,44 @@
  */
 #include <openPMD/openPMD.hpp>
 
-
 using namespace openPMD;
 
 int main()
 {
-    /* The root of any openPMD output spans across all data for all iterations is a 'Series'.
+    /* The root of any openPMD output spans across all data for all iterations
+     * is a 'Series'.
      * Data is either in a single file or spread across multiple files. */
-    Series series = Series("../samples/1_structure.h5", Access::CREATE);
+    Series series = Series( "../samples/1_structure.h5", Access::CREATE );
 
-    /* Every element that structures your file (groups and datasets for example) can be annotated with attributes. */
-    series.setComment("This string will show up at the root ('/') of the output with key 'comment'.");
+    /* Every element that structures your file (groups and datasets for example)
+     * can be annotated with attributes. */
+    series.setComment(
+        "This string will show up at the root ('/') of the output with key "
+        "'comment'." );
 
-    /* Access to individual positions inside happens hierarchically, according to the openPMD standard.
-     * Creation of new elements happens on access inside the tree-like structure.
-     * Required attributes are initialized to reasonable defaults for every object. */
-    ParticleSpecies electrons = series.iterations[1].particles["electrons"];
+    /* Access to individual positions inside happens hierarchically, according
+     * to the openPMD standard. Creation of new elements happens on access
+     * inside the tree-like structure. Required attributes are initialized to
+     * reasonable defaults for every object. */
+    ParticleSpecies electrons = series.iterations[ 1 ].particles[ "electrons" ];
 
-    /* Data to be moved from memory to persistent storage is structured into Records,
-     * each holding an unbounded number of RecordComponents.
-     * If a Record only contains a single (scalar) component, it is treated slightly differently.
+    /* Data to be moved from memory to persistent storage is structured into
+     * Records, each holding an unbounded number of RecordComponents. If a
+     * Record only contains a single (scalar) component, it is treated slightly
+     * differently.
      * https://github.com/openPMD/openPMD-standard/blob/latest/STANDARD.md#scalar-vector-and-tensor-records*/
-    Record mass = electrons["mass"];
-    RecordComponent mass_scalar = mass[RecordComponent::SCALAR];
+    Record mass = electrons[ "mass" ];
+    RecordComponent mass_scalar = mass[ RecordComponent::SCALAR ];
 
-    Dataset dataset = Dataset(Datatype::DOUBLE, Extent{1});
-    mass_scalar.resetDataset(dataset);
+    Dataset dataset = Dataset( Datatype::DOUBLE, Extent{ 1 } );
+    mass_scalar.resetDataset( dataset );
 
     /* Required Records and RecordComponents are created automatically.
      * Initialization has to be done explicitly by the user. */
-    electrons["position"]["x"].resetDataset(dataset);
-    electrons["position"]["x"].makeConstant(20.0);
-    electrons["positionOffset"]["x"].resetDataset(dataset);
-    electrons["positionOffset"]["x"].makeConstant(22.0);
+    electrons[ "position" ][ "x" ].resetDataset( dataset );
+    electrons[ "position" ][ "x" ].makeConstant( 20.0 );
+    electrons[ "positionOffset" ][ "x" ].resetDataset( dataset );
+    electrons[ "positionOffset" ][ "x" ].makeConstant( 22.0 );
 
     /* The files in 'series' are still open until the object is destroyed, on
      * which it cleanly flushes and closes all open file handles.
66 changes: 35 additions & 31 deletions examples/2_read_serial.cpp
@@ -20,77 +20,81 @@
  */
 #include <openPMD/openPMD.hpp>
 
+#include <cstddef>
 #include <iostream>
 #include <memory>
-#include <cstddef>
 
-
 using std::cout;
 using namespace openPMD;
 
 int main()
 {
-    Series series = Series(
-        "../samples/git-sample/data%T.h5",
-        Access::READ_ONLY
-    );
-    cout << "Read a Series with openPMD standard version "
-         << series.openPMD() << '\n';
+    Series series =
+        Series( "../samples/git-sample/data%T.h5", Access::READ_ONLY );
+    cout << "Read a Series with openPMD standard version " << series.openPMD()
+         << '\n';
 
-    cout << "The Series contains " << series.iterations.size() << " iterations:";
-    for( auto const& i : series.iterations )
+    cout << "The Series contains " << series.iterations.size()
+         << " iterations:";
+    for( auto const & i : series.iterations )
         cout << "\n\t" << i.first;
     cout << '\n';
 
-    Iteration i = series.iterations[100];
+    Iteration i = series.iterations[ 100 ];
     cout << "Iteration 100 contains " << i.meshes.size() << " meshes:";
-    for( auto const& m : i.meshes )
+    for( auto const & m : i.meshes )
         cout << "\n\t" << m.first;
     cout << '\n';
-    cout << "Iteration 100 contains " << i.particles.size() << " particle species:";
-    for( auto const& ps : i.particles ) {
+    cout << "Iteration 100 contains " << i.particles.size()
+         << " particle species:";
+    for( auto const & ps : i.particles )
+    {
         cout << "\n\t" << ps.first;
-        for( auto const& r : ps.second ) {
+        for( auto const & r : ps.second )
+        {
             cout << "\n\t" << r.first;
             cout << '\n';
         }
     }
 
-    openPMD::ParticleSpecies electrons = i.particles["electrons"];
-    std::shared_ptr<double> charge = electrons["charge"][openPMD::RecordComponent::SCALAR].loadChunk<double>();
+    openPMD::ParticleSpecies electrons = i.particles[ "electrons" ];
+    std::shared_ptr< double > charge =
+        electrons[ "charge" ][ openPMD::RecordComponent::SCALAR ]
+            .loadChunk< double >();
     series.flush();
-    cout << "And the first electron particle has a charge = " << charge.get()[0];
+    cout << "And the first electron particle has a charge = "
+         << charge.get()[ 0 ];
     cout << '\n';
 
-    MeshRecordComponent E_x = i.meshes["E"]["x"];
+    MeshRecordComponent E_x = i.meshes[ "E" ][ "x" ];
     Extent extent = E_x.getExtent();
     cout << "Field E/x has shape (";
-    for( auto const& dim : extent )
+    for( auto const & dim : extent )
         cout << dim << ',';
     cout << ") and has datatype " << E_x.getDatatype() << '\n';
 
-    Offset chunk_offset = {1, 1, 1};
-    Extent chunk_extent = {2, 2, 1};
-    auto chunk_data = E_x.loadChunk<double>(chunk_offset, chunk_extent);
+    Offset chunk_offset = { 1, 1, 1 };
+    Extent chunk_extent = { 2, 2, 1 };
+    auto chunk_data = E_x.loadChunk< double >( chunk_offset, chunk_extent );
     cout << "Queued the loading of a single chunk from disk, "
             "ready to execute\n";
     series.flush();
     cout << "Chunk has been read from disk\n"
          << "Read chunk contains:\n";
-    for( size_t row = 0; row < chunk_extent[0]; ++row )
+    for( size_t row = 0; row < chunk_extent[ 0 ]; ++row )
     {
-        for( size_t col = 0; col < chunk_extent[1]; ++col )
-            cout << "\t"
-                 << '(' << row + chunk_offset[0] << '|' << col + chunk_offset[1] << '|' << 1 << ")\t"
-                 << chunk_data.get()[row*chunk_extent[1]+col];
+        for( size_t col = 0; col < chunk_extent[ 1 ]; ++col )
+            cout << "\t" << '(' << row + chunk_offset[ 0 ] << '|'
+                 << col + chunk_offset[ 1 ] << '|' << 1 << ")\t"
+                 << chunk_data.get()[ row * chunk_extent[ 1 ] + col ];
         cout << '\n';
     }
 
-    auto all_data = E_x.loadChunk<double>();
+    auto all_data = E_x.loadChunk< double >();
    series.flush();
     cout << "Full E/x starts with:\n\t{";
-    for( size_t col = 0; col < extent[1] && col < 5; ++col )
-        cout << all_data.get()[col] << ", ";
+    for( size_t col = 0; col < extent[ 1 ] && col < 5; ++col )
+        cout << all_data.get()[ col ] << ", ";
     cout << "...}\n";
 
     /* The files in 'series' are still open until the object is destroyed, on
32 changes: 16 additions & 16 deletions examples/2a_read_thetaMode_serial.cpp
@@ -20,31 +20,30 @@
  */
 #include <openPMD/openPMD.hpp>
 
+#include <cstddef>
 #include <iostream>
 #include <memory>
-#include <cstddef>
 
-
 using std::cout;
 using namespace openPMD;
 
 int main()
 {
     Series series = Series(
-        "../samples/git-sample/thetaMode/data%T.h5",
-        Access::READ_ONLY
-    );
+        "../samples/git-sample/thetaMode/data%T.h5", Access::READ_ONLY );
 
-    Iteration i = series.iterations[500];
-    MeshRecordComponent E_z_modes = i.meshes["E"]["z"];
-    Extent extent = E_z_modes.getExtent();  // (modal components, r, z)
+    Iteration i = series.iterations[ 500 ];
+    MeshRecordComponent E_z_modes = i.meshes[ "E" ][ "z" ];
+    Extent extent = E_z_modes.getExtent(); // (modal components, r, z)
 
     // read E_z in all modes
-    auto E_z_raw = E_z_modes.loadChunk<double>();
+    auto E_z_raw = E_z_modes.loadChunk< double >();
     // read E_z in mode_0 (one scalar field)
-    auto E_z_m0 = E_z_modes.loadChunk<double>(Offset{0, 0, 0}, Extent{1, extent[1], extent[2]});
+    auto E_z_m0 = E_z_modes.loadChunk< double >(
+        Offset{ 0, 0, 0 }, Extent{ 1, extent[ 1 ], extent[ 2 ] } );
     // read E_z in mode_1 (two fields; skip mode_0 with one scalar field)
-    auto E_z_m1 = E_z_modes.loadChunk<double>(Offset{1, 0, 0}, Extent{2, extent[1], extent[2]});
+    auto E_z_m1 = E_z_modes.loadChunk< double >(
+        Offset{ 1, 0, 0 }, Extent{ 2, extent[ 1 ], extent[ 2 ] } );
     series.flush();
 
     // all this is still mode-decomposed data, not too useful for users
@@ -54,8 +53,8 @@ int main()
     // user change frequency: time ~= component >> theta >> selected modes
     // thetaMode::ToCylindrical toCylindrical("all");
     // thetaMode::ToCylindricalSlice toCylindricalSlice(1.5708, "all")
-    // reconstruction to 2D slice in cylindrical coordinates (r, z) for a fixed theta
-    // E_z_90deg = toCylindricalSlice(E_z_modes).loadChunk<double>();
+    // reconstruction to 2D slice in cylindrical coordinates (r, z) for a fixed
+    // theta E_z_90deg = toCylindricalSlice(E_z_modes).loadChunk<double>();
     // E_r_90deg = toCylindricalSlice(i.meshes["E"]["r"]).loadChunk<double>();
     // E_t_90deg = toCylindricalSlice(i.meshes["E"]["t"]).loadChunk<double>();
     // reconstruction to 3D cylindrical coordinates (r, t, z)
@@ -64,9 +63,10 @@ int main()
 
     // reconstruction to 3D and 2D cartesian: E_x, E_y, E_z
     // thetaMode::ToCylindrical toCartesian({'x': 1.e-6, 'y': 1.e-6}, "all");
-    // ... toCartesianSliceYZ({'x': 1.e-6, 'y': 1.e-6}, 'x', 0., "all"); // and absolute slice position
-    // E_z_xyz = toCartesian(E_z_modes).loadChunk<double>(); # (x, y, z)
-    // E_z_yz = toCartesianSliceYZ(E_z_modes).loadChunk<double>(); # (y, z)
+    // ... toCartesianSliceYZ({'x': 1.e-6, 'y': 1.e-6}, 'x', 0.,
+    // "all"); // and absolute slice position E_z_xyz =
+    // toCartesian(E_z_modes).loadChunk<double>(); # (x, y, z) E_z_yz =
+    // toCartesianSliceYZ(E_z_modes).loadChunk<double>(); # (y, z)
     // series.flush();
 
     /* The files in 'series' are still open until the object is destroyed, on
42 changes: 19 additions & 23 deletions examples/3_write_serial.cpp
@@ -20,54 +20,50 @@
  */
 #include <openPMD/openPMD.hpp>
 
+#include <cstdlib>
 #include <iostream>
 #include <memory>
 #include <numeric>
-#include <cstdlib>
 
-
 using std::cout;
 using namespace openPMD;
 
-int main(int argc, char *argv[])
+int main( int argc, char * argv[] )
 {
     // user input: size of matrix to write, default 3x3
-    size_t size = (argc == 2 ? atoi(argv[1]) : 3);
+    size_t size = ( argc == 2 ? atoi( argv[ 1 ] ) : 3 );
 
     // matrix dataset to write with values 0...size*size-1
-    std::vector<double> global_data(size*size);
-    std::iota(global_data.begin(), global_data.end(), 0.);
+    std::vector< double > global_data( size * size );
+    std::iota( global_data.begin(), global_data.end(), 0. );
 
     cout << "Set up a 2D square array (" << size << 'x' << size
          << ") that will be written\n";
 
     // open file for writing
-    Series series = Series(
-        "../samples/3_write_serial.h5",
-        Access::CREATE
-    );
+    Series series = Series( "../samples/3_write_serial.h5", Access::CREATE );
     cout << "Created an empty " << series.iterationEncoding() << " Series\n";
 
     MeshRecordComponent rho =
-        series
-            .iterations[1]
-            .meshes["rho"][MeshRecordComponent::SCALAR];
-    cout << "Created a scalar mesh Record with all required openPMD attributes\n";
+        series.iterations[ 1 ].meshes[ "rho" ][ MeshRecordComponent::SCALAR ];
+    cout << "Created a scalar mesh Record with all required openPMD "
+            "attributes\n";
 
-    Datatype datatype = determineDatatype(shareRaw(global_data));
-    Extent extent = {size, size};
-    Dataset dataset = Dataset(datatype, extent);
-    cout << "Created a Dataset of size " << dataset.extent[0] << 'x' << dataset.extent[1]
-         << " and Datatype " << dataset.dtype << '\n';
+    Datatype datatype = determineDatatype( shareRaw( global_data ) );
+    Extent extent = { size, size };
+    Dataset dataset = Dataset( datatype, extent );
+    cout << "Created a Dataset of size " << dataset.extent[ 0 ] << 'x'
+         << dataset.extent[ 1 ] << " and Datatype " << dataset.dtype << '\n';
 
-    rho.resetDataset(dataset);
-    cout << "Set the dataset properties for the scalar field rho in iteration 1\n";
+    rho.resetDataset( dataset );
+    cout << "Set the dataset properties for the scalar field rho in iteration "
+            "1\n";
 
     series.flush();
     cout << "File structure and required attributes have been written\n";
 
-    Offset offset = {0, 0};
-    rho.storeChunk(shareRaw(global_data), offset, extent);
+    Offset offset = { 0, 0 };
+    rho.storeChunk( shareRaw( global_data ), offset, extent );
     cout << "Stored the whole Dataset contents as a single chunk, "
            "ready to write content\n";
 
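
A whole-tree reformat like this one is normally produced by running clang-format in place over all tracked sources. The exact invocation is not recorded in this commit, so the following is only an illustrative sketch:

# Illustrative only; the actual command is not part of this commit.
git ls-files '*.hpp' '*.cpp' | xargs clang-format -i

Because the output depends on the clang-format version, such commits usually assume one pinned version so that later changes do not reflow the same lines again.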