diff --git a/immer/extra/archive/champ/traits.hpp b/immer/extra/archive/champ/traits.hpp
index 7d9cba18..05811723 100644
--- a/immer/extra/archive/champ/traits.hpp
+++ b/immer/extra/archive/champ/traits.hpp
@@ -25,7 +25,14 @@ template
 struct container_traits> : champ_traits>
-{};
+{
+    template
+    static auto transform(F&& func)
+    {
+        using U = std::decay_t()))>;
+        return immer::map{};
+    }
+};

 template
 struct archive_type_load
 {
-    typename container_traits::load_archive_t archive = {};
+    using archive_t = typename container_traits::load_archive_t;
+
+    archive_t archive = {};
     std::optional::loader_t> loader;

     archive_type_load() = default;

+    explicit archive_type_load(archive_t archive_)
+        : archive{std::move(archive_)}
+    {
+    }
+
     archive_type_load(const archive_type_load& other)
         : archive{other.archive}
     {
@@ -123,6 +130,47 @@ struct archives_load
     {
         return left.storage == right.storage;
     }
+
+    /**
+     * Return a new archives_load after applying the described transformations
+     * to each archive type.
+     */
+    template
+    auto transform(const ConversionMap& map) const
+    {
+        const auto transform_pair = [&map](const auto& pair) {
+            // If the conversion map doesn't mention the current type, we leave
+            // it as is.
+            using Contains = decltype(hana::contains(map, hana::first(pair)));
+            constexpr bool contains = hana::value();
+            if constexpr (contains) {
+                // Look up the conversion function by the type from the original
+                // archive.
+                const auto& func = map[hana::first(pair)];
+                const auto& archive = hana::second(pair).archive;
+
+                // Each archive defines the transform_archive function that
+                // transforms its leaves with the given function.
+                auto new_archive = transform_archive(archive, func);
+
+                using Container = typename decltype(+hana::first(pair))::type;
+                using NewContainer = std::decay_t<
+                    decltype(container_traits::transform(func))>;
+                return hana::make_pair(
+                    hana::type_c,
+                    archive_type_load{std::move(new_archive)});
+            } else {
+                return pair;
+            }
+        };
+
+        auto new_storage = hana::fold_left(
+            storage, hana::make_map(), [&transform_pair](auto map, auto pair) {
+                return hana::insert(map, transform_pair(pair));
+            });
+        using NewStorage = decltype(new_storage);
+        return archives_load{std::move(new_storage)};
+    }
 };

 inline auto generate_archives_save(auto type_names)
@@ -230,7 +278,7 @@ auto to_json_with_archive(const T& serializable)
 }

 template
-T from_json_with_archive(const std::string& input)
+auto load_archives(const std::string& input)
 {
     using Archives = std::decay_t())))>;

@@ -266,6 +314,32 @@ T from_json_with_archive(const std::string& input)
             prev = archives;
         }
     }
+    return archives;
+}
+
+template
+T from_json_with_archive(const std::string& input)
+{
+    using Archives = std::decay_t())))>;
+    auto archives = load_archives(input);
+
+    auto is = std::istringstream{input};
+    auto ar = immer::archive::json_immer_input_archive{
+        std::move(archives), is};
+    auto r = T{};
+    ar(r);
+    return r;
+}
+
+template
+T from_json_with_archive_with_conversion(const std::string& input,
+                                         const ConversionsMap& map)
+{
+    // Load the archives part for the old type
+    auto archives_old = load_archives(input);
+    auto archives = archives_old.transform(map);
+    using Archives = decltype(archives);

     auto is = std::istringstream{input};
     auto ar = immer::archive::json_immer_input_archive{
diff --git a/immer/extra/archive/rbts/traits.hpp b/immer/extra/archive/rbts/traits.hpp
index 34e3883d..1271cc52 100644
--- a/immer/extra/archive/rbts/traits.hpp
+++ b/immer/extra/archive/rbts/traits.hpp
@@ -16,6 +16,15 @@ struct
container_traits>
     using load_archive_t = rbts::archive_load;
     using loader_t = rbts::vector_loader;
     using container_id = immer::archive::container_id;
+
+    // This function is used to determine the type of the container after
+    // applying some transformation.
+    template
+    static auto transform(F&& func)
+    {
+        using U = std::decay_t()))>;
+        return immer::vector{};
+    }
 };

 template

 #include
 #include
 #include
-
-#include
+#include // to save std::pair
 #include
 #include
+#include
+
 namespace {
 namespace hana = boost::hana;
@@ -29,6 +30,7 @@ namespace hana = boost::hana;
 using test::flex_vector_one;
 using test::test_value;
 using test::vector_one;
+using json_t = nlohmann::json;

 template
 using arch = immer::archive::archivable;
@@ -774,3 +776,711 @@ TEST_CASE("Test non-unique names in the map")
         static_assert(boost::hana::value(), "Names are unique");
     }
 }
+
+namespace {
+using test::new_type;
+using test::old_type;
+
+template
+using map_t = immer::map>;
+
+template
+using table_t =
+    immer::table>;
+
+// Some type that an application would serialize. Contains multiple vectors and
+// maps to demonstrate structural sharing.
+struct old_app_type
+{
+    arch> vec;
+    arch> vec2;
+    arch> map;
+    arch> map2;
+    arch> table;
+
+    template
+    void serialize(Archive& ar)
+    {
+        ar(CEREAL_NVP(vec),
+           CEREAL_NVP(vec2),
+           CEREAL_NVP(map),
+           CEREAL_NVP(map2),
+           CEREAL_NVP(table));
+    }
+};
+
+auto get_archives_types(const old_app_type&)
+{
+    return hana::make_map(
+        hana::make_pair(hana::type_c>,
+                        BOOST_HANA_STRING("vec")),
+        hana::make_pair(hana::type_c>,
+                        BOOST_HANA_STRING("map")),
+        hana::make_pair(hana::type_c>,
+                        BOOST_HANA_STRING("table"))
+
+    );
+}
+
+/**
+ * We want to load the old type and transform it into the new type.
+ *
+ * Loading the old value first and then converting it would not preserve the
+ * structural sharing within the type: converting two vectors that originally
+ * share structure would produce two independent vectors with no sharing
+ * between them.
+ *
+ * Therefore, we apply the transformation to the archives themselves and then
+ * materialize the new containers from the transformed archives, which keeps
+ * the structural sharing intact.
+ *
+ * The new type can't differ much from the old type: its JSON layout must match
+ * the old type's. Each archivable member is serialized as an integer (the
+ * container ID within the archive), so changing the element type works, but we
+ * can't add new members.
+ */
+struct new_app_type
+{
+    arch> vec;
+    arch> vec2;
+    arch> map;
+    arch> map2;
+
+    // Demonstrate a member that we do not upgrade.
+    arch> table;
+
+    template
+    void serialize(Archive& ar)
+    {
+        ar(CEREAL_NVP(vec),
+           CEREAL_NVP(vec2),
+           CEREAL_NVP(map),
+           CEREAL_NVP(map2),
+           CEREAL_NVP(table));
+    }
+};
+
+auto get_archives_types(const new_app_type&)
+{
+    return hana::make_map(
+        hana::make_pair(hana::type_c>,
+                        BOOST_HANA_STRING("vec")),
+        hana::make_pair(hana::type_c>,
+                        BOOST_HANA_STRING("map")),
+        hana::make_pair(hana::type_c>,
+                        BOOST_HANA_STRING("table"))
+
+    );
+}
+} // namespace
+
+TEST_CASE("Test conversion with a special archive")
+{
+    const auto vec1 = test::vector_one{
+        old_type{.data = 123},
+        old_type{.data = 234},
+    };
+    const auto vec2 = vec1.push_back(old_type{.data = 345});
+
+    const auto map1 = [] {
+        auto map = map_t{};
+        for (auto i = 0; i < 30; ++i) {
+            map =
+                std::move(map).set(fmt::format("x{}x", i), old_type{.data = i});
+        }
+        return map;
+    }();
+    const auto map2 = map1.set("345", old_type{.data = 345});
+
+    // Prepare a value of the old type that uses some structural sharing
+    // internally.
+ const auto value = old_app_type{ + .vec = vec1, + .vec2 = vec2, + .map = map1, + .map2 = map2, + .table = + { + old_type{"_51_", 51}, + old_type{"_52_", 52}, + old_type{"_53_", 53}, + }, + }; + const auto [json_str, archives] = + immer::archive::to_json_with_archive(value); + // REQUIRE(json_str == ""); + + // Describe how to go from the old archive to the desired new archive. + const auto archives_conversions = hana::make_map( + hana::make_pair( + // Take this archive + hana::type_c>, + // And apply this conversion function to it + test::convert_old_type), + hana::make_pair(hana::type_c>, test::convert_old_type) + + ); + + // Having a JSON from serializing old_app_type and a conversion function, + // we need to somehow load new_app_type. + const new_app_type full_load = + immer::archive::from_json_with_archive_with_conversion( + json_str, archives_conversions); + + { + REQUIRE(full_load.vec.container == transform_vec(value.vec.container)); + REQUIRE(full_load.vec2.container == + transform_vec(value.vec2.container)); + REQUIRE(full_load.map.container == transform_map(value.map.container)); + REQUIRE(full_load.map2.container == + transform_map(value.map2.container)); + REQUIRE(full_load.table.container == value.table.container); + } + + SECTION( + "Demonstrate that the loaded vectors and maps still share structure") + { + const auto [json_str, archives] = + immer::archive::to_json_with_archive(full_load); + // For example, "x21x" is stored only once. + const auto expected = json_t::parse(R"( +{ + "archives": { + "map": [ + { + "children": [ + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9 + ], + "collisions": false, + "datamap": 2013603464, + "nodemap": 2188935188, + "values": [ + { + "first": "x13x", + "second": { + "data": 13, + "data2": "_13_", + "id": "" + } + }, + { + "first": "x4x", + "second": { + "data": 4, + "data2": "_4_", + "id": "" + } + }, + { + "first": "x22x", + "second": { + "data": 22, + "data2": "_22_", + "id": "" + } + }, + { + "first": "x28x", + "second": { + "data": 28, + "data2": "_28_", + "id": "" + } + }, + { + "first": "x10x", + "second": { + "data": 10, + "data2": "_10_", + "id": "" + } + }, + { + "first": "x12x", + "second": { + "data": 12, + "data2": "_12_", + "id": "" + } + }, + { + "first": "x9x", + "second": { + "data": 9, + "data2": "_9_", + "id": "" + } + }, + { + "first": "x29x", + "second": { + "data": 29, + "data2": "_29_", + "id": "" + } + }, + { + "first": "x6x", + "second": { + "data": 6, + "data2": "_6_", + "id": "" + } + }, + { + "first": "x17x", + "second": { + "data": 17, + "data2": "_17_", + "id": "" + } + }, + { + "first": "x11x", + "second": { + "data": 11, + "data2": "_11_", + "id": "" + } + } + ] + }, + { + "children": [], + "collisions": false, + "datamap": 67125248, + "nodemap": 0, + "values": [ + { + "first": "x21x", + "second": { + "data": 21, + "data2": "_21_", + "id": "" + } + }, + { + "first": "x5x", + "second": { + "data": 5, + "data2": "_5_", + "id": "" + } + } + ] + }, + { + "children": [], + "collisions": false, + "datamap": 32770, + "nodemap": 0, + "values": [ + { + "first": "x25x", + "second": { + "data": 25, + "data2": "_25_", + "id": "" + } + }, + { + "first": "x26x", + "second": { + "data": 26, + "data2": "_26_", + "id": "" + } + } + ] + }, + { + "children": [], + "collisions": false, + "datamap": 65539, + "nodemap": 0, + "values": [ + { + "first": "x8x", + "second": { + "data": 8, + "data2": "_8_", + "id": "" + } + }, + { + "first": "x16x", + "second": { + "data": 16, + "data2": "_16_", + "id": "" + } + }, + { + "first": "x3x", + 
"second": { + "data": 3, + "data2": "_3_", + "id": "" + } + } + ] + }, + { + "children": [], + "collisions": false, + "datamap": 139264, + "nodemap": 0, + "values": [ + { + "first": "x14x", + "second": { + "data": 14, + "data2": "_14_", + "id": "" + } + }, + { + "first": "x18x", + "second": { + "data": 18, + "data2": "_18_", + "id": "" + } + } + ] + }, + { + "children": [], + "collisions": false, + "datamap": 1073742080, + "nodemap": 0, + "values": [ + { + "first": "x23x", + "second": { + "data": 23, + "data2": "_23_", + "id": "" + } + }, + { + "first": "x0x", + "second": { + "data": 0, + "data2": "_0_", + "id": "" + } + } + ] + }, + { + "children": [], + "collisions": false, + "datamap": 2621440, + "nodemap": 0, + "values": [ + { + "first": "x15x", + "second": { + "data": 15, + "data2": "_15_", + "id": "" + } + }, + { + "first": "x24x", + "second": { + "data": 24, + "data2": "_24_", + "id": "" + } + } + ] + }, + { + "children": [], + "collisions": false, + "datamap": 8224, + "nodemap": 0, + "values": [ + { + "first": "x27x", + "second": { + "data": 27, + "data2": "_27_", + "id": "" + } + }, + { + "first": "x1x", + "second": { + "data": 1, + "data2": "_1_", + "id": "" + } + } + ] + }, + { + "children": [], + "collisions": false, + "datamap": 8421376, + "nodemap": 0, + "values": [ + { + "first": "x7x", + "second": { + "data": 7, + "data2": "_7_", + "id": "" + } + }, + { + "first": "x20x", + "second": { + "data": 20, + "data2": "_20_", + "id": "" + } + } + ] + }, + { + "children": [], + "collisions": false, + "datamap": 134234112, + "nodemap": 0, + "values": [ + { + "first": "x19x", + "second": { + "data": 19, + "data2": "_19_", + "id": "" + } + }, + { + "first": "x2x", + "second": { + "data": 2, + "data2": "_2_", + "id": "" + } + } + ] + }, + { + "children": [ + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9 + ], + "collisions": false, + "datamap": 2013619848, + "nodemap": 2188935188, + "values": [ + { + "first": "x13x", + "second": { + "data": 13, + "data2": "_13_", + "id": "" + } + }, + { + "first": "x4x", + "second": { + "data": 4, + "data2": "_4_", + "id": "" + } + }, + { + "first": "x22x", + "second": { + "data": 22, + "data2": "_22_", + "id": "" + } + }, + { + "first": "x28x", + "second": { + "data": 28, + "data2": "_28_", + "id": "" + } + }, + { + "first": "x10x", + "second": { + "data": 10, + "data2": "_10_", + "id": "" + } + }, + { + "first": "x12x", + "second": { + "data": 12, + "data2": "_12_", + "id": "" + } + }, + { + "first": "x9x", + "second": { + "data": 9, + "data2": "_9_", + "id": "" + } + }, + { + "first": "x29x", + "second": { + "data": 29, + "data2": "_29_", + "id": "" + } + }, + { + "first": "x6x", + "second": { + "data": 6, + "data2": "_6_", + "id": "" + } + }, + { + "first": "345", + "second": { + "data": 345, + "data2": "_345_", + "id": "" + } + }, + { + "first": "x17x", + "second": { + "data": 17, + "data2": "_17_", + "id": "" + } + }, + { + "first": "x11x", + "second": { + "data": 11, + "data2": "_11_", + "id": "" + } + } + ] + } + ], + "table": [ + { + "children": [], + "collisions": false, + "datamap": 1544, + "nodemap": 0, + "values": [ + { + "data": 53, + "id": "_53_" + }, + { + "data": 52, + "id": "_52_" + }, + { + "data": 51, + "id": "_51_" + } + ] + } + ], + "vec": { + "inners": [ + { + "key": 0, + "value": { + "children": [], + "relaxed": false + } + }, + { + "key": 2, + "value": { + "children": [ + 1 + ], + "relaxed": false + } + } + ], + "leaves": [ + { + "key": 1, + "value": [ + { + "data": 123, + "data2": "_123_", + "id": "" + }, + { + "data": 234, + 
"data2": "_234_", + "id": "" + } + ] + }, + { + "key": 3, + "value": [ + { + "data": 345, + "data2": "_345_", + "id": "" + } + ] + } + ], + "vectors": [ + { + "root": 0, + "tail": 1 + }, + { + "root": 2, + "tail": 3 + } + ] + } + }, + "value0": { + "map": 0, + "map2": 10, + "table": 0, + "vec": 0, + "vec2": 1 + } +} + )"); + REQUIRE(json_t::parse(json_str) == expected); + } +} diff --git a/test/extra/archive/test_vectors.cpp b/test/extra/archive/test_vectors.cpp index 328d741a..801fcbb9 100644 --- a/test/extra/archive/test_vectors.cpp +++ b/test/extra/archive/test_vectors.cpp @@ -1624,14 +1624,6 @@ TEST_CASE("Test vector archive conversion") REQUIRE(json_t::parse(to_json(ar)) == expected_ar); - const auto transform_vec = [&](const auto& vec) { - auto result = test::vector_one{}; - for (const auto& item : vec) { - result = std::move(result).push_back(convert_old_type(item)); - } - return result; - }; - const auto load_archive = to_load_archive(ar); const auto load_archive_new_type = transform_archive(load_archive, convert_old_type); diff --git a/test/extra/archive/utils.hpp b/test/extra/archive/utils.hpp index d08c90db..807c91ff 100644 --- a/test/extra/archive/utils.hpp +++ b/test/extra/archive/utils.hpp @@ -2,6 +2,7 @@ #include #include +#include #include @@ -93,6 +94,13 @@ struct old_type { ar(CEREAL_NVP(id), CEREAL_NVP(data)); } + + auto tie() const { return std::tie(id, data); } + + friend bool operator==(const old_type& left, const old_type& right) + { + return left.tie() == right.tie(); + } }; struct new_type @@ -124,6 +132,25 @@ inline auto convert_old_type(const old_type& val) }; } +inline auto transform_vec(const auto& vec) +{ + auto result = vector_one{}; + for (const auto& item : vec) { + result = std::move(result).push_back(convert_old_type(item)); + } + return result; +} + +inline auto transform_map(const auto& map) +{ + auto result = immer:: + map>{}; + for (const auto& [key, value] : map) { + result = std::move(result).set(key, convert_old_type(value)); + } + return result; +} + } // namespace test template <>