@@ -1364,10 +1364,13 @@ MapArchiveResult MetaspaceShared::map_archives(FileMapInfo* static_mapinfo, File
     assert(static_mapinfo->mapping_end_offset() == dynamic_mapinfo->mapping_base_offset(), "no gap");
   }
 
-  ReservedSpace archive_space_rs, class_space_rs;
+  ReservedSpace total_space_rs, archive_space_rs, class_space_rs;
   MapArchiveResult result = MAP_ARCHIVE_OTHER_FAILURE;
-  char* mapped_base_address = reserve_address_space_for_archives(static_mapinfo, dynamic_mapinfo,
-                                                                 use_requested_addr, archive_space_rs,
+  char* mapped_base_address = reserve_address_space_for_archives(static_mapinfo,
+                                                                 dynamic_mapinfo,
+                                                                 use_requested_addr,
+                                                                 total_space_rs,
+                                                                 archive_space_rs,
                                                                  class_space_rs);
   if (mapped_base_address == NULL) {
     result = MAP_ARCHIVE_MMAP_FAILURE;
@@ -1417,6 +1420,7 @@ MapArchiveResult MetaspaceShared::map_archives(FileMapInfo* static_mapinfo, File
       // this with use_requested_addr, since we're going to patch all the
       // pointers anyway so there's no benefit to mmap.
       if (use_requested_addr) {
+        assert(!total_space_rs.is_reserved(), "Should not be reserved for Windows");
         log_info(cds)("Windows mmap workaround: releasing archive space.");
         archive_space_rs.release();
       }
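The reasoning in the comment above is that pointer patching touches every page anyway: once each page of a privately mapped archive has been written, copy-on-write has privatized the whole mapping, so reading the file into ordinary memory gives the same end state with no lost sharing. A minimal POSIX sketch of that equivalence (illustrative only, not HotSpot code; a throwaway tmpfile() stands in for an archive):

#include <sys/mman.h>
#include <unistd.h>
#include <cassert>
#include <cstdio>
#include <cstring>

int main() {
  // A small scratch file standing in for an archive region.
  FILE* f = tmpfile();
  assert(f != nullptr);
  char page[4096];
  memset(page, 0xAB, sizeof(page));
  fwrite(page, 1, sizeof(page), f);
  fflush(f);

  // Variant 1: private file mapping, then "patch" every byte.
  // The writes force a copy-on-write copy of every page.
  void* m = mmap(nullptr, sizeof(page), PROT_READ | PROT_WRITE, MAP_PRIVATE, fileno(f), 0);
  assert(m != MAP_FAILED);
  char* mapped = (char*)m;
  for (size_t i = 0; i < sizeof(page); i++) mapped[i] ^= 0xFF;

  // Variant 2: plain read into anonymous memory, then the same patching.
  char buf[4096];
  ssize_t n = pread(fileno(f), buf, sizeof(buf), 0);
  assert(n == (ssize_t)sizeof(buf));
  for (size_t i = 0; i < sizeof(buf); i++) buf[i] ^= 0xFF;

  // Both variants end up with identical, fully private contents.
  assert(memcmp(mapped, buf, sizeof(buf)) == 0);
  munmap(m, sizeof(page));
  fclose(f);
  return 0;
}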
@@ -1472,6 +1476,7 @@ MapArchiveResult MetaspaceShared::map_archives(FileMapInfo* static_mapinfo, File
       // cover both archive and class space.
       address cds_base = (address)static_mapinfo->mapped_base();
       address ccs_end = (address)class_space_rs.end();
+      assert(ccs_end > cds_base, "Sanity check");
       CompressedKlassPointers::initialize(cds_base, ccs_end - cds_base);
 
       // map_heap_regions() compares the current narrow oop and klass encodings
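The new assert guards the precondition of the call right below it: narrow Klass values decode relative to a single base, so the mapped archive and the class space must together form one contiguous encoding range [cds_base, ccs_end), which is only a sane range if ccs_end > cds_base. A self-contained sketch of such a base-plus-shift scheme (illustrative, not the HotSpot implementation; the addresses are hypothetical):

#include <cstdint>
#include <cassert>

// Decoding: klass_address = base + ((uint64_t)narrow_klass << shift).
struct NarrowKlassCodec {
  uint64_t base;
  int      shift;
  uint64_t decode(uint32_t narrow) const { return base + ((uint64_t)narrow << shift); }
  uint32_t encode(uint64_t addr) const {
    assert(addr >= base);                            // must lie above the single base
    assert(((addr - base) >> shift) <= UINT32_MAX);  // and within the encodable range
    return (uint32_t)((addr - base) >> shift);
  }
};

int main() {
  // Hypothetical layout: the archive is mapped at cds_base and the class space
  // ends at ccs_end, directly above it.
  uint64_t cds_base = 0x800000000ULL;
  uint64_t ccs_end  = cds_base + (1ULL << 31);
  assert(ccs_end > cds_base);                        // the precondition added in the patch

  NarrowKlassCodec codec{cds_base, 3};
  uint64_t some_klass = cds_base + 0x1000;           // any Klass inside [cds_base, ccs_end)
  assert(codec.decode(codec.encode(some_klass)) == some_klass);
  return 0;
}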
@@ -1484,7 +1489,7 @@ MapArchiveResult MetaspaceShared::map_archives(FileMapInfo* static_mapinfo, File
   } else {
     unmap_archive(static_mapinfo);
     unmap_archive(dynamic_mapinfo);
-    release_reserved_spaces(archive_space_rs, class_space_rs);
+    release_reserved_spaces(total_space_rs, archive_space_rs, class_space_rs);
   }
 
   return result;
@@ -1533,6 +1538,10 @@ MapArchiveResult MetaspaceShared::map_archives(FileMapInfo* static_mapinfo, File
 // Return:
 //
 // - On success:
+//    - total_space_rs will be reserved as a whole for archive_space_rs and
+//      class_space_rs if UseCompressedClassPointers is true.
+//      On Windows, try to reserve archive_space_rs and class_space_rs
+//      separately first if use_archive_base_addr is true.
 //    - archive_space_rs will be reserved and large enough to host static and
 //      if needed dynamic archive: [Base, A).
 //      archive_space_rs.base and size will be aligned to CDS reserve
@@ -1547,6 +1556,7 @@ MapArchiveResult MetaspaceShared::map_archives(FileMapInfo* static_mapinfo, File
 char* MetaspaceShared::reserve_address_space_for_archives(FileMapInfo* static_mapinfo,
                                                           FileMapInfo* dynamic_mapinfo,
                                                           bool use_archive_base_addr,
+                                                          ReservedSpace& total_space_rs,
                                                           ReservedSpace& archive_space_rs,
                                                           ReservedSpace& class_space_rs) {
 
@@ -1612,34 +1622,53 @@ char* MetaspaceShared::reserve_address_space_for_archives(FileMapInfo* static_ma
       align_up(archive_space_size + gap_size + class_space_size,
                os::vm_allocation_granularity());
 
-  ReservedSpace total_rs;
-  if (base_address != NULL) {
-    // Reserve at the given archive base address, or not at all.
-    total_rs = ReservedSpace(total_range_size, archive_space_alignment,
-                             false /* bool large */, (char*) base_address);
+  assert(total_range_size > ccs_begin_offset, "must be");
+  if (use_windows_memory_mapping() && use_archive_base_addr) {
+    if (base_address != nullptr) {
+      // On Windows, we cannot safely split a reserved memory space into two (see JDK-8255917).
+      // Hence, we optimistically reserve archive space and class space side-by-side. We only
+      // do this for the use_archive_base_addr=true case, since with use_archive_base_addr=false
+      // the caller will not split the combined space for mapping, but will instead read the
+      // archive data via sequential file IO.
+      address ccs_base = base_address + archive_space_size + gap_size;
+      archive_space_rs = ReservedSpace(archive_space_size, archive_space_alignment,
+                                       false /* large */, (char*)base_address);
+      class_space_rs = ReservedSpace(class_space_size, class_space_alignment,
+                                     false /* large */, (char*)ccs_base);
+    }
+    if (!archive_space_rs.is_reserved() || !class_space_rs.is_reserved()) {
+      release_reserved_spaces(total_space_rs, archive_space_rs, class_space_rs);
+      return NULL;
+    }
   } else {
-    // Reserve at any address, but leave it up to the platform to choose a good one.
-    total_rs = Metaspace::reserve_address_space_for_compressed_classes(total_range_size);
-  }
-
-  if (!total_rs.is_reserved()) {
-    return NULL;
-  }
-
-  // Paranoid checks:
-  assert(base_address == NULL || (address)total_rs.base() == base_address,
-         "Sanity (" PTR_FORMAT " vs " PTR_FORMAT ")", p2i(base_address), p2i(total_rs.base()));
-  assert(is_aligned(total_rs.base(), archive_space_alignment), "Sanity");
-  assert(total_rs.size() == total_range_size, "Sanity");
-  assert(CompressedKlassPointers::is_valid_base((address)total_rs.base()), "Sanity");
+    if (use_archive_base_addr && base_address != nullptr) {
+      total_space_rs = ReservedSpace(total_range_size, archive_space_alignment,
+                                     false /* bool large */, (char*) base_address);
+    } else {
+      // Reserve at any address, but leave it up to the platform to choose a good one.
+      total_space_rs = Metaspace::reserve_address_space_for_compressed_classes(total_range_size);
+    }
 
-  // Now split up the space into ccs and cds archive. For simplicity, just leave
-  // the gap reserved at the end of the archive space.
-  archive_space_rs = total_rs.first_part(ccs_begin_offset,
-                                         (size_t)os::vm_allocation_granularity(),
-                                         /*split=*/true);
-  class_space_rs = total_rs.last_part(ccs_begin_offset);
+    if (!total_space_rs.is_reserved()) {
+      return NULL;
+    }
 
+    // Paranoid checks:
+    assert(base_address == NULL || (address)total_space_rs.base() == base_address,
+           "Sanity (" PTR_FORMAT " vs " PTR_FORMAT ")", p2i(base_address), p2i(total_space_rs.base()));
+    assert(is_aligned(total_space_rs.base(), archive_space_alignment), "Sanity");
+    assert(total_space_rs.size() == total_range_size, "Sanity");
+    assert(CompressedKlassPointers::is_valid_base((address)total_space_rs.base()), "Sanity");
+
+    // Now split up the space into ccs and cds archive. For simplicity, just leave
+    // the gap reserved at the end of the archive space. Do not do real splitting.
+    archive_space_rs = total_space_rs.first_part(ccs_begin_offset,
+                                                 (size_t)os::vm_allocation_granularity(),
+                                                 /*split=*/false);
+    class_space_rs = total_space_rs.last_part(ccs_begin_offset);
+    MemTracker::record_virtual_memory_split_reserved(total_space_rs.base(), total_space_rs.size(),
+                                                     ccs_begin_offset);
+  }
   assert(is_aligned(archive_space_rs.base(), archive_space_alignment), "Sanity");
   assert(is_aligned(archive_space_rs.size(), archive_space_alignment), "Sanity");
   assert(is_aligned(class_space_rs.base(), class_space_alignment), "Sanity");
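The Windows-specific branch above exists because of the constraint referenced as JDK-8255917: on Windows a reservation can only be released as a whole, from the exact base address that was originally reserved, so one big reservation cannot later be carved into independently releasable archive and class-space parts. A short Windows-only sketch of that constraint (illustrative, not HotSpot code):

#include <windows.h>
#include <cstdio>

int main() {
  const SIZE_T sz = 64 * 1024 * 1024;
  char* base = (char*)VirtualAlloc(nullptr, sz, MEM_RESERVE, PAGE_NOACCESS);
  if (base == nullptr) return 1;

  // Attempting to release only the second half fails: MEM_RELEASE requires the
  // original base address and dwSize == 0, i.e. whole-reservation granularity.
  BOOL ok = VirtualFree(base + sz / 2, 0, MEM_RELEASE);
  printf("partial release %s (error=%lu)\n", ok ? "succeeded" : "failed", GetLastError());

  // The reservation can only go away in one piece.
  VirtualFree(base, 0, MEM_RELEASE);
  return 0;
}

Reserving archive space and class space side-by-side keeps each part individually releasable, at the cost of the optimistic two-step reservation and the explicit failure path that gives both back.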
@@ -1658,15 +1687,21 @@ char* MetaspaceShared::reserve_address_space_for_archives(FileMapInfo* static_ma
 
 }
 
-void MetaspaceShared::release_reserved_spaces(ReservedSpace& archive_space_rs,
+void MetaspaceShared::release_reserved_spaces(ReservedSpace& total_space_rs,
+                                              ReservedSpace& archive_space_rs,
                                               ReservedSpace& class_space_rs) {
-  if (archive_space_rs.is_reserved()) {
-    log_debug(cds)("Released shared space (archive) " INTPTR_FORMAT, p2i(archive_space_rs.base()));
-    archive_space_rs.release();
-  }
-  if (class_space_rs.is_reserved()) {
-    log_debug(cds)("Released shared space (classes) " INTPTR_FORMAT, p2i(class_space_rs.base()));
-    class_space_rs.release();
+  if (total_space_rs.is_reserved()) {
+    log_debug(cds)("Released shared space (archive + class) " INTPTR_FORMAT, p2i(total_space_rs.base()));
+    total_space_rs.release();
+  } else {
+    if (archive_space_rs.is_reserved()) {
+      log_debug(cds)("Released shared space (archive) " INTPTR_FORMAT, p2i(archive_space_rs.base()));
+      archive_space_rs.release();
+    }
+    if (class_space_rs.is_reserved()) {
+      log_debug(cds)("Released shared space (classes) " INTPTR_FORMAT, p2i(class_space_rs.base()));
+      class_space_rs.release();
+    }
   }
 }
 
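The else-branch mirrors how the space was obtained: when total_space_rs was reserved as one range and archive_space_rs/class_space_rs were merely handed out as its prefix and suffix (first_part/last_part with split=false), there is nothing to release piecemeal, so the whole range is released in a single call. A minimal POSIX sketch of that reserve-once/release-once pattern (illustrative, not HotSpot code):

#include <sys/mman.h>
#include <cassert>
#include <cstddef>

int main() {
  const size_t size      = 64 * 1024 * 1024;
  const size_t split_off = 16 * 1024 * 1024;    // e.g. where the class space begins

  // Reserve address space only; nothing is committed yet.
  void* base = mmap(nullptr, size, PROT_NONE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  assert(base != MAP_FAILED);

  // Hand out two logical views into the same reservation; no OS-level split happens.
  char* archive_part = (char*)base;               // [base, base + split_off)
  char* class_part   = (char*)base + split_off;   // [base + split_off, base + size)
  (void)archive_part;
  (void)class_part;

  // Cleanup is therefore a single release of the whole range.
  munmap(base, size);
  return 0;
}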
@@ -1710,6 +1745,7 @@ void MetaspaceShared::unmap_archive(FileMapInfo* mapinfo) {
   assert(UseSharedSpaces, "must be runtime");
   if (mapinfo != NULL) {
     mapinfo->unmap_regions(archive_regions, archive_regions_count);
+    mapinfo->unmap_region(MetaspaceShared::bm);
     mapinfo->set_is_mapped(false);
   }
 }