Fix failing tests with Intel compiler #901

Merged
Merged 22 commits on May 21, 2023

Commits (22)
61602f3
error #7976: An allocatable dummy argument may only be argument assoc…
perazz May 11, 2023
d9de989
enforce Fortran standard to enable LHS reallocation
perazz May 11, 2023
8f1a8f3
fix empty args
perazz May 11, 2023
25b464b
fix input namelist formats
perazz May 11, 2023
e0c86d6
fix SEGFAULT building fpm_publish_settings
perazz May 11, 2023
aca4925
Revert "fix SEGFAULT building fpm_publish_settings"
perazz May 11, 2023
ff1e885
Revert "Revert "fix SEGFAULT building fpm_publish_settings""
perazz May 11, 2023
3d2907b
Revert "fix empty args"
perazz May 11, 2023
77e4570
fix test-manifest routine (segfault unallocated `flags`)
perazz May 11, 2023
c52e840
line too long
perazz May 11, 2023
ca5cb7a
Revert "Revert "fix empty args""
perazz May 11, 2023
e55f7ea
Revert "Revert "Revert "fix SEGFAULT building fpm_publish_settings"""
perazz May 11, 2023
8136af8
make fpm_publish_settings work with both gfortran and intel
perazz May 11, 2023
f661f17
Update fpm_command_line.f90
perazz May 11, 2023
4fb33df
fix bus error returning string
perazz May 11, 2023
6fad38e
fix unallocated variables in non-allocatable dummy arguments
perazz May 11, 2023
d6bc5b2
fix more unallocated strings
perazz May 11, 2023
8287f08
check existing directory: intel compiler fix
perazz May 11, 2023
75b4d9a
fix join_path in dependency with root specified
perazz May 11, 2023
2b090a0
more unallocated strings
perazz May 11, 2023
47df981
fix ifort bug with extended `mock_dependency_tree_t`
perazz May 11, 2023
a0dee9e
Merge branch 'main' into fix_intel_tests
perazz May 15, 2023
35 changes: 20 additions & 15 deletions src/fpm/dependency.f90
@@ -719,40 +719,45 @@ subroutine check_and_read_pkg_data(json, node, download_url, version, error)

integer :: code, stat
type(json_object), pointer :: p, q
character(:), allocatable :: version_key, version_str, error_message
character(:), allocatable :: version_key, version_str, error_message, namespace, name

namespace = ""
name = "UNNAMED_NODE"
if (allocated(node%namespace)) namespace = node%namespace
if (allocated(node%name)) name = node%name

if (.not. json%has_key('code')) then
call fatal_error(error, "Failed to download '"//join_path(node%namespace, node%name)//"': No status code."); return
call fatal_error(error, "Failed to download '"//join_path(namespace, name)//"': No status code."); return
end if

call get_value(json, 'code', code, stat=stat)
if (stat /= 0) then
call fatal_error(error, "Failed to download '"//join_path(node%namespace, node%name)//"': "// &
call fatal_error(error, "Failed to download '"//join_path(namespace, name)//"': "// &
& "Failed to read status code."); return
end if

if (code /= 200) then
if (.not. json%has_key('message')) then
call fatal_error(error, "Failed to download '"//join_path(node%namespace, node%name)//"': No error message."); return
call fatal_error(error, "Failed to download '"//join_path(namespace, name)//"': No error message."); return
end if

call get_value(json, 'message', error_message, stat=stat)
if (stat /= 0) then
call fatal_error(error, "Failed to download '"//join_path(node%namespace, node%name)//"': "// &
call fatal_error(error, "Failed to download '"//join_path(namespace, name)//"': "// &
& "Failed to read error message."); return
end if

call fatal_error(error, "Failed to download '"//join_path(node%namespace, node%name)//"'. Status code: '"// &
call fatal_error(error, "Failed to download '"//join_path(namespace, name)//"'. Status code: '"// &
& str(code)//"'. Error message: '"//error_message//"'."); return
end if

if (.not. json%has_key('data')) then
call fatal_error(error, "Failed to download '"//join_path(node%namespace, node%name)//"': No data."); return
call fatal_error(error, "Failed to download '"//join_path(namespace, name)//"': No data."); return
end if

call get_value(json, 'data', p, stat=stat)
if (stat /= 0) then
call fatal_error(error, "Failed to read package data for '"//join_path(node%namespace, node%name)//"'."); return
call fatal_error(error, "Failed to read package data for '"//join_path(namespace, name)//"'."); return
end if

if (allocated(node%requested_version)) then
@@ -762,38 +767,38 @@ subroutine check_and_read_pkg_data(json, node, download_url, version, error)
end if

if (.not. p%has_key(version_key)) then
call fatal_error(error, "Failed to download '"//join_path(node%namespace, node%name)//"': No version data."); return
call fatal_error(error, "Failed to download '"//join_path(namespace, name)//"': No version data."); return
end if

call get_value(p, version_key, q, stat=stat)
if (stat /= 0) then
call fatal_error(error, "Failed to retrieve version data for '"//join_path(node%namespace, node%name)//"'."); return
call fatal_error(error, "Failed to retrieve version data for '"//join_path(namespace, name)//"'."); return
end if

if (.not. q%has_key('download_url')) then
call fatal_error(error, "Failed to download '"//join_path(node%namespace, node%name)//"': No download url."); return
call fatal_error(error, "Failed to download '"//join_path(namespace, name)//"': No download url."); return
end if

call get_value(q, 'download_url', download_url, stat=stat)
if (stat /= 0) then
call fatal_error(error, "Failed to read download url for '"//join_path(node%namespace, node%name)//"'."); return
call fatal_error(error, "Failed to read download url for '"//join_path(namespace, name)//"'."); return
end if

download_url = official_registry_base_url//download_url

if (.not. q%has_key('version')) then
call fatal_error(error, "Failed to download '"//join_path(node%namespace, node%name)//"': No version found."); return
call fatal_error(error, "Failed to download '"//join_path(namespace, name)//"': No version found."); return
end if

call get_value(q, 'version', version_str, stat=stat)
if (stat /= 0) then
call fatal_error(error, "Failed to read version data for '"//join_path(node%namespace, node%name)//"'."); return
call fatal_error(error, "Failed to read version data for '"//join_path(namespace, name)//"'."); return
end if

call new_version(version, version_str, error)
if (allocated(error)) then
call fatal_error(error, "'"//version_str//"' is not a valid version for '"// &
& join_path(node%namespace, node%name)//"'."); return
& join_path(namespace, name)//"'."); return
end if
end subroutine

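The pattern above copies the possibly-unallocated `node%namespace` and `node%name` components into locals with safe defaults before handing them to `join_path`, because ifort aborts when an unallocated deferred-length string is passed to a non-allocatable `character(*)` dummy (gfortran happens to tolerate it). A minimal standalone sketch of that guard, using illustrative names rather than fpm's types:

```fortran
program unallocated_arg_guard
   implicit none
   character(:), allocatable :: namespace, name

   ! Left unallocated on purpose, as node%namespace and node%name may be
   ! when a registry node is only partially populated.
   call report(safe(namespace), safe(name))

contains

   !> Return the value if allocated, otherwise a defined fallback, so routines
   !> with character(*) dummies never receive an unallocated actual argument.
   function safe(s) result(val)
      character(:), allocatable, intent(in) :: s
      character(:), allocatable :: val
      if (allocated(s)) then
         val = s
      else
         val = ''
      end if
   end function safe

   subroutine report(namespace, name)
      character(*), intent(in) :: namespace, name   ! non-allocatable dummies
      print *, 'node: ', namespace, '/', name
   end subroutine report

end program unallocated_arg_guard
```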
7 changes: 6 additions & 1 deletion src/fpm/git.f90
@@ -5,7 +5,10 @@ module fpm_git
implicit none

public :: git_target_t, git_target_default, git_target_branch, git_target_tag, git_target_revision, git_revision, &
& git_archive, git_matches_manifest, operator(==)
& git_archive, git_matches_manifest, operator(==), compressed_package_name

!> Name of the compressed package that is generated temporarily.
character(len=*), parameter :: compressed_package_name = 'compressed_package'

!> Possible git target
type :: enum_descriptor
@@ -162,6 +165,8 @@ logical function git_matches_manifest(cached,manifest,verbosity,iunit)
!> while the cached dependency always stores a commit hash because it's built
!> after the repo is available (saved as git_descriptor%revision==revision).
!> So, comparing against the descriptor is not reliable
git_matches_manifest = allocated(cached%object) .eqv. allocated(manifest%object)
if (git_matches_manifest .and. allocated(cached%object)) &
git_matches_manifest = cached%object == manifest%object
if (.not.git_matches_manifest) then
if (verbosity>1) write(iunit,out_fmt) "GIT OBJECT has changed: ",cached%object," vs. ", manifest%object
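The new `git_matches_manifest` logic first compares allocation status and only compares the strings when both sides are allocated, since evaluating `cached%object == manifest%object` with an unallocated operand is non-conforming and traps under ifort. A reduced sketch of that allocation-aware comparison (illustrative, not fpm code):

```fortran
program alloc_aware_compare
   implicit none
   character(:), allocatable :: a, b
   a = 'object-hash'
   ! b stays unallocated: the two differ by allocation status alone
   print *, same_string(a, b)    ! prints F without ever referencing b's value

contains

   logical function same_string(x, y)
      character(:), allocatable, intent(in) :: x, y
      ! Equal only if both sides have the same allocation status ...
      same_string = allocated(x) .eqv. allocated(y)
      ! ... and, when allocated, the same value.
      if (same_string .and. allocated(x)) same_string = x == y
   end function same_string

end program alloc_aware_compare
```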
4 changes: 2 additions & 2 deletions src/fpm/manifest/dependency.f90
@@ -27,7 +27,7 @@ module fpm_manifest_dependency
use fpm_git, only: git_target_t, git_target_tag, git_target_branch, &
& git_target_revision, git_target_default, operator(==), git_matches_manifest
use fpm_toml, only: toml_table, toml_key, toml_stat, get_value, check_keys
use fpm_filesystem, only: windows_path
use fpm_filesystem, only: windows_path, join_path
use fpm_environment, only: get_os_type, OS_WINDOWS
use fpm_versioning, only: version_t, new_version
implicit none
@@ -94,7 +94,7 @@ subroutine new_dependency(self, table, root, error)
call get_value(table, "path", uri)
if (allocated(uri)) then
if (get_os_type() == OS_WINDOWS) uri = windows_path(uri)
if (present(root)) uri = root//uri ! Relative to the fpm.toml it’s written in
if (present(root)) uri = join_path(root,uri) ! Relative to the fpm.toml it’s written in
call move_alloc(uri, self%path)
return
end if
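Replacing `root//uri` with `join_path(root, uri)` matters because plain concatenation drops the separator whenever `root` does not already end with one, producing a path like `my_projectdep`. A toy illustration of the difference, where `simple_join` is only a stand-in for fpm's actual `join_path` (which also handles Windows separators):

```fortran
program join_path_demo
   implicit none
   print *, 'my_project' // 'dep'              ! my_projectdep  (separator lost)
   print *, simple_join('my_project', 'dep')   ! my_project/dep

contains

   !> Concatenate two path components, adding '/' only when needed.
   function simple_join(a, b) result(path)
      character(*), intent(in) :: a, b
      character(:), allocatable :: path
      if (len(a) > 0 .and. a(len(a):len(a)) /= '/') then
         path = a // '/' // b
      else
         path = a // b
      end if
   end function simple_join

end program join_path_demo
```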
60 changes: 30 additions & 30 deletions src/fpm/manifest/profiles.f90
@@ -53,7 +53,7 @@ module fpm_manifest_profile
& info_profile, find_profile, DEFAULT_COMPILER

!> Name of the default compiler
character(len=*), parameter :: DEFAULT_COMPILER = 'gfortran'
character(len=*), parameter :: DEFAULT_COMPILER = 'gfortran'
integer, parameter :: OS_ALL = -1
character(len=:), allocatable :: path

@@ -78,7 +78,7 @@ module fpm_manifest_profile

!> Value repesenting OS
integer :: os_type

!> Fortran compiler flags
character(len=:), allocatable :: flags

@@ -110,16 +110,16 @@ module fpm_manifest_profile
function new_profile(profile_name, compiler, os_type, flags, c_flags, cxx_flags, &
link_time_flags, file_scope_flags, is_built_in) &
& result(profile)

!> Name of the profile
character(len=*), intent(in) :: profile_name

!> Name of the compiler
character(len=*), intent(in) :: compiler

!> Type of the OS
integer, intent(in) :: os_type

!> Fortran compiler flags
character(len=*), optional, intent(in) :: flags

@@ -190,7 +190,7 @@ subroutine validate_compiler_name(compiler_name, is_valid)
is_valid = .false.
end select
end subroutine validate_compiler_name

!> Check if os_name is a valid name of a supported OS
subroutine validate_os_name(os_name, is_valid)

@@ -373,10 +373,10 @@ subroutine get_flags(profile_name, compiler_name, os_type, key_list, table, prof
& flags, c_flags, cxx_flags, link_time_flags, file_scope_flags)
profindex = profindex + 1
end subroutine get_flags

!> Traverse operating system tables to obtain number of profiles
subroutine traverse_oss_for_size(profile_name, compiler_name, os_list, table, profiles_size, error)

!> Name of profile
character(len=:), allocatable, intent(in) :: profile_name

@@ -447,7 +447,7 @@ end subroutine traverse_oss_for_size

!> Traverse operating system tables to obtain profiles
subroutine traverse_oss(profile_name, compiler_name, os_list, table, profiles, profindex, error)

!> Name of profile
character(len=:), allocatable, intent(in) :: profile_name

@@ -468,7 +468,7 @@ subroutine traverse_oss(profile_name, compiler_name, os_list, table, profiles, p

!> Index in the list of profiles
integer, intent(inout) :: profindex

type(toml_key), allocatable :: key_list(:)
character(len=:), allocatable :: os_name, l_os_name
type(toml_table), pointer :: os_node
@@ -513,7 +513,7 @@ end subroutine traverse_oss

!> Traverse compiler tables
subroutine traverse_compilers(profile_name, comp_list, table, error, profiles_size, profiles, profindex)

!> Name of profile
character(len=:), allocatable, intent(in) :: profile_name

@@ -522,10 +522,10 @@ subroutine traverse_compilers(profile_name, comp_list, table, error, profiles_si

!> Table containing compiler tables
type(toml_table), pointer, intent(in) :: table

!> Error handling
type(error_t), allocatable, intent(out) :: error

!> Number of profiles in list of profiles
integer, intent(inout), optional :: profiles_size

@@ -534,8 +534,8 @@ subroutine traverse_compilers(profile_name, comp_list, table, error, profiles_si

!> Index in the list of profiles
integer, intent(inout), optional :: profindex
character(len=:), allocatable :: compiler_name

character(len=:), allocatable :: compiler_name
type(toml_table), pointer :: comp_node
type(toml_key), allocatable :: os_list(:)
integer :: icomp, stat
@@ -544,7 +544,7 @@ subroutine traverse_compilers(profile_name, comp_list, table, error, profiles_si
if (size(comp_list)<1) return
do icomp = 1, size(comp_list)
call validate_compiler_name(comp_list(icomp)%key, is_valid)
if (is_valid) then
if (is_valid) then
compiler_name = comp_list(icomp)%key
call get_value(table, compiler_name, comp_node, stat=stat)
if (stat /= toml_stat%success) then
@@ -567,7 +567,7 @@ subroutine traverse_compilers(profile_name, comp_list, table, error, profiles_si
else
call fatal_error(error,'*traverse_compilers*:Error: Compiler name not specified or invalid.')
end if
end do
end do
end subroutine traverse_compilers

!> Construct new profiles array from a TOML data structure
@@ -596,9 +596,9 @@ subroutine new_profiles(profiles, table, error)
default_profiles = get_default_profiles(error)
if (allocated(error)) return
call table%get_keys(prof_list)

if (size(prof_list) < 1) return

profiles_size = 0

do iprof = 1, size(prof_list)
@@ -633,7 +633,7 @@ subroutine new_profiles(profiles, table, error)

profiles_size = profiles_size + size(default_profiles)
allocate(profiles(profiles_size))

do profindex=1, size(default_profiles)
profiles(profindex) = default_profiles(profindex)
end do
@@ -719,25 +719,25 @@ function get_default_profiles(error) result(default_profiles)
& 'ifort', &
& OS_ALL, &
& flags = ' -fp-model precise -pc64 -align all -error-limit 1 -reentrancy&
& threaded -nogen-interfaces -assume byterecl', &
& threaded -nogen-interfaces -assume byterecl -standard-semantics', &
& is_built_in=.true.), &
& new_profile('release', &
& 'ifort', &
& OS_WINDOWS, &
& flags = ' /fp:precise /align:all /error-limit:1 /reentrancy:threaded&
& /nogen-interfaces /assume:byterecl', &
& /nogen-interfaces /assume:byterecl /standard-semantics', &
& is_built_in=.true.), &
& new_profile('release', &
& 'ifx', &
& OS_ALL, &
& flags = ' -fp-model=precise -pc64 -align all -error-limit 1 -reentrancy&
& threaded -nogen-interfaces -assume byterecl', &
& threaded -nogen-interfaces -assume byterecl -standard-semantics', &
& is_built_in=.true.), &
& new_profile('release', &
& 'ifx', &
& OS_WINDOWS, &
& flags = ' /fp:precise /align:all /error-limit:1 /reentrancy:threaded&
& /nogen-interfaces /assume:byterecl', &
& /nogen-interfaces /assume:byterecl /standard-semantics', &
& is_built_in=.true.), &
& new_profile('release', &
&'nagfor', &
@@ -775,28 +775,28 @@ function get_default_profiles(error) result(default_profiles)
& new_profile('debug', &
& 'ifort', &
& OS_ALL, &
& flags = ' -warn all -check all -error-limit 1 -O0 -g -assume byterecl -traceback', &
& flags = ' -warn all -check all -error-limit 1 -O0 -g -assume byterecl -standard-semantics -traceback', &
& is_built_in=.true.), &
& new_profile('debug', &
& 'ifort', &
& OS_WINDOWS, &
& flags = ' /warn:all /check:all /error-limit:1&
& /Od /Z7 /assume:byterecl /traceback', &
& /Od /Z7 /assume:byterecl /standard-semantics /traceback', &
& is_built_in=.true.), &
& new_profile('debug', &
& 'ifx', &
& OS_ALL, &
& flags = ' -warn all -check all -error-limit 1 -O0 -g -assume byterecl -traceback', &
& flags = ' -warn all -check all -error-limit 1 -O0 -g -assume byterecl -standard-semantics -traceback', &
& is_built_in=.true.), &
& new_profile('debug', &
& 'ifx', &
& OS_WINDOWS, &
& flags = ' /warn:all /check:all /error-limit:1 /Od /Z7 /assume:byterecl', &
& flags = ' /warn:all /check:all /error-limit:1 /Od /Z7 /assume:byterecl /standard-semantics', &
& is_built_in=.true.), &
& new_profile('debug', &
& 'ifx', &
& OS_WINDOWS, &
& flags = ' /warn:all /check:all /error-limit:1 /Od /Z7 /assume:byterecl', &
& flags = ' /warn:all /check:all /error-limit:1 /Od /Z7 /assume:byterecl /standard-semantics', &
& is_built_in=.true.), &
& new_profile('debug', &
& 'lfortran', &
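The only functional change to the built-in profiles is appending `-standard-semantics` (or `/standard-semantics` on Windows) to the ifort and ifx flag sets; per the commit message, this enforces the standard behaviour fpm relies on, notably left-hand-side reallocation on assignment. A tiny example of the behaviour that setting guarantees:

```fortran
program lhs_realloc
   implicit none
   character(:), allocatable :: s
   integer, allocatable :: v(:)

   ! With standard semantics, intrinsic assignment (re)allocates the
   ! left-hand side to fit the right-hand side; fpm assumes this throughout.
   s = 'short'
   s = 'a much longer value'     ! s is reallocated to len=19
   v = [1, 2, 3]
   v = [v, 4]                    ! v grows to size 4

   print *, len(s), size(v)
end program lhs_realloc
```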
12 changes: 6 additions & 6 deletions src/fpm_command_line.f90
@@ -218,10 +218,9 @@ subroutine get_command_line_settings(cmd_settings)
integer :: os
logical :: is_unix
type(fpm_install_settings), allocatable :: install_settings
type(fpm_publish_settings), allocatable :: publish_settings
type(version_t) :: version
character(len=:), allocatable :: common_args, compiler_args, run_args, working_dir, &
& c_compiler, cxx_compiler, archiver, version_s
& c_compiler, cxx_compiler, archiver, version_s, token_s

character(len=*), parameter :: fc_env = "FC", cc_env = "CC", ar_env = "AR", &
& fflags_env = "FFLAGS", cflags_env = "CFLAGS", cxxflags_env = "CXXFLAGS", ldflags_env = "LDFLAGS", &
@@ -633,8 +632,10 @@ subroutine get_command_line_settings(cmd_settings)
c_compiler = sget('c-compiler')
cxx_compiler = sget('cxx-compiler')
archiver = sget('archiver')
token_s = sget('token')

allocate(publish_settings, source=fpm_publish_settings( &
allocate(fpm_publish_settings :: cmd_settings)
cmd_settings = fpm_publish_settings( &
& show_package_version = lget('show-package-version'), &
& show_form_data = lget('show-form-data'), &
& profile=val_profile,&
@@ -650,9 +651,8 @@ subroutine get_command_line_settings(cmd_settings)
& list=lget('list'),&
& show_model=lget('show-model'),&
& build_tests=lget('tests'),&
& verbose=lget('verbose')))
call get_char_arg(publish_settings%token, 'token')
call move_alloc(publish_settings, cmd_settings)
& verbose=lget('verbose'),&
& token=token_s)

case default

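For the publish settings, the sourced `allocate(publish_settings, source=fpm_publish_settings(...))` followed by `move_alloc` is replaced by a typed `allocate` plus polymorphic assignment, and the token is read into a local `token_s` first; the original pattern segfaulted when ifort constructed a derived type with many allocatable components. A reduced sketch of the adopted pattern, with a made-up `settings_t` standing in for `fpm_publish_settings`:

```fortran
program typed_alloc_then_assign
   implicit none

   type :: settings_t
      character(:), allocatable :: token
      logical :: verbose = .false.
   end type settings_t

   class(settings_t), allocatable :: cmd_settings

   ! Allocate the polymorphic result with an explicit type spec first, then
   ! fill it by intrinsic (polymorphic) assignment, instead of sourced
   ! allocation into a temporary followed by move_alloc, which is the
   ! combination that crashed under ifort in this PR's CI.
   allocate(settings_t :: cmd_settings)
   cmd_settings = settings_t(token='abc', verbose=.true.)

   print *, cmd_settings%verbose, cmd_settings%token
end program typed_alloc_then_assign
```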