diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index 6b6c5529..d13a2f75 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -21,7 +21,7 @@ jobs:
       fail-fast: false
       matrix:
         version:
-          - '1.9'
+          - '1.10'
         os:
           - ubuntu-latest
         arch:
@@ -70,7 +70,7 @@ jobs:
           rm -rf $ROOT_DIR/$TAR_FILE $SOURCES_DIR
           cd $CURR_DIR
       - name: Install petsc
-        ##if: steps.cache-petsc.outputs.cache-hit != 'true'
+        ## if: steps.cache-petsc.outputs.cache-hit != 'true'
        run: |
          CURR_DIR=$(pwd)
          PACKAGE=petsc
diff --git a/Project.toml b/Project.toml
index e64b78ea..27756560 100644
--- a/Project.toml
+++ b/Project.toml
@@ -9,9 +9,6 @@ BlockArrays = "8e7c35d0-a365-5155-bbbb-fb81a777f24e"
 FillArrays = "1a297f60-69ca-5386-bcde-b61e274b549b"
 Gridap = "56d4f2e9-7ea1-5844-9cf6-b9c51ca7ce8e"
 GridapDistributed = "f9701e48-63b3-45aa-9a63-9bc6c271f355"
-GridapP4est = "c2c8e14b-f5fd-423d-9666-1dd9ad120af9"
-GridapPETSc = "bcdc36c2-0c3e-11ea-095a-c9dadae499f1"
-IterativeSolvers = "42fd0dbc-a981-5370-80f2-aaf504508153"
 LineSearches = "d3d80556-e9d4-5f37-9878-2ab0fcc64255"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 MPI = "da04e1cc-30fd-572f-bb4f-1f8673147195"
@@ -21,6 +18,16 @@ Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
 SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
 SparseMatricesCSR = "a0a7dd2c-ebf4-11e9-1f05-cf50bc540ca1"

+[weakdeps]
+GridapP4est = "c2c8e14b-f5fd-423d-9666-1dd9ad120af9"
+GridapPETSc = "bcdc36c2-0c3e-11ea-095a-c9dadae499f1"
+IterativeSolvers = "42fd0dbc-a981-5370-80f2-aaf504508153"
+
+[extensions]
+GridapP4estExt = "GridapP4est"
+GridapPETScExt = "GridapPETSc"
+IterativeSolversExt = "IterativeSolvers"
+
 [compat]
 AbstractTrees = "0.4"
 BlockArrays = "1"
@@ -35,7 +42,7 @@ MPI = "0.20"
 NLsolve = "4.3.0"
 PartitionedArrays = "0.3"
 SparseMatricesCSR = "0.6.7"
-julia = "1.7"
+julia = "1.9"

 [extras]
 MPIPreferences = "3da0fdf6-3ccc-4f1b-acd9-58baa6c99267"
diff --git a/ext/GridapP4estExt/GridapP4estExt.jl b/ext/GridapP4estExt/GridapP4estExt.jl
new file mode 100644
index 00000000..5f612f7c
--- /dev/null
+++ b/ext/GridapP4estExt/GridapP4estExt.jl
@@ -0,0 +1,57 @@
+module GridapP4estExt
+
+using Gridap
+using GridapP4est
+
+export P4estCartesianModelHierarchy
+
+"""
+  P4estCartesianModelHierarchy(
+    ranks,np_per_level,domain,nc::NTuple{D,<:Integer};
+    num_refs_coarse::Integer = 0,
+    add_labels!::Function = (labels -> nothing),
+    map::Function = identity,
+    isperiodic::NTuple{D,Bool} = Tuple(fill(false,D))
+  ) where D
+
+  Returns a `ModelHierarchy` with a Cartesian model as coarsest level, using GridapP4est.jl.
+  The i-th level will be distributed among `np_per_level[i]` processors.
+  The seed model is given by `cmodel = CartesianDiscreteModel(domain,nc)`.
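+
+  A minimal usage sketch (the values below are illustrative; `ranks` is assumed to be a
+  PartitionedArrays rank array with `np_per_level[1]` parts):
+
+      np_per_level = [4,2,1]   # processes per level, coarsest level last
+      mh = P4estCartesianModelHierarchy(ranks,np_per_level,(0,1,0,1),(8,8))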
+"""
+function P4estCartesianModelHierarchy(
+  ranks,np_per_level,domain,nc::NTuple{D,<:Integer};
+  num_refs_coarse::Integer = 0,
+  add_labels!::Function = (labels -> nothing),
+  map::Function = identity,
+  isperiodic::NTuple{D,Bool} = Tuple(fill(false,D))
+) where D
+  cparts = generate_subparts(ranks,np_per_level[end])
+  cmodel = CartesianDiscreteModel(domain,nc;map,isperiodic)
+  add_labels!(get_face_labeling(cmodel))
+
+  coarse_model = OctreeDistributedDiscreteModel(cparts,cmodel,num_refs_coarse)
+  mh = ModelHierarchy(ranks,coarse_model,np_per_level)
+  return mh
+end
+
+function GridapDistributed.DistributedAdaptedDiscreteModel(
+  model  :: GridapP4est.OctreeDistributedDiscreteModel,
+  parent :: GridapDistributed.DistributedDiscreteModel,
+  glue   :: AbstractArray{<:Gridap.Adaptivity.AdaptivityGlue};
+)
+  GridapDistributed.DistributedAdaptedDiscreteModel(
+    model.dmodel,parent,glue
+  )
+end
+
+function GridapDistributed.DistributedAdaptedDiscreteModel(
+  model  :: GridapP4est.OctreeDistributedDiscreteModel,
+  parent :: GridapP4est.OctreeDistributedDiscreteModel,
+  glue   :: AbstractArray{<:Gridap.Adaptivity.AdaptivityGlue};
+)
+  GridapDistributed.DistributedAdaptedDiscreteModel(
+    model.dmodel,parent.dmodel,glue
+  )
+end
+
+end # module
\ No newline at end of file
diff --git a/src/LinearSolvers/PETSc/ElasticitySolvers.jl b/ext/GridapPETScExt/ElasticitySolvers.jl
similarity index 100%
rename from src/LinearSolvers/PETSc/ElasticitySolvers.jl
rename to ext/GridapPETScExt/ElasticitySolvers.jl
diff --git a/ext/GridapPETScExt/GridapPETScExt.jl b/ext/GridapPETScExt/GridapPETScExt.jl
new file mode 100644
index 00000000..eb5f3027
--- /dev/null
+++ b/ext/GridapPETScExt/GridapPETScExt.jl
@@ -0,0 +1,25 @@
+module GridapPETScExt
+
+using LinearAlgebra
+using SparseArrays
+using SparseMatricesCSR
+
+using Gridap
+using Gridap.Helpers, Gridap.Algebra, Gridap.CellData
+using Gridap.Arrays, Gridap.FESpaces, Gridap.MultiField
+
+using GridapDistributed
+using PartitionedArrays
+using GridapPETSc
+
+using GridapSolvers
+using GridapSolvers.MultilevelTools
+using GridapSolvers.SolverInterfaces
+using GridapSolvers.PatchBasedSmoothers
+
+include("PETScUtils.jl")
+include("PETScCaches.jl")
+include("ElasticitySolvers.jl")
+include("HipmairXuSolvers.jl")
+
+end # module
\ No newline at end of file
diff --git a/src/LinearSolvers/PETSc/HipmairXuSolvers.jl b/ext/GridapPETScExt/HipmairXuSolvers.jl
similarity index 100%
rename from src/LinearSolvers/PETSc/HipmairXuSolvers.jl
rename to ext/GridapPETScExt/HipmairXuSolvers.jl
diff --git a/src/LinearSolvers/PETSc/PETScCaches.jl b/ext/GridapPETScExt/PETScCaches.jl
similarity index 63%
rename from src/LinearSolvers/PETSc/PETScCaches.jl
rename to ext/GridapPETScExt/PETScCaches.jl
index 9ba88d3b..7e81410f 100644
--- a/src/LinearSolvers/PETSc/PETScCaches.jl
+++ b/ext/GridapPETScExt/PETScCaches.jl
@@ -51,3 +51,38 @@ end
 function Algebra.numerical_setup!(ns::CachedPETScNS,mat::AbstractMatrix,x::AbstractVector)
   numerical_setup!(ns.ns,mat,x)
 end
+
+############################################################################################
+# Optimisations for GridapSolvers + PETSc
+
+function LinearSolvers.gmg_coarse_solver_caches(
+  solver::PETScLinearSolver,
+  smatrices::AbstractVector{<:AbstractMatrix},
+  work_vectors
+)
+  nlevs = num_levels(smatrices)
+  with_level(smatrices,nlevs) do AH
+    _, _, dxH, rH = work_vectors[nlevs-1]
+    cache = CachedPETScNS(
+      numerical_setup(symbolic_setup(solver, AH), AH), dxH, rH
+    )
+    return cache
+  end
+end
+
+function LinearSolvers.gmg_coarse_solver_caches(
+  solver::PETScLinearSolver,
+  smatrices::AbstractVector{<:AbstractMatrix},
+  svectors::AbstractVector{<:AbstractVector},
+  work_vectors
+)
+  nlevs = num_levels(smatrices)
+  with_level(smatrices,nlevs) do AH
+    _, _, dxH, rH = work_vectors[nlevs-1]
+    xH = svectors[nlevs]
+    cache = CachedPETScNS(
+      numerical_setup(symbolic_setup(solver, AH, xH), AH, xH), dxH, rH
+    )
+    return cache
+  end
+end
diff --git a/src/LinearSolvers/PETSc/PETScUtils.jl b/ext/GridapPETScExt/PETScUtils.jl
similarity index 100%
rename from src/LinearSolvers/PETSc/PETScUtils.jl
rename to ext/GridapPETScExt/PETScUtils.jl
diff --git a/src/LinearSolvers/IterativeLinearSolvers.jl b/ext/IterativeSolversExt.jl
similarity index 65%
rename from src/LinearSolvers/IterativeLinearSolvers.jl
rename to ext/IterativeSolversExt.jl
index 22d13e6b..36ea9145 100644
--- a/src/LinearSolvers/IterativeLinearSolvers.jl
+++ b/ext/IterativeSolversExt.jl
@@ -1,3 +1,17 @@
+module IterativeSolversExt
+
+using LinearAlgebra, SparseArrays
+using PartitionedArrays
+
+using Gridap, GridapSolvers
+using IterativeSolvers
+
+using Gridap.Helpers, Gridap.Algebra
+
+export IS_ConjugateGradientSolver
+export IS_GMRESSolver
+export IS_MINRESSolver
+export IS_SSORSolver
 abstract type IterativeLinearSolverType end

 struct CGIterativeSolverType <: IterativeLinearSolverType end
@@ -24,7 +38,7 @@ struct SSORIterativeSolverType <: IterativeLinearSolverType end
 - [`IS_MINRESSolver`](@ref)
 - [`IS_SSORSolver`](@ref)
 """
-struct IterativeLinearSolver{A} <: Gridap.Algebra.LinearSolver
+struct IterativeLinearSolver{A} <: Algebra.LinearSolver
   args
   kwargs

@@ -83,7 +97,7 @@ end

 # Symbolic setup

-struct IterativeLinearSolverSS <: Gridap.Algebra.SymbolicSetup
+struct IterativeLinearSolverSS <: Algebra.SymbolicSetup
   solver
 end

@@ -93,7 +107,7 @@ end

 # Numerical setup

-struct IterativeLinearSolverNS <: Gridap.Algebra.NumericalSetup
+struct IterativeLinearSolverNS <: Algebra.NumericalSetup
   solver
   A
   caches
@@ -104,23 +118,29 @@ function Gridap.Algebra.numerical_setup(ss::IterativeLinearSolverSS,A::AbstractM
   numerical_setup(solver_type,ss,A)
 end

-function Gridap.Algebra.numerical_setup(::IterativeLinearSolverType,
-                                        ss::IterativeLinearSolverSS,
-                                        A::AbstractMatrix)
+function Gridap.Algebra.numerical_setup(
+  ::IterativeLinearSolverType,
+  ss::IterativeLinearSolverSS,
+  A::AbstractMatrix
+)
   IterativeLinearSolverNS(ss.solver,A,nothing)
 end

-function Gridap.Algebra.numerical_setup(::CGIterativeSolverType,
-                                        ss::IterativeLinearSolverSS,
-                                        A::AbstractMatrix)
+function Gridap.Algebra.numerical_setup(
+  ::CGIterativeSolverType,
+  ss::IterativeLinearSolverSS,
+  A::AbstractMatrix
+)
   x = allocate_in_domain(A); fill!(x,zero(eltype(x)))
   caches = IterativeSolvers.CGStateVariables(zero(x), similar(x), similar(x))
   return IterativeLinearSolverNS(ss.solver,A,caches)
 end

-function Gridap.Algebra.numerical_setup(::SSORIterativeSolverType,
-                                        ss::IterativeLinearSolverSS,
-                                        A::AbstractMatrix)
+function Gridap.Algebra.numerical_setup(
+  ::SSORIterativeSolverType,
+  ss::IterativeLinearSolverSS,
+  A::AbstractMatrix
+)
   x = allocate_in_range(A); fill!(x,zero(eltype(x)))
   b = allocate_in_domain(A); fill!(b,zero(eltype(b)))
   ω = ss.solver.args[:ω]
@@ -129,11 +149,13 @@ function Gridap.Algebra.numerical_setup(::SSORIterativeSolverType,
   return IterativeLinearSolverNS(ss.solver,A,caches)
 end

-function IterativeSolvers.ssor_iterable(x::PVector,
-                                        A::PSparseMatrix,
-                                        b::PVector,
-                                        ω::Real;
-                                        maxiter::Int = 10)
+function IterativeSolvers.ssor_iterable(
+  x::PVector,
+  A::PSparseMatrix,
+  b::PVector,
+  ω::Real;
+  maxiter::Int = 10
+)
   iterables = map(own_values(x),own_values(A),own_values(b)) do _xi,_Aii,_bi
     xi  = Vector(_xi)
     Aii = SparseMatrixCSC(_Aii)
@@ -149,48 +171,60 @@ function LinearAlgebra.ldiv!(x::AbstractVector,ns::IterativeLinearSolverNS,b::Ab
   solve!(x,ns,b)
 end

-function Gridap.Algebra.solve!(x::AbstractVector,
-                               ns::IterativeLinearSolverNS,
-                               y::AbstractVector)
+function Algebra.solve!(
+  x::AbstractVector,
+  ns::IterativeLinearSolverNS,
+  y::AbstractVector
+)
   solver_type = SolverType(ns.solver)
   solve!(solver_type,x,ns,y)
 end

-function Gridap.Algebra.solve!(::IterativeLinearSolverType,
-                               ::AbstractVector,
-                               ::IterativeLinearSolverNS,
-                               ::AbstractVector)
+function Algebra.solve!(
+  ::IterativeLinearSolverType,
+  ::AbstractVector,
+  ::IterativeLinearSolverNS,
+  ::AbstractVector
+)
   @abstractmethod
 end

-function Gridap.Algebra.solve!(::CGIterativeSolverType,
-                               x::AbstractVector,
-                               ns::IterativeLinearSolverNS,
-                               y::AbstractVector)
+function Algebra.solve!(
+  ::CGIterativeSolverType,
+  x::AbstractVector,
+  ns::IterativeLinearSolverNS,
+  y::AbstractVector
+)
   A, kwargs, caches = ns.A, ns.solver.kwargs, ns.caches
   return cg!(x,A,y;kwargs...,statevars=caches)
 end

-function Gridap.Algebra.solve!(::GMRESIterativeSolverType,
-                               x::AbstractVector,
-                               ns::IterativeLinearSolverNS,
-                               y::AbstractVector)
+function Algebra.solve!(
+  ::GMRESIterativeSolverType,
+  x::AbstractVector,
+  ns::IterativeLinearSolverNS,
+  y::AbstractVector
+)
   A, kwargs = ns.A, ns.solver.kwargs
   return gmres!(x,A,y;kwargs...)
 end

-function Gridap.Algebra.solve!(::MINRESIterativeSolverType,
-                               x::AbstractVector,
-                               ns::IterativeLinearSolverNS,
-                               y::AbstractVector)
+function Algebra.solve!(
+  ::MINRESIterativeSolverType,
+  x::AbstractVector,
+  ns::IterativeLinearSolverNS,
+  y::AbstractVector
+)
   A, kwargs = ns.A, ns.solver.kwargs
   return minres!(x,A,y;kwargs...)
 end

-function Gridap.Algebra.solve!(::SSORIterativeSolverType,
-                               x::AbstractVector,
-                               ns::IterativeLinearSolverNS,
-                               y::AbstractVector)
+function Algebra.solve!(
+  ::SSORIterativeSolverType,
+  x::AbstractVector,
+  ns::IterativeLinearSolverNS,
+  y::AbstractVector
+)
   iterable = ns.caches
   iterable.x = x
   iterable.b = y
@@ -199,10 +233,12 @@ function Gridap.Algebra.solve!(::SSORIterativeSolverType,
   return x
 end

-function Gridap.Algebra.solve!(::SSORIterativeSolverType,
-                               x::PVector,
-                               ns::IterativeLinearSolverNS,
-                               y::PVector)
+function Algebra.solve!(
+  ::SSORIterativeSolverType,
+  x::PVector,
+  ns::IterativeLinearSolverNS,
+  y::PVector
+)
   iterables = ns.caches
   map(iterables,own_values(x),own_values(y)) do iterable, xi, yi
     iterable.x .= xi
@@ -214,3 +250,5 @@ function Gridap.Algebra.solve!(::SSORIterativeSolverType,
   consistent!(x) |> fetch
   return x
 end
+
+end # module
\ No newline at end of file
diff --git a/src/BlockSolvers/BlockSolvers.jl b/src/BlockSolvers/BlockSolvers.jl
index 23d29c89..613925a9 100644
--- a/src/BlockSolvers/BlockSolvers.jl
+++ b/src/BlockSolvers/BlockSolvers.jl
@@ -4,7 +4,6 @@ using LinearAlgebra
 using SparseArrays
 using SparseMatricesCSR
 using BlockArrays
-using IterativeSolvers

 using Gridap
 using Gridap.Helpers, Gridap.Algebra, Gridap.CellData, Gridap.Arrays, Gridap.FESpaces, Gridap.MultiField
diff --git a/src/LinearSolvers/CallbackSolver.jl b/src/LinearSolvers/CallbackSolver.jl
index 87eca8d0..382c8b89 100644
--- a/src/LinearSolvers/CallbackSolver.jl
+++ b/src/LinearSolvers/CallbackSolver.jl
@@ -1,4 +1,18 @@
+"""
+    CallbackSolver(solver::LinearSolver,callback::Function)
+
+A linear solver that runs a callback function after solving the linear system. The callback
+function should take the solution vector as its only argument and return nothing, i.e.
+`callback(x::AbstractVector) -> nothing`.
+
+This structure is useful for adding functionality to any linear solver, such as:
+
+- Logging the solution, residuals, etc.
+- Monitoring properties of the solution, such as its divergence or mean.
+- Modifying the solution in-place after solving the linear system, to apply a correction,
+  for example.
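+
+For example, a minimal sketch that logs the mean value of each computed solution (the
+`LUSolver` choice is illustrative):
+
+    callback(x) = println("mean(x) = ", sum(x)/length(x))
+    solver = CallbackSolver(LUSolver(),callback)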
+"""
 struct CallbackSolver{A,B} <: Algebra.LinearSolver
   solver    :: A
   callback  :: B

diff --git a/src/LinearSolvers/GMGLinearSolvers.jl b/src/LinearSolvers/GMGLinearSolvers.jl
index b564e48b..6a052f4b 100644
--- a/src/LinearSolvers/GMGLinearSolvers.jl
+++ b/src/LinearSolvers/GMGLinearSolvers.jl
@@ -373,9 +373,6 @@ function gmg_coarse_solver_caches(
   with_level(smatrices,nlevs) do AH
     _, _, dxH, rH = work_vectors[nlevs-1]
     cache = numerical_setup(symbolic_setup(solver, AH), AH)
-    if isa(solver,PETScLinearSolver)
-      cache = CachedPETScNS(cache, dxH, rH)
-    end
     return cache
   end
 end
@@ -391,9 +388,6 @@ function gmg_coarse_solver_caches(
     _, _, dxH, rH = work_vectors[nlevs-1]
     xH = svectors[nlevs]
     cache = numerical_setup(symbolic_setup(solver, AH, xH), AH, xH)
-    if isa(solver,PETScLinearSolver)
-      cache = CachedPETScNS(cache, dxH, rH)
-    end
     return cache
   end
 end
diff --git a/src/LinearSolvers/LinearSolvers.jl b/src/LinearSolvers/LinearSolvers.jl
index 7c270430..33824cf9 100644
--- a/src/LinearSolvers/LinearSolvers.jl
+++ b/src/LinearSolvers/LinearSolvers.jl
@@ -6,12 +6,10 @@ using LinearAlgebra
 using SparseArrays
 using SparseMatricesCSR
 using BlockArrays
-using IterativeSolvers

 using Gridap
 using Gridap.Helpers, Gridap.Algebra, Gridap.CellData, Gridap.Arrays, Gridap.FESpaces, Gridap.MultiField
 using PartitionedArrays
-using GridapPETSc
 using GridapDistributed

 using GridapSolvers.MultilevelTools
@@ -48,11 +46,6 @@ include("Krylov/GMRESSolvers.jl")
 include("Krylov/FGMRESSolvers.jl")
 include("Krylov/MINRESSolvers.jl")

-include("PETSc/PETScUtils.jl")
-include("PETSc/PETScCaches.jl")
-include("PETSc/ElasticitySolvers.jl")
-include("PETSc/HipmairXuSolvers.jl")
-
 include("IdentityLinearSolvers.jl")
 include("LinearSolverFromSmoothers.jl")

@@ -62,7 +55,6 @@ include("SymGaussSeidelSmoothers.jl")

 include("RichardsonLinearSolvers.jl")
 include("GMGLinearSolvers.jl")
-include("IterativeLinearSolvers.jl")
 include("SchurComplementSolvers.jl")
 include("MatrixSolvers.jl")
 include("SchwarzLinearSolvers.jl")
diff --git a/src/MultilevelTools/ModelHierarchies.jl b/src/MultilevelTools/ModelHierarchies.jl
index 61147c3e..c8e4227e 100644
--- a/src/MultilevelTools/ModelHierarchies.jl
+++ b/src/MultilevelTools/ModelHierarchies.jl
@@ -177,55 +177,6 @@ function ModelHierarchy(models::Vector{<:GridapDistributed.DistributedDiscreteMo
   return HierarchicalArray(meshes,level_parts)
 end

-"""
-  P4estCartesianModelHierarchy(
-    ranks,np_per_level,domain,nc::NTuple{D,<:Integer};
-    num_refs_coarse::Integer = 0,
-    add_labels!::Function = (labels -> nothing),
-    map::Function = identity,
-    isperiodic::NTuple{D,Bool} = Tuple(fill(false,D))
-  ) where D
-
-  Returns a `ModelHierarchy` with a Cartesian model as coarsest level, using GridapP4est.jl.
-  The i-th level will be distributed among `np_per_level[i]` processors.
-  The seed model is given by `cmodel = CartesianDiscreteModel(domain,nc)`.
-"""
-function P4estCartesianModelHierarchy(
-  ranks,np_per_level,domain,nc::NTuple{D,<:Integer};
-  num_refs_coarse::Integer = 0,
-  add_labels!::Function = (labels -> nothing),
-  map::Function = identity,
-  isperiodic::NTuple{D,Bool} = Tuple(fill(false,D))
-) where D
-  cparts = generate_subparts(ranks,np_per_level[end])
-  cmodel = CartesianDiscreteModel(domain,nc;map,isperiodic)
-  add_labels!(get_face_labeling(cmodel))
-
-  coarse_model = OctreeDistributedDiscreteModel(cparts,cmodel,num_refs_coarse)
-  mh = ModelHierarchy(ranks,coarse_model,np_per_level)
-  return mh
-end
-
-function GridapDistributed.DistributedAdaptedDiscreteModel(
-  model  :: GridapP4est.OctreeDistributedDiscreteModel,
-  parent :: GridapDistributed.DistributedDiscreteModel,
-  glue   :: AbstractArray{<:Gridap.Adaptivity.AdaptivityGlue};
-)
-  GridapDistributed.DistributedAdaptedDiscreteModel(
-    model.dmodel,parent,glue
-  )
-end
-
-function GridapDistributed.DistributedAdaptedDiscreteModel(
-  model  :: GridapP4est.OctreeDistributedDiscreteModel,
-  parent :: GridapP4est.OctreeDistributedDiscreteModel,
-  glue   :: AbstractArray{<:Gridap.Adaptivity.AdaptivityGlue};
-)
-  GridapDistributed.DistributedAdaptedDiscreteModel(
-    model.dmodel,parent.dmodel,glue
-  )
-end
-
 """
   ModelHierarchy(root_parts,model,num_procs_x_level)

diff --git a/src/MultilevelTools/MultilevelTools.jl b/src/MultilevelTools/MultilevelTools.jl
index 4ef8c95a..ee18f0d8 100644
--- a/src/MultilevelTools/MultilevelTools.jl
+++ b/src/MultilevelTools/MultilevelTools.jl
@@ -9,7 +9,7 @@ using Gridap
 using Gridap.Helpers, Gridap.Algebra, Gridap.Arrays, Gridap.Fields, Gridap.CellData
 using Gridap.ReferenceFEs, Gridap.Geometry, Gridap.FESpaces, Gridap.Adaptivity, Gridap.MultiField

-using PartitionedArrays, GridapDistributed, GridapP4est
+using PartitionedArrays, GridapDistributed

 using Gridap.FESpaces: BasisStyle, TestBasis, TrialBasis, SingleFieldFEBasis
 using Gridap.MultiField: MultiFieldFEBasisComponent