From 10ae5c80b6afe3bf5724849b46ba6460c8e81802 Mon Sep 17 00:00:00 2001 From: carlodev Date: Wed, 1 Feb 2023 11:46:38 +0100 Subject: [PATCH 01/22] fix for nls in time dep problems --- src/PETScNonlinearSolvers.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/PETScNonlinearSolvers.jl b/src/PETScNonlinearSolvers.jl index ecd630b..abe6703 100644 --- a/src/PETScNonlinearSolvers.jl +++ b/src/PETScNonlinearSolvers.jl @@ -158,7 +158,8 @@ function Algebra.solve!(x::T, op::NonlinearOperator, cache::PETScNonlinearSolverCache{<:T}) where T <: AbstractVector - @assert cache.op === op + #@assert cache.op === op + cache.op = op @check_error_code PETSC.SNESSolve(cache.snes[],C_NULL,cache.x_petsc.vec[]) copy!(x,cache.x_petsc) cache From b9a6058c386621afef2e036b1f249a954f30d46b Mon Sep 17 00:00:00 2001 From: carlodev Date: Wed, 1 Feb 2023 11:49:42 +0100 Subject: [PATCH 02/22] add PETSc IS --- src/GridapPETSc.jl | 6 +- src/PETSC.jl | 39 +++++++++ src/PETScIndexes.jl | 191 ++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 234 insertions(+), 2 deletions(-) create mode 100644 src/PETScIndexes.jl diff --git a/src/GridapPETSc.jl b/src/GridapPETSc.jl index defef13..f2e5bc1 100644 --- a/src/GridapPETSc.jl +++ b/src/GridapPETSc.jl @@ -69,17 +69,19 @@ end include("PETSC.jl") using GridapPETSc.PETSC: @check_error_code -using GridapPETSc.PETSC: PetscBool, PetscInt, PetscScalar, Vec, Mat, KSP, PC, SNES +using GridapPETSc.PETSC: PetscBool, PetscInt, PetscScalar, Vec, Mat, KSP, PC, SNES, IS #export PETSC export @check_error_code -export PetscBool, PetscInt, PetscScalar, Vec, Mat, KSP, PC +export PetscBool, PetscInt, PetscScalar, Vec, Mat, KSP, PC, IS include("Environment.jl") export PETScVector export PETScMatrix export petsc_sparse +export PETScIS include("PETScArrays.jl") +include("PETScIndexes.jl") include("PartitionedArrays.jl") export PETScLinearSolver diff --git a/src/PETSC.jl b/src/PETSC.jl index a8623d0..7479fb7 100644 --- a/src/PETSC.jl +++ b/src/PETSC.jl @@ -703,4 +703,43 @@ const SNESPATCH = "patch" @wrapper(:PetscObjectRegisterDestroy,PetscErrorCode,(Ptr{Cvoid},),(obj,),"https://petsc.org/release/docs/manualpages/Sys/PetscObjectRegisterDestroy.html") @wrapper(:PetscObjectRegisterDestroyAll,PetscErrorCode,(),(),"https://petsc.org/release/docs/manualpages/Sys/PetscObjectRegisterDestroyAll.html") +""" + Julia alias for the `IS` C type. + See [PETSc manual](https://petsc.org/release/docs/manualpages/IS/IS/)). + """ + struct IS + ptr::Ptr{Cvoid} + end + IS() = IS(Ptr{Cvoid}()) + Base.convert(::Type{IS},p::Ptr{Cvoid}) = IS(p) + Base.unsafe_convert(::Type{Ptr{Cvoid}},v::IS) = v.ptr + const ISType = Cstring + + + + """ + Julia alias for `PetscCopyMode` C type. + See [PETSc manual](https://petsc.org/main/docs/manualpages/Sys/PetscCopyMode/). 
+ """ + @enum PetscCopyMode begin + PETSC_COPY_VALUES + PETSC_OWN_POINTER + PETSC_USE_POINTER + end +@wrapper(:PetscObjectSetName, PetscErrorCode, (Ptr{Cvoid}, Cstring), (field, name), "https://petsc.org/main/docs/manualpages/Sys/PetscObjectSetName/") +@wrapper(:ISCreateGeneral,PetscErrorCode,(MPI.Comm, PetscInt, PetscInt, PetscCopyMode, Ptr{IS}),(comm, n, idx, mode, is), "https://petsc.org/main/docs/manualpages/IS/ISCreateGeneral/") +@wrapper(:ISView,PetscErrorCode,(IS, PetscViewer),(is, viewer), "https://petsc.org/main/docs/manualpages/IS/ISView/") +@wrapper(:ISCreateBlock,PetscErrorCode,(MPI.Comm, PetscInt, PetscInt, PetscInt, PetscCopyMode, Ptr{IS}),(comm, bs, n, idx, mode, is), "https://petsc.org/release/docs/manualpages/IS/ISCreateBlock/") +@wrapper(:ISGetIndices,PetscErrorCode,(IS, Ptr{PetscInt}),(is, ptr), "https://petsc.org/release/docs/manualpages/IS/ISGetIndices/") +@wrapper(:ISExpand,PetscErrorCode,(IS, IS, Ptr{IS}),(is1, is2, isout), "https://petsc.org/release/docs/manualpages/IS/ISGetIndices/") +@wrapper(:ISGetSize,PetscErrorCode,(IS, Ptr{PetscInt}), (is, nsize), "https://petsc.org/main/docs/manualpages/IS/ISGetSize/") +@wrapper(:ISDestroy, PetscErrorCode,(Ptr{IS},) ,(pis,), "https://petsc.org/main/docs/manualpages/IS/ISDestroy/") +@wrapper(:PCFieldSplitSetIS,PetscErrorCode,(PC, Cstring, IS),(pc, Cfieldname, is), "https://petsc.org/release/docs/manualpages/PC/PCFieldSplitSetIS/") + + + +#PETSc Print +@wrapper(:PetscPrintf,PetscErrorCode,(MPI.Comm, Cstring ),(comm, args ),"https://petsc.org/release/docs/manualpages/Sys/PetscPrintf/") +@wrapper(:PetscSynchronizedPrintf,PetscErrorCode,(MPI.Comm, Cstring),(comm, args),"https://petsc.org/main/docs/manualpages/Sys/PetscSynchronizedPrintf/") + end # module diff --git a/src/PETScIndexes.jl b/src/PETScIndexes.jl new file mode 100644 index 0000000..c63e9f3 --- /dev/null +++ b/src/PETScIndexes.jl @@ -0,0 +1,191 @@ +# Index + +mutable struct PETScIS <: AbstractVector{PetscInt} + is::Base.RefValue{IS} + initialized::Bool + size::Tuple{Int} + comm::MPI.Comm + function PETScIS(comm::MPI.Comm) + new(Ref{IS}(),false,(-1,),comm) +end + +end + + function Init(a::PETScIS) + n = Ref{PetscInt}() + @check_error_code PETSC.ISGetSize(a.is[],n) + a.size = (Int(n[]),) + @assert Threads.threadid() == 1 + _NREFS[] += 1 + a.initialized = true + finalizer(Finalize,a) + end + + function Finalize(a::PETScIS) + if a.initialized && GridapPETSc.Initialized() + if a.comm == MPI.COMM_SELF + @check_error_code PETSC.ISDestroy(a.is) + else + @check_error_code PETSC.PetscObjectRegisterDestroy(a.is[]) + end + a.initialized = false + @assert Threads.threadid() == 1 + _NREFS[] -= 1 + end + nothing + end + + function Base.size(v::PETScIS) + @assert v.initialized + v.size + end + + Base.@propagate_inbounds function Base.getindex(v::PETScIS,i1::Integer) + @boundscheck checkbounds(v, i1) + n = one(PetscInt) + # i0 = Ref(i1-n) + pi0 = Ref{PetscInt}() + # pi0 = reinterpret(Ptr{PetscInt},pointer_from_objref(i0)) + @check_error_code PETSC.ISGetIndices(v.is[], pi0) + return pi0 + end + + # Base.@propagate_inbounds function Base.setindex!(v::PETScIS,y,i1::Integer) + # @boundscheck checkbounds(v, i1) + # n = one(PetscInt) + # i0 = Ref(PetscInt(i1-n)) + # vi = Ref(PetscInt(y)) + # @check_error_code PETSC.VecSetValues(v.is[],n,i0,vi,PETSC.INSERT_VALUES) + # v.is[i0] .= y + # y + # end + + # Constructors + + function PETScIS(n::Integer) + println("PETScIS") + v = Ref{Ptr{PetscInt}}() + println("Construct\n") + @check_error_code PETSC.PetscMalloc1(n, v) + Init(v) + end + + function 
PETScIS(array::Vector, bs=1) + comm = MPI.COMM_SELF + is = PETScIS(comm) + n = length(array) + p_idx = Ref{Ptr{PetscInt}}() + xp=reinterpret(Ptr{PetscInt},pointer_from_objref(p_idx)) + for i = 1:1:n + unsafe_store!(xp, array[i], i) + end + @check_error_code GridapPETSc.PETSC.ISCreateGeneral(MPI.COMM_WORLD, n, xp, GridapPETSc.PETSC.PETSC_COPY_VALUES, is.is) + + Init(is) + end + +# function PETScVector(a::PetscScalar,ax::AbstractUnitRange) +# PETScVector(fill(a,length(ax))) +# end + +# function Base.similar(::PETScVector,::Type{PetscScalar},ax::Tuple{Int}) +# PETScVector(ax[1]) +# end + +# function Base.similar(a::PETScVector,::Type{PetscScalar},ax::Tuple{<:Base.OneTo}) +# similar(a,PetscScalar,map(length,ax)) +# end + +# function Base.similar(::Type{PETScVector},ax::Tuple{Int}) +# PETScVector(ax[1]) +# end + +# function Base.similar(::Type{PETScVector},ax::Tuple{<:Base.OneTo}) +# PETScVector(map(length,ax)) +# end + +# function Base.copy(a::PETScVector) +# v = PETScVector(a.comm) +# @check_error_code PETSC.VecDuplicate(a.vec[],v.vec) +# @check_error_code PETSC.VecCopy(a.vec[],v.vec[]) +# Init(v) +# end + +# function Base.convert(::Type{PETScVector},a::PETScVector) +# a +# end + +# function Base.convert(::Type{PETScVector},a::AbstractVector) +# array = convert(Vector{PetscScalar},a) +# PETScVector(array) +# end + +# function Base.copy!(a::AbstractVector,b::PETScVector) +# @check length(a) == length(b) +# _copy!(a,b.vec[]) +# end + +# function _copy!(a::Vector,b::Vec) +# ni = length(a) +# ix = collect(PetscInt,0:(ni-1)) +# v = convert(Vector{PetscScalar},a) +# @check_error_code PETSC.VecGetValues(b,ni,ix,v) +# if !(v === a) +# a .= v +# end +# end + +# function Base.copy!(a::PETScVector,b::AbstractVector) +# @check length(a) == length(b) +# _copy!(a.vec[],b) +# end + +# function _copy!(a::Vec,b::Vector) +# ni = length(b) +# ix = collect(PetscInt,0:(ni-1)) +# v = convert(Vector{PetscScalar},b) +# @check_error_code PETSC.VecSetValues(a,ni,ix,v,PETSC.INSERT_VALUES) +# end + +# function _get_local_oh_vector(a::Vec) +# v=PETScVector(MPI.COMM_SELF) +# @check_error_code PETSC.VecGhostGetLocalForm(a,v.vec) +# if v.vec[] != C_NULL # a is a ghosted vector +# v.ownership=a +# Init(v) +# return v +# else # a is NOT a ghosted vector +# return _get_local_vector(a) +# end +# end + +# function _local_size(a::PETScVector) +# r_sz = Ref{PetscInt}() +# @check_error_code PETSC.VecGetLocalSize(a.vec[], r_sz) +# r_sz[] +# end + +# # This function works with either ghosted or non-ghosted MPI vectors. +# # In the case of a ghosted vector it solely returns the locally owned +# # entries. +# function _get_local_vector(a::PETScVector) +# r_pv = Ref{Ptr{PetscScalar}}() +# @check_error_code PETSC.VecGetArray(a.vec[], r_pv) +# v = unsafe_wrap(Array, r_pv[], _local_size(a); own = false) +# return v +# end + +# # This function works with either ghosted or non-ghosted MPI vectors. +# # In the case of a ghosted vector it solely returns the locally owned +# # entries. 
+# function _get_local_vector_read(a::PETScVector) +# r_pv = Ref{Ptr{PetscScalar}}() +# @check_error_code PETSC.VecGetArrayRead(a.vec[], r_pv) +# v = unsafe_wrap(Array, r_pv[], _local_size(a); own = false) +# return v +# end + +# function _restore_local_vector!(v::Array,a::PETScVector) +# @check_error_code PETSC.VecRestoreArray(a.vec[], Ref(pointer(v))) +# nothing +# end \ No newline at end of file From 71122f87bc7fa9409dc3752a7178a974accf6e98 Mon Sep 17 00:00:00 2001 From: carlodev Date: Wed, 1 Feb 2023 11:50:13 +0100 Subject: [PATCH 03/22] avoid multiple print in parallel --- src/Environment.jl | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/Environment.jl b/src/Environment.jl index f642c75..882efc0 100644 --- a/src/Environment.jl +++ b/src/Environment.jl @@ -3,6 +3,12 @@ function Init(;args=String[],file="",help="",finalize_atexit=true) if !MPI.Initialized() MPI.Init() end + #To avoid multiple printing of the same line in parallel + # if MPI.Comm_rank(MPI.COMM_WORLD) != 0 + # redirect_stderr(devnull) + # redirect_stdout(devnull) + # end + if finalize_atexit atexit(Finalize) end From 8c5c4d045a2d9d46db0c5acc55cf5372d7a1354e Mon Sep 17 00:00:00 2001 From: carlodev Date: Wed, 1 Feb 2023 14:54:08 +0100 Subject: [PATCH 04/22] add tests --- src/PETSC.jl | 2 +- src/PETScIndexes.jl | 4 +--- test/IndexesTests.jl | 33 ++++++++++++++++++++++++++++ test/mpi/IndexesTests.jl | 3 +++ test/sequential/PETScIndexesTests.jl | 25 +++++++++++++++++++++ test/sequential/runtests.jl | 2 ++ 6 files changed, 65 insertions(+), 4 deletions(-) create mode 100644 test/IndexesTests.jl create mode 100644 test/mpi/IndexesTests.jl create mode 100644 test/sequential/PETScIndexesTests.jl diff --git a/src/PETSC.jl b/src/PETSC.jl index 7479fb7..6fc7ce1 100644 --- a/src/PETSC.jl +++ b/src/PETSC.jl @@ -733,7 +733,7 @@ const SNESPATCH = "patch" @wrapper(:ISGetIndices,PetscErrorCode,(IS, Ptr{PetscInt}),(is, ptr), "https://petsc.org/release/docs/manualpages/IS/ISGetIndices/") @wrapper(:ISExpand,PetscErrorCode,(IS, IS, Ptr{IS}),(is1, is2, isout), "https://petsc.org/release/docs/manualpages/IS/ISGetIndices/") @wrapper(:ISGetSize,PetscErrorCode,(IS, Ptr{PetscInt}), (is, nsize), "https://petsc.org/main/docs/manualpages/IS/ISGetSize/") -@wrapper(:ISDestroy, PetscErrorCode,(Ptr{IS},) ,(pis,), "https://petsc.org/main/docs/manualpages/IS/ISDestroy/") +@wrapper(:ISDestroy, PetscErrorCode,(Ptr{IS},), (pis,), "https://petsc.org/main/docs/manualpages/IS/ISDestroy/") @wrapper(:PCFieldSplitSetIS,PetscErrorCode,(PC, Cstring, IS),(pc, Cfieldname, is), "https://petsc.org/release/docs/manualpages/PC/PCFieldSplitSetIS/") diff --git a/src/PETScIndexes.jl b/src/PETScIndexes.jl index c63e9f3..a00b34f 100644 --- a/src/PETScIndexes.jl +++ b/src/PETScIndexes.jl @@ -5,11 +5,9 @@ mutable struct PETScIS <: AbstractVector{PetscInt} initialized::Bool size::Tuple{Int} comm::MPI.Comm - function PETScIS(comm::MPI.Comm) - new(Ref{IS}(),false,(-1,),comm) + PETScIS(comm::MPI.Comm) = new(Ref{IS}(),false,(-1,),comm) end -end function Init(a::PETScIS) n = Ref{PetscInt}() diff --git a/test/IndexesTests.jl b/test/IndexesTests.jl new file mode 100644 index 0000000..f3e29a6 --- /dev/null +++ b/test/IndexesTests.jl @@ -0,0 +1,33 @@ +using GridapPETSc +using Test +using SparseArrays +using SparseMatricesCSR +using GridapPETSc: PetscScalar, PetscInt +using LinearAlgebra +using MPI +using PartitionedArrays + + +function main(parts) +options = "-info" + GridapPETSc.with(args=split(options)) do + backend = get_backend(parts) + if backend == MPIBackend() + comm = 
MPI.COMM_WORLD + procid = PartitionedArrays.get_part_id(comm) + nprocs = PartitionedArrays.num_parts(comm) + elseif backend == SequentialBackend() + procid = 1 + nprocs = 1 + end + + println(procid) + array = ones(5) + array = array .+ procid + is = PETScIS(array) + @check_error_code GridapPETSc.PETSC.ISView(is.is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_WORLD) + @test is.size[1] == length(array)*nprocs + end +end + +#mpiexecjl --project=. -n 4 julia test/IndexesTests.jl \ No newline at end of file diff --git a/test/mpi/IndexesTests.jl b/test/mpi/IndexesTests.jl new file mode 100644 index 0000000..ebb49d1 --- /dev/null +++ b/test/mpi/IndexesTests.jl @@ -0,0 +1,3 @@ +include("../IndexesTests.jl") +nparts = (2,2) +prun(main,mpi,nparts) diff --git a/test/sequential/PETScIndexesTests.jl b/test/sequential/PETScIndexesTests.jl new file mode 100644 index 0000000..8422c60 --- /dev/null +++ b/test/sequential/PETScIndexesTests.jl @@ -0,0 +1,25 @@ +module PETScIndexesTests + +using GridapPETSc +using Test +using SparseArrays +using SparseMatricesCSR +using GridapPETSc: PetscScalar, PetscInt +using LinearAlgebra + + + + +options = "-info" +GridapPETSc.with(args=split(options)) do + +array = collect(1:5) +is = PETScIS(array) +@check_error_code GridapPETSc.PETSC.ISView(is.is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_WORLD) +@check_error_code GridapPETSc.PETSC.ISView(is.is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_SELF) +@test is.size[1] == length(array) +@test is.initialized == true + +end + +end # module diff --git a/test/sequential/runtests.jl b/test/sequential/runtests.jl index a42e404..c82684c 100644 --- a/test/sequential/runtests.jl +++ b/test/sequential/runtests.jl @@ -7,6 +7,8 @@ using MPI @time @testset "PETScArrays" begin include("PETScArraysTests.jl") end +@time @testset "PETScIndexes" begin include("PETScIndexesTests.jl") end + @time @testset "PartitionedArrays (sequential)" begin include("PartitionedArraysTests.jl") end @time @testset "PETScLinearSolvers" begin include("PETScLinearSolversTests.jl") end From 3f6eb87331af9fefe294e663206ffe0e54543d17 Mon Sep 17 00:00:00 2001 From: carlodev Date: Wed, 1 Feb 2023 14:55:47 +0100 Subject: [PATCH 05/22] add comment --- src/PETScIndexes.jl | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/PETScIndexes.jl b/src/PETScIndexes.jl index a00b34f..53cc31d 100644 --- a/src/PETScIndexes.jl +++ b/src/PETScIndexes.jl @@ -38,15 +38,15 @@ end v.size end - Base.@propagate_inbounds function Base.getindex(v::PETScIS,i1::Integer) - @boundscheck checkbounds(v, i1) - n = one(PetscInt) - # i0 = Ref(i1-n) - pi0 = Ref{PetscInt}() - # pi0 = reinterpret(Ptr{PetscInt},pointer_from_objref(i0)) - @check_error_code PETSC.ISGetIndices(v.is[], pi0) - return pi0 - end + # Base.@propagate_inbounds function Base.getindex(v::PETScIS,i1::Integer) + # @boundscheck checkbounds(v, i1) + # n = one(PetscInt) + # # i0 = Ref(i1-n) + # pi0 = Ref{PetscInt}() + # # pi0 = reinterpret(Ptr{PetscInt},pointer_from_objref(i0)) + # @check_error_code PETSC.ISGetIndices(v.is[], pi0) + # return pi0 + # end # Base.@propagate_inbounds function Base.setindex!(v::PETScIS,y,i1::Integer) # @boundscheck checkbounds(v, i1) From 9ffddcde5737c5a59fd4682645c738ab24fda441 Mon Sep 17 00:00:00 2001 From: carlodev Date: Wed, 1 Feb 2023 17:01:08 +0100 Subject: [PATCH 06/22] add Stokes Test --- test/sequential/StokesTest.jl | 114 ++++++++++++++++++++++++++++++++++ test/sequential/runtests.jl | 2 + 2 files changed, 116 insertions(+) create mode 100644 test/sequential/StokesTest.jl 
diff --git a/test/sequential/StokesTest.jl b/test/sequential/StokesTest.jl new file mode 100644 index 0000000..c0db00b --- /dev/null +++ b/test/sequential/StokesTest.jl @@ -0,0 +1,114 @@ +module StokesTest + +using Gridap +using Test +using GridapDistributed + +using PartitionedArrays +using SparseArrays +using GridapPETSc + +#You can provide the following options string (which will be automatically used when calling PetscLinearSolver()) +# or the PescLinearSolver(mykspsetup) +#They are equivalent (with the exeption of -ksp_monitor) +#You can use PescLinearSolver(mykspsetup) and at the same time "-ksp_monitor", they are merged + +function mykspsetup(ksp) + pc = Ref{GridapPETSc.PETSC.PC}() + umfpack = Ref{GridapPETSc.PETSC.Mat}() + @check_error_code GridapPETSc.PETSC.KSPSetType(ksp[], GridapPETSc.PETSC.KSPPREONLY) + @check_error_code GridapPETSc.PETSC.KSPGetPC(ksp[], pc) + @check_error_code GridapPETSc.PETSC.PCSetType(pc[], GridapPETSc.PETSC.PCLU) + @check_error_code GridapPETSc.PETSC.PCFactorSetMatSolverType(pc[], GridapPETSc.PETSC.MATSOLVERUMFPACK) + @check_error_code GridapPETSc.PETSC.PCFactorSetUpMatSolverType(pc[]) + @check_error_code GridapPETSc.PETSC.PCFactorGetMatrix(pc[], umfpack) + @check_error_code GridapPETSc.PETSC.KSPSetFromOptions(ksp[]) +end + + +function main(solver_options) + if solver_options == :petsc_linecommand + options = "-pc_type lu -ksp_type preonly -ksp_max_it 10 -ksp_monitor -pc_factor_mat_solver_type umfpack" + elseif solver_options == :petsc_mykspsetup + options = "-ksp_monitor" + elseif solver_options == :julia + options = " " + else + error("Solver $(solver_options) not valid. Use instead:\n\ + :petsc_linecommand\n\ + :petsc_mykspsetup\n\ + :julia\n") + end + tt = 0.0 + GridapPETSc.with(args=split(options)) do + n = 50 + domain = (0, 1, 0, 1) + partition = (n, n) + model = CartesianDiscreteModel(domain, partition) + + labels = get_face_labeling(model) + add_tag_from_tags!(labels, "diri1", [6,]) + add_tag_from_tags!(labels, "diri0", [1, 2, 3, 4, 5, 7, 8]) + add_tag_from_tags!(labels, "dirip", [1]) + + order = 2 + reffeᵤ = ReferenceFE(lagrangian, VectorValue{2,Float64}, order) + reffeₚ = ReferenceFE(lagrangian, Float64, order - 1; space=:P) + + V = TestFESpace(model, reffeᵤ, labels=labels, dirichlet_tags=["diri0", "diri1"], conformity=:H1) + Q = TestFESpace(model, reffeₚ, conformity=:L2, dirichlet_tags="dirip") + Y = MultiFieldFESpace([V, Q]) + + u0 = VectorValue(0, 0) + u1 = VectorValue(1, 0) + U = TrialFESpace(V, [u0, u1]) + P = TrialFESpace(Q, 0.0) + X = MultiFieldFESpace([U, P]) + + degree = order + Ω = Triangulation(model) + dΩ = Measure(Ω, degree) + + f = VectorValue(0.0, 0.0) + h = 1 / n + τ = (h .^ 2) ./ 4 + + a((u, p), (v, q)) = ∫(∇(v) ⊙ ∇(u) - (∇ ⋅ v) * p + q * (∇ ⋅ u))dΩ + ∫((τ ⋅ ∇(q))' ⋅ (∇(p)))dΩ + #The last term is added in order to have elements on the main diagonal, if not PETSc does not work properly + + l((v, q)) = ∫(v ⋅ f)dΩ + res((u, p), (v, q)) = a((u, p), (v, q)) - l((v, q)) + op = AffineFEOperator(a, l, X, Y) + + + if solver_options == :petsc_linecommand + #Solve using Petsc solver, with the options given in line command style + solver = PETScLinearSolver() + uh, ph = solve(solver, op) + tt = @elapsed uh, ph = solve(solver, op) + elseif solver_options == :petsc_mykspsetup + #Solve using Petsc solver, with the options given in mykspsetup + solver = PETScLinearSolver(mykspsetup) + uh, ph = solve(solver, op) + tt = @elapsed uh, ph = solve(solver, op) + elseif solver_options == :julia + #Solve using default julia solver, which is a LU 
decomposition + uh, ph = solve(op) + tt = @elapsed uh, ph = solve(op) + end + + end + return tt + +end +#The PETSc and Julia in this case both use a LU decomposition, the time should be really close + +j = main(:julia) +pl = main(:petsc_linecommand) +pm = main(:petsc_mykspsetup) + +@test isapprox(j,pl; rtol = 0.1) +@test isapprox(j,pm; rtol = 0.1) +@test isapprox(pm,pm; rtol = 0.1) + +end #end module diff --git a/test/sequential/runtests.jl b/test/sequential/runtests.jl index c82684c..5d53ce0 100644 --- a/test/sequential/runtests.jl +++ b/test/sequential/runtests.jl @@ -21,6 +21,8 @@ using MPI @time @testset "ElasticityDriver" begin include("ElasticityDriver.jl") end +@time @testset "StokesTest" begin include("StokesTest.jl") end + @time @testset "DarcyDriver" begin include("DarcyDriver.jl") end @time @testset "PLaplacianDriver" begin include("PLaplacianDriver.jl") end From f7e24347e77f7fd7ff7a7a840ad81f647451df6d Mon Sep 17 00:00:00 2001 From: carlodev Date: Thu, 2 Feb 2023 09:37:06 +0100 Subject: [PATCH 07/22] PETScIS getindex --- src/PETScIndexes.jl | 49 +++++++++++++++++++++++++++------------------ 1 file changed, 30 insertions(+), 19 deletions(-) diff --git a/src/PETScIndexes.jl b/src/PETScIndexes.jl index 53cc31d..583acc6 100644 --- a/src/PETScIndexes.jl +++ b/src/PETScIndexes.jl @@ -11,7 +11,7 @@ end function Init(a::PETScIS) n = Ref{PetscInt}() - @check_error_code PETSC.ISGetSize(a.is[],n) + @check_error_code PETSC.ISGetSize(a.is[], n) a.size = (Int(n[]),) @assert Threads.threadid() == 1 _NREFS[] += 1 @@ -38,15 +38,14 @@ end v.size end - # Base.@propagate_inbounds function Base.getindex(v::PETScIS,i1::Integer) - # @boundscheck checkbounds(v, i1) - # n = one(PetscInt) - # # i0 = Ref(i1-n) - # pi0 = Ref{PetscInt}() - # # pi0 = reinterpret(Ptr{PetscInt},pointer_from_objref(i0)) - # @check_error_code PETSC.ISGetIndices(v.is[], pi0) - # return pi0 - # end + Base.@propagate_inbounds function Base.getindex(v::PETScIS,i1::Integer) + @boundscheck checkbounds(v, i1) + n = one(PetscInt) + i0 = Ref(i1-n) + pi0 = reinterpret(Ptr{PetscInt},pointer_from_objref(i0)) + @check_error_code PETSC.ISGetIndices(v.is[], pi0) + return pi0 + end # Base.@propagate_inbounds function Base.setindex!(v::PETScIS,y,i1::Integer) # @boundscheck checkbounds(v, i1) @@ -68,20 +67,32 @@ end Init(v) end - function PETScIS(array::Vector, bs=1) + + + + function PETScIS(idx::Vector{PetscInt}, bs=1) comm = MPI.COMM_SELF is = PETScIS(comm) - n = length(array) - p_idx = Ref{Ptr{PetscInt}}() - xp=reinterpret(Ptr{PetscInt},pointer_from_objref(p_idx)) - for i = 1:1:n - unsafe_store!(xp, array[i], i) - end - @check_error_code GridapPETSc.PETSC.ISCreateGeneral(MPI.COMM_WORLD, n, xp, GridapPETSc.PETSC.PETSC_COPY_VALUES, is.is) + n = length(idx) + @check_error_code GridapPETSc.PETSC.ISCreateGeneral(comm, n, idx, GridapPETSc.PETSC.PETSC_COPY_VALUES, is.is) + Init(is) + end + + function PETScIS(idx::Vector, bs=1) + idx = PetscInt.(idx) + PETScIS(idx) + end +#Block Implementation + function PETScIS(array::Vector{PetscInt},n, bs=1) + comm = MPI.COMM_SELF + is = PETScIS(comm) + n = PetscInt(n) + bs = PetscInt(bs) + @check_error_code GridapPETSc.PETSC.ISCreateBlock(comm, n, bs, array, GridapPETSc.PETSC.PETSC_COPY_VALUES, is.is) Init(is) end - + # function PETScVector(a::PetscScalar,ax::AbstractUnitRange) # PETScVector(fill(a,length(ax))) # end From 7047a14dd54d7d58fe03aa209707169178e70a6e Mon Sep 17 00:00:00 2001 From: carlodev Date: Thu, 2 Feb 2023 09:37:41 +0100 Subject: [PATCH 08/22] longer array IS test --- 
test/sequential/PETScIndexesTests.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/sequential/PETScIndexesTests.jl b/test/sequential/PETScIndexesTests.jl index 8422c60..7c93049 100644 --- a/test/sequential/PETScIndexesTests.jl +++ b/test/sequential/PETScIndexesTests.jl @@ -13,13 +13,13 @@ using LinearAlgebra options = "-info" GridapPETSc.with(args=split(options)) do -array = collect(1:5) +array = collect(1:1:100) is = PETScIS(array) @check_error_code GridapPETSc.PETSC.ISView(is.is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_WORLD) @check_error_code GridapPETSc.PETSC.ISView(is.is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_SELF) @test is.size[1] == length(array) @test is.initialized == true - end + end # module From 058bb529815e7b926dadf6db82194a58eb41add1 Mon Sep 17 00:00:00 2001 From: carlodev Date: Thu, 2 Feb 2023 09:37:52 +0100 Subject: [PATCH 09/22] bug Ptr fix --- src/PETSC.jl | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/src/PETSC.jl b/src/PETSC.jl index 6fc7ce1..ca0664b 100644 --- a/src/PETSC.jl +++ b/src/PETSC.jl @@ -727,9 +727,9 @@ const SNESPATCH = "patch" PETSC_USE_POINTER end @wrapper(:PetscObjectSetName, PetscErrorCode, (Ptr{Cvoid}, Cstring), (field, name), "https://petsc.org/main/docs/manualpages/Sys/PetscObjectSetName/") -@wrapper(:ISCreateGeneral,PetscErrorCode,(MPI.Comm, PetscInt, PetscInt, PetscCopyMode, Ptr{IS}),(comm, n, idx, mode, is), "https://petsc.org/main/docs/manualpages/IS/ISCreateGeneral/") +@wrapper(:ISCreateGeneral,PetscErrorCode,(MPI.Comm, PetscInt, Ptr{PetscInt}, PetscCopyMode, Ptr{IS}),(comm, n, idx, mode, is), "https://petsc.org/main/docs/manualpages/IS/ISCreateGeneral/") @wrapper(:ISView,PetscErrorCode,(IS, PetscViewer),(is, viewer), "https://petsc.org/main/docs/manualpages/IS/ISView/") -@wrapper(:ISCreateBlock,PetscErrorCode,(MPI.Comm, PetscInt, PetscInt, PetscInt, PetscCopyMode, Ptr{IS}),(comm, bs, n, idx, mode, is), "https://petsc.org/release/docs/manualpages/IS/ISCreateBlock/") +@wrapper(:ISCreateBlock,PetscErrorCode,(MPI.Comm, PetscInt, PetscInt, Ptr{PetscInt}, PetscCopyMode, Ptr{IS}),(comm, bs, n, idx, mode, is), "https://petsc.org/release/docs/manualpages/IS/ISCreateBlock/") @wrapper(:ISGetIndices,PetscErrorCode,(IS, Ptr{PetscInt}),(is, ptr), "https://petsc.org/release/docs/manualpages/IS/ISGetIndices/") @wrapper(:ISExpand,PetscErrorCode,(IS, IS, Ptr{IS}),(is1, is2, isout), "https://petsc.org/release/docs/manualpages/IS/ISGetIndices/") @wrapper(:ISGetSize,PetscErrorCode,(IS, Ptr{PetscInt}), (is, nsize), "https://petsc.org/main/docs/manualpages/IS/ISGetSize/") @@ -742,4 +742,23 @@ const SNESPATCH = "patch" @wrapper(:PetscPrintf,PetscErrorCode,(MPI.Comm, Cstring ),(comm, args ),"https://petsc.org/release/docs/manualpages/Sys/PetscPrintf/") @wrapper(:PetscSynchronizedPrintf,PetscErrorCode,(MPI.Comm, Cstring),(comm, args),"https://petsc.org/main/docs/manualpages/Sys/PetscSynchronizedPrintf/") +#PETSc Alloc +@wrapper(:PetscMallocA,PetscErrorCode,(PetscInt, PetscBool, PetscInt, Ptr{Cstring}, Ptr{Cstring}, Csize_t, Ptr{Cvoid},), (n, clear, lineno, fun, fname, bytes0, ptr0,), "https://petsc.org/release/docs/manualpages/Sys/PetscMallocA/#petscmalloca") +#PETSC_EXTERN PetscErrorCode PetscMallocA(int,PetscBool,int,const char *,const char *,size_t,void *,...); + +""" + @PetscMalloc1 + +See [PETSc manual](https://petsc.org/release/docs/manualpages/Sys/PetscMalloc1/). 
+""" +macro PetscMalloc1(m1,r1) + quote + func = Ptr{Nothing}() + linec = Ptr{Nothing}() + PetscMallocA(1,PETSC_FALSE, Cint(11), Ref(Cstring(linec)), Ref(Cstring(func)), (Csize_t)($m1)*sizeof($r1),($r1)) + end +end +# PetscMalloc1(m1,r1) PetscMallocA(1,PETSC_FALSE,__LINE__,PETSC_FUNCTION_NAME,__FILE__,(size_t)(m1)*sizeof(**(r1)),(r1)) + + end # module From c72e3f27221b3e46ca6a5fd0045050290ce977a5 Mon Sep 17 00:00:00 2001 From: carlodev Date: Thu, 2 Feb 2023 10:23:06 +0100 Subject: [PATCH 10/22] cleaning --- src/PETScIndexes.jl | 44 ++++++++++++++++++++++++-------------------- 1 file changed, 24 insertions(+), 20 deletions(-) diff --git a/src/PETScIndexes.jl b/src/PETScIndexes.jl index 583acc6..e319ed2 100644 --- a/src/PETScIndexes.jl +++ b/src/PETScIndexes.jl @@ -47,26 +47,7 @@ end return pi0 end - # Base.@propagate_inbounds function Base.setindex!(v::PETScIS,y,i1::Integer) - # @boundscheck checkbounds(v, i1) - # n = one(PetscInt) - # i0 = Ref(PetscInt(i1-n)) - # vi = Ref(PetscInt(y)) - # @check_error_code PETSC.VecSetValues(v.is[],n,i0,vi,PETSC.INSERT_VALUES) - # v.is[i0] .= y - # y - # end - - # Constructors - - function PETScIS(n::Integer) - println("PETScIS") - v = Ref{Ptr{PetscInt}}() - println("Construct\n") - @check_error_code PETSC.PetscMalloc1(n, v) - Init(v) - end - + @@ -93,6 +74,29 @@ end Init(is) end + + # Base.@propagate_inbounds function Base.setindex!(v::PETScIS,y,i1::Integer) + # @boundscheck checkbounds(v, i1) + # n = one(PetscInt) + # i0 = Ref(PetscInt(i1-n)) + # vi = Ref(PetscInt(y)) + # @check_error_code PETSC.VecSetValues(v.is[],n,i0,vi,PETSC.INSERT_VALUES) + # v.is[i0] .= y + # y + # end + + # Constructors + + function PETScIS(n::Integer) + println("PETScIS") + v = Ref{Ptr{PetscInt}}() + println("Construct\n") + @check_error_code PETSC.PetscMalloc1(n, v) + Init(v) + end + + + # function PETScVector(a::PetscScalar,ax::AbstractUnitRange) # PETScVector(fill(a,length(ax))) # end From e2f271e78d09c2ed51dead9ce7281d0b9b7408c7 Mon Sep 17 00:00:00 2001 From: carlodev Date: Thu, 9 Feb 2023 17:08:16 +0100 Subject: [PATCH 11/22] add Lidriven test --- Project.toml | 2 +- src/PETSC.jl | 20 +++ src/PETScIndexes.jl | 4 - test/LidDrivenSUPG_splitfield.jl | 247 +++++++++++++++++++++++++++++++ 4 files changed, 268 insertions(+), 5 deletions(-) create mode 100644 test/LidDrivenSUPG_splitfield.jl diff --git a/Project.toml b/Project.toml index 1540531..6ca6f6a 100644 --- a/Project.toml +++ b/Project.toml @@ -20,7 +20,7 @@ Gridap = "0.17" GridapDistributed = "0.2" MPI = "0.14, 0.15, 0.16, 0.17, 0.18, 0.19" PETSc_jll = "3.13" -PartitionedArrays = "0.2.4" +PartitionedArrays = "0.2.13" SparseMatricesCSR = "0.6.6" julia = "1.3" diff --git a/src/PETSC.jl b/src/PETSC.jl index ca0664b..d6b8b28 100644 --- a/src/PETSC.jl +++ b/src/PETSC.jl @@ -734,8 +734,28 @@ const SNESPATCH = "patch" @wrapper(:ISExpand,PetscErrorCode,(IS, IS, Ptr{IS}),(is1, is2, isout), "https://petsc.org/release/docs/manualpages/IS/ISGetIndices/") @wrapper(:ISGetSize,PetscErrorCode,(IS, Ptr{PetscInt}), (is, nsize), "https://petsc.org/main/docs/manualpages/IS/ISGetSize/") @wrapper(:ISDestroy, PetscErrorCode,(Ptr{IS},), (pis,), "https://petsc.org/main/docs/manualpages/IS/ISDestroy/") + @wrapper(:PCFieldSplitSetIS,PetscErrorCode,(PC, Cstring, IS),(pc, Cfieldname, is), "https://petsc.org/release/docs/manualpages/PC/PCFieldSplitSetIS/") +""" +Julia alias for `PCCompositeType` C type. +See [PETSc manual](https://petsc.org/release/docs/manualpages/PC/PCCompositeType/). 
+""" +@enum PCCompositeType begin + PC_COMPOSITE_ADDITIVE + PC_COMPOSITE_MULTIPLICATIVE + PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE + PC_COMPOSITE_SPECIAL + PC_COMPOSITE_SCHUR + PC_COMPOSITE_GKB +end + + +@wrapper(:PCFieldSplitSetType,PetscErrorCode,(PC, PCCompositeType),(pc, pcctype), "https://petsc.org/release/docs/manualpages/PC/PCFieldSplitSetType/") + + + + #PETSc Print diff --git a/src/PETScIndexes.jl b/src/PETScIndexes.jl index e319ed2..f917053 100644 --- a/src/PETScIndexes.jl +++ b/src/PETScIndexes.jl @@ -47,10 +47,6 @@ end return pi0 end - - - - function PETScIS(idx::Vector{PetscInt}, bs=1) comm = MPI.COMM_SELF is = PETScIS(comm) diff --git a/test/LidDrivenSUPG_splitfield.jl b/test/LidDrivenSUPG_splitfield.jl new file mode 100644 index 0000000..1db6ce8 --- /dev/null +++ b/test/LidDrivenSUPG_splitfield.jl @@ -0,0 +1,247 @@ +using Revise +using Gridap +using Gridap.CellData + + +using Test +using GridapDistributed +using GridapDistributed.CellData + + +using PartitionedArrays +using MPI +using SparseArrays +using SparseMatricesCSR +using GridapPETSc +using BenchmarkTools + +# options = "-snes_type newtonls -snes_linesearch_type basic -snes_linesearch_damping 1.0 -snes_rtol 1.0e-10 -snes_atol 0.0 -snes_monitor -log_view \ +# -pc_use_amat -ksp_type fgmres -ksp_converged_reason -ksp_max_it 50 -ksp_rtol 1e-3 -ksp_atol 1e-8 \ +# -fieldsplit_vel_pc_type ilu -fieldsplit_vel_pc_factor_levels 2 -fieldsplit_velu_ksp_type gmres -fieldsplit_vel_ksp_monitor -fieldsplit_vel_ksp_converged_reason \ +# -fieldsplit_pres_pc_type gamg -fieldsplit_pres_ksp_type gmres -fieldsplit_pres_ksp_monitor fieldsplit_pres_ksp_converged_reason" #Very good pressure convergence + +# options = "-snes_type newtonls -snes_linesearch_type basic -snes_linesearch_damping 1.0 -snes_rtol 1.0e-10 -snes_atol 0.0 -snes_monitor -log_view \ +# -ksp_type fgmres -ksp_converged_reason -ksp_max_it 50 -ksp_rtol 1e-3 -ksp_atol 1e-8 -ksp_monitor_short \ +# -fieldsplit_vel_pc_type asm -fieldsplit_vel_ksp_type gmres -fieldsplit_vel_ksp_converged_reason \ +# -fieldsplit_pres_pc_type gamg -fieldsplit_pres_ksp_type gmres -fieldsplit_pres_ksp_converged_reason" #Works + + +options = "-snes_type newtonls -snes_linesearch_type basic -snes_linesearch_damping 1.0 -snes_rtol 1.0e-6 -snes_atol 0.0 -snes_monitor -log_view \ +-ksp_type fgmres -ksp_converged_reason -ksp_max_it 50 -ksp_rtol 1e-3 -ksp_atol 1e-10 -ksp_monitor_short \ +-fieldsplit_vel_pc_type gamg -fieldsplit_vel_ksp_type gmres -fieldsplit_vel_ksp_converged_reason \ +-fieldsplit_pres_pc_type gamg -fieldsplit_pres_ksp_type gmres -fieldsplit_pres_ksp_converged_reason" #Works + + +#-fieldsplit_vel_ksp_converged_reason -ksp_monitor_short +#-fieldsplit_pres_ksp_converged_reason +#-log_view -mat_view :lid.m:ascii_matlab +function stretching_y_function(x) + gamma1 = 2.5 + S = 0.5815356159649889 #for rescaling the function over the domain -0.5 -> 0.5 + -tanh.(gamma1 .* (x)) ./ tanh.(gamma1) .* S +end + + +function stretching(x::Point) + m = zeros(length(x)) + m[1] = stretching_y_function(x[1]) + + + m[2] = stretching_y_function(x[2]) + Point(m) +end + +function main_ld(parts) +t0 = 0 +dt = 0.01 +tF = 0.05 +θ = 1 +θvp = 1 +ν = 0.0001 + +n = 256 +L = 0.5 +domain = (-L, L, -L, L) +partition = (n,n) +model = CartesianDiscreteModel(parts, domain, partition, map=stretching) + +labels = get_face_labeling(model) +add_tag_from_tags!(labels, "diri1", [5,]) +add_tag_from_tags!(labels, "diri0", [1, 2, 4, 3, 6, 7, 8]) +add_tag_from_tags!(labels, "p", [4,]) + +u_wall= VectorValue(0.0, 0.0) +u_top = VectorValue(1.0, 
0.0) + +u_diri_values = [u_wall, u_top] + + +order = 1 +reffeᵤ = ReferenceFE(lagrangian,VectorValue{2,Float64},order) +reffeₚ = ReferenceFE(lagrangian,Float64,order) + +V = TestFESpace(model,reffeᵤ,dirichlet_tags=["diri0","diri1"],conformity=:H1) +Q = TestFESpace(model,reffeₚ,conformity=:H1,dirichlet_tags="p") +Y = MultiFieldFESpace([V,Q]) +U = TrialFESpace(V, u_diri_values) +P = TrialFESpace(Q,0.0) +X = MultiFieldFESpace([U,P]) + +degree = order*4 +Ω= Triangulation(model) +dΩ = Measure(Ω,degree) + +h = h_param(Ω,2) +function τ(u, h) + r = 1 + τ₂ = h^2 / (4 * ν) + val(x) = x + val(x::Gridap.Fields.ForwardDiff.Dual) = x.value + u = val(norm(u)) + + if iszero(u) + return τ₂ + end + + τ₃ = dt / 2 + τ₁ = h / (2 * u) + return 1 / (1 / τ₁^r + 1 / τ₂^r + 1 / τ₃^r) + + +end + + +τb(u, h) = (u ⋅ u) * τ(u, h) + + +Rm(t, (u, p)) = ∂t(u) + u ⋅ ∇(u) + ∇(p) #- hf(t) #- ν*Δ(u) +Rc(u) = ∇ ⋅ u +dRm((u, p), (du, dp), (v, q)) = du ⋅ ∇(u) + u ⋅ ∇(du) + ∇(dp) #- ν*Δ(du) +dRc(du) = ∇ ⋅ du + +Bᴳ(t, (u, p), (v, q)) = ∫(∂t(u) ⋅ v)dΩ + ∫((u ⋅ ∇(u)) ⋅ v)dΩ - ∫((∇ ⋅ v) * p)dΩ + ∫((q * (∇ ⋅ u)))dΩ + ν * ∫(∇(v) ⊙ ∇(u))dΩ #- ∫(hf(t) ⋅ v)dΩ +B_stab(t, (u, p), (v, q)) = ∫((τ ∘ (u.cellfield, h) * (u ⋅ ∇(v) + ∇(q))) ⊙ Rm(t, (u, p)) # First term: SUPG, second term: PSPG u⋅∇(v) + ∇(q) ++ +τb ∘ (u.cellfield, h) * (∇ ⋅ v) ⊙ Rc(u) # Bulk viscosity. Try commenting out both stabilization terms to see what happens in periodic and non-periodic cases +)dΩ +res(t, (u, p), (v, q)) = Bᴳ(t, (u, p), (v, q)) + B_stab(t, (u, p), (v, q)) + +dBᴳ(t, (u, p), (du, dp), (v, q)) = ∫(((du ⋅ ∇(u)) ⋅ v) + ((u ⋅ ∇(du)) ⋅ v) + (∇(dp) ⋅ v) + (q * (∇ ⋅ du)))dΩ + ν * ∫(∇(v) ⊙ ∇(du))dΩ +dB_stab(t, (u, p), (du, dp), (v, q)) = ∫(((τ ∘ (u.cellfield, h) * (u ⋅ ∇(v)' + ∇(q))) ⊙ dRm((u, p), (du, dp), (v, q))) + ((τ ∘ (u.cellfield, h) * (du ⋅ ∇(v)')) ⊙ Rm(t, (u, p))) + (τb ∘ (u.cellfield, h) * (∇ ⋅ v) ⊙ dRc(du)))dΩ + + +jac(t, (u, p), (du, dp), (v, q)) = dBᴳ(t, (u, p), (du, dp), (v, q)) + dB_stab(t, (u, p), (du, dp), (v, q)) + +jac_t(t, (u, p), (dut, dpt), (v, q)) = ∫(dut ⋅ v)dΩ + ∫(τ ∘ (u.cellfield, h) * (u ⋅ ∇(v) + (θvp)*∇(q)) ⊙ dut)dΩ + +op = TransientFEOperator(res, jac, jac_t, X,Y) + + +xh0 = zero(X) + + +#stokes_pc_fieldsplit_off_diag_use_amat -stokes_ksp_type pipegcr +#GridapPETSc.Init(args=split(options)) +GridapPETSc.with(args=split(options)) do + +#Definitions of IS +U_Parray, P_Parray = field_dof_split(Y,U,P) + +ISVel = PETScIS(U_Parray); +ISP = PETScIS(P_Parray); + +@check_error_code GridapPETSc.PETSC.ISView(ISVel.is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_WORLD) +@check_error_code GridapPETSc.PETSC.ISView(ISP.is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_WORLD) + +#@check_error_code GridapPETSc.PETSC.ISView(PETScIS(collect(1:5)).is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_WORLD) + +function mysnessetup(snes) +ksp = Ref{GridapPETSc.PETSC.KSP}() +pc = Ref{GridapPETSc.PETSC.PC}() +@check_error_code GridapPETSc.PETSC.SNESSetFromOptions(snes[]) +@check_error_code GridapPETSc.PETSC.SNESGetKSP(snes[],ksp) +#@check_error_code GridapPETSc.PETSC.KSPView(ksp[],C_NULL) +#@check_error_code GridapPETSc.PETSC.KSPSetType(ksp[], GridapPETSc.PETSC.KSPFGMRES) +@check_error_code GridapPETSc.PETSC.KSPGetPC(ksp[],pc) +@check_error_code GridapPETSc.PETSC.PCSetType(pc[],GridapPETSc.PETSC.PCFIELDSPLIT) +@check_error_code GridapPETSc.PETSC.PCFieldSplitSetType(pc[],GridapPETSc.PETSC.PC_COMPOSITE_ADDITIVE) +@check_error_code GridapPETSc.PETSC.PCFactorSetUpMatSolverType(pc[]) +@check_error_code GridapPETSc.PETSC.KSPSetFromOptions(ksp[]) +#After KSP setFromOption 
+@check_error_code GridapPETSc.PETSC.PCFieldSplitSetIS(pc[],"vel",ISVel.is[]) +@check_error_code GridapPETSc.PETSC.PCFieldSplitSetIS(pc[],"pres",ISP.is[]) + +end + +function solve_sim(solver_type) + if solver_type == :petsc + solver = PETScNonlinearSolver(mysnessetup) + elseif solver_type == :julia + solver = NLSolver(show_trace=true, method=:newton) + end + + ode_solver = ThetaMethod(solver, dt, θ) + + solt = solve(ode_solver, op, xh0, t0, tF) + iter = 0 + @time createpvd(parts,"LidDriven") do pvd + for (xh_tn, tn) in solt + println("iteration = $iter") + iter = iter +1 + uh_tn = xh_tn[1] + ph_tn = xh_tn[2] + ωh_tn = ∇ × uh_tn + pvd[tn] = createvtk(Ω, "LidDriven_$tn"*".vtu", cellfields=["uh" => uh_tn, "ph" => ph_tn, "wh" => ωh_tn]) + end + end +end + +solve_sim(:petsc) +solve_sim(:julia) + + + + +#GridapPETSc.Finalize() +end +end + + +function field_dof_split(Y,U,P) + xrows = Y.gids.partition + urows = U.gids.partition + prows = P.gids.partition + + ulrows,plrows = map_parts(urows,prows,xrows) do urows,prows,xrows + uloc_idx = collect(1:1:length(urows.oid_to_lid)) + ploc_idx = collect(length(uloc_idx)+1 :1: length(xrows.oid_to_lid)) + ur = xrows.lid_to_gid[xrows.oid_to_lid][uloc_idx] + pr = xrows.lid_to_gid[xrows.oid_to_lid][ploc_idx] + + return ur, pr + end + ur = ulrows.part .-1 + pr = plrows.part .-1 + + return ur,pr +end + +function h_param(Ω::GridapDistributed.DistributedTriangulation, D::Int64) + h = map_parts(Ω.trians) do trian + h_param(trian, D) + end + h = CellData.CellField(h, Ω) + h +end + + +function h_param(Ω::Triangulation, D::Int64) + h = lazy_map(h -> h^(1 / D), get_cell_measure(Ω)) + + h +end + +partition =(2,2) +with_backend(main_ld, MPIBackend(), partition) + +#mpiexecjl --project=. -n 4 julia LidDrivenSUPG_IS_distributed.jl From 91a3ba00dd39008dd5dd97c33a7b904a0f48df78 Mon Sep 17 00:00:00 2001 From: carlodev Date: Mon, 13 Feb 2023 10:00:44 +0100 Subject: [PATCH 12/22] idx from AbstractVector --- src/PETScIndexes.jl | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/src/PETScIndexes.jl b/src/PETScIndexes.jl index f917053..a40dc3d 100644 --- a/src/PETScIndexes.jl +++ b/src/PETScIndexes.jl @@ -55,20 +55,20 @@ end Init(is) end - function PETScIS(idx::Vector, bs=1) + function PETScIS(idx::AbstractVector, bs=1) idx = PetscInt.(idx) PETScIS(idx) end -#Block Implementation - function PETScIS(array::Vector{PetscInt},n, bs=1) - comm = MPI.COMM_SELF - is = PETScIS(comm) - n = PetscInt(n) - bs = PetscInt(bs) - @check_error_code GridapPETSc.PETSC.ISCreateBlock(comm, n, bs, array, GridapPETSc.PETSC.PETSC_COPY_VALUES, is.is) - Init(is) - end +# #Block Implementation +# function PETScIS(array::Vector{PetscInt},n, bs=1) +# comm = MPI.COMM_SELF +# is = PETScIS(comm) +# n = PetscInt(n) +# bs = PetscInt(bs) +# @check_error_code GridapPETSc.PETSC.ISCreateBlock(comm, n, bs, array, GridapPETSc.PETSC.PETSC_COPY_VALUES, is.is) +# Init(is) +# end # Base.@propagate_inbounds function Base.setindex!(v::PETScIS,y,i1::Integer) From e323c98bad962b8591369e022d7e624f16359ba2 Mon Sep 17 00:00:00 2001 From: carlodev Date: Mon, 13 Feb 2023 10:01:13 +0100 Subject: [PATCH 13/22] read pc type --- src/PETSC.jl | 11 ++++++++++- src/PETScLinearSolvers.jl | 36 +++++++++++++++++++++++++++++++++++- 2 files changed, 45 insertions(+), 2 deletions(-) diff --git a/src/PETSC.jl b/src/PETSC.jl index d6b8b28..89b90cc 100644 --- a/src/PETSC.jl +++ b/src/PETSC.jl @@ -648,6 +648,9 @@ Base.unsafe_convert(::Type{Ptr{Cvoid}},v::PC) = v.ptr 
@wrapper(:PCFactorSetUpMatSolverType,PetscErrorCode,(PC,),(pc,),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/PC/PCFactorSetUpMatSolverType.html") @wrapper(:PCFactorGetMatrix,PetscErrorCode,(PC,Ptr{Mat}),(ksp,mat),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/PC/PCFactorGetMatrix.html") +#PCType() = PCType(Ptr{Cvoid}()) +@wrapper(:PCGetType,PetscErrorCode,(PC, Ptr{Ptr{Cstring}}),(pc,typ),"https://petsc.org/main/docs/manualpages/PC/PCGetType/") + """ Julia alias for the `SNES` C type. @@ -713,9 +716,15 @@ const SNESPATCH = "patch" IS() = IS(Ptr{Cvoid}()) Base.convert(::Type{IS},p::Ptr{Cvoid}) = IS(p) Base.unsafe_convert(::Type{Ptr{Cvoid}},v::IS) = v.ptr - const ISType = Cstring +""" +Julia alias for `ISType` C type. + +See [PETSc manual](https://petsc.org/main/docs/manualpages/IS/ISType/). +""" +const ISType = Cstring + """ Julia alias for `PetscCopyMode` C type. diff --git a/src/PETScLinearSolvers.jl b/src/PETScLinearSolvers.jl index ff993a2..21fb99b 100644 --- a/src/PETScLinearSolvers.jl +++ b/src/PETScLinearSolvers.jl @@ -1,9 +1,43 @@ +struct PETScFieldSplit{F} + setup::F +end + +function set_fieldsplit(pc) + println("here") + println(typeof(pc)) +end + +function PETScFieldSplit() + PETScFieldSplit(set_fieldsplit) +end + + + struct PETScLinearSolver{F} <: LinearSolver setup::F end -ksp_from_options(ksp) = @check_error_code PETSC.KSPSetFromOptions(ksp[]) +function ksp_from_options(ksp) + pc = Ref{GridapPETSc.PETSC.PC}() + pctype = Ref{Ptr{Cstring}}() + kspset = @check_error_code PETSC.KSPSetFromOptions(ksp[]) + @check_error_code GridapPETSc.PETSC.KSPGetPC(ksp[],pc) + @check_error_code GridapPETSc.PETSC.PCGetType(pc[], pctype) + + #Get PC-string name + pc_ptr_conv = reinterpret(Ptr{UInt8}, pctype[]) + GC.@preserve pc_name = unsafe_string(pc_ptr_conv) + + if pc_name == "fieldsplit" + println("SplitFields Function") + else + splitfields() + end + println("Preondition type: $(pc_name)") + + kspset +end function PETScLinearSolver() PETScLinearSolver(ksp_from_options) From 92cc099d9bfc961166143e38d4bb6baa1a09fe20 Mon Sep 17 00:00:00 2001 From: carlodev Date: Mon, 13 Feb 2023 17:48:09 +0100 Subject: [PATCH 14/22] add GridapDistributed dep --- src/GridapPETSc.jl | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/GridapPETSc.jl b/src/GridapPETSc.jl index f2e5bc1..f4221c5 100644 --- a/src/GridapPETSc.jl +++ b/src/GridapPETSc.jl @@ -5,6 +5,11 @@ using Libdl using Gridap.Helpers using Gridap.Algebra using Gridap.Arrays +using Gridap.FESpaces +using Gridap.MultiField + +using GridapDistributed + using LinearAlgebra using SparseArrays using SparseMatricesCSR @@ -84,6 +89,10 @@ include("PETScArrays.jl") include("PETScIndexes.jl") include("PartitionedArrays.jl") + +export PETScFieldSplit +include("PETScFieldSplits.jl") + export PETScLinearSolver include("PETScLinearSolvers.jl") From 1f2158b932088446c6aaac5053f076b2db4e2c4c Mon Sep 17 00:00:00 2001 From: carlodev Date: Thu, 16 Feb 2023 09:31:42 +0100 Subject: [PATCH 15/22] add PetscSleep --- src/PETSC.jl | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/PETSC.jl b/src/PETSC.jl index 89b90cc..d49868a 100644 --- a/src/PETSC.jl +++ b/src/PETSC.jl @@ -771,6 +771,12 @@ end @wrapper(:PetscPrintf,PetscErrorCode,(MPI.Comm, Cstring ),(comm, args ),"https://petsc.org/release/docs/manualpages/Sys/PetscPrintf/") @wrapper(:PetscSynchronizedPrintf,PetscErrorCode,(MPI.Comm, Cstring),(comm, args),"https://petsc.org/main/docs/manualpages/Sys/PetscSynchronizedPrintf/") + +#PETSc Sleep 
+@wrapper(:PetscSleep,PetscErrorCode,(PetscReal,),(s,),"https://petsc.org/main/docs/manualpages/Sys/PetscSleep/") + + + #PETSc Alloc @wrapper(:PetscMallocA,PetscErrorCode,(PetscInt, PetscBool, PetscInt, Ptr{Cstring}, Ptr{Cstring}, Csize_t, Ptr{Cvoid},), (n, clear, lineno, fun, fname, bytes0, ptr0,), "https://petsc.org/release/docs/manualpages/Sys/PetscMallocA/#petscmalloca") #PETSC_EXTERN PetscErrorCode PetscMallocA(int,PetscBool,int,const char *,const char *,size_t,void *,...); From 84a7e265dd62e5b63506dff84d8840e763b6c0df Mon Sep 17 00:00:00 2001 From: carlodev Date: Thu, 16 Feb 2023 09:32:08 +0100 Subject: [PATCH 16/22] add PETScFieldSplits.jl --- src/PETScFieldSplits.jl | 101 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 101 insertions(+) create mode 100644 src/PETScFieldSplits.jl diff --git a/src/PETScFieldSplits.jl b/src/PETScFieldSplits.jl new file mode 100644 index 0000000..98b2c48 --- /dev/null +++ b/src/PETScFieldSplits.jl @@ -0,0 +1,101 @@ +# PETScFieldSplit +# MF (multifield) is a vector where each element is a SingleFieldFESpace or DistributedSingleFieldFESpace, eg: [U,P] +# tags is a vector containing the names of the field (in order), eg: ["vel", "pres"] +# show_idx is Bool vector, if true it prints the indexes +struct PETScFieldSplit + MF + tags::Vector{String} + show_idx::Bool +end + +# PETScFieldSplit constructor from Multifield +function PETScFieldSplit(MF::Union{MultiFieldFESpace, GridapDistributed.DistributedMultiFieldFESpace}, tags::Vector{String}; show_idx = false) + n_fields = length(MF) + n_tags = length(tags) + @assert n_tags == n_fields #Verify that at each field a name is assigned + M = [MF[1]] #Vector of SingleFieldFESpace + for i = 2:1:n_fields + M = [M...,MF[i]] + end + PETScFieldSplit(M, tags, show_idx) +end + +function PETScFieldSplit(MF::Union{Vector{SingleFieldFESpace}, Vector{<:GridapDistributed.DistributedSingleFieldFESpace}}, tags; show_idx = false) + n_fields = length(MF) + n_tags = length(tags) + @assert n_tags == n_fields + PETScFieldSplit(MF, tags, show_idx) +end + + + + +function field_dof_split(U) + X = MultiFieldFESpace(U) + field_dof_split(X,U) +end + +# It allows to obtain the dof of each field over each processor - dont know if the best/most elegant way +function field_dof_split(X, U) + @assert length(X) == length(U) + xrows = X.gids.partition + urows = Any[] + for Ui in X + urowsi = Ui.gids.partition + push!(urows, urowsi) + end + + offset = 0 + ulrows = Any[] + for urowsi in urows + ulrowsi = map_parts(urowsi,xrows) do urowsij,xrows + num_dof_Ui = length(urowsij.oid_to_lid) + uloc_idx = collect(1+offset:1:num_dof_Ui + offset) + uri = xrows.lid_to_gid[xrows.oid_to_lid][uloc_idx] + offset = num_dof_Ui+offset + return uri + end + #It works only in MPI, non-MPI do not have .part fields + # .-1 to match the numbering in PETSc that starts from 0 + ur = ulrowsi.part .-1 + push!(ulrows,ur) + end + return ulrows +end + + + +function set_fieldsplit(sol, SplitField::PETScFieldSplit) + + #Get PC-string name + pc = Ref{GridapPETSc.PETSC.PC}() + pctype = Ref{Ptr{Cstring}}() + + #Get the PC, if the problem is linear ksp or non linear snes + if typeof(sol) <:Base.RefValue{GridapPETSc.PETSC.KSP} + ksp = sol + elseif typeof(sol) <:Base.RefValue{GridapPETSc.PETSC.SNES} + snes = sol + ksp = Ref{GridapPETSc.PETSC.KSP}() + @check_error_code GridapPETSc.PETSC.SNESGetKSP(snes[],ksp) + end + + @check_error_code GridapPETSc.PETSC.KSPGetPC(ksp[], pc) + @check_error_code GridapPETSc.PETSC.PCGetType(pc[], pctype) + + pc_ptr_conv = reinterpret(Ptr{UInt8}, 
pctype[]) + GC.@preserve pc_name = unsafe_string(pc_ptr_conv) + + @assert pc_name == "fieldsplit" #Check that the preconditioner requires splitting fields + n_tags = length(SplitField.tags) + U_Parray = field_dof_split(SplitField.MF) + for i = 1:1:n_tags + ISU = PETScIS(U_Parray[i]) + @check_error_code GridapPETSc.PETSC.PCFieldSplitSetIS(pc[], SplitField.tags[i], ISU.is[]) + if SplitField.show_idx + @check_error_code GridapPETSc.PETSC.ISView(ISU.is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_SELF) + end + end + + + end \ No newline at end of file From 2c9d92eec9564772ee1613d7e6bb8b9c6d5e0748 Mon Sep 17 00:00:00 2001 From: carlodev Date: Thu, 16 Feb 2023 09:33:23 +0100 Subject: [PATCH 17/22] modifying PETScLinearSolver and PETScNonLinearSolvers for supporting FieldSplit --- src/PETScLinearSolvers.jl | 108 ++++++++++++++--------------------- src/PETScNonlinearSolvers.jl | 17 +++++- 2 files changed, 60 insertions(+), 65 deletions(-) diff --git a/src/PETScLinearSolvers.jl b/src/PETScLinearSolvers.jl index 21fb99b..c87d5c3 100644 --- a/src/PETScLinearSolvers.jl +++ b/src/PETScLinearSolvers.jl @@ -1,53 +1,28 @@ - -struct PETScFieldSplit{F} +struct PETScLinearSolver{F} <: LinearSolver setup::F + fieldsplit::Union{PETScFieldSplit, Nothing} end -function set_fieldsplit(pc) - println("here") - println(typeof(pc)) -end - -function PETScFieldSplit() - PETScFieldSplit(set_fieldsplit) -end - +ksp_from_options(ksp) = @check_error_code PETSC.KSPSetFromOptions(ksp[]) - -struct PETScLinearSolver{F} <: LinearSolver - setup::F +function PETScLinearSolver(SplitField::PETScFieldSplit) + PETScLinearSolver(ksp_from_options,SplitField) end -function ksp_from_options(ksp) - pc = Ref{GridapPETSc.PETSC.PC}() - pctype = Ref{Ptr{Cstring}}() - kspset = @check_error_code PETSC.KSPSetFromOptions(ksp[]) - @check_error_code GridapPETSc.PETSC.KSPGetPC(ksp[],pc) - @check_error_code GridapPETSc.PETSC.PCGetType(pc[], pctype) - - #Get PC-string name - pc_ptr_conv = reinterpret(Ptr{UInt8}, pctype[]) - GC.@preserve pc_name = unsafe_string(pc_ptr_conv) - - if pc_name == "fieldsplit" - println("SplitFields Function") - else - splitfields() - end - println("Preondition type: $(pc_name)") - - kspset +function PETScLinearSolver(ksp_options) + PETScLinearSolver(ksp_options,nothing) end function PETScLinearSolver() - PETScLinearSolver(ksp_from_options) + PETScLinearSolver(ksp_from_options, nothing) end + struct PETScLinearSolverSS{F} <: SymbolicSetup solver::PETScLinearSolver{F} end -function Algebra.symbolic_setup(solver::PETScLinearSolver,mat::AbstractMatrix) +function Algebra.symbolic_setup(solver::PETScLinearSolver, mat::AbstractMatrix) PETScLinearSolverSS(solver) end @@ -56,9 +31,9 @@ mutable struct PETScLinearSolverNS{T} <: NumericalSetup B::PETScMatrix ksp::Ref{KSP} initialized::Bool - function PETScLinearSolverNS(A,B::PETScMatrix) - T=typeof(A) - new{T}(A,B,Ref{KSP}(),false) + function PETScLinearSolverNS(A, B::PETScMatrix) + T = typeof(A) + new{T}(A, B, Ref{KSP}(), false) end end @@ -66,7 +41,7 @@ function Init(a::PETScLinearSolverNS) @assert Threads.threadid() == 1 _NREFS[] += 1 a.initialized = true - finalizer(Finalize,a) + finalizer(Finalize, a) end function Finalize(ns::PETScLinearSolverNS) @@ -83,53 +58,58 @@ function Finalize(ns::PETScLinearSolverNS) nothing end -function Algebra.numerical_setup(ss::PETScLinearSolverSS,A::AbstractMatrix) - B = convert(PETScMatrix,A) - ns = PETScLinearSolverNS(A,B) - @check_error_code PETSC.KSPCreate(B.comm,ns.ksp) - @check_error_code PETSC.KSPSetOperators(ns.ksp[],ns.B.mat[],ns.B.mat[]) 
+function Algebra.numerical_setup(ss::PETScLinearSolverSS, A::AbstractMatrix) + B = convert(PETScMatrix, A) + ns = PETScLinearSolverNS(A, B) + @check_error_code PETSC.KSPCreate(B.comm, ns.ksp) + @check_error_code PETSC.KSPSetOperators(ns.ksp[], ns.B.mat[], ns.B.mat[]) ss.solver.setup(ns.ksp) + + if typeof(ss.solver.fieldsplit) == PETScFieldSplit + set_fieldsplit(ns.ksp, ss.solver.fieldsplit) + end + @check_error_code PETSC.KSPSetUp(ns.ksp[]) Init(ns) end -function Algebra.solve!(x::PETScVector,ns::PETScLinearSolverNS,b::AbstractVector) +function Algebra.solve!(x::PETScVector, ns::PETScLinearSolverNS, b::AbstractVector) if (x.comm != MPI.COMM_SELF) gridap_petsc_gc() # Do garbage collection of PETSc objects end - B = convert(PETScVector,b) - @check_error_code PETSC.KSPSolve(ns.ksp[],B.vec[],x.vec[]) + B = convert(PETScVector, b) + @check_error_code PETSC.KSPSolve(ns.ksp[], B.vec[], x.vec[]) x end -function Algebra.solve!(x::Vector{PetscScalar},ns::PETScLinearSolverNS,b::AbstractVector) - X = convert(PETScVector,x) - solve!(X,ns,b) +function Algebra.solve!(x::Vector{PetscScalar}, ns::PETScLinearSolverNS, b::AbstractVector) + X = convert(PETScVector, x) + solve!(X, ns, b) x end -function Algebra.solve!(x::AbstractVector,ns::PETScLinearSolverNS,b::AbstractVector) - X = convert(Vector{PetscScalar},x) - solve!(X,ns,b) +function Algebra.solve!(x::AbstractVector, ns::PETScLinearSolverNS, b::AbstractVector) + X = convert(Vector{PetscScalar}, x) + solve!(X, ns, b) x .= X x end -function Algebra.solve!(x::PVector,ns::PETScLinearSolverNS,b::PVector) - X = similar(b,(axes(ns.A)[2],)) - B = similar(b,(axes(ns.A)[2],)) - copy!(X,x) - copy!(B,b) - Y = convert(PETScVector,X) - solve!(Y,ns,B) - copy!(x,Y) +function Algebra.solve!(x::PVector, ns::PETScLinearSolverNS, b::PVector) + X = similar(b, (axes(ns.A)[2],)) + B = similar(b, (axes(ns.A)[2],)) + copy!(X, x) + copy!(B, b) + Y = convert(PETScVector, X) + solve!(Y, ns, B) + copy!(x, Y) end -function Algebra.numerical_setup!(ns::PETScLinearSolverNS,A::AbstractMatrix) +function Algebra.numerical_setup!(ns::PETScLinearSolverNS, A::AbstractMatrix) ns.A = A - ns.B = convert(PETScMatrix,A) - @check_error_code PETSC.KSPSetOperators(ns.ksp[],ns.B.mat[],ns.B.mat[]) + ns.B = convert(PETScMatrix, A) + @check_error_code PETSC.KSPSetOperators(ns.ksp[], ns.B.mat[], ns.B.mat[]) @check_error_code PETSC.KSPSetUp(ns.ksp[]) ns end diff --git a/src/PETScNonlinearSolvers.jl b/src/PETScNonlinearSolvers.jl index abe6703..fca804e 100644 --- a/src/PETScNonlinearSolvers.jl +++ b/src/PETScNonlinearSolvers.jl @@ -1,6 +1,7 @@ mutable struct PETScNonlinearSolver{F} <: NonlinearSolver setup::F + fieldsplit::Union{PETScFieldSplit, Nothing} end mutable struct PETScNonlinearSolverCache{A,B,C,D,E} @@ -108,8 +109,18 @@ end snes_from_options(snes) = @check_error_code PETSC.SNESSetFromOptions(snes[]) + +function PETScNonlinearSolver(SplitField::PETScFieldSplit) + PETScNonlinearSolver(snes_from_options,SplitField) +end + +function PETScNonlinearSolver(snes_options) + PETScNonlinearSolver(snes_options,nothing) +end + + function PETScNonlinearSolver() - PETScNonlinearSolver(snes_from_options) + PETScNonlinearSolver(snes_from_options,nothing) end function _set_petsc_residual_function!(nls::PETScNonlinearSolver, cache) @@ -183,6 +194,10 @@ function Algebra.solve!(x::AbstractVector,nls::PETScNonlinearSolver,op::Nonlinea nls.setup(cache.snes) + if typeof(nls.fieldsplit) == PETScFieldSplit + set_fieldsplit(cache.snes, nls.fieldsplit) + end + @check_error_code 
PETSC.SNESSolve(cache.snes[],C_NULL,cache.x_petsc.vec[]) copy!(x,cache.x_petsc) cache From 329c88f803ff1b3eea96fc0424cd9d1537d4846d Mon Sep 17 00:00:00 2001 From: carlodev Date: Thu, 16 Feb 2023 09:33:41 +0100 Subject: [PATCH 18/22] using GridapDistributed --- src/GridapPETSc.jl | 1 + 1 file changed, 1 insertion(+) diff --git a/src/GridapPETSc.jl b/src/GridapPETSc.jl index f4221c5..d1e34c0 100644 --- a/src/GridapPETSc.jl +++ b/src/GridapPETSc.jl @@ -9,6 +9,7 @@ using Gridap.FESpaces using Gridap.MultiField using GridapDistributed +using GridapDistributed.MultiField using LinearAlgebra using SparseArrays From 870beba7e78e9885a2b81e2c0f3c5b35dd8ac05d Mon Sep 17 00:00:00 2001 From: carlodev Date: Thu, 16 Feb 2023 10:01:26 +0100 Subject: [PATCH 19/22] remove allocation function from petsc --- src/PETSC.jl | 25 ------------------------- 1 file changed, 25 deletions(-) diff --git a/src/PETSC.jl b/src/PETSC.jl index d49868a..3685245 100644 --- a/src/PETSC.jl +++ b/src/PETSC.jl @@ -763,10 +763,6 @@ end @wrapper(:PCFieldSplitSetType,PetscErrorCode,(PC, PCCompositeType),(pc, pcctype), "https://petsc.org/release/docs/manualpages/PC/PCFieldSplitSetType/") - - - - #PETSc Print @wrapper(:PetscPrintf,PetscErrorCode,(MPI.Comm, Cstring ),(comm, args ),"https://petsc.org/release/docs/manualpages/Sys/PetscPrintf/") @wrapper(:PetscSynchronizedPrintf,PetscErrorCode,(MPI.Comm, Cstring),(comm, args),"https://petsc.org/main/docs/manualpages/Sys/PetscSynchronizedPrintf/") @@ -775,25 +771,4 @@ end #PETSc Sleep @wrapper(:PetscSleep,PetscErrorCode,(PetscReal,),(s,),"https://petsc.org/main/docs/manualpages/Sys/PetscSleep/") - - -#PETSc Alloc -@wrapper(:PetscMallocA,PetscErrorCode,(PetscInt, PetscBool, PetscInt, Ptr{Cstring}, Ptr{Cstring}, Csize_t, Ptr{Cvoid},), (n, clear, lineno, fun, fname, bytes0, ptr0,), "https://petsc.org/release/docs/manualpages/Sys/PetscMallocA/#petscmalloca") -#PETSC_EXTERN PetscErrorCode PetscMallocA(int,PetscBool,int,const char *,const char *,size_t,void *,...); - -""" - @PetscMalloc1 - -See [PETSc manual](https://petsc.org/release/docs/manualpages/Sys/PetscMalloc1/). 
-""" -macro PetscMalloc1(m1,r1) - quote - func = Ptr{Nothing}() - linec = Ptr{Nothing}() - PetscMallocA(1,PETSC_FALSE, Cint(11), Ref(Cstring(linec)), Ref(Cstring(func)), (Csize_t)($m1)*sizeof($r1),($r1)) - end -end -# PetscMalloc1(m1,r1) PetscMallocA(1,PETSC_FALSE,__LINE__,PETSC_FUNCTION_NAME,__FILE__,(size_t)(m1)*sizeof(**(r1)),(r1)) - - end # module From fa6eaa20d6bee08b407f895d5db1da9d1ec9c8d4 Mon Sep 17 00:00:00 2001 From: carlodev Date: Thu, 16 Feb 2023 15:12:17 +0100 Subject: [PATCH 20/22] fix --- .vscode/settings.json | 1 + src/PETScNonlinearSolvers.jl | 3 +- test/LidDrivenSUPG_splitfield.jl | 247 ------------------------------- 3 files changed, 2 insertions(+), 249 deletions(-) create mode 100644 .vscode/settings.json delete mode 100644 test/LidDrivenSUPG_splitfield.jl diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..9e26dfe --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/src/PETScNonlinearSolvers.jl b/src/PETScNonlinearSolvers.jl index fca804e..47a1ea1 100644 --- a/src/PETScNonlinearSolvers.jl +++ b/src/PETScNonlinearSolvers.jl @@ -169,8 +169,7 @@ function Algebra.solve!(x::T, op::NonlinearOperator, cache::PETScNonlinearSolverCache{<:T}) where T <: AbstractVector - #@assert cache.op === op - cache.op = op + @assert cache.op === op @check_error_code PETSC.SNESSolve(cache.snes[],C_NULL,cache.x_petsc.vec[]) copy!(x,cache.x_petsc) cache diff --git a/test/LidDrivenSUPG_splitfield.jl b/test/LidDrivenSUPG_splitfield.jl deleted file mode 100644 index 1db6ce8..0000000 --- a/test/LidDrivenSUPG_splitfield.jl +++ /dev/null @@ -1,247 +0,0 @@ -using Revise -using Gridap -using Gridap.CellData - - -using Test -using GridapDistributed -using GridapDistributed.CellData - - -using PartitionedArrays -using MPI -using SparseArrays -using SparseMatricesCSR -using GridapPETSc -using BenchmarkTools - -# options = "-snes_type newtonls -snes_linesearch_type basic -snes_linesearch_damping 1.0 -snes_rtol 1.0e-10 -snes_atol 0.0 -snes_monitor -log_view \ -# -pc_use_amat -ksp_type fgmres -ksp_converged_reason -ksp_max_it 50 -ksp_rtol 1e-3 -ksp_atol 1e-8 \ -# -fieldsplit_vel_pc_type ilu -fieldsplit_vel_pc_factor_levels 2 -fieldsplit_velu_ksp_type gmres -fieldsplit_vel_ksp_monitor -fieldsplit_vel_ksp_converged_reason \ -# -fieldsplit_pres_pc_type gamg -fieldsplit_pres_ksp_type gmres -fieldsplit_pres_ksp_monitor fieldsplit_pres_ksp_converged_reason" #Very good pressure convergence - -# options = "-snes_type newtonls -snes_linesearch_type basic -snes_linesearch_damping 1.0 -snes_rtol 1.0e-10 -snes_atol 0.0 -snes_monitor -log_view \ -# -ksp_type fgmres -ksp_converged_reason -ksp_max_it 50 -ksp_rtol 1e-3 -ksp_atol 1e-8 -ksp_monitor_short \ -# -fieldsplit_vel_pc_type asm -fieldsplit_vel_ksp_type gmres -fieldsplit_vel_ksp_converged_reason \ -# -fieldsplit_pres_pc_type gamg -fieldsplit_pres_ksp_type gmres -fieldsplit_pres_ksp_converged_reason" #Works - - -options = "-snes_type newtonls -snes_linesearch_type basic -snes_linesearch_damping 1.0 -snes_rtol 1.0e-6 -snes_atol 0.0 -snes_monitor -log_view \ --ksp_type fgmres -ksp_converged_reason -ksp_max_it 50 -ksp_rtol 1e-3 -ksp_atol 1e-10 -ksp_monitor_short \ --fieldsplit_vel_pc_type gamg -fieldsplit_vel_ksp_type gmres -fieldsplit_vel_ksp_converged_reason \ --fieldsplit_pres_pc_type gamg -fieldsplit_pres_ksp_type gmres -fieldsplit_pres_ksp_converged_reason" #Works - - -#-fieldsplit_vel_ksp_converged_reason -ksp_monitor_short -#-fieldsplit_pres_ksp_converged_reason -#-log_view 
-mat_view :lid.m:ascii_matlab -function stretching_y_function(x) - gamma1 = 2.5 - S = 0.5815356159649889 #for rescaling the function over the domain -0.5 -> 0.5 - -tanh.(gamma1 .* (x)) ./ tanh.(gamma1) .* S -end - - -function stretching(x::Point) - m = zeros(length(x)) - m[1] = stretching_y_function(x[1]) - - - m[2] = stretching_y_function(x[2]) - Point(m) -end - -function main_ld(parts) -t0 = 0 -dt = 0.01 -tF = 0.05 -θ = 1 -θvp = 1 -ν = 0.0001 - -n = 256 -L = 0.5 -domain = (-L, L, -L, L) -partition = (n,n) -model = CartesianDiscreteModel(parts, domain, partition, map=stretching) - -labels = get_face_labeling(model) -add_tag_from_tags!(labels, "diri1", [5,]) -add_tag_from_tags!(labels, "diri0", [1, 2, 4, 3, 6, 7, 8]) -add_tag_from_tags!(labels, "p", [4,]) - -u_wall= VectorValue(0.0, 0.0) -u_top = VectorValue(1.0, 0.0) - -u_diri_values = [u_wall, u_top] - - -order = 1 -reffeᵤ = ReferenceFE(lagrangian,VectorValue{2,Float64},order) -reffeₚ = ReferenceFE(lagrangian,Float64,order) - -V = TestFESpace(model,reffeᵤ,dirichlet_tags=["diri0","diri1"],conformity=:H1) -Q = TestFESpace(model,reffeₚ,conformity=:H1,dirichlet_tags="p") -Y = MultiFieldFESpace([V,Q]) -U = TrialFESpace(V, u_diri_values) -P = TrialFESpace(Q,0.0) -X = MultiFieldFESpace([U,P]) - -degree = order*4 -Ω= Triangulation(model) -dΩ = Measure(Ω,degree) - -h = h_param(Ω,2) -function τ(u, h) - r = 1 - τ₂ = h^2 / (4 * ν) - val(x) = x - val(x::Gridap.Fields.ForwardDiff.Dual) = x.value - u = val(norm(u)) - - if iszero(u) - return τ₂ - end - - τ₃ = dt / 2 - τ₁ = h / (2 * u) - return 1 / (1 / τ₁^r + 1 / τ₂^r + 1 / τ₃^r) - - -end - - -τb(u, h) = (u ⋅ u) * τ(u, h) - - -Rm(t, (u, p)) = ∂t(u) + u ⋅ ∇(u) + ∇(p) #- hf(t) #- ν*Δ(u) -Rc(u) = ∇ ⋅ u -dRm((u, p), (du, dp), (v, q)) = du ⋅ ∇(u) + u ⋅ ∇(du) + ∇(dp) #- ν*Δ(du) -dRc(du) = ∇ ⋅ du - -Bᴳ(t, (u, p), (v, q)) = ∫(∂t(u) ⋅ v)dΩ + ∫((u ⋅ ∇(u)) ⋅ v)dΩ - ∫((∇ ⋅ v) * p)dΩ + ∫((q * (∇ ⋅ u)))dΩ + ν * ∫(∇(v) ⊙ ∇(u))dΩ #- ∫(hf(t) ⋅ v)dΩ -B_stab(t, (u, p), (v, q)) = ∫((τ ∘ (u.cellfield, h) * (u ⋅ ∇(v) + ∇(q))) ⊙ Rm(t, (u, p)) # First term: SUPG, second term: PSPG u⋅∇(v) + ∇(q) -+ -τb ∘ (u.cellfield, h) * (∇ ⋅ v) ⊙ Rc(u) # Bulk viscosity. 
Try commenting out both stabilization terms to see what happens in periodic and non-periodic cases -)dΩ -res(t, (u, p), (v, q)) = Bᴳ(t, (u, p), (v, q)) + B_stab(t, (u, p), (v, q)) - -dBᴳ(t, (u, p), (du, dp), (v, q)) = ∫(((du ⋅ ∇(u)) ⋅ v) + ((u ⋅ ∇(du)) ⋅ v) + (∇(dp) ⋅ v) + (q * (∇ ⋅ du)))dΩ + ν * ∫(∇(v) ⊙ ∇(du))dΩ -dB_stab(t, (u, p), (du, dp), (v, q)) = ∫(((τ ∘ (u.cellfield, h) * (u ⋅ ∇(v)' + ∇(q))) ⊙ dRm((u, p), (du, dp), (v, q))) + ((τ ∘ (u.cellfield, h) * (du ⋅ ∇(v)')) ⊙ Rm(t, (u, p))) + (τb ∘ (u.cellfield, h) * (∇ ⋅ v) ⊙ dRc(du)))dΩ - - -jac(t, (u, p), (du, dp), (v, q)) = dBᴳ(t, (u, p), (du, dp), (v, q)) + dB_stab(t, (u, p), (du, dp), (v, q)) - -jac_t(t, (u, p), (dut, dpt), (v, q)) = ∫(dut ⋅ v)dΩ + ∫(τ ∘ (u.cellfield, h) * (u ⋅ ∇(v) + (θvp)*∇(q)) ⊙ dut)dΩ - -op = TransientFEOperator(res, jac, jac_t, X,Y) - - -xh0 = zero(X) - - -#stokes_pc_fieldsplit_off_diag_use_amat -stokes_ksp_type pipegcr -#GridapPETSc.Init(args=split(options)) -GridapPETSc.with(args=split(options)) do - -#Definitions of IS -U_Parray, P_Parray = field_dof_split(Y,U,P) - -ISVel = PETScIS(U_Parray); -ISP = PETScIS(P_Parray); - -@check_error_code GridapPETSc.PETSC.ISView(ISVel.is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_WORLD) -@check_error_code GridapPETSc.PETSC.ISView(ISP.is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_WORLD) - -#@check_error_code GridapPETSc.PETSC.ISView(PETScIS(collect(1:5)).is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_WORLD) - -function mysnessetup(snes) -ksp = Ref{GridapPETSc.PETSC.KSP}() -pc = Ref{GridapPETSc.PETSC.PC}() -@check_error_code GridapPETSc.PETSC.SNESSetFromOptions(snes[]) -@check_error_code GridapPETSc.PETSC.SNESGetKSP(snes[],ksp) -#@check_error_code GridapPETSc.PETSC.KSPView(ksp[],C_NULL) -#@check_error_code GridapPETSc.PETSC.KSPSetType(ksp[], GridapPETSc.PETSC.KSPFGMRES) -@check_error_code GridapPETSc.PETSC.KSPGetPC(ksp[],pc) -@check_error_code GridapPETSc.PETSC.PCSetType(pc[],GridapPETSc.PETSC.PCFIELDSPLIT) -@check_error_code GridapPETSc.PETSC.PCFieldSplitSetType(pc[],GridapPETSc.PETSC.PC_COMPOSITE_ADDITIVE) -@check_error_code GridapPETSc.PETSC.PCFactorSetUpMatSolverType(pc[]) -@check_error_code GridapPETSc.PETSC.KSPSetFromOptions(ksp[]) -#After KSP setFromOption -@check_error_code GridapPETSc.PETSC.PCFieldSplitSetIS(pc[],"vel",ISVel.is[]) -@check_error_code GridapPETSc.PETSC.PCFieldSplitSetIS(pc[],"pres",ISP.is[]) - -end - -function solve_sim(solver_type) - if solver_type == :petsc - solver = PETScNonlinearSolver(mysnessetup) - elseif solver_type == :julia - solver = NLSolver(show_trace=true, method=:newton) - end - - ode_solver = ThetaMethod(solver, dt, θ) - - solt = solve(ode_solver, op, xh0, t0, tF) - iter = 0 - @time createpvd(parts,"LidDriven") do pvd - for (xh_tn, tn) in solt - println("iteration = $iter") - iter = iter +1 - uh_tn = xh_tn[1] - ph_tn = xh_tn[2] - ωh_tn = ∇ × uh_tn - pvd[tn] = createvtk(Ω, "LidDriven_$tn"*".vtu", cellfields=["uh" => uh_tn, "ph" => ph_tn, "wh" => ωh_tn]) - end - end -end - -solve_sim(:petsc) -solve_sim(:julia) - - - - -#GridapPETSc.Finalize() -end -end - - -function field_dof_split(Y,U,P) - xrows = Y.gids.partition - urows = U.gids.partition - prows = P.gids.partition - - ulrows,plrows = map_parts(urows,prows,xrows) do urows,prows,xrows - uloc_idx = collect(1:1:length(urows.oid_to_lid)) - ploc_idx = collect(length(uloc_idx)+1 :1: length(xrows.oid_to_lid)) - ur = xrows.lid_to_gid[xrows.oid_to_lid][uloc_idx] - pr = xrows.lid_to_gid[xrows.oid_to_lid][ploc_idx] - - return ur, pr - end - ur = ulrows.part .-1 - pr = plrows.part .-1 - - return ur,pr 
-end - -function h_param(Ω::GridapDistributed.DistributedTriangulation, D::Int64) - h = map_parts(Ω.trians) do trian - h_param(trian, D) - end - h = CellData.CellField(h, Ω) - h -end - - -function h_param(Ω::Triangulation, D::Int64) - h = lazy_map(h -> h^(1 / D), get_cell_measure(Ω)) - - h -end - -partition =(2,2) -with_backend(main_ld, MPIBackend(), partition) - -#mpiexecjl --project=. -n 4 julia LidDrivenSUPG_IS_distributed.jl From f372f4549278fa3af5c04ee7730e1f740b6dd8ec Mon Sep 17 00:00:00 2001 From: carlodev Date: Thu, 16 Feb 2023 15:19:30 +0100 Subject: [PATCH 21/22] fix --- .vscode/settings.json | 1 + src/PETSC.jl | 19 +++ src/PETScFieldSplits.jl | 87 +++++----- src/PETScIndexes.jl | 241 +++++++------------------- test/LidDrivenSUPG_fieldsplit.jl | 246 +++++++++++++++++++++++++++ test/sequential/PETScIndexesTests.jl | 2 +- 6 files changed, 372 insertions(+), 224 deletions(-) create mode 100644 .vscode/settings.json create mode 100644 test/LidDrivenSUPG_fieldsplit.jl diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..9e26dfe --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/src/PETSC.jl b/src/PETSC.jl index 3685245..a0409a0 100644 --- a/src/PETSC.jl +++ b/src/PETSC.jl @@ -771,4 +771,23 @@ end #PETSc Sleep @wrapper(:PetscSleep,PetscErrorCode,(PetscReal,),(s,),"https://petsc.org/main/docs/manualpages/Sys/PetscSleep/") + +#PETSc Alloc +@wrapper(:PetscMallocA,PetscErrorCode,(PetscInt, PetscBool, PetscInt, Ptr{Cstring}, Ptr{Cstring}, Csize_t, Ptr{Cvoid},), (n, clear, lineno, fun, fname, bytes0, ptr0,), "https://petsc.org/release/docs/manualpages/Sys/PetscMallocA/#petscmalloca") +#PETSC_EXTERN PetscErrorCode PetscMallocA(int,PetscBool,int,const char *,const char *,size_t,void *,...); + +""" + @PetscMalloc1 + +See [PETSc manual](https://petsc.org/release/docs/manualpages/Sys/PetscMalloc1/). 
+""" +macro PetscMalloc1(m1,r1) + quote + func = Ptr{Nothing}() + linec = Ptr{Nothing}() + PetscMallocA(1,PETSC_FALSE, Cint(11), Ref(Cstring(linec)), Ref(Cstring(func)), (Csize_t)($m1)*sizeof($r1),($r1)) + end +end +# PetscMalloc1(m1,r1) PetscMallocA(1,PETSC_FALSE,__LINE__,PETSC_FUNCTION_NAME,__FILE__,(size_t)(m1)*sizeof(**(r1)),(r1)) + end # module diff --git a/src/PETScFieldSplits.jl b/src/PETScFieldSplits.jl index 98b2c48..9cdf9cc 100644 --- a/src/PETScFieldSplits.jl +++ b/src/PETScFieldSplits.jl @@ -3,24 +3,24 @@ # tags is a vector containing the names of the field (in order), eg: ["vel", "pres"] # show_idx is Bool vector, if true it prints the indexes struct PETScFieldSplit - MF - tags::Vector{String} - show_idx::Bool + MF + tags::Vector{String} + show_idx::Bool end # PETScFieldSplit constructor from Multifield -function PETScFieldSplit(MF::Union{MultiFieldFESpace, GridapDistributed.DistributedMultiFieldFESpace}, tags::Vector{String}; show_idx = false) +function PETScFieldSplit(MF::Union{MultiFieldFESpace,GridapDistributed.DistributedMultiFieldFESpace}, tags::Vector{String}; show_idx=false) n_fields = length(MF) n_tags = length(tags) @assert n_tags == n_fields #Verify that at each field a name is assigned M = [MF[1]] #Vector of SingleFieldFESpace for i = 2:1:n_fields - M = [M...,MF[i]] + M = [M..., MF[i]] end - PETScFieldSplit(M, tags, show_idx) + PETScFieldSplit(M, tags, show_idx) end -function PETScFieldSplit(MF::Union{Vector{SingleFieldFESpace}, Vector{<:GridapDistributed.DistributedSingleFieldFESpace}}, tags; show_idx = false) +function PETScFieldSplit(MF::Union{Vector{SingleFieldFESpace}, Vector{<:GridapDistributed.DistributedSingleFieldFESpace}}, tags; show_idx=false) n_fields = length(MF) n_tags = length(tags) @assert n_tags == n_fields @@ -32,35 +32,35 @@ end function field_dof_split(U) X = MultiFieldFESpace(U) - field_dof_split(X,U) + field_dof_split(X, U) end # It allows to obtain the dof of each field over each processor - dont know if the best/most elegant way function field_dof_split(X, U) @assert length(X) == length(U) - xrows = X.gids.partition - urows = Any[] - for Ui in X - urowsi = Ui.gids.partition - push!(urows, urowsi) - end + xrows = X.gids.partition + urows = Any[] + for Ui in X + urowsi = Ui.gids.partition + push!(urows, urowsi) + end - offset = 0 - ulrows = Any[] - for urowsi in urows - ulrowsi = map_parts(urowsi,xrows) do urowsij,xrows + offset = 0 + ulrows = Any[] + for urowsi in urows + ulrowsi = map_parts(urowsi, xrows) do urowsij, xrows num_dof_Ui = length(urowsij.oid_to_lid) - uloc_idx = collect(1+offset:1:num_dof_Ui + offset) - uri = xrows.lid_to_gid[xrows.oid_to_lid][uloc_idx] - offset = num_dof_Ui+offset - return uri + uloc_idx = collect(1+offset:1:num_dof_Ui+offset) + uri = xrows.lid_to_gid[xrows.oid_to_lid][uloc_idx] + offset = num_dof_Ui + offset + return uri end #It works only in MPI, non-MPI do not have .part fields # .-1 to match the numbering in PETSc that starts from 0 - ur = ulrowsi.part .-1 - push!(ulrows,ur) + ur = ulrowsi.part .- 1 + push!(ulrows, ur) end - return ulrows + return ulrows end @@ -70,14 +70,15 @@ function set_fieldsplit(sol, SplitField::PETScFieldSplit) #Get PC-string name pc = Ref{GridapPETSc.PETSC.PC}() pctype = Ref{Ptr{Cstring}}() - + #Get the PC, if the problem is linear ksp or non linear snes - if typeof(sol) <:Base.RefValue{GridapPETSc.PETSC.KSP} + #Maybe not the most elegant solution + if typeof(sol) <: Base.RefValue{GridapPETSc.PETSC.KSP} ksp = sol - elseif typeof(sol) <:Base.RefValue{GridapPETSc.PETSC.SNES} + elseif 
typeof(sol) <: Base.RefValue{GridapPETSc.PETSC.SNES} snes = sol - ksp = Ref{GridapPETSc.PETSC.KSP}() - @check_error_code GridapPETSc.PETSC.SNESGetKSP(snes[],ksp) + ksp = Ref{GridapPETSc.PETSC.KSP}() + @check_error_code GridapPETSc.PETSC.SNESGetKSP(snes[], ksp) end @check_error_code GridapPETSc.PETSC.KSPGetPC(ksp[], pc) @@ -85,17 +86,17 @@ function set_fieldsplit(sol, SplitField::PETScFieldSplit) pc_ptr_conv = reinterpret(Ptr{UInt8}, pctype[]) GC.@preserve pc_name = unsafe_string(pc_ptr_conv) - - @assert pc_name == "fieldsplit" #Check that the preconditioner requires splitting fields - n_tags = length(SplitField.tags) - U_Parray = field_dof_split(SplitField.MF) - for i = 1:1:n_tags - ISU = PETScIS(U_Parray[i]) - @check_error_code GridapPETSc.PETSC.PCFieldSplitSetIS(pc[], SplitField.tags[i], ISU.is[]) - if SplitField.show_idx - @check_error_code GridapPETSc.PETSC.ISView(ISU.is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_SELF) - end + + @assert pc_name == "fieldsplit" #Check that the preconditioner requires splitting fields + n_tags = length(SplitField.tags) + U_Parray = field_dof_split(SplitField.MF) + for i = 1:1:n_tags + ISU = PETScIS(U_Parray[i]) + @check_error_code GridapPETSc.PETSC.PCFieldSplitSetIS(pc[], SplitField.tags[i], ISU.is[]) + if SplitField.show_idx + @check_error_code GridapPETSc.PETSC.ISView(ISU.is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_SELF) end - - - end \ No newline at end of file + end + + +end \ No newline at end of file diff --git a/src/PETScIndexes.jl b/src/PETScIndexes.jl index a40dc3d..61221b4 100644 --- a/src/PETScIndexes.jl +++ b/src/PETScIndexes.jl @@ -1,64 +1,64 @@ # Index mutable struct PETScIS <: AbstractVector{PetscInt} - is::Base.RefValue{IS} - initialized::Bool - size::Tuple{Int} - comm::MPI.Comm - PETScIS(comm::MPI.Comm) = new(Ref{IS}(),false,(-1,),comm) + is::Base.RefValue{IS} + initialized::Bool + size::Tuple{Int} + comm::MPI.Comm + PETScIS(comm::MPI.Comm) = new(Ref{IS}(), false, (-1,), comm) end - - function Init(a::PETScIS) - n = Ref{PetscInt}() - @check_error_code PETSC.ISGetSize(a.is[], n) - a.size = (Int(n[]),) - @assert Threads.threadid() == 1 - _NREFS[] += 1 - a.initialized = true - finalizer(Finalize,a) - end - - function Finalize(a::PETScIS) - if a.initialized && GridapPETSc.Initialized() - if a.comm == MPI.COMM_SELF - @check_error_code PETSC.ISDestroy(a.is) - else - @check_error_code PETSC.PetscObjectRegisterDestroy(a.is[]) - end - a.initialized = false - @assert Threads.threadid() == 1 - _NREFS[] -= 1 + +function Init(a::PETScIS) + n = Ref{PetscInt}() + @check_error_code PETSC.ISGetSize(a.is[], n) + a.size = (Int(n[]),) + @assert Threads.threadid() == 1 + _NREFS[] += 1 + a.initialized = true + finalizer(Finalize, a) +end + +function Finalize(a::PETScIS) + if a.initialized && GridapPETSc.Initialized() + if a.comm == MPI.COMM_SELF + @check_error_code PETSC.ISDestroy(a.is) + else + @check_error_code PETSC.PetscObjectRegisterDestroy(a.is[]) end - nothing - end - - function Base.size(v::PETScIS) - @assert v.initialized - v.size - end - - Base.@propagate_inbounds function Base.getindex(v::PETScIS,i1::Integer) - @boundscheck checkbounds(v, i1) - n = one(PetscInt) - i0 = Ref(i1-n) - pi0 = reinterpret(Ptr{PetscInt},pointer_from_objref(i0)) - @check_error_code PETSC.ISGetIndices(v.is[], pi0) - return pi0 - end - - function PETScIS(idx::Vector{PetscInt}, bs=1) - comm = MPI.COMM_SELF - is = PETScIS(comm) - n = length(idx) - @check_error_code GridapPETSc.PETSC.ISCreateGeneral(comm, n, idx, GridapPETSc.PETSC.PETSC_COPY_VALUES, is.is) - Init(is) + 
a.initialized = false + @assert Threads.threadid() == 1 + _NREFS[] -= 1 end + nothing +end - function PETScIS(idx::AbstractVector, bs=1) - idx = PetscInt.(idx) - PETScIS(idx) - end +function Base.size(v::PETScIS) + @assert v.initialized + v.size +end + +Base.@propagate_inbounds function Base.getindex(v::PETScIS, i1::Integer) + @boundscheck checkbounds(v, i1) + n = one(PetscInt) + i0 = Ref(i1 - n) + pi0 = reinterpret(Ptr{PetscInt}, pointer_from_objref(i0)) + @check_error_code PETSC.ISGetIndices(v.is[], pi0) + return pi0 +end + +function PETScIS(idx::Vector{PetscInt}, bs=1) + comm = MPI.COMM_SELF + is = PETScIS(comm) + n = length(idx) + @check_error_code GridapPETSc.PETSC.ISCreateGeneral(comm, n, idx, GridapPETSc.PETSC.PETSC_COPY_VALUES, is.is) + Init(is) +end + +function PETScIS(idx::AbstractVector, bs=1) + idx = PetscInt.(idx) + PETScIS(idx) +end # #Block Implementation # function PETScIS(array::Vector{PetscInt},n, bs=1) @@ -70,131 +70,12 @@ end # Init(is) # end +# Constructors - # Base.@propagate_inbounds function Base.setindex!(v::PETScIS,y,i1::Integer) - # @boundscheck checkbounds(v, i1) - # n = one(PetscInt) - # i0 = Ref(PetscInt(i1-n)) - # vi = Ref(PetscInt(y)) - # @check_error_code PETSC.VecSetValues(v.is[],n,i0,vi,PETSC.INSERT_VALUES) - # v.is[i0] .= y - # y - # end - - # Constructors - - function PETScIS(n::Integer) - println("PETScIS") - v = Ref{Ptr{PetscInt}}() - println("Construct\n") - @check_error_code PETSC.PetscMalloc1(n, v) - Init(v) - end - - - -# function PETScVector(a::PetscScalar,ax::AbstractUnitRange) -# PETScVector(fill(a,length(ax))) -# end - -# function Base.similar(::PETScVector,::Type{PetscScalar},ax::Tuple{Int}) -# PETScVector(ax[1]) -# end - -# function Base.similar(a::PETScVector,::Type{PetscScalar},ax::Tuple{<:Base.OneTo}) -# similar(a,PetscScalar,map(length,ax)) -# end - -# function Base.similar(::Type{PETScVector},ax::Tuple{Int}) -# PETScVector(ax[1]) -# end - -# function Base.similar(::Type{PETScVector},ax::Tuple{<:Base.OneTo}) -# PETScVector(map(length,ax)) -# end - -# function Base.copy(a::PETScVector) -# v = PETScVector(a.comm) -# @check_error_code PETSC.VecDuplicate(a.vec[],v.vec) -# @check_error_code PETSC.VecCopy(a.vec[],v.vec[]) -# Init(v) -# end - -# function Base.convert(::Type{PETScVector},a::PETScVector) -# a -# end - -# function Base.convert(::Type{PETScVector},a::AbstractVector) -# array = convert(Vector{PetscScalar},a) -# PETScVector(array) -# end - -# function Base.copy!(a::AbstractVector,b::PETScVector) -# @check length(a) == length(b) -# _copy!(a,b.vec[]) -# end - -# function _copy!(a::Vector,b::Vec) -# ni = length(a) -# ix = collect(PetscInt,0:(ni-1)) -# v = convert(Vector{PetscScalar},a) -# @check_error_code PETSC.VecGetValues(b,ni,ix,v) -# if !(v === a) -# a .= v -# end -# end - -# function Base.copy!(a::PETScVector,b::AbstractVector) -# @check length(a) == length(b) -# _copy!(a.vec[],b) -# end - -# function _copy!(a::Vec,b::Vector) -# ni = length(b) -# ix = collect(PetscInt,0:(ni-1)) -# v = convert(Vector{PetscScalar},b) -# @check_error_code PETSC.VecSetValues(a,ni,ix,v,PETSC.INSERT_VALUES) -# end - -# function _get_local_oh_vector(a::Vec) -# v=PETScVector(MPI.COMM_SELF) -# @check_error_code PETSC.VecGhostGetLocalForm(a,v.vec) -# if v.vec[] != C_NULL # a is a ghosted vector -# v.ownership=a -# Init(v) -# return v -# else # a is NOT a ghosted vector -# return _get_local_vector(a) -# end -# end - -# function _local_size(a::PETScVector) -# r_sz = Ref{PetscInt}() -# @check_error_code PETSC.VecGetLocalSize(a.vec[], r_sz) -# r_sz[] -# end - 
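For context, a minimal usage sketch of the PETScIS wrapper defined above, following the pattern of test/sequential/PETScIndexesTests.jl; the options string is illustrative only and any valid PETSc options would do:

using GridapPETSc   # exports PETScIS, PetscInt and @check_error_code

options = "-log_view"   # illustrative only
GridapPETSc.with(args=split(options)) do
  # PETSc index sets are 0-based; callers such as field_dof_split shift the
  # global dof ids by -1 before wrapping them.
  idx = collect(PetscInt, 0:9)
  is  = PETScIS(idx)                  # backed by ISCreateGeneral with PETSC_COPY_VALUES
  @assert is.size[1] == length(idx)   # filled in by ISGetSize during Init
  @check_error_code GridapPETSc.PETSC.ISView(is.is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_SELF)
end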
-# # This function works with either ghosted or non-ghosted MPI vectors. -# # In the case of a ghosted vector it solely returns the locally owned -# # entries. -# function _get_local_vector(a::PETScVector) -# r_pv = Ref{Ptr{PetscScalar}}() -# @check_error_code PETSC.VecGetArray(a.vec[], r_pv) -# v = unsafe_wrap(Array, r_pv[], _local_size(a); own = false) -# return v -# end - -# # This function works with either ghosted or non-ghosted MPI vectors. -# # In the case of a ghosted vector it solely returns the locally owned -# # entries. -# function _get_local_vector_read(a::PETScVector) -# r_pv = Ref{Ptr{PetscScalar}}() -# @check_error_code PETSC.VecGetArrayRead(a.vec[], r_pv) -# v = unsafe_wrap(Array, r_pv[], _local_size(a); own = false) -# return v -# end - -# function _restore_local_vector!(v::Array,a::PETScVector) -# @check_error_code PETSC.VecRestoreArray(a.vec[], Ref(pointer(v))) -# nothing -# end \ No newline at end of file +function PETScIS(n::Integer) + println("PETScIS") + v = Ref{Ptr{PetscInt}}() + println("Construct\n") + @check_error_code PETSC.PetscMalloc1(n, v) + Init(v) +end \ No newline at end of file diff --git a/test/LidDrivenSUPG_fieldsplit.jl b/test/LidDrivenSUPG_fieldsplit.jl new file mode 100644 index 0000000..b54340c --- /dev/null +++ b/test/LidDrivenSUPG_fieldsplit.jl @@ -0,0 +1,246 @@ +using Revise +using Gridap +using Gridap.CellData + + +using Test +using GridapDistributed +using GridapDistributed.CellData + + +using PartitionedArrays +using MPI +using SparseArrays +using SparseMatricesCSR +using GridapPETSc +using BenchmarkTools + +# options = "-snes_type newtonls -snes_linesearch_type basic -snes_linesearch_damping 1.0 -snes_rtol 1.0e-10 -snes_atol 0.0 -snes_monitor -log_view \ +# -pc_use_amat -ksp_type fgmres -ksp_converged_reason -ksp_max_it 50 -ksp_rtol 1e-3 -ksp_atol 1e-8 \ +# -fieldsplit_vel_pc_type ilu -fieldsplit_vel_pc_factor_levels 2 -fieldsplit_velu_ksp_type gmres -fieldsplit_vel_ksp_monitor -fieldsplit_vel_ksp_converged_reason \ +# -fieldsplit_pres_pc_type gamg -fieldsplit_pres_ksp_type gmres -fieldsplit_pres_ksp_monitor fieldsplit_pres_ksp_converged_reason" #Very good pressure convergence + +# options = "-snes_type newtonls -snes_linesearch_type basic -snes_linesearch_damping 1.0 -snes_rtol 1.0e-10 -snes_atol 0.0 -snes_monitor -log_view \ +# -ksp_type fgmres -ksp_converged_reason -ksp_max_it 50 -ksp_rtol 1e-3 -ksp_atol 1e-8 -ksp_monitor_short \ +# -fieldsplit_vel_pc_type asm -fieldsplit_vel_ksp_type gmres -fieldsplit_vel_ksp_converged_reason \ +# -fieldsplit_pres_pc_type gamg -fieldsplit_pres_ksp_type gmres -fieldsplit_pres_ksp_converged_reason" #Works + + +options = "-snes_type newtonls -snes_linesearch_type basic -snes_linesearch_damping 1.0 -snes_rtol 1.0e-6 -snes_atol 0.0 -snes_monitor -log_view \ +-ksp_type fgmres -ksp_converged_reason -ksp_max_it 50 -ksp_rtol 1e-3 -ksp_atol 1e-10 -ksp_monitor_short \ +-fieldsplit_vel_pc_type gamg -fieldsplit_vel_ksp_type gmres -fieldsplit_vel_ksp_converged_reason \ +-fieldsplit_pres_pc_type gamg -fieldsplit_pres_ksp_type gmres -fieldsplit_pres_ksp_converged_reason" #Works + + +#-fieldsplit_vel_ksp_converged_reason -ksp_monitor_short +#-fieldsplit_pres_ksp_converged_reason +#-log_view -mat_view :lid.m:ascii_matlab +function stretching_y_function(x) + gamma1 = 2.5 + S = 0.5815356159649889 #for rescaling the function over the domain -0.5 -> 0.5 + -tanh.(gamma1 .* (x)) ./ tanh.(gamma1) .* S +end + + +function stretching(x::Point) + m = zeros(length(x)) + m[1] = stretching_y_function(x[1]) + + + m[2] = 
stretching_y_function(x[2]) + Point(m) +end + +function main_ld(parts) +t0 = 0 +dt = 0.01 +tF = 0.05 +θ = 1 +θvp = 1 +ν = 0.0001 + +n = 256 +L = 0.5 +domain = (-L, L, -L, L) +partition = (n,n) +model = CartesianDiscreteModel(parts, domain, partition, map=stretching) + +labels = get_face_labeling(model) +add_tag_from_tags!(labels, "diri1", [5,]) +add_tag_from_tags!(labels, "diri0", [1, 2, 4, 3, 6, 7, 8]) +add_tag_from_tags!(labels, "p", [4,]) + +u_wall= VectorValue(0.0, 0.0) +u_top = VectorValue(1.0, 0.0) + +u_diri_values = [u_wall, u_top] + + +order = 1 +reffeᵤ = ReferenceFE(lagrangian,VectorValue{2,Float64},order) +reffeₚ = ReferenceFE(lagrangian,Float64,order) + +V = TestFESpace(model,reffeᵤ,dirichlet_tags=["diri0","diri1"],conformity=:H1) +Q = TestFESpace(model,reffeₚ,conformity=:H1,dirichlet_tags="p") +Y = MultiFieldFESpace([V,Q]) +U = TrialFESpace(V, u_diri_values) +P = TrialFESpace(Q,0.0) +X = MultiFieldFESpace([U,P]) + +degree = order*4 +Ω= Triangulation(model) +dΩ = Measure(Ω,degree) + +h = h_param(Ω,2) +function τ(u, h) + r = 1 + τ₂ = h^2 / (4 * ν) + val(x) = x + val(x::Gridap.Fields.ForwardDiff.Dual) = x.value + u = val(norm(u)) + + if iszero(u) + return τ₂ + end + + τ₃ = dt / 2 + τ₁ = h / (2 * u) + return 1 / (1 / τ₁^r + 1 / τ₂^r + 1 / τ₃^r) + + +end + + +τb(u, h) = (u ⋅ u) * τ(u, h) + + +Rm(t, (u, p)) = ∂t(u) + u ⋅ ∇(u) + ∇(p) #- hf(t) #- ν*Δ(u) +Rc(u) = ∇ ⋅ u +dRm((u, p), (du, dp), (v, q)) = du ⋅ ∇(u) + u ⋅ ∇(du) + ∇(dp) #- ν*Δ(du) +dRc(du) = ∇ ⋅ du + +Bᴳ(t, (u, p), (v, q)) = ∫(∂t(u) ⋅ v)dΩ + ∫((u ⋅ ∇(u)) ⋅ v)dΩ - ∫((∇ ⋅ v) * p)dΩ + ∫((q * (∇ ⋅ u)))dΩ + ν * ∫(∇(v) ⊙ ∇(u))dΩ #- ∫(hf(t) ⋅ v)dΩ +B_stab(t, (u, p), (v, q)) = ∫((τ ∘ (u.cellfield, h) * (u ⋅ ∇(v) + ∇(q))) ⊙ Rm(t, (u, p)) # First term: SUPG, second term: PSPG u⋅∇(v) + ∇(q) ++ +τb ∘ (u.cellfield, h) * (∇ ⋅ v) ⊙ Rc(u) # Bulk viscosity. 
Try commenting out both stabilization terms to see what happens in periodic and non-periodic cases +)dΩ +res(t, (u, p), (v, q)) = Bᴳ(t, (u, p), (v, q)) + B_stab(t, (u, p), (v, q)) + +dBᴳ(t, (u, p), (du, dp), (v, q)) = ∫(((du ⋅ ∇(u)) ⋅ v) + ((u ⋅ ∇(du)) ⋅ v) + (∇(dp) ⋅ v) + (q * (∇ ⋅ du)))dΩ + ν * ∫(∇(v) ⊙ ∇(du))dΩ +dB_stab(t, (u, p), (du, dp), (v, q)) = ∫(((τ ∘ (u.cellfield, h) * (u ⋅ ∇(v)' + ∇(q))) ⊙ dRm((u, p), (du, dp), (v, q))) + ((τ ∘ (u.cellfield, h) * (du ⋅ ∇(v)')) ⊙ Rm(t, (u, p))) + (τb ∘ (u.cellfield, h) * (∇ ⋅ v) ⊙ dRc(du)))dΩ + + +jac(t, (u, p), (du, dp), (v, q)) = dBᴳ(t, (u, p), (du, dp), (v, q)) + dB_stab(t, (u, p), (du, dp), (v, q)) + +jac_t(t, (u, p), (dut, dpt), (v, q)) = ∫(dut ⋅ v)dΩ + ∫(τ ∘ (u.cellfield, h) * (u ⋅ ∇(v) + (θvp)*∇(q)) ⊙ dut)dΩ + +op = TransientFEOperator(res, jac, jac_t, X,Y) + + +xh0 = zero(X) + + +#stokes_pc_fieldsplit_off_diag_use_amat -stokes_ksp_type pipegcr +#GridapPETSc.Init(args=split(options)) +GridapPETSc.with(args=split(options)) do + +#Definitions of IS +U_Parray, P_Parray = field_dof_split(Y,U,P) + +ISVel = PETScIS(U_Parray); +ISP = PETScIS(P_Parray); + +@check_error_code GridapPETSc.PETSC.ISView(ISVel.is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_WORLD) +@check_error_code GridapPETSc.PETSC.ISView(ISP.is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_WORLD) + +#@check_error_code GridapPETSc.PETSC.ISView(PETScIS(collect(1:5)).is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_WORLD) + +function mysnessetup(snes) +ksp = Ref{GridapPETSc.PETSC.KSP}() +pc = Ref{GridapPETSc.PETSC.PC}() +@check_error_code GridapPETSc.PETSC.SNESSetFromOptions(snes[]) +@check_error_code GridapPETSc.PETSC.SNESGetKSP(snes[],ksp) +#@check_error_code GridapPETSc.PETSC.KSPView(ksp[],C_NULL) +#@check_error_code GridapPETSc.PETSC.KSPSetType(ksp[], GridapPETSc.PETSC.KSPFGMRES) +@check_error_code GridapPETSc.PETSC.KSPGetPC(ksp[],pc) +@check_error_code GridapPETSc.PETSC.PCSetType(pc[],GridapPETSc.PETSC.PCFIELDSPLIT) +@check_error_code GridapPETSc.PETSC.PCFieldSplitSetType(pc[],GridapPETSc.PETSC.PC_COMPOSITE_ADDITIVE) +@check_error_code GridapPETSc.PETSC.PCFactorSetUpMatSolverType(pc[]) +@check_error_code GridapPETSc.PETSC.KSPSetFromOptions(ksp[]) +#After KSP setFromOption +@check_error_code GridapPETSc.PETSC.PCFieldSplitSetIS(pc[],"vel",ISVel.is[]) +@check_error_code GridapPETSc.PETSC.PCFieldSplitSetIS(pc[],"pres",ISP.is[]) +end + +function solve_sim(solver_type) + if solver_type == :petsc + solver = PETScNonlinearSolver(mysnessetup) + elseif solver_type == :julia + solver = NLSolver(show_trace=true, method=:newton) + end + + ode_solver = ThetaMethod(solver, dt, θ) + + solt = solve(ode_solver, op, xh0, t0, tF) + iter = 0 + @time createpvd(parts,"LidDriven") do pvd + for (xh_tn, tn) in solt + println("iteration = $iter") + iter = iter +1 + uh_tn = xh_tn[1] + ph_tn = xh_tn[2] + ωh_tn = ∇ × uh_tn + pvd[tn] = createvtk(Ω, "LidDriven_$tn"*".vtu", cellfields=["uh" => uh_tn, "ph" => ph_tn, "wh" => ωh_tn]) + end + end +end + +solve_sim(:petsc) +solve_sim(:julia) + + + + +#GridapPETSc.Finalize() +end +end + + +function field_dof_split(Y,U,P) + xrows = Y.gids.partition + urows = U.gids.partition + prows = P.gids.partition + + ulrows,plrows = map_parts(urows,prows,xrows) do urows,prows,xrows + uloc_idx = collect(1:1:length(urows.oid_to_lid)) + ploc_idx = collect(length(uloc_idx)+1 :1: length(xrows.oid_to_lid)) + ur = xrows.lid_to_gid[xrows.oid_to_lid][uloc_idx] + pr = xrows.lid_to_gid[xrows.oid_to_lid][ploc_idx] + + return ur, pr + end + ur = ulrows.part .-1 + pr = plrows.part .-1 + + return ur,pr +end 
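The field_dof_split(Y,U,P) helper defined just above splits the locally owned rows of the monolithic space by field: on each part, the first length(urows.oid_to_lid) owned global ids belong to the velocity block, the remaining ones to the pressure block, and both are shifted by -1 because PETSc numbers dofs from 0. A serial toy illustration of that arithmetic, with plain vectors standing in for the gids partitions:

# Toy stand-in for one part: owned global ids of the monolithic space, of which
# the first 4 come from the velocity space and the last 2 from the pressure space.
x_owned_gids = [3, 4, 5, 6, 11, 12]
n_u = 4                                  # length(urows.oid_to_lid) on this part
u_idx = x_owned_gids[1:n_u]     .- 1     # -> [2, 3, 4, 5]  (0-based velocity IS)
p_idx = x_owned_gids[n_u+1:end] .- 1     # -> [10, 11]      (0-based pressure IS)
# u_idx and p_idx are exactly what PETScIS wraps and PCFieldSplitSetIS receives.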
+ +function h_param(Ω::GridapDistributed.DistributedTriangulation, D::Int64) + h = map_parts(Ω.trians) do trian + h_param(trian, D) + end + h = CellData.CellField(h, Ω) + h +end + + +function h_param(Ω::Triangulation, D::Int64) + h = lazy_map(h -> h^(1 / D), get_cell_measure(Ω)) + + h +end + +partition =(2,2) +with_backend(main_ld, MPIBackend(), partition) + +#mpiexecjl --project=. -n 4 julia LidDrivenSUPG_IS_distributed.jl diff --git a/test/sequential/PETScIndexesTests.jl b/test/sequential/PETScIndexesTests.jl index 7c93049..2f9af77 100644 --- a/test/sequential/PETScIndexesTests.jl +++ b/test/sequential/PETScIndexesTests.jl @@ -15,7 +15,7 @@ GridapPETSc.with(args=split(options)) do array = collect(1:1:100) is = PETScIS(array) -@check_error_code GridapPETSc.PETSC.ISView(is.is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_WORLD) +#@check_error_code GridapPETSc.PETSC.ISView(is.is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_WORLD) @check_error_code GridapPETSc.PETSC.ISView(is.is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_SELF) @test is.size[1] == length(array) @test is.initialized == true From c58483bdd03bfe76dfda47660ec042faf55945ac Mon Sep 17 00:00:00 2001 From: carlodev Date: Thu, 16 Feb 2023 15:27:41 +0100 Subject: [PATCH 22/22] remove lid driven test --- test/LidDrivenSUPG_fieldsplit.jl | 246 ------------------------------- 1 file changed, 246 deletions(-) delete mode 100644 test/LidDrivenSUPG_fieldsplit.jl diff --git a/test/LidDrivenSUPG_fieldsplit.jl b/test/LidDrivenSUPG_fieldsplit.jl deleted file mode 100644 index b54340c..0000000 --- a/test/LidDrivenSUPG_fieldsplit.jl +++ /dev/null @@ -1,246 +0,0 @@ -using Revise -using Gridap -using Gridap.CellData - - -using Test -using GridapDistributed -using GridapDistributed.CellData - - -using PartitionedArrays -using MPI -using SparseArrays -using SparseMatricesCSR -using GridapPETSc -using BenchmarkTools - -# options = "-snes_type newtonls -snes_linesearch_type basic -snes_linesearch_damping 1.0 -snes_rtol 1.0e-10 -snes_atol 0.0 -snes_monitor -log_view \ -# -pc_use_amat -ksp_type fgmres -ksp_converged_reason -ksp_max_it 50 -ksp_rtol 1e-3 -ksp_atol 1e-8 \ -# -fieldsplit_vel_pc_type ilu -fieldsplit_vel_pc_factor_levels 2 -fieldsplit_velu_ksp_type gmres -fieldsplit_vel_ksp_monitor -fieldsplit_vel_ksp_converged_reason \ -# -fieldsplit_pres_pc_type gamg -fieldsplit_pres_ksp_type gmres -fieldsplit_pres_ksp_monitor fieldsplit_pres_ksp_converged_reason" #Very good pressure convergence - -# options = "-snes_type newtonls -snes_linesearch_type basic -snes_linesearch_damping 1.0 -snes_rtol 1.0e-10 -snes_atol 0.0 -snes_monitor -log_view \ -# -ksp_type fgmres -ksp_converged_reason -ksp_max_it 50 -ksp_rtol 1e-3 -ksp_atol 1e-8 -ksp_monitor_short \ -# -fieldsplit_vel_pc_type asm -fieldsplit_vel_ksp_type gmres -fieldsplit_vel_ksp_converged_reason \ -# -fieldsplit_pres_pc_type gamg -fieldsplit_pres_ksp_type gmres -fieldsplit_pres_ksp_converged_reason" #Works - - -options = "-snes_type newtonls -snes_linesearch_type basic -snes_linesearch_damping 1.0 -snes_rtol 1.0e-6 -snes_atol 0.0 -snes_monitor -log_view \ --ksp_type fgmres -ksp_converged_reason -ksp_max_it 50 -ksp_rtol 1e-3 -ksp_atol 1e-10 -ksp_monitor_short \ --fieldsplit_vel_pc_type gamg -fieldsplit_vel_ksp_type gmres -fieldsplit_vel_ksp_converged_reason \ --fieldsplit_pres_pc_type gamg -fieldsplit_pres_ksp_type gmres -fieldsplit_pres_ksp_converged_reason" #Works - - -#-fieldsplit_vel_ksp_converged_reason -ksp_monitor_short -#-fieldsplit_pres_ksp_converged_reason -#-log_view -mat_view :lid.m:ascii_matlab 
-function stretching_y_function(x) - gamma1 = 2.5 - S = 0.5815356159649889 #for rescaling the function over the domain -0.5 -> 0.5 - -tanh.(gamma1 .* (x)) ./ tanh.(gamma1) .* S -end - - -function stretching(x::Point) - m = zeros(length(x)) - m[1] = stretching_y_function(x[1]) - - - m[2] = stretching_y_function(x[2]) - Point(m) -end - -function main_ld(parts) -t0 = 0 -dt = 0.01 -tF = 0.05 -θ = 1 -θvp = 1 -ν = 0.0001 - -n = 256 -L = 0.5 -domain = (-L, L, -L, L) -partition = (n,n) -model = CartesianDiscreteModel(parts, domain, partition, map=stretching) - -labels = get_face_labeling(model) -add_tag_from_tags!(labels, "diri1", [5,]) -add_tag_from_tags!(labels, "diri0", [1, 2, 4, 3, 6, 7, 8]) -add_tag_from_tags!(labels, "p", [4,]) - -u_wall= VectorValue(0.0, 0.0) -u_top = VectorValue(1.0, 0.0) - -u_diri_values = [u_wall, u_top] - - -order = 1 -reffeᵤ = ReferenceFE(lagrangian,VectorValue{2,Float64},order) -reffeₚ = ReferenceFE(lagrangian,Float64,order) - -V = TestFESpace(model,reffeᵤ,dirichlet_tags=["diri0","diri1"],conformity=:H1) -Q = TestFESpace(model,reffeₚ,conformity=:H1,dirichlet_tags="p") -Y = MultiFieldFESpace([V,Q]) -U = TrialFESpace(V, u_diri_values) -P = TrialFESpace(Q,0.0) -X = MultiFieldFESpace([U,P]) - -degree = order*4 -Ω= Triangulation(model) -dΩ = Measure(Ω,degree) - -h = h_param(Ω,2) -function τ(u, h) - r = 1 - τ₂ = h^2 / (4 * ν) - val(x) = x - val(x::Gridap.Fields.ForwardDiff.Dual) = x.value - u = val(norm(u)) - - if iszero(u) - return τ₂ - end - - τ₃ = dt / 2 - τ₁ = h / (2 * u) - return 1 / (1 / τ₁^r + 1 / τ₂^r + 1 / τ₃^r) - - -end - - -τb(u, h) = (u ⋅ u) * τ(u, h) - - -Rm(t, (u, p)) = ∂t(u) + u ⋅ ∇(u) + ∇(p) #- hf(t) #- ν*Δ(u) -Rc(u) = ∇ ⋅ u -dRm((u, p), (du, dp), (v, q)) = du ⋅ ∇(u) + u ⋅ ∇(du) + ∇(dp) #- ν*Δ(du) -dRc(du) = ∇ ⋅ du - -Bᴳ(t, (u, p), (v, q)) = ∫(∂t(u) ⋅ v)dΩ + ∫((u ⋅ ∇(u)) ⋅ v)dΩ - ∫((∇ ⋅ v) * p)dΩ + ∫((q * (∇ ⋅ u)))dΩ + ν * ∫(∇(v) ⊙ ∇(u))dΩ #- ∫(hf(t) ⋅ v)dΩ -B_stab(t, (u, p), (v, q)) = ∫((τ ∘ (u.cellfield, h) * (u ⋅ ∇(v) + ∇(q))) ⊙ Rm(t, (u, p)) # First term: SUPG, second term: PSPG u⋅∇(v) + ∇(q) -+ -τb ∘ (u.cellfield, h) * (∇ ⋅ v) ⊙ Rc(u) # Bulk viscosity. 
Try commenting out both stabilization terms to see what happens in periodic and non-periodic cases -)dΩ -res(t, (u, p), (v, q)) = Bᴳ(t, (u, p), (v, q)) + B_stab(t, (u, p), (v, q)) - -dBᴳ(t, (u, p), (du, dp), (v, q)) = ∫(((du ⋅ ∇(u)) ⋅ v) + ((u ⋅ ∇(du)) ⋅ v) + (∇(dp) ⋅ v) + (q * (∇ ⋅ du)))dΩ + ν * ∫(∇(v) ⊙ ∇(du))dΩ -dB_stab(t, (u, p), (du, dp), (v, q)) = ∫(((τ ∘ (u.cellfield, h) * (u ⋅ ∇(v)' + ∇(q))) ⊙ dRm((u, p), (du, dp), (v, q))) + ((τ ∘ (u.cellfield, h) * (du ⋅ ∇(v)')) ⊙ Rm(t, (u, p))) + (τb ∘ (u.cellfield, h) * (∇ ⋅ v) ⊙ dRc(du)))dΩ - - -jac(t, (u, p), (du, dp), (v, q)) = dBᴳ(t, (u, p), (du, dp), (v, q)) + dB_stab(t, (u, p), (du, dp), (v, q)) - -jac_t(t, (u, p), (dut, dpt), (v, q)) = ∫(dut ⋅ v)dΩ + ∫(τ ∘ (u.cellfield, h) * (u ⋅ ∇(v) + (θvp)*∇(q)) ⊙ dut)dΩ - -op = TransientFEOperator(res, jac, jac_t, X,Y) - - -xh0 = zero(X) - - -#stokes_pc_fieldsplit_off_diag_use_amat -stokes_ksp_type pipegcr -#GridapPETSc.Init(args=split(options)) -GridapPETSc.with(args=split(options)) do - -#Definitions of IS -U_Parray, P_Parray = field_dof_split(Y,U,P) - -ISVel = PETScIS(U_Parray); -ISP = PETScIS(P_Parray); - -@check_error_code GridapPETSc.PETSC.ISView(ISVel.is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_WORLD) -@check_error_code GridapPETSc.PETSC.ISView(ISP.is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_WORLD) - -#@check_error_code GridapPETSc.PETSC.ISView(PETScIS(collect(1:5)).is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_WORLD) - -function mysnessetup(snes) -ksp = Ref{GridapPETSc.PETSC.KSP}() -pc = Ref{GridapPETSc.PETSC.PC}() -@check_error_code GridapPETSc.PETSC.SNESSetFromOptions(snes[]) -@check_error_code GridapPETSc.PETSC.SNESGetKSP(snes[],ksp) -#@check_error_code GridapPETSc.PETSC.KSPView(ksp[],C_NULL) -#@check_error_code GridapPETSc.PETSC.KSPSetType(ksp[], GridapPETSc.PETSC.KSPFGMRES) -@check_error_code GridapPETSc.PETSC.KSPGetPC(ksp[],pc) -@check_error_code GridapPETSc.PETSC.PCSetType(pc[],GridapPETSc.PETSC.PCFIELDSPLIT) -@check_error_code GridapPETSc.PETSC.PCFieldSplitSetType(pc[],GridapPETSc.PETSC.PC_COMPOSITE_ADDITIVE) -@check_error_code GridapPETSc.PETSC.PCFactorSetUpMatSolverType(pc[]) -@check_error_code GridapPETSc.PETSC.KSPSetFromOptions(ksp[]) -#After KSP setFromOption -@check_error_code GridapPETSc.PETSC.PCFieldSplitSetIS(pc[],"vel",ISVel.is[]) -@check_error_code GridapPETSc.PETSC.PCFieldSplitSetIS(pc[],"pres",ISP.is[]) -end - -function solve_sim(solver_type) - if solver_type == :petsc - solver = PETScNonlinearSolver(mysnessetup) - elseif solver_type == :julia - solver = NLSolver(show_trace=true, method=:newton) - end - - ode_solver = ThetaMethod(solver, dt, θ) - - solt = solve(ode_solver, op, xh0, t0, tF) - iter = 0 - @time createpvd(parts,"LidDriven") do pvd - for (xh_tn, tn) in solt - println("iteration = $iter") - iter = iter +1 - uh_tn = xh_tn[1] - ph_tn = xh_tn[2] - ωh_tn = ∇ × uh_tn - pvd[tn] = createvtk(Ω, "LidDriven_$tn"*".vtu", cellfields=["uh" => uh_tn, "ph" => ph_tn, "wh" => ωh_tn]) - end - end -end - -solve_sim(:petsc) -solve_sim(:julia) - - - - -#GridapPETSc.Finalize() -end -end - - -function field_dof_split(Y,U,P) - xrows = Y.gids.partition - urows = U.gids.partition - prows = P.gids.partition - - ulrows,plrows = map_parts(urows,prows,xrows) do urows,prows,xrows - uloc_idx = collect(1:1:length(urows.oid_to_lid)) - ploc_idx = collect(length(uloc_idx)+1 :1: length(xrows.oid_to_lid)) - ur = xrows.lid_to_gid[xrows.oid_to_lid][uloc_idx] - pr = xrows.lid_to_gid[xrows.oid_to_lid][ploc_idx] - - return ur, pr - end - ur = ulrows.part .-1 - pr = plrows.part .-1 - - return ur,pr -end 
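With set_fieldsplit from src/PETScFieldSplits.jl, the field_dof_split/PCFieldSplitSetIS boilerplate removed here no longer needs to live in user code: a custom SNES callback only has to switch the PC to fieldsplit and hand over a PETScFieldSplit. A sketch under the same MPI setup as this test, where Y is the problem's MultiFieldFESpace (qualify set_fieldsplit and PETScFieldSplit with GridapPETSc. if they are not exported in your build):

function my_snes_setup(snes)   # user callback, plays the role of mysnessetup above
  ksp = Ref{GridapPETSc.PETSC.KSP}()
  pc  = Ref{GridapPETSc.PETSC.PC}()
  @check_error_code GridapPETSc.PETSC.SNESSetFromOptions(snes[])
  @check_error_code GridapPETSc.PETSC.SNESGetKSP(snes[], ksp)
  @check_error_code GridapPETSc.PETSC.KSPGetPC(ksp[], pc)
  @check_error_code GridapPETSc.PETSC.PCSetType(pc[], GridapPETSc.PETSC.PCFIELDSPLIT)
  @check_error_code GridapPETSc.PETSC.PCFieldSplitSetType(pc[], GridapPETSc.PETSC.PC_COMPOSITE_ADDITIVE)
  @check_error_code GridapPETSc.PETSC.KSPSetFromOptions(ksp[])
  # set_fieldsplit checks the PC type is "fieldsplit" and attaches one IS per tag.
  set_fieldsplit(ksp, PETScFieldSplit(Y, ["vel", "pres"]))
end

solver = PETScNonlinearSolver(my_snes_setup)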
- -function h_param(Ω::GridapDistributed.DistributedTriangulation, D::Int64) - h = map_parts(Ω.trians) do trian - h_param(trian, D) - end - h = CellData.CellField(h, Ω) - h -end - - -function h_param(Ω::Triangulation, D::Int64) - h = lazy_map(h -> h^(1 / D), get_cell_measure(Ω)) - - h -end - -partition =(2,2) -with_backend(main_ld, MPIBackend(), partition) - -#mpiexecjl --project=. -n 4 julia LidDrivenSUPG_IS_distributed.jl
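Taken together, the series also leaves a declarative route that needs no hand-written SNES callback at all: build a PETScFieldSplit from the multi-field space and pass it to the PETScNonlinearSolver(fieldsplit) constructor added earlier in the series; the per-field solvers are then selected through the -fieldsplit_<tag>_ option prefixes. A sketch only, reusing the spaces, transient operator and time-stepping parameters of the LidDriven test above, with a trimmed, illustrative options string:

options = "-snes_type newtonls -snes_monitor \
-ksp_type fgmres -ksp_converged_reason \
-pc_type fieldsplit -pc_fieldsplit_type additive \
-fieldsplit_vel_ksp_type gmres -fieldsplit_vel_pc_type gamg \
-fieldsplit_pres_ksp_type gmres -fieldsplit_pres_pc_type gamg"

GridapPETSc.with(args=split(options)) do
  fieldsplit = PETScFieldSplit(Y, ["vel", "pres"])   # one tag per field, in order
  solver     = PETScNonlinearSolver(fieldsplit)      # snes_from_options plus set_fieldsplit
  ode_solver = ThetaMethod(solver, dt, θ)
  solt       = solve(ode_solver, op, xh0, t0, tF)
  for (xh_tn, tn) in solt
    uh_tn, ph_tn = xh_tn[1], xh_tn[2]
    # post-process or write output as in the test above
  end
end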