1 change: 1 addition & 0 deletions .vscode/settings.json
@@ -0,0 +1 @@
{}
2 changes: 1 addition & 1 deletion Project.toml
@@ -20,7 +20,7 @@ Gridap = "0.17"
GridapDistributed = "0.2"
MPI = "0.14, 0.15, 0.16, 0.17, 0.18, 0.19"
PETSc_jll = "3.13"
PartitionedArrays = "0.2.4"
PartitionedArrays = "0.2.13"
SparseMatricesCSR = "0.6.6"
julia = "1.3"

6 changes: 6 additions & 0 deletions src/Environment.jl
@@ -3,6 +3,12 @@ function Init(;args=String[],file="",help="",finalize_atexit=true)
if !MPI.Initialized()
MPI.Init()
end
# To avoid every rank printing the same output in parallel runs
# if MPI.Comm_rank(MPI.COMM_WORLD) != 0
# redirect_stderr(devnull)
# redirect_stdout(devnull)
# end

if finalize_atexit
atexit(Finalize)
end
16 changes: 14 additions & 2 deletions src/GridapPETSc.jl
@@ -5,6 +5,12 @@ using Libdl
using Gridap.Helpers
using Gridap.Algebra
using Gridap.Arrays
using Gridap.FESpaces
using Gridap.MultiField

using GridapDistributed
using GridapDistributed.MultiField

using LinearAlgebra
using SparseArrays
using SparseMatricesCSR
@@ -69,19 +75,25 @@ end
include("PETSC.jl")

using GridapPETSc.PETSC: @check_error_code
using GridapPETSc.PETSC: PetscBool, PetscInt, PetscScalar, Vec, Mat, KSP, PC, SNES
using GridapPETSc.PETSC: PetscBool, PetscInt, PetscScalar, Vec, Mat, KSP, PC, SNES, IS
#export PETSC
export @check_error_code
export PetscBool, PetscInt, PetscScalar, Vec, Mat, KSP, PC
export PetscBool, PetscInt, PetscScalar, Vec, Mat, KSP, PC, IS

include("Environment.jl")

export PETScVector
export PETScMatrix
export petsc_sparse
export PETScIS
include("PETScArrays.jl")
include("PETScIndexes.jl")
include("PartitionedArrays.jl")


export PETScFieldSplit
include("PETScFieldSplits.jl")

export PETScLinearSolver
include("PETScLinearSolvers.jl")

87 changes: 87 additions & 0 deletions src/PETSC.jl
@@ -648,6 +648,9 @@ Base.unsafe_convert(::Type{Ptr{Cvoid}},v::PC) = v.ptr
@wrapper(:PCFactorSetUpMatSolverType,PetscErrorCode,(PC,),(pc,),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/PC/PCFactorSetUpMatSolverType.html")
@wrapper(:PCFactorGetMatrix,PetscErrorCode,(PC,Ptr{Mat}),(pc,mat),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/PC/PCFactorGetMatrix.html")

#PCType() = PCType(Ptr{Cvoid}())
@wrapper(:PCGetType,PetscErrorCode,(PC, Ptr{Cstring}),(pc, typ),"https://petsc.org/main/docs/manualpages/PC/PCGetType/")


"""
Julia alias for the `SNES` C type.
@@ -703,4 +706,88 @@ const SNESPATCH = "patch"
@wrapper(:PetscObjectRegisterDestroy,PetscErrorCode,(Ptr{Cvoid},),(obj,),"https://petsc.org/release/docs/manualpages/Sys/PetscObjectRegisterDestroy.html")
@wrapper(:PetscObjectRegisterDestroyAll,PetscErrorCode,(),(),"https://petsc.org/release/docs/manualpages/Sys/PetscObjectRegisterDestroyAll.html")

"""
Julia alias for the `IS` C type.
See [PETSc manual](https://petsc.org/release/docs/manualpages/IS/IS/).
"""
struct IS
ptr::Ptr{Cvoid}
end
IS() = IS(Ptr{Cvoid}())
Base.convert(::Type{IS},p::Ptr{Cvoid}) = IS(p)
Base.unsafe_convert(::Type{Ptr{Cvoid}},v::IS) = v.ptr


"""
Julia alias for `ISType` C type.

See [PETSc manual](https://petsc.org/main/docs/manualpages/IS/ISType/).
"""
const ISType = Cstring


"""
Julia alias for `PetscCopyMode` C type.
See [PETSc manual](https://petsc.org/main/docs/manualpages/Sys/PetscCopyMode/).
"""
@enum PetscCopyMode begin
PETSC_COPY_VALUES
PETSC_OWN_POINTER
PETSC_USE_POINTER
end
@wrapper(:PetscObjectSetName, PetscErrorCode, (Ptr{Cvoid}, Cstring), (field, name), "https://petsc.org/main/docs/manualpages/Sys/PetscObjectSetName/")
@wrapper(:ISCreateGeneral,PetscErrorCode,(MPI.Comm, PetscInt, Ptr{PetscInt}, PetscCopyMode, Ptr{IS}),(comm, n, idx, mode, is), "https://petsc.org/main/docs/manualpages/IS/ISCreateGeneral/")
@wrapper(:ISView,PetscErrorCode,(IS, PetscViewer),(is, viewer), "https://petsc.org/main/docs/manualpages/IS/ISView/")
@wrapper(:ISCreateBlock,PetscErrorCode,(MPI.Comm, PetscInt, PetscInt, Ptr{PetscInt}, PetscCopyMode, Ptr{IS}),(comm, bs, n, idx, mode, is), "https://petsc.org/release/docs/manualpages/IS/ISCreateBlock/")
@wrapper(:ISGetIndices,PetscErrorCode,(IS, Ptr{Ptr{PetscInt}}),(is, ptr), "https://petsc.org/release/docs/manualpages/IS/ISGetIndices/")
@wrapper(:ISRestoreIndices,PetscErrorCode,(IS, Ptr{Ptr{PetscInt}}),(is, ptr), "https://petsc.org/release/docs/manualpages/IS/ISRestoreIndices/")
@wrapper(:ISExpand,PetscErrorCode,(IS, IS, Ptr{IS}),(is1, is2, isout), "https://petsc.org/release/docs/manualpages/IS/ISExpand/")
@wrapper(:ISGetSize,PetscErrorCode,(IS, Ptr{PetscInt}), (is, nsize), "https://petsc.org/main/docs/manualpages/IS/ISGetSize/")
@wrapper(:ISDestroy, PetscErrorCode,(Ptr{IS},), (pis,), "https://petsc.org/main/docs/manualpages/IS/ISDestroy/")

@wrapper(:PCFieldSplitSetIS,PetscErrorCode,(PC, Cstring, IS),(pc, Cfieldname, is), "https://petsc.org/release/docs/manualpages/PC/PCFieldSplitSetIS/")
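
A minimal sketch of how these IS wrappers compose (hypothetical code, run after GridapPETSc.Init(); PETSc indices are 0-based):

# Build an IS from three 0-based indices, print it, and destroy it
idx = PetscInt[0, 2, 4]
is = Ref{IS}()
@check_error_code ISCreateGeneral(MPI.COMM_SELF, length(idx), idx, PETSC_COPY_VALUES, is)
@check_error_code ISView(is[], @PETSC_VIEWER_STDOUT_SELF)
@check_error_code ISDestroy(is)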

"""
Julia alias for `PCCompositeType` C type.
See [PETSc manual](https://petsc.org/release/docs/manualpages/PC/PCCompositeType/).
"""
@enum PCCompositeType begin
PC_COMPOSITE_ADDITIVE
PC_COMPOSITE_MULTIPLICATIVE
PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE
PC_COMPOSITE_SPECIAL
PC_COMPOSITE_SCHUR
PC_COMPOSITE_GKB
end


@wrapper(:PCFieldSplitSetType,PetscErrorCode,(PC, PCCompositeType),(pc, pcctype), "https://petsc.org/release/docs/manualpages/PC/PCFieldSplitSetType/")
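
For instance, a Schur-complement split could be requested as follows (a sketch; pc is assumed to be a Ref to a PC of type fieldsplit):

@check_error_code PCFieldSplitSetType(pc[], PC_COMPOSITE_SCHUR)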


#PETSc Print
@wrapper(:PetscPrintf,PetscErrorCode,(MPI.Comm, Cstring ),(comm, args ),"https://petsc.org/release/docs/manualpages/Sys/PetscPrintf/")
@wrapper(:PetscSynchronizedPrintf,PetscErrorCode,(MPI.Comm, Cstring),(comm, args),"https://petsc.org/main/docs/manualpages/Sys/PetscSynchronizedPrintf/")


#PETSc Sleep
@wrapper(:PetscSleep,PetscErrorCode,(PetscReal,),(s,),"https://petsc.org/main/docs/manualpages/Sys/PetscSleep/")


#PETSc Alloc
@wrapper(:PetscMallocA,PetscErrorCode,(PetscInt, PetscBool, PetscInt, Ptr{Cstring}, Ptr{Cstring}, Csize_t, Ptr{Cvoid},), (n, clear, lineno, fun, fname, bytes0, ptr0,), "https://petsc.org/release/docs/manualpages/Sys/PetscMallocA/#petscmalloca")
#PETSC_EXTERN PetscErrorCode PetscMallocA(int,PetscBool,int,const char *,const char *,size_t,void *,...);

"""
@PetscMalloc1

See [PETSc manual](https://petsc.org/release/docs/manualpages/Sys/PetscMalloc1/).
"""
macro PetscMalloc1(m1,r1)
quote
func = Ptr{Nothing}()
linec = Ptr{Nothing}()
# Mirrors the C macro below: sizeof(eltype(eltype(r1))) plays the role of sizeof(**(r1))
PetscMallocA(1, PETSC_FALSE, Cint(11), Ref(Cstring(linec)), Ref(Cstring(func)), Csize_t($(esc(m1)))*sizeof(eltype(eltype($(esc(r1))))), $(esc(r1)))
end
end
# PetscMalloc1(m1,r1) PetscMallocA(1,PETSC_FALSE,__LINE__,PETSC_FUNCTION_NAME,__FILE__,(size_t)(m1)*sizeof(**(r1)),(r1))
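
A hypothetical invocation of the macro, allocating room for 10 PetscInt entries:

v = Ref{Ptr{PetscInt}}()
@check_error_code @PetscMalloc1(10, v)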

end # module
102 changes: 102 additions & 0 deletions src/PETScFieldSplits.jl
@@ -0,0 +1,102 @@
# PETScFieldSplit
# MF (multifield) is a vector whose entries are SingleFieldFESpace or DistributedSingleFieldFESpace objects, e.g. [U, P]
# tags is a vector with the name of each field, in order, e.g. ["vel", "pres"]
# show_idx is a Bool; if true, the index sets are printed
struct PETScFieldSplit
MF
tags::Vector{String}
show_idx::Bool
end

# PETScFieldSplit constructor from a multifield space
function PETScFieldSplit(MF::Union{MultiFieldFESpace,GridapDistributed.DistributedMultiFieldFESpace}, tags::Vector{String}; show_idx=false)
n_fields = length(MF)
n_tags = length(tags)
@assert n_tags == n_fields # Verify that a name is assigned to each field
M = [MF[i] for i in 1:n_fields] # Vector of SingleFieldFESpace
PETScFieldSplit(M, tags, show_idx)
end

function PETScFieldSplit(MF::Union{Vector{<:SingleFieldFESpace}, Vector{<:GridapDistributed.DistributedSingleFieldFESpace}}, tags; show_idx=false)
n_fields = length(MF)
n_tags = length(tags)
@assert n_tags == n_fields
PETScFieldSplit(MF, tags, show_idx)
end
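
A hypothetical construction, assuming trial spaces U (velocity) and P (pressure) built elsewhere with Gridap:

X = MultiFieldFESpace([U, P])
split = PETScFieldSplit(X, ["vel", "pres"]; show_idx=false)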




function field_dof_split(U)
X = MultiFieldFESpace(U)
field_dof_split(X, U)
end

# Obtains the global DOF ids of each field on each processor (possibly not the best/most elegant way)
function field_dof_split(X, U)
@assert length(X) == length(U)
xrows = X.gids.partition
urows = Any[]
for Ui in X
urowsi = Ui.gids.partition
push!(urows, urowsi)
end

offset = 0
ulrows = Any[]
for urowsi in urows
ulrowsi = map_parts(urowsi, xrows) do urowsij, xrowsj
num_dof_Ui = length(urowsij.oid_to_lid)
uloc_idx = collect(1+offset:num_dof_Ui+offset)
uri = xrowsj.lid_to_gid[xrowsj.oid_to_lid][uloc_idx]
offset = num_dof_Ui + offset
return uri
end
# This works only with the MPI backend; the sequential backend has no .part field
# Subtract 1 to match PETSc's 0-based numbering
ur = ulrowsi.part .- 1
push!(ulrows, ur)
end
return ulrows
end
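
The offset logic can be illustrated with plain vectors (a hypothetical single-process case: 5 velocity DOFs followed by 3 pressure DOFs, globally numbered 1:8):

xg = collect(1:8) # owned global ids of the monolithic space
offset = 0
splits = map([5, 3]) do n # per-field DOF counts
idx = xg[1+offset:n+offset] .- 1 # shift to PETSc's 0-based numbering
offset += n
idx
end
# splits == [[0, 1, 2, 3, 4], [5, 6, 7]]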



function set_fieldsplit(sol, SplitField::PETScFieldSplit)

# Get the PC and its type name
pc = Ref{GridapPETSc.PETSC.PC}()
pctype = Ref{Cstring}()

# Get the KSP: directly for linear problems, through the SNES for nonlinear ones
# (maybe not the most elegant solution)
if typeof(sol) <: Base.RefValue{GridapPETSc.PETSC.KSP}
ksp = sol
elseif typeof(sol) <: Base.RefValue{GridapPETSc.PETSC.SNES}
snes = sol
ksp = Ref{GridapPETSc.PETSC.KSP}()
@check_error_code GridapPETSc.PETSC.SNESGetKSP(snes[], ksp)
else
error("set_fieldsplit expects a Ref to a KSP or a SNES")
end

@check_error_code GridapPETSc.PETSC.KSPGetPC(ksp[], pc)
@check_error_code GridapPETSc.PETSC.PCGetType(pc[], pctype)

pc_name = unsafe_string(pctype[])

@assert pc_name == "fieldsplit" # Check that the preconditioner is of type fieldsplit
n_tags = length(SplitField.tags)
U_Parray = field_dof_split(SplitField.MF)
for i in 1:n_tags
ISU = PETScIS(U_Parray[i])
@check_error_code GridapPETSc.PETSC.PCFieldSplitSetIS(pc[], SplitField.tags[i], ISU.is[])
if SplitField.show_idx
@check_error_code GridapPETSc.PETSC.ISView(ISU.is[], GridapPETSc.PETSC.@PETSC_VIEWER_STDOUT_SELF)
end
end


end
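
A hedged end-to-end sketch: PETScLinearSolver accepts a setup function that receives the KSP, which is a natural place to call set_fieldsplit (assumes GridapPETSc was initialized with fieldsplit options such as -pc_type fieldsplit, and that split is the PETScFieldSplit built above):

function mykspsetup(ksp)
@check_error_code GridapPETSc.PETSC.KSPSetFromOptions(ksp[]) # reads -pc_type fieldsplit etc.
set_fieldsplit(ksp, split)
end
solver = PETScLinearSolver(mykspsetup)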
81 changes: 81 additions & 0 deletions src/PETScIndexes.jl
@@ -0,0 +1,81 @@
# Index

mutable struct PETScIS <: AbstractVector{PetscInt}
is::Base.RefValue{IS}
initialized::Bool
size::Tuple{Int}
comm::MPI.Comm
PETScIS(comm::MPI.Comm) = new(Ref{IS}(), false, (-1,), comm)
end


function Init(a::PETScIS)
n = Ref{PetscInt}()
@check_error_code PETSC.ISGetSize(a.is[], n)
a.size = (Int(n[]),)
@assert Threads.threadid() == 1
_NREFS[] += 1
a.initialized = true
finalizer(Finalize, a)
end

function Finalize(a::PETScIS)
if a.initialized && GridapPETSc.Initialized()
if a.comm == MPI.COMM_SELF
@check_error_code PETSC.ISDestroy(a.is)
else
@check_error_code PETSC.PetscObjectRegisterDestroy(a.is[])
end
a.initialized = false
@assert Threads.threadid() == 1
_NREFS[] -= 1
end
nothing
end

function Base.size(v::PETScIS)
@assert v.initialized
v.size
end

Base.@propagate_inbounds function Base.getindex(v::PETScIS, i1::Integer)
@boundscheck checkbounds(v, i1)
ptr = Ref{Ptr{PetscInt}}()
@check_error_code PETSC.ISGetIndices(v.is[], ptr)
val = unsafe_load(ptr[], i1) # unsafe_load is 1-based; the IS stores 0-based ids
@check_error_code PETSC.ISRestoreIndices(v.is[], ptr)
return val
end

function PETScIS(idx::Vector{PetscInt}, bs=1)
comm = MPI.COMM_SELF
is = PETScIS(comm)
n = length(idx)
@check_error_code GridapPETSc.PETSC.ISCreateGeneral(comm, n, idx, GridapPETSc.PETSC.PETSC_COPY_VALUES, is.is)
Init(is)
end

function PETScIS(idx::AbstractVector, bs=1)
idx = PetscInt.(idx)
PETScIS(idx)
end
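
A small hypothetical use of the vector constructor (indices are 0-based, as PETSc expects):

is = PETScIS([0, 1, 4, 5])
length(is) # == 4, obtained through ISGetSize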

# #Block Implementation
# function PETScIS(array::Vector{PetscInt},n, bs=1)
# comm = MPI.COMM_SELF
# is = PETScIS(comm)
# n = PetscInt(n)
# bs = PetscInt(bs)
# @check_error_code GridapPETSc.PETSC.ISCreateBlock(comm, n, bs, array, GridapPETSc.PETSC.PETSC_COPY_VALUES, is.is)
# Init(is)
# end

# Constructors

# Draft constructor: allocates raw storage for n indices via @PetscMalloc1.
# Note: it returns the Ref holding the allocated pointer and does not yet build an IS.
function PETScIS(n::Integer)
v = Ref{Ptr{PetscInt}}()
@check_error_code PETSC.@PetscMalloc1(n, v)
return v
end