CliMA / Oceananigans.jl

🌊 Julia software for fast, friendly, flexible, ocean-flavored fluid dynamics on CPUs and GPUs
https://clima.github.io/OceananigansDocumentation/stable
MIT License
972 stars 193 forks source link

`set!` cannot distribute global arrays #3816

Open glwagner opened 5 hours ago

glwagner commented 5 hours ago

Here's an example:

# MWE: `set!` fails when a *globally* sized array is handed to a distributed field.
using Oceananigans
using MPI
using JLD2

# Distributed (MPI) architecture; with `mpiexec -n 2` this yields a 2×1×1
# rank layout (see the `@show arch` output below).
arch = Distributed()
@show arch

x = y = z = (0, 1)
# Global grid size across all ranks; each of the 2 ranks owns a (4, 2, 2)
# local portion (confirmed by `size(grid) = (4, 2, 2)` in the output).
global_size = (8, 2, 2)
grid = RectilinearGrid(arch, size=global_size; x, y, z)
@show size(grid)  # local size per rank, not the global size

# Only rank 0 generates and writes the bathymetry, sized with the
# *global* dimensions (8, 2, 2).
rank = arch.local_rank
if rank === 0
    bathymetry = 0.1 * rand(global_size...)
    @save "bathymetry.jld2" bathymetry
end

# Barrier so no rank tries to load the file before rank 0 has written it;
# afterwards every rank holds the globally-sized array.
MPI.Barrier(arch.communicator)
@load "bathymetry.jld2" bathymetry

@show size(bathymetry)

# This is where it fails: constructing the immersed boundary calls `set!`
# on a distributed bottom-height field, which rejects the globally-sized
# array (DimensionMismatch) instead of scattering it to each rank's
# (4, 2, 2) local portion.
grid = ImmersedBoundaryGrid(grid, GridFittedBottom(bathymetry))

@show grid

Run this with

mpiexec -n 2 julia --project mwe.jl

from the Oceananigans repo. I get

$ /Users/gregorywagner/.julia/bin/mpiexecjl -n 2 julia --project mwe.jl                                                  [17:11:03]
[ Info: MPI has not been initialized, so we are calling MPI.Init().
[ Info: MPI has not been initialized, so we are calling MPI.Init().
arch = arch = Distributed{CPU} across 2 = 2×1×1 ranks:
├── local_rank: 1 of 0-1
├── local_index: [2, 1, 1]
└── connectivity: east=0 west=0Distributed{CPU} across 2 = 2×1×1 ranks:
├── local_rank: 0 of 0-1
├── local_index: [1, 1, 1]
└── connectivity: east=1 west=1

size(grid) = size(grid) = (4, 2, 2)
(4, 2, 2)
size(bathymetry) = (8, 2, 2)
size(bathymetry) = (8, 2, 2)
ERROR: LoadError: ERROR: LoadError: ArgumentError: ERROR: DimensionMismatch: array could not be set to match destination field
Stacktrace:
 [1] ArgumentError: ERROR: DimensionMismatch: array could not be set to match destination fieldset!(u::Field{Center, Center, Nothing, Nothing, RectilinearGrid{Float64, FullyConnected, Periodic, Bounded, Float64, Float64, Float64, OffsetArrays.OffsetVector{Float64, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, OffsetArrays.OffsetVector{Float64, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, OffsetArrays.OffsetVector{Float64, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, Distributed{CPU, false, Partition{Int64, Nothing, Nothing}, Tuple{Int64, Int64, Int64}, Int64, Tuple{Int64, Int64, Int64}, Oceananigans.DistributedComputations.RankConnectivity{Int64, Int64, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, MPI.Comm, Vector{MPI.Request}, Base.RefValue{Int64}}}, Tuple{Colon, Colon, Colon}, OffsetArrays.OffsetArray{Float64, 3, Array{Float64, 3}}, Float64, FieldBoundaryConditions{BoundaryCondition{Oceananigans.BoundaryConditions.DistributedCommunication, Oceananigans.DistributedComputations.HaloCommunicationRanks{Int64, Int64}}, BoundaryCondition{Oceananigans.BoundaryConditions.DistributedCommunication, Oceananigans.DistributedComputations.HaloCommunicationRanks{Int64, Int64}}, BoundaryCondition{Oceananigans.BoundaryConditions.Periodic, Nothing}, BoundaryCondition{Oceananigans.BoundaryConditions.Periodic, Nothing}, Nothing, Nothing, BoundaryCondition{Oceananigans.BoundaryConditions.Flux, Nothing}}, Nothing, Oceananigans.Fields.FieldBoundaryBuffers{@NamedTuple{send::Array{Float64, 3}, recv::Array{Float64, 3}}, @NamedTuple{send::Array{Float64, 3}, recv::Array{Float64, 3}}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}}, v::Array{Float64, 3})
   @ Oceananigans.DistributedComputations ~/Projects/Oceananigans.jl/src/DistributedComputations/distributed_fields.jl:53
Stacktrace:

  [2][1]  set!(u::Field{Center, Center, Nothing, Nothing, RectilinearGrid{Float64, FullyConnected, Periodic, Bounded, Float64, Float64, Float64, OffsetArrays.OffsetVector{Float64, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, OffsetArrays.OffsetVector{Float64, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, OffsetArrays.OffsetVector{Float64, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, Distributed{CPU, false, Partition{Int64, Nothing, Nothing}, Tuple{Int64, Int64, Int64}, Int64, Tuple{Int64, Int64, Int64}, Oceananigans.DistributedComputations.RankConnectivity{Int64, Int64, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, MPI.Comm, Vector{MPI.Request}, Base.RefValue{Int64}}}, Tuple{Colon, Colon, Colon}, OffsetArrays.OffsetArray{Float64, 3, Array{Float64, 3}}, Float64, FieldBoundaryConditions{BoundaryCondition{Oceananigans.BoundaryConditions.DistributedCommunication, Oceananigans.DistributedComputations.HaloCommunicationRanks{Int64, Int64}}, BoundaryCondition{Oceananigans.BoundaryConditions.DistributedCommunication, Oceananigans.DistributedComputations.HaloCommunicationRanks{Int64, Int64}}, BoundaryCondition{Oceananigans.BoundaryConditions.Periodic, Nothing}, BoundaryCondition{Oceananigans.BoundaryConditions.Periodic, Nothing}, Nothing, Nothing, BoundaryCondition{Oceananigans.BoundaryConditions.Flux, Nothing}}, Nothing, Oceananigans.Fields.FieldBoundaryBuffers{@NamedTuple{send::Array{Float64, 3}, recv::Array{Float64, 3}}, @NamedTuple{send::Array{Float64, 3}, recv::Array{Float64, 3}}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}}, v::Array{Float64, 3})
   @ Oceananigans.DistributedComputations ~/Projects/Oceananigans.jl/src/DistributedComputations/distributed_fields.jl:53
 [2] ImmersedBoundaryGrid(grid::RectilinearGrid{Float64, FullyConnected, Periodic, Bounded, Float64, Float64, Float64, OffsetArrays.OffsetVector{Float64, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, OffsetArrays.OffsetVector{Float64, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, OffsetArrays.OffsetVector{Float64, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, Distributed{CPU, false, Partition{Int64, Nothing, Nothing}, Tuple{Int64, Int64, Int64}, Int64, Tuple{Int64, Int64, Int64}, Oceananigans.DistributedComputations.RankConnectivity{Int64, Int64, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, MPI.Comm, Vector{MPI.Request}, Base.RefValue{Int64}}}, ib::GridFittedBottom{Array{Float64, 3}, Oceananigans.ImmersedBoundaries.CenterImmersedCondition})
   @ Oceananigans.ImmersedBoundaries ~/Projects/Oceananigans.jl/src/ImmersedBoundaries/grid_fitted_bottom.jl:89
 [3] top-level scope
   @ ~/Projects/Oceananigans.jl/mwe.jl:24
in expression starting at /Users/gregorywagner/Projects/Oceananigans.jl/mwe.jl:24

I think `set!` should be able to determine whether an input array has the global size or the local size and do the right thing in either case. It should only error if the array has neither of those sizes.

glwagner commented 4 hours ago

Something's going wrong here, because it looks like we do actually try to support this. I'm not sure whether the test for it is broken or non-existent.