Skip to content

Commit

Permalink
Merge pull request #10 from psrenergy/jg/optim
Browse files Browse the repository at this point in the history
Add optim test
  • Loading branch information
guilhermebodin authored Oct 9, 2024
2 parents dd9b14b + 15d4682 commit dc7a9f7
Show file tree
Hide file tree
Showing 3 changed files with 58 additions and 4 deletions.
3 changes: 2 additions & 1 deletion Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,8 @@ julia = "1.6"
MPI = "0.20"

[extras]
Optim = "429524aa-4258-5aef-a3af-852621145aeb"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[targets]
test = ["Test"]
test = ["Optim", "Test"]
7 changes: 4 additions & 3 deletions test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,9 @@ ENV["JULIA_PROJECT"] = dirname(Base.active_project())
JQM = JobQueueMPI

@testset verbose = true "JobQueueMPI Tests" begin
mpiexec(exe -> run(`$exe -n 3 $(Base.julia_cmd()) --project test_1.jl`))
mpiexec(exe -> run(`$exe -n 3 $(Base.julia_cmd()) --project test_2.jl`))
mpiexec(exe -> run(`$exe -n 3 $(Base.julia_cmd()) --project test_pmap_mpi.jl`))
run(`$(mpiexec()) -n 3 $(Base.julia_cmd()) --project test_1.jl`)
run(`$(mpiexec()) -n 3 $(Base.julia_cmd()) --project test_2.jl`)
run(`$(mpiexec()) -n 3 $(Base.julia_cmd()) --project test_pmap_mpi.jl`)
run(`$(mpiexec()) -n 3 $(Base.julia_cmd()) --project test_pmap_mpi_optim.jl`)
include("test_pmap_serial.jl")
end
52 changes: 52 additions & 0 deletions test/test_pmap_mpi_optim.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
import JobQueueMPI as JQM
using Test
using MPI
using Optim

# Bring up the MPI environment before any communication takes place.
JQM.mpi_init()

# Problem size and the per-index additive data used by the objective.
N = 5
data = [i for i in 1:N]
# Separable objective term for index `i`: a quadratic in x[i] centered at 2i,
# scaled by i and shifted by data[i]; its minimum over x[i] is data[i] at x[i] = 2i.
g(x, i, data) = i * (x[i] - 2i)^2 + data[i]
x0 = fill(0.0, N)          # initial guess handed to the optimizer
list_i = [1:N;]            # indices 1..N — one pmap job per index
fake_input = Vector{Int}() # empty job list: workers call pmap only to serve the controller


let
    # Controller/worker handshake: the controller drives Optim while the
    # workers keep serving pmap rounds until told the optimization is done.
    is_done = false
    if JQM.is_controller_process()
        ret = optimize(x0, NelderMead()) do x
            # Every objective evaluation starts another pmap round on the
            # workers, so broadcast is_done == false to keep them in the loop.
            MPI.bcast(is_done, MPI.COMM_WORLD)
            g_x = JQM.pmap((v) -> g(v[1], v[2], data), [(x, i) for i in list_i])
            return sum(g_x)
        end
        # tell workers to stop calling pmap
        is_done = true
        MPI.bcast(is_done, MPI.COMM_WORLD)
        @testset "pmap MPI Optim" begin
            # Term i is minimized at x[i] = 2i, so the minimizer should be
            # [2, 4, ..., 2N]. Use abs so deviations in either direction fail
            # (a plain maximum would ignore components that undershoot).
            @test maximum(abs.(Optim.minimizer(ret) .- collect(2:2:2N))) ≈ 0.0 atol = 1e-3
            # At the minimum each term contributes exactly data[i] = i.
            @test Optim.minimum(ret) ≈ sum(1:N) atol = 1e-3
        end
    else
        # optimize will call pmap multiple times
        # hence, the other processes need to be continuously calling pmap
        # to work with the controller.
        # once the controller leaves the optimize function it will
        # broadcast a is_done = true, so that the workers stop the
        # infinite loop of pmaps.
        while true
            is_done = MPI.bcast(is_done, MPI.COMM_WORLD)
            if is_done
                break
            end
            JQM.pmap((v) -> g(v[1], v[2], data), fake_input)
        end
    end
end

# Synchronize all ranks so no process finalizes while others are still testing.
JQM.mpi_barrier()

# Tear down the MPI environment started by JQM.mpi_init() above.
JQM.mpi_finalize()

0 comments on commit dc7a9f7

Please sign in to comment.