diff --git a/Project.toml b/Project.toml
index a8ddff4..3021adb 100644
--- a/Project.toml
+++ b/Project.toml
@@ -11,7 +11,8 @@ julia = "1.6"
 MPI = "0.20"
 
 [extras]
+Optim = "429524aa-4258-5aef-a3af-852621145aeb"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
 
 [targets]
-test = ["Test"]
+test = ["Optim", "Test"]
diff --git a/test/runtests.jl b/test/runtests.jl
index ffb0040..785eb3f 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -6,8 +6,9 @@ ENV["JULIA_PROJECT"] = dirname(Base.active_project())
 JQM = JobQueueMPI
 
 @testset verbose = true "JobQueueMPI Tests" begin
-    mpiexec(exe -> run(`$exe -n 3 $(Base.julia_cmd()) --project test_1.jl`))
-    mpiexec(exe -> run(`$exe -n 3 $(Base.julia_cmd()) --project test_2.jl`))
-    mpiexec(exe -> run(`$exe -n 3 $(Base.julia_cmd()) --project test_pmap_mpi.jl`))
+    run(`$(mpiexec()) -n 3 $(Base.julia_cmd()) --project test_1.jl`)
+    run(`$(mpiexec()) -n 3 $(Base.julia_cmd()) --project test_2.jl`)
+    run(`$(mpiexec()) -n 3 $(Base.julia_cmd()) --project test_pmap_mpi.jl`)
+    run(`$(mpiexec()) -n 3 $(Base.julia_cmd()) --project test_pmap_mpi_optim.jl`)
     include("test_pmap_serial.jl")
 end
diff --git a/test/test_pmap_mpi_optim.jl b/test/test_pmap_mpi_optim.jl
new file mode 100644
index 0000000..10f48c6
--- /dev/null
+++ b/test/test_pmap_mpi_optim.jl
@@ -0,0 +1,52 @@
+import JobQueueMPI as JQM
+using Test
+using MPI
+using Optim
+
+JQM.mpi_init()
+
+N = 5
+data = collect(1:N)
+function g(x, i, data)
+    return i * (x[i] - 2 * i) ^ 2 + data[i]
+end
+x0 = zeros(N)
+list_i = collect(1:N)
+fake_input = Int[] # ignored by the workers' pmap calls
+
+
+let
+    is_done = false
+    if JQM.is_controller_process()
+        ret = optimize(x0, NelderMead()) do x
+            MPI.bcast(is_done, MPI.COMM_WORLD)
+            g_x = JQM.pmap((v)->g(v[1], v[2], data), [(x, i) for i in list_i])
+            return sum(g_x)
+        end
+        # tell the workers to stop calling pmap
+        is_done = true
+        MPI.bcast(is_done, MPI.COMM_WORLD)
+        @testset "pmap MPI Optim" begin
+            @test maximum(Optim.minimizer(ret) .- collect(2:2:2N)) ≈ 0.0 atol = 1e-3
+            @test Optim.minimum(ret) ≈ sum(1:N) atol = 1e-3
+        end
+    else
+        # optimize calls pmap multiple times, so the worker
+        # processes must keep calling pmap to serve the
+        # controller's requests.
+        # Once the controller returns from optimize, it
+        # broadcasts is_done = true so that the workers can
+        # leave this otherwise infinite loop of pmap calls.
+        while true
+            is_done = MPI.bcast(is_done, MPI.COMM_WORLD)
+            if is_done
+                break
+            end
+            JQM.pmap((v)->g(v[1], v[2], data), fake_input)
+        end
+    end
+end
+
+JQM.mpi_barrier()
+
+JQM.mpi_finalize()
\ No newline at end of file