Skip to content

Commit

Permalink
Test AD of soft contacts model also including its parameters
Browse files Browse the repository at this point in the history
  • Loading branch information
diegoferigo committed Mar 28, 2024
1 parent 07e4642 commit 2b353d1
Showing 1 changed file with 14 additions and 5 deletions.
19 changes: 14 additions & 5 deletions tests/test_automatic_differentiation.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@

import jaxsim.api as js
import jaxsim.rbda
import jaxsim.typing as jtp
from jaxsim import VelRepr

# All JaxSim algorithms, excluding the variable-step integrators, should support
Expand Down Expand Up @@ -300,17 +301,25 @@ def test_ad_soft_contacts(
# ====

# Get a closure exposing only the parameters to be differentiated.
soft_contacts = lambda p, v, m: jaxsim.rbda.SoftContacts(
parameters=parameters
).contact_model(position=p, velocity=v, tangential_deformation=m)
def close_over_inputs_and_parameters(
    p: jtp.VectorLike,
    v: jtp.VectorLike,
    m: jtp.VectorLike,
    params: jaxsim.rbda.SoftContactsParams,
) -> tuple[jtp.Vector, jtp.Vector]:
    """
    Evaluate the soft-contacts model on the given inputs.

    Exposes the contact point position `p`, velocity `v`, material
    deformation `m`, and the contact parameters `params` as explicit
    arguments so that all of them can be differentiated by `check_grads`.
    """
    # Build the contact model from the differentiated parameters, then
    # evaluate it on the remaining differentiated inputs.
    contact_model = jaxsim.rbda.SoftContacts(parameters=params)
    return contact_model.contact_model(
        position=p, velocity=v, tangential_deformation=m
    )

# Check derivatives against finite differences.
check_grads(
f=soft_contacts,
args=(p, v, m),
f=close_over_inputs_and_parameters,
args=(p, v, m, parameters),
order=AD_ORDER,
modes=["rev", "fwd"],
eps=ε,
# On GPU, the tolerance needs to be increased
rtol=0.02 if "gpu" in {d.platform for d in p.devices()} else None,
)


Expand Down

0 comments on commit 2b353d1

Please sign in to comment.