@@ -263,6 +263,10 @@ class RayCollector(DataCollectorBase):
263263 If not provided, a :class:`~torchrl.collectors.RayWeightUpdater` will be used by default, leveraging
264264 Ray's distributed capabilities.
265265 Consider using a constructor if the updater needs to be serialized.
266+            use_env_creator (bool, optional): if ``True``, the environment constructor functions will be wrapped
267+                in :class:`~torchrl.envs.EnvCreator`. This is useful in multiprocessed settings where shared
268+                memory must be managed explicitly; since Ray provides its own object storage mechanism,
269+                wrapping is typically unnecessary here. Defaults to ``False``.
266270
267271 Examples:
268272 >>> from torch import nn
@@ -326,6 +330,7 @@ def __init__(
326330 weight_updater : WeightUpdaterBase
327331 | Callable [[], WeightUpdaterBase ]
328332 | None = None ,
333+ use_env_creator : bool = False ,
329334 ):
330335 self .frames_per_batch = frames_per_batch
331336 if remote_configs is None :
@@ -400,9 +405,10 @@ def check_list_length_consistency(*lists):
400405 create_env_fn , collector_kwargs , remote_configs = out_lists
401406 num_collectors = len (create_env_fn )
402407
403- for i in range (len (create_env_fn )):
404- if not isinstance (create_env_fn [i ], (EnvBase , EnvCreator )):
405- create_env_fn [i ] = EnvCreator (create_env_fn [i ])
408+ if use_env_creator :
409+ for i in range (len (create_env_fn )):
410+ if not isinstance (create_env_fn [i ], (EnvBase , EnvCreator )):
411+ create_env_fn [i ] = EnvCreator (create_env_fn [i ])
406412
407413 # If ray available, try to connect to an existing Ray cluster or start one and connect to it.
408414 if not _has_ray :
0 commit comments