File tree Expand file tree Collapse file tree
Expand file tree Collapse file tree Original file line number Diff line number Diff line change 6363# %%bash
6464# pip3 install torchrl mujoco glfw
6565
66- import torchrl
67- import torch
68- import tqdm
69- from typing import Tuple
70-
7166# sphinx_gallery_start_ignore
7267import warnings
7368warnings .filterwarnings ("ignore" )
69+ import multiprocessing
70+ # TorchRL prefers the spawn start method, which restricts creation of ``~torchrl.envs.ParallelEnv``
71+ # to inside the ``__main__`` guard; for ease of reading, the code switches to fork,
72+ # which is also the default start method in Google's Colaboratory
73+ try :
74+ multiprocessing .set_start_method ("fork" )
75+ except RuntimeError :
76+ assert multiprocessing .get_start_method () == "fork"
7477# sphinx_gallery_end_ignore
7578
79+
80+ import torchrl
81+ import torch
82+ import tqdm
83+ from typing import Tuple
84+
7685###############################################################################
7786# We will execute the policy on CUDA if available
7887device = torch .device ("cuda:0" if torch .cuda .is_available () else "cpu" )
@@ -1219,6 +1228,6 @@ def ceil_div(x, y):
12191228#
12201229# To iterate further on this loss module we might consider:
12211230#
1222- # - Using `@dispatch` (see `[Feature] Distpatch IQL loss module <https://github.com/pytorch/rl/pull/1230>`_.
1231+ # - Using `@dispatch` (see `[Feature] Distpatch IQL loss module <https://github.com/pytorch/rl/pull/1230>`_.)
12231232# - Allowing flexible TensorDict keys.
12241233#
You can’t perform that action at this time.
0 commit comments