
Commit 6973eb2
Update tutorials to use TensorPipeRpcBackendOptions. (#1164)
Co-authored-by: pritam <pritam.damania@fb.com>
1 parent 258f422 commit 6973eb2

File tree

advanced_source/rpc_ddp_tutorial/main.py
intermediate_source/dist_pipeline_parallel_tutorial.rst

2 files changed: +3 −3 lines changed

advanced_source/rpc_ddp_tutorial/main.py (2 additions, 2 deletions)

@@ -6,7 +6,7 @@
 import torch.distributed as dist
 import torch.distributed.autograd as dist_autograd
 import torch.distributed.rpc as rpc
-from torch.distributed.rpc import ProcessGroupRpcBackendOptions
+from torch.distributed.rpc import TensorPipeRpcBackendOptions
 import torch.multiprocessing as mp
 import torch.optim as optim
 from torch.distributed.optim import DistributedOptimizer
@@ -128,7 +128,7 @@ def run_worker(rank, world_size):
     os.environ['MASTER_PORT'] = '29500'


-    rpc_backend_options = ProcessGroupRpcBackendOptions()
+    rpc_backend_options = TensorPipeRpcBackendOptions()
     rpc_backend_options.init_method='tcp://localhost:29501'

     # Rank 2 is master, 3 is ps and 0 and 1 are trainers.
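For context, the sketch below shows a complete `run_worker` using the new backend options, matching the change above. Only the `TensorPipeRpcBackendOptions` construction and the `init_method` value come from this commit; the worker naming scheme (`worker{rank}`) and the explicit `rpc.shutdown()` call are illustrative assumptions.

```python
# Minimal sketch of RPC initialization with the TensorPipe backend.
# Assumptions: worker names and the trailing shutdown are illustrative;
# the options usage mirrors the diff above.
import os

import torch.distributed.rpc as rpc
from torch.distributed.rpc import TensorPipeRpcBackendOptions

def run_worker(rank, world_size):
    os.environ['MASTER_ADDR'] = 'localhost'
    os.environ['MASTER_PORT'] = '29500'

    # TensorPipe replaces the deprecated ProcessGroup RPC backend.
    rpc_backend_options = TensorPipeRpcBackendOptions()
    rpc_backend_options.init_method = 'tcp://localhost:29501'

    rpc.init_rpc(
        f"worker{rank}",
        rank=rank,
        world_size=world_size,
        rpc_backend_options=rpc_backend_options,
    )
    rpc.shutdown()  # by default, blocks until all RPC participants finish
```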

intermediate_source/dist_pipeline_parallel_tutorial.rst (1 addition, 1 deletion)

@@ -316,7 +316,7 @@ where the ``shutdown`` by default will block until all RPC participants finish.
 def run_worker(rank, world_size, num_split):
     os.environ['MASTER_ADDR'] = 'localhost'
     os.environ['MASTER_PORT'] = '29500'
-    options = rpc.ProcessGroupRpcBackendOptions(num_send_recv_threads=128)
+    options = rpc.TensorPipeRpcBackendOptions(num_worker_threads=128)

     if rank == 0:
         rpc.init_rpc(
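Note that the thread-pool argument is renamed along with the backend: `num_send_recv_threads` on `ProcessGroupRpcBackendOptions` becomes `num_worker_threads` on `TensorPipeRpcBackendOptions`. A minimal sketch of the updated options construction:

```python
import torch.distributed.rpc as rpc

# TensorPipe exposes the RPC thread-pool size as num_worker_threads;
# the old ProcessGroup backend called it num_send_recv_threads.
options = rpc.TensorPipeRpcBackendOptions(num_worker_threads=128)
```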
