Skip to content
This repository was archived by the owner on Oct 31, 2023. It is now read-only.

Commit fe06f56

Browse files
committed
Apparently, async transfer was never needed.
Removed the stray `asynch=True` keyword arguments.
1 parent 4407460 commit fe06f56

File tree

3 files changed

+8
-8
lines changed

3 files changed

+8
-8
lines changed

elf/utils_elf.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -188,13 +188,13 @@ def hist(self, s, key=None):
188188
else:
189189
return self[key][s]
190190

191-
def transfer_cpu2gpu(self, batch_gpu, asynch=True):
191+
def transfer_cpu2gpu(self, batch_gpu):
192192
''' transfer batch data to gpu '''
193193
# For each time step
194194
for k, v in self.batch.items():
195-
batch_gpu[k].copy_(v, asynch=asynch)
195+
batch_gpu[k].copy_(v)
196196

197-
def transfer_cpu2cpu(self, batch_dst, asynch=True):
197+
def transfer_cpu2cpu(self, batch_dst):
198198
''' transfer batch data to cpu '''
199199

200200
# For each time step

elf_python/memory_receiver.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -75,19 +75,19 @@ def _cpu2gpu(batch_cpu, batch_gpu, allow_incomplete_batch=False):
7575
if isinstance(batch_cpu_t[k], (torch.FloatTensor, torch.LongTensor)):
7676
if allow_incomplete_batch:
7777
if len(batch_cpu_t[k].size()) == 1:
78-
batch_gpu_t[k] = batch_cpu_t[k][:batchsize].cuda(asynch=True)
78+
batch_gpu_t[k] = batch_cpu_t[k][:batchsize].cuda()
7979
else:
80-
batch_gpu_t[k] = batch_cpu_t[k][:batchsize, :].cuda(asynch=True)
80+
batch_gpu_t[k] = batch_cpu_t[k][:batchsize, :].cuda()
8181
else:
8282
if isinstance(batch_cpu_t[k], torch.FloatTensor):
8383
if k not in batch_gpu_t:
8484
batch_gpu_t[k] = torch.cuda.FloatTensor(batch_cpu_t[k].size())
85-
batch_gpu_t[k].copy_(batch_cpu_t[k], asynch=True)
85+
batch_gpu_t[k].copy_(batch_cpu_t[k])
8686

8787
elif isinstance(batch_cpu_t[k], torch.LongTensor):
8888
if k not in batch_gpu_t:
8989
batch_gpu_t[k] = torch.cuda.LongTensor(batch_cpu_t[k].size())
90-
batch_gpu_t[k].copy_(batch_cpu_t[k], asynch=True)
90+
batch_gpu_t[k].copy_(batch_cpu_t[k])
9191
else:
9292
batch_gpu_t[k] = batch_cpu_t[k]
9393

rlpytorch/runner/parameter_server.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -215,7 +215,7 @@ def process_main(self, i, gpu_id):
215215

216216
while True:
217217
self.cvs_recv[i].wait()
218-
utils_elf.transfer_cpu2gpu(batch, batch_gpu, asynch=True)
218+
utils_elf.transfer_cpu2gpu(batch, batch_gpu)
219219
self.cvs_send[i].notify()
220220
self.cb_remote_batch_process(context, batch_gpu)
221221

0 commit comments

Comments (0)