From 7319f85324d6b89982e37f156b2fa108b3e21aa0 Mon Sep 17 00:00:00 2001
From: Marc-André Cournoyer
Date: Wed, 28 Dec 2016 09:39:50 -0500
Subject: [PATCH] Disable CUDA cross-GPU checks when 2 GPUs are used

Fixes the following error when training on two GPUs:

> Assertion `THCTensor_(checkGPU)(state, 3, self_, src1, src2)'

Solution found here:
https://github.com/torch/cutorch/issues/434#issuecomment-237281807
---
 train.lua | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/train.lua b/train.lua
index 69752d6..da4a98a 100644
--- a/train.lua
+++ b/train.lua
@@ -951,6 +951,9 @@ function main()
     print('using CUDA on GPU ' .. opt.gpuid .. '...')
     if opt.gpuid2 >= 0 then
       print('using CUDA on second GPU ' .. opt.gpuid2 .. '...')
+      -- Disable cross-GPU checks by enabling kernel peer-to-peer access
+      require 'cutorch'  -- must be loaded before the call below
+      cutorch.setKernelPeerToPeerAccess(true)
     end
     require 'cutorch'
     require 'cunn'
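
Note (not part of the patch): a minimal standalone sketch of what the call
does, assuming a machine with at least two CUDA GPUs; tensor sizes and
values are illustrative only.

    require 'cutorch'

    -- Allow kernels to access memory allocated on a peer GPU; without this,
    -- cutorch's checkGPU assertion rejects mixed-device arguments.
    cutorch.setKernelPeerToPeerAccess(true)

    cutorch.setDevice(1)
    local a = torch.CudaTensor(4):fill(1)  -- allocated on GPU 1

    cutorch.setDevice(2)
    local b = torch.CudaTensor(4):fill(2)  -- allocated on GPU 2

    -- Mixed-device operation: without the call above this raises
    -- "Assertion `THCTensor_(checkGPU)(state, 3, self_, src1, src2)'"
    local c = torch.add(a, b)
    print(c)

Whether the kernel can actually dereference peer memory still depends on
the hardware: the two GPUs must support peer-to-peer access (e.g. sit on
the same PCIe root complex), otherwise the operation can still fail.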