From 202e13a88a0db4c440903a05dc2fe99e65da4ecc Mon Sep 17 00:00:00 2001
From: Li Xiang
Date: Wed, 21 Apr 2021 21:32:20 +0800
Subject: [PATCH 1/2] use original layer and blob names
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Use the original PyTorch layer and variable names, replacing the previous
layer-type-plus-counter naming scheme.
---
 pytorch_to_caffe.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/pytorch_to_caffe.py b/pytorch_to_caffe.py
index 3ab7154..f30a180 100755
--- a/pytorch_to_caffe.py
+++ b/pytorch_to_caffe.py
@@ -49,6 +49,7 @@ def __init__(self):
         self._blobs_data=[]
         self.cnet=caffe_net.Caffemodel('')
         self.debug=True
+        self.pytorch_layer_name=''
 
     def init(self,inputs):
         """
@@ -56,6 +57,8 @@ def init(self,inputs):
         """
         self.add_blobs(inputs)
     def add_layer(self,name='layer'):
+        name+='_'
+        name+='self.pytorch_layer_name'
         if name in self.layers:
             return self.layers[name]
         if name not in self.detail_layers.keys():
@@ -67,7 +70,9 @@ def add_layer(self,name='layer'):
             print("{} was added to layers".format(self.layers[name]))
         return self.layers[name]
 
-    def add_blobs(self, blobs,name='blob',with_num=True):
+    def add_blobs(self, blobs,name='blob',with_num=False):
+        name+='_'
+        name+='self.pytorch_layer_name'
         rst=[]
         for blob in blobs:
             self._blobs_data.append(blob) # to block the memory address be rewrited

From 31571b55e26fe4c899a318f3805e9bc604bb6e77 Mon Sep 17 00:00:00 2001
From: Li Xiang
Date: Wed, 21 Apr 2021 21:33:38 +0800
Subject: [PATCH 2/2] test and bugfix

---
 pytorch_to_caffe.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pytorch_to_caffe.py b/pytorch_to_caffe.py
index f30a180..eb524cb 100755
--- a/pytorch_to_caffe.py
+++ b/pytorch_to_caffe.py
@@ -58,7 +58,7 @@ def init(self,inputs):
         self.add_blobs(inputs)
     def add_layer(self,name='layer'):
         name+='_'
-        name+='self.pytorch_layer_name'
+        name+=self.pytorch_layer_name
         if name in self.layers:
             return self.layers[name]
         if name not in self.detail_layers.keys():
@@ -72,7 +72,7 @@ def add_layer(self,name='layer'):
 
     def add_blobs(self, blobs,name='blob',with_num=False):
         name+='_'
-        name+='self.pytorch_layer_name'
+        name+=self.pytorch_layer_name
         rst=[]
         for blob in blobs:
             self._blobs_data.append(blob) # to block the memory address be rewrited
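
Note: the patches above only add the pytorch_layer_name attribute and splice it
into the generated layer and blob names; they do not show where that attribute
gets assigned. Below is a minimal sketch of how it could be kept in sync with
the module currently being traced, assuming a `log` instance of the patched
class and a standard PyTorch model; the helper register_layer_name_hooks and
the layer_names mapping are illustrative and not part of these patches.

import torch.nn as nn

# Hypothetical glue code (not in the patches): map every submodule to its
# qualified PyTorch name and push that name into log.pytorch_layer_name
# right before the module runs, so add_layer()/add_blobs() pick it up.
def register_layer_name_hooks(net: nn.Module, log):
    layer_names = {module: name for name, module in net.named_modules()}

    def _set_current_layer_name(module, inputs):
        # Forward pre-hook: fires before module.forward(), so any op traced
        # inside the module sees the module's original dotted name.
        log.pytorch_layer_name = layer_names[module]

    for module in layer_names:
        module.register_forward_pre_hook(_set_current_layer_name)

With hooks like these registered before tracing, the names built by
add_layer()/add_blobs() carry the module's original dotted name (e.g.
"conv_features.0") rather than only the bare layer type, which is what the
first commit message describes.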