Commit

Merge pull request #14 from skumra/release/0.2.1
Release/0.2.1

skumra committed Jul 15, 2020
2 parents 95ddfc1 + 75c939b commit 273e461
Showing 17 changed files with 205 additions and 212 deletions.
3 changes: 3 additions & 0 deletions .gitattributes
@@ -0,0 +1,3 @@
.96 filter=lfs diff=lfs merge=lfs -text
.97 filter=lfs diff=lfs merge=lfs -text
.98 filter=lfs diff=lfs merge=lfs -text
72 changes: 72 additions & 0 deletions inference/models/grasp_model.py
@@ -0,0 +1,72 @@
import torch.nn as nn
import torch.nn.functional as F


class GraspModel(nn.Module):
    """
    An abstract model for grasp network in a common format.
    """

    def __init__(self):
        super(GraspModel, self).__init__()

    def forward(self, x_in):
        raise NotImplementedError()

    def compute_loss(self, xc, yc):
        y_pos, y_cos, y_sin, y_width = yc
        pos_pred, cos_pred, sin_pred, width_pred = self(xc)

        p_loss = F.smooth_l1_loss(pos_pred, y_pos)
        cos_loss = F.smooth_l1_loss(cos_pred, y_cos)
        sin_loss = F.smooth_l1_loss(sin_pred, y_sin)
        width_loss = F.smooth_l1_loss(width_pred, y_width)

        return {
            'loss': p_loss + cos_loss + sin_loss + width_loss,
            'losses': {
                'p_loss': p_loss,
                'cos_loss': cos_loss,
                'sin_loss': sin_loss,
                'width_loss': width_loss
            },
            'pred': {
                'pos': pos_pred,
                'cos': cos_pred,
                'sin': sin_pred,
                'width': width_pred
            }
        }

    def predict(self, xc):
        pos_pred, cos_pred, sin_pred, width_pred = self(xc)
        return {
            'pos': pos_pred,
            'cos': cos_pred,
            'sin': sin_pred,
            'width': width_pred
        }


class ResidualBlock(nn.Module):
    """
    A residual block with dropout option
    """

    def __init__(self, in_channels, out_channels, kernel_size=3, dropout=False, prob=0.0):
        super(ResidualBlock, self).__init__()
        self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size, padding=1)
        self.bn1 = nn.BatchNorm2d(in_channels)
        self.conv2 = nn.Conv2d(in_channels, out_channels, kernel_size, padding=1)
        self.bn2 = nn.BatchNorm2d(in_channels)

        self.dropout = dropout
        self.dropout1 = nn.Dropout(p=prob)

    def forward(self, x_in):
        x = self.bn1(self.conv1(x_in))
        x = F.relu(x)
        if self.dropout:
            x = self.dropout1(x)
        x = self.bn2(self.conv2(x))
        return x + x_in
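
For context, the sketch below (not part of this commit) shows how the new base class is meant to be used: a network only implements forward() and returns the four prediction maps in (pos, cos, sin, width) order, while compute_loss() and predict() are inherited from GraspModel. The TinyGraspNet class, its 1x1-conv heads, and the demo at the bottom are hypothetical, and the import assumes the script is run from the repository root so that inference.models.grasp_model is on the path.

import torch
import torch.nn as nn

from inference.models.grasp_model import GraspModel


class TinyGraspNet(GraspModel):
    """Hypothetical example subclass: one 1x1-conv head per output map."""

    def __init__(self, input_channels=1):
        super(TinyGraspNet, self).__init__()
        self.pos_output = nn.Conv2d(input_channels, 1, kernel_size=1)
        self.cos_output = nn.Conv2d(input_channels, 1, kernel_size=1)
        self.sin_output = nn.Conv2d(input_channels, 1, kernel_size=1)
        self.width_output = nn.Conv2d(input_channels, 1, kernel_size=1)

    def forward(self, x_in):
        # GraspModel.compute_loss and GraspModel.predict expect this 4-tuple ordering.
        return (self.pos_output(x_in), self.cos_output(x_in),
                self.sin_output(x_in), self.width_output(x_in))


if __name__ == '__main__':
    net = TinyGraspNet(input_channels=1)
    x = torch.rand(2, 1, 224, 224)
    y = tuple(torch.rand(2, 1, 224, 224) for _ in range(4))
    lossd = net.compute_loss(x, y)   # inherited from GraspModel
    print(lossd['loss'].item(), list(lossd['pred'].keys()))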
52 changes: 2 additions & 50 deletions inference/models/grconvnet.py
@@ -1,24 +1,10 @@
import torch.nn as nn
import torch.nn.functional as F

+from inference.models.grasp_model import GraspModel, ResidualBlock
-
-class ResidualBlock(nn.Module):
-
-    def __init__(self, in_channels, out_channels, kernel_size=3):
-        super(ResidualBlock, self).__init__()
-        self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size, padding=1)
-        self.bn1 = nn.BatchNorm2d(in_channels)
-        self.conv2 = nn.Conv2d(in_channels, out_channels, kernel_size, padding=1)
-        self.bn2 = nn.BatchNorm2d(in_channels)
-
-    def forward(self, x_in):
-        x = self.bn1(self.conv1(x_in))
-        x = F.relu(x)
-        x = self.bn2(self.conv2(x))
-        return x + x_in


-class GenerativeResnet(nn.Module):
+class GenerativeResnet(GraspModel):

    def __init__(self, input_channels=1, dropout=False, prob=0.0, channel_size=32):
        super(GenerativeResnet, self).__init__()
@@ -75,37 +61,3 @@ def forward(self, x_in):
        width_output = self.width_output(self.dropout1(x))

        return pos_output, cos_output, sin_output, width_output
-
-    def compute_loss(self, xc, yc):
-        y_pos, y_cos, y_sin, y_width = yc
-        pos_pred, cos_pred, sin_pred, width_pred = self(xc)
-
-        p_loss = F.smooth_l1_loss(pos_pred, y_pos)
-        cos_loss = F.smooth_l1_loss(cos_pred, y_cos)
-        sin_loss = F.smooth_l1_loss(sin_pred, y_sin)
-        width_loss = F.smooth_l1_loss(width_pred, y_width)
-
-        return {
-            'loss': p_loss + cos_loss + sin_loss + width_loss,
-            'losses': {
-                'p_loss': p_loss,
-                'cos_loss': cos_loss,
-                'sin_loss': sin_loss,
-                'width_loss': width_loss
-            },
-            'pred': {
-                'pos': pos_pred,
-                'cos': cos_pred,
-                'sin': sin_pred,
-                'width': width_pred
-            }
-        }
-
-    def predict(self, xc):
-        pos_pred, cos_pred, sin_pred, width_pred = self(xc)
-        return {
-            'pos': pos_pred,
-            'cos': cos_pred,
-            'sin': sin_pred,
-            'width': width_pred
-        }
57 changes: 2 additions & 55 deletions inference/models/grconvnet2.py
@@ -1,29 +1,10 @@
import torch.nn as nn
import torch.nn.functional as F

+from inference.models.grasp_model import GraspModel, ResidualBlock
-
-class ResidualBlock(nn.Module):
-
-    def __init__(self, in_channels, out_channels, kernel_size=3, dropout=False, prob=0.0):
-        super(ResidualBlock, self).__init__()
-        self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size, padding=1)
-        self.bn1 = nn.BatchNorm2d(in_channels)
-        self.conv2 = nn.Conv2d(in_channels, out_channels, kernel_size, padding=1)
-        self.bn2 = nn.BatchNorm2d(in_channels)
-
-        self.dropout = dropout
-        self.dropout1 = nn.Dropout(p=prob)
-
-    def forward(self, x_in):
-        x = self.bn1(self.conv1(x_in))
-        x = F.relu(x)
-        if self.dropout:
-            x = self.dropout1(x)
-        x = self.bn2(self.conv2(x))
-        return x + x_in


-class GenerativeResnet(nn.Module):
+class GenerativeResnet(GraspModel):

    def __init__(self, input_channels=4, output_channels=1, channel_size=32, dropout=False, prob=0.0):
        super(GenerativeResnet, self).__init__()
@@ -92,37 +73,3 @@ def forward(self, x_in):
        width_output = self.width_output(x)

        return pos_output, cos_output, sin_output, width_output
-
-    def compute_loss(self, xc, yc):
-        y_pos, y_cos, y_sin, y_width = yc
-        pos_pred, cos_pred, sin_pred, width_pred = self(xc)
-
-        p_loss = F.smooth_l1_loss(pos_pred, y_pos)
-        cos_loss = F.smooth_l1_loss(cos_pred, y_cos)
-        sin_loss = F.smooth_l1_loss(sin_pred, y_sin)
-        width_loss = F.smooth_l1_loss(width_pred, y_width)
-
-        return {
-            'loss': p_loss + cos_loss + sin_loss + width_loss,
-            'losses': {
-                'p_loss': p_loss,
-                'cos_loss': cos_loss,
-                'sin_loss': sin_loss,
-                'width_loss': width_loss
-            },
-            'pred': {
-                'pos': pos_pred,
-                'cos': cos_pred,
-                'sin': sin_pred,
-                'width': width_pred
-            }
-        }
-
-    def predict(self, xc):
-        pos_pred, cos_pred, sin_pred, width_pred = self(xc)
-        return {
-            'pos': pos_pred,
-            'cos': cos_pred,
-            'sin': sin_pred,
-            'width': width_pred
-        }
52 changes: 2 additions & 50 deletions inference/models/grconvnet3.py
@@ -1,24 +1,10 @@
import torch.nn as nn
import torch.nn.functional as F

+from inference.models.grasp_model import GraspModel, ResidualBlock
-
-class ResidualBlock(nn.Module):
-
-    def __init__(self, in_channels, out_channels, kernel_size=3):
-        super(ResidualBlock, self).__init__()
-        self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size, padding=1)
-        self.bn1 = nn.BatchNorm2d(in_channels)
-        self.conv2 = nn.Conv2d(in_channels, out_channels, kernel_size, padding=1)
-        self.bn2 = nn.BatchNorm2d(in_channels)
-
-    def forward(self, x_in):
-        x = self.bn1(self.conv1(x_in))
-        x = F.relu(x)
-        x = self.bn2(self.conv2(x))
-        return x + x_in


-class GenerativeResnet(nn.Module):
+class GenerativeResnet(GraspModel):

    def __init__(self, input_channels=4, output_channels=1, channel_size=32, dropout=False, prob=0.0):
        super(GenerativeResnet, self).__init__()
@@ -87,37 +73,3 @@ def forward(self, x_in):
        width_output = self.width_output(x)

        return pos_output, cos_output, sin_output, width_output
-
-    def compute_loss(self, xc, yc):
-        y_pos, y_cos, y_sin, y_width = yc
-        pos_pred, cos_pred, sin_pred, width_pred = self(xc)
-
-        p_loss = F.smooth_l1_loss(pos_pred, y_pos)
-        cos_loss = F.smooth_l1_loss(cos_pred, y_cos)
-        sin_loss = F.smooth_l1_loss(sin_pred, y_sin)
-        width_loss = F.smooth_l1_loss(width_pred, y_width)
-
-        return {
-            'loss': p_loss + cos_loss + sin_loss + width_loss,
-            'losses': {
-                'p_loss': p_loss,
-                'cos_loss': cos_loss,
-                'sin_loss': sin_loss,
-                'width_loss': width_loss
-            },
-            'pred': {
-                'pos': pos_pred,
-                'cos': cos_pred,
-                'sin': sin_pred,
-                'width': width_pred
-            }
-        }
-
-    def predict(self, xc):
-        pos_pred, cos_pred, sin_pred, width_pred = self(xc)
-        return {
-            'pos': pos_pred,
-            'cos': cos_pred,
-            'sin': sin_pred,
-            'width': width_pred
-        }
52 changes: 2 additions & 50 deletions inference/models/grconvnet4.py
@@ -1,24 +1,10 @@
import torch.nn as nn
import torch.nn.functional as F

+from inference.models.grasp_model import GraspModel, ResidualBlock
-
-class ResidualBlock(nn.Module):
-
-    def __init__(self, in_channels, out_channels, kernel_size=3):
-        super(ResidualBlock, self).__init__()
-        self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size, padding=1)
-        self.bn1 = nn.BatchNorm2d(in_channels)
-        self.conv2 = nn.Conv2d(in_channels, out_channels, kernel_size, padding=1)
-        self.bn2 = nn.BatchNorm2d(in_channels)
-
-    def forward(self, x_in):
-        x = self.bn1(self.conv1(x_in))
-        x = F.relu(x)
-        x = self.bn2(self.conv2(x))
-        return x + x_in


-class GenerativeResnet(nn.Module):
+class GenerativeResnet(GraspModel):

    def __init__(self, input_channels=4, output_channels=1, channel_size=32, dropout=False, prob=0.0):
        super(GenerativeResnet, self).__init__()
@@ -87,37 +73,3 @@ def forward(self, x_in):
        width_output = self.width_output(x)

        return pos_output, cos_output, sin_output, width_output
-
-    def compute_loss(self, xc, yc):
-        y_pos, y_cos, y_sin, y_width = yc
-        pos_pred, cos_pred, sin_pred, width_pred = self(xc)
-
-        p_loss = F.smooth_l1_loss(pos_pred, y_pos)
-        cos_loss = F.smooth_l1_loss(cos_pred, y_cos)
-        sin_loss = F.smooth_l1_loss(sin_pred, y_sin)
-        width_loss = F.smooth_l1_loss(width_pred, y_width)
-
-        return {
-            'loss': p_loss + cos_loss + sin_loss + width_loss,
-            'losses': {
-                'p_loss': p_loss,
-                'cos_loss': cos_loss,
-                'sin_loss': sin_loss,
-                'width_loss': width_loss
-            },
-            'pred': {
-                'pos': pos_pred,
-                'cos': cos_pred,
-                'sin': sin_pred,
-                'width': width_pred
-            }
-        }
-
-    def predict(self, xc):
-        pos_pred, cos_pred, sin_pred, width_pred = self(xc)
-        return {
-            'pos': pos_pred,
-            'cos': cos_pred,
-            'sin': sin_pred,
-            'width': width_pred
-        }
2 changes: 1 addition & 1 deletion run_offline.py
@@ -16,7 +16,7 @@

def parse_args():
    parser = argparse.ArgumentParser(description='Evaluate network')
-    parser.add_argument('--network', type=str, default='cornell_rgbd_iou_0.95',
+    parser.add_argument('--network', type=str,
                        help='Path to saved network to evaluate')
    parser.add_argument('--rgb_path', type=str, default='cornell/08/pcd0845r.png',
                        help='RGB Image path')
9 changes: 5 additions & 4 deletions train_network.py
@@ -25,14 +25,14 @@ def parse_args():
    parser = argparse.ArgumentParser(description='Train network')

    # Network
-    parser.add_argument('--network', type=str, default='grconvnet',
+    parser.add_argument('--network', type=str, default='grconvnet3',
                        help='Network name in inference/models')
    parser.add_argument('--use-depth', type=int, default=1,
                        help='Use Depth image for training (1/0)')
    parser.add_argument('--use-rgb', type=int, default=1,
                        help='Use RGB image for training (1/0)')
-    parser.add_argument('--use-dropout', type=int, default=0,
-                        help='Use dropout for training (0/1)')
+    parser.add_argument('--use-dropout', type=int, default=1,
+                        help='Use dropout for training (1/0)')
    parser.add_argument('--dropout-prob', type=float, default=0.1,
                        help='Dropout prob for training (0-1)')
    parser.add_argument('--channel-size', type=int, default=32,
@@ -116,7 +116,8 @@ def validate(net, device, val_data):
            q_out, ang_out, w_out = post_process_output(lossd['pred']['pos'], lossd['pred']['cos'],
                                                        lossd['pred']['sin'], lossd['pred']['width'])

-            s = evaluation.calculate_iou_match(q_out, ang_out,
+            s = evaluation.calculate_iou_match(q_out,
+                                               ang_out,
                                               val_data.dataset.get_gtbb(didx, rot, zoom_factor),
                                               no_grasps=1,
                                               grasp_width=w_out,
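
The validate() change above still consumes the lossd dictionary produced by GraspModel.compute_loss. As a rough sketch (not the repository's actual training loop), a single optimisation step over any GraspModel subclass could look like the following; the train_step name and the Adam optimiser wiring are illustrative assumptions, not something this commit adds.

import torch


def train_step(net, optimizer, xc, yc):
    """One gradient step for a GraspModel subclass; yc = (pos, cos, sin, width) targets."""
    lossd = net.compute_loss(xc, yc)   # {'loss': ..., 'losses': {...}, 'pred': {...}}
    optimizer.zero_grad()
    lossd['loss'].backward()           # sum of the four Smooth-L1 terms
    optimizer.step()
    return lossd['losses']             # per-head losses, e.g. for logging


# Hypothetical wiring, reusing TinyGraspNet from the earlier sketch:
# net = TinyGraspNet(input_channels=1)
# optimizer = torch.optim.Adam(net.parameters(), lr=1e-3)
# losses = train_step(net, optimizer, x_batch, (y_pos, y_cos, y_sin, y_width))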
