|
|
@@ -580,38 +580,54 @@ def heatmaps_to_lines(maps, rois):
|
|
|
# consistency with keypoints_to_heatmap_labels by using the conversion from
|
|
|
# Heckbert 1990: c = d + 0.5, where d is a discrete coordinate and c is a
|
|
|
# continuous coordinate.
|
|
|
- xy_preds = torch.zeros((len(rois), 3, 2), dtype=torch.float32, device=maps.device)
|
|
|
- end_scores = torch.zeros((len(rois), 2), dtype=torch.float32, device=maps.device)
|
|
|
+ line_preds = torch.zeros((len(rois), 3, 2), dtype=torch.float32, device=maps.device)
|
|
|
+ line_end_scores = torch.zeros((len(rois), 2), dtype=torch.float32, device=maps.device)
|
|
|
|
|
|
- for i in range(len(rois)):
|
|
|
- roi_map = maps[i]
|
|
|
+ point_preds = torch.zeros((len(rois), 2), dtype=torch.float32, device=maps.device)
|
|
|
+ point_end_scores = torch.zeros((len(rois), 1), dtype=torch.float32, device=maps.device)
|
|
|
|
|
|
- print(f'roi_map:{roi_map.shape}')
|
|
|
- # roi_map_probs = scores_to_probs(roi_map.copy())
|
|
|
- w = roi_map.shape[2]
|
|
|
- flatten_map = non_maximum_suppression(roi_map).reshape(1, -1)
|
|
|
- score, index = torch.topk(flatten_map, k=2)
|
|
|
+ print(f'heatmaps_to_lines:{maps.shape}')
|
|
|
+ point_maps=maps[:,0]
|
|
|
+ line_maps=maps[:,1]
|
|
|
|
|
|
- print(f'index:{index}')
|
|
|
+ print(f'point_map:{point_maps.shape}')
|
|
|
+ for i in range(len(rois)):
|
|
|
+ line_roi_map = line_maps[i].unsqueeze(0)
|
|
|
|
|
|
+ print(f'line_roi_map:{line_roi_map.shape}')
|
|
|
+ # roi_map_probs = scores_to_probs(roi_map.copy())
|
|
|
+ w = line_roi_map.shape[2]
|
|
|
+ flatten_line_roi_map = non_maximum_suppression(line_roi_map).reshape(1, -1)
|
|
|
+ line_score, line_index = torch.topk(flatten_line_roi_map, k=2)
|
|
|
+ print(f'line index:{line_index}')
|
|
|
# pos = roi_map.reshape(num_keypoints, -1).argmax(dim=1)
|
|
|
+ pos = line_index
|
|
|
+ line_x = pos % w
|
|
|
+ line_y = torch.div(pos - line_x, w, rounding_mode="floor")
|
|
|
+ line_preds[i, 0, :] = line_x
|
|
|
+ line_preds[i, 1, :] = line_y
|
|
|
+ line_preds[i, 2, :] = 1
|
|
|
+ line_end_scores[i, :] = line_roi_map[torch.arange(1, device=line_roi_map.device), line_y, line_x]
|
|
|
|
|
|
- pos = index
|
|
|
+ point_roi_map = point_maps[i].unsqueeze(0)
|
|
|
|
|
|
- # x_int = pos % w
|
|
|
- #
|
|
|
- # y_int = torch.div(pos - x_int, w, rounding_mode="floor")
|
|
|
+ print(f'point_roi_map:{point_roi_map.shape}')
|
|
|
+ # roi_map_probs = scores_to_probs(roi_map.copy())
|
|
|
+ w = point_roi_map.shape[2]
|
|
|
+ flatten_point_roi_map = non_maximum_suppression(point_roi_map).reshape(1, -1)
|
|
|
+ point_score, point_index = torch.topk(flatten_point_roi_map, k=1)
|
|
|
+ print(f'point index:{point_index}')
|
|
|
+ # pos = roi_map.reshape(num_keypoints, -1).argmax(dim=1)
|
|
|
|
|
|
- x = pos % w
|
|
|
+ point_x = point_index % w
|
|
|
+ point_y = torch.div(point_index - point_x, w, rounding_mode="floor")
|
|
|
+ point_preds[i, 0,] = point_x
|
|
|
+ point_preds[i, 1,] = point_y
|
|
|
|
|
|
- y = torch.div(pos - x, w, rounding_mode="floor")
|
|
|
+ point_end_scores[i, :] = point_roi_map[torch.arange(1, device=point_roi_map.device), point_y, point_x]
|
|
|
|
|
|
- xy_preds[i, 0, :] = x
|
|
|
- xy_preds[i, 1, :] = y
|
|
|
- xy_preds[i, 2, :] = 1
|
|
|
- end_scores[i, :] = roi_map[torch.arange(1, device=roi_map.device), y, x]
|
|
|
|
|
|
- return xy_preds.permute(0, 2, 1), end_scores
|
|
|
+ return line_preds.permute(0, 2, 1), line_end_scores,point_preds,point_end_scores
|
|
|
|
|
|
|
|
|
def lines_features_align(features, proposals, img_size):
|
|
|
@@ -630,7 +646,7 @@ def lines_features_align(features, proposals, img_size):
|
|
|
align_feat[:, :, y1:y2 + 1, x1:x2 + 1] = feat[:, :, y1:y2 + 1, x1:x2 + 1]
|
|
|
align_feat_list.append(align_feat)
|
|
|
|
|
|
- print(f'align_feat_list:{align_feat_list}')
|
|
|
+ # print(f'align_feat_list:{align_feat_list}')
|
|
|
feats_tensor = torch.cat(align_feat_list)
|
|
|
|
|
|
print(f'align features :{feats_tensor.shape}')
|
|
|
@@ -702,7 +718,7 @@ def compute_point_loss(line_logits, proposals, gt_points, point_matched_idxs):
|
|
|
discretization_size = H
|
|
|
|
|
|
gs_heatmaps = []
|
|
|
- print(f'point_matched_idxs:{point_matched_idxs}')
|
|
|
+ # print(f'point_matched_idxs:{point_matched_idxs}')
|
|
|
for proposals_per_image, gt_kp_in_image, midx in zip(proposals, gt_points, point_matched_idxs):
|
|
|
print(f'proposals_per_image:{proposals_per_image.shape}')
|
|
|
kp = gt_kp_in_image[midx]
|
|
|
@@ -834,8 +850,13 @@ def line_iou_loss(x, boxes, gt_lines, matched_idx, img_size=511, alpha=1.0, beta
|
|
|
return torch.mean(torch.cat(losses))
|
|
|
|
|
|
|
|
|
+
|
|
|
+
|
|
|
def line_inference(x, boxes):
|
|
|
# type: (Tensor, List[Tensor]) -> Tuple[List[Tensor], List[Tensor]]
|
|
|
+ lines_probs = []
|
|
|
+ lines_scores = []
|
|
|
+
|
|
|
points_probs = []
|
|
|
points_scores = []
|
|
|
|
|
|
@@ -843,11 +864,14 @@ def line_inference(x, boxes):
|
|
|
x2 = x.split(boxes_per_image, dim=0)
|
|
|
|
|
|
for xx, bb in zip(x2, boxes):
|
|
|
- p_prob, scores = heatmaps_to_lines(xx, bb)
|
|
|
- points_probs.append(p_prob)
|
|
|
- points_scores.append(scores)
|
|
|
+ line_prob, line_scores,point_prob,point_scores = heatmaps_to_lines(xx, bb)
|
|
|
+ lines_probs.append(line_prob)
|
|
|
+ lines_scores.append(line_scores)
|
|
|
+
|
|
|
+ points_probs.append(point_prob)
|
|
|
+ points_scores.append(point_scores)
|
|
|
|
|
|
- return points_probs, points_scores
|
|
|
+ return lines_probs, lines_scores,points_probs,points_scores
|
|
|
|
|
|
|
|
|
def keypointrcnn_loss(keypoint_logits, proposals, gt_keypoints, keypoint_matched_idxs):
|
|
|
@@ -1376,8 +1400,9 @@ class RoIHeads(nn.Module):
|
|
|
|
|
|
if self.has_line():
|
|
|
print(f'roi_heads forward has_line()!!!!')
|
|
|
- print(f'labels:{labels}')
|
|
|
+ # print(f'labels:{labels}')
|
|
|
line_proposals = [p["boxes"] for p in result]
|
|
|
+ point_proposals = [p["boxes"] for p in result]
|
|
|
print(f'boxes_proposals:{len(line_proposals)}')
|
|
|
|
|
|
# if line_proposals is None or len(line_proposals) == 0:
|
|
|
@@ -1431,8 +1456,8 @@ class RoIHeads(nn.Module):
|
|
|
# line_proposals.append(proposals[img_id][pos])
|
|
|
# pos_matched_idxs.append(matched_idxs[img_id][pos])
|
|
|
|
|
|
- line_pos = torch.where(labels[img_id].item() == 2)[0]
|
|
|
- point_pos = torch.where(labels[img_id].item() == 1)[0]
|
|
|
+ line_pos = torch.where(labels[img_id] == 2)[0]
|
|
|
+ point_pos = torch.where(labels[img_id] == 1)[0]
|
|
|
|
|
|
line_proposals.append(proposals[img_id][line_pos])
|
|
|
point_proposals.append(proposals[img_id][point_pos])
|
|
|
@@ -1471,7 +1496,7 @@ class RoIHeads(nn.Module):
|
|
|
|
|
|
loss_line = {}
|
|
|
loss_line_iou = {}
|
|
|
- model_loss_point = {}
|
|
|
+ loss_point = {}
|
|
|
if self.training:
|
|
|
|
|
|
if targets is None or pos_matched_idxs is None:
|
|
|
@@ -1497,7 +1522,7 @@ class RoIHeads(nn.Module):
|
|
|
loss_line_iou = line_iou_loss(line_logits, line_proposals, gt_lines, line_pos_matched_idxs, img_size)
|
|
|
|
|
|
if gt_points_tensor.shape[0]>0:
|
|
|
- model_loss_point = compute_point_loss(
|
|
|
+ loss_point = compute_point_loss(
|
|
|
line_logits, point_proposals, gt_points, point_pos_matched_idxs
|
|
|
)
|
|
|
|
|
|
@@ -1509,7 +1534,7 @@ class RoIHeads(nn.Module):
|
|
|
|
|
|
loss_line = {"loss_line": loss_line}
|
|
|
loss_line_iou = {'loss_line_iou': loss_line_iou}
|
|
|
- loss_point = {"loss_point": model_loss_point}
|
|
|
+ loss_point = {"loss_point": loss_point}
|
|
|
|
|
|
else:
|
|
|
if targets is not None:
|
|
|
@@ -1517,15 +1542,20 @@ class RoIHeads(nn.Module):
|
|
|
img_size = h
|
|
|
gt_lines = [t["lines"] for t in targets]
|
|
|
gt_points = [t["points"] for t in targets]
|
|
|
+ gt_lines_tensor = torch.cat(gt_lines)
|
|
|
+ gt_points_tensor = torch.cat(gt_points)
|
|
|
|
|
|
- loss_line = lines_point_pair_loss(
|
|
|
- line_logits, line_proposals, gt_lines, line_pos_matched_idxs
|
|
|
- )
|
|
|
- loss_line_iou = line_iou_loss(line_logits, line_proposals, gt_lines, line_pos_matched_idxs, img_size)
|
|
|
+ if gt_lines_tensor.shape[0] > 0:
|
|
|
+ loss_line = lines_point_pair_loss(
|
|
|
+ line_logits, line_proposals, gt_lines, line_pos_matched_idxs
|
|
|
+ )
|
|
|
+ loss_line_iou = line_iou_loss(line_logits, line_proposals, gt_lines, line_pos_matched_idxs,
|
|
|
+ img_size)
|
|
|
|
|
|
- model_loss_point = compute_point_loss(
|
|
|
- line_logits, point_proposals, gt_points, point_pos_matched_idxs
|
|
|
- )
|
|
|
+ if gt_points_tensor.shape[0] > 0:
|
|
|
+ loss_point = compute_point_loss(
|
|
|
+ line_logits, point_proposals, gt_points, point_pos_matched_idxs
|
|
|
+ )
|
|
|
|
|
|
if not loss_line :
|
|
|
loss_line=torch.tensor(0.0,device=line_features.device)
|
|
|
@@ -1535,7 +1565,7 @@ class RoIHeads(nn.Module):
|
|
|
|
|
|
loss_line = {"loss_line": loss_line}
|
|
|
loss_line_iou = {'loss_line_iou': loss_line_iou}
|
|
|
- loss_point={"loss_point":model_loss_point}
|
|
|
+ loss_point={"loss_point":loss_point}
|
|
|
|
|
|
|
|
|
else:
|
|
|
@@ -1544,11 +1574,13 @@ class RoIHeads(nn.Module):
|
|
|
"both keypoint_logits and keypoint_proposals should not be None when not in training mode"
|
|
|
)
|
|
|
|
|
|
- lines_probs, kp_scores = line_inference(line_logits, line_proposals)
|
|
|
+ lines_probs, lines_scores,point_probs,points_scores = line_inference(line_logits, line_proposals)
|
|
|
|
|
|
- for keypoint_prob, kps, r in zip(lines_probs, kp_scores, result):
|
|
|
+ for keypoint_prob, kps, points,ps,r in zip(lines_probs, lines_scores,point_probs,points_scores, result):
|
|
|
r["lines"] = keypoint_prob
|
|
|
r["liness_scores"] = kps
|
|
|
+ r["points"] = points
|
|
|
+ r["points_scores"] = ps
|
|
|
|
|
|
losses.update(loss_line)
|
|
|
losses.update(loss_line_iou)
|
|
|
@@ -1636,8 +1668,8 @@ class RoIHeads(nn.Module):
|
|
|
"both keypoint_logits and keypoint_proposals should not be None when not in training mode"
|
|
|
)
|
|
|
|
|
|
- keypoints_probs, kp_scores = keypointrcnn_inference(keypoint_logits, keypoint_proposals)
|
|
|
- for keypoint_prob, kps, r in zip(keypoints_probs, kp_scores, result):
|
|
|
+ keypoints_probs, lines_scores = keypointrcnn_inference(keypoint_logits, keypoint_proposals)
|
|
|
+ for keypoint_prob, kps, r in zip(keypoints_probs, lines_scores, result):
|
|
|
r["keypoints"] = keypoint_prob
|
|
|
r["keypoints_scores"] = kps
|
|
|
losses.update(loss_keypoint)
|