@@ -13,7 +13,7 @@ import libs.vision_libs.models.detection._utils as det_utils
 from collections import OrderedDict

 from models.line_detect.heads.head_losses import point_inference, compute_point_loss, line_iou_loss, \
-    lines_point_pair_loss, features_align, line_inference, compute_mask_loss, arc_inference, compute_circle_loss, \
+    lines_point_pair_loss, features_align, line_inference, compute_ins_loss, ins_inference, compute_circle_loss, \
     circle_inference
@@ -1057,8 +1057,8 @@ class RoIHeads(nn.Module):
                 if feature_logits is not None:
                     lines_probs, lines_scores = line_inference(feature_logits,line_proposals)
-                    for keypoint_prob, kps, r in zip(lines_probs, lines_scores, result):
-                        r["lines"] = keypoint_prob
+                    for masks, kps, r in zip(lines_probs, lines_scores, result):
+                        r["lines"] = masks
                         r["lines_scores"] = kps
@@ -1179,8 +1179,8 @@ class RoIHeads(nn.Module):
                 if feature_logits is not None:
                     points_probs, points_scores = point_inference(feature_logits,point_proposals)
-                    for keypoint_prob, kps, r in zip(points_probs, points_scores, result):
-                        r["points"] = keypoint_prob
+                    for masks, kps, r in zip(points_probs, points_scores, result):
+                        r["points"] = masks
                         r["points_scores"] = kps

             print(f'loss_point:{loss_point}')
@@ -1258,7 +1258,7 @@ class RoIHeads(nn.Module):
                 # if gt_arcs_tensor.shape[0] > 0:
                 # print(f'start to compute point_loss')
                 if len(gt_arcs) > 0 and feature_logits is not None:
-                    loss_arc = compute_mask_loss(feature_logits, arc_proposals, gt_arcs, arc_pos_matched_idxs)
+                    loss_arc = compute_ins_loss(feature_logits, arc_proposals, gt_arcs, arc_pos_matched_idxs)
                 if loss_arc is None:
                     print(f'loss_arc is None111')
@@ -1286,7 +1286,7 @@ class RoIHeads(nn.Module):
                 if len(gt_arcs) > 0 and feature_logits is not None:
                     print(f'start to compute arc_loss')
-                    loss_arc = compute_mask_loss(feature_logits, arc_proposals, gt_arcs, arc_pos_matched_idxs)
+                    loss_arc = compute_ins_loss(feature_logits, arc_proposals, gt_arcs, arc_pos_matched_idxs)
                 if loss_arc is None:
@@ -1309,8 +1309,8 @@ class RoIHeads(nn.Module):
                 if feature_logits is not None and arc_proposals is not None:
-                    arcs_probs, arcs_scores, arcs_point = arc_inference(feature_logits,arc_proposals, th=0)
-                    for keypoint_prob, kps, kp, r in zip(arcs_probs, arcs_scores, arcs_point, result):
+                    arcs_probs, arcs_scores, arcs_point = ins_inference(feature_logits, arc_proposals, th=0)
+                    for masks, kps, kp, r in zip(arcs_probs, arcs_scores, arcs_point, result):
                         # r["arcs"] = keypoint_prob
                         r["arcs"] = feature_logits
                         r["arcs_scores"] = kps
@@ -1324,8 +1324,8 @@ class RoIHeads(nn.Module):
         if self.has_ins and self.detect_ins:
             print(f'roi_heads forward has_circle()!!!!')
             # print(f'labels:{labels}')
-            circle_proposals = [p["boxes"] for p in result]
-            print(f'boxes_proposals:{len(circle_proposals)}')
+            ins_proposals = [p["boxes"] for p in result]
+            print(f'boxes_proposals:{len(ins_proposals)}')
             # if line_proposals is None or len(line_proposals) == 0:
             #     # Return empty features or skip this part of the computation
@@ -1335,129 +1335,129 @@ class RoIHeads(nn.Module):
                 # during training, only focus on positive boxes
                 num_images = len(proposals)
                 print(f'num_images:{num_images}')
-                circle_proposals = []
-                circle_pos_matched_idxs = []
+                ins_proposals = []
+                ins_pos_matched_idxs = []
                 if matched_idxs is None:
                     raise ValueError("if in trainning, matched_idxs should not be None")
                 for img_id in range(num_images):
                     circle_pos = torch.where(labels[img_id] == 4)[0]
-                    circle_proposals.append(proposals[img_id][circle_pos])
-                    circle_pos_matched_idxs.append(matched_idxs[img_id][circle_pos])
+                    ins_proposals.append(proposals[img_id][circle_pos])
+                    ins_pos_matched_idxs.append(matched_idxs[img_id][circle_pos])
             else:
                 if targets is not None:
                     num_images = len(proposals)
-                    circle_proposals = []
+                    ins_proposals = []
-                    circle_pos_matched_idxs = []
+                    ins_pos_matched_idxs = []
                     print(f'val num_images:{num_images}')
                     if matched_idxs is None:
                         raise ValueError("if in trainning, matched_idxs should not be None")

                     for img_id in range(num_images):
                         circle_pos = torch.where(labels[img_id] == 4)[0]
-                        circle_proposals.append(proposals[img_id][circle_pos])
-                        circle_pos_matched_idxs.append(matched_idxs[img_id][circle_pos])
+                        ins_proposals.append(proposals[img_id][circle_pos])
+                        ins_pos_matched_idxs.append(matched_idxs[img_id][circle_pos])
                 else:
                     pos_matched_idxs = None

             # circle_proposals_tensor=torch.cat(circle_proposals)
-            circle_proposals_valid = self.check_proposals(circle_proposals)
+            ins_proposals_valid = self.check_proposals(ins_proposals)
-            if circle_proposals_valid:
+            if ins_proposals_valid:
                 print(f'features from backbone:{features['0'].shape}')
-                feature_logits = self.ins_forward1(features, image_shapes, circle_proposals)
+                feature_logits = self.ins_forward1(features, image_shapes, ins_proposals)

-            loss_circle = None
-            loss_circle_extra=None
+            loss_ins = None
+            loss_ins_extra=None
             if self.training:
-                if targets is None or circle_pos_matched_idxs is None:
+                if targets is None or ins_pos_matched_idxs is None:
                     raise ValueError("both targets and pos_matched_idxs should not be None when in training mode")
-                gt_circles = [t["circle_masks"] for t in targets if "circle_masks" in t]
+                gt_inses = [t["circle_masks"] for t in targets if "circle_masks" in t]
                 gt_labels = [t["labels"] for t in targets]
-                print(f'gt_circle:{gt_circles[0].shape}')
+                print(f'gt_ins:{gt_inses[0].shape}')
                 h, w = targets[0]["img_size"]
                 img_size = h
-                gt_circles_tensor = torch.zeros(0, 0)
-                if len(gt_circles) > 0:
-                    gt_circles_tensor = torch.cat(gt_circles)
-                    print(f'gt_circles_tensor:{gt_circles_tensor.shape}')
+                gt_ins_tensor = torch.zeros(0, 0)
+                if len(gt_inses) > 0:
+                    gt_ins_tensor = torch.cat(gt_inses)
+                    print(f'gt_ins_tensor:{gt_ins_tensor.shape}')

-                if gt_circles_tensor.shape[0] > 0:
+                if gt_ins_tensor.shape[0] > 0:
                     print(f'start to compute circle_loss')
-                    loss_circle = compute_mask_loss(feature_logits, circle_proposals, gt_circles, circle_pos_matched_idxs)
+                    loss_ins = compute_ins_loss(feature_logits, ins_proposals, gt_inses, ins_pos_matched_idxs)
-                    # loss_circle_extra=compute_circle_extra_losses(feature_logits, circle_proposals, gt_circles, circle_pos_matched_idxs)
+                    # loss_ins_extra=compute_circle_extra_losses(feature_logits, circle_proposals, gt_circles, circle_pos_matched_idxs)

-                if loss_circle is None:
-                    print(f'loss_circle is None111')
-                    loss_circle = torch.tensor(0.0, device=device)
+                if loss_ins is None:
+                    print(f'loss_ins is None111')
+                    loss_ins = torch.tensor(0.0, device=device)

-                if loss_circle_extra is None:
-                    print(f'loss_circle_extra is None111')
-                    loss_circle_extra = torch.tensor(0.0, device=device)
+                if loss_ins_extra is None:
+                    print(f'loss_ins_extra is None111')
+                    loss_ins_extra = torch.tensor(0.0, device=device)

-                loss_circle = {"loss_circle": loss_circle}
-                loss_circle_extra = {"loss_circle_extra": loss_circle_extra}
+                loss_ins = {"loss_ins": loss_ins}
+                loss_ins_extra = {"loss_ins_extra": loss_ins_extra}
             else:
                 if targets is not None:
                     h, w = targets[0]["img_size"]
                     img_size = h
-                    gt_circles = [t["circle_masks"] for t in targets if "circle_masks" in t]
+                    gt_inses = [t["circle_masks"] for t in targets if "circle_masks" in t]
                     gt_labels = [t["labels"] for t in targets]
-                    gt_circles_tensor = torch.zeros(0, 0)
-                    if len(gt_circles) > 0:
-                        gt_circles_tensor = torch.cat(gt_circles)
-                        print(f'gt_circles_tensor:{gt_circles_tensor.shape}')
+                    gt_ins_tensor = torch.zeros(0, 0)
+                    if len(gt_inses) > 0:
+                        gt_ins_tensor = torch.cat(gt_inses)
+                        print(f'gt_ins_tensor:{gt_ins_tensor.shape}')

-                    if gt_circles_tensor.shape[0] > 0:
+                    if gt_ins_tensor.shape[0] > 0:
                         print(f'start to compute circle_loss')
-                        loss_circle = compute_mask_loss(feature_logits, circle_proposals, gt_circles,
-                                                        circle_pos_matched_idxs)
+                        loss_ins = compute_ins_loss(feature_logits, ins_proposals, gt_inses,
+                                                    ins_pos_matched_idxs)
-                        # loss_circle_extra = compute_circle_extra_losses(feature_logits, circle_proposals, gt_circles,circle_pos_matched_idxs)
+                        # loss_ins_extra = compute_circle_extra_losses(feature_logits, circle_proposals, gt_circles,circle_pos_matched_idxs)

-                    if loss_circle is None:
-                        print(f'loss_circle is None111')
-                        loss_circle = torch.tensor(0.0, device=device)
+                    if loss_ins is None:
+                        print(f'loss_ins is None111')
+                        loss_ins = torch.tensor(0.0, device=device)

-                    if loss_circle_extra is None:
-                        print(f'loss_circle_extra is None111')
-                        loss_circle_extra = torch.tensor(0.0, device=device)
+                    if loss_ins_extra is None:
+                        print(f'loss_ins_extra is None111')
+                        loss_ins_extra = torch.tensor(0.0, device=device)

-                    loss_circle = {"loss_circle": loss_circle}
-                    loss_circle_extra = {"loss_circle_extra": loss_circle_extra}
+                    loss_ins = {"loss_ins": loss_ins}
+                    loss_ins_extra = {"loss_ins_extra": loss_ins_extra}
                 else:
-                    loss_circle = {}
-                    loss_circle_extra = {}
-                    if feature_logits is None or circle_proposals is None:
+                    loss_ins = {}
+                    loss_ins_extra = {}
+                    if feature_logits is None or ins_proposals is None:
                         raise ValueError(
                             "both keypoint_logits and keypoint_proposals should not be None when not in training mode"
                         )

                 if feature_logits is not None:
-                    circles_probs, circles_scores, circle_points = arc_inference(feature_logits,
-                                                                                 circle_proposals, th=0)
+                    ins_masks, ins_scores, circle_points = ins_inference(feature_logits,
+                                                                         ins_proposals, th=0)
                     # print(f'circles_probs:{circles_probs.shape}, circles_scores:{circles_scores.shape}')
-                    proposals_per_image = [box.size(0) for box in circle_proposals]
-                    print(f'circle_proposals_per_image:{proposals_per_image}')
+                    proposals_per_image = [box.size(0) for box in ins_proposals]
+                    print(f'ins_proposals_per_image:{proposals_per_image}')
                     feature_logits_props = []
                     start_idx = 0
                     for num_p in proposals_per_image:
@@ -1466,17 +1466,18 @@ class RoIHeads(nn.Module):
                         feature_logits_props.append(merged_feature)
                         start_idx += num_p

-                    for keypoint_prob, kps, r, f in zip(circles_probs, circles_scores, result,
+                    for masks, kps, r, f in zip(ins_masks, ins_scores, result,
                                                 feature_logits_props):
-                        r["circles"] = keypoint_prob
-                        r["circles_scores"] = kps
-                        print(f'circles feature map:{f.shape}')
+                        r["ins_masks"] = masks
+                        r["ins_scores"] = kps
+                        print(f'ins feature map:{f.shape}')
                         r["features"] = f.squeeze(0)

-            print(f'loss_circle:{loss_circle}')
-            print(f'loss_circle_extra:{loss_circle_extra}')
-            losses.update(loss_circle)
-            losses.update(loss_circle_extra)
+
+            print(f'loss_ins:{loss_ins}')
+            print(f'loss_ins_extra:{loss_ins_extra}')
+            losses.update(loss_ins)
+            losses.update(loss_ins_extra)
             print(f'losses:{losses}')
@@ -1562,8 +1563,8 @@ class RoIHeads(nn.Module):
                     )

                 keypoints_probs, lines_scores = keypointrcnn_inference(keypoint_logits, keypoint_proposals)
-                for keypoint_prob, kps, r in zip(keypoints_probs, lines_scores, result):
-                    r["keypoints"] = keypoint_prob
+                for masks, kps, r in zip(keypoints_probs, lines_scores, result):
+                    r["keypoints"] = masks
                     r["keypoints_scores"] = kps
             losses.update(loss_keypoint)