Skip to content

Commit

Permalink
Merge branch 'windows_support' of git://github.com/Basseldonk/SKU110K_code into Basseldonk-windows_support
Browse files Browse the repository at this point in the history

# Conflicts:
#	object_detector_retinanet/keras_retinanet/bin/train.py
  • Loading branch information
Eran_G committed May 27, 2019
1 parent 3c6face commit 00cb183
Show file tree
Hide file tree
Showing 2 changed files with 7 additions and 7 deletions.
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ Contributions to this project are welcome.

## Usage

Move the unzipped SKU100K folder to "$HOME"/Documents
Move the unzipped SKU110K folder to "$HOME"/Documents

Set $PYTHONPATH to the repository root

Expand All @@ -34,7 +34,7 @@ train:
(1) Train the base model:
`python -u object_detector_retinanet/keras_retinanet/bin/train.py csv`

(2) train the IoU layer:
(2) Train the IoU layer:

`python -u object_detector_retinanet/keras_retinanet/bin/train_iou.py --weights WEIGHT_FILE csv`
where WEIGHT_FILE is the full path to the h5 file from step (1)
Expand Down
10 changes: 5 additions & 5 deletions object_detector_retinanet/keras_retinanet/utils/EmMerger.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ def filter_duplicate_candidates(self, data, image):

def find_new_candidates(self, contours, heat_map, data, original_detection_centers, image):
candidates = []
for contour_i, contour in enumerate(contours[0]):
for contour_i, contour in enumerate(contours[1]):
contour_bounding_rect = cv2.boundingRect(contour)

contour_bbox = extract_boxes_from_edge_boxes(numpy.array(contour_bounding_rect))[0]
Expand Down Expand Up @@ -149,8 +149,9 @@ def find_new_candidates(self, contours, heat_map, data, original_detection_cente
cov, mu, num, roi = self.remove_redundant(contour_bbox, cov, k, mu, image, sub_heat_map)
self.set_candidates(candidates, cov, heat_map, mu, num, offset, roi, sub_heat_map)
elif (k == n):
#print (n, k, ' k==n')
self.perform_nms(candidates, contour_i, curr_data)
pass
# print n, k, ' k==n'
# self.perform_nms(candidates, contour_i, curr_data)

return candidates

Expand Down Expand Up @@ -381,8 +382,7 @@ def merge_detections(image_name, results):
# project = result_df['project'].iloc[0]
image_name = result_df['image_name'].iloc[0]
if pixel_data is None:
# pixel_data = read_image_bgr(os.path.join(root_dir(), image_name))
pixel_data = read_image_bgr(image_name)
pixel_data = read_image_bgr(os.path.join(root_dir(), image_name))

filtered_data = duplicate_merger.filter_duplicate_candidates(result_df, pixel_data)
return filtered_data
Expand Down

0 comments on commit 00cb183

Please sign in to comment.