60 lines
1.6 KiB
Python
Executable File
60 lines
1.6 KiB
Python
Executable File
import cv2
|
|
import numpy as np
|
|
import pickle as pickle
|
|
import os
|
|
|
|
def resize(max_side, img, interpolation=None):
    """Scale ``img`` so its longer side equals ``max_side``, keeping aspect ratio.

    Parameters
    ----------
    max_side : int
        Target length (in pixels) of the image's longer side.
    img : numpy.ndarray
        Image as returned by ``cv2.imread`` (shape is (height, width[, channels])).
    interpolation : int, optional
        OpenCV interpolation flag; defaults to ``cv2.INTER_AREA`` (good for
        downscaling). Added as a backward-compatible extension point.

    Returns
    -------
    numpy.ndarray
        The resized image.
    """
    h, w = img.shape[:2]
    if w > h:
        r = max_side / w
        # Clamp to >= 1 px so extreme aspect ratios cannot produce a
        # zero-sized dimension, which cv2.resize rejects.
        dim = (max_side, max(1, int(h * r)))
    else:
        r = max_side / h
        dim = (max(1, int(w * r)), max_side)

    if interpolation is None:
        interpolation = cv2.INTER_AREA

    # perform the actual resizing of the image
    return cv2.resize(img, dim, interpolation=interpolation)
|
|
|
|
|
|
def pickle_keypoints(keypoints, descriptors):
    """Convert cv2.KeyPoint objects into plain picklable tuples.

    cv2.KeyPoint instances cannot be pickled directly, so each keypoint is
    flattened into a tuple of its attributes, with its matching descriptor
    row appended as the last element.

    Parameters
    ----------
    keypoints : sequence of cv2.KeyPoint
        Detected keypoints.
    descriptors : sequence
        Descriptor rows, aligned index-for-index with ``keypoints``.

    Returns
    -------
    list of tuple
        One ``(pt, size, angle, response, octave, class_id, descriptor)``
        tuple per keypoint, consumable by ``unpickle_keypoints``.
    """
    serialized = []
    for idx, kp in enumerate(keypoints):
        serialized.append(
            (kp.pt, kp.size, kp.angle, kp.response, kp.octave,
             kp.class_id, descriptors[idx])
        )
    return serialized
|
|
|
|
|
|
def unpickle_keypoints(array):
    """Rebuild cv2.KeyPoint objects from tuples made by ``pickle_keypoints``.

    Parameters
    ----------
    array : iterable of tuple
        Each entry is ``(pt, size, angle, response, octave, class_id,
        descriptor)``, where ``pt`` is an ``(x, y)`` pair.

    Returns
    -------
    tuple
        ``(keypoints, descriptors)`` — a list of cv2.KeyPoint and a
        numpy array stacking the descriptor rows.
    """
    keypoints = []
    descriptors = []
    for (x, y), size, angle, response, octave, class_id, desc in array:
        keypoints.append(
            cv2.KeyPoint(x=x, y=y, size=size, angle=angle,
                         response=response, octave=octave,
                         class_id=class_id)
        )
        descriptors.append(desc)
    return keypoints, np.array(descriptors)
|
|
|
|
|
|
def load(lf_path):
    """Load a pickled local-feature dataset from ``lf_path``.

    Parameters
    ----------
    lf_path : str
        Path to a file written by ``save``.

    Returns
    -------
    list of tuple
        One ``(keypoints, descriptors)`` pair per image, as produced by
        ``unpickle_keypoints``.
    """
    print('loading LF dataset ' + lf_path)
    # SECURITY: pickle.load executes arbitrary code during deserialization;
    # only load dataset files from trusted sources.
    # 'with' guarantees the file handle is closed (the original leaked it).
    with open(lf_path, "rb") as f:
        ser_dataset = pickle.load(f)
    return [unpickle_keypoints(item) for item in ser_dataset]
|
|
|
|
|
|
def save(lf_data, lf_path):
    """Serialize a local-feature dataset to ``lf_path`` with pickle.

    Parameters
    ----------
    lf_data : iterable of tuple
        ``(keypoints, descriptors)`` pairs (the format returned by ``load``).
    lf_path : str
        Destination file path; overwritten if it exists.
    """
    data = [pickle_keypoints(lf[0], lf[1]) for lf in lf_data]
    # 'with' guarantees the handle is flushed and closed even on error
    # (the original relied on garbage collection to close the file).
    with open(lf_path, 'wb') as f:
        pickle.dump(data, f)
|
|
|
|
|
|
|
|
|