Mirror of https://github.com/EyeTrackVR/EyeTrackVR.git (synced 2025-11-04 14:39:42 +08:00)

Merge branch 'HSF-and-new-algos-feature-branch' of https://github.com/RedHawk989/EyeTrackVR into HSF-and-new-algos-feature-branch

Commit fdb4e7eb1f
@@ -265,28 +265,26 @@ class DADDY_cls(object):
        kps = pred.astype(np.int32)

        # eyecenter = kps[:6].mean(axis=0).astype(int)
        # temp_eyecenter = pred[:6].mean(axis=0)
        ear = self.beer.ear(pred)
        # ear = self.ear_oef(ear[np.newaxis])  # memo: Parameters need tuning

        if save_video or imshow_enable:
            pupil_center = pred[7:].mean(axis=0)
            pupil_center_x = int(pupil_center[0])
            pupil_center_y = int(pupil_center[1])

            for i in range(kps.shape[0]):
                if i < 6:
                    color = (0, 0, 255)
                elif i == 6:
                    color = 128  # (0, 255, 0)
                    color = 128
                else:
                    color = (255, 0, 0)
                cv2.circle(gray_frame, (kps[i, 0], kps[i, 1]), 1, color, 2)
                cv2.putText(gray_frame, str(i), (kps[i, 0] - 10, kps[i, 1] - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.35, (0, 0, 255), 1)
            # cv2.circle(gray_frame, (eyecenter.tolist()), 1, (128, 128, 0), 2)
            # cv2.circle(gray_frame, (iriscenter.tolist()), 1, (0, 128, 128), 2)
            # cv2.circle(gray_frame, (eyecenter.tolist()), int(self.center_dist_med.median()), (0, 0, 255), 1)
            # cv2.ellipse(gray_frame, (eyecenter.tolist(), (int(self.iris_med_w.median() * 2.5), int(self.iris_med_h.median() * 2.5)), np.rad2deg(self.eye_med_rad.median())), (255, 0, 0), thickness=1)
            cv2.putText(gray_frame, "EAR: " + str(ear), (self.current_image_gray.shape[1] // 10, self.current_image_gray.shape[0] // 10), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (255, 0, 0), 1)
            cv2.imshow("gray_frame", gray_frame)
            if cv2.waitKey(1) & 0xFF == ord("q"):
                pass
            # todo: We should have a proper variable for drawing.
            cv2.circle(self.current_image_gray, (kps[i, 0], kps[i, 1]), 1, color, 2)
            # cv2.putText(self.current_image_gray, str(i), (kps[i, 0] - 10, kps[i, 1] - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.35, (0, 0, 255), 1)
            # cv2.putText(self.current_image_gray, "EAR: " + str(ear), (self.current_image_gray.shape[1] // 10, self.current_image_gray.shape[0] // 10), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (255, 0, 0), 1)


        # global loopnum
        # if loopnum < 1350*2:
        #     # self.video.write(cv2.resize(gray_frame.copy(), (200, 150), None))
@@ -302,7 +300,8 @@ class DADDY_cls(object):
        # i == [0:6] = Inner and outer corners of eyes and eyelids
        # i == [6] = pupil
        # i == [7:] = iris
        return ear

        return pupil_center_x, pupil_center_y, ear
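The index comments above describe the landmark layout the model predicts: indices 0-5 are the eye corners and eyelid points, index 6 is the pupil, and indices 7 onward are iris points. As a rough illustration of how such a layout can be consumed, here is a minimal sketch with hypothetical helper names; the EAR below is the classic 6-point formulation and its point ordering is an assumption, not necessarily what self.beer.ear() implements:

import numpy as np

def pupil_center_from_landmarks(pred: np.ndarray) -> tuple:
    # pred: (N, 2) landmark array; indices 7+ are assumed to be iris points.
    center = pred[7:].mean(axis=0)
    return int(center[0]), int(center[1])

def eye_aspect_ratio(eye: np.ndarray) -> float:
    # Classic 6-point eye aspect ratio; the ordering of eye[0..5] is an assumption.
    v1 = np.linalg.norm(eye[1] - eye[5])
    v2 = np.linalg.norm(eye[2] - eye[4])
    h = np.linalg.norm(eye[0] - eye[3])
    return (v1 + v2) / (2.0 * h)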


class External_Run_DADDY(object):
@@ -311,8 +310,8 @@ class External_Run_DADDY(object):

    def run(self, current_image_gray):
        self.algo.current_image_gray = current_image_gray
        pupil = self.algo.single_run()
        return pupil
        pupil_x, pupil_y, ear = self.algo.single_run()
        return pupil_x, pupil_y, ear
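With this change the wrapper no longer returns a single pupil value; it forwards the (x, y, EAR) triple from single_run(). A hypothetical caller would unpack it like this, assuming current_image_gray is the cropped grayscale ROI frame used elsewhere in the codebase:

er_daddy = External_Run_DADDY()
pupil_x, pupil_y, ear = er_daddy.run(current_image_gray)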


if __name__ == "__main__":

@@ -68,6 +68,7 @@ class InformationOrigin(Enum):
    FAILURE = 3
    HSF = 4
    HSRAC = 5
    DADDY = 6

bbb = 0
@dataclass
@@ -153,6 +154,7 @@ class EyeProcessor:

        self.er_hsf = None
        self.er_hsrac = None
        self.er_daddy = None

        self.ibo = IntensityBasedOpeness(eyeside=EyeLR.LEFT if self.eye_id is EyeId.LEFT else EyeLR.RIGHT if eye_id is EyeId.RIGHT else -1)
        self.roi_include_set = {"rotation_angle", "roi_window_x", "roi_window_y"}
@@ -175,7 +177,6 @@ class EyeProcessor:
        self.prev_x = None
        self.prev_y = None

        self.daddy = None
        self.current_algo = InformationOrigin.HSRAC


@@ -278,7 +279,13 @@ class EyeProcessor:
        self.eyeopen = BLINK(self)

    def DADDYM(self):
        landmark = self.daddy.run(self.current_image_gray)
        # todo: We should have a proper variable for drawing.
        self.thresh = self.current_image_gray.copy()
        self.rawx, self.rawy, self.eyeopen = self.er_daddy.run(self.current_image_gray)
        # DADDY also applies a One Euro filter internally, so the coordinates are smoothed twice; leaving that as-is for now.
        self.out_x, self.out_y = cal_osc(self, self.rawx, self.rawy)

        self.current_algorithm = InformationOrigin.DADDY
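The comment notes that DADDY already smooths its output with a One Euro filter, so cal_osc filters the coordinates a second time. For reference, a generic One Euro filter looks roughly like the sketch below; this illustrates the technique (Casiez et al.), it is not the project's cal_osc or DADDY's internal filter, and the default parameters are placeholders:

import math
import time

class OneEuroFilter:
    # Generic One Euro filter: adaptive exponential smoothing whose cutoff
    # grows with the estimated speed, trading lag for jitter suppression.
    def __init__(self, min_cutoff=1.0, beta=0.007, d_cutoff=1.0):
        self.min_cutoff, self.beta, self.d_cutoff = min_cutoff, beta, d_cutoff
        self.x_prev = self.dx_prev = self.t_prev = None

    @staticmethod
    def _alpha(cutoff, dt):
        tau = 1.0 / (2.0 * math.pi * cutoff)
        return 1.0 / (1.0 + tau / dt)

    def __call__(self, x, t=None):
        t = time.monotonic() if t is None else t
        if self.x_prev is None:
            self.x_prev, self.dx_prev, self.t_prev = x, 0.0, t
            return x
        dt = max(t - self.t_prev, 1e-6)
        dx = (x - self.x_prev) / dt
        a_d = self._alpha(self.d_cutoff, dt)
        dx_hat = a_d * dx + (1 - a_d) * self.dx_prev
        cutoff = self.min_cutoff + self.beta * abs(dx_hat)
        a = self._alpha(cutoff, dt)
        x_hat = a * x + (1 - a) * self.x_prev
        self.x_prev, self.dx_prev, self.t_prev = x_hat, dx_hat, t
        return x_hat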

    def HSRACM(self):
        # todo: add handling to re-initialise er_hsrac when the resolution changes
@@ -310,8 +317,9 @@ class EyeProcessor:
        self.current_algorithm = InformationOrigin.HSF

    def RANSAC3DM(self):
        current_image_gray_copy = self.current_image_gray.copy()  # Duplicate before RANSAC3D overwrites it.
        self.rawx, self.rawy, self.thresh = RANSAC3D(self)
        self.eyeopen = self.ibo.intense(self.rawx, self.rawy, self.current_image_gray)
        self.eyeopen = self.ibo.intense(self.rawx, self.rawy, current_image_gray_copy)
        out_x, out_y = cal_osc(self, self.rawx, self.rawy)
        self.current_algorithm = InformationOrigin.RANSAC
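The copy is taken because RANSAC3D overwrites self.current_image_gray while it runs, so the intensity-based openness has to be measured on the untouched pixels. The pattern, as a minimal sketch with illustrative names (in_place_algorithm and intensity_metric are placeholders, not project functions):

original = frame.copy()                       # keep the unmodified pixels
x, y, thresh = in_place_algorithm(frame)      # may draw on / threshold `frame`
openness = intensity_metric(x, y, original)   # measure brightness on the copy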

@@ -360,11 +368,6 @@ class EyeProcessor:
        algolist = [None, None, None, None, None]

        # set algo priorities





        if self.settings.gui_HSF:
            if self.er_hsf is None:
                self.er_hsf = External_Run_HSF(self.settings.gui_skip_autoradius, self.settings.gui_HSF_radius)
@@ -375,22 +378,21 @@ class EyeProcessor:

        if self.settings.gui_HSRAC:
            if self.er_hsrac is None:
                self.er_hsrac = External_Run_HSRACS(self.settings.gui_skip_autoradius, self.settings.gui_HSF_radius)
                self.er_hsrac = External_Run_HSRACS(self.settings.gui_skip_autoradius, self.settings.gui_HSF_radius, self.settings.gui_thresh_add)
            algolist[self.settings.gui_HSRACP] = self.HSRACM
        else:
            if self.er_hsrac is not None:
                self.er_hsrac = None

        _, self.firstalgo, self.secondalgo, self.thirdalgo, self.fourthalgo = algolist
        if self.settings.gui_DADDY:
            if self.er_daddy is None:
                self.er_daddy = External_Run_DADDY()
            algolist[self.settings.gui_DADDYP] = self.DADDYM
        else:
            if self.er_daddy is not None:
                self.er_daddy = None

        if self.settings.gui_HSF and self.settings.gui_HSFP == 1:  # This is inefficient, but it only runs at startup and no better solution comes to mind at the moment.
            self.firstalgo = self.HSFM
        elif self.settings.gui_HSF and self.settings.gui_HSFP == 2:
            self.secondalgo = self.HSFM
        elif self.settings.gui_HSF and self.settings.gui_HSFP == 3:
            self.thirdalgo = self.HSFM
        elif self.settings.gui_HSF and self.settings.gui_HSFP == 4:
            self.fourthalgo = self.HSFM
        _, self.firstalgo, self.secondalgo, self.thirdalgo, self.fourthalgo = algolist
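The HSRAC and DADDY branches now register their runner method in algolist at the priority index chosen in the settings, and the first to fourth slots are unpacked from that list afterwards. A condensed sketch of the pattern, with hypothetical priority values:

algolist = [None, None, None, None, None]            # index 0 unused; 1-4 are priority slots
if settings.gui_HSRAC:
    algolist[settings.gui_HSRACP] = self.HSRACM      # e.g. gui_HSRACP == 1
if settings.gui_DADDY:
    algolist[settings.gui_DADDYP] = self.DADDYM      # e.g. gui_DADDYP == 2
_, firstalgo, secondalgo, thirdalgo, fourthalgo = algolist   # unused slots stay None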

        if self.settings.gui_RANSAC3D and self.settings.gui_RANSAC3DP == 1:
            self.firstalgo = self.RANSAC3DM
@@ -401,15 +403,6 @@ class EyeProcessor:
        elif self.settings.gui_RANSAC3D and self.settings.gui_RANSAC3DP == 4:
            self.fourthalgo = self.RANSAC3DM

        if self.settings.gui_HSRAC and self.settings.gui_HSRACP == 1:
            self.firstalgo = self.HSRACM
        elif self.settings.gui_HSRAC and self.settings.gui_HSRACP == 2:
            self.secondalgo = self.HSRACM
        elif self.settings.gui_HSRAC and self.settings.gui_HSRACP == 3:
            self.thirdalgo = self.HSRACM
        elif self.settings.gui_HSRAC and self.settings.gui_HSRACP == 4:
            self.fourthalgo = self.HSRACM

        if self.settings.gui_BLOB and self.settings.gui_BLOBP == 1:
            self.firstalgo = self.BLOBM
        elif self.settings.gui_BLOB and self.settings.gui_BLOBP == 2:

@@ -320,7 +320,7 @@ class HSRAC_cls(object):
            # If the average value of cropped_image is greater than response_max
            # (i.e., if the crop image is whitish), treat it as a blink.
            print("BLINK BD")
            # print("BLINK BD")
            blink_bd = True

        # if imshow_enable or save_video:
@@ -385,13 +385,14 @@ class HSRAC_cls(object):
        min_val = cv2.minMaxLoc(frame_gray_crop)[0]
        # threshold_value = min_val + thresh_add

        if not blink_bd and self.blink_detector.enable_detect_flg:
            cv2.threshold(frame_gray_crop, ((min_val + self.center_q1.quartile_1) - thresh_add) / 2, 255, cv2.THRESH_BINARY_INV, dst=th_frame)
            cv2.morphologyEx(th_frame, cv2.MORPH_OPEN, self.kernel, dst=fic_frame)
            # cv2.morphologyEx(fic_frame, cv2.MORPH_CLOSE, self.kernel, dst=fic_frame)
            # cv2.erode(fic_frame, self.kernel, dst=fic_frame)
            # cv2.bitwise_not(fic_frame, fic_frame)
        else:
            # if not blink_bd and self.blink_detector.enable_detect_flg:
            #     cv2.threshold(frame_gray_crop, ((min_val + self.center_q1.quartile_1) - thresh_add) / 2, 255, cv2.THRESH_BINARY_INV, dst=th_frame)
            #     cv2.morphologyEx(th_frame, cv2.MORPH_OPEN, self.kernel, dst=fic_frame)
            #     # cv2.morphologyEx(fic_frame, cv2.MORPH_CLOSE, self.kernel, dst=fic_frame)
            #     # cv2.erode(fic_frame, self.kernel, dst=fic_frame)
            #     # cv2.bitwise_not(fic_frame, fic_frame)
            # else:

            cv2.threshold(frame_gray_crop, min_val + thresh_add, 255, cv2.THRESH_BINARY, dst=th_frame)
            cv2.morphologyEx(th_frame, cv2.MORPH_OPEN, self.kernel, dst=fic_frame)  # or cv2.MORPH_CLOSE
            cv2.morphologyEx(fic_frame, cv2.MORPH_CLOSE, self.kernel, dst=fic_frame)
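Both branches binarize the cropped frame relative to its minimum intensity: when the blink detector is active and no blink is flagged, the threshold is pulled toward the tracked quartile value (center_q1.quartile_1) and the result is inverted, otherwise the frame is thresholded at min_val + thresh_add. A self-contained sketch of the two paths, with illustrative argument names rather than the HSRAC_cls internals:

import cv2

def binarize_crop(frame_gray_crop, q1, thresh_add, use_quartile, kernel):
    # frame_gray_crop: single-channel crop; q1: tracked quartile of recent values.
    min_val = cv2.minMaxLoc(frame_gray_crop)[0]
    if use_quartile:
        thresh_val = ((min_val + q1) - thresh_add) / 2
        _, th = cv2.threshold(frame_gray_crop, thresh_val, 255, cv2.THRESH_BINARY_INV)
        th = cv2.morphologyEx(th, cv2.MORPH_OPEN, kernel)
    else:
        _, th = cv2.threshold(frame_gray_crop, min_val + thresh_add, 255, cv2.THRESH_BINARY)
        th = cv2.morphologyEx(th, cv2.MORPH_OPEN, kernel)
        th = cv2.morphologyEx(th, cv2.MORPH_CLOSE, kernel)
    return th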
@@ -497,12 +498,13 @@ class HSRAC_cls(object):


class External_Run_HSRACS(object):
    def __init__(self, skip_autoradius_flg=False, radius=20):
    def __init__(self, skip_autoradius_flg=False, radius=20, threshold=10):
        # temporary code
        global skip_autoradius, default_radius
        global skip_autoradius, default_radius, thresh_add
        skip_autoradius = skip_autoradius_flg
        if skip_autoradius:
            default_radius = radius
        thresh_add = threshold

        self.algo = HSRAC_cls()
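External_Run_HSRACS now also receives the threshold offset from the GUI (gui_thresh_add) and writes it into the module-level thresh_add used by the thresholding code above. A hypothetical instantiation:

er_hsrac = External_Run_HSRACS(skip_autoradius_flg=True, radius=20, threshold=10)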


@@ -84,6 +84,7 @@ class IntensityBasedOpeness:
        # self.img_roi = self.now_roi == {"rotation": 0, "x": 0, "y": 0}
        self.img_roi = np.zeros(3, dtype=np.int32)
        self.now_roi = np.zeros(3, dtype=np.int32)
        self.prev_val = 0.5

    def check(self, frameshape):
        # 0 in data is used as the initial value.
@@ -143,11 +144,14 @@ class IntensityBasedOpeness:
        self.now_roi[:] = [v for v in roiinfo.values()]

    def intense(self, x, y, frame):
        # x, y are in the range 0..(frame.shape[1 or 0] - 1); frame is the 1-channel image cropped to the ROI
        self.check(frame.shape)
        int_x, int_y = int(x), int(y)
        upper_x = min(int_x + 25, frame.shape[1])  # TODO: make this a setting
        if int_x < 0 or int_y < 0:
            return self.prev_val
        upper_x = min(int_x + 25, frame.shape[1] - 1)  # TODO: make this a setting
        lower_x = max(int_x - 25, 0)
        upper_y = min(int_y + 25, frame.shape[0])
        upper_y = min(int_y + 25, frame.shape[0] - 1)
        lower_y = max(int_y - 25, 0)

        # frame_crop = frame[lower_y:upper_y, lower_x:upper_x]
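intense() samples a fixed 25-pixel window around the estimated pupil position, clamping it to the frame bounds and falling back to the previous openness value when the coordinates are negative. A minimal sketch of that cropping step (the helper name is illustrative):

WINDOW = 25  # TODO in the original: make this a setting

def crop_window(frame, x, y):
    int_x, int_y = int(x), int(y)
    if int_x < 0 or int_y < 0:
        return None                                    # caller falls back to prev_val
    upper_x = min(int_x + WINDOW, frame.shape[1] - 1)
    lower_x = max(int_x - WINDOW, 0)
    upper_y = min(int_y + WINDOW, frame.shape[0] - 1)
    lower_y = max(int_y - WINDOW, 0)
    return frame[lower_y:upper_y, lower_x:upper_x]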
@@ -228,4 +232,5 @@ class IntensityBasedOpeness:
        if changed and ((time.time() - self.lct) > 4):  # save at most every 4 seconds, and only if something changed, to limit disk usage
            self.save()
            self.lct = time.time()
        self.prev_val = eyeopen
        return eyeopen

@@ -26,7 +26,7 @@ class SettingsWidget:
        self.gui_DADDYP = f"-DADDYP{widget_id}-"
        self.gui_RANSAC3D = f"-RANSAC3D{widget_id}-"
        self.gui_BLINK = f"-BLINK{widget_id}-"
        self.gui_IBO = f"-THRESHADD{widget_id}-"
        self.gui_IBO = f"-IBO{widget_id}-"
        self.gui_HSRAC = f"-HSRAC{widget_id}-"
        self.gui_HSF_radius = f"-HSFRADIUS{widget_id}-"
        self.gui_blob_maxsize = f"-BLOBMAXSIZE{widget_id}-"