From fe04229479d36e910adcfdffc8ec80a72ed1d971 Mon Sep 17 00:00:00 2001 From: Luna Gava Date: Fri, 19 May 2023 14:05:18 +0200 Subject: [PATCH 1/4] starting code for circle tracking demo (control part is missing) --- affine/CMakeLists.txt | 38 ++++ affine/affine.h | 447 ++++++++++++++++++++++++++++++++++++++++++ affine/circle.png | Bin 0 -> 10139 bytes affine/eros.h | 64 ++++++ affine/main.cpp | 283 ++++++++++++++++++++++++++ 5 files changed, 832 insertions(+) create mode 100755 affine/CMakeLists.txt create mode 100755 affine/affine.h create mode 100644 affine/circle.png create mode 100755 affine/eros.h create mode 100755 affine/main.cpp diff --git a/affine/CMakeLists.txt b/affine/CMakeLists.txt new file mode 100755 index 0000000..f76d2c1 --- /dev/null +++ b/affine/CMakeLists.txt @@ -0,0 +1,38 @@ +# requires minimum cmake version +cmake_minimum_required(VERSION 3.5) + +# produce the cmake var PROJECT_NAME +project(shape_position) + +if(NOT CMAKE_BUILD_TYPE) +set_property(CACHE CMAKE_BUILD_TYPE PROPERTY VALUE "Release") +endif() + +#include(GNUInstallDirs) + +# mandatory use of these packages +find_package(YCM REQUIRED) +find_package(OpenCV REQUIRED) +find_package(YARP COMPONENTS OS sig math dev REQUIRED) +find_package(event-driven REQUIRED) + +add_executable(${PROJECT_NAME} main.cpp eros.h affine.h) + +target_link_libraries(stimulus PRIVATE YARP::YARP_OS + YARP::YARP_init + YARP::YARP_sig + YARP::YARP_dev + YARP::YARP_math + ev::event-driven + ${OpenCV_LIBRARIES}) + +target_link_libraries(${PROJECT_NAME} PRIVATE YARP::YARP_OS + YARP::YARP_init + YARP::YARP_sig + YARP::YARP_dev + YARP::YARP_math + ev::event-driven + ${OpenCV_LIBRARIES}) + +install(TARGETS ${PROJECT_NAME} DESTINATION ${CMAKE_INSTALL_BINDIR}) + diff --git a/affine/affine.h b/affine/affine.h new file mode 100755 index 0000000..08613c8 --- /dev/null +++ b/affine/affine.h @@ -0,0 +1,447 @@ +#pragma once + +#include + +using namespace yarp::os; + +class affineTransforms +{ +public: + + enum cam_param_name{w,h,cx,cy,fx,fy}; + + typedef struct affine_struct + { + cv::Mat A; + cv::Mat warped_img; + double score; + cv::Mat rmp; + cv::Mat rmsp; + int axis{0}; + double delta{0.0}; + + } affine_bundle; + + std::array affine_info; + std::vector affines_vector; + std::vector scores_vector; + std::array state; + + cv::Rect2d roi_around_shape, square; + cv::Mat initial_template, roi_template, roi_template_64f, roi_resized, mexican_template, mexican_template_64f; + cv::Mat eros_filtered, eros_tracked, eros_tracked_64f, eros_resized; + cv::Mat rot_scaled_tr_template; + double translation, angle, pscale, nscale, scaling; + cv::Point initial_position, new_position; + cv::Point2d new_center; + int blur{11}; + int gaussian_blur_eros{5}; + double template_scale{0.32}; //0.4 + + cv::Size proc_size{cv::Size(100, 100)}; + cv::Rect proc_roi; + cv::Mat prmx, prmy, nrmx, nrmy; + + + std::array cam; + +public: + + void init(double trans, double rot, double ps, double ns, double sc){ + + // proc_roi = cv::Rect(cv::Point(0, 0), proc_size); + + for(auto &affine : affine_info) { + affine.A = cv::Mat::zeros(2,3, CV_64F); + // affine.warped_img = cv::Mat::zeros(proc_size, CV_64F); + } + + prmx = cv::Mat::zeros(proc_size, CV_32F); + prmy = cv::Mat::zeros(proc_size, CV_32F); + nrmx = cv::Mat::zeros(proc_size, CV_32F); + nrmy = cv::Mat::zeros(proc_size, CV_32F); + + this->translation = trans; + this->angle = rot; + this->pscale = ps; + this->nscale = ns; + this->scaling = sc; + } + + void initState(){ + state[0]=0; state[1]=0; state[2]=0; state[3]=1; + } + + void 
createAffines(double translation, cv::Point2f center, double angle, double pscale, double nscale){ + + affine_info[0].A.at(0,0) = 1; + affine_info[0].A.at(0,2) = translation; + affine_info[0].A.at(1,1) = 1; + affine_info[0].A.at(1,2) = 0; + + affine_info[1].A.at(0,0) = 1; + affine_info[1].A.at(0,2) = -translation; + affine_info[1].A.at(1,1) = 1; + affine_info[1].A.at(1,2) = 0; + + affine_info[2].A.at(0,0) = 1; + affine_info[2].A.at(0,2) = 0; + affine_info[2].A.at(1,1) = 1; + affine_info[2].A.at(1,2) = translation; + + affine_info[3].A.at(0,0) = 1; + affine_info[3].A.at(0,2) = 0; + affine_info[3].A.at(1,1) = 1; + affine_info[3].A.at(1,2) = -translation; + + affine_info[4].A = cv::getRotationMatrix2D(center, angle, 1); + affine_info[5].A = cv::getRotationMatrix2D(center, -angle, 1); + + affine_info[6].A = cv::getRotationMatrix2D(center, 0, pscale); + affine_info[7].A = cv::getRotationMatrix2D(center, 0, nscale); + + affine_info[8].A.at(0,0) = 1; + affine_info[8].A.at(1,1) = 1; + } + + void updateAffines(){ + + new_center = cv::Point2d(roi_around_shape.width/2, roi_around_shape.height/2); + + affine_info[4].A = cv::getRotationMatrix2D(new_center, angle, 1); + affine_info[5].A = cv::getRotationMatrix2D(new_center, -angle, 1); + + affine_info[6].A = cv::getRotationMatrix2D(new_center, 0, pscale); + affine_info[7].A = cv::getRotationMatrix2D(new_center, 0, nscale); + } + + void setROI(int buffer = 10){ + + static cv::Rect2d full_roi = cv::Rect2d(cv::Point(0, 0), rot_scaled_tr_template.size()); + + roi_around_shape = cv::boundingRect(rot_scaled_tr_template); + + if(roi_around_shape.width == 0) { + roi_around_shape = full_roi; + return; + } + roi_around_shape.x -= buffer; + roi_around_shape.y -= buffer; + roi_around_shape.width += buffer * 2; + roi_around_shape.height += buffer * 2; + + roi_around_shape = roi_around_shape & full_roi; + + roi_template = rot_scaled_tr_template(roi_around_shape); + roi_template.convertTo(roi_template_64f, CV_64F); // is 6 type CV_64FC1 + + // cv::resize(roi_template_64f, roi_resized, proc_roi.size(), 0, 0, cv::INTER_CUBIC); + + } + + cv::Mat updateTrMat(double translation_x, double translation_y){ + + // the state is an affine + cv::Mat trMat = cv::Mat::zeros(2,3, CV_64F); + + trMat.at(0,0) = 1; trMat.at(0,1) = 0; trMat.at(0,2) = translation_x; + trMat.at(1,0) = 0; trMat.at(1,1) = 1; trMat.at(1,2) = translation_y; + + return trMat; + } + + void createStaticTemplate(const cv::Size &res, double scaling_percentage){ + + initial_template = cv::Mat::zeros(res.height, res.width, CV_8UC1); + double square_width = res.height*(scaling_percentage/100); + double square_height = res.height*(scaling_percentage/100); + square = cv::Rect2d((res.width-square_width)/2,(res.height-square_height)/2, square_width, square_height); + cv::rectangle(initial_template, square, cv::Scalar(255),1,8,0); + + initial_position.x = square.x + square.width/2; + initial_position.y = square.y + square.height/2; + + } + + void loadTemplate(const cv::Size &res, std::string filename){ + + cv::Mat shape_image = cv::imread(filename, 0); + + cv::resize(shape_image, shape_image, cv::Size(template_scale*shape_image.cols, template_scale*shape_image.rows), 0, 0, cv::INTER_LINEAR); + + // static cv::Mat shape_blur; + // cv::GaussianBlur(shape_image, shape_blur, cv::Size(3,3),0,0); + + // cv::Mat sobelxy; + // cv::Sobel(shape_blur, sobelxy, CV_64F, 1, 1, 5); + + // static cv::Mat edges; + // cv::Canny(shape_blur, edges, 100, 200, 3, false); + + initial_template = cv::Mat::zeros(res.height, res.width, CV_8UC1); + + 
initial_position.x = res.width/2; + initial_position.y = res.height/2; + + cv::Rect mask = cv::Rect(res.width/2 - shape_image.cols/2, res.height/2 - shape_image.rows/2, shape_image.cols, shape_image.rows); + + shape_image.copyTo(initial_template(mask)); + + } + + void squareTemplate(const cv::Size &res){ + initial_template = cv::Mat::zeros(res.height, res.width, CV_8UC1); + square = cv::Rect(306,263, 87, 93); + cv::rectangle(initial_template, square, cv::Scalar(255),1,8,0); + + initial_position.x = square.x + square.width/2; + initial_position.y = square.y + square.height/2; + } + + // void createDynamicTemplate(){ + + // cv::Mat rotMatfunc = getRotationMatrix2D(initial_position, state[2], state[3]); + // rotMatfunc.at(0, 2) += (state[0] - initial_position.x); + // rotMatfunc.at(1, 2) += (state[1] - initial_position.y); + // cv::warpAffine(initial_template, rot_scaled_tr_template, rotMatfunc, rot_scaled_tr_template.size()); + // new_position = cv::Point2d(initial_position.x+state[0],initial_position.y+state[1]); + + // } + + void createDynamicTemplate(){ + + cv::Mat rot_scaled_template; + + cv::Mat rotMatfunc = getRotationMatrix2D(initial_position, state[2], state[3]); + cv::warpAffine(initial_template, rot_scaled_template, rotMatfunc, rot_scaled_template.size()); + cv::Mat trMat = updateTrMat(state[0], state[1]); + cv::warpAffine(rot_scaled_template, rot_scaled_tr_template, trMat, rot_scaled_tr_template.size()); + new_position = cv::Point2d(initial_position.x+state[0],initial_position.y+state[1]); + + } + + void make_template() { + + static cv::Mat canny_img, f, pos_hat, neg_hat; + static cv::Size pblur(blur, blur); + static cv::Size nblur(2*blur-1, 2*blur-1); + static double minval, maxval; + + // cv::GaussianBlur(roi_resized, pos_hat, pblur, 0); + // cv::GaussianBlur(roi_resized, neg_hat, nblur, 0); + cv::GaussianBlur(roi_template_64f, pos_hat, pblur, 0); + cv::GaussianBlur(roi_template_64f, neg_hat, nblur, 0); + mexican_template = pos_hat - neg_hat; + + cv::minMaxLoc(mexican_template, &minval, &maxval); + double scale_factor = 1.0 / (2 * std::max(fabs(minval), fabs(maxval))); + mexican_template *= scale_factor; + } + + void createWarpings(){ + + mexican_template.convertTo(mexican_template_64f, CV_64F); // is 6 type CV_64FC1 + + for (int affine = 0; affine < affine_info.size(); affine++) { + cv::warpAffine(mexican_template_64f, affine_info[affine].warped_img, affine_info[affine].A, mexican_template_64f.size()); + } + } + + void createMapWarpings(){ + + mexican_template.convertTo(mexican_template_64f, CV_64F); // is 6 type CV_64FC1 + + for (int affine = 0; affine < affine_info.size()-1; affine++) { + cv::remap(mexican_template_64f, affine_info[affine].warped_img, affine_info[affine].rmp, affine_info[affine].rmsp, cv::INTER_LINEAR); + cv::imshow("affine remap" + std::to_string(affine), affine_info[affine].warped_img); + } + + affine_info[8].warped_img = mexican_template_64f; + + } + + void setEROS(const cv::Mat &eros) + { + // cv::GaussianBlur(eros, eros_filtered, cv::Size(5, 5), 0); + // cv::Mat eros_blurred1; + // cv::medianBlur(eros, eros_blurred1, 3); + cv::GaussianBlur(eros, eros_filtered, cv::Size(gaussian_blur_eros, gaussian_blur_eros), 0); + eros_filtered(roi_around_shape).copyTo(eros_tracked); + eros_tracked.convertTo(eros_tracked_64f, CV_64F, 0.003921569); + // cv::resize(eros_tracked_64f, eros_resized, proc_roi.size(), 0, 0, cv::INTER_CUBIC); + } + + double similarity_score(const cv::Mat &observation, const cv::Mat &expectation) { + static cv::Mat muld; + muld = 
expectation.mul(observation); + return cv::sum(cv::sum(muld))[0]; + } + + void performComparisons(){ + + for (int affine = 0; affine < affine_info.size(); affine++) { + affine_info[affine].score = similarity_score(eros_tracked_64f, affine_info[affine].warped_img); + // affine_info[affine].score = similarity_score(eros_resized, affine_info[affine].warped_img); + scores_vector.push_back(affine_info[affine].score); +// cv::imshow("affine"+std::to_string(affine), affine_info[affine].warped_img); + } + } + + void updateState(){ + + int best_score_index = max_element(scores_vector.begin(), scores_vector.end()) - scores_vector.begin(); + // double best_score = *max_element(scores_vector.begin(), scores_vector.end()); + // yInfo() << scores_vector; + // yInfo() << "highest score =" << best_score_index << best_score; + scores_vector.clear(); + + if (best_score_index == 0) + state[0] += translation; + else if (best_score_index == 1) + state[0] -= translation; + else if (best_score_index == 2) + state[1] += translation; + else if (best_score_index == 3) + state[1] -= translation; + else if (best_score_index == 4) + state[2] += angle; + else if (best_score_index == 5) + state[2] -= angle; + else if (best_score_index == 6) + state[3] = state[3]*pscale; + else if (best_score_index == 7) + state[3] = state[3]*nscale; + } + + void updateStateAll(){ + + //int best_score_index = max_element(scores_vector.begin(), scores_vector.end()) - scores_vector.begin(); + // double best_score = *max_element(scores_vector.begin(), scores_vector.end()); + // yInfo() << scores_vector; + // yInfo() << "highest score =" << best_score_index << best_score; + double no_motion = scores_vector[8]; + if(scores_vector[0] > no_motion) state[0] += translation; + if(scores_vector[1] > no_motion) state[0] -= translation; + if(scores_vector[2] > no_motion) state[1] += translation; + if(scores_vector[3] > no_motion) state[1] -= translation; + if(scores_vector[4] > no_motion) state[2] += angle; + if(scores_vector[5] > no_motion) state[2] -= angle; + if(scores_vector[6] > no_motion) state[3]*= pscale; + if(scores_vector[7] > no_motion) state[3]*= nscale; + + scores_vector.clear(); + } + + void create_map_x(double dp, int affine_n){ + //x shift by dp + for(int x = 0; x < proc_size.width; x++) { + for(int y = 0; y < proc_size.height; y++) { + //positive + prmx.at(y, x) = x-dp; + prmy.at(y, x) = y; + //negative + nrmx.at(y, x) = x+dp; + nrmy.at(y, x) = y; + } + } + if (affine_n == 0){ + cv::convertMaps(prmx, prmy, affine_info[affine_n].rmp, affine_info[affine_n].rmsp, CV_16SC2); + affine_info[affine_n].delta = dp; + } + else{ + cv::convertMaps(nrmx, nrmy, affine_info[affine_n].rmp, affine_info[affine_n].rmsp, CV_16SC2); + affine_info[affine_n].delta = -dp; + } + affine_info[affine_n].axis = affine_n; + } + + void create_map_y(double dp, int affine_n){ + for(int x = 0; x < proc_size.width; x++) { + for(int y = 0; y < proc_size.height; y++) { + //positive + prmx.at(y, x) = x; + prmy.at(y, x) = y-dp; + //negative + nrmx.at(y, x) = x; + nrmy.at(y, x) = y+dp; + } + } + + if (affine_n == 2){ + cv::convertMaps(prmx, prmy, affine_info[affine_n].rmp, affine_info[affine_n].rmsp, CV_16SC2); + affine_info[affine_n].delta = dp; + } + else{ + cv::convertMaps(nrmx, nrmy, affine_info[affine_n].rmp, affine_info[affine_n].rmsp, CV_16SC2); + affine_info[affine_n].delta = -dp; + } + affine_info[affine_n].axis = affine_n; + } + + void create_map_scale(double sc, int affine_n){ + double cy = proc_size.height * 0.5; + double cx = proc_size.width * 0.5; + for(int x 
= 0; x < proc_size.width; x++) { + for(int y = 0; y < proc_size.height; y++) { + double dx = -(x-cx) * sc; + double dy = -(y-cy) * sc; + //positive + prmx.at(y, x) = x - dx; + prmy.at(y, x) = y - dy; + //negative + nrmx.at(y, x) = x + dx; + nrmy.at(y, x) = y + dy; + } + } + if (affine_n == 6){ + cv::convertMaps(prmx, prmy, affine_info[affine_n].rmp, affine_info[affine_n].rmsp, CV_16SC2); + affine_info[affine_n].delta = sc; + } + else{ + cv::convertMaps(nrmx, nrmy, affine_info[affine_n].rmp, affine_info[affine_n].rmsp, CV_16SC2); + affine_info[affine_n].delta = -sc; + } + affine_info[affine_n].axis = affine_n; + } + + void create_map_rot(double angle, int affine_n){ + double cy = proc_size.height * 0.5; + double cx = proc_size.width * 0.5; + double theta = atan2(angle, std::max(proc_size.width, proc_size.height)*0.5); + for(int x = 0; x < proc_size.width; x++) { + for(int y = 0; y < proc_size.height; y++) { + double dx = -(y - cy) * cam[fx] / cam[fy] * theta; + double dy = (x - cx) * cam[fy] / cam[fx] * theta; + //positive + prmx.at(y, x) = x - dx; + prmy.at(y, x) = y - dy; + //negative + nrmx.at(y, x) = x + dx; + nrmy.at(y, x) = y + dy; + } + } + if (affine_n == 6){ + cv::convertMaps(prmx, prmy, affine_info[affine_n].rmp, affine_info[affine_n].rmsp, CV_16SC2); + affine_info[affine_n].delta = theta; + } + else{ + cv::convertMaps(nrmx, nrmy, affine_info[affine_n].rmp, affine_info[affine_n].rmsp, CV_16SC2); + affine_info[affine_n].delta = -theta; + } + + affine_info[affine_n].axis = affine_n; + } + + void create_maps(){ + create_map_x(translation, 0); + create_map_x(translation, 1); + create_map_y(translation, 2); + create_map_y(translation, 3); + create_map_rot(angle, 4); + create_map_rot(angle, 5); + create_map_scale(scaling, 6); + create_map_scale(scaling, 7); + } + + +}; diff --git a/affine/circle.png b/affine/circle.png new file mode 100644 index 0000000000000000000000000000000000000000..1d9479a64da95ba33d381978fc7bea106ce7fd1b GIT binary patch literal 10139 zcmdUVi93{S`1T`3$?_(J$asq+$sPuywve(vSEulu~t>%8vpE4r81SdOqD2*ReRp=N*} z3~BK9;a(>Are%Y$8~)kteO~kGUid@Ud-DN&=EYyUjyH6-$KST`vO^r)++FR&y=}eh z?A*L>x#MRUYjMy>9Bp*L%gzSx=(3|FI>E;F*4JGAcBad+CNwQl4l0|J(8Dfw&|oyiqwmjIc8xUZ+DCod?qHbq~% zcI`g(`oa3j>+9MB0|TcdB#b3_;O)30 ze0)B0lz)z@1YB*3K5BU3LI`{#PTN)RX|--SA~I6WfBvRUsf$=JAnx`_HshuMFnT62_J0|7@N*eOfkUcL)}Xy?Xt6&h{#4yG<{6 z-B3mTLqCe6rme!0 zx;T+Ra#~-cojQ5)ijh$~tSV|~w(IvmmCty}?Y!sDKj7I7Sg9eZ(gBONHm4m_$i;W zY0F(3mjE&{2m-C1N^FkA)+4qLdKMSN%7#Vpm+juU&fyC`MNFc-_*XTfM#Y zr;-}2r;4MP+`ec5nvt%0&CQJ()rnk)5=ZkQ{t+w=kvb>4Ko$c)} zjw<<1tWOoor`%)sBKu(YE_HNlEH5)t z4aTkf^SerCJRPI3*4^z~T%F}e&hixDGpB(wx7P&zL2-yIs=yINV!LK!N6O?2cMW_f$C!!R=Cg8}*M{0iwGKb{X|E=q`t%lP%{#!)O@qrBfM zlE$CRsEa#054(h#aeQA#9rwiKWP75Vb16Uq46{Fqr1Ksy5x$9MVPVnEMnuK9SRDT> z&s5HT71PP4I7{M0ivY-UbacLc`*zjdK8waLI4aD!Z*4OOASk!IybI4>vwLVQ^A#*4 z_vOorxHQr*LyLi_X(GU`&)7F!ousqh(Kgh$7l*}7GvV~iU3R7@1K2t(jD0t)@OVz+ zBHU%wY}UC%A&Fbnn4@R)vzhwcps!!Qwm&hp%-z@^RBx?MWqHpj62{|hr275ZH$YhU z&dTvfO+`f|VE8`w;=pWB7oG+8VKwtnnRAyD1Qb>dNo3bu6i6En9x#B>kDYZNEXXy? 
zo>z0SMlK#ZPz}gfx!A6ylU*S|joalr)B7?p@d(5*vM7-X&%ufhl!jSt39L$9BM-hd zH$RDsv&HW5c&H)t0z#|fboc8lL-R`v2Yp6=-MzK> z=bEKu%hX>)u9zoJUd9~1gt`Mfyib4KhK54@40Ta?d002GmF}l}L?xi~t^1H0wn2^u z6X`Pe-sIp3ZOYg#!HcN#y?txMch*HR$f8PnT!FK8fmhJj-1ELsp=rY$a`tCWPp<#M zm?Ln4EJOYMnP|I*xx=|!NXer++7lJ>Hw()wG?ysc)6s)V~KJJH+ zLg1Z|*{$ER_-65P4=p*$m3Qynoe~w*SK$BI=U(fIZCYM!5!tsj-MXbMQM(~sT+3vh zURHL^$H%Alop&~k%gmOrE~=seoawQaE3w(bP=9+ayhjhP-^j`;4Q6*3s2jQv8Y|j# zcxZcbNbct6zx=B$$)d>Vk)?oZ=H>;L1pGhYBV46$<0!hAzOuf{vVXr$HX>=gIyYRo zx!fn`(85m^ef#M)ZajQw9(jb@V!NWQ9!g{F8y@i%c)^es=W%Iz`9_7(DK>mjttJ6;ADHDlf2omq5rgi3_Z?iMgm5Ib zyxMc7D&;=7p5u5+O1}{s#o2RhW?B;GFiiOnk|=3&4{UMPlTyb36!Z&T{WA4UU)S}w zF3&T2>vwT>&#e?ERh_SPdX1iS35Ho??C%ZgF0B1P59rs$!|l0g)@zde)j zS?V-SYSGx&?=ctN*r=a3h6VY?}^xyzYv!a6; zLrDLTaS5J$EE7qqti+=52>lnldUYjuYdqKuTfes}m4IR=!2dIt{|#WX?$t~2VnavA zzQ(meg0LFHjZlF{%DVL{?D#~)sJSAJLwSw}0Yb+CAwn39m#~7ayo#!(&k|NW3)47!Q3mH z#;R2@HX;Lqbar%rhY%&MnVC%`W?NL*6XMTUe7&uL7?^^4!2g}F^_)fTrhoSJ<@fiS zgCXXg1Ox<_(-XOklLgWPxL6N9MhBjX%+SL{9u_x{Jpvn# zjDMS5afVK_WSF8dn#O;cxnBcz4<4%EaR2^&)6!cp&=NWIXGRY18LzY7uiJ~v#iBO;VSZFGRduYZ)O zP9KslS;(izyqcq*>BG59NV1Tssukp@(kU@lqxn$ZZDb~9_*GjrRS$G{)-LR03hAdY z)pNRe2Ss$R3UNxoJT$o}IKUxyB)#KEq`i3~y=NB#_@*QUND%<9j>BTo@kR$AG0Hs%QOr+S;31!U3tqie@dg3nJP!sb zdIT95NC{-#&9Mh`UO6YOp%ujA{`>dJ08n55Yk5L9IcE`nX(Jn{GX}P1;ObgPiwg4o zB(e;tqK*hcv~zND(iYhn3dNLzNS(Cq>bH{3S%R_v>HPD*IJovuj)bdyT(*+yuCg=$ z>SStasturEr<a?Ri_ht+Jf>^zeg ze2c*##7*dEeSLjrNJxeMXQa(w1WD$o3SO4NA=Vdsx2H=sS3P>zh z;{Ws1mVnfSf>`n2+l1BOFx1dqB6_K^DQyxkPE|0C2^p{dZ5#*_fM~GuSX^8@>mX`M z+m&qw!~+MRK}TdUa;9k<%`cz>15k!Afs23)=<-WTOHXpK0K_BBY8m>VCMEr6-#d4z zX51Bb6TJ_FtMOpSpiyy=nowA`19XN@+74)++3((&e6ViAGa=Vc@UZr7g@(v#?js-A z9H#qmvE28|5JkE)gg7tjm9O-C7+hLkEh43+3V^#>KSL!W-3j?V5(y1^&vapYh2hsX zqIuFlqDVwP@+*3Mf-M2ZWj~7jbkrR&|672xlf0w8Ph9}G1$_iXb_VXfyB9w&W56}! zFGaONWb?0-LyUn-MX%<4I6(DYowMmvwd6-y)IkQgGh6L|%@}goR!Y+YU`ORGw2w|Q zIb$cx!m)@WjYL(cB9tjvS=p!P`HO1ZVUuw5lMQiG{4zKt-)Rc3JHju{!|DWS)qe-P z#C-?{83uYBtzZ1eIUYnkt~e3OSlpFwE%}*Iok79qSPV-0IQRVtc2OD|IrvZ=B}zN( z2ZOf*FR_wF(&;gopspq9!jwuJS_R1aP@poY4N@WXFiLow|sA3N^Q-rbMUoQ4sJ%>(HPm)44+*W}1?%_`p+=@bOfEQeqT9gkM* zbj-oKhjJ2}qWJxM1T8AltAEv{w^S3f2owPwAySw5FW939Tro??Bn{0PUb|+8cK)mT zwJpR8;9^&!mDV&2z-4H=5sgHM3efrZXSzvsK-u}{C}El?2Q6ZIYjztNc0hPIZ= z`|(KK0Wj0gLNf>KaT@zFdm63|zVf2c*qiS9wW; zT_6o4sO{LHS$Q-Feup5qG}#G3@LyYrDt~2x`{(~2oMo(g3$EHuPVraw7z=)a+qyUh z_zfUb=kGn=#TgQK*(JRQ{DIgZl7EtZuVBe*H0)699f0_>7eoqLM;5Z-bQ6@Bn~F@I76Z; z5S8Y{#lNG?4~YzoE{i&BcLm;{Ha>*oNGT|^svX3CBziZjDld220%=79yH!49DI}HU zrx3r#BbwdXn`*gQnww?NN4eSAm-g)01HC7U>EdD+o2|AC!v7WXJxtVr3|5hQJ=5j&C{kSGSMt@KtzJom4O%r|<`BR)%xRoC zg3(;3=>_LO#s>Q^7YrJR3NT;jlA74;md3&q@;@OxIP-s;E7Z>1XFewL5+<(}~We+LLApq3O;%nWm6H2? z*#w1wm!OJ3(Gh_!_eC(8iPClvkQ+@#V7wSKTtO2^l(G$l72U!%7{Hqv?Ss{KKmoo5 z&e+k`MyVW*F~{O!@k4TR=O$xK>qaA0eyAQ-6Xf6b>1ZB+_8nr1QAwC{rHbolb`k!sjFThLGNqA|% zyHy4F8L*1Fi||tXX_Fh+`jcI01fksAT+RA04)D!7VN@vCf2N`iAF5xZ3R7->skQc- znJA1-4kT2d;L&ldBGCdW5S<|=BmBQV#~eQ*V;{AXDG+|$8TunIw2AOsf#fcDK|a_P zdt6u&m*zM6#JJS9i5-2Tpr*!hM+AuBte9R_g!JmUW$>oYp~Ht0br~pc%h!Z)$Pfm+{m5IKp)hbI{!A1bPH|dre`? 
zhjFbYPnB+;(W_?iFhVmBRFvZtCD9f4LrH{8C8-vj6ST(eIma?Zqy0c-=$*5o>5pdG zfcS0y<)`N&df&Px*|(;;V(Sm{qN1#ANtL)Lpt8 z9cG=Hnu=?pN~PE+ZFFubgZ!UcQbGoUhM-=wlaRa;ZX8zzB(W4TE*haR6SY9MEWG~u zd~x-1B8UOM-k7PPBJUAJFQEhq!mTO9iS-)UtB0d8H)f}`lgWo78+b5v7dgXs%kwWC z3=Y_YXzv3hj{NHC0Z7>PJz!Yjs#{G+3P!5orQ3OK9QG=K73#aK4$_y9Rv3aY>x!TZltYDtR*X zffxd+do?H}l_RlQ+Sv+BR2c+_leDxn0JcUfXmbF`WZvkHR;|qE?CgYRw;A}4{k1pe z#fy*lP^u)3F&)lam*pMt+jR_QS7--WQNEe2z<#@m?Gd$!1k13HvHwP4SEvhxhy;~@ zEGYau)%6r=bUv!nIBQsCeFZc+@68(nASYvo zUFw*?_ez~QPl=nn0pTaZvpnhIS(ol~Es5&K47&`T4$VTrr+RX75~d(PwSA6yW+K)n z526UDLaPTM+|0gTh~9CbhC?9h&GeP)*)~QE%Yy_284OxnS$gPBsK-ZuQp*G^1QvP^ zdOimy08QS*4~it}dLC+QLKo)@BGdaIZpo8}BWb|#c0%$m?8fJgM_<2Qp6M;qdVT#H zo`rHb4OjedSSlorWOFH+U*E?^uJs<{X`pHN>29fq7}Y@-%yV9<0Hn4s_M--<{KZvw z2o3*0LqeM*8%6eLgZP9_90DRd`vE0_44AnyL=})PNp#HvP!D*G8e-=dd1S`NA9n42 z^^dWI&l4Kg1G=WD_jpU^LaS;q$^u1+8HB|jmjbrd#>+g0&yz)0cena#N!9)tn0*Yj zLR8XK5)|AATywG8mgKlLKbmDYr`SIe|6*&hc)1J!U#4) ztBvv~Fzx^taR$5!t;IoblTEoNQMZ~|0nxCmwA5qQt`>unTn11ZH8FXHl5Bp$J~sso z&%5W&fplbA?soL{M+U2mjEoCYKMM^#J*Dd7pkkC%0X2xFkNYJiKpieArZw+meeYz|FQpW*+*d;-qSzY@sjv!1E=v@F;P>>*W&QS9SkPXy{M zhEV<)+WdVxLhV?86msc-+Oc^*<;t-{=hC3f0J5S6XJgazt(D+yV_>mA)Bg#9BEK~<>usk_F$U}200@a^pb_~)~>1IS~n~+ zV^p)-nb)3Fb*S2Cpn@N;h4h*l99fenBq~742X-Q6RB%O8b6-7`Gnrt13$*Tl-|xoJ zqB}}Qp-2V>2Kj1YxzF7gD(5u*>ksIjuWL7&YdcC>abm#mp;80wVmuU1gE=AOIm5w& z!W@z?U_K3+;zK45fr2oB01YSv_a6#l;AXiG2)hK*YhGz7SX4?E_eco%rpi)>VWMhq z&HViQ^^qRa5XrirC|F3*i|%U$5}%mRn-8ZP7@~B9BNp?v#jT9A@Kky06lKN zrpN#g+9H2nl*Fx`2gv$K)>AE{k+Yk}7oN4;{jPTG0I*xvo;MevsiCcH22CiFr;*vp zabp!iYw#L3sySZ>Wz|RPLPD9Z0DXbe&-9e+gjj#%WDOTJq{IL!*USQb=b+La(?r!r zbtj?BM^MXxxV1FZHPxiL^%^p_o7jD#(NzN;Mrb_?w9bDZE_cebDj%m&tupHe07#)m zx41~wbMDB|M}<*df#gHFI0bif8EBCh4#xbv7XUOAcrQXW3%4wm+AeU=UN;HIq%guU zXmghzz{dqaeLJ_Z(iC*y{&dO}NMFD+om^Zxj+2yMKvi^GDrGmHE#Z+YI=;p2ftR&g zQ?*7=83rYRCbVdHET#=C1f^PZE>pSLsqIji(RM&9+^i3h+$uHE(w~#Fv#=&`@6bq( zGw94gN>RD9JV9^5bOFcG7WrTUt#g3yodcDtZKY;K1?nK8Vq#jTo>Vfo2hts2;V7hi zI;eLGau(20q*L~U-5^*$^Q1P?#**BB{%VN4cI8TXax%Z2gF{k_mkK!4@851{G;dYi zR@O@h73RV4NCj>9iC=$pZfIjnUe2{w7W6!iV{bG7&j7fWe-Gq;Pm*^Y{rUt;Sbar1 zUBOCaqyp`}3WpX-OeSz9jf{;GpgIE;44rJn^I!+~(btbbZzw>Olm-R5A0SZaWPbo+ zkeS>pQELu`>y)wYNg`>j*$|A~t}8GDSi+SI@JSFg*O!We3w@@3?$m(D#8ye1h9f99 z=mii|M;9x8Zve5%7)JH-^73ox7lFufh~++dod&&40@)DU;OOFWfBzh=osj+OxWOh$ z5VPCh0z@9j1x_Fd&aV-m98=`CM-6gK2tuInjjsGD^O-V2?~b^?r-u#_7Iffc6I+7y z9oVNtW&bxP^im5z!qWx;+{nkLEHN>0WMOTD?gzhJybU0qnf(3R==5PjJv|a|@I`>0 z$-?qMG`A>&e12xA)(RMOmf_un#?`vlaLeb~e^+rp*mnk;$ji;u_xFDX^&a#t%8M7r zz)?lvY73-0@%^sckFHxHO*K7ou+ad$nvEQygBH;gS>P4;SYU&5OEfMdz*W{U5zP+@b&g literal 0 HcmV?d00001 diff --git a/affine/eros.h b/affine/eros.h new file mode 100755 index 0000000..6a76f58 --- /dev/null +++ b/affine/eros.h @@ -0,0 +1,64 @@ +#pragma once + +#include +#include +#include + +using namespace yarp::os; + +class EROSfromYARP +{ +public: + + ev::window input_port; + ev::EROS eros; + std::thread eros_worker; + cv::Rect eros_update_roi; + double dt_not_read_events{0}; + double tic{-1}; + double dur{0}; + int packet_events{0}; + + void setEROSupdateROI(cv::Rect roi) { + this -> eros_update_roi = roi; + } + + void erosUpdate() + { + while (!input_port.isStopping()) { + ev::info my_info = input_port.readAll(true); + tic = my_info.timestamp; + dur = my_info.duration; + packet_events = my_info.count; + + for(auto &v : input_port){ + if((v.x) > eros_update_roi.x && (v.x) < eros_update_roi.x + eros_update_roi.width && (v.y) > eros_update_roi.y && (v.y) < eros_update_roi.y + eros_update_roi.height) + eros.update(v.x, v.y); + } + + dt_not_read_events = 
input_port.stats_unprocessed().duration; + + } + } + +public: + bool start(cv::Size resolution, std::string sourcename, std::string portname, int k, double d) + { + eros.init(resolution.width, resolution.height, k, d); + + if (!input_port.open(portname)) + return false; + + yarp::os::Network::connect(sourcename, portname, "fast_tcp"); + + eros_worker = std::thread([this]{erosUpdate();}); + return true; + } + + void stop() + { + input_port.stop(); + eros_worker.join(); + } + +}; diff --git a/affine/main.cpp b/affine/main.cpp new file mode 100755 index 0000000..65917ca --- /dev/null +++ b/affine/main.cpp @@ -0,0 +1,283 @@ +#include +using namespace yarp::os; + +#include "eros.h" +#include "affine.h" +#include "ROScommunication.h" + +class affineTracking: public yarp::os::RFModule +{ +private: + + cv::Size img_size; + double period{0.01}; + double eros_k, eros_d; + double tau_latency{0}; + double recording_duration, elapsed_time{0}; + double translation{2.0}, angle{0.8}, pscale{1.05}, nscale{0.95}, scaling{0.01}; + bool run{false}; + double dt_warpings{0}, dt_comparison{0}, dt_eros{0}, toc_count{0}; + std::string filename; + std::deque< std::array > data_to_save; + std::ofstream fs; + + struct fake_latency{ + std::array state; + double tstamp; + }; + + std::deque fakeLat_queue; + + EROSfromYARP eros_handler; + affineTransforms affine_handler; + YarpToRos ros_publish; + std::thread computation_thread; + +public: + + bool configure(yarp::os::ResourceFinder& rf) override + { + // options and parameters + + eros_k = rf.check("eros_k", Value(9)).asInt32(); + eros_d = rf.check("eros_d", Value(0.5)).asFloat64(); + period = rf.check("period", Value(0.01)).asFloat64(); + tau_latency=rf.check("tau", Value(0.0)).asFloat64(); + recording_duration = rf.check("rec_time", Value(10000)).asFloat64(); + filename = rf.check("shape-file", Value("/usr/local/src/affine2dtracking/shapes/thin_star.png")).asString(); + + // module name + setName((rf.check("name", Value("/shape-position")).asString()).c_str()); + + yarp::os::Bottle& intrinsic_parameters = rf.findGroup("CAMERA_CALIBRATION"); + if (intrinsic_parameters.isNull()) { + yError() << "Wrong .ini file or [CAMERA_CALIBRATION] not present. 
Deal breaker."; + return false; + } + affine_handler.cam[affineTransforms::w] = intrinsic_parameters.find("w").asInt32(); + affine_handler.cam[affineTransforms::h] = intrinsic_parameters.find("h").asInt32(); + affine_handler.cam[affineTransforms::cx] = intrinsic_parameters.find("cx").asFloat32(); + affine_handler.cam[affineTransforms::cy] = intrinsic_parameters.find("cy").asFloat32(); + affine_handler.cam[affineTransforms::fx] = intrinsic_parameters.find("fx").asFloat32(); + affine_handler.cam[affineTransforms::fy] = intrinsic_parameters.find("fy").asFloat32(); + img_size = cv::Size(affine_handler.cam[affineTransforms::w], affine_handler.cam[affineTransforms::h]); + + yInfo() << "Camera Size:" << img_size.width << "x" << img_size.height; + + if (!eros_handler.start(img_size, "/atis3/AE:o", getName("/AE:i"), eros_k, eros_d)) { + yError() << "could not open the YARP eros handler"; + return false; + } + yInfo()<<"eros started"; + + + if (rf.check("data-file")) { + std::string ouputfilename = rf.find("data-file").asString(); + fs.open(ouputfilename); + if (!fs.is_open()) { + yError() << "Could not open output file" << filename; + return false; + } + } + + eros_handler.setEROSupdateROI(cv::Rect(0,0,640,480)); + + affine_handler.init(translation, angle, pscale, nscale, scaling); + affine_handler.initState(); + + affine_handler.loadTemplate(img_size, filename); + // affine_handler.createStaticTemplate(img_size, 30); + // affine_handler.squareTemplate(img_size); + + yInfo()<<"shape template created"; + + affine_handler.createAffines(translation, cv::Point(img_size.width/2,img_size.height/2), angle, pscale, nscale); + + affine_handler.create_maps(); + + // yInfo()<<"maps created"; + ros_publish.initPublisher(); + + computation_thread = std::thread([this]{tracking_loop();}); + + if(!run) yInfo() << "WARNING: press G to start tracking (--run)"; + + return true; + } + + double getPeriod() override{ + return period; + } + + bool updateModule() { + cv::Mat norm_mexican; + if (run){ + static double start_time = eros_handler.tic; + elapsed_time = eros_handler.tic - start_time; + + // cv::Mat intersection_mat; + // cv::bitwise_and(affine_handler.eros_filtered(affine_handler.roi_around_shape), affine_handler.rot_scaled_tr_template(affine_handler.roi_around_shape),intersection_mat); + // cv::Mat union_mat = affine_handler.eros_filtered(affine_handler.roi_around_shape)+affine_handler.rot_scaled_tr_template(affine_handler.roi_around_shape); + // double total_pixels = cv::countNonZero(union_mat); + + // double matches = cv::countNonZero(intersection_mat); + // double percentage = (100*matches/total_pixels); + + // yInfo()< recording_duration){ + yInfo()<<"recording duration reached"; + return false; + } + + return true; + } + + void tracking_loop() { + + double tic = yarp::os::Time::now(); + while (!isStopping()) { + + if (run){ + + double dT = yarp::os::Time::now() - tic; + tic += dT; + affine_handler.createDynamicTemplate(); + // yInfo()<<"template"; + affine_handler.setROI(); + // yInfo()<<"roi"; + affine_handler.updateAffines(); + // yInfo()<<"update affine"; + affine_handler.make_template(); + // yInfo()<<"mexican"; + // double tic_warpings = Time::now(); + affine_handler.createWarpings(); + // double toc_warpings = Time::now(); + // affine_handler.createMapWarpings(); + // yInfo()<<"remap"; + // double tic_eros = Time::now(); + double eros_time_before = eros_handler.tic; + affine_handler.setEROS(eros_handler.eros.getSurface()); + // double toc_eros= Time::now(); + // yInfo()<<"eros"; + // double 
tic_comparison = Time::now(); + affine_handler.performComparisons(); + // double toc_comparison= Time::now(); + // yInfo()<<"comp"; + affine_handler.updateStateAll(); + // yInfo()<<"state"; + eros_handler.setEROSupdateROI(affine_handler.roi_around_shape); + double eros_time_after = eros_handler.tic; + double eros_diff_time = eros_time_after-eros_time_before; + + if(tau_latency>0){ + fakeLat_queue.push_back({double(affine_handler.new_position.x), double(affine_handler.new_position.y), affine_handler.state[2], affine_handler.state[3], yarp::os::Time::now()}); + std::array current_state; + bool found_pos_sent=false; + while(fakeLat_queue.size()>0 && (yarp::os::Time::now()-fakeLat_queue.front().tstamp)>tau_latency){ + current_state = fakeLat_queue.front().state; + found_pos_sent = true; + fakeLat_queue.pop_front(); + } + if (found_pos_sent){ + ros_publish.publishTargetPos(img_size, current_state[0], current_state[1], current_state[2], current_state[3]); + data_to_save.push_back({elapsed_time, eros_handler.dur, double(eros_handler.packet_events), current_state[0], current_state[1], current_state[2], current_state[3], double(eros_handler.dt_not_read_events), eros_diff_time, dT, affine_handler.roi_around_shape.x, affine_handler.roi_around_shape.y, affine_handler.roi_around_shape.width, affine_handler.roi_around_shape.height}); + } + + }else{ + ros_publish.publishTargetPos(img_size, affine_handler.new_position.x, affine_handler.new_position.y, affine_handler.state[2], affine_handler.state[3]); + if (fs.is_open() && eros_handler.tic > 0) { + data_to_save.push_back({elapsed_time, eros_handler.dur, double(eros_handler.packet_events), double(affine_handler.new_position.x), double(affine_handler.new_position.y), affine_handler.state[2], affine_handler.state[3], double(eros_handler.dt_not_read_events), eros_diff_time, dT, affine_handler.roi_around_shape.x, affine_handler.roi_around_shape.y, affine_handler.roi_around_shape.width, affine_handler.roi_around_shape.height}); + } + } + + // this->dt_warpings = toc_warpings - tic_warpings; + // this->dt_comparison = toc_comparison - tic_comparison; + // this->dt_eros = toc_eros - tic_eros; + // this->toc_count++; + + // yInfo()<<"running"; + + } + + } + } + + bool interruptModule() override { + return true; + } + + bool close() override { + + yInfo() << "waiting for eros handler ... "; + eros_handler.stop(); + yInfo() << "waiting for computation thread ... 
"; + computation_thread.join(); + + if(fs.is_open()) + { + yInfo() << "Writing data"; + fs << "tr="< Date: Fri, 19 May 2023 14:19:04 +0200 Subject: [PATCH 2/4] code used for gaze following neuroeng workshop with camera on the helmet --- affine/visual_servoing.h | 334 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 334 insertions(+) create mode 100644 affine/visual_servoing.h diff --git a/affine/visual_servoing.h b/affine/visual_servoing.h new file mode 100644 index 0000000..4098ae5 --- /dev/null +++ b/affine/visual_servoing.h @@ -0,0 +1,334 @@ +#pragma once + +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +using namespace yarp::os; +using namespace yarp::sig; +using namespace std; + +class PID +{ + double Kp,Ki; + double integral; + +public: + // constructor + PID() : Kp(0.0), Ki(0.0), integral(0.0) { } + + // helper function to set up sample time and gains + void set(const double Kp, const double Ki) + { + this->Kp=Kp; + this->Ki=Ki; + } + + // compute the control command + double command(const double reference, const double feedback, const double Ts) + { + // the actual error between reference and feedback + double error=reference-feedback; + + // accumulate the error + integral+=error*Ts; + + // compute the PID output + return (Kp*error+Ki*integral); + } + + void reset() + { + integral = 0; + } +}; + +class headControl { + +private: + + yarp::dev::PolyDriver vel_driver, pos_driver; + yarp::dev::IEncoders *ienc; + yarp::dev::IControlMode *imod; + yarp::dev::IVelocityControl *ivel; + yarp::dev::IPositionControl *ipos; + yarp::dev::IControlLimits *ilim; + + int nAxes; + std::vector controllers; + std::vector velocity; + std::vector encs; + + int u_fixation; + int v_fixation; + +protected: + +public: + + headControl() : nAxes(6), u_fixation(0), v_fixation(0) {} + + bool initVelControl(int height, int width) + { + yarp::os::Property option; + option.put("device","remote_controlboard"); + option.put("remote","/icub/head"); + option.put("local","/vel_controller"); + + if (!vel_driver.open(option)) + { + yError()<<"Unable to open the device vel_driver"; + return false; + } + + // open the views + + if(!vel_driver.view(ienc)) { + yError() << "Driver does not implement encoder mode"; + return false; + } + if(!vel_driver.view(imod)) { + yError() << "Driver does not implement control mode"; + return false; + } + if(!vel_driver.view(ivel)) { + yError() << "Driver does not implement velocity mode"; + return false; + } + + // retrieve number of axes + int readAxes; + ienc->getAxes(&readAxes); + if(readAxes != nAxes) { + yError() << "Incorrect number of axes" << readAxes << nAxes; + return false; + } + + velocity.resize(nAxes, 0.0); + encs.resize(nAxes); + controllers.resize(nAxes); + for(int i = 0; i < nAxes; i++) + controllers[i] = new PID; + + // set up our controllers + controllers[0]->set(2.0, 0.0); //neck pitch + controllers[1]->set(0.0, 0.0); //neck roll + controllers[2]->set(2.0, 0.0); //neck yaw + + u_fixation = width / 2; + v_fixation = height / 2; + + //set velocity control mode + return setVelocityControl(); + + } + + bool initPosControl(){ + yarp::os::Property option; + option.put("device", "remote_controlboard"); + option.put("remote", "/icub/head"); + option.put("local", "/pos_controller"); + + pos_driver.open(option); + pos_driver.view(ipos); + pos_driver.view(imod); + pos_driver.view(ilim); + + return true; + } + + bool 
setVelocityControl() + { + int naxes; + ivel->getAxes(&naxes); + std::vector modes(naxes, VOCAB_CM_VELOCITY); + + imod->setControlModes(modes.data()); + + return true; + } + + void resetRobotHome(){ + + int naxes; + ipos->getAxes(&naxes); + std::vector modes(naxes, VOCAB_CM_POSITION); + std::vector vels(naxes, 10.); + std::vector accs(naxes, std::numeric_limits::max()); + std::vector poss(naxes, 0.); + poss[2]=0; + poss[0]=-23; + + imod->setControlModes(modes.data()); + ipos->setRefSpeeds(vels.data()); + ipos->setRefAccelerations(accs.data()); + ipos->positionMove(poss.data()); + + auto done = false; + while(!done) { + yarp::os::Time::delay(1.); + ipos->checkMotionDone(&done); + } + + setVelocityControl(); + } + + void scroll_yaw(){ + int naxes; + ipos->getAxes(&naxes); + std::vector modes(naxes, VOCAB_CM_POSITION); + std::vector vels(naxes, 20.); + std::vector accs(naxes, std::numeric_limits::max()); + std::vector poss(naxes, 0.); + poss[2]=-10; + poss[0]=5.977; + + imod->setControlModes(modes.data()); + ipos->setRefSpeeds(vels.data()); + ipos->setRefAccelerations(accs.data()); + ipos->positionMove(poss.data()); + + auto done = false; + while(!done) { + yarp::os::Time::delay(1.); + ipos->checkMotionDone(&done); + } + } + + void controlMono(int u, int v, double dt) + { + + double neck_tilt=controllers[0]->command(v_fixation,v, dt); // neck pitch + double neck_pan=controllers[2]->command(u_fixation,u, dt); // neck yaw + + // send commands to the robot head + velocity[0]=neck_tilt; // neck pitch + velocity[1]=0.0; // neck roll + velocity[2]=neck_pan; // neck yaw + velocity[3]=0.0; // eyes tilt + velocity[4]=0.0; // eyes vers + velocity[5]=0.0; // eyes verg + +// yInfo()<<"vel: "<velocityMove(0, neck_tilt); + //ivel->velocityMove(2, neck_pan); + ivel->velocityMove(velocity.data()); + +// double vel_tilt, vel_pan; +// ivel->getRefVelocity(0,&vel_tilt); +// ivel->getRefVelocity(2,&vel_pan); +// yInfo()<<"current vel"<getEncoders(encs.data()); + + double eyes_pan=-controllers[4]->command(u_fixation, ul, dt); + double eyes_tilt=controllers[3]->command(v_fixation, vl, dt); + if(encs[3] < -6 && (v_fixation - vl) < 0) { + eyes_tilt = 0.0; + controllers[3]->reset(); + } + if(encs[3] > 20 && (v_fixation - vl) > 0) { + eyes_tilt = 0.0; + controllers[3]->reset(); + } + double eyes_ver=-controllers[5]->command(0,ul-ur, dt); + + // feed-forward correction to reduce + // interplay between vergence and pan + eyes_pan-=eyes_ver/2.0; + + double neck_tilt=-controllers[0]->command(0.0,encs[3], dt); + double neck_pan=controllers[2]->command(0.0,encs[4], dt); + + // send commands to the robot head + velocity[0]=neck_tilt; // neck pitch + velocity[1]=0.0; // neck roll + velocity[2]=neck_pan; // neck yaw + velocity[3]=eyes_tilt; // eyes tilt + velocity[4]=eyes_pan; // eyes pan + velocity[5]=eyes_ver; // eyes vergence + ivel->velocityMove(velocity.data()); + } + + void controlReset() + { + for(int i = 0; i < nAxes; i++) { + controllers[i]->reset(); + velocity[i] = 0; + } + ivel->velocityMove(velocity.data()); + } + + double getJointPos(int joint_number){ + ienc->getEncoders(encs.data()); + return encs[joint_number]; + } + + void getJointLimits(int joint_num, double* joint_min, double* joint_max){ + + double min, max; + ilim->getLimits(joint_num, &min, &max); + + *joint_min = min; + *joint_max = max; + } + + void closeToLimit(int joint_number){ + + double joint_pos_min, joint_pos_max, current_joint; + ienc->getEncoders(encs.data()); + if (joint_number == 0){ + joint_pos_min=-27; joint_pos_max=20; + current_joint = 
encs[0]; + } + if (joint_number == 2){ + joint_pos_min=-44; joint_pos_max=44; + current_joint = encs[2]; + } + +// getJointLimits(joint_number, &joint_pos_min, &joint_pos_max); + + double error_min = current_joint - joint_pos_max; + double error_max = current_joint - joint_pos_min; + + double smallTh = 3; + + if (error_min < smallTh || error_max < smallTh){ + resetRobotHome(); + yInfo()<<"joints resetted"; + } + } + + + +}; + From 50964e2b710980b0076affe3693910b25fb3bd31 Mon Sep 17 00:00:00 2001 From: Luna Gava Date: Fri, 19 May 2023 14:49:45 +0200 Subject: [PATCH 3/4] removed ros communication --- affine/CMakeLists.txt | 8 -------- affine/main.cpp | 26 +------------------------- 2 files changed, 1 insertion(+), 33 deletions(-) diff --git a/affine/CMakeLists.txt b/affine/CMakeLists.txt index f76d2c1..8ff4a31 100755 --- a/affine/CMakeLists.txt +++ b/affine/CMakeLists.txt @@ -18,14 +18,6 @@ find_package(event-driven REQUIRED) add_executable(${PROJECT_NAME} main.cpp eros.h affine.h) -target_link_libraries(stimulus PRIVATE YARP::YARP_OS - YARP::YARP_init - YARP::YARP_sig - YARP::YARP_dev - YARP::YARP_math - ev::event-driven - ${OpenCV_LIBRARIES}) - target_link_libraries(${PROJECT_NAME} PRIVATE YARP::YARP_OS YARP::YARP_init YARP::YARP_sig diff --git a/affine/main.cpp b/affine/main.cpp index 65917ca..ea32479 100755 --- a/affine/main.cpp +++ b/affine/main.cpp @@ -3,7 +3,6 @@ using namespace yarp::os; #include "eros.h" #include "affine.h" -#include "ROScommunication.h" class affineTracking: public yarp::os::RFModule { @@ -30,7 +29,6 @@ class affineTracking: public yarp::os::RFModule EROSfromYARP eros_handler; affineTransforms affine_handler; - YarpToRos ros_publish; std::thread computation_thread; public: @@ -96,7 +94,6 @@ class affineTracking: public yarp::os::RFModule affine_handler.create_maps(); // yInfo()<<"maps created"; - ros_publish.initPublisher(); computation_thread = std::thread([this]{tracking_loop();}); @@ -200,27 +197,6 @@ class affineTracking: public yarp::os::RFModule eros_handler.setEROSupdateROI(affine_handler.roi_around_shape); double eros_time_after = eros_handler.tic; double eros_diff_time = eros_time_after-eros_time_before; - - if(tau_latency>0){ - fakeLat_queue.push_back({double(affine_handler.new_position.x), double(affine_handler.new_position.y), affine_handler.state[2], affine_handler.state[3], yarp::os::Time::now()}); - std::array current_state; - bool found_pos_sent=false; - while(fakeLat_queue.size()>0 && (yarp::os::Time::now()-fakeLat_queue.front().tstamp)>tau_latency){ - current_state = fakeLat_queue.front().state; - found_pos_sent = true; - fakeLat_queue.pop_front(); - } - if (found_pos_sent){ - ros_publish.publishTargetPos(img_size, current_state[0], current_state[1], current_state[2], current_state[3]); - data_to_save.push_back({elapsed_time, eros_handler.dur, double(eros_handler.packet_events), current_state[0], current_state[1], current_state[2], current_state[3], double(eros_handler.dt_not_read_events), eros_diff_time, dT, affine_handler.roi_around_shape.x, affine_handler.roi_around_shape.y, affine_handler.roi_around_shape.width, affine_handler.roi_around_shape.height}); - } - - }else{ - ros_publish.publishTargetPos(img_size, affine_handler.new_position.x, affine_handler.new_position.y, affine_handler.state[2], affine_handler.state[3]); - if (fs.is_open() && eros_handler.tic > 0) { - data_to_save.push_back({elapsed_time, eros_handler.dur, double(eros_handler.packet_events), double(affine_handler.new_position.x), double(affine_handler.new_position.y), 
affine_handler.state[2], affine_handler.state[3], double(eros_handler.dt_not_read_events), eros_diff_time, dT, affine_handler.roi_around_shape.x, affine_handler.roi_around_shape.y, affine_handler.roi_around_shape.width, affine_handler.roi_around_shape.height}); - } - } // this->dt_warpings = toc_warpings - tic_warpings; // this->dt_comparison = toc_comparison - tic_comparison; @@ -280,4 +256,4 @@ int main(int argc, char *argv[]) { /* run the module: runModule() calls configure first and, if successful, it then runs */ return affinetracking.runModule(rf); -} \ No newline at end of file +} From bcb146531b8bdaa0b565a82a6a918d9a84ef5103 Mon Sep 17 00:00:00 2001 From: Luna Gava Date: Fri, 19 May 2023 15:46:48 +0200 Subject: [PATCH 4/4] latest speed-up version --- affine/affine.h | 254 +++++++++++++++++++++++++++++++++--------------- affine/eros.h | 16 ++- affine/main.cpp | 155 ++++++++++++++++++----------- 3 files changed, 281 insertions(+), 144 deletions(-) diff --git a/affine/affine.h b/affine/affine.h index 08613c8..8170576 100755 --- a/affine/affine.h +++ b/affine/affine.h @@ -28,32 +28,34 @@ class affineTransforms std::array state; cv::Rect2d roi_around_shape, square; - cv::Mat initial_template, roi_template, roi_template_64f, roi_resized, mexican_template, mexican_template_64f; + cv::Mat initial_template, roi_template, roi_template_64f, roi_resized, mexican_template, mexican_resized, mexican_template_64f; cv::Mat eros_filtered, eros_tracked, eros_tracked_64f, eros_resized; cv::Mat rot_scaled_tr_template; - double translation, angle, pscale, nscale, scaling; - cv::Point initial_position, new_position; + double translation, angle, pscale, nscale; + cv::Point2d initial_position, new_position; cv::Point2d new_center; - int blur{11}; + int blur{7}; int gaussian_blur_eros{5}; - double template_scale{0.32}; //0.4 + double template_scale{0.4}; //0.4, 0.33 - cv::Size proc_size{cv::Size(100, 100)}; - cv::Rect proc_roi; + cv::Size proc_size{cv::Size(50, 50)}; + cv::Rect proc_roi, o_proc_roi; cv::Mat prmx, prmy, nrmx, nrmy; - - + std::array cam; public: - void init(double trans, double rot, double ps, double ns, double sc){ + void init(double trans, double rot, double ps, double ns){ - // proc_roi = cv::Rect(cv::Point(0, 0), proc_size); + roi_resized = cv::Mat(proc_size, CV_64F); + eros_resized = cv::Mat(proc_size, CV_64F); + mexican_resized = cv::Mat(proc_size, CV_64F); + o_proc_roi = proc_roi = cv::Rect(cv::Point(0, 0), proc_size); for(auto &affine : affine_info) { affine.A = cv::Mat::zeros(2,3, CV_64F); - // affine.warped_img = cv::Mat::zeros(proc_size, CV_64F); + affine.warped_img = cv::Mat::zeros(proc_size, CV_64F); } prmx = cv::Mat::zeros(proc_size, CV_32F); @@ -65,7 +67,6 @@ class affineTransforms this->angle = rot; this->pscale = ps; this->nscale = ns; - this->scaling = sc; } void initState(){ @@ -115,8 +116,9 @@ class affineTransforms affine_info[7].A = cv::getRotationMatrix2D(new_center, 0, nscale); } - void setROI(int buffer = 10){ + void setROI(int buffer = 0){ + roi_resized = 0; static cv::Rect2d full_roi = cv::Rect2d(cv::Point(0, 0), rot_scaled_tr_template.size()); roi_around_shape = cv::boundingRect(rot_scaled_tr_template); @@ -135,8 +137,8 @@ class affineTransforms roi_template = rot_scaled_tr_template(roi_around_shape); roi_template.convertTo(roi_template_64f, CV_64F); // is 6 type CV_64FC1 - // cv::resize(roi_template_64f, roi_resized, proc_roi.size(), 0, 0, cv::INTER_CUBIC); - + cv::resize(roi_template_64f, roi_resized(proc_roi), proc_roi.size(), 0, 0, cv::INTER_LINEAR); + 
make_template(roi_resized, mexican_resized); } cv::Mat updateTrMat(double translation_x, double translation_y){ @@ -163,41 +165,62 @@ class affineTransforms } + void squareTemplate (const cv::Size &res){ + initial_template = cv::Mat::zeros(res.height, res.width, CV_8UC1); + square = cv::Rect(306,263, 87, 93); + cv::rectangle(initial_template, square, cv::Scalar(255),1,8,0); + + initial_position.x = square.x + square.width/2; + initial_position.y = square.y + square.height/2; + } + + void starTemplate(const cv::Size &res){ + + initial_template = cv::Mat::zeros(res.height, res.width, CV_8UC1); + + cv::line(initial_template, cv::Point(327,172), cv::Point(336,204), cv::Scalar(255),1,8,0); + cv::line(initial_template, cv::Point(336,204), cv::Point(367,208), cv::Scalar(255),1,8,0); + cv::line(initial_template, cv::Point(367,208), cv::Point(341,226), cv::Scalar(255),1,8,0); + cv::line(initial_template, cv::Point(341,226), cv::Point(348,258), cv::Scalar(255),1,8,0); + cv::line(initial_template, cv::Point(348,258), cv::Point(324,237), cv::Scalar(255),1,8,0); + cv::line(initial_template, cv::Point(324,237), cv::Point(296,256), cv::Scalar(255),1,8,0); + cv::line(initial_template, cv::Point(296,256), cv::Point(308,225), cv::Scalar(255),1,8,0); + cv::line(initial_template, cv::Point(308,225), cv::Point(283,203), cv::Scalar(255),1,8,0); + cv::line(initial_template, cv::Point(283,203), cv::Point(316,202), cv::Scalar(255),1,8,0); + cv::line(initial_template, cv::Point(316,202), cv::Point(327,172), cv::Scalar(255),1,8,0); + + initial_position.x = 325; + initial_position.y = 219; + } + void loadTemplate(const cv::Size &res, std::string filename){ cv::Mat shape_image = cv::imread(filename, 0); - cv::resize(shape_image, shape_image, cv::Size(template_scale*shape_image.cols, template_scale*shape_image.rows), 0, 0, cv::INTER_LINEAR); + cv::resize(shape_image, shape_image, cv::Size(template_scale*shape_image.cols, template_scale*shape_image.rows), 0, 0, cv::INTER_CUBIC); + + // yInfo()<(0,2) += (state[0]); + rotMatfunc.at(1,2) += (state[1]); + cv::warpAffine(initial_template, rot_scaled_tr_template, rotMatfunc, initial_template.size()); + // cv::warpAffine(rot_scaled_template, rot_scaled_tr_template, rotMatfunc, rot_scaled_template.size()); new_position = cv::Point2d(initial_position.x+state[0],initial_position.y+state[1]); } - void make_template() { + void make_template(const cv::Mat &input, cv::Mat &output) { static cv::Mat canny_img, f, pos_hat, neg_hat; static cv::Size pblur(blur, blur); static cv::Size nblur(2*blur-1, 2*blur-1); static double minval, maxval; - // cv::GaussianBlur(roi_resized, pos_hat, pblur, 0); - // cv::GaussianBlur(roi_resized, neg_hat, nblur, 0); - cv::GaussianBlur(roi_template_64f, pos_hat, pblur, 0); - cv::GaussianBlur(roi_template_64f, neg_hat, nblur, 0); - mexican_template = pos_hat - neg_hat; + cv::GaussianBlur(input, pos_hat, pblur, 0); + cv::GaussianBlur(input, neg_hat, nblur, 0); + // cv::GaussianBlur(roi_template_64f, pos_hat, pblur, 0); + // cv::GaussianBlur(roi_template_64f, neg_hat, nblur, 0); + output = pos_hat - neg_hat; - cv::minMaxLoc(mexican_template, &minval, &maxval); + cv::minMaxLoc(output, &minval, &maxval); double scale_factor = 1.0 / (2 * std::max(fabs(minval), fabs(maxval))); - mexican_template *= scale_factor; + output *= scale_factor; } void createWarpings(){ @@ -249,26 +277,49 @@ class affineTransforms void createMapWarpings(){ - mexican_template.convertTo(mexican_template_64f, CV_64F); // is 6 type CV_64FC1 + // std::vector affine_matrices; + // std::vector 
couple_matrix; + + // cv::Mat concat_affines; + mexican_resized.convertTo(mexican_template_64f, CV_64F); // is 6 type CV_64FC1 for (int affine = 0; affine < affine_info.size()-1; affine++) { cv::remap(mexican_template_64f, affine_info[affine].warped_img, affine_info[affine].rmp, affine_info[affine].rmsp, cv::INTER_LINEAR); - cv::imshow("affine remap" + std::to_string(affine), affine_info[affine].warped_img); + // affine_matrices.push_back(affine_info[affine].warped_img); + // cv::imshow("affine remap" + std::to_string(affine), affine_info[affine].warped_img); } affine_info[8].warped_img = mexican_template_64f; - + // for (int i=0; i > data_to_save; + std::deque< std::array > data_to_save; std::ofstream fs; + std::vector gt_values; + bool gt_sending{false}; + int gt_index{0}; struct fake_latency{ std::array state; @@ -33,16 +40,48 @@ class affineTracking: public yarp::os::RFModule public: + std::vector readFromCsv(std::string fname){ + + std::vector> content; + std::vector times, gt_values; + std::string line, word, ts, gt; + + std::fstream file (fname, std::ios::in); + if(file.is_open()) + { + while(std::getline(file, line)) + { + std::stringstream str(line); + + getline(str, ts, ' '); + times.push_back(std::stod(ts)); + getline(str, gt, '\n'); + gt_values.push_back(std::stod(gt)); + + } + } + else + std::cout<<"Could not open the file\n"; + + // for(int i=0;idt_eros = toc_eros - tic_eros; // this->toc_count++; - // yInfo()<<"running"; + // yInfo()<<"dt"<