diff --git a/Makefile b/Makefile
index fe6a37945590a97edae19fe8e62a8b5d93a7a6c6..ed78d6601f002c8cb4a1f243f4a7f2f71feb23a5 100644
--- a/Makefile
+++ b/Makefile
@@ -1,8 +1,8 @@
-all:
+lib:
 	$(MAKE) -C src
 	$(MAKE) -C build
 
-app:
+apps:
 	$(MAKE) -C app
 
 clean:
diff --git a/app/Makefile b/app/Makefile
index cd2d75d8a17a31a2bcee1f728915e003addd762d..92c0ae9da88c132ef4d5a687801cd169a1395e79 100644
--- a/app/Makefile
+++ b/app/Makefile
@@ -1,9 +1,10 @@
 LIBPATH=../../build
 SUBDIR=SandboxSetup
+APPS=SandboxSetup/SandboxSetup
 
 .PHONY: genMakefile genExe
 
-all: 
+all:
 	-$(MAKE) genMakefile
 	-$(MAKE) genExe
 
diff --git a/app/SandboxSetup/croppingmask.cpp b/app/SandboxSetup/croppingmask.cpp
index cf84ff2556a3cd567052ecb2cb71df69fcdc579e..8661153ce877e853b8dfdfab2e334f9406bec98a 100644
--- a/app/SandboxSetup/croppingmask.cpp
+++ b/app/SandboxSetup/croppingmask.cpp
@@ -30,12 +30,10 @@ void CroppingMask::valideRoutine(){
 
     timer->stop();
     setup->getCamera()->capture();
-    cv::Size s = setup->getCamera()->getDepthFrame().size();
-    cv::Point center(s.width / 2, s.height / 2);
     std::vector<cv::Point> rectPoints = getRectPoints();
+    cv::Point2i center = setup->getCenterOfQuadrilateral(rectPoints);
     cv::Mat depthFrame = setup->getCamera()->getDepthFrame();
 
-    // TODO : center should be defined by the quadrilateral from rectPoints
     setup->setupAdjustMatrix(rectPoints, center);
     setup->setupCroppingMask(rectPoints);
     setup->getProjection()->setDistanceTopSandbox(depthFrame.at<float>(center));
diff --git a/inc/beamer.h b/inc/beamer.h
index 18328202e18d89d4dbaddbd1960fc3a0cf970fd2..e1395a582dc39bb7e9d0a0380f2fa4265360c169 100644
--- a/inc/beamer.h
+++ b/inc/beamer.h
@@ -34,6 +34,7 @@ class Beamer{
         FrameProcessProfil* getProfil(){ return profil; };
         void setProfil(FrameProcessProfil p){ profil->setProfil(p); };
 
+        int LineLineIntersect( cv::Point3d p1, cv::Point3d p2, cv::Point3d p3, cv::Point3d p4, cv::Point3d *pa, cv::Point3d *pb, double *mua, double *mub );
         cv::Mat editContrast(cv::Mat image, double contrast, int brightness);
         cv::Point3f deprojectPixel(cv::Point2i circle, cv::Mat *depth, Camera *camera);
         std::vector<cv::Point2i> getCrossList();
diff --git a/inc/camera.h b/inc/camera.h
index fd3998f9ff9184446be812f79e1771e2cdc5a5c1..58491ffc3e5cee352fb8c6f6fd26ea7baa0ddd77 100644
--- a/inc/camera.h
+++ b/inc/camera.h
@@ -28,13 +28,8 @@ class Camera{
         ~Camera();
 
         // return values from depth matrix to real world (matrix of floats in meter)
-        cv::Mat getDepthFrame(){
-            cv::Mat meters;
-            cv::Mat values = cv::Mat(cv::Size(depth_frame->get_width(), depth_frame->get_height()), CV_16UC1, (void *)depth_frame->get_data(), cv::Mat::AUTO_STEP);
-            values.convertTo(meters,CV_32FC1, depth_scale);
-            return meters;
-        };
-        cv::Mat getColorFrame(){ return cv::Mat(cv::Size(color_frame->get_width(), color_frame->get_height()), CV_8UC3, (void *)color_frame->get_data(), cv::Mat::AUTO_STEP); };
+        cv::Mat getDepthFrame();
+        cv::Mat getColorFrame();
         void setCroppingMask(cv::Rect mask){ croppingMask = mask; };
         cv::Rect getCroppingMask(){ return croppingMask; };
         
diff --git a/inc/projection.h b/inc/projection.h
index 08dec60af6c7c78bd9a7557ad54f439dee8cb002..c85b20d56f725c1123f8eef62ec1e730db54df00 100644
--- a/inc/projection.h
+++ b/inc/projection.h
@@ -9,10 +9,16 @@ class Projection{
     private:
         cv::Mat adjustingMatrix;
         float distanceTopSandbox;
+        // Buffer for the built virtual frame, scaled to n * depth_frame.size during the building process
+        cv::Mat_<cv::Vec3b> resized_dst;
+        // Buffer containing each pixel's new location when deprojected to the beamer's POV
+        cv::Mat_<cv::Point2i> deprojectMap;
+        // Buffer indicating where in the source frame each output pixel should be taken from
+        cv::Mat_<cv::Point2i> frameMap;
 
         void deprojectPixelsFromDepth(cv::Mat_<float> &depth, cv::Rect mask, Camera *camera, cv::Point3f beamer_pos, cv::Mat_<cv::Point2i> &deprojectMap);
-        void filterLowestDeprojectedPoints(cv::Mat_<float> &depth, cv::Mat_<cv::Point2i> &deprojectMap, cv::Mat_<cv::Point2i> &frameMapMask);
-        void buildFrame(cv::Mat_<float> &depth, cv::Mat_<cv::Point2i> &frameMapMask, cv::Mat_<cv::Vec3b> &src, cv::Mat_<cv::Vec3b> &dst);
+        void filterLowestDeprojectedPoints(cv::Mat_<float> &depth, cv::Mat_<cv::Point2i> &deprojectMap, cv::Mat_<cv::Point2i> &frameMap);
+        void buildFrame(cv::Mat_<float> &depth, cv::Mat_<cv::Point2i> &frameMap, cv::Mat_<cv::Vec3b> &src, cv::Mat_<cv::Vec3b> &dst);
         cv::Point2i findMatchingPixel(int i, int j, float z, Camera *camera, cv::Point3f beamer_pos);
         void copyPixelsInto(cv::Point2i pixel_dst, cv::Mat_<cv::Vec3b> &dst, cv::Point2i pixel_src, cv::Mat_<cv::Vec3b> &src, cv::Mat_<float> &depth);
         cv::Size getMatchingSize(cv::Mat &src, cv::Mat &base);
diff --git a/inc/sandbox.h b/inc/sandbox.h
index 3368f4a3fbb9b90a0cea3a6141c635c93878720f..02828dbd5304e80478c9db5215c0ae625c75f01f 100644
--- a/inc/sandbox.h
+++ b/inc/sandbox.h
@@ -27,6 +27,7 @@ class Sandbox{
         cv::Mat_<cv::Vec3b> getColorFrame();
         cv::Mat_<float> getDepthFrame();
         cv::Mat_<cv::Vec3b> adjustProjection(cv::Mat_<cv::Vec3b> frame);
+        cv::Mat_<cv::Vec3b> adjustProjection(cv::Mat_<cv::Vec3b> frame, cv::Mat_<float> depth);
         int loadConfig();
         int loadConfigFrom(char *path);
     
diff --git a/inc/sandboxSetup.h b/inc/sandboxSetup.h
index 94cbf17a797db29dd46f9c1a5962bba5c7f8dc22..967a75c40c2d30d3c984282db89a86774bab9608 100644
--- a/inc/sandboxSetup.h
+++ b/inc/sandboxSetup.h
@@ -14,6 +14,8 @@ class SandboxSetup{
         Camera *camera;
         Beamer *beamer;
 
+        std::vector<std::vector<cv::Point2i>> getTriangles(std::vector<cv::Point2i> rectPoints);
+        std::vector<cv::Point2i> getCentroids(std::vector<std::vector<cv::Point2i>> triangles);
         double toDegrees(double radians);
 
     public:
@@ -35,6 +37,7 @@ class SandboxSetup{
         int loadCroppingMask();
 
         // edit variables of config => not persistant
+        cv::Point2i getCenterOfQuadrilateral(std::vector<cv::Point> rectPoints);
         void setupAdjustMatrix(std::vector<cv::Point> rectPoints, cv::Point center);
         void setupCroppingMask(std::vector<cv::Point> rectPoints);
 };
diff --git a/src/components/beamer.cpp b/src/components/beamer.cpp
index 7f92c3bcdf76e277293949d37c4eeb2ca598518a..ac12d434b550b9eb3a6941985814220e46f458c6 100644
--- a/src/components/beamer.cpp
+++ b/src/components/beamer.cpp
@@ -151,7 +151,7 @@ typedef struct
     double x, y, z;
 } XYZ;
 
-int LineLineIntersect(
+int Beamer::LineLineIntersect(
     cv::Point3d p1, cv::Point3d p2, cv::Point3d p3, cv::Point3d p4, cv::Point3d *pa, cv::Point3d *pb,
     double *mua, double *mub)
 {
diff --git a/src/components/camera.cpp b/src/components/camera.cpp
index 4e90345556492df6156f2c6625ccf8a01641ce49..e786cbae4094ab9284fa6e142eabb7a985ce4c9d 100644
--- a/src/components/camera.cpp
+++ b/src/components/camera.cpp
@@ -30,6 +30,20 @@ Camera::~Camera(){
 */
 
 
+cv::Mat Camera::getDepthFrame(){
+    static cv::Mat values = cv::Mat(depth_frame->get_height(), depth_frame->get_width(), CV_16UC1);  // raw 16-bit depth, buffer reused across calls
+    static cv::Mat meters = cv::Mat(depth_frame->get_height(), depth_frame->get_width(), CV_32FC1);  // depth in meters, buffer reused across calls
+    values.data = (uchar*)depth_frame->get_data();    // point the header at the current frame's data
+    values.convertTo(meters, CV_32FC1, depth_scale);  // convert raw depth units to meters
+    return meters;
+}
+
+cv::Mat Camera::getColorFrame(){
+    return cv::Mat(color_frame->get_height(), color_frame->get_width(), CV_8UC3, (void *)color_frame->get_data());
+}
+
+
+
 int Camera::start(){
     
     // check for a device available
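// A usage caveat for the reworked Camera::getDepthFrame() (a sketch, assuming a started
// Camera instance; the include path is hypothetical): the returned cv::Mat shares the
// static `meters` buffer, so each call overwrites the data behind previously returned
// frames; clone() a frame that must outlive the next call.

#include <opencv2/core.hpp>
// #include "camera.h"

void depthFrameUsage(Camera &camera){
    cv::Mat live = camera.getDepthFrame();            // header over the shared static buffer
    cv::Mat kept = camera.getDepthFrame().clone();    // deep copy, survives the next call
    // after another capture/getDepthFrame(), `live` shows new data while `kept` is unchanged
}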
diff --git a/src/components/projection.cpp b/src/components/projection.cpp
index 0b2aa97a2647bc6260ffd841a0a7c25f3ed34fd9..412dc9048095ccc96cb2cfe1c9fafe26443eb9cb 100644
--- a/src/components/projection.cpp
+++ b/src/components/projection.cpp
@@ -10,7 +10,6 @@ Projection::Projection(){
     distanceTopSandbox = 1.0f;
 }
 
-
 cv::Point2i Projection::rotatePixel(cv::Point2i pixel){
 
     cv::Mat tmp = (cv::Mat_<cv::Vec2f>(1, 1) << cv::Vec2f(pixel.x, pixel.y));
@@ -31,23 +30,28 @@ cv::Point2i Projection::revertRotatePixel(cv::Point2i pixel){
 // Adjust the projected frame with the topology from the camera to the beamer POV
 void Projection::adjustFrame(cv::Mat_<float> depth, cv::Mat_<cv::Vec3b> src, cv::Mat_<cv::Vec3b> &dst, Camera *camera, cv::Point3f beamer_pos){
 
+    if(deprojectMap.empty() || deprojectMap.size() != depth.size()){
+        if(!deprojectMap.empty()){
+            deprojectMap.release();
+            frameMap.release();
+            resized_dst.release();
+        }
+        deprojectMap.create(depth.rows, depth.cols);
+        frameMap.create(depth.rows, depth.cols);
+        resized_dst.create(getMatchingSize(dst, depth));
+    }
+
+    deprojectMap = cv::Point2i(-1,-1);
+    frameMap = cv::Point2i(-1,-1);
+
     // resize the frames to be a multiple of the camera size :
     //      src.size = n * camera.depth.size , where n is uint > 0
-    static cv::Mat_<cv::Vec3b> resized_dst = cv::Mat_<cv::Vec3b>(getMatchingSize(dst, depth));
     cv::resize(dst, resized_dst, resized_dst.size());
     cv::resize(src, src, resized_dst.size());
-   
-    static cv::Mat_<cv::Point2i> deprojectMap = cv::Mat_<cv::Point2i>(depth.size());
-    deprojectMap = cv::Point2i(-1,-1);
-
-    static cv::Mat_<cv::Point2i> frameMapMask = cv::Mat_<cv::Point2i>(depth.size(), cv::Point2i(-1,-1));
-    frameMapMask = cv::Point2i(-1,-1);
 
     deprojectPixelsFromDepth(depth, camera->getCroppingMask(), camera, beamer_pos, deprojectMap);
-
-    filterLowestDeprojectedPoints(depth, deprojectMap, frameMapMask);
-
-    buildFrame(depth, frameMapMask, src, resized_dst);
+    filterLowestDeprojectedPoints(depth, deprojectMap, frameMap);
+    buildFrame(depth, frameMap, src, resized_dst);
 
     cv::resize(resized_dst, dst, dst.size());
     cv::warpAffine(dst, dst, adjustingMatrix, dst.size());
@@ -84,7 +88,7 @@ void Projection::deprojectPixelsFromDepth(cv::Mat_<float> &depth, cv::Rect mask,
 }
 
 
-void Projection::filterLowestDeprojectedPoints(cv::Mat_<float> &depth, cv::Mat_<cv::Point2i> &deprojectMap, cv::Mat_<cv::Point2i> &frameMapMask){
+void Projection::filterLowestDeprojectedPoints(cv::Mat_<float> &depth, cv::Mat_<cv::Point2i> &deprojectMap, cv::Mat_<cv::Point2i> &frameMap){
 
     for (int j = 0; j < deprojectMap.rows; j++){
         for (int i = 0; i < deprojectMap.cols; i++){
@@ -97,26 +101,26 @@ void Projection::filterLowestDeprojectedPoints(cv::Mat_<float> &depth, cv::Mat_<
                 (0 <= deprojectedPixel.y && deprojectedPixel.y < depth.rows) ){
                 
                 // check and keep the highest point at the location pointed by pixel
-                cv::Point2i currentDepthPixel = frameMapMask.at<cv::Point2i>(deprojectedPixel);
+                cv::Point2i currentDepthPixel = frameMap.at<cv::Point2i>(deprojectedPixel);
                 if( (0 <= currentDepthPixel.x && currentDepthPixel.x < depth.cols) &&
                     (0 <= currentDepthPixel.y && currentDepthPixel.y < depth.rows) ){
                         if(depth.at<float>(currentDepthPixel) <= depth.at<float>(j,i)){
                             highestDepthPixel = currentDepthPixel;
                         }
                 }
-                frameMapMask.at<cv::Point2i>(deprojectedPixel) = highestDepthPixel;
+                frameMap.at<cv::Point2i>(deprojectedPixel) = highestDepthPixel;
             }
         }
     }
 }
 
 
-void Projection::buildFrame(cv::Mat_<float> &depth, cv::Mat_<cv::Point2i> &frameMapMask, cv::Mat_<cv::Vec3b> &src, cv::Mat_<cv::Vec3b> &dst){
+void Projection::buildFrame(cv::Mat_<float> &depth, cv::Mat_<cv::Point2i> &frameMap, cv::Mat_<cv::Vec3b> &src, cv::Mat_<cv::Vec3b> &dst){
     
-    for (int j = 0; j < frameMapMask.rows; j++){
-        for (int i = 0; i < frameMapMask.cols; i++){
+    for (int j = 0; j < frameMap.rows; j++){
+        for (int i = 0; i < frameMap.cols; i++){
 
-            cv::Point2i pixel_src = frameMapMask.at<cv::Point2i>(j,i);
+            cv::Point2i pixel_src = frameMap.at<cv::Point2i>(j,i);
             cv::Point2i pixel_dst = cv::Point2i(i,j);
 
             if( (0<=pixel_src.x && pixel_src.x<depth.cols) && (0<=pixel_src.y && pixel_src.y<depth.rows) ){
@@ -165,6 +169,9 @@ void Projection::copyPixelsInto(cv::Point2i pixel_dst, cv::Mat_<cv::Vec3b> &dst,
     C : Camera position
     B : Beamer position
     P : Point computed by camera depth
+    V : Point adjusted to plan
+    A : Point of the right-angle triangle PAB
+    E : Point of the right-angle triangle VEP
     
     Where
         CP : distance from camera to point (value of depth_frame)
diff --git a/src/lib/sandbox.cpp b/src/lib/sandbox.cpp
index 477edfa481d8dda1508883507de22d3452fc58f9..f5747f0a20b99ede034b607ec5ae2d97fa79eba1 100644
--- a/src/lib/sandbox.cpp
+++ b/src/lib/sandbox.cpp
@@ -38,13 +38,10 @@ cv::Mat_<float> Sandbox::getDepthFrame(){
     return camera->getDepthFrame()(camera->getCroppingMask());
 }
 
-
-cv::Mat_<cv::Vec3b> Sandbox::adjustProjection(cv::Mat_<cv::Vec3b> frame){
+cv::Mat_<cv::Vec3b> Sandbox::adjustProjection(cv::Mat_<cv::Vec3b> frame, cv::Mat_<float> depth){
     
-    captureFrame();
-    cv::Mat_<float> depth = getDepthFrame();
     static cv::Mat_<cv::Vec3b> imageCalibrate = cv::Mat_<cv::Vec3b>(cv::Size(beamer->getWidth(), beamer->getHeight()));
-    imageCalibrate =  cv::Vec3b(0, 0, 0);
+    imageCalibrate = cv::Vec3b(0, 0, 0);
     projection->adjustFrame(depth, frame, imageCalibrate, camera, beamer->getPosition());
 
     // frame after process
@@ -54,6 +51,13 @@ cv::Mat_<cv::Vec3b> Sandbox::adjustProjection(cv::Mat_<cv::Vec3b> frame){
     return imageCalibrate;
 }
 
+cv::Mat_<cv::Vec3b> Sandbox::adjustProjection(cv::Mat_<cv::Vec3b> frame){
+    
+    captureFrame();
+    cv::Mat_<float> depth = getDepthFrame();
+    return adjustProjection(frame, depth);
+}
+
 int Sandbox::loadConfigFrom(char *path){
 
     int err = SandboxConfig::loadAdjustingMatrixFrom(path, projection);
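// A minimal sketch of the new Sandbox::adjustProjection(frame, depth) overload (assuming an
// initialized Sandbox that has already captured a frame; the include path is hypothetical):
// passing the depth explicitly lets one captured depth frame drive several projections,
// while the one-argument overload keeps the old capture-then-project behaviour.

#include <opencv2/core.hpp>
// #include "sandbox.h"

void projectTwice(Sandbox &sandbox, cv::Mat_<cv::Vec3b> frame1, cv::Mat_<cv::Vec3b> frame2){
    cv::Mat_<float> depth = sandbox.getDepthFrame();                     // depth from the last capture
    cv::Mat_<cv::Vec3b> out1 = sandbox.adjustProjection(frame1, depth);  // both calls share one depth frame
    cv::Mat_<cv::Vec3b> out2 = sandbox.adjustProjection(frame2, depth);
    // ... display out1 / out2 on the beamer
}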
diff --git a/src/lib/sandboxSetup.cpp b/src/lib/sandboxSetup.cpp
index 6b969a39842eb2a2e3ce60a95be445d9e93dda42..e6226e593857e129459e54b8a8494ac6b8b6d3cb 100644
--- a/src/lib/sandboxSetup.cpp
+++ b/src/lib/sandboxSetup.cpp
@@ -62,6 +62,38 @@ int SandboxSetup::loadFrameProcessProfil(){
     return loadFrameProcessProfilFrom(defaultConfigFilePath);
 }
 
+/*
+    Get the centroid of a quadrilateral
+    source : http://jwilson.coe.uga.edu/EMT668/EMT668.Folders.F97/Patterson/EMT%20669/centroid%20of%20quad/Centroid.html
+*/
+cv::Point2i SandboxSetup::getCenterOfQuadrilateral(std::vector<cv::Point> rectPoints){
+    
+    std::vector<std::vector<cv::Point2i>> triangles = getTriangles(rectPoints);
+    std::vector<cv::Point2i> centroids = getCentroids(triangles);
+
+    /*
+        Intersect the line joining centroids 0 and 2 with the line joining centroids 1 and 3,
+        i.e. the centroids of the triangle pairs sharing each diagonal:  Pa = P1 + mua (P2 - P1),  Pb = P3 + mub (P4 - P3)
+    */
+    cv::Point3d pa;
+    cv::Point3d pb;
+    double mua;
+    double mub;
+    beamer->LineLineIntersect( cv::Point3d(centroids.at(0).x, centroids.at(0).y, 0),
+                               cv::Point3d(centroids.at(2).x, centroids.at(2).y, 0),
+                               cv::Point3d(centroids.at(1).x, centroids.at(1).y, 0),
+                               cv::Point3d(centroids.at(3).x, centroids.at(3).y, 0),
+                               &pa, &pb, &mua, &mub );
+
+    // pa and pb coincide here (all points lie in the z = 0 plane); average them as a safeguard
+    cv::Point2i center;
+    center.x = (pa.x + pb.x) / 2;
+    center.y = (pa.y + pb.y) / 2;
+
+    return center;
+}
+
+
 /*  Assuming points positions are :
  *      pts[0] : top left
  *      pts[1] : bottom left
@@ -79,6 +111,7 @@ void SandboxSetup::setupAdjustMatrix(std::vector<cv::Point> rectPoints, cv::Poin
     projection->setAdjustingMatrix(matRotation);
 }
 
+
 /*  Assuming points positions are :
  *      pts[0] : top left
  *      pts[1] : bottom left
@@ -105,6 +138,47 @@ void SandboxSetup::setupCroppingMask(std::vector<cv::Point2i> rectPoints){
 //
 
 
+/*
+    Get the 4 triangles formed by consecutive corners (i, i+1, i+2) of the quadrilateral
+*/
+std::vector<std::vector<cv::Point2i>> SandboxSetup::getTriangles(std::vector<cv::Point2i> rectPoints){
+    
+    std::vector<std::vector<cv::Point2i>> triangles;
+    // one list of corners per triangle
+    std::vector<cv::Point2i> lst[4];
+
+    // 4 triangles in the quadrilateral
+    for (int i=0; i<4; i++){
+        // corners in the triangle
+        for(int j=0; j<3; j++){
+            lst[i].push_back(rectPoints.at( (i+j)%rectPoints.size() ));
+        }
+        triangles.push_back(lst[i]);
+    }
+
+    return triangles;
+}
+
+/*
+    Get the centroid of each of the 4 triangles
+    source : https://www.khanacademy.org/math/geometry-home/triangle-properties/medians-centroids/v/triangle-medians-and-centroids
+*/
+std::vector<cv::Point2i> SandboxSetup::getCentroids(std::vector<std::vector<cv::Point2i>> triangles){
+    
+    std::vector<cv::Point2i> centroids;
+
+    // the centroid of a triangle is the average of its 3 corner coordinates
+    for(int i=0; i<(int)triangles.size(); i++){
+        std::vector<cv::Point2i> tr = triangles.at(i);
+        cv::Point2i center;
+        center.x = (tr.at(0).x + tr.at(1).x + tr.at(2).x) / 3;
+        center.y = (tr.at(0).y + tr.at(1).y + tr.at(2).y) / 3;
+        centroids.push_back(center);
+    }
+
+    return centroids;
+}
+
 double SandboxSetup::toDegrees(double radians){
     return radians * (180.0 / M_PI);
 }
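// A standalone check of the centroid-of-quadrilateral construction above (a sketch,
// independent of the Beamer/SandboxSetup classes): build the centroid of each triangle
// of consecutive corners, then intersect the line joining centroids 0-2 with the line
// joining centroids 1-3. For a 100x100 square the expected result is (50, 50).

#include <opencv2/core.hpp>
#include <iostream>
#include <vector>

// intersection of the infinite lines (a1,a2) and (b1,b2), assuming they are not parallel
static cv::Point2d intersectLines(cv::Point2d a1, cv::Point2d a2, cv::Point2d b1, cv::Point2d b2){
    cv::Point2d r = a2 - a1, s = b2 - b1;
    double denom = r.x * s.y - r.y * s.x;                               // 0 when the lines are parallel
    double t = ((b1.x - a1.x) * s.y - (b1.y - a1.y) * s.x) / denom;
    return a1 + t * r;
}

int main(){
    std::vector<cv::Point2d> q = { {0,0}, {0,100}, {100,100}, {100,0} };   // corners in cyclic order
    std::vector<cv::Point2d> c;
    for (int i = 0; i < 4; i++)                                            // centroid of corners i, i+1, i+2
        c.push_back((q[i] + q[(i+1)%4] + q[(i+2)%4]) * (1.0/3.0));
    cv::Point2d center = intersectLines(c[0], c[2], c[1], c[3]);           // pair the triangles sharing a diagonal
    std::cout << center << std::endl;                                      // prints [50, 50]
    return 0;
}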