#include "Behaviors/Demos/Navigation/PilotDemo.h"
#define SIDE_SIZE 500
#define VISION_LENGTH 2000
#define BOUNDARY 2.0


$nodeclass Explore {
	$provide std::vector<DualCoding::Shape<LineData> > walls;
	$provide std::vector<DualCoding::Shape<PolygonData> > squares;
	$provide int targetX;
	$provide int targetY;
	$provide float targetHeading;
	$provide Point targetPoint;

	enum choice {moveNow, stopMotion};

	$nodeclass InitWorld : VisualRoutinesStateNode : doStart {
		// One-time world setup: configure the world sketch-space transform and
		// allocate the persistent bookkeeping sketches used by later states.
		const float scale = 0.05f;  // world-to-sketch scale factor (mm -> pixels, presumably -- TODO confirm units)
		const float worldwidth = VRmixin::worldSkS.getWidth();
		const float worldheight = VRmixin::worldSkS.getHeight();
		// Set the transform with an offset of half the sketch dimensions,
		// presumably placing the world origin at the sketch center -- TODO confirm setTmat arg semantics.
		worldSkS.setTmat(scale, worldwidth/2, worldheight/2);
		//create a sketch for the things the robot has seen
		NEW_SKETCH(seen, bool, visops::zeros(worldSkS));
		//create a sketch for occluded areas the robot should be interested in
		NEW_SKETCH(interest, bool, visops::zeros(worldSkS));
		// candidate standing positions from which a target could be viewed
		// (populated later by EvaluateMotion)
		NEW_SKETCH(targetPositions, bool, visops::zeros(worldSkS));
	}

	$nodeclass FindStuff : MapBuilderNode(MapBuilderRequest::worldMap) : doStart {
		// Configure the MapBuilder request that populates the world map:
		// search the ground around the robot for AprilTags.
		mapreq.setAprilTagFamily();
		mapreq.searchArea = Lookout::groundSearchPoints();
		// presumably directs the MapBuilder to pursue detected shapes for a
		// closer look -- TODO confirm
		mapreq.pursueShapes = true;
		// NOTE(review): this assigns to a member whose name reads like a
		// setter method; confirm setVerbosity is a MapBuilderRequest data
		// field and that assignment (not a call) is intended here.
		mapreq.setVerbosity = MapBuilder::MBVdeleteShape;
	}

	$nodeclass DetermineBoxes : VisualRoutinesStateNode : doStart {
		$reference Explore::walls;
		$reference Explore::squares;

		// For every AprilTag in the world shape space, infer the square box
		// the tag is mounted on: build the visible front side from the tag's
		// pose, extrude the three hidden sides behind it, and register all
		// four sides as obstacle walls plus the full square outline.
		NEW_SHAPEVEC(tags, AprilTagData, select_type<AprilTagData>(worldShS));
		if (tags.empty()){
			return;
		}
		SHAPEVEC_ITERATE(tags, AprilTagData, tag) {
			// TODO IF we happen to repeat seeing apriltags, 
			//	create a hashmap of previously seen apriltags, 
			//	and only iterate over new tags.

			Point tagCenter = tag->getCentroid();
			AngSignPi angle = tag->getQuaternion().ypr()[0];  // tag yaw = heading of the box face
			float originX = tagCenter.coordX();
			float originY = tagCenter.coordY();

			cout << "Angle: " << angle << " X: " << originX << " Y: " << originY << endl;

			// Endpoints of the visible box side: half a side-length out from
			// the tag center, perpendicular to the tag's facing direction.
			float d = SIDE_SIZE/2; //distance
			float dx = sin(angle)*d;
			float dy = (-1)*cos(angle)*d; //y is flipped

			cout << "dx: " << dx << " dy: " << dy << endl;
	
			Point p1 = Point(originX + dx, originY + dy, 0, allocentric);
			Point p2 = Point(originX - dx, originY - dy, 0, allocentric);

			NEW_SHAPE(line, LineData, new LineData(worldShS, p1, p2));

			// BUG FIX: the original declared std::vector<Point> points(4),
			// which pre-fills FOUR default-constructed points; the push_backs
			// below then produced an 8-vertex polygon with four spurious
			// vertices.  Start empty and just reserve capacity instead.
			std::vector<Point> points;
			points.reserve(4);

			// Far corners of the square: one full side-length behind the
			// visible face, along the tag's facing direction.
			float dist = SIDE_SIZE;
			dx = dist*cos(angle);
			dy = dist*sin(angle);
			Point p3 = Point(p1.coordX()-dx, p1.coordY()-dy, 0, allocentric);
			Point p4 = Point(p2.coordX()-dx, p2.coordY()-dy, 0, allocentric);

			// Vertex order p1 -> p3 -> p4 -> p2 walks the perimeter of the square.
			points.push_back(p1);
			points.push_back(p3);
			points.push_back(p4);
			points.push_back(p2);
			NEW_SHAPE(square, PolygonData, new PolygonData(worldShS, points, true));

			//Add the 3 inferred square lines to known walls
			NEW_SHAPE(inferredLineR, LineData, new LineData(worldShS, p2, p4));
			NEW_SHAPE(inferredLineL, LineData, new LineData(worldShS, p1, p3));
			NEW_SHAPE(inferredLineB, LineData, new LineData(worldShS, p3, p4));
			inferredLineR->setObstacle(true);
			inferredLineL->setObstacle(true);
			inferredLineB->setObstacle(true);
			walls.push_back(inferredLineR);
			walls.push_back(inferredLineL);
			walls.push_back(inferredLineB);

			// The directly observed front side is also an obstacle wall.
			line->setObstacle(true);
			walls.push_back(line);
			squares.push_back(square);
		
		} END_ITERATE;
	}	

	//@param epicenter: the origin of the ray trace
	//@param orientation: the starting orientation of the vision sweep
	//@param step: the frequency of lines drawn
	//@param walls: the known walls in the world
	//@param ignoreWalls: whether or not the rayTracer accounts for walls 
	static Sketch<bool> rayTrace(Point epicenter, float orientation, float step, std::vector<DualCoding::Shape<LineData> > walls, bool ignoreWalls) {
		// Sweeps a fan of rays of length VISION_LENGTH from epicenter over an
		// angular span of 2*BOUNDARY radians, optionally clipping each ray at
		// the first wall it hits, and returns a bool sketch of the closed
		// polygon outline traced by the ray endpoints.
		// NOTE(review): `walls` is passed by value (copies the vector each
		// call); a const reference would avoid the copy.
		int length = VISION_LENGTH;
		float boundary = BOUNDARY;
		// number of rays covering the 2*boundary sweep at `step` spacing
		int numSteps = floor(boundary*2/step);
		std::vector<DualCoding::Shape<LineData> > lines;

		//First draw lines
		for (int i = 0; i < numSteps; i++){
			// Endpoint of this ray; sin for x and -cos for y matches the
			// flipped-y convention used elsewhere in this file.
			float x = epicenter.coordX() + length * sin(orientation);
			float y = epicenter.coordY() + length * cos(orientation) * (-1); 
			Point sight = Point(x, y, 0, allocentric);
			NEW_SHAPE(newL, LineData, new LineData(worldShS, epicenter, sight));
			lines.push_back(newL);
			orientation += step;
		}

		//Iterate over all lines, find intersections with walls, 
		//and trim down the lines that end before "length".
		if (!ignoreWalls) {
			SHAPEVEC_ITERATE(lines, LineData, l) {
				for(unsigned int i = 0; i < walls.size(); i++){
					// NOTE(review): when a ray crosses several walls, the
					// endpoint is set by the LAST intersecting wall in the
					// list, which is not necessarily the nearest one --
					// confirm this is the intended occlusion behavior.
					if(walls[i]->intersectsLine(l)){
						l->setEndPts(epicenter, walls[i]->intersectionWithLine(l));
					}
				}
			} END_ITERATE;
		}

		//Iterate over all lines AGAIN, and now make some appropriate polygons.
		// Collect the non-epicenter endpoint of each consecutive pair of rays
		// so the polygon follows the outline of the (possibly trimmed) fan.
		std::vector< Point > points;
		points.push_back(epicenter);
		for(unsigned int i = 0; i < lines.size(); i++){
			//Skip the last line... there is no "next" line
			if(i != lines.size() - 1){
				LineData l1 = *lines[i];
				LineData l2 = *lines[i+1];

				// Pick whichever endpoint is NOT the epicenter.
				if(l1.end1Pt() == epicenter)
					points.push_back(l1.end2Pt());
				else {
					points.push_back(l1.end1Pt());
				}
				if(l2.end1Pt() == epicenter)
					points.push_back(l2.end2Pt());
				else {
					points.push_back(l2.end1Pt());
				}
			}
		}

		//create a closed polygon representing the ray trace
		bool closed = true;
		NEW_SHAPE(poly, PolygonData, new PolygonData(worldShS, points, closed));
		poly->setObstacle(false);
		//create a sketch representing this polygon's rendering
		NEW_SKETCH_N(bounds, bool, poly->getRendering());


		//TODO: logic for filling bounds intelligently, then just return the filled in rendering


		//Remove vision rays -- we have the vision "areas" of polygons.
		// Delete the temporary ray shapes from the shape space so they do not
		// accumulate across calls.
		SHAPEVEC_ITERATE(lines, LineData, l) {
			l.deleteShape();
		} END_ITERATE;

		return bounds;
	}
	
	$nodeclass DrawRays : VisualRoutinesStateNode : doStart {
		$reference Explore::walls;
		$reference Explore::squares;

		// Ray-trace the robot's field of view twice -- once ignoring walls
		// (the full vision cone) and once respecting them (the actually
		// visible area) -- then mark occluded regions as "interesting".
		cout << "Draw rays started" << endl;
		// (removed an unused local: std::vector<Shape<LineData> > lines was
		// declared here but never read or written)
		float boundary = BOUNDARY;  // half-angle of the vision sweep, radians
		Point robot = theAgent->getCentroid();
		float orient = theAgent->getOrientation();
		orient += (M_PI / 2);
				//TODO: - Figure out frame of reference properly, pi
				//theAgent->getOrientation();	
		float step = 0.02;          // angular spacing between rays
		orient -= boundary;         // start the sweep at one edge of the cone
		
		//Create a vision cone that does ray tracing ignoring walls
		Sketch<bool> coneBounds = rayTrace(robot, orient, step, walls, true);
		NEW_SKETCH_N(cone, bool, visops::fillInterior(coneBounds));

		//Create a vision cone that does ray tracing that accounts for walls
		Sketch<bool> bounds = rayTrace(robot, orient, step, walls, false);
		NEW_SKETCH_N(fov, bool, visops::fillInterior(bounds));


		//Add robot's fov to what it knows it has already seen
		GET_SKETCH(seen, bool, worldSkS);
		seen |= fov;

		//Add occluded areas to what robot considers interesting:
		//pixels inside the full cone but not in the seen area (XOR).
		GET_SKETCH(interest, bool, worldSkS);
		interest |= (cone^seen);
		
		//The occluded areas previously included the known squares, remove the squares
 		for(unsigned int i = 0; i < squares.size(); i++){
			NEW_SKETCH_N(squareBounds, bool, squares[i]->getRendering());
			NEW_SKETCH_N(squareRender, bool, visops::fillInterior(squareBounds));
			//Remove square render from interest
			interest = interest & !(squareRender);
		}
	}


	$nodeclass MoveToPoint : PilotNode(PilotTypes::goToShape) : doStart {
		$reference Explore::targetX;
		$reference Explore::targetY;
		$reference Explore::targetHeading;
		// NOTE(review): squares is referenced but never used in this node.
		$reference Explore::squares;

		// Issue a Pilot goToShape request: drive to (targetX, targetY) and
		// finish facing targetHeading.  The destination is marked in world
		// shape space by a small red ellipse.
		NEW_SHAPE(destination, EllipseData, 
			  new EllipseData(worldShS, 
					  Point(targetX, targetY, 0, allocentric), 25, 25));
		destination->setColor(rgb(255,0,0));
		// Keep the marker out of obstacle/path planning.
		destination->setObstacle(false);
		pilotreq.targetShape = destination;
		pilotreq.targetHeading = targetHeading; 
	}
	
	$nodeclass EvaluateMotion : StateNode : doStart {
		$reference Explore::targetX;
		$reference Explore::targetY;
		$reference Explore::targetHeading;
		$reference Explore::targetPoint;
		$reference Explore::walls;
		$reference Explore::squares;
		
		// Decide where the robot could stand to view targetPoint: ray-trace a
		// vision bubble around the target and intersect it with the area the
		// robot has already seen (it can only move through known-clear space).

		//Information about robot's search history
		GET_SKETCH(seen, bool, worldSkS);
		GET_SKETCH(targetPositions, bool, worldSkS);
		Point robot = theAgent->getCentroid();

		/* ************************************* */
		//DEBUG: HARD CODED  targetPoint
		targetPoint = Point(robot.coordX(),robot.coordY()-2000,0);
		NEW_SHAPE(destination, EllipseData, new EllipseData(worldShS, targetPoint, 25, 25));
		destination->setColor(rgb(255,0,0));
		destination->setObstacle(false);
		/* ************************************* */

		//Given a targetPoint, try to find a good way to view the point
		//We can only move to points we have seen before
 		fmat::Column<3> worldPt = fmat::pack(robot.coordX(), robot.coordY(), 0);
		cout << "Robot coordinates in world space: " << worldPt << endl;
		worldSkS.applyTmat(worldPt);  // world coordinates -> sketch coordinates
		cout << "Robot coordinates in sketch space: " << worldPt << endl;
		// (removed unused locals: sketchRobotX/sketchRobotY, length, numSteps
		// were computed here but never read)

		//Sweep slightly more than a full circle around the target so the
		//resulting ray-trace polygon closes on itself.
		float orient = M_PI/2;
		float boundary = M_PI+0.1;
		float step = 0.01;
		orient -= boundary; 
		
		Sketch<bool> polyBounds = rayTrace(targetPoint, orient, step, walls, false);

		//fill in the circle intelligently
		fmat::Column<3> tPt = fmat::pack(targetPoint.coordX(), targetPoint.coordY(), 0);
		worldSkS.applyTmat(tPt);
		skindex ep = polyBounds->indexOf((int)tPt[0],(int)tPt[1]);
		cout << "tPt: " << tPt << "ep: " << ep << " x=" << polyBounds->indexX(ep) << " y=" << polyBounds->indexY(ep) << endl;
		// Clear the seed pixel so the flood fill can start from it.
		polyBounds[ep] = 0;
		NEW_SKETCH(targetBubble, bool, visops::seedfill(polyBounds, ep));
		// Debug sketch marking just the seed pixel.
		NEW_SKETCH(tb2, bool, visops::zeros(polyBounds));
		tb2[ep] = 1;


		//any overlap with possible vision and seen areas
		targetPositions |= targetBubble&seen;
	}

	$setupmachine{
		// Initialize the world sketches once, then loop: build the map,
		// infer boxes from AprilTags, ray-trace vision, and evaluate motion.
		InitWorld =N=> find 
		find : FindStuff
		evalMotion : EvaluateMotion
		// MapBuilder completion (=C=>) drives box inference, ray drawing,
		// and then motion evaluation.
		find =C=> DetermineBoxes =N=> DrawRays =N=> evalMotion
		// evalMotion signals either another move (loop back to mapping after
		// the Pilot completes) or the end of exploration.
		evalMotion =S<choice>(moveNow)=> MoveToPoint =C=> find 
		evalMotion =S<choice>(stopMotion)=> SpeechNode("Done.")
		
	}
}

REGISTER_BEHAVIOR(Explore);