[Jderobot-admin] jderobot-r942 - in trunk/src/components: openniServer openniServer/build-independent openniServer-kinect openniServer-kinect/build-independent

frivas en jderobot.org frivas en jderobot.org
Mar Jul 16 10:08:44 CEST 2013


Author: frivas
Date: 2013-07-16 10:07:43 +0200 (Tue, 16 Jul 2013)
New Revision: 942

Modified:
   trunk/src/components/openniServer-kinect/CMakeLists.txt
   trunk/src/components/openniServer-kinect/build-independent/CMakeLists.txt
   trunk/src/components/openniServer-kinect/openniServer-kinect.cpp
   trunk/src/components/openniServer/CMakeLists.txt
   trunk/src/components/openniServer/build-independent/CMakeLists.txt
   trunk/src/components/openniServer/openniServer.cpp
Log:
modificador openniserver y openniserverkinect para funcionar sin gearbox


Modified: trunk/src/components/openniServer/CMakeLists.txt
===================================================================
--- trunk/src/components/openniServer/CMakeLists.txt	2013-07-16 07:52:13 UTC (rev 941)
+++ trunk/src/components/openniServer/CMakeLists.txt	2013-07-16 08:07:43 UTC (rev 942)
@@ -38,11 +38,9 @@
 			file (COPY ${NITE2_LIB}/NiTE2 DESTINATION ${CMAKE_CURRENT_SOURCE_DIR})
 		endif()
 	TARGET_LINK_LIBRARIES(openniServer
-		${Gearbox_LIBRARIES}
         ${ZeroCIce_LIBRARIES}
         ${opencv_LIBRARIES}
         ${gsl_LIBRARIES}
-		${LIBS_DIR}/jderobotice/libjderobotice.so
         ${LIBS_DIR}/jderobotutil/libjderobotutil.so
         ${LIBS_DIR}/progeo/libprogeo.so
         ${LIBS_DIR}/colorspaces/libcolorspacesmm.so
@@ -52,11 +50,9 @@
 	)
 	else()
 	TARGET_LINK_LIBRARIES(openniServer
-		${Gearbox_LIBRARIES}
 		${ZeroCIce_LIBRARIES}
 		${opencv_LIBRARIES}
 		${gsl_LIBRARIES}
-		${LIBS_DIR}/jderobotice/libjderobotice.so
         ${LIBS_DIR}/jderobotutil/libjderobotutil.so
         ${LIBS_DIR}/progeo/libprogeo.so
         ${LIBS_DIR}/colorspaces/libcolorspacesmm.so

Modified: trunk/src/components/openniServer/build-independent/CMakeLists.txt
===================================================================
--- trunk/src/components/openniServer/build-independent/CMakeLists.txt	2013-07-16 07:52:13 UTC (rev 941)
+++ trunk/src/components/openniServer/build-independent/CMakeLists.txt	2013-07-16 08:07:43 UTC (rev 942)
@@ -53,25 +53,6 @@
 link_directories(${NITE2_LIB}/libNiTE2.so)
 
 
-#manual gearbox libraries
-FIND_PATH( Gearbox_INCLUDE_DIR NAMES gbxutilacfr/gbxutilacfr.h  PATHS ENV C++LIB ENV PATH PATH_SUFFIXES gearbox)
- 
-IF( Gearbox_INCLUDE_DIR )
-    FIND_LIBRARY( Gearbox_LIBRARY1 NAMES GbxUtilAcfr PATHS ENV C++LIB ENV PATH PATH_SUFFIXES lib/gearbox lib64/gearbox )
-    FIND_LIBRARY( Gearbox_LIBRARY2 NAMES GbxIceUtilAcfr PATHS ENV C++LIB ENV PATH PATH_SUFFIXES lib/gearbox lib64/gearbox)
-    SET (Gearbox_LIBRARIES ${Gearbox_LIBRARY1} ${Gearbox_LIBRARY2})
-    IF( Gearbox_LIBRARIES )
-		MESSAGE ("-- Gearbox found at ${Gearbox_LIBRARIES}")
-		include_directories(${Gearbox_INCLUDE_DIR})
-		link_directories(${Gearbox_LIBRARIES})
-    ENDIF( Gearbox_LIBRARIES )
-ENDIF(Gearbox_INCLUDE_DIR)
-
-IF(NOT Gearbox_LIBRARIES)
-		MESSAGE ("*** Gearbox not found")
-ENDIF()
-
-
 #manual ICE
 FIND_PATH( Ice_INCLUDE_DIR NAMES Ice/Ice.h  PATHS ENV C++LIB ENV)
  
@@ -121,11 +102,9 @@
 	TARGET_LINK_LIBRARIES(openniServer
 	  ${opencv_LIBRARIES}
 	  ${gsl_LIBRARIES}
-	  ${Gearbox_LIBRARIES}
 	  ${Ice_LIBRARIES}
 	  ${LIBS_DIR}/libcolorspacesmm.so
 	  ${LIBS_DIR}/libJderobotInterfaces.so
-	  ${LIBS_DIR}/libjderobotice.so
 	  ${LIBS_DIR}/libjderobotutil.so
 	  ${LIBS_DIR}/libprogeo.so
 	  ${OPENNI2_LIB}/libOpenNI2.so
@@ -135,11 +114,9 @@
 	TARGET_LINK_LIBRARIES(openniServer
 	  ${opencv_LIBRARIES}
 	  ${gsl_LIBRARIES}
-	  ${Gearbox_LIBRARIES}
 	  ${Ice_LIBRARIES}
 	  ${LIBS_DIR}/libcolorspacesmm.so
 	  ${LIBS_DIR}/libJderobotInterfaces.so
-	  ${LIBS_DIR}/libjderobotice.so
 	  ${LIBS_DIR}/libjderobotutil.so
 	  ${LIBS_DIR}/libprogeo.so
 	  ${OPENNI2_LIB}/libOpenNI2.so

Modified: trunk/src/components/openniServer/openniServer.cpp
===================================================================
--- trunk/src/components/openniServer/openniServer.cpp	2013-07-16 07:52:13 UTC (rev 941)
+++ trunk/src/components/openniServer/openniServer.cpp	2013-07-16 08:07:43 UTC (rev 942)
@@ -25,18 +25,14 @@
 
 #include <Ice/Ice.h>
 #include <IceUtil/IceUtil.h>
-#include <gbxsickacfr/gbxiceutilacfr/safethread.h>
 #include <jderobot/kinectleds.h>
 #include <jderobot/camera.h>
 #include <jderobot/pose3dmotors.h>
 #include <jderobot/pointcloud.h>
 #include <colorspaces/colorspacesmm.h>
-#include <jderobotice/component.h>
-#include <jderobotice/application.h>
 #include <tr1/memory>
 #include <list>
 #include <sstream>
-#include <jderobotice/exceptions.h>
 #include <math.h>
 #include <cv.h>
 #include <highgui.h>
@@ -44,6 +40,9 @@
 #include <boost/date_time/posix_time/posix_time.hpp>
 #include "myprogeo.h"
 #include <OpenNI.h>
+#include <opencv2/imgproc/imgproc_c.h>
+#include <opencv2/video/background_segm.hpp>
+#include <signal.h>
 
 
 #ifdef WITH_NITE2
@@ -63,11 +62,7 @@
                                                     \
 }
 
-openni::VideoFrameRef		m_depthFrame;
-openni::VideoFrameRef		m_colorFrame;
-openni::Device			m_device;
 
-
 #ifdef WITH_NITE2
 	nite::UserTracker* m_pUserTracker;
 	nite::UserTrackerFrameRef userTrackerFrame;
@@ -76,28 +71,48 @@
 
 
 
+
+
+//global configuration
+openni::VideoFrameRef		m_depthFrame;
+openni::VideoFrameRef		m_colorFrame;
+openni::Device			m_device;
+int cameraR, cameraD;
+int colors[10][3];
+int SELCAM;
+int configWidth;
+int configHeight;
+int configFps;
+pthread_mutex_t mutex;
+bool componentAlive;
+pthread_t updateThread;
+
+
 namespace openniServer{
 
-pthread_mutex_t mutex;
-int SELCAM;
+
 std::vector<int> distances;
 std::vector<int> pixelsID;
 cv::Mat* srcRGB;
-int colors[10][3];
 int userGeneratorActive=0;
+openni::VideoStream depth, color;
+openni::VideoStream** m_streams;
 int m_width;
 int m_height;
-openni::VideoStream depth, color;
-openni::VideoStream** m_streams;
 
-int configWidth;
-int configHeight;
-int configFps;
+//fondo
+cv::Mat back;
 
 
+struct timeval a,b;
 
+int segmentationType=1; //0 ninguna, 1 NITE, 2 fondo
+
+
+
 void* updateThread(void*)
 {
+
 	openni::Status rc = openni::STATUS_OK;
 	rc = openni::OpenNI::initialize();
 	if (rc != openni::STATUS_OK)
@@ -139,122 +154,181 @@
 	}
 	#endif
 
-
 	
-	m_device.setImageRegistrationMode( openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR );
-	//m_device.Device::setDepthColorSyncEnabled(true);
+	if (cameraR && cameraD)
+		m_device.setImageRegistrationMode( openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR );
 
+
 	//depth
-	rc = depth.create(m_device, openni::SENSOR_DEPTH);
-	if (rc == openni::STATUS_OK)
-	{
-		rc = depth.start();
-		if (rc != openni::STATUS_OK)
+	if (cameraD){
+		rc = depth.create(m_device, openni::SENSOR_DEPTH);
+		if (rc == openni::STATUS_OK)
 		{
-			std::cout << "OpenniServer: Couldn't start depth stream: "<< openni::OpenNI::getExtendedError() << std::endl;
-			depth.destroy();
+			rc = depth.start();
+			if (rc != openni::STATUS_OK)
+			{
+				std::cout << "OpenniServer: Couldn't start depth stream: "<< openni::OpenNI::getExtendedError() << std::endl;
+				depth.destroy();
+			}
 		}
+		else
+		{
+			std::cout << "OpenniServer: Couldn't find depth stream: " <<  openni::OpenNI::getExtendedError() << std::endl;
+		}
 	}
-	else
-	{
-		std::cout << "OpenniServer: Couldn't find depth stream: " <<  openni::OpenNI::getExtendedError() << std::endl;
-	}
 
 	//color
-	rc = color.create(m_device, openni::SENSOR_COLOR);
-	if (rc == openni::STATUS_OK)
-	{
-		rc = color.start();
-		if (rc != openni::STATUS_OK)
+	if (cameraR){
+		rc = color.create(m_device, openni::SENSOR_COLOR);
+		if (rc == openni::STATUS_OK)
 		{
-			std::cout << "OpenniServer: Couldn't start color stream: " << openni::OpenNI::getExtendedError() << std::endl;
-			color.destroy();
+			rc = color.start();
+			if (rc != openni::STATUS_OK)
+			{
+				std::cout << "OpenniServer: Couldn't start color stream: " << openni::OpenNI::getExtendedError() << std::endl;
+				color.destroy();
+			}
 		}
+		else
+		{
+			std::cout << "OpenniServer: Couldn't find color stream: " << openni::OpenNI::getExtendedError() << std::endl;
+		}
 	}
-	else
-	{
-		std::cout << "OpenniServer: Couldn't find color stream: " << openni::OpenNI::getExtendedError() << std::endl;
-	}
 
 	openni::VideoMode depthVideoMode;
 	openni::VideoMode colorVideoMode;
 
+	/*std::cout << "DEPTH ------------------------" << std::endl;
+	const openni::SensorInfo *depthSensorInfo = m_device.getSensorInfo(openni::SENSOR_DEPTH);
 
+	for(int i=0;i < depthSensorInfo->getSupportedVideoModes().getSize();i++)
+	{
+		openni::VideoMode videoMode = depthSensorInfo->getSupportedVideoModes()[i];
 
-	colorVideoMode.setResolution(configWidth,configHeight);
-	colorVideoMode.setFps( configFps );
-	colorVideoMode.setPixelFormat( openni::PIXEL_FORMAT_RGB888 );
-	color.setVideoMode(colorVideoMode);
-	depthVideoMode.setResolution(configWidth,configHeight);
-	depthVideoMode.setFps( configFps );
-	depthVideoMode.setPixelFormat( openni::PIXEL_FORMAT_DEPTH_1_MM );
-	depth.setVideoMode(depthVideoMode);
+		std::cout << "fps: " << videoMode.getFps() << "x: " << videoMode.getResolutionX() << "y " <<  videoMode.getResolutionY() << std::endl;
+	}
 
+	std::cout << "COLOR ------------------------" << std::endl;
+	const openni::SensorInfo *COLORSensorInfo = m_device.getSensorInfo(openni::SENSOR_COLOR);
 
-	if (depth.isValid() && color.isValid())
-	{
-		depthVideoMode = depth.getVideoMode();
-		colorVideoMode = color.getVideoMode();
+		for(int i=0;i < COLORSensorInfo->getSupportedVideoModes().getSize();i++)
+		{
+			openni::VideoMode videoMode = COLORSensorInfo->getSupportedVideoModes()[i];
 
-		int depthWidth = depthVideoMode.getResolutionX();
-		int depthHeight = depthVideoMode.getResolutionY();
-		int colorWidth = colorVideoMode.getResolutionX();
-		int colorHeight = colorVideoMode.getResolutionY();
+			std::cout << "fps: " << videoMode.getFps() << "x: " << videoMode.getResolutionX() << "y " <<  videoMode.getResolutionY() << std::endl;
+		}*/
 
-		if (depthWidth == colorWidth &&
-			depthHeight == colorHeight)
+	//std::cout << "voy a fijar resolucion" << std::endl;
+	if (cameraR){
+		colorVideoMode.setResolution(configWidth,configHeight);
+		colorVideoMode.setFps( configFps );
+		colorVideoMode.setPixelFormat( openni::PIXEL_FORMAT_RGB888 );
+		//std::cout << "color" << std::endl;
+		color.setVideoMode(colorVideoMode);
+		color.setMirroringEnabled(true);
+	}
+	if (cameraD){
+		depthVideoMode.setResolution(configWidth,configHeight);
+		depthVideoMode.setFps( configFps );
+		depthVideoMode.setPixelFormat(openni::PIXEL_FORMAT_DEPTH_1_MM);
+		//std::cout << "depth" << std::endl;
+		rc= depth.setVideoMode(depthVideoMode);
+		//std::cout << "done" << std::endl;
+		if (rc != openni::STATUS_OK)
 		{
-			m_width = depthWidth;
-			m_height = depthHeight;
+			std::cout << "OpenniServer: error at set depth videoMode: " << openni::OpenNI::getExtendedError() << std::endl;
+			//color.destroy();
 		}
-		else
+		depth.setMirroringEnabled(false);
+	}
+
+
+	std::cout << "fps:" << configFps << "w" << configWidth << "h" << configHeight << std::endl;
+
+	if (cameraR && cameraD){
+
+		if (depth.isValid() && color.isValid())
 		{
-			std::cout <<  "Error - expect color and depth to be in same resolution: D: " << depthWidth << "x" << depthHeight << "C: " << colorWidth << "x" <<  colorHeight << std::endl;
+			depthVideoMode = depth.getVideoMode();
+			colorVideoMode = color.getVideoMode();
 
+			int depthWidth = depthVideoMode.getResolutionX();
+			int depthHeight = depthVideoMode.getResolutionY();
+			int colorWidth = colorVideoMode.getResolutionX();
+			int colorHeight = colorVideoMode.getResolutionY();
 
+			if (depthWidth == colorWidth &&
+				depthHeight == colorHeight)
+			{
+				m_width = depthWidth;
+				m_height = depthHeight;
+			}
+			else
+			{
+				std::cout <<  "Error - expect color and depth to be in same resolution: D: " << depthWidth << "x" << depthHeight << "C: " << colorWidth << "x" <<  colorHeight << std::endl;
+
+
+			}
 		}
+		else if (depth.isValid())
+		{
+			depthVideoMode = depth.getVideoMode();
+			m_width = depthVideoMode.getResolutionX();
+			m_height = depthVideoMode.getResolutionY();
+		}
+		else if (color.isValid())
+		{
+			colorVideoMode = color.getVideoMode();
+			m_width = colorVideoMode.getResolutionX();
+			m_height = colorVideoMode.getResolutionY();
+		}
+		else
+		{
+			std::cout << "Error - expects at least one of the streams to be valid..." << std::endl;
+		}
 	}
-	else if (depth.isValid())
-	{
-		depthVideoMode = depth.getVideoMode();
-		m_width = depthVideoMode.getResolutionX();
-		m_height = depthVideoMode.getResolutionY();
-	}
-	else if (color.isValid())
-	{
-		colorVideoMode = color.getVideoMode();
-		m_width = colorVideoMode.getResolutionX();
-		m_height = colorVideoMode.getResolutionY();
-	}
-	else
-	{
-		std::cout << "Error - expects at least one of the streams to be valid..." << std::endl;
-	}
 	
 	distances.resize(m_width*m_height);
 	pixelsID.resize(m_width*m_height);
 
+	/*std::cout << "aqui" << std::endl;
+	m_device.Device::setDepthColorSyncEnabled(true);
+	std::cout << "2" << std::endl;*/
+
 	m_streams = new openni::VideoStream*[2];
 	m_streams[0] = &depth;
 	m_streams[1] = &color;
 	
 
-	depth.setMirroringEnabled(false);
-	color.setMirroringEnabled(true);
 
-	
-	while(1){
+	//diferente en arm que en x86???
+
+
+	struct timeval Na, Nb;
+
+	gettimeofday(&Na,NULL);
+
+	while(componentAlive){
+		long long int timeInicio = Na.tv_sec*1000000+Na.tv_usec;
+		gettimeofday(&Nb,NULL);
+		long long int timeNew = Nb.tv_sec*1000000+Nb.tv_usec;
+		//std::cout << "Tiempo completo: " << (timeNew - timeInicio)/1000 << std::endl;
+		gettimeofday(&Na,NULL);
+
 		pthread_mutex_lock(&mutex);
+
+
 		int changedIndex;
 
 		openni::Status rc = openni::OpenNI::waitForAnyStream(m_streams, 2, &changedIndex);
+
 		#ifndef WITH_NITE2
 		if (rc != openni::STATUS_OK)
 		{
 			std::cout << "Wait failed" << std::endl;
 		}
 
-		switch (changedIndex)
+		/*switch (changedIndex)
 		{
 		case 0:
 			depth.readFrame(&m_depthFrame);
@@ -265,16 +339,27 @@
 		default:
 			std::cout << "Error in wait" << std::endl;
 			break;
-		}
+		}*/
+
+
+		if (cameraD)
+			depth.readFrame(&m_depthFrame);
+		if (cameraR)
+			color.readFrame(&m_colorFrame);
+
+
 		//nite
+
 		#else
-		color.readFrame(&m_colorFrame);
-		rcN = m_pUserTracker->readFrame(&userTrackerFrame);
-		m_depthFrame = userTrackerFrame.getDepthFrame();
-		if (rcN != nite::STATUS_OK)
-		{
-			std::cout << "GetNextData failed" << std::endl;
-			//return;
+		if (segmentationType==1){
+			color.readFrame(&m_colorFrame);
+			rcN = m_pUserTracker->readFrame(&userTrackerFrame);
+			m_depthFrame = userTrackerFrame.getDepthFrame();
+			if (rcN != nite::STATUS_OK)
+			{
+				std::cout << "GetNextData failed" << std::endl;
+				//return;
+			}
 		}
 		#endif
 
@@ -284,6 +369,7 @@
 		pthread_mutex_unlock(&mutex);
 		//OJO it control
 		usleep(1000);
+
    }
    return NULL;
 }
@@ -295,22 +381,21 @@
 */
 	class CameraRGB: virtual public jderobot::Camera {
 public:
-	CameraRGB(std::string& propertyPrefix, const jderobotice::Context& context)
-      : prefix(propertyPrefix),context(context),
+	CameraRGB(std::string& propertyPrefix, const Ice::PropertiesPtr propIn)
+      : prefix(propertyPrefix),
 	imageFmt(),
 	imageDescription(new jderobot::ImageDescription()),
 	cameraDescription(new jderobot::CameraDescription()),
 	replyTask()
 	{
-	Ice::PropertiesPtr prop = context.properties();
+	Ice::PropertiesPtr prop = propIn;
 
 	//fill cameraDescription
 	cameraDescription->name = prop->getProperty(prefix+"Name");
 	if (cameraDescription->name.size() == 0)
-	throw 
-		jderobotice::ConfigFileException(ERROR_INFO,"Camera name not configured");
+		std::cout << "Camera name not configured" << std::endl;
 
-	cameraDescription->shortDescription = prop->getProperty(prefix+"ShortDescription");
+	cameraDescription->shortDescription = prop->getProperty(prefix + "ShortDescription");
 
 	//fill imageDescription
 	imageDescription->width = configWidth;
@@ -324,20 +409,22 @@
 	std::string fmtStr = prop->getPropertyWithDefault(prefix+"Format","YUY2");//default format YUY2
 	imageFmt = colorspaces::Image::Format::searchFormat(fmtStr);
 	if (!imageFmt)
-	throw 
-		jderobotice::ConfigFileException(ERROR_INFO, "Format " + fmtStr + " unknown");
+		std::cout <<  "Format " << fmtStr << " unknown" << std::endl;
 	imageDescription->size = imageDescription->width * imageDescription->height * CV_ELEM_SIZE(imageFmt->cvType);
 	imageDescription->format = imageFmt->name;
 
-	context.tracer().info("Starting thread for camera: " + cameraDescription->name);
-	replyTask = new ReplyTask(this, imageDescription->width, imageDescription->height,fps, playerdetection);
+	std::cout << "Starting thread for camera: " << cameraDescription->name << std::endl;
+	replyTask = new ReplyTask(this,fps, playerdetection);
 
-	replyTask->start();//my own thread
+	this->control=replyTask->start();//my own thread
 	}
 
 	virtual ~CameraRGB(){
-		context.tracer().info("Stopping and joining thread for camera: " + cameraDescription->name);
-		gbxiceutilacfr::stopAndJoin(replyTask);
+		std::cout << "-------------------------------------------Stopping and joining thread for camera: " << cameraDescription->name << std::endl;
+		replyTask->destroy();
+		this->control.join();
+		color.stop();
+		color.destroy();
 	}
     
 	virtual jderobot::ImageDescriptionPtr getImageDescription(const Ice::Current& c){
@@ -353,12 +440,12 @@
 	}
 
 	virtual std::string startCameraStreaming(const Ice::Current&){
-		context.tracer().info("Should be made anything to start camera streaming: " + cameraDescription->name);
+		std::cout << "Should be made anything to start camera streaming: " << cameraDescription->name<< std::endl;
 		return std::string("");
 	}
 
 	virtual void stopCameraStreaming(const Ice::Current&) {
-		context.tracer().info("Should be made anything to stop camera streaming: " + cameraDescription->name);
+		std::cout << "Should be made anything to stop camera streaming: " <<  cameraDescription->name << std::endl;
 	}
 
 	virtual Ice::Int setCameraDescription(const jderobot::CameraDescriptionPtr&, const Ice::Current&){
@@ -366,10 +453,9 @@
 	}
 
 private:
-	class ReplyTask: public gbxiceutilacfr::SafeThread{
+	class ReplyTask: public IceUtil::Thread{
 	public:
-		ReplyTask(CameraRGB* camera, int width, int height, int fps, int playerdetection)
-	: gbxiceutilacfr::SafeThread(camera->context.tracer()), mycameravga(camera) {
+		ReplyTask(CameraRGB* camera, int fps, int playerdetection):mycameravga(camera),_done(false) {
 		segmentation=playerdetection;
 		this->fps=fps;
       }
@@ -380,7 +466,7 @@
 		requests.push_back(cb);
 	}
 
-    virtual void walk(){
+    virtual void run(){
 
 		jderobot::ImageDataPtr reply(new jderobot::ImageData);
 		reply->description = mycameravga->imageDescription;
@@ -398,7 +484,7 @@
 		cycle=(float)(1/(float)fps)*1000000;
 		
 	
-		while(!isStopping()){
+		while(!(_done)){
 			gettimeofday(&a,NULL);
 			totala=a.tv_sec*1000000+a.tv_usec;
 			pthread_mutex_lock(&mutex);
@@ -412,11 +498,12 @@
 			}
 
 			//nite
-			#ifdef WITH_NITE2
-			const nite::UserMap& userLabels = userTrackerFrame.getUserMap();
-			const nite::UserId* pLabels = userLabels.getPixels();
-			#endif
 
+				#ifdef WITH_NITE2
+				const nite::UserMap& userLabels = userTrackerFrame.getUserMap();
+				const nite::UserId* pLabels = userLabels.getPixels();
+				#endif
+
 			const openni::RGB888Pixel* pImageRow = (const openni::RGB888Pixel*)m_colorFrame.getData();
 			int rowSize = m_colorFrame.getStrideInBytes() / sizeof(openni::RGB888Pixel);
 
@@ -425,37 +512,51 @@
 				const openni::RGB888Pixel* pImage = pImageRow;
 				for (int x = 0; x < m_colorFrame.getWidth(); ++x, ++pImage)
 				{
-					#ifdef WITH_NITE2
-					if (segmentation){
-						pixelsID[(y*m_colorFrame.getWidth() + x)]= *pLabels;
-						if (*pLabels!=0)
-		                {
-		                    srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 0] = colors[*pLabels][0];
-							srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 1] = colors[*pLabels][1];
-							srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 2] = colors[*pLabels][2];
-						}
-						else{
-							srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 0] = 0;
-							srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 1] = 0;
-							srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 2] = 0;
-						}
-						++pLabels;
+					switch(segmentationType){
+						case 0:
+							srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 0] = pImage->r;
+							srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 1] = pImage->g;
+							srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 2] = pImage->b;
+							break;
+						case 1:
+							#ifdef WITH_NITE2
+							if (segmentation){
+								pixelsID[(y*m_colorFrame.getWidth() + x)]= *pLabels;
+								if (*pLabels!=0)
+								{
+									srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 0] = colors[*pLabels][0];
+									srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 1] = colors[*pLabels][1];
+									srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 2] = colors[*pLabels][2];
+								}
+								else{
+									srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 0] = 0;
+									srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 1] = 0;
+									srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 2] = 0;
+								}
+								++pLabels;
+							}
+							else{
+								srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 0] = pImage->r;
+								srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 1] = pImage->g;
+								srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 2] = pImage->b;
+							}
+							#endif
+							break;
+						case 2:
+
+						default:
+							std::cout << "openniServer: Error segmentation not supported" << std::endl;
+							break;
 					}
-					else{
-						srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 0] = pImage->r;
-						srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 1] = pImage->g;
-						srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 2] = pImage->b;
-					}
-					#else
-						srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 0] = pImage->r;
-						srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 1] = pImage->g;
-						srcRGB->data[(y*m_colorFrame.getWidth() + x)*3 + 2] = pImage->b;
-					#endif
+
+
 				}
 				pImageRow += rowSize;
 			}	
 	
 
+			cv::imshow("color", *srcRGB);
+
 			//test
 			//CalculateJoints();
 
@@ -500,28 +601,29 @@
 			totalpre=totala;
 		}
 	}
-	
-	CameraRGB* mycameravga;
-	IceUtil::Mutex requestsMutex;
-	std::list<jderobot::AMD_ImageProvider_getImageDataPtr> requests;
-	unsigned int m_nTexMapX;
-	unsigned int m_nTexMapY;
-	int m_width;
-	int	m_height;
-	openni::RGB888Pixel* m_pTexMap;
-	int segmentation;
-	int fps;
-	
+    virtual void destroy(){
+		this->_done=true;
+	}
+
+
+	private:
+		CameraRGB* mycameravga;
+		IceUtil::Mutex requestsMutex;
+		std::list<jderobot::AMD_ImageProvider_getImageDataPtr> requests;
+		int segmentation;
+		int fps;
+		bool _done;
+
     };
     typedef IceUtil::Handle<ReplyTask> ReplyTaskPtr;
 
 
     std::string prefix;
-	jderobotice::Context context;
     colorspaces::Image::FormatPtr imageFmt;
     jderobot::ImageDescriptionPtr imageDescription;
     jderobot::CameraDescriptionPtr cameraDescription;
     ReplyTaskPtr replyTask;
+    IceUtil::ThreadControl control;
 
 
   };
@@ -530,8 +632,8 @@
 //*********************************************************************/
 	class CameraDEPTH: virtual public jderobot::Camera {
 public:
-	CameraDEPTH(std::string& propertyPrefix, const jderobotice::Context& context)
-      : prefix(propertyPrefix),context(context),
+	CameraDEPTH(std::string& propertyPrefix, const Ice::PropertiesPtr propIn)
+      : prefix(propertyPrefix),
 	imageFmt(),
 	imageDescription(new jderobot::ImageDescription()),
 	cameraDescription(new jderobot::CameraDescription()),
@@ -539,13 +641,12 @@
 	{
       
       
-	Ice::PropertiesPtr prop = context.properties();
+	Ice::PropertiesPtr prop = propIn;
 
 	//fill cameraDescription
 	cameraDescription->name = prop->getProperty(prefix+"Name");
 	if (cameraDescription->name.size() == 0)
-	throw 
-		jderobotice::ConfigFileException(ERROR_INFO,"Camera name not configured");
+		std::cout << "Camera name not configured" << std::endl;
 
 	cameraDescription->shortDescription = prop->getProperty(prefix+"ShortDescription");
 
@@ -561,20 +662,22 @@
 	std::string fmtStr = prop->getPropertyWithDefault(prefix+"Format","YUY2");//default format YUY2
 	imageFmt = colorspaces::Image::Format::searchFormat(fmtStr);
 	if (!imageFmt)
-	throw 
-		jderobotice::ConfigFileException(ERROR_INFO, "Format " + fmtStr + " unknown");
+		std::cout << "Format " <<  fmtStr << " unknown" << std::endl;
 	imageDescription->size = imageDescription->width * imageDescription->height * CV_ELEM_SIZE(imageFmt->cvType);
 	imageDescription->format = imageFmt->name;
 
-	context.tracer().info("Starting thread for camera: " + cameraDescription->name);
+	std::cout << "Starting thread for camera: " <<  cameraDescription->name << std::endl;
 	replyTask = new ReplyTask(this, imageDescription->width, imageDescription->height,fps, playerdetection);
 
-	replyTask->start();//my own thread
+	this->control=replyTask->start();//my own thread
 	}
 
 	virtual ~CameraDEPTH(){
-		context.tracer().info("Stopping and joining thread for camera: " + cameraDescription->name);
-		gbxiceutilacfr::stopAndJoin(replyTask);
+		std::cout << "Stopping and joining thread for camera: " << cameraDescription->name << std::endl;
+		replyTask->destroy();
+		this->control.join();
+		depth.stop();
+		depth.destroy();
 	}
     
 	virtual jderobot::ImageDescriptionPtr getImageDescription(const Ice::Current& c){
@@ -590,12 +693,12 @@
 	}
 
 	virtual std::string startCameraStreaming(const Ice::Current&){
-		context.tracer().info("Should be made anything to start camera streaming: " + cameraDescription->name);
+		std::cout << "Should be made anything to start camera streaming: " << cameraDescription->name << std::endl;
 		return std::string("");
 	}
 
 	virtual void stopCameraStreaming(const Ice::Current&) {
-		context.tracer().info("Should be made anything to stop camera streaming: " + cameraDescription->name);
+		std::cout << "Should be made anything to stop camera streaming: "  << cameraDescription->name << std::endl;
 	}
 	
 	virtual Ice::Int setCameraDescription(const jderobot::CameraDescriptionPtr&, const Ice::Current&){
@@ -603,12 +706,13 @@
 	}
 
 private:
-	class ReplyTask: public gbxiceutilacfr::SafeThread{
+	class ReplyTask: public IceUtil::Thread{
 	public:
 		ReplyTask(CameraDEPTH* camera, int width, int height, int fps, int playerDetection)
-	: gbxiceutilacfr::SafeThread(camera->context.tracer()), mycameradepth(camera) {
+	:mycameradepth(camera),_done(false) {
 		segmentation=playerDetection;
 		this->fps=fps;
+		this->minToTrain=15;
       }
 		
 
@@ -617,7 +721,7 @@
 		requests.push_back(cb);
 	}
 
-    virtual void walk(){
+    virtual void run(){
 		int test;
 		
 
@@ -626,7 +730,6 @@
 		reply->pixelData.resize(mycameradepth->imageDescription->width*mycameradepth->imageDescription->height*3);
 		cv::Mat dst_resize(cv::Size(mycameradepth->imageDescription->width, mycameradepth->imageDescription->height),CV_8UC3);
 		cv::Mat src(cv::Size(mycameradepth->imageDescription->width, mycameradepth->imageDescription->height),CV_8UC3);
-
 		struct timeval a, b;
 		int cycle; // duración del ciclo
 		long totalb,totala;
@@ -636,10 +739,9 @@
 		//std::cout << "FPS depth: " << fps << std::endl;
 		cycle=(float)(1/(float)fps)*1000000;
 
-
 		
 	
-		while(!isStopping()){
+		while(!(_done)){
 			gettimeofday(&a,NULL);
 			totala=a.tv_sec*1000000+a.tv_usec;
 			pthread_mutex_lock(&mutex);
@@ -661,6 +763,7 @@
 			const nite::UserId* pLabels = userLabels.getPixels();
 			#endif
 
+
 			const openni::DepthPixel* pDepth = (const openni::DepthPixel*)m_depthFrame.getData();
 			int restOfRow = m_depthFrame.getStrideInBytes() / sizeof(openni::DepthPixel) - m_depthFrame.getWidth();
 
@@ -668,38 +771,56 @@
 			{	
 				for (int x = 0; x < m_depthFrame.getWidth(); ++x, ++pDepth)
 				{
-					#ifdef WITH_NITE2
-					if ((*pLabels!=0)||(!segmentation)){
-						distances[(y*m_depthFrame.getWidth() + x)] = *pDepth;
-						if (*pDepth != 0)
-						{
-							src.data[(y*m_depthFrame.getWidth()+ x)*3+0] = (float(*pDepth)/(float)MAX_LENGHT)*255.;
-							src.data[(y*m_depthFrame.getWidth()+ x)*3+1] = (*pDepth)>>8;
-							src.data[(y*m_depthFrame.getWidth()+ x)*3+2] = (*pDepth)&0xff;
+					switch(segmentationType){
+						case 0:
+							distances[(y*m_depthFrame.getWidth() + x)] = *pDepth;
+							if (*pDepth != 0)
+							{
+								src.data[(y*m_depthFrame.getWidth()+ x)*3+0] = (float(*pDepth)/(float)MAX_LENGHT)*255.;
+								src.data[(y*m_depthFrame.getWidth()+ x)*3+1] = (*pDepth)>>8;
+								src.data[(y*m_depthFrame.getWidth()+ x)*3+2] = (*pDepth)&0xff;
+							}
+							break;
+						case 1:
+							#ifdef WITH_NITE2
+							if ((*pLabels!=0)||(!segmentation)){
+								distances[(y*m_depthFrame.getWidth() + x)] = *pDepth;
+								if (*pDepth != 0)
+								{
+									src.data[(y*m_depthFrame.getWidth()+ x)*3+0] = (float(*pDepth)/(float)MAX_LENGHT)*255.;
+									src.data[(y*m_depthFrame.getWidth()+ x)*3+1] = (*pDepth)>>8;
+									src.data[(y*m_depthFrame.getWidth()+ x)*3+2] = (*pDepth)&0xff;
 
-						}
-						else{
-							src.data[(y*m_depthFrame.getWidth()+ x)*3+0] = 0;
-							src.data[(y*m_depthFrame.getWidth()+ x)*3+1] = 0;
-							src.data[(y*m_depthFrame.getWidth()+ x)*3+2] = 0;
-						}
+								}
+								else{
+									src.data[(y*m_depthFrame.getWidth()+ x)*3+0] = 0;
+									src.data[(y*m_depthFrame.getWidth()+ x)*3+1] = 0;
+									src.data[(y*m_depthFrame.getWidth()+ x)*3+2] = 0;
+								}
+							}
+							else{
+								src.data[(y*m_depthFrame.getWidth()+ x)*3+0] = 0;
+								src.data[(y*m_depthFrame.getWidth()+ x)*3+1] = 0;
+								src.data[(y*m_depthFrame.getWidth()+ x)*3+2] = 0;
+							}
+							++pLabels;
+							#endif
+							break;
+						case 2:
+							distances[(y*m_depthFrame.getWidth() + x)] = *pDepth;
+							if (*pDepth != 0)
+							{
+								src.data[(y*m_depthFrame.getWidth()+ x)*3+0] = (float(*pDepth)/(float)MAX_LENGHT)*255.;
+								src.data[(y*m_depthFrame.getWidth()+ x)*3+1] = (*pDepth)>>8;
+								src.data[(y*m_depthFrame.getWidth()+ x)*3+2] = (*pDepth)&0xff;
+							}
+							break;
+						default:
+							std::cout << "openniServer: Error segmentation not supported" << std::endl;
+							break;
 					}
-					else{
-						src.data[(y*m_depthFrame.getWidth()+ x)*3+0] = 0;
-						src.data[(y*m_depthFrame.getWidth()+ x)*3+1] = 0;
-						src.data[(y*m_depthFrame.getWidth()+ x)*3+2] = 0;
-					}
-					++pLabels;
-					#else
-						distances[(y*m_depthFrame.getWidth() + x)] = *pDepth;
-						if (*pDepth != 0)
-						{
-							src.data[(y*m_depthFrame.getWidth()+ x)*3+0] = (float(*pDepth)/(float)MAX_LENGHT)*255.;
-							src.data[(y*m_depthFrame.getWidth()+ x)*3+1] = (*pDepth)>>8;
-							src.data[(y*m_depthFrame.getWidth()+ x)*3+2] = (*pDepth)&0xff;
-					}
-					#endif
 				}
+
 				pDepth += restOfRow;
 			}
 
@@ -740,25 +861,36 @@
 		}
 	}
 	
-	CameraDEPTH* mycameradepth;
-	IceUtil::Mutex requestsMutex;
-	std::list<jderobot::AMD_ImageProvider_getImageDataPtr> requests;
-	
+    virtual void destroy(){
+		this->_done=true;
+	}
 
-	int segmentation;
-	int fps;
+
+	private:
+		CameraDEPTH* mycameradepth;
+		IceUtil::Mutex requestsMutex;
+		std::list<jderobot::AMD_ImageProvider_getImageDataPtr> requests;
+
 	
+		int segmentation;
+		int fps;
+		int minToTrain;
+		cv::BackgroundSubtractorMOG2 bg;
+		cv::Mat fore;
+		cv::Mat trainImage;
+		bool _done;
 	
+	
     };
     typedef IceUtil::Handle<ReplyTask> ReplyTaskPtr;
 
 
     std::string prefix;
-    jderobotice::Context context;
     colorspaces::Image::FormatPtr imageFmt;
     jderobot::ImageDescriptionPtr imageDescription;
     jderobot::CameraDescriptionPtr cameraDescription;
     ReplyTaskPtr replyTask;
+    IceUtil::ThreadControl control;
 
   };
 
@@ -768,18 +900,25 @@
 
 	class pointCloudI: virtual public jderobot::pointCloud{
 		public:
-			pointCloudI (std::string& propertyPrefix, const jderobotice::Context& context):
-				prefix(propertyPrefix),context(context),data(new jderobot::pointCloudData()) {
-					Ice::PropertiesPtr prop = context.properties();
+			pointCloudI (std::string& propertyPrefix, const Ice::PropertiesPtr propIn):
+				prefix(propertyPrefix),data(new jderobot::pointCloudData()) {
+					Ice::PropertiesPtr prop = propIn;
 
 					int playerdetection = prop->getPropertyAsIntWithDefault("openniServer.PlayerDetection",0);
 					int fps =prop->getPropertyAsIntWithDefault("openniServer.pointCloud.Fps",10);
 					#ifndef WITH_NITE2
 						playerdetection=0;
 					#endif
+						pthread_mutex_init(&this->localMutex, NULL);
 					   replyCloud = new ReplyCloud(this,prop->getProperty("openniServer.calibration"), playerdetection, configWidth, configHeight,fps);
-					   replyCloud->start();
+					   this->control=replyCloud->start();
 				}
+
+			virtual ~pointCloudI(){
+				std::cout << "Stopping and joining thread for pointCloud" << std::endl;
+				replyCloud->destroy();
+				this->control.join();
+			}
 		
 
 		virtual jderobot::pointCloudDataPtr getCloudData(const Ice::Current&){
@@ -788,18 +927,21 @@
 			};
 		   
 		   private:
-			 class ReplyCloud :public gbxiceutilacfr::SafeThread{ 
+			 class ReplyCloud :public IceUtil::Thread{
 		       public: 
-		       	ReplyCloud (pointCloudI* pcloud, std::string filepath,  int playerDetection, int widthIn, int heightIn, int fpsIn) : gbxiceutilacfr::SafeThread(pcloud->context.tracer()), data(new jderobot::pointCloudData()), data2(new jderobot::pointCloudData())
+		       	ReplyCloud (pointCloudI* pcloud, std::string filepath,  int playerDetection, int widthIn, int heightIn, int fpsIn) : data(new jderobot::pointCloudData()), data2(new jderobot::pointCloudData()), _done(false)
 		        	{
 					path=filepath;
 					segmentation=playerDetection;
 					cWidth = widthIn;
 					cHeight = heightIn;
 					fps=fpsIn;
+					myCloud=pcloud;
+					mypro=NULL;
+
 				}
 		       
-		        void walk()
+		        void run()
 		        {
 				mypro= new openniServer::myprogeo();
 				mypro->load_cam((char*)path.c_str(),0, cWidth, cHeight);
@@ -811,14 +953,20 @@
 
 				cycle=(float)(1/(float)fps)*1000000;
 
-				while(!isStopping()){
+				while(!(_done)){
 					float distance;
 					gettimeofday(&a,NULL);
 					totala=a.tv_sec*1000000+a.tv_usec;
 					pthread_mutex_lock(&mutex);
+					// make a local copy of the color image and of the distance vector (under the mutex)
+					cv::Mat localRGB;
+					srcRGB->copyTo(localRGB);
+					std::vector<int> localDistance(distances);
+					pthread_mutex_unlock(&mutex);
+					pthread_mutex_lock(&(this->myCloud->localMutex));
 					data2->p.clear();
-					for( unsigned int i = 0 ; (i < cWidth*cHeight)&&(distances.size()>0); i=i+9) {
-							distance=(float)distances[i];
+					for( unsigned int i = 0 ; (i < cWidth*cHeight)&&(localDistance.size()>0); i=i+9) {
+							distance=(float)localDistance[i];
 							if (distance!=0){
 								//if (((unsigned char)srcRGB->data[3*i]!=0) && ((unsigned char)srcRGB->data[3*i+1]!=0) && ((unsigned char)srcRGB->data[3*i+2]!=0)){
 									float xp,yp,zp,camx,camy,camz;
@@ -857,14 +1005,14 @@
 									if ( segmentation){
 										auxP.id=pixelsID[i];
 									}
-									auxP.r=(float)(int) (unsigned char)srcRGB->data[3*i];
-									auxP.g=(float)(int) (unsigned char)srcRGB->data[3*i+1];
-									auxP.b=(float)(int) (unsigned char)srcRGB->data[3*i+2];
+									auxP.r=(float)(int) (unsigned char)localRGB.data[3*i];
+									auxP.g=(float)(int) (unsigned char)localRGB.data[3*i+1];
+									auxP.b=(float)(int) (unsigned char)localRGB.data[3*i+2];
 									data2->p.push_back(auxP);
 								}
 							//}
 						}
-					pthread_mutex_unlock(&mutex);
+					pthread_mutex_unlock(&(this->myCloud->localMutex));
 					if (totalpre !=0){
 						if ((totala - totalpre) > cycle ){
 							std::cout<<"-------- openniServer: WARNING- POINTCLOUD timeout-" << std::endl; 
@@ -877,9 +1025,26 @@
 						std::cout << "cloud: " <<  1000000/(totala-totalpre) << std::endl;
 					}*/
 					totalpre=totala;
-				}
 		        }
-		        myprogeo *mypro;
+		    }
+		        
+
+		       jderobot::pointCloudDataPtr getCloud()
+		       {
+		        pthread_mutex_lock(&(this->myCloud->localMutex));
+				data->p=data2->p;
+				pthread_mutex_unlock(&(this->myCloud->localMutex));
+		          return data;
+		       }
+
+		    virtual void destroy(){
+				this->_done=true;
+			}
+
+
+
+		   private:
+		       myprogeo *mypro;
 				int cWidth;
 				int cHeight;
 				int fps;
@@ -887,280 +1052,198 @@
 				jderobot::RGBPoint auxP;
 				std::string path;
 				int segmentation;
-		        
-		       jderobot::pointCloudDataPtr getCloud()
-		       {
-		          pthread_mutex_lock(&mutex);
-				data->p=data2->p;
-				pthread_mutex_unlock(&mutex);
-		          return data;
-		       }
-				
+				pointCloudI* myCloud;
+				bool _done;
 		      
-		    	};	
+		    };
 
 			typedef IceUtil::Handle<ReplyCloud> ReplyCloudPtr;
 			ReplyCloudPtr replyCloud;
 			std::string prefix;
-			jderobotice::Context context;
 			jderobot::pointCloudDataPtr data;
+			pthread_mutex_t localMutex;
+			IceUtil::ThreadControl control;
+
 			
 			
 		};
+} //namespace
 
 
+Ice::CommunicatorPtr ic;
+bool killed;
+openniServer::CameraRGB *camRGB;
+openniServer::CameraDEPTH *camDEPTH;
+openniServer::pointCloudI *pc1;
 
+void exitApplication(int s){
 
-/**
-* \brief Class wich contains all the functions and variables to controle the Pose3DMotors module
-*/
-/*class Pose3DMotorsI: virtual public jderobot::Pose3DMotors {
-	public:
-		Pose3DMotorsI(XN_USB_DEV_HANDLE* d, std::string& propertyPrefix, const jderobotice::Context& context): prefix(propertyPrefix),context(context),Pose3DMotorsData(new jderobot::Pose3DMotorsData()), Pose3DMotorsParams(new jderobot::Pose3DMotorsParams())
-		{
-			Ice::PropertiesPtr prop = context.properties();
-			Pose3DMotorsData->tilt=0;
-			Pose3DMotorsData->tiltSpeed=0;
-			rc= XN_STATUS_OK;
-			dev=d;
-			rc=xnUSBSendControl( *dev, XN_USB_CONTROL_TYPE_VENDOR, 0x06, 1, 0x00, NULL, 0, 0 );
-			CHECK_RC(rc,"led");
-     	}
 
-		virtual ~Pose3DMotorsI(){};
+	killed=true;
+	componentAlive=false;
 
-		virtual  Ice::Int setPose3DMotorsData(const jderobot::Pose3DMotorsDataPtr& p, const Ice::Current&){
-			Pose3DMotorsData=p;
-			uint8_t empty[0x1];
-			//int angle = 25 * 2;
-			rc = xnUSBSendControl(*dev, XN_USB_CONTROL_TYPE_VENDOR, 0x31, (XnUInt16)p->tilt, 0x0, empty, 0x0, 0);
-			CHECK_RC(rc,"Changing angle");
+	if (camRGB!= NULL)
+		delete camRGB;
+	if (camDEPTH != NULL)
+		delete camDEPTH;
+	if (pc1 != NULL){
+		delete pc1;
+	}
+	ic->shutdown();
 
-		};
-	
-		virtual jderobot::Pose3DMotorsParamsPtr getPose3DMotorsParams(const Ice::Current&){
-			return Pose3DMotorsParams;
-		};
 
-		virtual jderobot::Pose3DMotorsDataPtr getPose3DMotorsData (const Ice::Current&){
-			return Pose3DMotorsData;
-		};
 
-	private:
-		std::string prefix;
-		jderobotice::Context context;
-		jderobot::Pose3DMotorsDataPtr Pose3DMotorsData;
-		jderobot::Pose3DMotorsParamsPtr Pose3DMotorsParams;
-		XnStatus rc;
-		XN_USB_DEV_HANDLE* dev;
-    };*/
+	pthread_join(updateThread, NULL);
 
-/**
-* \brief Class wich contains all the functions and variables to controle the KinectLeds module
-*/
-/*class KinectLedsI: virtual public jderobot::KinectLeds {
-	public:
-		KinectLedsI(XN_USB_DEV_HANDLE* d, std::string& propertyPrefix, const jderobotice::Context& context): prefix(propertyPrefix),context(context)
-		{
-			Ice::PropertiesPtr prop = context.properties();
-			dev=d;
-     	}
 
-		virtual ~KinectLedsI(){};
+	m_device.close();
+	openni::OpenNI::shutdown();
+	exit(s); // 0 on the clean-shutdown call from main(); signal number when run as the SIGINT handler
 
-		virtual  void setLedActive(jderobot::KinectLedsAvailable led, const Ice::Current&){
-			int iled;
-			if (led==jderobot::OFF)
-				iled=0;
-			if (led==jderobot::GREEN)
-				iled=1;
-			if (led==jderobot::RED)
-				iled=2;
-			if (led==jderobot::YELLOW)
-				iled=3;
-			if (led==jderobot::BLINKYELLOW)
-				iled=4;
-			if (led==jderobot::BLINKGREEN)
-				iled=5;
-			if (led==jderobot::BLINKRED)
-				iled=6;
-			uint8_t empty[0x1];
-			rc = xnUSBSendControl(*dev, XN_USB_CONTROL_TYPE_VENDOR, 0x6, iled, 0x0, empty, 0x0, 0);
-			CHECK_RC(rc,"Changing led");
-		}
+}
 
-	private:
-		std::string prefix;
-		jderobotice::Context context;
-		XN_USB_DEV_HANDLE* dev;
-    };*/
 
-/**
-* \brief Main Class of the component wich create the diferents devices activated using the Ice configuration file.
-*/
-class Component: public jderobotice::Component{
-public:
-	Component()
-	:jderobotice::Component("openniServer"){}
+int main(int argc, char** argv){
 
-	virtual void start(){
-		Ice::PropertiesPtr prop = context().properties();
-		int cameraR = prop->getPropertyAsIntWithDefault(context().tag() + ".CameraRGB",0);
-		int cameraD = prop->getPropertyAsIntWithDefault(context().tag() + ".CameraDEPTH",0);
-		int motors = prop->getPropertyAsIntWithDefault(context().tag() + ".Pose3DMotorsActive",0);
-		int leds = prop->getPropertyAsIntWithDefault(context().tag() + ".KinectLedsActive",0);
-		int pointCloud = prop->getPropertyAsIntWithDefault(context().tag() + ".pointCloudActive",0);
-		int playerdetection = prop->getPropertyAsIntWithDefault(context().tag() + ".PlayerDetection",0);
-		configWidth=prop->getPropertyAsIntWithDefault("openniServer.Width", 320);
-		configHeight=prop->getPropertyAsIntWithDefault("openniServer.Height",240);
-		configFps=prop->getPropertyAsIntWithDefault("openniServer.Fps",30);
-		
-		
+	componentAlive=true;
+	killed=false;
+	struct sigaction sigIntHandler;
 
-		SELCAM = prop->getPropertyAsIntWithDefault(context().tag() + ".deviceId",0);
-		std::cout << "Selected device: " << SELCAM << std::endl;
-		int nCameras=0;
+	sigIntHandler.sa_handler = exitApplication;
+	sigemptyset(&sigIntHandler.sa_mask);
+	sigIntHandler.sa_flags = 0;
 
+	sigaction(SIGINT, &sigIntHandler, NULL);
 
-		/*COLORS*/
-		colors[0][0]=0;
-		colors[0][1]=0;
-		colors[0][2]=255;
-		colors[1][0]=0;
-		colors[1][1]=255;
-		colors[1][2]=255;
-		colors[2][0]=255;
-		colors[2][1]=255;
-		colors[2][2]=0;
-		colors[3][0]=255;
-		colors[3][1]=0;
-		colors[3][2]=0;
-		colors[4][0]=0;
-		colors[4][1]=255;
-		colors[4][2]=0;
-		colors[5][0]=255;
-		colors[5][1]=255;
-		colors[5][2]=0;
-		colors[6][0]=0;
-		colors[6][1]=0;
-		colors[6][2]=0;
-		colors[7][0]=150;
-		colors[7][1]=150;
-		colors[7][2]=0;
-		colors[8][0]=150;
-		colors[8][1]=150;
-		colors[8][2]=150;
-		colors[9][0]=0;
-		colors[9][1]=150;
-		colors[9][2]=150;
 
-		nCameras=cameraR + cameraD;
-		//g_context =  new xn::Context;
-		std::cout << "NCAMERAS = " << nCameras << std::endl;
-		cameras.resize(nCameras);
-		pthread_mutex_init(&mutex, NULL);
-		if ((nCameras>0)||(pointCloud)){
-			
 
-			pthread_create(&threads[0], NULL, &openniServer::updateThread, NULL);
+	Ice::ObjectPtr Pose3DMotors1;
+	Ice::ObjectPtr kinectleds1;
+	Ice::ObjectPtr pointcloud1;
+	Ice::PropertiesPtr prop;
 
-		}
 
-		if ((motors) || (leds)){
-			/*const XnUSBConnectionString *paths; 
-			XnUInt32 count; 
-			std::cout << "inicializo el dispositivo" << std::endl;
-			rc = xnUSBInit();
-			CHECK_RC(rc, "USB Initialization") ;
-			//rc = xnUSBOpenDevice(VID_MICROSOFT, PID_NUI_MOTOR, NULL, NULL, &dev);
-			CHECK_RC(rc,"Openning Device");
-			rc = xnUSBEnumerateDevices(VID_MICROSOFT, PID_NUI_MOTOR, &paths, &count);
-             	CHECK_RC(rc,"xnUSBEnumerateDevices failed");
+	try{
+			ic = Ice::initialize(argc,argv);
+			prop = ic->getProperties();
+	}
+	catch (const Ice::Exception& ex) {
+			std::cerr << ex << std::endl;
+			return 1;
+	}
+	catch (const char* msg) {
+			std::cerr <<"Error :" << msg << std::endl;
+			return 1;
+	}
+	std::string componentPrefix("openniServer");
 
+	cameraR = prop->getPropertyAsIntWithDefault(componentPrefix + ".CameraRGB",0);
+	cameraD = prop->getPropertyAsIntWithDefault(componentPrefix + ".CameraDEPTH",0);
+	int motors = prop->getPropertyAsIntWithDefault(componentPrefix + ".Pose3DMotorsActive",0);
+	int leds = prop->getPropertyAsIntWithDefault(componentPrefix + ".KinectLedsActive",0);
+	int pointCloud = prop->getPropertyAsIntWithDefault(componentPrefix + ".pointCloudActive",0);
+	int playerdetection = prop->getPropertyAsIntWithDefault(componentPrefix + ".PlayerDetection",0);
+	configWidth=prop->getPropertyAsIntWithDefault(componentPrefix + ".Width", 640);
+	configHeight=prop->getPropertyAsIntWithDefault(componentPrefix+ ".Height",480);
+	configFps=prop->getPropertyAsIntWithDefault(componentPrefix + ".Fps",30);
+	std::string Endpoints = prop->getProperty(componentPrefix + ".Endpoints");
+	Ice::ObjectAdapterPtr adapter =ic->createObjectAdapterWithEndpoints(componentPrefix, Endpoints);
 
-	        	// Open first found device
-        		rc = xnUSBOpenDeviceByPath(paths[SELCAM], &dev);
-	        	CHECK_RC(rc,"xnUSBOpenDeviceByPath failed");*/
-		}
 
-		if (cameraR){
-			std::string objPrefix(context().tag() + ".CameraRGB.");
-			std::string cameraName = prop->getProperty(objPrefix + "Name");
-			if (cameraName.size() == 0){//no name specified, we create one using the index
-				cameraName = "cameraR";
-				prop->setProperty(objPrefix + "Name",cameraName);//set the value
-				}
-			context().tracer().info("Creating camera " + cameraName);
-			cameras[0] = new CameraRGB(objPrefix,context());
-			context().createInterfaceWithString(cameras[0],cameraName);
-			std::cout<<"              -------- openniServer: Component: CameraRGB created successfully   --------" << std::endl;
-		}
-		if (cameraD){
-			std::string objPrefix(context().tag() + ".CameraDEPTH.");
-			std::string cameraName = prop->getProperty(objPrefix + "Name");
-			if (cameraName.size() == 0){//no name specified, we create one using the index
-				cameraName = "cameraD";
-				prop->setProperty(objPrefix + "Name",cameraName);//set the value
-				}
-			context().tracer().info("Creating camera " + cameraName);
-			cameras[1] = new CameraDEPTH(objPrefix,context());
-			context().createInterfaceWithString(cameras[1],cameraName);
-			//test camera ok
-			std::cout<<"              -------- openniServer: Component: CameraDEPTH created successfully   --------" << std::endl;
-		}
-		if (motors){
-			/*std::string objPrefix4="Pose3DMotors1";
-			std::string Pose3DMotorsName = "Pose3DMotors1";
-			context().tracer().info("Creating Pose3DMotors1 " + Pose3DMotorsName);
-			Pose3DMotors1 = new Pose3DMotorsI(&dev,objPrefix4,context());
-			context().createInterfaceWithString(Pose3DMotors1,Pose3DMotorsName);
-			std::cout<<"              -------- openniServer: Component: Pose3DMotors created successfully   --------" << std::endl;*/
-		}
-			
-		if (leds){
-			/*std::string objPrefix4="kinectleds1";
-			std::string Name = "kinectleds1";
-			context().tracer().info("Creating kinectleds1 " + Name);
-			kinectleds1 = new KinectLedsI(&dev,objPrefix4,context());
-			context().createInterfaceWithString(kinectleds1,Name);
-			std::cout<<"              -------- openniServer: Component: KinectLeds created successfully   --------" << std::endl;
-			*/
-		}
-		if (pointCloud){
-			std::string objPrefix5="pointcloud1";
-			std::string Name = "pointcloud1";
-			context().tracer().info("Creating pointcloud1 " + Name);
-			pointcloud1 = new pointCloudI(objPrefix5,context());
-			context().createInterfaceWithString(pointcloud1,Name);
-			std::cout<<"              -------- openniServer: Component: PointCloud created successfully   --------" << std::endl;
-		}
 
-		std::cout << "LISTOOOOOOOOOOOO" << std::endl;
-		sleep(50);
-    }
+	if (playerdetection){
+		cameraR=1;
+		cameraD=1;
+	}
 
-    virtual ~Component(){
-    }
+	SELCAM = prop->getPropertyAsIntWithDefault(componentPrefix + ".deviceId",0);
+	std::cout << "Selected device: " << SELCAM << std::endl;
+	int nCameras=0;
 
-  private:
-    std::vector<Ice::ObjectPtr> cameras;
-	Ice::ObjectPtr Pose3DMotors1;
-	Ice::ObjectPtr kinectleds1;
-	Ice::ObjectPtr pointcloud1;
-	pthread_t threads[NUM_THREADS];
-	//XN_USB_DEV_HANDLE dev;
 
-  };
+	/*COLORS*/
+	colors[0][0]=0;
+	colors[0][1]=0;
+	colors[0][2]=255;
+	colors[1][0]=0;
+	colors[1][1]=255;
+	colors[1][2]=255;
+	colors[2][0]=255;
+	colors[2][1]=255;
+	colors[2][2]=0;
+	colors[3][0]=255;
+	colors[3][1]=0;
+	colors[3][2]=0;
+	colors[4][0]=0;
+	colors[4][1]=255;
+	colors[4][2]=0;
+	colors[5][0]=255;
+	colors[5][1]=255;
+	colors[5][2]=0;
+	colors[6][0]=0;
+	colors[6][1]=0;
+	colors[6][2]=0;
+	colors[7][0]=150;
+	colors[7][1]=150;
+	colors[7][2]=0;
+	colors[8][0]=150;
+	colors[8][1]=150;
+	colors[8][2]=150;
+	colors[9][0]=0;
+	colors[9][1]=150;
+	colors[9][2]=150;
 
-} //namespace
+	nCameras=cameraR + cameraD;
+	//g_context =  new xn::Context;
+	std::cout << "NCAMERAS = " << nCameras << std::endl;
+	pthread_mutex_init(&mutex, NULL);
+	if ((nCameras>0)||(pointCloud)){
 
-int main(int argc, char** argv){
 
-  openniServer::Component component;
+		pthread_create(&updateThread, NULL, &openniServer::updateThread, NULL);
 
-	//usleep(1000);
-     jderobotice::Application app(component);
+	}
 
-     return app.jderobotMain(argc,argv);
+	if (cameraR){
+		std::string objPrefix(componentPrefix + ".CameraRGB.");
+		std::string cameraName = prop->getProperty(objPrefix + "Name");
+		if (cameraName.size() == 0){//no name specified, we create one using the index
+			cameraName = "cameraR";
+			prop->setProperty(objPrefix + "Name",cameraName);//set the value
+			}
+		std::cout << "Creating camera " << cameraName << std::endl;
+		camRGB = new openniServer::CameraRGB(objPrefix,prop);
+		adapter->add(camRGB, ic->stringToIdentity(cameraName));
+		std::cout<<"              -------- openniServer: Component: CameraRGB created successfully   --------" << std::endl;
+	}
+	if (cameraD){
+		std::string objPrefix(componentPrefix + ".CameraDEPTH.");
+		std::string cameraName = prop->getProperty(objPrefix + "Name");
+		if (cameraName.size() == 0){//no name specified, we create one using the index
+			cameraName = "cameraD";
+			prop->setProperty(objPrefix + "Name",cameraName);//set the value
+			}
+		std::cout << "Creating camera " <<  cameraName << std::endl;
+		camDEPTH = new openniServer::CameraDEPTH(objPrefix,prop);
+		adapter->add(camDEPTH, ic->stringToIdentity(cameraName));
+		//test camera ok
+		std::cout<<"              -------- openniServer: Component: CameraDEPTH created successfully   --------" << std::endl;
+	}
+	if (pointCloud){
+		std::string objPrefix(componentPrefix + ".PointCloud.");
+		std::string Name = prop->getProperty(objPrefix + "Name");
+		std::cout << "Creating pointcloud1 " << Name << std::endl;
+		pc1 = new openniServer::pointCloudI(objPrefix,prop);
+		adapter->add(pc1 , ic->stringToIdentity(Name));
+		// (review) removed duplicate add: 'pointcloud1' is never assigned (null servant) and re-adding the same identity throws Ice::AlreadyRegisteredException
+		std::cout<<"              -------- openniServer: Component: PointCloud created successfully   --------" << std::endl;
+	}
 
+	adapter->activate();
+	ic->waitForShutdown();
+
+	if (!killed)
+		exitApplication(0);
+	return 0;
+
 }

Modified: trunk/src/components/openniServer-kinect/CMakeLists.txt
===================================================================
--- trunk/src/components/openniServer-kinect/CMakeLists.txt	2013-07-16 07:52:13 UTC (rev 941)
+++ trunk/src/components/openniServer-kinect/CMakeLists.txt	2013-07-16 08:07:43 UTC (rev 942)
@@ -17,12 +17,10 @@
 	    add_executable (openniServer-kinect ${SOURCE_FILES})
 
         TARGET_LINK_LIBRARIES(openniServer-kinect
-            ${LIBS_DIR}/jderobotice/libjderobotice.so
             ${LIBS_DIR}/jderobotutil/libjderobotutil.so
             ${LIBS_DIR}/progeo/libprogeo.so
             ${LIBS_DIR}/colorspaces/libcolorspacesmm.so
             ${INTERFACES_CPP_DIR}/jderobot/libJderobotInterfaces.so
-            ${Gearbox_LIBRARIES}
 	        ${ZeroCIce_LIBRARIES}
 	        ${openni_LIBRARIES}
 			${nite_LIBRARIES}

Modified: trunk/src/components/openniServer-kinect/build-independent/CMakeLists.txt
===================================================================
--- trunk/src/components/openniServer-kinect/build-independent/CMakeLists.txt	2013-07-16 07:52:13 UTC (rev 941)
+++ trunk/src/components/openniServer-kinect/build-independent/CMakeLists.txt	2013-07-16 08:07:43 UTC (rev 942)
@@ -61,25 +61,6 @@
 
 
 
-#manual gearbox libraries
-FIND_PATH( Gearbox_INCLUDE_DIR NAMES gbxutilacfr/gbxutilacfr.h  PATHS ENV C++LIB ENV PATH PATH_SUFFIXES gearbox)
- 
-IF( Gearbox_INCLUDE_DIR )
-    FIND_LIBRARY( Gearbox_LIBRARY1 NAMES GbxUtilAcfr PATHS ENV C++LIB ENV PATH PATH_SUFFIXES lib/gearbox lib64/gearbox )
-    FIND_LIBRARY( Gearbox_LIBRARY2 NAMES GbxIceUtilAcfr PATHS ENV C++LIB ENV PATH PATH_SUFFIXES lib/gearbox lib64/gearbox)
-    SET (Gearbox_LIBRARIES ${Gearbox_LIBRARY1} ${Gearbox_LIBRARY2})
-    IF( Gearbox_LIBRARIES )
-		MESSAGE ("-- Gearbox found at ${Gearbox_LIBRARIES}")
-		include_directories(${Gearbox_INCLUDE_DIR})
-		link_directories(${Gearbox_LIBRARIES})
-    ENDIF( Gearbox_LIBRARIES )
-ENDIF(Gearbox_INCLUDE_DIR)
-
-IF(NOT Gearbox_LIBRARIES)
-		MESSAGE ("*** Gearbox not found")
-ENDIF()
-
-
 #manual ICE
 FIND_PATH( Ice_INCLUDE_DIR NAMES Ice/Ice.h  PATHS ENV C++LIB ENV)
  
@@ -112,13 +93,11 @@
 TARGET_LINK_LIBRARIES(openniServer-kinect
   ${opencv_LIBRARIES}
   ${gsl_LIBRARIES}
-  ${Gearbox_LIBRARIES}
   ${Ice_LIBRARIES} 
   ${openni_LIBRARIES}
   ${nite_LIBRARIES}
   ${LIBS_DIR}/libcolorspacesmm.so
   ${LIBS_DIR}/libJderobotInterfaces.so
-  ${LIBS_DIR}/libjderobotice.so
   ${LIBS_DIR}/libjderobotutil.so
   ${LIBS_DIR}/libprogeo.so
  

Modified: trunk/src/components/openniServer-kinect/openniServer-kinect.cpp
===================================================================
--- trunk/src/components/openniServer-kinect/openniServer-kinect.cpp	2013-07-16 07:52:13 UTC (rev 941)
+++ trunk/src/components/openniServer-kinect/openniServer-kinect.cpp	2013-07-16 08:07:43 UTC (rev 942)
@@ -25,21 +25,16 @@
 
 #include <Ice/Ice.h>
 #include <IceUtil/IceUtil.h>
-#include <gbxsickacfr/gbxiceutilacfr/safethread.h>
 #include <jderobot/kinectleds.h>
 #include <jderobot/camera.h>
 #include <jderobot/pose3dmotors.h>
 #include <jderobot/remoteCloud.h>
 #include <jderobot/remoteConfig.h>
 #include <colorspaces/colorspacesmm.h>
-#include <jderobotice/component.h>
-#include <jderobotice/application.h>
 #include <tr1/memory>
 #include <list>
 #include <sstream>
-#include <jderobotice/exceptions.h>
 #include <math.h>
-//#include <controller.h>
 #include <cv.h>
 #include <highgui.h>
 #include <XnOS.h>
@@ -74,40 +69,18 @@
 #define MAX_DEVICES 5
 #define MAX_CAMERAS_SERVER 2
 
-
-
-
-using namespace xn;
-
-namespace openniServer{
-
-/*user tracker*/
-xn::Player g_Player;
-
-XnBool g_bNeedPose = FALSE;
-XnChar g_strPose[20] = "";
-XnBool g_bDrawBackground = TRUE;
-XnBool g_bDrawPixels = TRUE;
-XnBool g_bDrawSkeleton = TRUE;
-XnBool g_bPrintID = TRUE;
-XnBool g_bPrintState = TRUE;
-
-XnStatus rc=XN_STATUS_OK;;
-xn::Context g_context;	
-pthread_mutex_t mutex;
-int n_devices=0;
-int SELCAM;
-std::vector<int> distances;
-IplImage* srcRGB=NULL;
 int colors[10][3];
-int userGeneratorActive=1;
 int width;
 int height;
+int SELCAM;
+pthread_mutex_t mutex;
+XnStatus rc=XN_STATUS_OK;
+xn::Context g_context;
 
-
 /*OJO solo funciona con imágenes de 640x480, no con imágenes redimensionadas, si valdría con tamaños fijados con configuración openni, pero no hemos conseguido que funcione variar la resolución por configuración*/
 std::vector<int> pixelsID;
 //int pixelsID[640*480];
+int userGeneratorActive=1;
 
 
 struct KinectDevice
@@ -131,6 +104,39 @@
 };
 
 KinectDevice sensors[MAX_DEVICES];
+int n_devices=0;
+
+/*user tracker*/
+xn::Player g_Player;
+
+XnBool g_bNeedPose = FALSE;
+XnChar g_strPose[20] = "";
+XnBool g_bDrawBackground = TRUE;
+XnBool g_bDrawPixels = TRUE;
+XnBool g_bDrawSkeleton = TRUE;
+XnBool g_bPrintID = TRUE;
+XnBool g_bPrintState = TRUE;
+
+
+using namespace xn;
+
+namespace openniServer{
+
+
+
+
+
+
+std::vector<int> distances;
+IplImage* srcRGB=NULL;
+
+
+
+
+
+
+
+
 //std::vector<KinectDevice> sensors;
 
 
@@ -163,6 +169,8 @@
       usleep(10);
 
    }
+   pthread_exit(NULL);
+   return NULL;
    
 }
 
@@ -336,20 +344,19 @@
 */
 	class CameraRGB: virtual public jderobot::Camera {
 public:
-	CameraRGB(std::string& propertyPrefix, const jderobotice::Context& context)
-      : prefix(propertyPrefix),context(context),
+	CameraRGB(std::string& propertyPrefix, Ice::PropertiesPtr propIn)
+      : prefix(propertyPrefix),
 	imageFmt(),
 	imageDescription(new jderobot::ImageDescription()),
 	cameraDescription(new jderobot::CameraDescription()),
 	replyTask()
 	{
-	Ice::PropertiesPtr prop = context.properties();
+	Ice::PropertiesPtr prop = propIn;
 
 	//fill cameraDescription
 	cameraDescription->name = prop->getProperty(prefix+"Name");
 	if (cameraDescription->name.size() == 0)
-	throw 
-		jderobotice::ConfigFileException(ERROR_INFO,"Camera name not configured");
+		std::cout << "Camera name not configured" << std::endl;
 
 	cameraDescription->shortDescription = prop->getProperty(prefix+"ShortDescription");
 
@@ -365,20 +372,20 @@
 	std::string fmtStr = prop->getPropertyWithDefault(prefix+"Format","YUY2");//default format YUY2
 	imageFmt = colorspaces::Image::Format::searchFormat(fmtStr);
 	if (!imageFmt)
-	throw 
-		jderobotice::ConfigFileException(ERROR_INFO, "Format " + fmtStr + " unknown");
+		std::cout <<  "Format " <<  fmtStr <<  " unknown" << std::endl;
 	imageDescription->size = imageDescription->width * imageDescription->height * CV_ELEM_SIZE(imageFmt->cvType);
 	imageDescription->format = imageFmt->name;
 
-	context.tracer().info("Starting thread for camera: " + cameraDescription->name);
+	std::cout << "Starting thread for camera: " + cameraDescription->name << std::endl;
 	replyTask = new ReplyTask(this, imageDescription->width, imageDescription->height,fps, playerdetection);
 
 	replyTask->start();//my own thread
 	}
 
 	virtual ~CameraRGB(){
-		context.tracer().info("Stopping and joining thread for camera: " + cameraDescription->name);
-		gbxiceutilacfr::stopAndJoin(replyTask);
+		std::cout << "Stopping and joining thread for camera: " + cameraDescription->name << std::endl;
+		this->replyTask->destroy();
+		this->control.join();
 	}
     
 	virtual jderobot::ImageDescriptionPtr getImageDescription(const Ice::Current& c){
@@ -394,24 +401,28 @@
 	}
 
 	virtual std::string startCameraStreaming(const Ice::Current&){
-		context.tracer().info("Should be made anything to start camera streaming: " + cameraDescription->name);
+		std::cout << "Should be made anything to start camera streaming: " + cameraDescription->name << std::endl;
+		return std::string("");
 	}
 
 	virtual void stopCameraStreaming(const Ice::Current&) {
-		context.tracer().info("Should be made anything to stop camera streaming: " + cameraDescription->name);
+		std::cout << "Should be made anything to stop camera streaming: " + cameraDescription->name << std::endl;
 	}
 
 	virtual Ice::Int setCameraDescription(const jderobot::CameraDescriptionPtr&, const Ice::Current&){
-		
+		return 0;
 	}
 
 private:
-	class ReplyTask: public gbxiceutilacfr::SafeThread{
+	class ReplyTask: public IceUtil::Thread{
 	public:
 		ReplyTask(CameraRGB* camera, int width, int height, int fps, int playerdetection)
-	: gbxiceutilacfr::SafeThread(camera->context.tracer()), mycameravga(camera) {
+	:mycameravga(camera), _done(false) {
 		segmentation=playerdetection;
 		this->fps=fps;
+		g_pTexMap=NULL;
+		g_nTexMapX=0;
+		g_nTexMapY=0;
       }
 		
 
@@ -420,7 +431,7 @@
 		requests.push_back(cb);
 	}
 
-    virtual void walk(){
+    virtual void run(){
 		int h=sensors[SELCAM].imageMD.YRes();
 		int w=sensors[SELCAM].imageMD.XRes();
 
@@ -447,7 +458,7 @@
 		cycle=(float)(1/(float)fps)*1000000;
 		
 	
-		while(!isStopping()){
+		while(!(_done)){
 			gettimeofday(&a,NULL);
 			totala=a.tv_sec*1000000+a.tv_usec;
 			pthread_mutex_lock(&mutex);
@@ -539,28 +550,32 @@
 			totalpre=totala;
 		}
 	}
+    virtual void destroy(){
+		this->_done=true;
+	}
 	
-	CameraRGB* mycameravga;
-	IceUtil::Mutex requestsMutex;
-	std::list<jderobot::AMD_ImageProvider_getImageDataPtr> requests;
+	private:
+		CameraRGB* mycameravga;
+		IceUtil::Mutex requestsMutex;
+		std::list<jderobot::AMD_ImageProvider_getImageDataPtr> requests;
+
+		std::vector<uint8_t> rgb;
+		XnRGB24Pixel* g_pTexMap;
+		unsigned int g_nTexMapX,g_nTexMapY;
+		int segmentation;
+		int fps;
+		bool _done;
 	
-	std::vector<uint8_t> rgb;
-	char *img; 
-	XnRGB24Pixel* g_pTexMap;
-	unsigned int g_nTexMapX,g_nTexMapY;
-	int segmentation;
-	int fps;
-	
     };
     typedef IceUtil::Handle<ReplyTask> ReplyTaskPtr;
 
 
     std::string prefix;
-	jderobotice::Context context;
     colorspaces::Image::FormatPtr imageFmt;
     jderobot::ImageDescriptionPtr imageDescription;
     jderobot::CameraDescriptionPtr cameraDescription;
     ReplyTaskPtr replyTask;
+    IceUtil::ThreadControl control;
 
 
   };
@@ -569,8 +584,8 @@
 //*********************************************************************
 	class CameraDEPTH: virtual public jderobot::Camera {
 public:
-	CameraDEPTH(std::string& propertyPrefix, const jderobotice::Context& context)
-      : prefix(propertyPrefix),context(context),
+	CameraDEPTH(std::string& propertyPrefix, Ice::PropertiesPtr propIn)
+      : prefix(propertyPrefix),
 	imageFmt(),
 	imageDescription(new jderobot::ImageDescription()),
 	cameraDescription(new jderobot::CameraDescription()),
@@ -578,13 +593,12 @@
 	{
       
       
-	Ice::PropertiesPtr prop = context.properties();
+	Ice::PropertiesPtr prop = propIn;
 
 	//fill cameraDescription
 	cameraDescription->name = prop->getProperty(prefix+"Name");
 	if (cameraDescription->name.size() == 0)
-	throw 
-		jderobotice::ConfigFileException(ERROR_INFO,"Camera name not configured");
+		std::cout << "Camera name not configured" << std::endl;
 
 	cameraDescription->shortDescription = prop->getProperty(prefix+"ShortDescription");
 
@@ -599,20 +613,20 @@
 	std::string fmtStr = prop->getPropertyWithDefault(prefix+"Format","YUY2");//default format YUY2
 	imageFmt = colorspaces::Image::Format::searchFormat(fmtStr);
 	if (!imageFmt)
-	throw 
-		jderobotice::ConfigFileException(ERROR_INFO, "Format " + fmtStr + " unknown");
+		std::cout << "Format " << fmtStr << " unknown" << std::endl;
 	imageDescription->size = imageDescription->width * imageDescription->height * CV_ELEM_SIZE(imageFmt->cvType);
 	imageDescription->format = imageFmt->name;
 
-	context.tracer().info("Starting thread for camera: " + cameraDescription->name);
+	std::cout << "Starting thread for camera: " + cameraDescription->name << std::endl;
 	replyTask = new ReplyTask(this, imageDescription->width, imageDescription->height,fps, playerdetection);
 
 	replyTask->start();//my own thread
 	}
 
 	virtual ~CameraDEPTH(){
-		context.tracer().info("Stopping and joining thread for camera: " + cameraDescription->name);
-		gbxiceutilacfr::stopAndJoin(replyTask);
+		std::cout << "Stopping and joining thread for camera: " + cameraDescription->name << std::endl;
+		this->replyTask->destroy();
+		this->control.join();
 	}
     
 	virtual jderobot::ImageDescriptionPtr getImageDescription(const Ice::Current& c){
@@ -628,24 +642,28 @@
 	}
 
 	virtual std::string startCameraStreaming(const Ice::Current&){
-		context.tracer().info("Should be made anything to start camera streaming: " + cameraDescription->name);
+		std::cout << "Should be made anything to start camera streaming: " + cameraDescription->name << std::endl;
+		return std::string("");
 	}
 
 	virtual void stopCameraStreaming(const Ice::Current&) {
-		context.tracer().info("Should be made anything to stop camera streaming: " + cameraDescription->name);
+		std::cout << "Should be made anything to stop camera streaming: " + cameraDescription->name << std::endl;
 	}
 	
 	virtual Ice::Int setCameraDescription(const jderobot::CameraDescriptionPtr&, const Ice::Current&){
-		
+		return 0;
 	}
 
 private:
-	class ReplyTask: public gbxiceutilacfr::SafeThread{
+	class ReplyTask: public IceUtil::Thread{
 	public:
 		ReplyTask(CameraDEPTH* camera, int width, int height, int fps, int playerDetection)
-	: gbxiceutilacfr::SafeThread(camera->context.tracer()), mycameradepth(camera) {
+	: mycameradepth(camera), _done(false) {
 		segmentation=playerDetection;
 		this->fps=fps;
+		g_pTexMap=NULL;
+		g_nTexMapX=0;
+		g_nTexMapY=0;
       }
 		
 
@@ -654,7 +672,7 @@
 		requests.push_back(cb);
 	}
 
-    virtual void walk(){
+    virtual void run(){
 		int test;
 		
 
@@ -677,7 +695,7 @@
 		//std::cout << "FPS depth: " << fps << std::endl;
 		cycle=(float)(1/(float)fps)*1000000;
 		
-		while(!isStopping()){
+		while(!(_done)){
 			gettimeofday(&a,NULL);
 			totala=a.tv_sec*1000000+a.tv_usec;
 			pthread_mutex_lock(&mutex);
@@ -753,43 +771,46 @@
 			totalpre=totala;
 		}
 	}
+
+    virtual void destroy(){
+		this->_done=true;
+	}
 	
-	CameraDEPTH* mycameradepth;
-	IceUtil::Mutex requestsMutex;
-	std::list<jderobot::AMD_ImageProvider_getImageDataPtr> requests;
+	private:
+		CameraDEPTH* mycameradepth;
+		IceUtil::Mutex requestsMutex;
+		std::list<jderobot::AMD_ImageProvider_getImageDataPtr> requests;
+
+		int segmentation;
+		XnRGB24Pixel* g_pTexMap;
+		unsigned int g_nTexMapX,g_nTexMapY;
+		int fps;
+		bool _done;
 	
-	xn::Context *context;
-	int segmentation;
-	char *img; 
-	XnRGB24Pixel* g_pTexMap;
-	unsigned int g_nTexMapX,g_nTexMapY;	
-	float g_pDepthHist[100000];
-	int fps;
 	
-	
     };
     typedef IceUtil::Handle<ReplyTask> ReplyTaskPtr;
 
 
     std::string prefix;
-    jderobotice::Context context;
     colorspaces::Image::FormatPtr imageFmt;
     jderobot::ImageDescriptionPtr imageDescription;
     jderobot::CameraDescriptionPtr cameraDescription;
     ReplyTaskPtr replyTask;
+    IceUtil::ThreadControl control;
 
   };
 
 /**
-* \brief Class wich contains all the functions and variables to serve point cloud interface
+* \brief Class which contains all the functions and variables to serve point cloud interface
 */
 
 	class pointCloudI: virtual public jderobot::remoteCloud{
 		public:
-			pointCloudI (std::string& propertyPrefix, const jderobotice::Context& context):
-				prefix(propertyPrefix),context(context),data(new jderobot::pointCloudData()) {
-					Ice::PropertiesPtr prop = context.properties();
-
+			pointCloudI (std::string& propertyPrefix, Ice::PropertiesPtr propIn):
+				prefix(propertyPrefix),data(new jderobot::pointCloudData()) {
+					Ice::PropertiesPtr prop = propIn;
+					idLocal=0;
 					int playerdetection = prop->getPropertyAsIntWithDefault("openniServer.PlayerDetection",0);
 					int fps =prop->getPropertyAsIntWithDefault("openniServer.pointCloud.Fps",10);
 					if (!(userGeneratorActive))
@@ -797,6 +818,10 @@
 					   replyCloud = new ReplyCloud(this,prop->getProperty("openniServer.calibration"), playerdetection, width, height,fps);
 					   replyCloud->start();
 				}
+		~pointCloudI(){
+			this->replyCloud->destroy();
+			this->control.join();
+		}
 		
 
 		virtual jderobot::pointCloudDataPtr getCloudData(const Ice::Current&){
@@ -826,13 +851,16 @@
 		};
 
 		virtual std::string read(Ice::Int id, const Ice::Current&){
+			return std::string("");
 		};
 
 
 		virtual Ice::Int write(const std::string& data, Ice::Int id, const Ice::Current&){
 			if (id == idLocal){
 				f2 << data << std::endl;
+				return 1;
 			}
+			return 0;
 		};
 
 		virtual Ice::Int setConfiguration(Ice::Int id, const Ice::Current&){
@@ -840,21 +868,24 @@
 				replyCloud->setCalibrationFile(path);
 				id=0;
 				f2.close();
+				return 1;
 			}
+			return 0;
 
 			//guardar el xml nuevo encima del cargado por defecto (la siguiente vez empezará directamente con la nueva configuración
 		};
 		   
 		   private:
-			 class ReplyCloud :public gbxiceutilacfr::SafeThread{ 
+			 class ReplyCloud :public IceUtil::Thread{
 		       public: 
-		       	ReplyCloud (pointCloudI* pcloud, std::string filepath,  int playerDetection, int widthIn, int heightIn, int fpsIn) : gbxiceutilacfr::SafeThread(pcloud->context.tracer()), data(new jderobot::pointCloudData()), data2(new jderobot::pointCloudData())
+		       	ReplyCloud (pointCloudI* pcloud, std::string filepath,  int playerDetection, int widthIn, int heightIn, int fpsIn) : data(new jderobot::pointCloudData()), data2(new jderobot::pointCloudData()), _done(false)
 		        	{
 					path=filepath;
 					segmentation=playerDetection;
 					cWidth = widthIn;
 					cHeight = heightIn;
 					fps=fpsIn;
+					mypro=NULL;
 				}
 
 				void setCalibrationFile(std::string path){
@@ -862,7 +893,7 @@
 				}
 
 		       
-		        void walk()
+		        void run()
 		        {
 				mypro= new openniServer::myprogeo();
 				mypro->load_cam((char*)path.c_str(),0, cWidth, cHeight);
@@ -874,7 +905,7 @@
 
 				cycle=(float)(1/(float)fps)*1000000;
 
-				while(!isStopping()){
+				while(!(_done)){
 					float distance;
 					gettimeofday(&a,NULL);
 					totala=a.tv_sec*1000000+a.tv_usec;
@@ -943,14 +974,7 @@
 					totalpre=totala;
 				}
 		        }
-		        myprogeo *mypro;
-				int cWidth;
-				int cHeight;
-				int fps;
-				jderobot::pointCloudDataPtr data, data2;
-				jderobot::RGBPoint auxP;
-				std::string path;
-				int segmentation;
+
 		        
 		       jderobot::pointCloudDataPtr getCloud()
 		       {
@@ -959,6 +983,21 @@
 				pthread_mutex_unlock(&mutex);
 		          return data;
 		       }
+
+		       virtual void destroy(){
+					this->_done=true;
+				}
+
+		       private:
+					myprogeo *mypro;
+					int cWidth;
+					int cHeight;
+					int fps;
+					jderobot::pointCloudDataPtr data, data2;
+					jderobot::RGBPoint auxP;
+					std::string path;
+					int segmentation;
+					bool _done;
 				
 		      
 		    	};	
@@ -966,11 +1005,11 @@
 			typedef IceUtil::Handle<ReplyCloud> ReplyCloudPtr;
 			ReplyCloudPtr replyCloud;
 			std::string prefix;
-			jderobotice::Context context;
 			jderobot::pointCloudDataPtr data;
 			std::ofstream f2;
 			int idLocal;
 			std::string path;
+			IceUtil::ThreadControl control;
 			
 			
 		};
@@ -983,9 +1022,9 @@
 */
 class Pose3DMotorsI: virtual public jderobot::Pose3DMotors {
 	public:
-		Pose3DMotorsI(XN_USB_DEV_HANDLE* d, std::string& propertyPrefix, const jderobotice::Context& context): prefix(propertyPrefix),context(context),Pose3DMotorsData(new jderobot::Pose3DMotorsData()), Pose3DMotorsParams(new jderobot::Pose3DMotorsParams())
+		Pose3DMotorsI(XN_USB_DEV_HANDLE* d, std::string& propertyPrefix, Ice::PropertiesPtr propIn): prefix(propertyPrefix),Pose3DMotorsData(new jderobot::Pose3DMotorsData()), Pose3DMotorsParams(new jderobot::Pose3DMotorsParams())
 		{
-			Ice::PropertiesPtr prop = context.properties();
+			Ice::PropertiesPtr prop = propIn;
 			Pose3DMotorsData->tilt=0;
 			Pose3DMotorsData->tiltSpeed=0;
 			rc= XN_STATUS_OK;
@@ -1002,7 +1041,7 @@
 			//int angle = 25 * 2;
 			rc = xnUSBSendControl(*dev, XN_USB_CONTROL_TYPE_VENDOR, 0x31, (XnUInt16)p->tilt, 0x0, empty, 0x0, 0);
 			CHECK_RC(rc,"Changing angle");
-
+			return 0;
 		};
 	
 		virtual jderobot::Pose3DMotorsParamsPtr getPose3DMotorsParams(const Ice::Current&){
@@ -1015,7 +1054,6 @@
 
 	private:
 		std::string prefix;
-		jderobotice::Context context;
 		jderobot::Pose3DMotorsDataPtr Pose3DMotorsData;
 		jderobot::Pose3DMotorsParamsPtr Pose3DMotorsParams;
 		XnStatus rc;
@@ -1027,9 +1065,9 @@
 */
 class KinectLedsI: virtual public jderobot::KinectLeds {
 	public:
-		KinectLedsI(XN_USB_DEV_HANDLE* d, std::string& propertyPrefix, const jderobotice::Context& context): prefix(propertyPrefix),context(context)
+		KinectLedsI(XN_USB_DEV_HANDLE* d, std::string& propertyPrefix, Ice::PropertiesPtr propIn): prefix(propertyPrefix)
 		{
-			Ice::PropertiesPtr prop = context.properties();
+			Ice::PropertiesPtr prop = propIn;
 			dev=d;
      	}
 
@@ -1058,7 +1096,6 @@
 
 	private:
 		std::string prefix;
-		jderobotice::Context context;
 		XN_USB_DEV_HANDLE* dev;
     };
 
@@ -1069,10 +1106,10 @@
 */
 class RemoteConfigI: virtual public jderobot::remoteConfig {
 	public:
-		RemoteConfigI(Ice::ObjectPtr pointcloud1, std::string& propertyPrefix, const jderobotice::Context& context): prefix(propertyPrefix),context(context)
+		RemoteConfigI(Ice::ObjectPtr pointcloud1, std::string& propertyPrefix, Ice::PropertiesPtr propIn): prefix(propertyPrefix)
 		{
-			Ice::PropertiesPtr prop = context.properties();
-			
+			Ice::PropertiesPtr prop=propIn;
+			idLocal=0;
      	}
 
 		virtual ~RemoteConfigI(){};
@@ -1098,21 +1135,25 @@
 		};
 
 		virtual std::string read(Ice::Int id, const Ice::Current&){
+			return std::string("");
 		};
 
 
 		virtual Ice::Int write(const std::string& data, Ice::Int id, const Ice::Current&){
 			if (id == idLocal){
 				f2 << data << std::endl;
+				return 1;
 			}
+			return 0;
 		};
 
 		virtual Ice::Int setConfiguration(Ice::Int id, const Ice::Current&){
 			if (id == idLocal){
 				id=0;
 				f2.close();
+				return 1;
 			}
-
+			return 0;
 			//guardar el xml nuevo encima del cargado por defecto (la siguiente vez empezará directamente con la nueva configuración
 		};
 
@@ -1120,390 +1161,422 @@
 	private:
 		
 		std::string prefix;
-		jderobotice::Context context;
 		std::ofstream f2;
 		int idLocal;
     };
 
+} //namespace
 
 
-/**
-* \brief Main Class of the component wich create the diferents devices activated using the Ice configuration file.
-*/
-class Component: public jderobotice::Component{
-public:
-	Component()
-	:jderobotice::Component("openniServer"), cameras(0) {}
+openniServer::CameraRGB* camRGB;
+openniServer::CameraDEPTH* camDEPTH;
+openniServer::pointCloudI* pc1;
+openniServer::Pose3DMotorsI* Pose3DMotors1;
+openniServer::KinectLedsI* kinectleds1;
+openniServer::pointCloudI* pointcloud1;
+pthread_t threads[NUM_THREADS];
+XN_USB_DEV_HANDLE dev;
 
-	virtual void start(){
-		Ice::PropertiesPtr prop = context().properties();
-		int cameraR = prop->getPropertyAsIntWithDefault(context().tag() + ".CameraRGB",0);
-		int cameraD = prop->getPropertyAsIntWithDefault(context().tag() + ".CameraDEPTH",0);
-		int motors = prop->getPropertyAsIntWithDefault(context().tag() + ".Pose3DMotorsActive",0);
-		int leds = prop->getPropertyAsIntWithDefault(context().tag() + ".KinectLedsActive",0);
-		int pointCloud = prop->getPropertyAsIntWithDefault(context().tag() + ".pointCloudActive",0);
-		int playerdetection = prop->getPropertyAsIntWithDefault(context().tag() + ".PlayerDetection",0);
-		width=prop->getPropertyAsIntWithDefault("openniServer.Width", 640);
-		height=prop->getPropertyAsIntWithDefault("openniServer.Height",480);
-		int fps=prop->getPropertyAsIntWithDefault("openniServer.Fps",30);
-		
 
-		SELCAM = prop->getPropertyAsIntWithDefault(context().tag() + ".deviceId",0);
-		std::cout << "Selected device: " << SELCAM << std::endl;
-		int nCameras=0;
-		XnLicense m_license;
+Ice::CommunicatorPtr ic;
+bool killed;
 
-//		std::vector<DeviceInfo> sensors;
 
-		/*COLORS*/
-		colors[0][0]=0;
-		colors[0][1]=0;
-		colors[0][2]=255;
-		colors[1][0]=0;
-		colors[1][1]=255;
-		colors[1][2]=255;
-		colors[2][0]=255;
-		colors[2][1]=255;
-		colors[2][2]=0;
-		colors[3][0]=255;
-		colors[3][1]=0;
-		colors[3][2]=0;
-		colors[4][0]=0;
-		colors[4][1]=255;
-		colors[4][2]=0;
-		colors[5][0]=255;
-		colors[5][1]=255;
-		colors[5][2]=0;
-		colors[6][0]=0;
-		colors[6][1]=0;
-		colors[6][2]=0;
-		colors[7][0]=150;
-		colors[7][1]=150;
-		colors[7][2]=0;
-		colors[8][0]=150;
-		colors[8][1]=150;
-		colors[8][2]=150;
-		colors[9][0]=0;
-		colors[9][1]=150;
-		colors[9][2]=150;
 
-		nCameras=cameraR + cameraD;
-		//g_context =  new xn::Context;
-		std::cout << "NCAMERAS = " << nCameras << std::endl;
-		cameras.resize(MAX_CAMERAS_SERVER);
-		pthread_mutex_init(&mutex, NULL);
-		if ((nCameras>0)||(pointCloud)){
-			// Getting Sensors information and configure all sensors
-			rc = g_context.Init();
-			xn::NodeInfoList device_node_info_list;
-    			rc = g_context.EnumerateProductionTrees(XN_NODE_TYPE_DEVICE, NULL, device_node_info_list);
-    			if (rc != XN_STATUS_OK && device_node_info_list.Begin () != device_node_info_list.End ())
-				std::cout << "enumerating devices failed. Reason: " << xnGetStatusString(rc) << std::endl;
-	    		for (xn::NodeInfoList::Iterator nodeIt = device_node_info_list.Begin();
-         			nodeIt != device_node_info_list.End (); ++nodeIt)
-    				{
-				
-        			const xn::NodeInfo& deviceInfo = *nodeIt;
-        			const XnProductionNodeDescription& description = deviceInfo.GetDescription();
-        			std::cout << cv::format("Found device: vendor %s name %s", description.strVendor, description.strName) << std::endl;
-			
 
-		   		sensors[n_devices].camera_type = description.strName;
-		   		sensors[n_devices].vendor = description.strVendor;
-		   		sensors[n_devices].creation_info = deviceInfo.GetCreationInfo();
+void exitApplication(int s){
 
-		   		unsigned short vendor_id;
-		   		unsigned short product_id;
-		   		unsigned char bus;
-		   		unsigned char address;        
-		   		sscanf(deviceInfo.GetCreationInfo(), "%hx/%hx@%hhu/%hhu", &vendor_id, &product_id, &bus, &address);
-		   		sensors[n_devices].vendor_id = vendor_id;
-		   		sensors[n_devices].product_id = product_id;
-		   		sensors[n_devices].bus = bus;
-		   		sensors[n_devices].address = address;
-		   		//sensors.push_back(info);
-				n_devices++;
-    			}
+	killed=true;
 
-			strcpy(m_license.strVendor, "PrimeSense");
-    			strcpy(m_license.strKey, "0KOIk2JeIBYClPWVnMoRKn5cdY4=");
-    			g_context.AddLicense(m_license);
+	if (camRGB != NULL)
+		delete camRGB;
+	if (camDEPTH != NULL)
+		delete camDEPTH;
+	if (pc1 != NULL)
+		delete pc1;
+	if (Pose3DMotors1 != NULL)
+		delete Pose3DMotors1;
+	if (kinectleds1 != NULL)
+		delete kinectleds1;
+	if (pointcloud1 != NULL)
+		delete pointcloud1;
 
-			/*licencias */
-			libusb_context *context = 0;
-			int result = libusb_init(&context); //initialize a library session
-			if (result < 0) 
-				return;
+	ic->shutdown();
+	exit(0);
+}
 
-			libusb_device **devices;
-			int count = libusb_get_device_list (context, &devices);
-			if (count < 0) 
-				return;   //Count is the number of USB devices
 
-			for (int devIdx = 0; devIdx < count; ++devIdx)
-			{
-				libusb_device* device = devices[devIdx];
-				uint8_t busId = libusb_get_bus_number (device);
-				uint8_t address = libusb_get_device_address (device);
 
-				int device_id = -1;
-				for (size_t i = 0; device_id < 0 && i < n_devices; ++i)
+
+int main(int argc, char** argv){
+
+	killed=false;
+	Ice::PropertiesPtr prop;
+	std::string componentPrefix("openniServer");
+
+	try{
+		ic = Ice::initialize(argc,argv);
+		prop = ic->getProperties();
+	}
+	catch (const Ice::Exception& ex) {
+		std::cerr << ex << std::endl;
+		return 1;
+	}
+	catch (const char* msg) {
+		std::cerr <<"Error :" << msg << std::endl;
+		return 1;
+	}
+
+
+	std::string Endpoints = prop->getProperty(componentPrefix + ".Endpoints");
+	Ice::ObjectAdapterPtr adapter =ic->createObjectAdapterWithEndpoints(componentPrefix, Endpoints);
+
+	int cameraR = prop->getPropertyAsIntWithDefault(componentPrefix + ".CameraRGB",0);
+	int cameraD = prop->getPropertyAsIntWithDefault(componentPrefix + ".CameraDEPTH",0);
+	int motors = prop->getPropertyAsIntWithDefault(componentPrefix + ".Pose3DMotorsActive",0);
+	int leds = prop->getPropertyAsIntWithDefault(componentPrefix + ".KinectLedsActive",0);
+	int pointCloud = prop->getPropertyAsIntWithDefault(componentPrefix + ".pointCloudActive",0);
+	int playerdetection = prop->getPropertyAsIntWithDefault(componentPrefix + ".PlayerDetection",0);
+	width=prop->getPropertyAsIntWithDefault("openniServer.Width", 640);
+	height=prop->getPropertyAsIntWithDefault("openniServer.Height",480);
+	int fps=prop->getPropertyAsIntWithDefault("openniServer.Fps",30);
+
+
+	SELCAM = prop->getPropertyAsIntWithDefault(componentPrefix + ".deviceId",0);
+	std::cout << "Selected device: " << SELCAM << std::endl;
+	int nCameras=0;
+	XnLicense m_license;
+
+//		std::vector<DeviceInfo> sensors;
+
+	/*COLORS*/
+	colors[0][0]=0;
+	colors[0][1]=0;
+	colors[0][2]=255;
+	colors[1][0]=0;
+	colors[1][1]=255;
+	colors[1][2]=255;
+	colors[2][0]=255;
+	colors[2][1]=255;
+	colors[2][2]=0;
+	colors[3][0]=255;
+	colors[3][1]=0;
+	colors[3][2]=0;
+	colors[4][0]=0;
+	colors[4][1]=255;
+	colors[4][2]=0;
+	colors[5][0]=255;
+	colors[5][1]=255;
+	colors[5][2]=0;
+	colors[6][0]=0;
+	colors[6][1]=0;
+	colors[6][2]=0;
+	colors[7][0]=150;
+	colors[7][1]=150;
+	colors[7][2]=0;
+	colors[8][0]=150;
+	colors[8][1]=150;
+	colors[8][2]=150;
+	colors[9][0]=0;
+	colors[9][1]=150;
+	colors[9][2]=150;
+
+	nCameras=cameraR + cameraD;
+	//g_context =  new xn::Context;
+	std::cout << "NCAMERAS = " << nCameras << std::endl;
+	pthread_mutex_init(&mutex, NULL);
+	if ((nCameras>0)||(pointCloud)){
+		// Getting Sensors information and configure all sensors
+		rc = g_context.Init();
+		xn::NodeInfoList device_node_info_list;
+			rc = g_context.EnumerateProductionTrees(XN_NODE_TYPE_DEVICE, NULL, device_node_info_list);
+			if (rc != XN_STATUS_OK && device_node_info_list.Begin () != device_node_info_list.End ())
+			std::cout << "enumerating devices failed. Reason: " << xnGetStatusString(rc) << std::endl;
+			for (xn::NodeInfoList::Iterator nodeIt = device_node_info_list.Begin();
+				nodeIt != device_node_info_list.End (); ++nodeIt)
 				{
-					if (busId == sensors[i].bus && address == sensors[i].address)
-					device_id = i;
-				}
 
-				if (device_id < 0)
-					continue;
+				const xn::NodeInfo& deviceInfo = *nodeIt;
+				const XnProductionNodeDescription& description = deviceInfo.GetDescription();
+				std::cout << cv::format("Found device: vendor %s name %s", description.strVendor, description.strName) << std::endl;
 
-        			libusb_device_descriptor descriptor;
-        			result = libusb_get_device_descriptor (devices[devIdx], &descriptor);
-        			if (result == 0)
-        			{
-            			libusb_device_handle* dev_handle;
-            			result = libusb_open (device, &dev_handle);
-            			if (result == 0)
-            			{
-                			unsigned char buffer[1024];
-                			int len = libusb_get_string_descriptor_ascii (dev_handle, descriptor.iSerialNumber, buffer, 1024);
 
-                			if (len > 4)
-                			{
-                    			buffer[len] = 0;
-                    			sensors[device_id].serial = std::string((const char*) buffer);
-               			}
-                			else
-                			{
-                    			// If there is no serial (e.g. Asus XTION), use the bus address.
-                    			sensors[device_id].serial = cv::format("%d", busId);
-                			}
-                			libusb_close (dev_handle);
-            			}
-        			}
-    			}
-    			libusb_free_device_list (devices, 1);
-    			libusb_exit (context);
-			std::cout << "Number of detected devices: " << n_devices << std::endl;
-			NodeInfoList devicesList;
-			int devicesListCount = 0;
-		   	rc = g_context.EnumerateProductionTrees(XN_NODE_TYPE_DEVICE, NULL, devicesList);
-			for (NodeInfoList::Iterator it = devicesList.Begin(); it != devicesList.End(); ++it)
+			sensors[n_devices].camera_type = description.strName;
+			sensors[n_devices].vendor = description.strVendor;
+			sensors[n_devices].creation_info = deviceInfo.GetCreationInfo();
+
+			unsigned short vendor_id;
+			unsigned short product_id;
+			unsigned char bus;
+			unsigned char address;
+			sscanf(deviceInfo.GetCreationInfo(), "%hx/%hx@%hhu/%hhu", &vendor_id, &product_id, &bus, &address);
+			sensors[n_devices].vendor_id = vendor_id;
+			sensors[n_devices].product_id = product_id;
+			sensors[n_devices].bus = bus;
+			sensors[n_devices].address = address;
+			//sensors.push_back(info);
+			n_devices++;
+			}
+
+		strcpy(m_license.strVendor, "PrimeSense");
+			strcpy(m_license.strKey, "0KOIk2JeIBYClPWVnMoRKn5cdY4=");
+			g_context.AddLicense(m_license);
+
+		/*licencias */
+		libusb_context *context = 0;
+		int result = libusb_init(&context); //initialize a library session
+		if (result < 0)
+			return 0;
+
+		libusb_device **devices;
+		int count = libusb_get_device_list (context, &devices);
+		if (count < 0)
+			return 0;   //Count is the number of USB devices
+
+		for (int devIdx = 0; devIdx < count; ++devIdx)
+		{
+			libusb_device* device = devices[devIdx];
+			uint8_t busId = libusb_get_bus_number (device);
+			uint8_t address = libusb_get_device_address (device);
+
+			int device_id = -1;
+			for (size_t i = 0; device_id < 0 && i < n_devices; ++i)
 			{
-				devicesListCount++;
+				if (busId == sensors[i].bus && address == sensors[i].address)
+				device_id = i;
 			}
-			CHECK_RC(rc, "Enumerate");
-			int i=0;
-			for (NodeInfoList::Iterator it = devicesList.Begin(); it != devicesList.End(); ++it, ++i)
-			{
-				if (i==SELCAM){
-					KinectDevice& info = sensors[i];
-	        			if (info.serial.empty())
-	            			info.serial = i;
-					std::cout << cv::format("[Device %d] %s, %s, serial=%s", i, info.vendor.c_str(), info.camera_type.c_str(), info.serial.c_str()) << std::endl;
-					// Create the device node
-					NodeInfo deviceInfo = *it;
-				 	rc = g_context.CreateProductionTree(deviceInfo, sensors[i].device);
-					CHECK_RC(rc, "Create Device");
-					Query query;
-			       	query.AddNeededNode(deviceInfo.GetInstanceName());
-					xnOSMemCopy(sensors[i].name,deviceInfo.GetInstanceName(),
-					xnOSStrLen(deviceInfo.GetInstanceName()));
-					// Now create a depth generator over this device
-				 	rc = g_context.CreateAnyProductionTree(XN_NODE_TYPE_DEPTH, &query, sensors[i].depth);
-					// depth configuration:
-	
-					/*XnUInt32 xNuma = sensors[i].depth.GetSupportedMapOutputModesCount();
-					cout << "Support mode: " << xNuma << endl;
-	
-			
-					XnMapOutputMode* aModeD = new XnMapOutputMode[xNuma];
-					sensors[i].depth.GetSupportedMapOutputModes( aModeD, xNuma );
-					for( unsigned int j = 0; j < xNuma; j++ )
+
+			if (device_id < 0)
+				continue;
+
+				libusb_device_descriptor descriptor;
+				result = libusb_get_device_descriptor (devices[devIdx], &descriptor);
+				if (result == 0)
+				{
+					libusb_device_handle* dev_handle;
+					result = libusb_open (device, &dev_handle);
+					if (result == 0)
 					{
-						std::cout << aModeD[j].nXRes << " * " << aModeD[j].nYRes << " @" << aModeD[j].nFPS << "FPS" << std::endl;
+						unsigned char buffer[1024];
+						int len = libusb_get_string_descriptor_ascii (dev_handle, descriptor.iSerialNumber, buffer, 1024);
+
+						if (len > 4)
+						{
+							buffer[len] = 0;
+							sensors[device_id].serial = std::string((const char*) buffer);
 					}
-					delete[] aModeD;*/
-	
-	
-	
-	
-	    			    XnMapOutputMode depth_mode; 
-					depth_mode.nXRes = width;
-				    depth_mode.nYRes = height;
-				    depth_mode.nFPS = 30;
-				    sensors[i].depth.SetMapOutputMode(depth_mode);
-	
-					CHECK_RC(rc, "Create Depth");
-					// now create a image generator over this device
-				  	rc = g_context.CreateAnyProductionTree(XN_NODE_TYPE_IMAGE, &query, sensors[i].image);
-					CHECK_RC(rc, "Create Image");
-					/*XnUInt32 xNumb = sensors[i].image.GetSupportedMapOutputModesCount();
-					cout << "Support mode: " << xNumb << endl;
-					XnMapOutputMode* aModeR = new XnMapOutputMode[xNumb];
-					sensors[i].image.GetSupportedMapOutputModes( aModeR, xNumb );
-					for( unsigned int j = 0; j < xNumb; j++ )
-					{
-						std::cout << aModeR[j].nXRes << " * " << aModeR[j].nYRes << " @" << aModeR[j].nFPS << "FPS" << std::endl;
-					}
-					delete[] aModeD;*/
-	
-					rc = g_context.CreateAnyProductionTree(XN_NODE_TYPE_USER, &query, sensors[i].g_UserGenerator);
-					CHECK_RC(rc, "Find user generator");
-	
-					XnMapOutputMode rgb_mode;
-	            		rgb_mode.nXRes = width;
-	            		rgb_mode.nYRes = height;
-            			rgb_mode.nFPS = fps;
-	        			sensors[i].image.SetMapOutputMode(rgb_mode);
-		       		
-					sensors[i].depth.GetAlternativeViewPointCap().SetViewPoint(sensors[i].image);
-					
-					XnCallbackHandle hUserCallbacks, hCalibrationStart, hCalibrationComplete, hPoseDetected, hCalibrationInProgress, hPoseInProgress;
-					if (playerdetection){
-						/*init player id array*/
-						pixelsID.resize(width*height);
-						if (!sensors[i].g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
+						else
 						{
-							printf("Supplied user generator doesn't support skeleton\n");
-							//return 1;
-							userGeneratorActive=0;
+							// If there is no serial (e.g. Asus XTION), use the bus address.
+							sensors[device_id].serial = cv::format("%d", busId);
 						}
-				
-						rc = sensors[i].g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
-						CHECK_RC(rc, "Register to user callbacks");
-						rc = sensors[i].g_UserGenerator.GetSkeletonCap().RegisterToCalibrationStart(UserCalibration_CalibrationStart, NULL, hCalibrationStart);
-						CHECK_RC(rc, "Register to calibration start");
-						rc = sensors[i].g_UserGenerator.GetSkeletonCap().RegisterToCalibrationComplete(UserCalibration_CalibrationComplete, NULL, hCalibrationComplete);
-						CHECK_RC(rc, "Register to calibration complete");
-						userGeneratorActive=1;
-						if (sensors[i].g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration())
-						{
-							g_bNeedPose = TRUE;
-							if (!sensors[i].g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
-							{
-								std::cout << "Pose required, but not supported" << std::endl;
-							}
-							rc = sensors[i].g_UserGenerator.GetPoseDetectionCap().RegisterToPoseDetected(UserPose_PoseDetected, NULL, hPoseDetected);
-							CHECK_RC(rc, "Register to Pose Detected");
-							sensors[i].g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
-
-							rc = sensors[i].g_UserGenerator.GetPoseDetectionCap().RegisterToPoseInProgress(MyPoseInProgress, NULL, hPoseInProgress);
-							CHECK_RC(rc, "Register to pose in progress");
-						}
-
-						sensors[i].g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
-
-						rc = sensors[i].g_UserGenerator.GetSkeletonCap().RegisterToCalibrationInProgress(MyCalibrationInProgress, NULL, hCalibrationInProgress);
-						CHECK_RC(rc, "Register to calibration in progress");
+						libusb_close (dev_handle);
 					}
 				}
 			}
-			g_context.SetGlobalMirror(false);
-			g_context.StartGeneratingAll();
-			/*XnFPSData xnFPS;
-		   	rc = xnFPSInit(&xnFPS, 180);
-		   	CHECK_RC(rc, "FPS Init");*/
-			rc =  pthread_create(&threads[0], NULL, &openniServer::kinectThread, NULL);
-			if (rc){
-		 		std::cout<< "ERROR; return code from pthread_create() is " << rc << std::endl;
-	            	exit(-1);
-     	    	}
+			libusb_free_device_list (devices, 1);
+			libusb_exit (context);
+		std::cout << "Number of detected devices: " << n_devices << std::endl;
+		NodeInfoList devicesList;
+		int devicesListCount = 0;
+		rc = g_context.EnumerateProductionTrees(XN_NODE_TYPE_DEVICE, NULL, devicesList);
+		for (NodeInfoList::Iterator it = devicesList.Begin(); it != devicesList.End(); ++it)
+		{
+			devicesListCount++;
 		}
+		CHECK_RC(rc, "Enumerate");
+		int i=0;
+		for (NodeInfoList::Iterator it = devicesList.Begin(); it != devicesList.End(); ++it, ++i)
+		{
+			if (i==SELCAM){
+				KinectDevice& info = sensors[i];
+					if (info.serial.empty())
+						info.serial = i;
+				std::cout << cv::format("[Device %d] %s, %s, serial=%s", i, info.vendor.c_str(), info.camera_type.c_str(), info.serial.c_str()) << std::endl;
+				// Create the device node
+				NodeInfo deviceInfo = *it;
+				rc = g_context.CreateProductionTree(deviceInfo, sensors[i].device);
+				CHECK_RC(rc, "Create Device");
+				Query query;
+				query.AddNeededNode(deviceInfo.GetInstanceName());
+				xnOSMemCopy(sensors[i].name,deviceInfo.GetInstanceName(),
+				xnOSStrLen(deviceInfo.GetInstanceName()));
+				// Now create a depth generator over this device
+				rc = g_context.CreateAnyProductionTree(XN_NODE_TYPE_DEPTH, &query, sensors[i].depth);
+				// depth configuration:
 
-		if ((motors) || (leds)){
-			const XnUSBConnectionString *paths; 
-			XnUInt32 count; 
-			std::cout << "inicializo el dispositivo" << std::endl;
-			rc = xnUSBInit();
-			CHECK_RC(rc, "USB Initialization") ;
-			//rc = xnUSBOpenDevice(VID_MICROSOFT, PID_NUI_MOTOR, NULL, NULL, &dev);
-			CHECK_RC(rc,"Openning Device");
-			rc = xnUSBEnumerateDevices(VID_MICROSOFT, PID_NUI_MOTOR, &paths, &count);
-             	CHECK_RC(rc,"xnUSBEnumerateDevices failed");
+				/*XnUInt32 xNuma = sensors[i].depth.GetSupportedMapOutputModesCount();
+				cout << "Support mode: " << xNuma << endl;
 
 
-	        	// Open first found device
-        		rc = xnUSBOpenDeviceByPath(paths[SELCAM], &dev);
-	        	CHECK_RC(rc,"xnUSBOpenDeviceByPath failed");
-		}
+				XnMapOutputMode* aModeD = new XnMapOutputMode[xNuma];
+				sensors[i].depth.GetSupportedMapOutputModes( aModeD, xNuma );
+				for( unsigned int j = 0; j < xNuma; j++ )
+				{
+					std::cout << aModeD[j].nXRes << " * " << aModeD[j].nYRes << " @" << aModeD[j].nFPS << "FPS" << std::endl;
+				}
+				delete[] aModeD;*/
 
-		if (cameraR){
-			std::string objPrefix(context().tag() + ".CameraRGB.");
-			std::string cameraName = prop->getProperty(objPrefix + "Name");
-			if (cameraName.size() == 0){//no name specified, we create one using the index
-				cameraName = "cameraR";
-				prop->setProperty(objPrefix + "Name",cameraName);//set the value
+
+
+
+					XnMapOutputMode depth_mode;
+				depth_mode.nXRes = width;
+				depth_mode.nYRes = height;
+				depth_mode.nFPS = 30;
+				sensors[i].depth.SetMapOutputMode(depth_mode);
+
+				CHECK_RC(rc, "Create Depth");
+				// now create a image generator over this device
+				rc = g_context.CreateAnyProductionTree(XN_NODE_TYPE_IMAGE, &query, sensors[i].image);
+				CHECK_RC(rc, "Create Image");
+				/*XnUInt32 xNumb = sensors[i].image.GetSupportedMapOutputModesCount();
+				cout << "Support mode: " << xNumb << endl;
+				XnMapOutputMode* aModeR = new XnMapOutputMode[xNumb];
+				sensors[i].image.GetSupportedMapOutputModes( aModeR, xNumb );
+				for( unsigned int j = 0; j < xNumb; j++ )
+				{
+					std::cout << aModeR[j].nXRes << " * " << aModeR[j].nYRes << " @" << aModeR[j].nFPS << "FPS" << std::endl;
 				}
-			context().tracer().info("Creating camera " + cameraName);
-			cameras[0] = new CameraRGB(objPrefix,context());
-			context().createInterfaceWithString(cameras[0],cameraName);
-			//test camera ok
-			std::cout<<"              -------- openniServer: Component: CameraRGB created successfully   --------" << std::endl;
-		}
-		if (cameraD){
-			std::string objPrefix(context().tag() + ".CameraDEPTH.");
-			std::string cameraName = prop->getProperty(objPrefix + "Name");
-			if (cameraName.size() == 0){//no name specified, we create one using the index
-				cameraName = "cameraD";
-				prop->setProperty(objPrefix + "Name",cameraName);//set the value
+				delete[] aModeD;*/
+
+				rc = g_context.CreateAnyProductionTree(XN_NODE_TYPE_USER, &query, sensors[i].g_UserGenerator);
+				CHECK_RC(rc, "Find user generator");
+
+				XnMapOutputMode rgb_mode;
+					rgb_mode.nXRes = width;
+					rgb_mode.nYRes = height;
+					rgb_mode.nFPS = fps;
+					sensors[i].image.SetMapOutputMode(rgb_mode);
+
+				sensors[i].depth.GetAlternativeViewPointCap().SetViewPoint(sensors[i].image);
+
+				XnCallbackHandle hUserCallbacks, hCalibrationStart, hCalibrationComplete, hPoseDetected, hCalibrationInProgress, hPoseInProgress;
+				if (playerdetection){
+					/*init player id array*/
+					pixelsID.resize(width*height);
+					if (!sensors[i].g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
+					{
+						printf("Supplied user generator doesn't support skeleton\n");
+						//return 1;
+						userGeneratorActive=0;
+					}
+
+					rc = sensors[i].g_UserGenerator.RegisterUserCallbacks(openniServer::User_NewUser, openniServer::User_LostUser, NULL, hUserCallbacks);
+					CHECK_RC(rc, "Register to user callbacks");
+					rc = sensors[i].g_UserGenerator.GetSkeletonCap().RegisterToCalibrationStart(openniServer::UserCalibration_CalibrationStart, NULL, hCalibrationStart);
+					CHECK_RC(rc, "Register to calibration start");
+					rc = sensors[i].g_UserGenerator.GetSkeletonCap().RegisterToCalibrationComplete(openniServer::UserCalibration_CalibrationComplete, NULL, hCalibrationComplete);
+					CHECK_RC(rc, "Register to calibration complete");
+					userGeneratorActive=1;
+					if (sensors[i].g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration())
+					{
+						g_bNeedPose = TRUE;
+						if (!sensors[i].g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
+						{
+							std::cout << "Pose required, but not supported" << std::endl;
+						}
+						rc = sensors[i].g_UserGenerator.GetPoseDetectionCap().RegisterToPoseDetected(openniServer::UserPose_PoseDetected, NULL, hPoseDetected);
+						CHECK_RC(rc, "Register to Pose Detected");
+						sensors[i].g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
+
+						rc = sensors[i].g_UserGenerator.GetPoseDetectionCap().RegisterToPoseInProgress(openniServer::MyPoseInProgress, NULL, hPoseInProgress);
+						CHECK_RC(rc, "Register to pose in progress");
+					}
+
+					sensors[i].g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
+
+					rc = sensors[i].g_UserGenerator.GetSkeletonCap().RegisterToCalibrationInProgress(openniServer::MyCalibrationInProgress, NULL, hCalibrationInProgress);
+					CHECK_RC(rc, "Register to calibration in progress");
 				}
-			context().tracer().info("Creating camera " + cameraName);
-			cameras[1] = new CameraDEPTH(objPrefix,context());
-			context().createInterfaceWithString(cameras[1],cameraName);
-			//test camera ok
-			std::cout<<"              -------- openniServer: Component: CameraDEPTH created successfully   --------" << std::endl;
+			}
 		}
-		if (motors){
-			std::string objPrefix4="Pose3DMotors1";
-			std::string Pose3DMotorsName = "Pose3DMotors1";
-			context().tracer().info("Creating Pose3DMotors1 " + Pose3DMotorsName);
-			Pose3DMotors1 = new Pose3DMotorsI(&dev,objPrefix4,context());
-			context().createInterfaceWithString(Pose3DMotors1,Pose3DMotorsName);
-			std::cout<<"              -------- openniServer: Component: Pose3DMotors created successfully   --------" << std::endl;
-		}
-			
-		if (leds){
-			std::string objPrefix4="kinectleds1";
-			std::string Name = "kinectleds1";
-			context().tracer().info("Creating kinectleds1 " + Name);
-			kinectleds1 = new KinectLedsI(&dev,objPrefix4,context());
-			context().createInterfaceWithString(kinectleds1,Name);
-			std::cout<<"              -------- openniServer: Component: KinectLeds created successfully   --------" << std::endl;
-		}
-		if (pointCloud){
-			std::string objPrefix5="pointcloud1";
-			std::string Name = "pointcloud1";
-			context().tracer().info("Creating pointcloud1 " + Name);
-			pointcloud1 = new pointCloudI(objPrefix5,context());
-			context().createInterfaceWithString(pointcloud1,Name);
-			std::cout<<"              -------- openniServer: Component: PointCloud created successfully   --------" << std::endl;
-		}
+		g_context.SetGlobalMirror(false);
+		g_context.StartGeneratingAll();
+		/*XnFPSData xnFPS;
+		rc = xnFPSInit(&xnFPS, 180);
+		CHECK_RC(rc, "FPS Init");*/
+		rc =  pthread_create(&threads[0], NULL, &openniServer::kinectThread, NULL);
+		if (rc){
+			std::cout<< "ERROR; return code from pthread_create() is " << rc << std::endl;
+				exit(-1);
+			}
+	}
 
+	if ((motors) || (leds)){
+		const XnUSBConnectionString *paths;
+		XnUInt32 count;
+		std::cout << "inicializo el dispositivo" << std::endl;
+		rc = xnUSBInit();
+		CHECK_RC(rc, "USB Initialization") ;
+		//rc = xnUSBOpenDevice(VID_MICROSOFT, PID_NUI_MOTOR, NULL, NULL, &dev);
+		CHECK_RC(rc,"Openning Device");
+		rc = xnUSBEnumerateDevices(VID_MICROSOFT, PID_NUI_MOTOR, &paths, &count);
+			CHECK_RC(rc,"xnUSBEnumerateDevices failed");
 
-    }
 
-    virtual ~Component(){
-    }
+			// Open first found device
+			rc = xnUSBOpenDeviceByPath(paths[SELCAM], &dev);
+			CHECK_RC(rc,"xnUSBOpenDeviceByPath failed");
+	}
 
-  private:
-    std::vector<Ice::ObjectPtr> cameras;
-	Ice::ObjectPtr Pose3DMotors1;
-	Ice::ObjectPtr kinectleds1;
-	Ice::ObjectPtr pointcloud1;
-	pthread_t threads[NUM_THREADS];
-	XN_USB_DEV_HANDLE dev;
+	if (cameraR){
+		std::string objPrefix(componentPrefix + ".CameraRGB.");
+		std::string cameraName = prop->getProperty(objPrefix + "Name");
+		if (cameraName.size() == 0){//no name specified, we create one using the index
+			cameraName = "cameraR";
+			prop->setProperty(objPrefix + "Name",cameraName);//set the value
+			}
+		std::cout << "Creating camera " + cameraName << std::endl;
+		camRGB = new openniServer::CameraRGB(objPrefix,prop);
+		adapter->add(camRGB,ic->stringToIdentity(cameraName));
+		//test camera ok
+		std::cout<<"              -------- openniServer: Component: CameraRGB created successfully   --------" << std::endl;
+	}
+	if (cameraD){
+		std::string objPrefix(componentPrefix + ".CameraDEPTH.");
+		std::string cameraName = prop->getProperty(objPrefix + "Name");
+		if (cameraName.size() == 0){//no name specified, we create one using the index
+			cameraName = "cameraD";
+			prop->setProperty(objPrefix + "Name",cameraName);//set the value
+			}
+		std::cout << "Creating camera " <<  cameraName << std::endl;
+		camDEPTH = new openniServer::CameraDEPTH(objPrefix,prop);
+		adapter->add(camDEPTH,ic->stringToIdentity(cameraName));
+		//test camera ok
+		std::cout<<"              -------- openniServer: Component: CameraDEPTH created successfully   --------" << std::endl;
+	}
+	if (motors){
+		std::string objPrefix4="Pose3DMotors1";
+		std::string Pose3DMotorsName = "Pose3DMotors1";
+		std::cout << "Creating Pose3DMotors1 " <<  Pose3DMotorsName << std::endl;
+		Pose3DMotors1 = new openniServer::Pose3DMotorsI(&dev,objPrefix4,prop);
+		adapter->add(Pose3DMotors1,ic->stringToIdentity(Pose3DMotorsName));
+		std::cout<<"              -------- openniServer: Component: Pose3DMotors created successfully   --------" << std::endl;
+	}
 
-  };
+	if (leds){
+		std::string objPrefix4="kinectleds1";
+		std::string Name = "kinectleds1";
+		std::cout << "Creating kinectleds1 " <<  Name << std::endl;
+		kinectleds1 = new openniServer::KinectLedsI(&dev,objPrefix4,prop);
+		adapter->add(kinectleds1,ic->stringToIdentity(Name));
 
-} //namespace
+		std::cout<<"              -------- openniServer: Component: KinectLeds created successfully   --------" << std::endl;
+	}
+	if (pointCloud){
+		std::string objPrefix5="pointcloud1";
+		std::string Name = "pointcloud1";
+		std::cout << "Creating pointcloud1 " + Name << std::endl;
+		pointcloud1 = new openniServer::pointCloudI(objPrefix5,prop);
+		adapter->add(pointcloud1,ic->stringToIdentity(Name));
 
-int main(int argc, char** argv){
+		std::cout<<"              -------- openniServer: Component: PointCloud created successfully   --------" << std::endl;
+	}
 
-  openniServer::Component component;
+	adapter->activate();
+	ic->waitForShutdown();
 
-	//usleep(1000);
-     jderobotice::Application app(component);
+	return 0;
 
-     return app.jderobotMain(argc,argv);
-
 }



More information about the Jderobot-admin mailing list