[Jderobot-admin] jderobot-r888 - in trunk/src/components/cameraserver: . build build-independent
ahcorde at jderobot.org
Tue Mar 19 10:46:37 CET 2013
Author: ahcorde
Date: 2013-03-19 10:45:37 +0100 (Tue, 19 Mar 2013)
New Revision: 888
Added:
trunk/src/components/cameraserver/build-independent/
trunk/src/components/cameraserver/build-independent/CMakeLists.txt
Removed:
trunk/src/components/cameraserver/gstpipeline.cpp
trunk/src/components/cameraserver/gstpipeline.h
Modified:
trunk/src/components/cameraserver/CMakeLists.txt
trunk/src/components/cameraserver/build/CMakeLists.txt
trunk/src/components/cameraserver/cameraserver.cpp
Log:
[ahcorde]
Uploaded new cameraserver; it depends on v4l2 and OpenCV
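In practice the change replaces the GStreamer/firewire capture path with OpenCV's cv::VideoCapture. The following is a minimal, self-contained sketch of that capture approach (illustrative only, not the component itself), assuming OpenCV 2.x on Linux; the 340x280 size and 25 fps used below are just the defaults the new code reads from its config:

    // capture_sketch.cpp - illustrative only
    #include <opencv2/core/core.hpp>
    #include <opencv2/imgproc/imgproc.hpp>
    #include <opencv2/highgui/highgui.hpp>
    #include <cstdlib>
    #include <string>
    #include <unistd.h>

    int main(int argc, char** argv) {
        std::string uri = (argc > 1) ? argv[1] : "0";   // device index or file/stream URI
        cv::VideoCapture cap;
        if (uri.size() > 3)
            cap.open(uri);                  // video file or stream
        else
            cap.open(atoi(uri.c_str()));    // v4l2 device index
        if (!cap.isOpened())
            return -1;

        const int framerate = 25;                // default FramerateN in the component
        const int cycle_ms  = 1000 / framerate;  // target period per frame
        cv::Mat frame;
        while (true) {
            cap >> frame;
            if (!frame.data) {                   // end of a video file: rewind and retry
                cap.set(CV_CAP_PROP_POS_AVI_RATIO, 0.0);
                cap >> frame;
                if (!frame.data) break;
            }
            cv::resize(frame, frame, cv::Size(340, 280));  // default ImageWidth x ImageHeight
            // ... hand the frame to whoever requested it ...
            usleep(cycle_ms * 1000);             // crude pacing, as in the component's ReplyTask
        }
        return 0;
    }
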
Modified: trunk/src/components/cameraserver/CMakeLists.txt
===================================================================
--- trunk/src/components/cameraserver/CMakeLists.txt 2013-03-14 15:57:14 UTC (rev 887)
+++ trunk/src/components/cameraserver/CMakeLists.txt 2013-03-19 09:45:37 UTC (rev 888)
@@ -1,27 +1,21 @@
-IF(with_fireware)
- SET( SOURCE_FILES cameraserver.cpp gstpipeline.cpp)
- add_definitions(-DGLADE_DIR="${gladedir}")
+SET( SOURCE_FILES cameraserver.cpp)
- include_directories(
+add_definitions(-DGLADE_DIR="${gladedir}")
+
+include_directories(
${INTERFACES_CPP_DIR}
${LIBS_DIR}
${CMAKE_CURRENT_SOURCE_DIR}
- )
+)
- add_executable (cameraserver ${SOURCE_FILES})
+add_executable (cameraserver ${SOURCE_FILES})
- TARGET_LINK_LIBRARIES(cameraserver
- ${gstreamerapp_LIBRARIES}
- ${gstreamer_LIBRARIES}
- ${OpenCV_LIBRARIES}
+TARGET_LINK_LIBRARIES(cameraserver
+ ${OpenCV_LIBRARIES}
${LIBS_DIR}/jderobotice/libjderobotice.so
${LIBS_DIR}/colorspaces/libcolorspacesmm.so
${INTERFACES_CPP_DIR}/jderobot/libJderobotInterfaces.so
- ${Gearbox_LIBRARIES}
+ ${Gearbox_LIBRARIES}
${ZeroCIce_LIBRARIES}
- ${with_fireware_LIBRARIES}
- raw1394
- dc1394)
-
-ENDIF(with_fireware)
+ )
Modified: trunk/src/components/cameraserver/build/CMakeLists.txt
===================================================================
--- trunk/src/components/cameraserver/build/CMakeLists.txt 2013-03-14 15:57:14 UTC (rev 887)
+++ trunk/src/components/cameraserver/build/CMakeLists.txt 2013-03-19 09:45:37 UTC (rev 888)
@@ -19,22 +19,10 @@
# #
###################
- # FIND & CHECK PRINCIPAL LIBRARIES
-include(FindPkgConfig)
-PKG_CHECK_MODULES(gstreamer REQUIRED gstreamer-0.10)
-include_directories(${gstreamer_INCLUDE_DIRS})
-link_directories(${gstreamer_LIBRARY_DIRS})
-PKG_CHECK_MODULES(gstreamerapp REQUIRED gstreamer-app-0.10)
-include_directories(${gstreamerapp_INCLUDE_DIRS})
-link_directories(${gstreamerapp_LIBRARY_DIRS})
-
-
-
# FIND AND CHECK OTHER DEPENDENCES
include(${DEPS_DIR}/gearbox/CMakeLists.txt)
include(${DEPS_DIR}/ice/CMakeLists.txt)
-include(${DEPS_DIR}/fireware/CMakeLists.txt)
include(${DEPS_DIR}/opencv/CMakeLists.txt)
Added: trunk/src/components/cameraserver/build-independent/CMakeLists.txt
===================================================================
--- trunk/src/components/cameraserver/build-independent/CMakeLists.txt (rev 0)
+++ trunk/src/components/cameraserver/build-independent/CMakeLists.txt 2013-03-19 09:45:37 UTC (rev 888)
@@ -0,0 +1,27 @@
+cmake_minimum_required(VERSION 2.8)
+
+SET( SOURCE_FILES ../cameraserver.cpp)
+SET( INTERFACES_CPP_DIR /usr/local/lib)
+
+include_directories(
+ /usr/local/include/jderobot
+ /usr/local/include/gearbox
+)
+SET( CMAKE_CXX_FLAGS "-lIce -lIceUtil" ) # Compiler options
+
+
+add_executable (cameraserver ${SOURCE_FILES})
+
+find_package(OpenCV REQUIRED)
+
+TARGET_LINK_LIBRARIES(cameraserver
+ ${INTERFACES_CPP_DIR}/jderobot/libJderobotInterfaces.so
+ ${INTERFACES_CPP_DIR}/jderobot/libjderobotice.so
+ ${INTERFACES_CPP_DIR}/jderobot/libjderobotutil.so
+ ${INTERFACES_CPP_DIR}/jderobot/libcolorspacesmm.so
+ ${OpenCV_LIBS}
+ /usr/local/lib/gearbox/libGbxIceUtilAcfr.so
+
+)
+
+
Modified: trunk/src/components/cameraserver/cameraserver.cpp
===================================================================
--- trunk/src/components/cameraserver/cameraserver.cpp 2013-03-14 15:57:14 UTC (rev 887)
+++ trunk/src/components/cameraserver/cameraserver.cpp 2013-03-19 09:45:37 UTC (rev 888)
@@ -23,432 +23,233 @@
#include <Ice/Ice.h>
#include <IceUtil/IceUtil.h>
-#include <IceStorm/IceStorm.h>
#include <gbxsickacfr/gbxiceutilacfr/safethread.h>
+
#include <jderobot/camera.h>
#include <jderobot/image.h>
#include <colorspaces/colorspacesmm.h>
#include <jderobotice/component.h>
#include <jderobotice/application.h>
-#include <tr1/memory>
-#include <list>
+
+//Opencv
+#include <opencv2/core/core.hpp>
+#include <opencv2/imgproc/imgproc.hpp>
+#include <opencv2/highgui/highgui.hpp>
+
#include <string.h>
#include <sstream>
#include <stdlib.h>
-#include "gstpipeline.h"
#include <stdlib.h>
-#include <inttypes.h>
-#include <libraw1394/raw1394.h>
-#include <dc1394/control.h>
+#include <list>
+
namespace cameraserver{
- class CameraI: virtual public jderobot::Camera {
- public:
- CameraI(std::string& propertyPrefix, const jderobotice::Context& context)
- : prefix(propertyPrefix),context(context),pipeline(),
- imageFmt(),
- imageDescription(new jderobot::ImageDescription()),
- cameraDescription(new jderobot::CameraDescription()),
- imageConsumer(),
- replyTask(),
- rpc_mode(false),
- camera1394(),
- firewire_mode(false)
- {
-
-
- Ice::PropertiesPtr prop = context.properties();
- //fill cameraDescription
- cameraDescription->name = prop->getProperty(prefix+"Name");
- if (cameraDescription->name.size() == 0)
- throw jderobotice::ConfigFileException(ERROR_INFO,"Camera name not configured");
- cameraDescription->shortDescription = prop->getProperty(prefix+"ShortDescription");
- cameraDescription->streamingUri = prop->getProperty(prefix+"StreamingUri");
+class CameraI: virtual public jderobot::Camera {
- //fill imageDescription
- imageDescription->width = prop->getPropertyAsIntWithDefault(prefix+"ImageWidth",340);
- imageDescription->height = prop->getPropertyAsIntWithDefault(prefix+"ImageHeight",280);
- //we use formats acording to colorspaces
- std::string fmtStr = prop->getPropertyWithDefault(prefix+"Format","YUY2");//default format YUY2
- imageFmt = colorspaces::Image::Format::searchFormat(fmtStr);
- if (!imageFmt)
- throw jderobotice::ConfigFileException(ERROR_INFO, "Format " + fmtStr + " unknown");
- imageDescription->size = imageDescription->width * imageDescription->height * CV_ELEM_SIZE(imageFmt->cvType);
- imageDescription->format = imageFmt->name;
- //fill pipeline cfg
- pipelineCfg.name = prop->getProperty(prefix+"Name");
- pipelineCfg.srcpipeline = prop->getProperty(prefix+"SrcPipeline");
- pipelineCfg.uri = prop->getProperty(prefix+"Uri");
- pipelineCfg.framerateN = prop->getPropertyAsIntWithDefault(prefix+"FramerateN",25);
- pipelineCfg.framerateD = prop->getPropertyAsIntWithDefault(prefix+"FramerateD",1);
- pipelineCfg.width = imageDescription->width;
- pipelineCfg.height = imageDescription->height;
- pipelineCfg.format = imageFmt;
- if ((prop->getProperty(prefix+"Invert") == "True"))
- pipelineCfg.invert = true;
- else
- pipelineCfg.invert = false;
+ public:
+ std::string name;
+ std::string uri;
+ int framerateN;
+ int framerateD;
- //pipelineCfg.validate();FIXME: validate cfg before to use it
+ CameraI(std::string propertyPrefix, const jderobotice::Context& context)
+ : prefix(propertyPrefix),context(context) {
- if(strncmp((char*)pipelineCfg.uri.c_str(),"dv",2)==0){
- firewire_mode=true;
- context.tracer().info("Creating firewire thread with config: " + pipelineCfg.toString());
- }
- else{
- context.tracer().info("Creating pipeline with config: " + pipelineCfg.toString());
- createPipeline(pipelineCfg);
- }
-
- context.tracer().info("Starting thread for camera: " + cameraDescription->name);
- replyTask = new ReplyTask(this);
+ std::cout << "Constructor CameraI -> " << propertyPrefix << std::endl;
+ imageDescription = (new jderobot::ImageDescription());
+ cameraDescription = (new jderobot::CameraDescription());
- // check client/server service mode
- int rpc = prop->getPropertyAsIntWithDefault("CameraSrv.DefaultMode",0);
+ Ice::PropertiesPtr prop = context.properties();
- if(rpc!=0){
- rpc_mode=true;
- }
+ //fill cameraDescription
+ name = prop->getProperty(prefix+"Name");
+ if (name.size() == 0)
+ throw "Camera name not configured";
- // check publish/subscribe service mode
- Ice::ObjectPrx obj = context.communicator()->propertyToProxy("CameraSrv.TopicManager");
+ cameraDescription->shortDescription = prop->getProperty(prefix+"ShortDescription");
+ cameraDescription->streamingUri = prop->getProperty(prefix+"StreamingUri");
- if(obj!=0){
- // IceStorm publisher initialization
- IceStorm::TopicManagerPrx topicManager = IceStorm::TopicManagerPrx::checkedCast(obj);
- IceStorm::TopicPrx topic;
- try{
- topic = topicManager->retrieve(cameraDescription->name);
- }
- catch(const IceStorm::NoSuchTopic&){
- topic = topicManager->create(cameraDescription->name);
- }
- Ice::ObjectPrx pub = topic->getPublisher()->ice_oneway();
+ //fill imageDescription
+ imageDescription->width = prop->getPropertyAsIntWithDefault(prefix+"ImageWidth",340);
+ imageDescription->height = prop->getPropertyAsIntWithDefault(prefix+"ImageHeight",280);
- imageConsumer=jderobot::ImageConsumerPrx::uncheckedCast(pub);
- }
- else{
- imageConsumer=0;
- }
+ //we use formats acording to colorspaces
+ std::string fmtStr = prop->getPropertyWithDefault(prefix+"Format","YUY2");//default format YUY2
+ imageFmt = colorspaces::Image::Format::searchFormat(fmtStr);
+ if (!imageFmt)
+ throw "Format " + fmtStr + " unknown";
- }
+ imageDescription->size = imageDescription->width * imageDescription->height * CV_ELEM_SIZE(imageFmt->cvType);
+ imageDescription->format = imageFmt->name;
- virtual ~CameraI(){
- context.tracer().info("Stopping pipeline");
- if(firewire_mode){
- cleanup(camera1394);
- }
- pipeline->stop();
- context.tracer().info("Stopping and joining thread for camera: " + cameraDescription->name);
- gbxiceutilacfr::stopAndJoin(replyTask);
- }
-
- virtual jderobot::ImageDescriptionPtr getImageDescription(const Ice::Current& c){
- return imageDescription;
- }
+ //fill pipeline cfg
+ uri = prop->getProperty(prefix+"Uri");
+ framerateN = prop->getPropertyAsIntWithDefault(prefix+"FramerateN",25);
+ framerateD = prop->getPropertyAsIntWithDefault(prefix+"FramerateD",1);
- virtual jderobot::CameraDescriptionPtr getCameraDescription(const Ice::Current& c){
- return cameraDescription;
- }
+ std::cout << "URI: " << uri << std::endl;
- virtual Ice::Int setCameraDescription(const jderobot::CameraDescriptionPtr &description, const Ice::Current& c){
- return 0;
- }
+ if(uri.size()>3)
+ cap.open(uri);
+ else
+ cap.open(atoi(uri.c_str()));
- virtual void getImageData_async(const jderobot::AMD_ImageProvider_getImageDataPtr& cb,
- const Ice::Current& c){
- replyTask->pushJob(cb);
- }
+ if(cap.isOpened()){
+ replyTask = new ReplyTask(this);
+ replyTask->start(); // my own thread
+ }else{
+ exit(-1);
+ }
+ }
- virtual std::string startCameraStreaming(const Ice::Current& c)
- {
+ std::string getName () {
+ return (cameraDescription->name);
+ }
- std::string commandVLC = "vlc " + pipelineCfg.uri + " -I dummy --sout \"#transcode{vcodec=mp4v,acodec=aac}:rtp{dst=0.0.0.0,port=1234,sdp=" + cameraDescription->streamingUri + "}\" &";
+ std::string getRobotName () {
+ return ((context.properties())->getProperty(context.tag()+".RobotName"));
+ }
- // system is blocked, the command should be run in background
- int ret = system(commandVLC.c_str());
+ virtual ~CameraI() {
+ context.tracer().info("Stopping and joining thread for camera: " + cameraDescription->name);
+ gbxiceutilacfr::stopAndJoin(replyTask);
+ }
- if (ret==0)
- {
- context.tracer().info("Starting Streaming in " + cameraDescription->name + ": (" + cameraDescription->streamingUri + ")");
- return cameraDescription->streamingUri;
- }
- else
- return NULL;
- }
+ virtual jderobot::ImageDescriptionPtr getImageDescription(const Ice::Current& c){
+ return imageDescription;
+ }
- virtual void stopCameraStreaming(const Ice::Current& c)
- {
+ virtual jderobot::CameraDescriptionPtr getCameraDescription(const Ice::Current& c){
+ return cameraDescription;
+ }
- context.tracer().info("Stoping Streaming in " + cameraDescription->name);
- system ("killall vlc");
- return;
- }
+ virtual Ice::Int setCameraDescription(const jderobot::CameraDescriptionPtr &description, const Ice::Current& c) {
+ return 0;
+ }
- void createPipeline(const Config &cfg){
- pipeline = new GSTPipeline(context,cfg);
- }
+ virtual void getImageData_async(const jderobot::AMD_ImageProvider_getImageDataPtr& cb,const Ice::Current& c){
+ replyTask->pushJob(cb);
+ }
- void restartPipeline(const Config &cfg){
- pipeline->restart();
- }
-
- private:
- /** cleans up firewire structures and frees the firewire bus.*/
- int cleanup(dc1394camera_t *camera) {
- dc1394_video_set_transmission(camera, DC1394_OFF);
- dc1394_capture_stop(camera);
- return 0;
- }
+ virtual std::string startCameraStreaming(const Ice::Current&){
+ context.tracer().info("Should be made anything to start camera streaming: " + cameraDescription->name);
+ }
- /** firewire driver init function. It will start all firewire required devices
- * and setting them the default configuration.
- * @return 0 if initialitation was successful or -1 if something went wrong.*/
- void firewire_deviceinit(){
- const int NUM_BUFFERS=1;
- dc1394_t * d;
- dc1394error_t err;
- dc1394video_mode_t res;
- dc1394framerate_t f;
- dc1394camera_list_t *list;
- char device[128];
- strncpy(device,pipelineCfg.uri.c_str(),pipelineCfg.uri.length());
- device[pipelineCfg.uri.length()]='\0';
+ virtual void stopCameraStreaming(const Ice::Current&) {
+ context.tracer().info("Should be made anything to stop camera streaming: " + cameraDescription->name);
+ }
- // camera config
- int width=imageDescription->width;
- int fps=pipelineCfg.framerateN;
- int camera_id=atoi(&device[strlen(device)-1]);
-
- list=NULL;
- d = dc1394_new ();
- err=dc1394_camera_enumerate (d, &list);
+ private:
+ class ReplyTask: public gbxiceutilacfr::SafeThread {
+ public:
+ ReplyTask(CameraI* camera)
+ : gbxiceutilacfr::SafeThread(camera->context.tracer()), mycamera(camera) {
+ std::cout << "safeThread" << std::endl;
+ }
- if (list == NULL) {
- perror("Can't access firewire device\n");
- }
-
- camera1394 = dc1394_camera_new (d, list->ids[camera_id].guid);
- if (!camera1394){
- dc1394_log_warning("Failed to initialize camera with id %d",camera_id);
- }
+ void pushJob(const jderobot::AMD_ImageProvider_getImageDataPtr& cb){
+ IceUtil::Mutex::Lock sync(requestsMutex);
+ requests.push_back(cb);
+ }
- if(width==320){
- res = DC1394_VIDEO_MODE_320x240_YUV422;
- }
- else{
- // This resolution is only compatible with framerate 15
- res = DC1394_VIDEO_MODE_640x480_YUV422;
- }
+ virtual void walk(){
+ jderobot::ImageDataPtr reply(new jderobot::ImageData);
+ struct timeval a, b;
+ int cycle = 48;
+ long totalb,totala;
+ long diff;
+ int count = 0 ;
+ cv::Mat frame;
- if(fps==30){
- f = DC1394_FRAMERATE_30;
- }
- else if(fps==15){
- f = DC1394_FRAMERATE_15;
- }
- else{
- f = DC1394_FRAMERATE_15;
- fprintf(stderr,"Wrong framerate for camera with id %d, set to default 15 fps\n",camera_id);
- }
+ int cycle_control = 1000/mycamera->framerateN;
- err=dc1394_video_set_iso_speed(camera1394, DC1394_ISO_SPEED_400);
- //DC1394_ERR_CLN_RTN(err,cleanup(camera),"Could not set ISO speed");
+ while(!isStopping()){
- err=dc1394_video_set_mode(camera1394, res);
- //DC1394_ERR_CLN_RTN(err,cleanup(camera),"Could not set video mode");
+ gettimeofday(&a,NULL);
+ totala=a.tv_sec*1000000+a.tv_usec;
- err=dc1394_video_set_framerate(camera1394, f);
- //DC1394_ERR_CLN_RTN(err,cleanup(camera),"Could not set framerate");
+ if(!mycamera->cap.isOpened()){
+ exit(-1);
+ }
- err=dc1394_capture_setup(camera1394,NUM_BUFFERS, DC1394_CAPTURE_FLAGS_DEFAULT);
- //DC1394_ERR_CLN_RTN(err,cleanup(camera),"Could not setup camera-\nmake sure that the video mode and framerate are\nsupported by your camera");
+ mycamera->cap >> frame;
- err=dc1394_video_set_transmission(camera1394, DC1394_ON);
- //DC1394_ERR_CLN_RTN(err,cleanup(camera),"Could not start camera iso transmission");
+ if(!frame.data){
+ mycamera->cap.set(CV_CAP_PROP_POS_AVI_RATIO, 0.0);
+ mycamera->cap >> frame;
+ }
+ cv::cvtColor(frame, frame, CV_RGB2BGR);
- dc1394_camera_free_list (list);
- }
+ if(mycamera->imageDescription->width!=frame.rows &&
+ mycamera->imageDescription->height!=frame.cols)
+ cv::resize(frame, frame,
+ cv::Size(mycamera->imageDescription->width,
+ mycamera->imageDescription->height));
- /** function to transform a buffer from uyvy to rgb.
- * @param src source buffer.
- * @param dest destination buffer where the transformation will be set.
- * @param NumPixels how many pixels per buffer.*/
- void uyvy2rgb (unsigned char *src, unsigned char *dest, unsigned long long int NumPixels)
- {
- /**
- * Color conversion functions from Bart Nabbe.
- * Corrected by Damien: bad coeficients in YUV2RGB.
- */
- #define YUV2RGB(y, u, v, r, g, b) \
- r = y + ((v*1436) >> 10); \
- g = y - ((u*352 + v*731) >> 10); \
- b = y + ((u*1814) >> 10); \
- r = r < 0 ? 0 : r; \
- g = g < 0 ? 0 : g; \
- b = b < 0 ? 0 : b; \
- r = r > 255 ? 255 : r; \
- g = g > 255 ? 255 : g; \
- b = b > 255 ? 255 : b
+ if(count==0){
+ reply->description = mycamera->imageDescription;
+ count++;
+ }
- register int i = (NumPixels << 1)-1;
- register int j = NumPixels + ( NumPixels << 1 ) -1;
- register int y0, y1, u, v;
- register int r, g, b;
+ IceUtil::Time t = IceUtil::Time::now();
+ reply->timeStamp.seconds = (long)t.toSeconds();
+ reply->timeStamp.useconds = (long)t.toMicroSeconds() - reply->timeStamp.seconds*1000000;
- while (i > 0) {
- y1 = (unsigned char) src[i--];
- v = (unsigned char) src[i--] - 128;
- y0 = (unsigned char) src[i--];
- u = (unsigned char) src[i--] - 128;
- YUV2RGB (y1, u, v, b, g, r);
- dest[j--] = r;
- dest[j--] = g;
- dest[j--] = b;
- YUV2RGB (y0, u, v, b, g, r);
- dest[j--] = r;
- dest[j--] = g;
- dest[j--] = b;
- }
- }
+// pthread_mutex_lock (&mycamera->cameraI->mutex);
+ reply->pixelData.resize(frame.rows*frame.cols*3);
- void invertImage (char *src, unsigned char *dest, int width, int heigth) {
- int i, j;
- int posI, posJ;
- char temp[3];
+ memcpy( &(reply->pixelData[0]), (unsigned char *) frame.data, frame.rows*frame.cols*3);
+// pthread_mutex_unlock (&mycamera->cameraI->mutex);
- for (i=0, j=(width*heigth)-1; i<j; i++, j--) {
- posI = i*3;
- posJ = j*3;
- temp[0] = (unsigned char) src[posI];
- temp[1] = (unsigned char) src[posI+1];
- temp[2] = (unsigned char) src[posI+2];
- dest[posI]=(unsigned char) src[posJ];
- dest[posI+1]=(unsigned char) src[posJ+1];
- dest[posI+2]=(unsigned char) src[posJ+2];
- dest[posJ]=temp[0];
- dest[posJ+1]=temp[1];
- dest[posJ+2]=temp[2];
- }
- }
+ { //critical region start
+ IceUtil::Mutex::Lock sync(requestsMutex);
+ while(!requests.empty()) {
+ jderobot::AMD_ImageProvider_getImageDataPtr cb = requests.front();
+ requests.pop_front();
+ cb->ice_response(reply);
+ }
+ } //critical region end
- class ReplyTask: public gbxiceutilacfr::SafeThread{
- public:
- ReplyTask(CameraI* camera)
- : gbxiceutilacfr::SafeThread(camera->context.tracer()), mycamera(camera) {
- start();//start thread
- }
+ gettimeofday(&b,NULL);
+ totalb=b.tv_sec*1000000+b.tv_usec;
- void pushJob(const jderobot::AMD_ImageProvider_getImageDataPtr& cb){
- IceUtil::Mutex::Lock sync(requestsMutex);
- requests.push_back(cb);
- }
+ diff = (totalb-totala)/1000;
+ diff = cycle-diff;
- virtual void walk(){
- jderobot::ImageDataPtr reply(new jderobot::ImageData);
- reply->description = mycamera->imageDescription;
+ //std::cout << "Gazeboserver takes " << diff << " ms " << mycamera->fileName << std::endl;
- if(mycamera->firewire_mode){
- mycamera->firewire_deviceinit();
- }
-
- bool new_frame;
- GstBuffer* buff;
- dc1394video_frame_t *frame;
- dc1394camera_t *camera=mycamera->camera1394;
+ if (diff < 0 || diff > cycle_control)
+ diff = cycle_control;
+ else
+ diff = cycle_control - diff;
- while(!isStopping()){
- new_frame=true;
- if(mycamera->firewire_mode){
- if (dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_WAIT, &frame)!=DC1394_SUCCESS){
- dc1394_log_error("Failed to capture from firewire camera");
- new_frame=false;
- }
- }
- else{
- buff = mycamera->pipeline->pull_buffer();
- if(!buff){
- //mycamera->context.tracer().info("Pipeline return empty buffer.");
- new_frame=false;
- IceUtil::ThreadControl::sleep(IceUtil::Time::milliSeconds(100));
- if (mycamera->pipeline->isEos()){
- mycamera->context.tracer().info("Pipeline is eos.Restarting pipeline...");
- mycamera->restartPipeline(mycamera->pipelineCfg);
- }
- }
- }
+ /*Sleep Algorithm*/
+ usleep(diff * 1000);
+// if (diff < 33)
+// usleep(33 * 1000);
+ }
+ }
- if (new_frame){
- IceUtil::Time t = IceUtil::Time::now();
- reply->timeStamp.seconds = (long)t.toSeconds();
- reply->timeStamp.useconds = (long)t.toMicroSeconds() - reply->timeStamp.seconds*1000000;
- reply->pixelData.resize(mycamera->imageDescription->width*mycamera->imageDescription->height*3);
+ CameraI* mycamera;
+ IceUtil::Mutex requestsMutex;
+ std::list<jderobot::AMD_ImageProvider_getImageDataPtr> requests;
+ };
- if(mycamera->firewire_mode){
- dc1394_capture_enqueue (camera, frame);
+ typedef IceUtil::Handle<ReplyTask> ReplyTaskPtr;
+ std::string prefix;
+ jderobotice::Context context;
+ colorspaces::Image::FormatPtr imageFmt;
+ jderobot::ImageDescriptionPtr imageDescription;
+ jderobot::CameraDescriptionPtr cameraDescription;
+ ReplyTaskPtr replyTask;
+ cv::VideoCapture cap;
- if (mycamera->imageFmt == colorspaces::ImageRGB8::FORMAT_RGB8){
- mycamera->uyvy2rgb((unsigned char*)frame->image,&(reply->pixelData[0]),frame->size[0]*frame->size[1]);
- }
- else{
- // Format colorspaces::ImageRGB8::FORMAT_YUYV
- memmove( &(reply->pixelData[0]),frame->image, mycamera->imageDescription->size);
- }
- }
- else{
- if (mycamera->pipelineCfg.invert) {
- mycamera->invertImage ((char*)buff->data, &(reply->pixelData[0]), mycamera->imageDescription->width, mycamera->imageDescription->height);
- } else {
- memmove(&(reply->pixelData[0]), buff->data, mycamera->imageDescription->size);//copy data to reply
- }
- gst_buffer_unref(buff);//release gstreamer buffer
- }
+}; // end class CameraI
- // publish
- if(mycamera->imageConsumer!=0){
- mycamera->imageConsumer->report(reply);
- }
-
- // response to data petition
- if(mycamera->rpc_mode){
- {//critical region start
- IceUtil::Mutex::Lock sync(requestsMutex);
- while(!requests.empty()){
- jderobot::AMD_ImageProvider_getImageDataPtr cb = requests.front();
- requests.pop_front();
- cb->ice_response(reply);
- }
- }//critical region end
- }
- }
- }
- }
- CameraI* mycamera;
- IceUtil::Mutex requestsMutex;
- std::list<jderobot::AMD_ImageProvider_getImageDataPtr> requests;
- };
- typedef IceUtil::Handle<ReplyTask> ReplyTaskPtr;
-
-
- std::string prefix;
- jderobotice::Context context;
- GSTPipelinePtr pipeline;
- Config pipelineCfg;
- colorspaces::Image::FormatPtr imageFmt;
- jderobot::ImageDescriptionPtr imageDescription;
- jderobot::CameraDescriptionPtr cameraDescription;
- ReplyTaskPtr replyTask;
- jderobot::ImageConsumerPrx imageConsumer;
- dc1394camera_t *camera1394;
- bool rpc_mode;
- bool firewire_mode;
- };
-
-
class Component: public jderobotice::Component{
public:
Component()
@@ -457,43 +258,26 @@
virtual void start(){
Ice::PropertiesPtr prop = context().properties();
- // check default service mode
- int rpc = prop->getPropertyAsIntWithDefault("CameraSrv.DefaultMode",0);
-
- if(rpc==0){
- // check publish/subscribe service mode
- Ice::ObjectPrx obj = context().communicator()->propertyToProxy("CameraSrv.TopicManager");
-
- if(obj==0){
- // no service mode configuration
- std::cerr << "Error: cameraserver needs server configuration mode\n" << std::endl;
- fflush(NULL);
-
- exit(0);
- }
- }
-
int nCameras = prop->getPropertyAsInt(context().tag() + ".NCameras");
cameras.resize(nCameras);
for (int i=0; i<nCameras; i++){//build camera objects
- std::stringstream objIdS;
- objIdS << i;
- std::string objId = objIdS.str();// should this be something unique??
- std::string objPrefix(context().tag() + ".Camera." + objId + ".");
- std::string cameraName = prop->getProperty(objPrefix + "Name");
+ std::stringstream objIdS;
+ objIdS << i;
+ std::string objId = objIdS.str();// should this be something unique??
+ std::string objPrefix(context().tag() + ".Camera." + objId + ".");
+ std::string cameraName = prop->getProperty(objPrefix + "Name");
- if (cameraName.size() == 0){//no name specified, we create one using the index
- cameraName = "camera" + objId;
- prop->setProperty(objPrefix + "Name",cameraName);//set the value
- }
- context().tracer().info("Creating camera " + cameraName);
- cameras[i] = new CameraI(objPrefix,context());
- context().createInterfaceWithString(cameras[i],cameraName);
+ if (cameraName.size() == 0){//no name specified, we create one using the index
+ cameraName = "camera" + objId;
+ prop->setProperty(objPrefix + "Name",cameraName);//set the value
+ }
+ context().tracer().info("Creating camera " + cameraName);
+ cameras[i] = new CameraI(objPrefix,context());
+ context().createInterfaceWithString(cameras[i],cameraName);
}
}
- virtual ~Component(){
- }
+ virtual ~Component(){}
private:
std::vector<Ice::ObjectPtr> cameras;
@@ -504,9 +288,7 @@
int main(int argc, char** argv){
cameraserver::Component component;
- /*initializes gstreamer*/
- gst_init(&argc,&argv);
-
jderobotice::Application app(component);
+
return app.jderobotMain(argc,argv);
}
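With the firewire/GStreamer code removed, the component is now driven entirely through the jderobot::Camera Ice interface. Below is a hedged client sketch that fetches one frame; the proxy name "cameraA", the endpoint string, and the assumption that the Slice in camera.ice generates jderobot::CameraPrx with a synchronous getImageData() in the usual way are illustrative, not part of this commit:

    // camera_client_sketch.cpp - illustrative client, not part of this commit
    #include <Ice/Ice.h>
    #include <jderobot/camera.h>
    #include <iostream>

    int main(int argc, char** argv) {
        Ice::CommunicatorPtr ic;
        try {
            ic = Ice::initialize(argc, argv);
            // Proxy string is an assumption; use whatever the server's config exposes.
            Ice::ObjectPrx base = ic->stringToProxy("cameraA:default -h localhost -p 9999");
            jderobot::CameraPrx camera = jderobot::CameraPrx::checkedCast(base);
            if (!camera)
                throw "Invalid camera proxy";

            jderobot::ImageDataPtr data = camera->getImageData();
            std::cout << "Got " << data->description->width << "x"
                      << data->description->height << " frame, "
                      << data->pixelData.size() << " bytes, format "
                      << data->description->format << std::endl;
        } catch (const Ice::Exception& e) {
            std::cerr << e << std::endl;
        } catch (const char* msg) {
            std::cerr << msg << std::endl;
        }
        if (ic)
            ic->destroy();
        return 0;
    }
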
Deleted: trunk/src/components/cameraserver/gstpipeline.cpp
===================================================================
--- trunk/src/components/cameraserver/gstpipeline.cpp 2013-03-14 15:57:14 UTC (rev 887)
+++ trunk/src/components/cameraserver/gstpipeline.cpp 2013-03-19 09:45:37 UTC (rev 888)
@@ -1,288 +0,0 @@
-#include "gstpipeline.h"
-#include <iostream>
-
-
-namespace cameraserver {
-
- Config::Config()
- : name(""),
- uri(""),
- width(0),
- height(0),
- bpp(0),
- framerateN(0),
- framerateD(0),
- format() {
- }
-
- bool
- Config::validate() const {
- if (name == "") return false;
- if (srcpipeline == "" || uri == "") return false;
- if (width <= 0) return false;
- if (height <= 0) return false;
- if (bpp <= 0) return false;
- if (framerateN <= 0) return false;
- if (framerateD <= 0) return false;
- if (!format) return false;
- return true;
- }
-
- std::string
- Config::toString() const {
- std::stringstream ss;
- ss << "GSTVideoPipeline config: name=" << name << " srcpipeline=" << srcpipeline << " uri=" << uri <<
- " width=" << width << " height=" << height <<
- " bpp=" << bpp << " framerate=" << framerateN << "/" << framerateD <<
- " format=" << format;
- return ss.str();
- }
-
- bool
- Config::operator==(const Config & other) {
- return (name == other.name && srcpipeline == other.srcpipeline && uri == other.uri && width == other.width &&
- height == other.height && bpp == other.bpp &&
- framerateN == other.framerateN && framerateD == other.framerateD &&
- format == other.format);
- }
-
- bool
- Config::operator!=(const Config & other) {
- return !(*this == other);
- }
-
- int gst_init(int* argcp, char** argvp[]) {
- GError *err;
- if (g_thread_supported())
- g_thread_init(NULL);
- return gst_init_check(argcp, argvp, &err);
- }
-
- const std::string GSTPipeline::v4l2UriScheme = "v4l2://";
- const std::string GSTPipeline::v4lUriScheme = "v4l://";
- const std::string GSTPipeline::videotestUriScheme = "videotest://";
-
- GSTPipeline::GSTPipeline(const jderobotice::Context& context, const Config &cfg) throw (jderobotice::ConfigFileException)
- : gbxiceutilacfr::SafeThread(context.tracer()),
- config_(cfg), context(context), mainloop(g_main_loop_new(NULL, FALSE)),eos(false) {
- pipeline = build_pipeline(cfg);
- context.tracer().info("Starting pipeline");
- start(); //my own thread
- }
-
- GSTPipeline::~GSTPipeline() {
- //gst_element_set_state (GST_ELEMENT(pipeline), GST_STATE_NULL);
- gbxiceutilacfr::stopAndJoin(this);
-
- gst_object_unref(pipeline);
- g_main_loop_unref(mainloop);
- }
-
-void GSTPipeline::restart() {
-
- /* This gstreamer call seeks the pipeline to position "0". */
- if (!gst_element_seek(pipeline,
- 1.0,
- GST_FORMAT_TIME,
- GST_SEEK_FLAG_FLUSH,
- GST_SEEK_TYPE_SET,
- 0,
- GST_SEEK_TYPE_NONE,
- GST_CLOCK_TIME_NONE))
- {
- printf("Seek failed\n");
- }
-}
-
- GstElement* GSTPipeline::build_pipeline(const Config &cfg) throw (jderobotice::ConfigFileException){
- GstCaps *caps;
- GstElement *p;
- GstBus *bus;
-
- /* create pipeline, add handler */
- p = gst_pipeline_new(cfg.name.c_str());
- bus = gst_pipeline_get_bus(GST_PIPELINE(p));
- gst_bus_add_watch(bus, GSTPipeline::my_bus_cb, (void*) this);
- gst_object_unref(bus);
-
- if (config_.format == colorspaces::ImageRGB8::FORMAT_RGB8) {
- caps = gst_caps_new_simple("video/x-raw-rgb",
- "bpp", G_TYPE_INT, (8 * config_.format->bytesPerPixel()),
- "depth", G_TYPE_INT, (8 * config_.format->bytesPerPixel()),
- "width", G_TYPE_INT, config_.width,
- "height", G_TYPE_INT, config_.height,
- "framerate", GST_TYPE_FRACTION,
- config_.framerateN, config_.framerateD,
- NULL);
- } else if (config_.format == colorspaces::ImageYUY2::FORMAT_YUY2) {/*or FORMAT_YUYV*/
- caps = gst_caps_new_simple("video/x-raw-yuv",
- "format", GST_TYPE_FOURCC, GST_STR_FOURCC(config_.format->name.c_str()),
- "width", G_TYPE_INT, config_.width,
- "height", G_TYPE_INT, config_.height,
- "framerate", GST_TYPE_FRACTION,
- config_.framerateN, config_.framerateD,
- NULL);
- } else {
- throw jderobotice::ConfigFileException(ERROR_INFO, "Format " + config_.format->name + " is not supported within this server");
- }
-
- std::string capsStr(gst_caps_to_string(caps));
- context.tracer().info("Pipeline caps: " + capsStr);
-
- if (config_.srcpipeline != ""){//if pipeline supplied is prefered
- GError *err = 0;
- std::string srcpipeline(config_.srcpipeline+"! identity ");//append identity element to allow partial pipelines
- source = gst_parse_bin_from_description(srcpipeline.c_str(),1,&err);//ghost pads are linked
- if (err!=0){
- std::string err_msg(err->message);
- g_error_free(err);
- throw jderobotice::ConfigFileException(ERROR_INFO, "Can't build requested pipeline: " + err_msg);
- }
- }else if (config_.uri.find(v4l2UriScheme) == 0) {/*handle v4l2 source*/
- std::string dev = config_.uri.substr(v4l2UriScheme.size()); /*after v4l2://*/
- source = gst_element_factory_make("v4l2src", "source");
- g_object_set(G_OBJECT(source), "device", dev.c_str(), NULL);
- }else if (config_.uri.find(v4lUriScheme) == 0) {/*handle v4l source*/
- std::string dev = config_.uri.substr(v4lUriScheme.size()); /*after v4l://*/
- source = gst_element_factory_make("v4lsrc", "source");
- g_object_set(G_OBJECT(source), "device", dev.c_str(), NULL);
- } else if (config_.uri.find(videotestUriScheme) == 0) {/*handle videotest source*/
- std::string patternStr = config_.uri.substr(videotestUriScheme.size()); /*after videotest://*/
- std::stringstream s(patternStr);
- int pattern;
- source = gst_element_factory_make("videotestsrc", "source");
- s >> pattern; /*FIXME: no error checked*/
- g_object_set(G_OBJECT(source), "pattern", pattern, NULL);
- } else {
- GstElement *urisrc = gst_element_make_from_uri(GST_URI_SRC, config_.uri.c_str(), "urisrc");
- GstElement *decoder = gst_element_factory_make("decodebin", "decoder");
- g_signal_connect(decoder, "new-decoded-pad", G_CALLBACK(newpad_cb), (void*) this);
-
- source = gst_bin_new("source");
- gst_bin_add_many(GST_BIN(source),urisrc,decoder,NULL);
- gst_element_link(urisrc, decoder);
- //we create the bin src ghost pad without target because decode
- //will come with it when playing. See newpad_cb callback
- gst_element_add_pad (source, gst_ghost_pad_new_no_target ("src", GST_PAD_SRC));
- }
-
- gst_bin_add(GST_BIN(p), source);
- videoscale = gst_element_factory_make("videoscale", "videoscale");
- videorate = gst_element_factory_make("videorate", "videorate");
- videocolor = gst_element_factory_make("ffmpegcolorspace", "videocolor");
- sink = gst_element_factory_make("appsink", "sink");
-
- g_object_set(G_OBJECT(sink), "drop", 1, NULL);
- g_object_set(G_OBJECT(sink), "max-buffers", 16, NULL);
- gst_bin_add_many(GST_BIN(p), videorate, videoscale, videocolor, sink, NULL);
-
- gst_element_link(source,videorate);
- gst_element_link(videorate, videoscale);
- gst_element_link(videoscale, videocolor);
- gst_element_link_filtered(videocolor, sink, caps);
-
- gst_caps_unref(caps);
- return p;
- }
-
- GstBuffer* GSTPipeline::pull_buffer() {
- GstBuffer* buff = 0;
- if (!gst_app_sink_is_eos(GST_APP_SINK(sink)))
- buff = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
- return buff;
- }
-
- bool GSTPipeline::isEos(){
- return eos;
- }
-
- void GSTPipeline::stop() {
- gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
- if (g_main_loop_is_running(mainloop))
- g_main_loop_quit(mainloop);
- }
-
- void GSTPipeline::walk() {
- gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
- while (!isStopping()) {
- g_main_loop_run(mainloop);
- }
- }
-
- void
- GSTPipeline::newpad_cb(GstElement *decode,
- GstPad *pad,
- gboolean last,
- gpointer data) {
- GstCaps *caps = 0;
- GstStructure *str;
- GstPad *gpad = 0;
- GstPad *target = 0;
- GSTPipeline *self = static_cast<GSTPipeline*> (data);
- GstElement *source = self->source;
-
- gpad = gst_element_get_static_pad(source, "src");
- target = gst_ghost_pad_get_target(GST_GHOST_PAD(gpad));
- caps = gst_pad_get_caps(pad);
- str = gst_caps_get_structure(caps, 0);
- if (target == 0 && g_strrstr(gst_structure_get_name(str), "video"))//ghost pad without target && pad media type video
- gst_ghost_pad_set_target(GST_GHOST_PAD(gpad), pad);/* link'n'play */
- else
- gst_object_unref(pad);//unref pad, we won't use it
-
- gst_caps_unref(caps);
- gst_object_unref(gpad);
- if (target != 0)
- gst_object_unref(target);
- }
-
- gboolean GSTPipeline::my_bus_cb(GstBus *bus,
- GstMessage *message,
- gpointer data) {
- std::string s = GST_MESSAGE_TYPE_NAME(message);
- GSTPipeline *self = static_cast<GSTPipeline*> (data);
-
- self->context.tracer().debug("Bus callback received message: " + s);
-
- switch (GST_MESSAGE_TYPE(message)) {
- case GST_MESSAGE_ERROR:
- {
- GError *err;
- gchar *debug;
-
- gst_message_parse_error(message, &err, &debug);
- s = err->message;
- self->context.tracer().error("Error: " + s);
- g_error_free(err);
- g_free(debug);
- gst_element_set_state (GST_ELEMENT(self->pipeline), GST_STATE_NULL);//app_sink_pull_buffer will quit
- g_main_loop_quit (self->mainloop);
- break;
- }
- // case GST_MESSAGE_STATE_CHANGED: {
- // GstState old_state, new_state;
-
- // gst_message_parse_state_changed (message, &old_state, &new_state, NULL);
- // self->context.tracer().info("Element " + std::string(GST_OBJECT_NAME (message->src)) +
- // " changed state from " + std::string(gst_element_state_get_name (old_state)) +
- // " to " + std::string(gst_element_state_get_name (new_state)));
- // break;
- // }
- case GST_MESSAGE_EOS:
- /* end-of-stream */
- g_main_loop_quit (self->mainloop);
- self->eos = true;
- break;
- default:
- /* unhandled message */
- break;
- }
-
- /* we want to be notified again the next time there is a message
- * on the bus, so returning TRUE (FALSE means we want to stop watching
- * for messages on the bus and our callback should not be called again)
- */
- return TRUE;
- }
-
-}//namespace
Deleted: trunk/src/components/cameraserver/gstpipeline.h
===================================================================
--- trunk/src/components/cameraserver/gstpipeline.h 2013-03-14 15:57:14 UTC (rev 887)
+++ trunk/src/components/cameraserver/gstpipeline.h 2013-03-19 09:45:37 UTC (rev 888)
@@ -1,88 +0,0 @@
-#ifndef GSTPIPELINE_H
-#define GSTPIPELINE_H
-
-#include <exception>
-#include <string>
-#include <gst/gst.h>
-#include <gst/app/gstappsink.h>
-#include <gbxsickacfr/gbxiceutilacfr/safethread.h>
-#include <jderobotice/component.h>
-#include <jderobotice/exceptions.h>
-#include <colorspaces/colorspacesmm.h>
-
-namespace cameraserver {
-
- class Config{
- public:
- Config();
- bool validate() const;
- std::string toString() const;
- bool operator==( const Config & other );
- bool operator!=( const Config & other );
-
- //! pipeline name
- std::string name;
- //!pipeline src description
- std::string srcpipeline;
- //! video input uri file
- std::string uri;
- //! video output width [pixels]
- size_t width;
- //! video output height [pixels]
- size_t height;
- //! video output bits per pixel
- size_t bpp;
- //! video output frame rate numerator
- size_t framerateN;
- //! video output frame rate denominator
- size_t framerateD;
- //! video output format string
- colorspaces::Image::FormatPtr format;
-
- bool invert;
- };
-
- extern int gst_init(int* argcp, char** argvp[]);
-
- class GSTPipeline: virtual public gbxiceutilacfr::SafeThread{
- public:
- /**
- * Create pipeline
- * After creation pipeline has to be started (start()) in order to activate it
- */
- GSTPipeline(const jderobotice::Context& context, const Config &cfg) throw(jderobotice::ConfigFileException);
- ~GSTPipeline();
-
- GstElement* build_pipeline(const Config &cfg) throw(jderobotice::ConfigFileException);
-
- void stop();
- void restart();
- virtual void walk();
-
- GstBuffer* pull_buffer();
- bool isEos();
- private:
- static gboolean my_bus_cb(GstBus *bus,
- GstMessage *message,
- gpointer data);
- static void newpad_cb(GstElement *decode,
- GstPad *pad,
- gboolean last,
- gpointer data);
-
- static const std::string v4l2UriScheme;
- static const std::string v4lUriScheme;
- static const std::string videotestUriScheme;
-
- Config config_;
- jderobotice::Context context;
- GMainLoop *mainloop;
- GstElement *pipeline,*source,*decoder,*videoscale,*videorate,*videocolor,*sink;
- bool eos;
-
- };
- typedef IceUtil::Handle<GSTPipeline> GSTPipelinePtr;
-
-}//namespace
-
-#endif /*GSTPIPELINE_H*/