Diffstat (limited to 'src/imaging')
-rw-r--r--  src/imaging/Blob.cpp | 64
-rw-r--r--  src/imaging/Blob.h | 46
-rw-r--r--  src/imaging/CMUCamera.cpp | 177
-rw-r--r--  src/imaging/CMUCamera.h | 17
-rw-r--r--  src/imaging/CMUCameraUtils.cpp | 170
-rw-r--r--  src/imaging/CMUCameraUtils.h | 8
-rw-r--r--  src/imaging/Camera.cpp | 95
-rw-r--r--  src/imaging/Camera.h | 27
-rw-r--r--  src/imaging/CameraInfo.cpp | 159
-rw-r--r--  src/imaging/CameraInfo.h | 100
-rw-r--r--  src/imaging/CoordTransformer.cpp | 2
-rw-r--r--  src/imaging/CoordTransformer.h | 8
-rw-r--r--  src/imaging/DSCamera.cpp | 298
-rw-r--r--  src/imaging/DSCamera.h | 19
-rw-r--r--  src/imaging/DSHelper.cpp | 60
-rw-r--r--  src/imaging/DSHelper.h | 5
-rw-r--r-- (was -rwxr-xr-x)  src/imaging/DSSampleGrabber.cpp | 2
-rw-r--r-- (was -rwxr-xr-x)  src/imaging/DSSampleGrabber.h | 2
-rw-r--r--  src/imaging/DeDistort.cpp | 104
-rw-r--r--  src/imaging/DeDistort.h | 42
-rw-r--r--  src/imaging/FWCamera.cpp | 259
-rw-r--r--  src/imaging/FWCamera.h | 25
-rw-r--r--  src/imaging/FWCameraUtils.cpp | 191
-rw-r--r--  src/imaging/FWCameraUtils.h | 10
-rw-r--r--  src/imaging/FakeCamera.cpp | 25
-rw-r--r--  src/imaging/FakeCamera.h | 5
-rw-r--r--  src/imaging/FilterClearBorder.cpp | 2
-rw-r--r--  src/imaging/FilterClearBorder.h | 4
-rw-r--r--  src/imaging/FilterDistortion.cpp | 4
-rw-r--r--  src/imaging/FilterDistortion.h | 4
-rw-r--r--  src/imaging/FilterWipeBorder.cpp | 2
-rw-r--r--  src/imaging/FilterWipeBorder.h | 3
-rw-r--r--  src/imaging/Makefile.am | 20
-rw-r--r--  src/imaging/Makefile.in | 153
-rw-r--r--  src/imaging/Run.cpp | 4
-rw-r--r--  src/imaging/Run.h | 6
-rw-r--r--  src/imaging/TrackerConfig.cpp | 53
-rw-r--r--  src/imaging/TrackerConfig.h | 9
-rw-r--r--  src/imaging/TrackerThread.cpp | 72
-rw-r--r--  src/imaging/TrackerThread.h | 8
-rw-r--r--  src/imaging/V4LCamera.cpp | 397
-rw-r--r--  src/imaging/V4LCamera.h | 54
-rw-r--r--  src/imaging/checktracking.cpp | 101
-rw-r--r--  src/imaging/testfiles/filterwipeborder.png | bin 1166 -> 0 bytes
-rw-r--r--  src/imaging/testimaging.cpp | 101
-rw-r--r--  src/imaging/trackerconfigdtd.cpp | 2
-rw-r--r--  src/imaging/trackerconfigdtd.h | 2
47 files changed, 1930 insertions(+), 991 deletions(-)
diff --git a/src/imaging/Blob.cpp b/src/imaging/Blob.cpp
index 59726af..b4d5777 100644
--- a/src/imaging/Blob.cpp
+++ b/src/imaging/Blob.cpp
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -156,7 +156,7 @@ void Blob::calcStats()
{
m_Center = calcCenter();
m_EstimatedNextCenter = m_Center;
- m_Area = calcArea();
+ m_Area = float(calcArea());
m_BoundingBox = calcBBox();
/*
more useful numbers that can be calculated from c
@@ -167,24 +167,24 @@ void Blob::calcStats()
Inertia = c_xx + c_yy
Eccentricity = ...
*/
- double c_xx = 0; // Variance in x direction
- double c_yy =0; // Variance in y direction
- double c_xy = 0; // Covariance
- double ll=0;
- double l1;
- double l2;
- double tmp_x;
- double tmp_y;
- double mag;
+ float c_xx = 0; // Variance in x direction
+ float c_yy =0; // Variance in y direction
+ float c_xy = 0; // Covariance
+ float ll=0;
+ float l1;
+ float l2;
+ float tmp_x;
+ float tmp_y;
+ float mag;
for (RunArray::iterator r = m_Runs.begin(); r != m_Runs.end();++r) {
//This is the evaluated expression for the variance when using runs...
- ll = r->length();
+ ll = float(r->length());
c_yy += ll* (r->m_Row- m_Center.y)*(r->m_Row- m_Center.y);
c_xx += ( (r->m_EndCol-1) * r->m_EndCol * (2*r->m_EndCol-1)
- - (r->m_StartCol-1) * r->m_StartCol * (2*r->m_StartCol -1))/6.
+ - (r->m_StartCol-1) * r->m_StartCol * (2*r->m_StartCol -1))/6.f
- m_Center.x * ((r->m_EndCol-1)*r->m_EndCol-(r->m_StartCol-1)*r->m_StartCol)
+ ll* m_Center.x*m_Center.x;
- c_xy += (r->m_Row-m_Center.y)*0.5*( (r->m_EndCol-1)*r->m_EndCol
+ c_xy += (r->m_Row-m_Center.y)*0.5f*( (r->m_EndCol-1)*r->m_EndCol
- (r->m_StartCol-1)*r->m_StartCol)
+ ll *(m_Center.x*m_Center.y - m_Center.x*r->m_Row);
}
@@ -195,9 +195,9 @@ void Blob::calcStats()
m_Inertia = c_xx + c_yy;
- double T = sqrt( (c_xx - c_yy) * (c_xx - c_yy) + 4*c_xy*c_xy);
+ float T = sqrt( (c_xx - c_yy) * (c_xx - c_yy) + 4*c_xy*c_xy);
m_Eccentricity = ((c_xx + c_yy) + T)/((c_xx+c_yy) - T);
- m_Orientation = 0.5*atan2(2*c_xy,c_xx-c_yy);
+ m_Orientation = 0.5f*atan2(2*c_xy,c_xx-c_yy);
// The l_i are variances (unit L^2) so to arrive at numbers that
// correspond to lengths in the picture we use sqrt
// Ensure that eigenvectors always have standard orientation, i.e. the determinant
@@ -205,8 +205,8 @@ void Blob::calcStats()
// E_1.x E_2.y - E_1.y E_2.x > 0
if (fabs(c_xy) > 1e-30) {
//FIXME. check l1!=0 l2!=0. li=0 happens for line-like components
- l1 = 0.5 * ((c_xx+c_yy) + sqrt((c_xx+c_yy)*(c_xx+c_yy)-4*(c_xx*c_yy-c_xy*c_xy)));
- l2 = 0.5 * ((c_xx+c_yy) - sqrt((c_xx+c_yy)*(c_xx+c_yy)-4*(c_xx*c_yy-c_xy*c_xy)));
+ l1 = 0.5f * ((c_xx+c_yy) + sqrt((c_xx+c_yy)*(c_xx+c_yy)-4*(c_xx*c_yy-c_xy*c_xy)));
+ l2 = 0.5f * ((c_xx+c_yy) - sqrt((c_xx+c_yy)*(c_xx+c_yy)-4*(c_xx*c_yy-c_xy*c_xy)));
tmp_x = c_xy/l1 - c_xx*c_yy/(c_xy*l1)+ (c_xx/c_xy);
tmp_y = 1.;
mag = sqrt(tmp_x*tmp_x + tmp_y*tmp_y);
@@ -250,17 +250,17 @@ void Blob::calcStats()
m_bStatsAvailable = true;
}
-const DPoint& Blob::getCenter() const
+const glm::vec2& Blob::getCenter() const
{
return m_Center;
}
-const DPoint& Blob::getEstimatedNextCenter() const
+const glm::vec2& Blob::getEstimatedNextCenter() const
{
return m_EstimatedNextCenter;
}
-double Blob::getArea() const
+float Blob::getArea() const
{
return m_Area;
}
@@ -270,37 +270,37 @@ const IntRect& Blob::getBoundingBox() const
return m_BoundingBox;
}
-double Blob::getEccentricity() const
+float Blob::getEccentricity() const
{
return m_Eccentricity;
}
-double Blob::getInertia() const
+float Blob::getInertia() const
{
return m_Inertia;
}
-double Blob::getOrientation() const
+float Blob::getOrientation() const
{
return m_Orientation;
}
-const DPoint & Blob::getScaledBasis(int i) const
+const glm::vec2 & Blob::getScaledBasis(int i) const
{
return m_ScaledBasis[i];
}
-const DPoint & Blob::getEigenVector(int i) const
+const glm::vec2 & Blob::getEigenVector(int i) const
{
return m_EigenVector[i];
}
-const DPoint & Blob::getEigenValues() const
+const glm::vec2 & Blob::getEigenValues() const
{
return m_EigenValues;
}
-void Blob::calcNextCenter(DPoint oldCenter)
+void Blob::calcNextCenter(glm::vec2 oldCenter)
{
m_EstimatedNextCenter = m_Center + (m_Center - oldCenter);
}
@@ -324,12 +324,12 @@ const BlobPtr Blob::getFirstRelated()
}
}
-DPoint Blob::calcCenter()
+glm::vec2 Blob::calcCenter()
{
- DPoint center(0,0);
- double c = 0;
+ glm::vec2 center(0,0);
+ float c = 0;
for (RunArray::iterator r = m_Runs.begin(); r != m_Runs.end(); ++r) {
- center += r->m_Center*r->length();
+ center += r->m_Center * float(r->length());
c += r->length();
}
center = center/c;
diff --git a/src/imaging/Blob.h b/src/imaging/Blob.h
index 5ced2c6..aff1994 100644
--- a/src/imaging/Blob.h
+++ b/src/imaging/Blob.h
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -30,7 +30,7 @@
#include "../graphics/Bitmap.h"
#include "../graphics/Pixel32.h"
-#include "../base/Point.h"
+#include "../base/GLMHelper.h"
#include <vector>
@@ -65,18 +65,18 @@ class AVG_API Blob
void calcContour(int Precision);
ContourSeq getContour();
- const DPoint& getCenter() const;
- const DPoint& getEstimatedNextCenter() const;
- double getArea() const;
+ const glm::vec2& getCenter() const;
+ const glm::vec2& getEstimatedNextCenter() const;
+ float getArea() const;
const IntRect & getBoundingBox() const;
- double getEccentricity() const;
- double getInertia() const;
- double getOrientation() const;
- const DPoint& getScaledBasis(int i) const;
- const DPoint& getEigenVector(int i) const;
- const DPoint& getEigenValues() const;
-
- void calcNextCenter(DPoint oldCenter);
+ float getEccentricity() const;
+ float getInertia() const;
+ float getOrientation() const;
+ const glm::vec2& getScaledBasis(int i) const;
+ const glm::vec2& getEigenVector(int i) const;
+ const glm::vec2& getEigenValues() const;
+
+ void calcNextCenter(glm::vec2 oldCenter);
void clearRelated();
void addRelated(BlobPtr pBlob);
const BlobPtr getFirstRelated();
@@ -85,7 +85,7 @@ class AVG_API Blob
private:
Blob(const Blob &);
- DPoint calcCenter();
+ glm::vec2 calcCenter();
IntRect calcBBox();
int calcArea();
void initRowPositions();
@@ -98,16 +98,16 @@ class AVG_API Blob
// For hands, this contains the fingers.
bool m_bStatsAvailable;
- DPoint m_EstimatedNextCenter;
- DPoint m_Center;
- double m_Area;
+ glm::vec2 m_EstimatedNextCenter;
+ glm::vec2 m_Center;
+ float m_Area;
IntRect m_BoundingBox;
- double m_Eccentricity;
- double m_Inertia;
- double m_Orientation;
- DPoint m_ScaledBasis[2];
- DPoint m_EigenVector[2];
- DPoint m_EigenValues;
+ float m_Eccentricity;
+ float m_Inertia;
+ float m_Orientation;
+ glm::vec2 m_ScaledBasis[2];
+ glm::vec2 m_EigenVector[2];
+ glm::vec2 m_EigenValues;
ContourSeq m_Contour;
};
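
The Blob change above is a pure type migration: the public accessors now return glm::vec2 and float instead of DPoint and double. A minimal caller-side sketch under the new signatures (pBlob and the previous-frame center are assumptions, not part of this diff):

    // Assumed: pBlob is an avg::BlobPtr delivered by the blob detector.
    const glm::vec2& center = pBlob->getCenter();   // was const DPoint&
    float area = pBlob->getArea();                  // was double
    float orientation = pBlob->getOrientation();    // radians, was double
    glm::vec2 prevCenter(0, 0);                     // previous-frame center in real code
    pBlob->calcNextCenter(prevCenter);              // parameter type is now glm::vec2
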
diff --git a/src/imaging/CMUCamera.cpp b/src/imaging/CMUCamera.cpp
index 1991916..6c68c5f 100644
--- a/src/imaging/CMUCamera.cpp
+++ b/src/imaging/CMUCamera.cpp
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -36,11 +36,9 @@ using namespace std;
namespace avg {
-CMUCamera::CMUCamera(long long guid, bool bFW800, IntPoint Size,
- PixelFormat camPF, PixelFormat destPF, double FrameRate)
- : Camera(camPF, destPF),
- m_Size(Size),
- m_FrameRate(FrameRate),
+CMUCamera::CMUCamera(long long guid, bool bFW800, IntPoint size,
+ PixelFormat camPF, PixelFormat destPF, float frameRate)
+ : Camera(camPF, destPF, size, frameRate),
m_WhitebalanceU(-1),
m_WhitebalanceV(-1),
m_pCamera(0)
@@ -48,7 +46,7 @@ CMUCamera::CMUCamera(long long guid, bool bFW800, IntPoint Size,
m_pCamera = new C1394Camera();
int err;
unsigned long videoFormat, videoMode;
- getVideoFormatAndMode(m_Size, getCamPF(), &videoFormat, &videoMode);
+ getVideoFormatAndMode(getImgSize(), getCamPF(), &videoFormat, &videoMode);
// Find and open camera
if (m_pCamera->RefreshCameraList() <= 0) {
@@ -72,7 +70,7 @@ CMUCamera::CMUCamera(long long guid, bool bFW800, IntPoint Size,
checkCMUError(err, AVG_ERR_CAMERA_NONFATAL,
string("CMUCamera: Error setting video mode ") + toString(videoMode) +
", format: " + toString(videoFormat));
- err = m_pCamera->SetVideoFrameRate(getFrameRateConst(m_FrameRate));
+ err = m_pCamera->SetVideoFrameRate(getFrameRateConst(getFrameRate()));
checkCMUError(err, AVG_ERR_CAMERA_NONFATAL, "Error setting frame rate");
// Start capturing images
@@ -95,10 +93,12 @@ CMUCamera::CMUCamera(long long guid, bool bFW800, IntPoint Size,
m_pCamera->GetCameraVendor(sVendor, 256);
if (strcmp(sModel, "DFx 31BF03") == 0) {
- AVG_TRACE(Logger::CONFIG, "Applying bayer pattern fixup for IS DFx31BF03 camera");
+ AVG_TRACE(Logger::category::CONFIG, Logger::severity::INFO,
+ "Applying bayer pattern fixup for IS DFx31BF03 camera");
setCamPF(BAYER8_GRBG);
} else if (strcmp(sVendor, "Point Grey Research") == 0) {
- AVG_TRACE(Logger::CONFIG, "Applying bayer pattern fixup for PointGrey cameras");
+ AVG_TRACE(Logger::category::CONFIG, Logger::severity::INFO,
+ "Applying bayer pattern fixup for PointGrey cameras");
enablePtGreyBayer();
}
@@ -111,12 +111,6 @@ CMUCamera::~CMUCamera()
delete m_pCamera;
}
-
-IntPoint CMUCamera::getImgSize()
-{
- return m_Size;
-}
-
BitmapPtr CMUCamera::getImage(bool bWait)
{
if (bWait) {
@@ -139,8 +133,8 @@ BitmapPtr CMUCamera::getImage(bool bWait)
unsigned long captureBufferLength;
unsigned char* pCaptureBuffer = m_pCamera->GetRawData(&captureBufferLength);
- BitmapPtr pCamBmp(new Bitmap(m_Size, getCamPF(), pCaptureBuffer,
- captureBufferLength / m_Size.y, false, "TempCameraBmp"));
+ BitmapPtr pCamBmp(new Bitmap(getImgSize(), getCamPF(), pCaptureBuffer,
+ captureBufferLength / getImgSize().y, false, "TempCameraBmp"));
return convertCamFrameToDestPF(pCamBmp);
}
@@ -156,11 +150,6 @@ const std::string& CMUCamera::getDriverName() const
return sDriverName;
}
-double CMUCamera::getFrameRate() const
-{
- return m_FrameRate;
-}
-
int CMUCamera::getFeature(CameraFeature Feature) const
{
unsigned short val1;
@@ -179,7 +168,7 @@ void CMUCamera::setFeature(CameraFeature Feature, int Value, bool bIgnoreOldValu
int err = pControl->SetValue(Value);
checkCMUWarning(err == CAM_SUCCESS, "Error setting camera strobe.");
} else {
- AVG_TRACE(Logger::WARNING, "Camera does not support strobe.");
+ AVG_LOG_WARNING("Camera does not support strobe.");
}
} else {
CAMERA_FEATURE cmuFeature = getFeatureID(Feature);
@@ -196,7 +185,7 @@ void CMUCamera::setFeature(CameraFeature Feature, int Value, bool bIgnoreOldValu
string("Error setting camera feature: ") +
cameraFeatureToString(Feature));
} else {
- AVG_TRACE(Logger::WARNING, string("Camera does not support feature: ") +
+ AVG_LOG_WARNING(string("Camera does not support feature: ") +
cameraFeatureToString(Feature));
}
}
@@ -215,7 +204,7 @@ void CMUCamera::setFeatureOneShot(CameraFeature Feature)
&& err3 == CAM_SUCCESS,
string("Error setting feature: ") + cameraFeatureToString(Feature));
} else {
- AVG_TRACE(Logger::WARNING, string("Camera does not support feature: ") +
+ AVG_LOG_WARNING(string("Camera does not support feature: ") +
cameraFeatureToString(Feature));
}
}
@@ -255,35 +244,127 @@ void CMUCamera::setWhitebalance(int u, int v, bool bIgnoreOldValue)
string("Error setting camera feature: ") +
cameraFeatureToString(CAM_FEATURE_WHITE_BALANCE));
} else {
- AVG_TRACE(Logger::WARNING, string("Camera does not support feature: ") +
+ AVG_LOG_WARNING(string("Camera does not support feature: ") +
cameraFeatureToString(CAM_FEATURE_WHITE_BALANCE));
}
}
}
-void CMUCamera::dumpCameras()
+
+int CMUCamera::countCameras()
{
C1394Camera* pCamera = new C1394Camera();
-
if (pCamera->RefreshCameraList() <= 0) {
- return;
+ return 0;
}
- if (pCamera->GetNumberCameras() == 0) {
- return;
+ int numCameras = pCamera->GetNumberCameras();
+ return numCameras;
+}
+
+CameraInfo* CMUCamera::getCameraInfos(int deviceNumber)
+{
+#ifdef AVG_ENABLE_CMU1394
+ C1394Camera* pCamera = new C1394Camera();
+ int err = pCamera->RefreshCameraList();
+ if (err <= 0) {
+ return 0;
}
- cerr << "CMU Driver Firewire Cameras: " << endl;
- for (int i=0; i<pCamera->GetNumberCameras(); ++i) {
- pCamera->SelectCamera(i);
- char sz[256];
- cerr << " -----------------------" << endl;
- pCamera->GetCameraVendor(sz, 256);
- cerr << " Vendor: " << sz << endl;
- pCamera->GetCameraName(sz, 256);
- cerr << " Name: " << sz << endl;
- long long camGuid;
- pCamera->GetCameraUniqueID((PLARGE_INTEGER)&camGuid);
- cerr << " GUID: " << camGuid << endl;
+
+ err = pCamera->SelectCamera(deviceNumber);
+ if (err != CAM_SUCCESS) {
+ AVG_ASSERT(false);
}
+ pCamera->InitCamera(true);
+
+ long long uniqueID;
+ pCamera->GetCameraUniqueID((PLARGE_INTEGER)&uniqueID);
+ stringstream deviceID;
+ deviceID << uniqueID;
+
+ CameraInfo* pCamInfo = new CameraInfo("Firewire", deviceID.str());
+ getCameraImageFormats(pCamera, pCamInfo);
+ getCameraControls(pCamera, pCamInfo);
+
delete pCamera;
+ return pCamInfo;
+#endif
+ return NULL;
+}
+
+void CMUCamera::getCameraImageFormats(C1394Camera* pCamera, CameraInfo* pCamInfo)
+{
+ //Iterate over formats (up to 3 formats are supported)
+ for (int format = 0; format <= 2; format++) {
+ BOOL hasFormat = false;
+ hasFormat = pCamera->HasVideoFormat(format);
+ if(!hasFormat){
+ continue;
+ }
+ //Iterate over modes (up to 8 modes are supported)
+ for (int mode = 0; mode <= 7; mode++) {
+ BOOL hasMode = false;
+ hasMode = pCamera->HasVideoMode(format, mode);
+ if (!hasMode) {
+ continue;
+ }
+ //Ignore not libavg supported formats
+ if (mode == 0 && format == 0) {
+ continue;
+ }
+
+ IntPoint size;
+ PixelFormat pixelFormat;
+ FrameratesVector framerates;
+
+ getImageSizeAndPF(format, mode, size, pixelFormat);
+ getCameraFramerates(pCamera, format, mode, framerates);
+
+ CameraImageFormat imageFormat = CameraImageFormat(size, pixelFormat, framerates);
+ pCamInfo->addImageFormat(imageFormat);
+ }
+ }
+}
+
+void CMUCamera::getCameraFramerates(C1394Camera* pCamera, unsigned long videoFormat, unsigned long videoMode, FrameratesVector &framerates)
+{
+ for (int itFramerate = 0; itFramerate <= 7; itFramerate++) {
+ BOOL hasFramerate = false;
+ hasFramerate = pCamera->HasVideoFrameRate(videoFormat, videoMode, itFramerate);
+ if (!hasFramerate) {
+ continue;
+ }
+
+ float framerate = getFrameRateFloat(itFramerate);
+ framerates.push_back(framerate);
+ }
+}
+
+void CMUCamera::getCameraControls(C1394Camera* pCamera, CameraInfo* pCamInfo)
+{
+ //Iterate over amount of possible Features (up to 24 in CMU1394 DCD 6.4.5.240)
+ for (int indexFeature = 0; indexFeature <= 23; indexFeature++) {
+ C1394CameraControl* feature = pCamera->GetCameraControl((CAMERA_FEATURE)indexFeature);
+ if (feature == NULL) {
+ continue;
+ }
+ bool hasFeature = pCamera->HasFeature((CAMERA_FEATURE)indexFeature);
+ if (!hasFeature) {
+ continue;
+ }
+ //FrameRate (also known as TransferRate) is not supported
+ if (feature->GetFeatureID() == FEATURE_FRAME_RATE) {
+ continue;
+ }
+
+ std::string featureName = feature->GetName();
+ unsigned short min = -1;
+ unsigned short max = -1;
+ feature->GetRange(&min, &max);
+ unsigned short value_low = -1;
+ unsigned short value_high = -1; //TODO: For Whitebalance or Temperature etc.
+ feature->GetValue(&value_low, &value_high);
+ CameraControl camControl = CameraControl(featureName, (int)min, (int)max, (int)value_low);
+ pCamInfo->addControl(camControl);
+ }
}
int CMUCamera::getCamIndex(long long guid)
@@ -299,7 +380,7 @@ int CMUCamera::getCamIndex(long long guid)
return i;
}
}
- AVG_TRACE(Logger::WARNING, string("Camera with guid ") + toString(guid)
+ AVG_LOG_WARNING(string("Camera with guid ") + toString(guid)
+ " not present. Using first camera.");
return 0;
}
@@ -316,7 +397,7 @@ void CMUCamera::internalGetFeature(CameraFeature Feature, unsigned short* val1,
pControl->Status();
pControl->GetValue(val1, val2);
} else {
- AVG_TRACE(Logger::WARNING, string("Error reading camera feature: ") +
+ AVG_LOG_WARNING(string("Error reading camera feature: ") +
cameraFeatureToString(Feature));
}
}
@@ -338,7 +419,7 @@ void CMUCamera::enablePtGreyBayer()
PixelFormat exactPF = fwBayerStringToPF(bayerFormat);
setCamPF(exactPF);
} else {
- AVG_TRACE(Logger::ERROR, "imageDataFormat not supported.");
+ AVG_LOG_ERROR("imageDataFormat not supported.");
}
}
@@ -352,7 +433,7 @@ void CMUCamera::checkCMUError(int code, int type, const string & sMsg) const
void CMUCamera::checkCMUWarning(bool bOk, const string& sMsg) const
{
if (!bOk) {
- AVG_TRACE(Logger::WARNING, sMsg);
+ AVG_LOG_WARNING(sMsg);
}
}
diff --git a/src/imaging/CMUCamera.h b/src/imaging/CMUCamera.h
index 58dc129..a2f31e0 100644
--- a/src/imaging/CMUCamera.h
+++ b/src/imaging/CMUCamera.h
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -36,16 +36,14 @@ namespace avg {
class AVG_API CMUCamera : public Camera {
public:
CMUCamera(long long guid, bool bFW800, IntPoint Size, PixelFormat camPF,
- PixelFormat destPF, double FrameRate);
+ PixelFormat destPF, float FrameRate);
virtual ~CMUCamera();
- virtual IntPoint getImgSize();
virtual BitmapPtr getImage(bool bWait);
virtual const std::string& getDevice() const;
virtual const std::string& getDriverName() const;
- virtual double getFrameRate() const;
-
+
virtual int getFeature(CameraFeature Feature) const;
virtual void setFeature(CameraFeature Feature, int Value, bool bIgnoreOldValue=false);
virtual void setFeatureOneShot(CameraFeature Feature);
@@ -54,6 +52,8 @@ public:
virtual void setWhitebalance(int u, int v, bool bIgnoreOldValue=false);
static void dumpCameras();
+ static int countCameras();
+ static CameraInfo* getCameraInfos(int deviceNumber);
private:
int getCamIndex(long long guid);
@@ -64,9 +64,12 @@ private:
void checkCMUWarning(bool bOk, const std::string& sMsg) const;
std::string CMUErrorToString(int code);
+ static void getCameraControls(C1394Camera* pCamera, CameraInfo* camInfo);
+ static void getCameraImageFormats(C1394Camera* pCamera, CameraInfo* pCamInfo);
+ static void getCameraFramerates(C1394Camera* pCamera, unsigned long videoFormat,
+ unsigned long videoMode, FrameratesVector &framerates);
+
std::string m_sDevice;
- IntPoint m_Size;
- double m_FrameRate;
mutable C1394Camera * m_pCamera; // The CMU1394 lib is not const-correct.
FeatureMap m_Features;
diff --git a/src/imaging/CMUCameraUtils.cpp b/src/imaging/CMUCameraUtils.cpp
index 85b2af7..3e10570 100644
--- a/src/imaging/CMUCameraUtils.cpp
+++ b/src/imaging/CMUCameraUtils.cpp
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -147,31 +147,56 @@ void getVideoFormatAndMode(IntPoint& Size, PixelFormat pf,
}
}
-unsigned long getFrameRateConst(double FrameRate)
+unsigned long getFrameRateConst(float frameRate)
{
- if (FrameRate == 1.875) {
+ if (frameRate == 1.875f) {
return FRAMERATE_1_875;
- } else if (FrameRate == 3.75) {
+ } else if (frameRate == 3.75f) {
return FRAMERATE_3_75;
- } else if (FrameRate == 7.5) {
+ } else if (frameRate == 7.5f) {
return FRAMERATE_7_5;
- } else if (FrameRate == 15) {
+ } else if (frameRate == 15) {
return FRAMERATE_15;
- } else if (FrameRate == 30) {
+ } else if (frameRate == 30) {
return FRAMERATE_30;
- } else if (FrameRate == 60) {
+ } else if (frameRate == 60) {
return FRAMERATE_60;
- } else if (FrameRate == 120) {
+ } else if (frameRate == 120) {
return FRAMERATE_120;
- } else if (FrameRate == 240) {
+ } else if (frameRate == 240) {
return FRAMERATE_240;
} else {
throw Exception(AVG_ERR_INVALID_ARGS,
- "Unsupported or illegal value ("+toString(FrameRate)+
+ "Unsupported or illegal value ("+toString(frameRate)+
") for camera framerate.");
}
}
+float getFrameRateFloat(unsigned long frameRate)
+{
+ if(frameRate == FRAMERATE_1_875){
+ return 1.875;
+ } else if (frameRate == FRAMERATE_3_75){
+ return 3.75;
+ } else if (frameRate == FRAMERATE_7_5){
+ return 7.5;
+ } else if (frameRate == FRAMERATE_15){
+ return 15;
+ } else if (frameRate == FRAMERATE_30){
+ return 30;
+ } else if (frameRate == FRAMERATE_60){
+ return 60;
+ } else if (frameRate == FRAMERATE_120){
+ return 120;
+ } else if (frameRate == FRAMERATE_240){
+ return 240;
+ } else {
+ throw Exception(AVG_ERR_INVALID_ARGS,
+ "Unsupported or illegal value ("+toString(frameRate)+
+ ") as camera framerate.");
+ }
+}
+
CAMERA_FEATURE getFeatureID(CameraFeature Feature)
{
switch(Feature) {
@@ -218,4 +243,127 @@ CAMERA_FEATURE getFeatureID(CameraFeature Feature)
}
}
+void getImageSizeAndPF(unsigned long videoFormat, unsigned long videoMode,
+ IntPoint &pSize, PixelFormat &pPixelFormat)
+{
+ int format = (int) videoFormat;
+ int mode = (int) videoMode;
+ switch(format) {
+ case FORMAT_0: {
+ if (mode == MODE_160_120_YUV444) {
+ pSize = IntPoint(160,120);
+ pPixelFormat = PixelFormat(NO_PIXELFORMAT); //Not supported by libavg
+ return;
+ } else if (mode == MODE_320x240_YUV422) {
+ pSize = IntPoint(320,240);
+ pPixelFormat = PixelFormat(YCbCr422);
+ return;
+ } else if (mode == MODE_640x480_YUV411) {
+ pSize = IntPoint(640,480);
+ pPixelFormat = PixelFormat(YCbCr411);
+ return;
+ } else if (mode == MODE_640x480_YUV422) {
+ pSize = IntPoint(640,480);
+ pPixelFormat = PixelFormat(YCbCr422);
+ return;
+ } else if (mode == MODE_640x480_RGB) {
+ pSize = IntPoint(640,480);
+ pPixelFormat = PixelFormat(R8G8B8);
+ return;
+ } else if (mode == MODE_640x480_MONO) {
+ pSize = IntPoint(640,480);
+ pPixelFormat = PixelFormat(I8);
+ return;
+ } else if (mode == MODE_640x480_MONO16) {
+ pSize = IntPoint(640,480);
+ pPixelFormat = PixelFormat(I16);
+ return;
+ } else {
+ AVG_ASSERT(false);
+ return;
+ }
+ break;
+ } case FORMAT_1: {
+ if (mode == MODE_800x600_YUV422) {
+ pSize = IntPoint(800,600);
+ pPixelFormat = PixelFormat(YCbCr422);
+ return;
+ } else if (mode == MODE_800x600_RGB) {
+ pSize = IntPoint(800,600);
+ pPixelFormat = PixelFormat(R8G8B8);
+ return;
+ } else if (mode == MODE_800x600_MONO) {
+ pSize = IntPoint(800,600);
+ pPixelFormat = PixelFormat(I8);
+ return;
+ } else if (mode == MODE_1024x768_YUV422) {
+ pSize = IntPoint(1024,768);
+ pPixelFormat = PixelFormat(YCbCr422);
+ return;
+ } else if (mode == MODE_1024x768_RGB) {
+ pSize = IntPoint(1024,768);
+ pPixelFormat = PixelFormat(R8G8B8);
+ return;
+ } else if (mode == MODE_1024x768_MONO) {
+ pSize = IntPoint(1024,768);
+ pPixelFormat = PixelFormat(I8);
+ return;
+ } else if (mode == MODE_800x600_MONO16) {
+ pSize = IntPoint(800,600);
+ pPixelFormat = PixelFormat(I16);
+ return;
+ } else if (mode == MODE_1024x768_MONO16) {
+ pSize = IntPoint(1024,768);
+ pPixelFormat = PixelFormat(I16);
+ return;
+ } else {
+ AVG_ASSERT(false);
+ return;
+ }
+ break;
+ } case FORMAT_2: {
+ if (mode == MODE_1280x960_YUV422) {
+ pSize = IntPoint(1280,960);
+ pPixelFormat = PixelFormat(YCbCr422);
+ return;
+ } else if (mode == MODE_1280x960_RGB) {
+ pSize = IntPoint(1280,960);
+ pPixelFormat = PixelFormat(R8G8B8);
+ return;
+ } else if (mode == MODE_1280x960_MONO) {
+ pSize = IntPoint(1280,960);
+ pPixelFormat = PixelFormat(I8);
+ return;
+ } else if (mode == MODE_1600x1200_YUV422) {
+ pSize = IntPoint(1600,1200);
+ pPixelFormat = PixelFormat(YCbCr422);
+ return;
+ } else if (mode == MODE_1600x1200_RGB) {
+ pSize = IntPoint(1600,1200);
+ pPixelFormat = PixelFormat(R8G8B8);
+ return;
+ } else if (mode == MODE_1600x1200_MONO) {
+ pSize = IntPoint(1600,1200);
+ pPixelFormat = PixelFormat(I8);
+ return;
+ } else if (mode == MODE_1280x960_MONO16) {
+ pSize = IntPoint(1280,960);
+ pPixelFormat = PixelFormat(I16);
+ return;
+ } else if (mode == MODE_1600x1200_MONO16) {
+ pSize = IntPoint(1600,1200);
+ pPixelFormat = PixelFormat(I16);
+ return;
+ } else {
+ AVG_ASSERT(false);
+ return;
+ }
+ break;
+ } default: {
+ AVG_ASSERT(false);
+ return;
+ }
+ }
+}
+
}
diff --git a/src/imaging/CMUCameraUtils.h b/src/imaging/CMUCameraUtils.h
index 8172928..0d00c9f 100644
--- a/src/imaging/CMUCameraUtils.h
+++ b/src/imaging/CMUCameraUtils.h
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -36,9 +36,11 @@ namespace avg {
void getVideoFormatAndMode(IntPoint& Size, PixelFormat pf,
unsigned long* pVideoFormat, unsigned long* pVideoMode);
-unsigned long getFrameRateConst(double FrameRate);
+unsigned long getFrameRateConst(float FrameRate);
+float getFrameRateFloat(unsigned long FrameRate);
CAMERA_FEATURE getFeatureID(CameraFeature Feature);
-
+void getImageSizeAndPF(unsigned long videoFormat, unsigned long videoMode,
+ IntPoint &pSize, PixelFormat &pPixelFormat);
}
#endif
diff --git a/src/imaging/Camera.cpp b/src/imaging/Camera.cpp
index cfd13ab..51d778e 100644
--- a/src/imaging/Camera.cpp
+++ b/src/imaging/Camera.cpp
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -51,9 +51,11 @@ namespace avg {
using namespace std;
-Camera::Camera(PixelFormat camPF, PixelFormat destPF)
+Camera::Camera(PixelFormat camPF, PixelFormat destPF, IntPoint size, float frameRate)
: m_CamPF(camPF),
- m_DestPF(destPF)
+ m_DestPF(destPF),
+ m_Size(size),
+ m_FrameRate(frameRate)
{
// cerr << "Camera: " << getPixelFormatString(camPF) << "-->"
// << getPixelFormatString(destPF) << endl;
@@ -74,7 +76,7 @@ PixelFormat Camera::getDestPF() const
return m_DestPF;
}
-static ProfilingZoneID CameraConvertProfilingZone("Camera format conversion");
+static ProfilingZoneID CameraConvertProfilingZone("Camera format conversion", true);
BitmapPtr Camera::convertCamFrameToDestPF(BitmapPtr pCamBmp)
{
@@ -85,10 +87,24 @@ BitmapPtr Camera::convertCamFrameToDestPF(BitmapPtr pCamBmp)
pDestBmp->setPixelFormat(R8G8B8X8);
FilterFlipRGB().applyInPlace(pDestBmp);
}
+ if (m_CamPF != R8G8B8 && m_DestPF == R8G8B8X8) {
+ pDestBmp->setPixelFormat(B8G8R8X8);
+ FilterFlipRGB().applyInPlace(pDestBmp);
+ }
return pDestBmp;
}
+IntPoint Camera::getImgSize()
+{
+ return m_Size;
+}
+
+float Camera::getFrameRate() const
+{
+ return m_FrameRate;
+}
+
PixelFormat Camera::fwBayerStringToPF(unsigned long reg)
{
string sBayerFormat((char*)&reg, 4);
@@ -108,6 +124,11 @@ PixelFormat Camera::fwBayerStringToPF(unsigned long reg)
}
}
+void Camera::setImgSize(const IntPoint& size)
+{
+ m_Size = size;
+}
+
string cameraFeatureToString(CameraFeature feature)
{
switch (feature) {
@@ -137,6 +158,10 @@ string cameraFeatureToString(CameraFeature feature)
return "temperature";
case CAM_FEATURE_TRIGGER:
return "trigger";
+ case CAM_FEATURE_TRIGGER_DELAY:
+ return "trigger delay";
+ case CAM_FEATURE_WHITE_SHADING:
+ return "white shading";
case CAM_FEATURE_ZOOM:
return "zoom";
case CAM_FEATURE_PAN:
@@ -160,7 +185,7 @@ string cameraFeatureToString(CameraFeature feature)
CameraPtr createCamera(const string& sDriver, const string& sDevice, int unit,
bool bFW800, const IntPoint& captureSize, PixelFormat camPF, PixelFormat destPF,
- double frameRate)
+ float frameRate)
{
CameraPtr pCamera;
try {
@@ -182,8 +207,8 @@ CameraPtr createCamera(const string& sDriver, const string& sDevice, int unit,
pCamera = CameraPtr(new CMUCamera(guid, bFW800, captureSize, camPF, destPF,
frameRate));
#else
- guid = 0; // Silence compiler warning
- AVG_TRACE(Logger::WARNING, "Firewire camera specified, but firewire "
+ (void)guid; // Silence compiler warning
+ AVG_LOG_WARNING("Firewire camera specified, but firewire "
"support not compiled in.");
#endif
} else if (sDriver == "video4linux") {
@@ -191,7 +216,7 @@ CameraPtr createCamera(const string& sDriver, const string& sDevice, int unit,
pCamera = CameraPtr(new V4LCamera(sDevice, unit, captureSize, camPF,
destPF, frameRate));
#else
- AVG_TRACE(Logger::WARNING, "Video4Linux camera specified, but "
+ AVG_LOG_WARNING("Video4Linux camera specified, but "
"Video4Linux support not compiled in.");
#endif
} else if (sDriver == "directshow") {
@@ -203,7 +228,7 @@ CameraPtr createCamera(const string& sDriver, const string& sDevice, int unit,
pCamera = CameraPtr(new DSCamera(sDevice, captureSize, camPF, destPF,
frameRate));
#else
- AVG_TRACE(Logger::WARNING, "DirectShow camera specified, but "
+ AVG_LOG_WARNING("DirectShow camera specified, but "
"DirectShow is only available under windows.");
#endif
} else {
@@ -212,7 +237,7 @@ CameraPtr createCamera(const string& sDriver, const string& sDevice, int unit,
}
} catch (const Exception& e) {
if (e.getCode() == AVG_ERR_CAMERA_NONFATAL) {
- AVG_TRACE(Logger::WARNING, e.getStr());
+ AVG_LOG_WARNING(e.getStr());
} else {
throw;
}
@@ -225,20 +250,52 @@ CameraPtr createCamera(const string& sDriver, const string& sDevice, int unit,
}
-void dumpCameras()
+std::vector<CameraInfo> getCamerasInfos()
{
-#ifdef AVG_ENABLE_1394_2
- FWCamera::dumpCameras();
+ std::vector<CameraInfo> camerasInfo;
+
+#ifdef AVG_ENABLE_1394_2
+ int amountFWCameras = FWCamera::countCameras();
+ for (int i = 0; i < amountFWCameras; i++) {
+ CameraInfo* camInfo = FWCamera::getCameraInfos(i);
+ if (camInfo != NULL) {
+ camInfo->checkAddBayer8();
+ camerasInfo.push_back(*camInfo);
+ }
+ }
#endif
-#ifdef AVG_ENABLE_CMU1394
- CMUCamera::dumpCameras();
+#ifdef AVG_ENABLE_CMU1394
+ int amountCMUCameras = CMUCamera::countCameras();
+ for (int i = 0; i < amountCMUCameras; i++) {
+ CameraInfo* camInfo = CMUCamera::getCameraInfos(i);
+ if (camInfo != NULL) {
+ camInfo->checkAddBayer8();
+ camerasInfo.push_back(*camInfo);
+ }
+ }
#endif
-#ifdef AVG_ENABLE_V4L2
- V4LCamera::dumpCameras();
+#ifdef AVG_ENABLE_DSHOW
+ int amountDSCameras = DSCamera::countCameras();
+ for (int i = 0; i < amountDSCameras; i++) {
+ CameraInfo* camInfo = DSCamera::getCameraInfos(i);
+ if (camInfo != NULL) {
+ camInfo->checkAddBayer8();
+ camerasInfo.push_back(*camInfo);
+ }
+ }
#endif
-#ifdef AVG_ENABLE_DSHOW
- DSCamera::dumpCameras();
+#ifdef AVG_ENABLE_V4L2
+ int amountV4LCameras = V4LCamera::countCameras();
+ for (int i = 0; i < amountV4LCameras; i++) {
+ CameraInfo* camInfo = V4LCamera::getCameraInfos(i);
+ if (camInfo != NULL) {
+ camInfo->checkAddBayer8();
+ camerasInfo.push_back(*camInfo);
+ }
+ }
#endif
+ return camerasInfo;
}
+
}
diff --git a/src/imaging/Camera.h b/src/imaging/Camera.h
index 3a008e3..9d41574 100644
--- a/src/imaging/Camera.h
+++ b/src/imaging/Camera.h
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -26,8 +26,10 @@
#include "../graphics/Bitmap.h"
#include <boost/shared_ptr.hpp>
+#include "CameraInfo.h"
#include <string>
+#include <list>
#include <map>
namespace avg {
@@ -46,6 +48,8 @@ enum CameraFeature {
CAM_FEATURE_FOCUS,
CAM_FEATURE_TEMPERATURE,
CAM_FEATURE_TRIGGER,
+ CAM_FEATURE_TRIGGER_DELAY,
+ CAM_FEATURE_WHITE_SHADING,
CAM_FEATURE_ZOOM,
CAM_FEATURE_PAN,
CAM_FEATURE_TILT,
@@ -53,27 +57,28 @@ enum CameraFeature {
CAM_FEATURE_CAPTURE_SIZE,
CAM_FEATURE_CAPTURE_QUALITY,
CAM_FEATURE_CONTRAST,
- CAM_FEATURE_STROBE_DURATION
+ CAM_FEATURE_STROBE_DURATION,
+ CAM_FEATURE_UNSUPPORTED
};
class AVG_API Camera
{
public:
- Camera(PixelFormat camPF, PixelFormat destPF);
+ Camera(PixelFormat camPF, PixelFormat destPF, IntPoint size, float frameRate);
virtual ~Camera() {};
virtual void startCapture() {};
-
+
PixelFormat getCamPF() const;
void setCamPF(PixelFormat pf);
PixelFormat getDestPF() const;
BitmapPtr convertCamFrameToDestPF(BitmapPtr pCamBmp);
- virtual IntPoint getImgSize() = 0;
+ IntPoint getImgSize();
+ float getFrameRate() const;
virtual BitmapPtr getImage(bool bWait) = 0;
virtual const std::string& getDevice() const = 0;
virtual const std::string& getDriverName() const = 0;
- virtual double getFrameRate() const = 0;
virtual int getFeature(CameraFeature feature) const = 0;
virtual void setFeature(CameraFeature feature, int Value,
@@ -85,13 +90,19 @@ public:
protected:
PixelFormat fwBayerStringToPF(unsigned long reg);
+ void setImgSize(const IntPoint& size);
private:
Camera();
PixelFormat m_CamPF;
PixelFormat m_DestPF;
+
+ IntPoint m_Size;
+ float m_FrameRate;
};
+
+
std::string cameraFeatureToString(CameraFeature feature);
typedef boost::shared_ptr<Camera> CameraPtr;
@@ -99,9 +110,9 @@ typedef std::map<CameraFeature, int> FeatureMap;
AVG_API CameraPtr createCamera(const std::string& sDriver, const std::string& sDevice,
int unit, bool bFW800, const IntPoint& captureSize, PixelFormat camPF,
- PixelFormat destPF, double frameRate);
+ PixelFormat destPF, float frameRate);
-AVG_API void dumpCameras();
+AVG_API std::vector<CameraInfo> getCamerasInfos();
}
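
The Camera.h change above replaces the dumpCameras() free function with getCamerasInfos(), which returns structured data instead of printing to stderr. A minimal enumeration sketch using only the accessors declared in this diff (the include path, output formatting and the getPixelFormatString() helper seen in the commented-out Camera.cpp code are assumptions):

    #include <iostream>
    #include "imaging/Camera.h"

    void listCameras()
    {
        std::vector<avg::CameraInfo> camInfos = avg::getCamerasInfos();
        for (size_t i = 0; i < camInfos.size(); ++i) {
            avg::CameraInfo& info = camInfos[i];
            std::cout << info.getDriver() << " camera, device id "
                    << info.getDeviceID() << std::endl;
            // Image formats: size, pixel format and supported framerates.
            avg::CameraImageFormatsVector formats = info.getImageFormats();
            for (size_t j = 0; j < formats.size(); ++j) {
                avg::IntPoint size = formats[j].getSize();
                std::cout << "  " << size.x << "x" << size.y << ", "
                        << avg::getPixelFormatString(formats[j].getPixelFormat())
                        << std::endl;
            }
            // Controls: name, range and default value.
            avg::CameraControlsVector controls = info.getControls();
            for (size_t k = 0; k < controls.size(); ++k) {
                std::cout << "  " << controls[k].getControlName() << " ["
                        << controls[k].getMin() << ".." << controls[k].getMax()
                        << "], default " << controls[k].getDefault() << std::endl;
            }
        }
    }
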
diff --git a/src/imaging/CameraInfo.cpp b/src/imaging/CameraInfo.cpp
new file mode 100644
index 0000000..b33789f
--- /dev/null
+++ b/src/imaging/CameraInfo.cpp
@@ -0,0 +1,159 @@
+//
+// libavg - Media Playback Engine.
+// Copyright (C) 2003-2014 Ulrich von Zadow
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+//
+// Current versions can be found at www.libavg.de
+//
+// V4L2/libavg compliance by 02L > Outside Standing Level
+
+#include "CameraInfo.h"
+
+namespace avg {
+
+CameraImageFormat::CameraImageFormat(IntPoint size, PixelFormat pixelFormat,
+ FrameratesVector framerates)
+{
+ m_Size = size;
+ m_PixelFormat = pixelFormat;
+ m_Framerates = framerates;
+}
+
+CameraImageFormat::~CameraImageFormat()
+{
+
+}
+
+IntPoint CameraImageFormat::getSize()
+{
+ return m_Size;
+}
+
+PixelFormat CameraImageFormat::getPixelFormat()
+{
+ return m_PixelFormat;
+}
+
+FrameratesVector CameraImageFormat::getFramerates()
+{
+ return m_Framerates;
+}
+
+
+CameraControl::CameraControl(const std::string& sControlName, int min, int max,
+ int defaultValue)
+{
+ m_sControlName = sControlName;
+ m_Min = min;
+ m_Max = max;
+ m_DefaultValue = defaultValue;
+}
+
+CameraControl::~CameraControl()
+{
+
+}
+
+std::string CameraControl::getControlName()
+{
+ return m_sControlName;
+}
+
+int CameraControl::getMin()
+{
+ return m_Min;
+}
+
+int CameraControl::getMax()
+{
+ return m_Max;
+}
+
+int CameraControl::getDefault()
+{
+ return m_DefaultValue;
+}
+
+
+CameraInfo::CameraInfo(const std::string& sDriver, const std::string& sDeviceID)
+{
+ m_sDriver = sDriver;
+ m_sDeviceID = sDeviceID;
+}
+
+CameraInfo::~CameraInfo()
+{
+
+}
+
+void CameraInfo::addControl(CameraControl control)
+{
+ m_Controls.push_back(control);
+}
+
+void CameraInfo::addImageFormat(CameraImageFormat format)
+{
+ m_Formats.push_back(format);
+}
+
+std::string CameraInfo::getDriver()
+{
+ return m_sDriver;
+}
+
+std::string CameraInfo::getDeviceID()
+{
+ return m_sDeviceID;
+}
+
+CameraImageFormatsVector CameraInfo::getImageFormats()
+{
+ return m_Formats;
+}
+
+CameraControlsVector CameraInfo::getControls()
+{
+ return m_Controls;
+}
+
+void CameraInfo::checkAddBayer8()
+{
+ CameraImageFormatsVector::iterator it = m_Formats.begin();
+ CameraImageFormatsVector i8ImageFormats;
+ bool hasColor = false;
+ for (; it!=m_Formats.end(); it++) {
+ PixelFormat pf = (*it).getPixelFormat();
+ if (pf == I8) {
+ i8ImageFormats.push_back(*it);
+ }
+ if (hasColor == false) {
+ hasColor = pixelFormatIsColored(pf);
+ }
+ }
+ if (hasColor) {
+ it = i8ImageFormats.begin();
+ for (; it!=i8ImageFormats.end(); it++) {
+ PixelFormat format = BAYER8;
+ IntPoint size = (*it).getSize();
+ FrameratesVector framerates = (*it).getFramerates();
+ CameraImageFormat bayerImageFormat = CameraImageFormat(size, format,
+ framerates);
+ m_Formats.push_back(bayerImageFormat);
+ }
+ }
+}
+
+}
diff --git a/src/imaging/CameraInfo.h b/src/imaging/CameraInfo.h
new file mode 100644
index 0000000..0c5a75b
--- /dev/null
+++ b/src/imaging/CameraInfo.h
@@ -0,0 +1,100 @@
+//
+// libavg - Media Playback Engine.
+// Copyright (C) 2003-2014 Ulrich von Zadow
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+//
+// Current versions can be found at www.libavg.de
+//
+
+#ifndef CameraInfo_H_
+#define CameraInfo_H_
+
+#include "../api.h"
+#include "../graphics/PixelFormat.h"
+#include "../base/GLMHelper.h"
+
+#include <string>
+#include <list>
+#include <map>
+
+namespace avg{
+
+typedef std::vector<float> FrameratesVector;
+
+class AVG_API CameraImageFormat
+{
+ public:
+ CameraImageFormat(IntPoint size, PixelFormat pixelFormat,
+ FrameratesVector framerates);
+ ~CameraImageFormat();
+
+ IntPoint getSize();
+ PixelFormat getPixelFormat();
+ FrameratesVector getFramerates();
+
+ private:
+ IntPoint m_Size;
+ PixelFormat m_PixelFormat;
+ FrameratesVector m_Framerates;
+};
+
+class AVG_API CameraControl
+{
+ public:
+ CameraControl(const std::string& sControlName, int min, int max,
+ int defaultValue);
+ ~CameraControl();
+
+ std::string getControlName();
+ int getMin();
+ int getMax();
+ int getDefault();
+
+ private:
+ std::string m_sControlName;
+ int m_Min;
+ int m_Max;
+ int m_DefaultValue;
+};
+
+typedef std::vector<CameraImageFormat> CameraImageFormatsVector;
+typedef std::vector<CameraControl> CameraControlsVector;
+
+class AVG_API CameraInfo
+{
+ public:
+ CameraInfo(const std::string& sDriver, const std::string& sDeviceID);
+ ~CameraInfo();
+
+ void addControl(CameraControl control);
+ void addImageFormat(CameraImageFormat format);
+
+ std::string getDriver();
+ std::string getDeviceID();
+ CameraImageFormatsVector getImageFormats();
+ CameraControlsVector getControls();
+ void checkAddBayer8();
+
+ private:
+ std::string m_sDriver;
+ std::string m_sDeviceID;
+ CameraImageFormatsVector m_Formats;
+ CameraControlsVector m_Controls;
+};
+
+}
+
+#endif /* CAMERAINFO_H_ */
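
The backend getCameraInfos() implementations above fill this structure via addImageFormat() and addControl(); checkAddBayer8() then duplicates each I8 format as BAYER8 when at least one color format was reported. An illustrative sketch of that flow (the device id, sizes, framerates and control values are made up; only the API calls come from this header):

    // Hypothetical report: one YCbCr422 and one I8 format plus a brightness control.
    avg::FrameratesVector rates;
    rates.push_back(15.f);
    rates.push_back(30.f);
    avg::CameraInfo info("Firewire", "0x0814436102632");  // driver, device id
    info.addImageFormat(avg::CameraImageFormat(avg::IntPoint(640, 480), avg::YCbCr422, rates));
    info.addImageFormat(avg::CameraImageFormat(avg::IntPoint(640, 480), avg::I8, rates));
    info.addControl(avg::CameraControl("brightness", 0, 255, 128));
    info.checkAddBayer8();  // a color format is present, so a BAYER8 copy of the I8 entry is added
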
diff --git a/src/imaging/CoordTransformer.cpp b/src/imaging/CoordTransformer.cpp
index 1fdbadd..b4d7489 100644
--- a/src/imaging/CoordTransformer.cpp
+++ b/src/imaging/CoordTransformer.cpp
@@ -1,7 +1,7 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
diff --git a/src/imaging/CoordTransformer.h b/src/imaging/CoordTransformer.h
index 795e9b2..b9ce97d 100644
--- a/src/imaging/CoordTransformer.h
+++ b/src/imaging/CoordTransformer.h
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -23,7 +23,7 @@
#define _CoordTransformer_H_
#include "../api.h"
-#include "../base/Point.h"
+#include "../base/GLMHelper.h"
#include <boost/shared_ptr.hpp>
@@ -35,8 +35,8 @@ public:
CoordTransformer();
virtual ~CoordTransformer();
- virtual DPoint transform_point(const DPoint & pt) = 0; //(x,y) -> (x', y')
- virtual DPoint inverse_transform_point(const DPoint & pt) = 0; //(x,y) -> (x', y')
+ virtual glm::dvec2 transform_point(const glm::dvec2 & pt) = 0;
+ virtual glm::dvec2 inverse_transform_point(const glm::dvec2 & pt) = 0;
};
typedef boost::shared_ptr<CoordTransformer> CoordTransformerPtr;
diff --git a/src/imaging/DSCamera.cpp b/src/imaging/DSCamera.cpp
index 2d0ae6e..e9e3b3b 100644
--- a/src/imaging/DSCamera.cpp
+++ b/src/imaging/DSCamera.cpp
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -42,11 +42,9 @@ namespace avg {
using namespace std;
DSCamera::DSCamera(std::string sDevice, IntPoint size, PixelFormat camPF,
- PixelFormat destPF, double frameRate)
- : Camera(camPF, destPF),
+ PixelFormat destPF, float frameRate)
+ : Camera(camPF, destPF, size, frameRate),
m_sDevice(sDevice),
- m_Size(size),
- m_FrameRate(frameRate),
m_pGraph(0),
m_pCapture(0),
m_pCameraPropControl(0)
@@ -135,11 +133,6 @@ void DSCamera::close()
m_pSampleGrabber->Release();
}
-IntPoint DSCamera::getImgSize()
-{
- return m_Size;
-}
-
BitmapPtr DSCamera::getImage(bool bWait)
{
BitmapPtr pBmp;
@@ -179,7 +172,7 @@ void DSCamera::setCaptureFormat()
checkForDShowError(hr, "DSCamera::dumpMediaTypes::GetStreamCaps");
pvih = (VIDEOINFOHEADER*)(pmtConfig->pbFormat);
bih = pvih->bmiHeader;
- double frameRate = double(10000000L/pvih->AvgTimePerFrame);
+ float frameRate = float(10000000L/pvih->AvgTimePerFrame);
capsPF = mediaSubtypeToPixelFormat(pmtConfig->subtype);
if (capsPF != NO_PIXELFORMAT && bih.biWidth != 0) {
@@ -191,10 +184,10 @@ void DSCamera::setCaptureFormat()
if (height < 0) {
height = -height;
}
- if (bih.biWidth == m_Size.x && height == m_Size.y &&
+ if (bih.biWidth == getImgSize().x && height == getImgSize().y &&
(getCamPF() == capsPF || (getCamPF() == BAYER8_GBRG && capsPF == I8)))
{
- if (fabs(m_FrameRate-frameRate) < 0.001) {
+ if (fabs(getFrameRate()-frameRate) < 0.001) {
bFormatFound = true;
break;
} else if (!bCloseFormatFound) {
@@ -212,10 +205,10 @@ void DSCamera::setCaptureFormat()
}
}
if (bFormatFound) {
- AVG_TRACE(Logger::CONFIG, "Camera image format: "
- << camImageFormatToString(pmtConfig));
+ AVG_TRACE(Logger::category::CONFIG, Logger::severity::INFO,
+ "Camera image format: " << camImageFormatToString(pmtConfig));
int height = ((VIDEOINFOHEADER*)(pmtConfig->pbFormat))->bmiHeader.biHeight;
-// m_bUpsideDown = (height < 0);
+ m_bUpsideDown = (height > 0);
hr = pSC->SetFormat(pmtConfig);
checkForDShowError(hr, "DSCamera::dumpMediaTypes::SetFormat");
CoTaskMemFree((PVOID)pmtConfig->pbFormat);
@@ -224,22 +217,21 @@ void DSCamera::setCaptureFormat()
if (bCloseFormatFound) {
// Set the framerate manually.
pvih = (VIDEOINFOHEADER*)(pmtCloseConfig->pbFormat);
- pvih->AvgTimePerFrame = REFERENCE_TIME(10000000/m_FrameRate);
+ pvih->AvgTimePerFrame = REFERENCE_TIME(10000000/getFrameRate());
int height = pvih->bmiHeader.biHeight;
-// m_bUpsideDown = (height < 0);
+ m_bUpsideDown = (height > 0);
hr = pSC->SetFormat(pmtCloseConfig);
checkForDShowError(hr, "DSCamera::dumpMediaTypes::SetFormat");
- AVG_TRACE(Logger::CONFIG, "Camera image format: "
- << camImageFormatToString(pmtCloseConfig));
+ AVG_TRACE(Logger::category::CONFIG, Logger::severity::INFO,
+ "Camera image format: " << camImageFormatToString(pmtCloseConfig));
CoTaskMemFree((PVOID)pmtCloseConfig->pbFormat);
CoTaskMemFree(pmtCloseConfig);
// TODO: Check if framerate is actually attained.
} else {
- AVG_TRACE(Logger::WARNING,
- "Possibly incomplete list of camera image formats: ");
+ AVG_LOG_WARNING("Possibly incomplete list of camera image formats: ");
for (unsigned i = 0; i < sImageFormats.size(); i++) {
- AVG_TRACE(Logger::WARNING, " " << sImageFormats[i]);
+ AVG_LOG_WARNING(" " << sImageFormats[i]);
}
throw Exception(AVG_ERR_CAMERA_NONFATAL,
"Could not find requested camera image format.");
@@ -260,11 +252,6 @@ const std::string& DSCamera::getDriverName() const
return sDriverName;
}
-double DSCamera::getFrameRate() const
-{
- return m_FrameRate;
-}
-
int DSCamera::getFeature(CameraFeature feature) const
{
long prop = getDSFeatureID(feature);
@@ -277,8 +264,8 @@ int DSCamera::getFeature(CameraFeature feature) const
hr = m_pCameraPropControl->Get(prop, &val, &flags);
}
if (!SUCCEEDED(hr)) {
- AVG_TRACE(Logger::WARNING, "DSCamera::getFeature "
- + cameraFeatureToString(feature)+" failed.");
+ AVG_LOG_WARNING("DSCamera::getFeature " + cameraFeatureToString(feature)+
+ " failed.");
return 0;
}
return val;
@@ -305,14 +292,13 @@ void DSCamera::setFeature(CameraFeature feature, int value, bool bIgnoreOldValue
switch (hr) {
case E_INVALIDARG:
// TODO: Throw exception
- AVG_TRACE(Logger::ERROR, "DSCamera::setFeature("
- << cameraFeatureToString(feature) << ", " << value << ") failed.");
+ AVG_LOG_ERROR("DSCamera::setFeature(" << cameraFeatureToString(feature) <<
+ ", " << value << ") failed.");
break;
case E_PROP_ID_UNSUPPORTED:
case E_PROP_SET_UNSUPPORTED:
- AVG_TRACE(Logger::ERROR, "DSCamera::setFeature("
- << cameraFeatureToString(feature)
- << ") failed: Feature not supported by camera.");
+ AVG_LOG_ERROR("DSCamera::setFeature(" << cameraFeatureToString(feature)
+ << ") failed: Feature not supported by camera.");
break;
default:
checkForDShowError(hr, "DSCamera::setFeature()::Set value");
@@ -321,27 +307,27 @@ void DSCamera::setFeature(CameraFeature feature, int value, bool bIgnoreOldValue
void DSCamera::setFeatureOneShot(CameraFeature feature)
{
- AVG_TRACE(Logger::WARNING,
+ AVG_LOG_WARNING(
"OneShot feature setting not implemented for DirectShow camera driver.");
}
int DSCamera::getWhitebalanceU() const
{
- AVG_TRACE(Logger::WARNING,
+ AVG_LOG_WARNING(
"Whitebalance not implemented for DirectShow camera driver.");
return 0;
}
int DSCamera::getWhitebalanceV() const
{
- AVG_TRACE(Logger::WARNING,
+ AVG_LOG_WARNING(
"Whitebalance not implemented for DirectShow camera driver.");
return 0;
}
void DSCamera::setWhitebalance(int u, int v, bool bIgnoreOldValue)
{
- AVG_TRACE(Logger::WARNING,
+ AVG_LOG_WARNING(
"Whitebalance not implemented for DirectShow camera driver.");
}
@@ -352,64 +338,240 @@ void DSCamera::onSample(IMediaSample * pSample)
// Get the current image.
pSample->GetPointer(&pData);
- int stride = m_Size.x*getBytesPerPixel(getCamPF());
- Bitmap camBmp(m_Size, getCamPF(), pData, stride, false, "CameraImage");
+ int stride = getImgSize().x*getBytesPerPixel(getCamPF());
+ Bitmap camBmp(getImgSize(), getCamPF(), pData, stride, false, "CameraImage");
// Copy over to bitmap queue, doing pixel format conversion if necessary.
- BitmapPtr pDestBmp = BitmapPtr(new Bitmap(m_Size, getDestPF(),
+ BitmapPtr pDestBmp = BitmapPtr(new Bitmap(getImgSize(), getDestPF(),
"ConvertedCameraImage"));
pDestBmp->copyPixels(camBmp);
-/*
+
if (m_bUpsideDown) {
FilterFlip().applyInPlace(pDestBmp);
}
-*/
+
m_BitmapQ.push(pDestBmp);
}
-void DSCamera::dumpCameras()
+int DSCamera::countCameras()
{
+ int count = 0;
HRESULT hr = S_OK;
- // TODO: Check if the threading model is ok.
hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);
- checkForDShowError(hr, "DSCamera::dumpCameras()::CoInitializeEx");
+ checkForDShowError(hr, "DSCamera::countCameras()::CoInitializeEx");
- // Create the system device enumerator
ICreateDevEnum *pDevEnum =NULL;
hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC,
IID_ICreateDevEnum, (void **) &pDevEnum);
- checkForDShowError(hr, "DSCamera::dumpCameras()::CreateDevEnum");
+ checkForDShowError(hr, "DSCamera::countCameras()::CreateDevEnum");
- // Create an enumerator for the video capture devices
IEnumMoniker *pClassEnum = NULL;
hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pClassEnum, 0);
- checkForDShowError(hr, "DSCamera::dumpCameras()::CreateClassEnumerator");
+ checkForDShowError(hr, "DSCamera::countCameras()::CreateClassEnumerator");
if (pClassEnum == NULL) {
- return;
+ pDevEnum->Release();
+ return count;
}
-
IMoniker* pMoniker = NULL;
- bool bFirst = true;
while (pClassEnum->Next(1, &pMoniker, NULL) == S_OK) {
- if (bFirst) {
- cerr << endl;
- cerr << "DirectShow cameras: " << endl;
- bFirst = false;
- }
- IPropertyBag* pPropBag;
- hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void**)(&pPropBag));
- checkForDShowError(hr, "DSCamera::dumpCameras()::BindToStorage");
- cerr << " ----------------------------" << endl;
- cerr << " Name: " << getStringProp(pPropBag, L"FriendlyName") << endl;
- cerr << " Description: " << getStringProp(pPropBag, L"Description") << endl;
- cerr << " Device Path: " << getStringProp(pPropBag, L"DevicePath") << endl;
- pPropBag->Release();
+ count += 1;
}
pMoniker->Release();
+ pClassEnum->Release();
pDevEnum->Release();
+ return count;
+}
+
+CameraInfo* DSCamera::getCameraInfos(int deviceNumber)
+{
+#ifdef AVG_ENABLE_DSHOW
+ HRESULT hr = S_OK;
+ // Create apartment for Thread
+ hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);
+ checkForDShowError(hr, "DSCamera::getCameraInfos()::CoInitializeEx");
+ // Create the system device enumerator
+ ICreateDevEnum *pDevEnum =NULL;
+ hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC,
+ IID_ICreateDevEnum, (void **) &pDevEnum);
+ checkForDShowError(hr, "DSCamera::getCameraInfos()::CreateDevEnum");
+ // Create an enumerator for the video capture devices
+ IEnumMoniker *pClassEnum = NULL;
+ hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pClassEnum, 0);
+ checkForDShowError(hr, "DSCamera::getCameraInfos()::CreateClassEnumerator");
+ if (pClassEnum == NULL) {
+ pClassEnum->Release();
+ pDevEnum->Release();
+ return NULL;
+ }
+ IMoniker* pMoniker = NULL;
+ pClassEnum->Skip(deviceNumber);
+ hr = pClassEnum->Next(1, &pMoniker, NULL);
+ if (hr != S_OK) {
+ pClassEnum->Release();
+ pDevEnum->Release();
+ pMoniker->Release();
+ return NULL;
+ }
+ IPropertyBag* pPropBag;
+ hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void**)(&pPropBag));
+ checkForDShowError(hr, "DSCamera::getCameraInfos()::BindToStorage");
+
+ std::string deviceID = getStringProp(pPropBag, L"DevicePath");
+ if (deviceID.empty()) {
+ deviceID = getStringProp(pPropBag, L"Description");
+ if (deviceID.empty()) {
+ deviceID = getStringProp(pPropBag, L"FriendlyName");
+ }
+ }
+ CameraInfo* pCamInfo = new CameraInfo("DirectShow", deviceID);
+
+ getCameraImageFormats(pMoniker, pCamInfo);
+ getCameraControls(pMoniker, pCamInfo);
+
+ pPropBag->Release();
+ pMoniker->Release();
pClassEnum->Release();
+ pDevEnum->Release();
+ return pCamInfo;
+#endif
+ return NULL;
+}
+
+void DSCamera::getCameraImageFormats(IMoniker* pMoniker, CameraInfo* pCamInfo)
+{
+ HRESULT hr = S_OK;
+ IAMStreamConfig* pSC;
+ ICaptureGraphBuilder2* pCapture;
+ IBaseFilter* pSrcFilter;
+ // locates the object identified by pMoniker and
+ // returns a pointer to its filter interface
+ hr = pMoniker->BindToObject(0,0,IID_IBaseFilter, (void**) &pSrcFilter);
+ checkForDShowError(hr, "DSCamera::getImageFormats()::BindToObject");
+ if (pSrcFilter == NULL) {
+ return;
+ }
+ // Creates an uninitialized instance and returns a pointer to
+ // the IID_ICaptureGraphBuilder2 interface
+ hr = CoCreateInstance (CLSID_CaptureGraphBuilder2 , NULL, CLSCTX_INPROC,
+ IID_ICaptureGraphBuilder2, (void **) &pCapture);
+ checkForDShowError(hr, "DSCamera::getImageFormats()::CaptureGraphBuilder2");
+ // searches the graph for a IID_IAMStreamConfig interface, returns a pointer
+ hr = pCapture->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
+ pSrcFilter, IID_IAMStreamConfig, (void **)&pSC);
+ checkForDShowError(hr, "DSCamera::getImageFormats()::FindInterface");
+ int numCaps = 0;
+ int capsSize = 0;
+ hr = pSC->GetNumberOfCapabilities(&numCaps, &capsSize);
+ checkForDShowError(hr, "DSCamera::getImageFormats()::GetNumberOfCapabilities");
+ AM_MEDIA_TYPE* pmtConfig;
+ vector<string> sImageFormats;
+ VIDEOINFOHEADER* pvih;
+ BITMAPINFOHEADER bih;
+ PixelFormat capsPF;
+ for (int i = 0; i < numCaps; i++) {
+ VIDEO_STREAM_CONFIG_CAPS scc;
+ hr = pSC->GetStreamCaps(i, &pmtConfig, (BYTE*)&scc);
+ checkForDShowError(hr, "DSCamera::getImageFormats()::GetStreamCaps");
+ pvih = (VIDEOINFOHEADER*)(pmtConfig->pbFormat);
+ bih = pvih->bmiHeader;
+ capsPF = mediaSubtypeToPixelFormat(pmtConfig->subtype);
+
+ if (capsPF != NO_PIXELFORMAT && bih.biWidth != 0) {
+ IntPoint size;
+ if (bih.biHeight >= 0) {
+ size = IntPoint(bih.biWidth, bih.biHeight);
+ } else {
+ size = IntPoint(bih.biWidth, -bih.biHeight);
+ }
+
+ std::vector<float> framerates;
+ float minFramerate = (float)(10000000 / scc.MinFrameInterval);
+ float maxFramerate = (float)(10000000 / scc.MaxFrameInterval);
+ float averageFramerate = (float)(10000000 / pvih->AvgTimePerFrame);
+ if (maxFramerate != 0.) {
+ framerates.push_back(maxFramerate);
+ }
+ if (averageFramerate != maxFramerate && averageFramerate != minFramerate) {
+ framerates.push_back(averageFramerate);
+ }
+ if (minFramerate != 0. && minFramerate != maxFramerate) {
+ framerates.push_back(minFramerate);
+ }
+ CameraImageFormat imageFormat = CameraImageFormat(size, capsPF, framerates);
+ pCamInfo->addImageFormat(imageFormat);
+ }
+ }
+ pCapture->Release();
+ pSrcFilter->Release();
+}
+
+void DSCamera::getCameraControls(IMoniker* pMoniker, CameraInfo* pCamInfo)
+{
+ HRESULT hr = S_OK;
+ IBaseFilter * pSrcFilter;
+ // locates the object identified by pMoniker and
+ // returns a pointer to its filter interface
+ hr = pMoniker->BindToObject(0,0,IID_IBaseFilter, (void**) &pSrcFilter);
+ checkForDShowError(hr, "DSCamera::dumpImageFormats()::BindToObject");
+ if (pSrcFilter == NULL) {
+ return;
+ }
+ IAMCameraControl* pAMCameraControl;
+ pSrcFilter->QueryInterface(IID_IAMCameraControl,
+ (void **)&pAMCameraControl);
+ if (pAMCameraControl != NULL) {
+ // DirectShow has 7 supported CameraControlProperties
+ for (int indexControl = 0; indexControl <= 6; indexControl++) {
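+            // -999 is a sentinel value: Get()/GetRange() leave it unchanged if the query fails.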
+ long value = -999;
+ long flags = -999;
+ pAMCameraControl->Get((CameraControlProperty)indexControl, &value, &flags);
+ long min = -999;
+ long max = -999;
+ long delta = -999;
+ long defaultValue = -999;
+ flags = -999;
+ pAMCameraControl->GetRange((CameraControlProperty)indexControl, &min, &max,
+ &delta, &defaultValue, &flags);
+
+ CameraFeature feature = getCameraFeatureID_CCP((CameraControlProperty)indexControl);
+ if (min != -999 && max != -999 && defaultValue != -999
+ && feature != CAM_FEATURE_UNSUPPORTED) {
+ std::string featureName = cameraFeatureToString(feature);
+ CameraControl control = CameraControl(featureName,min,max,defaultValue);
+ pCamInfo->addControl(control);
+ }
+ }
+ }
+ IAMVideoProcAmp* pCameraPropControl;
+ pSrcFilter->QueryInterface(IID_IAMVideoProcAmp,
+ (void **)&pCameraPropControl);
+ if (pCameraPropControl != NULL) {
+ // DirectShow has 10 supported VideoProcAmpProperties
+ for (int indexPropControl = 0; indexPropControl <= 9; indexPropControl++) {
+ long value = -999;
+ long flags = -999;
+ pCameraPropControl->Get((VideoProcAmpProperty)indexPropControl, &value,
+ &flags);
+ long min = -999;
+ long max = -999;
+ long delta = -999;
+ long defaultValue = -999;
+ flags = -999;
+ pCameraPropControl->GetRange((VideoProcAmpProperty)indexPropControl, &min,
+ &max, &delta, &defaultValue, &flags);
+
+ CameraFeature feature = getCameraFeatureID_VPAP((VideoProcAmpProperty)indexPropControl);
+ if (min != -999 && max != -999 && defaultValue != -999
+ && feature != CAM_FEATURE_UNSUPPORTED) {
+ std::string featureName = cameraFeatureToString(feature);
+ CameraControl control = CameraControl(featureName,min,max,defaultValue);
+ pCamInfo->addControl(control);
+ }
+ }
+    }
+    // Release the interfaces acquired above; QueryInterface sets the pointers to NULL on failure.
+    if (pCameraPropControl != NULL) {
+        pCameraPropControl->Release();
+    }
+    if (pAMCameraControl != NULL) {
+        pAMCameraControl->Release();
+    }
+    pSrcFilter->Release();
}
+
void DSCamera::initGraphBuilder()
{
HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);
@@ -477,7 +639,7 @@ void DSCamera::findCaptureDevice(IBaseFilter ** ppSrcFilter)
if (!bFound) {
pClassEnum->Reset();
if (pClassEnum->Next(1, &pMoniker, NULL) == S_OK) {
- AVG_TRACE(Logger::WARNING, string("Camera ") + m_sDevice
+ AVG_LOG_WARNING(string("Camera ") + m_sDevice
+ " not found. Using first camera.");
bFound = true;
IPropertyBag *pPropBag;
diff --git a/src/imaging/DSCamera.h b/src/imaging/DSCamera.h
index bf562ac..3602a62 100644
--- a/src/imaging/DSCamera.h
+++ b/src/imaging/DSCamera.h
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -31,7 +31,7 @@
#include "../graphics/Bitmap.h"
#include "../graphics/Pixel24.h"
-#include "../base/Point.h"
+#include "../base/GLMHelper.h"
#include "../base/Queue.h"
#include <string>
@@ -48,16 +48,14 @@ class DSCamera: public Camera, IDSSampleCallback
{
public:
DSCamera(std::string sDevice, IntPoint size, PixelFormat camPF, PixelFormat destPF,
- double frameRate);
+ float frameRate);
virtual ~DSCamera();
- virtual void startCapture();
+ virtual void startCapture();
- virtual IntPoint getImgSize();
virtual BitmapPtr getImage(bool bWait);
virtual const std::string& getDevice() const;
virtual const std::string& getDriverName() const;
- virtual double getFrameRate() const;
virtual int getFeature(CameraFeature feature) const;
virtual void setFeature(CameraFeature feature, int value,
@@ -69,7 +67,8 @@ public:
virtual void onSample(IMediaSample * pSample);
- static void dumpCameras();
+ static int countCameras();
+ static CameraInfo* getCameraInfos(int deviceNumber);
private:
virtual void open();
@@ -81,10 +80,10 @@ private:
void connectFilters(IGraphBuilder *pGraph, IBaseFilter *pSrc, IBaseFilter *pDest);
void getUnconnectedPin(IBaseFilter *pFilter, PIN_DIRECTION pinDir, IPin **ppPin);
static void checkForDShowError(HRESULT hr, const std::string & sAppMsg);
+ static void getCameraImageFormats(IMoniker* pMoniker, CameraInfo* pCamInfo);
+ static void getCameraControls(IMoniker* pMoniker, CameraInfo* pCamInfo);
std::string m_sDevice;
- IntPoint m_Size;
- double m_FrameRate;
IGraphBuilder * m_pGraph;
ICaptureGraphBuilder2 * m_pCapture;
@@ -98,7 +97,7 @@ private:
Queue<Bitmap> m_BitmapQ;
DWORD m_GraphRegisterID;
-// bool m_bUpsideDown;
+ bool m_bUpsideDown;
};
}
diff --git a/src/imaging/DSHelper.cpp b/src/imaging/DSHelper.cpp
index 6802ce9..d8880fb 100644
--- a/src/imaging/DSHelper.cpp
+++ b/src/imaging/DSHelper.cpp
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -181,8 +181,8 @@ string camImageFormatToString(const AM_MEDIA_TYPE* pMediaType)
BITMAPINFOHEADER* pBitmapInfo = &pVideoInfo->bmiHeader;
PixelFormat pf = mediaSubtypeToPixelFormat(pMediaType->subtype);
int height = abs(pBitmapInfo->biHeight);
- ss << "(" << pBitmapInfo->biWidth << "x" << height << "), " << pf
- << ", " << 10000000./pVideoInfo->AvgTimePerFrame << " fps.";
+ ss << pf << " " << "(" << pBitmapInfo->biWidth << ", " << height << ")"
+ << " " << 10000000./pVideoInfo->AvgTimePerFrame << " fps.";
return ss.str();
}
@@ -211,12 +211,12 @@ bool isDSFeatureCamControl(CameraFeature feature)
case CAM_FEATURE_CAPTURE_SIZE:
case CAM_FEATURE_CAPTURE_QUALITY:
case CAM_FEATURE_CONTRAST:
- AVG_TRACE(Logger::WARNING, "isDSFeatureCamControl: "
- + cameraFeatureToString(feature) + " not supported by DirectShow.");
+ AVG_LOG_WARNING("isDSFeatureCamControl: " + cameraFeatureToString(feature) +
+ " not supported by DirectShow.");
return false;
default:
- AVG_TRACE(Logger::WARNING, "isDSFeatureCamControl: "
- + cameraFeatureToString(feature) + " unknown.");
+ AVG_LOG_WARNING("isDSFeatureCamControl: " + cameraFeatureToString(feature) +
+ " unknown.");
return false;
}
}
@@ -257,13 +257,55 @@ long getDSFeatureID(CameraFeature feature)
case CAM_FEATURE_CAPTURE_SIZE:
case CAM_FEATURE_CAPTURE_QUALITY:
case CAM_FEATURE_CONTRAST:
- AVG_TRACE(Logger::WARNING, "getDSFeatureID: "+cameraFeatureToString(feature)
+        AVG_LOG_WARNING("getDSFeatureID: "+cameraFeatureToString(feature)
+" not supported by DirectShow.");
return 0;
default:
- AVG_TRACE(Logger::WARNING, "getDSFeatureID: "+cameraFeatureToString(feature)+" unknown.");
+ AVG_LOG_WARNING("getDSFeatureID: "+cameraFeatureToString(feature)+" unknown.");
return -1;
}
}
+CameraFeature getCameraFeatureID_VPAP(long videoProcAmpProperty)
+{
+ switch(videoProcAmpProperty) {
+ case VideoProcAmp_Brightness:
+ return CAM_FEATURE_BRIGHTNESS;
+ case VideoProcAmp_Sharpness:
+ return CAM_FEATURE_SHARPNESS;
+ case VideoProcAmp_WhiteBalance:
+ return CAM_FEATURE_WHITE_BALANCE;
+ case VideoProcAmp_Hue:
+ return CAM_FEATURE_HUE;
+ case VideoProcAmp_Saturation:
+ return CAM_FEATURE_SATURATION;
+ case VideoProcAmp_Gamma:
+ return CAM_FEATURE_GAMMA;
+ case VideoProcAmp_Gain:
+ return CAM_FEATURE_GAIN;
+ default:
+ return CAM_FEATURE_UNSUPPORTED;
+ }
+}
+CameraFeature getCameraFeatureID_CCP(long cameraControlProperty)
+{
+ switch(cameraControlProperty) {
+ case CameraControl_Exposure:
+ return CAM_FEATURE_EXPOSURE;
+ case CameraControl_Iris:
+ return CAM_FEATURE_IRIS;
+ case CameraControl_Focus:
+ return CAM_FEATURE_FOCUS;
+ case CameraControl_Zoom:
+ return CAM_FEATURE_ZOOM;
+ case CameraControl_Pan:
+ return CAM_FEATURE_PAN;
+ case CameraControl_Tilt:
+ return CAM_FEATURE_TILT;
+ default:
+ return CAM_FEATURE_UNSUPPORTED;
+ }
+}
+
}
diff --git a/src/imaging/DSHelper.h b/src/imaging/DSHelper.h
index 8904902..a8c6edb 100644
--- a/src/imaging/DSHelper.h
+++ b/src/imaging/DSHelper.h
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -56,7 +56,8 @@ std::string camImageFormatToString(const AM_MEDIA_TYPE* pMediaType);
bool isDSFeatureCamControl(CameraFeature feature);
long getDSFeatureID(CameraFeature feature);
-
+CameraFeature getCameraFeatureID_VPAP(long videoProcAmpProperty);
+CameraFeature getCameraFeatureID_CCP(long cameraControlProperty);
}
#endif
diff --git a/src/imaging/DSSampleGrabber.cpp b/src/imaging/DSSampleGrabber.cpp
index d38ad5f..1a62c2d 100755..100644
--- a/src/imaging/DSSampleGrabber.cpp
+++ b/src/imaging/DSSampleGrabber.cpp
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
diff --git a/src/imaging/DSSampleGrabber.h b/src/imaging/DSSampleGrabber.h
index 100667c..8c3ba5e 100755..100644
--- a/src/imaging/DSSampleGrabber.h
+++ b/src/imaging/DSSampleGrabber.h
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
diff --git a/src/imaging/DeDistort.cpp b/src/imaging/DeDistort.cpp
index abed1c8..5fec652 100644
--- a/src/imaging/DeDistort.cpp
+++ b/src/imaging/DeDistort.cpp
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -25,12 +25,13 @@
#include "TrackerConfig.h"
#include "../base/StringHelper.h"
-
+#include "../base/MathHelper.h"
+#include "../glm/gtx/rotate_vector.hpp"
#include <cstring>
#include <iostream>
#include <math.h>
-const double sqrt3 = sqrt(3.);
+const double sqrt3 = sqrt(3.f);
using namespace std;
@@ -61,12 +62,12 @@ DeDistort::DeDistort()
m_RescaleFactor = calc_rescale();
}
-DeDistort::DeDistort(const DPoint& camExtents, const DPoint& displayExtents)
+DeDistort::DeDistort(const glm::vec2& camExtents, const glm::vec2& displayExtents)
: m_Angle(0.0),
m_TrapezoidFactor(0),
m_DisplayOffset(0,0)
{
- m_CamExtents = camExtents;
+ m_CamExtents = glm::vec2(camExtents);
m_DistortionParams.push_back(0);
m_DistortionParams.push_back(0);
m_DisplayScale.x = displayExtents.x/camExtents.x;
@@ -74,9 +75,9 @@ DeDistort::DeDistort(const DPoint& camExtents, const DPoint& displayExtents)
m_RescaleFactor = calc_rescale();
}
-DeDistort::DeDistort(const DPoint& camExtents, const vector<double>& distortionParams,
- double angle, double trapezoidFactor, const DPoint& displayOffset,
- const DPoint& displayScale)
+DeDistort::DeDistort(const glm::vec2& camExtents, const vector<double>& distortionParams,
+ double angle, double trapezoidFactor, const glm::dvec2& displayOffset,
+ const glm::dvec2& displayScale)
: m_CamExtents(camExtents),
m_DistortionParams(distortionParams),
m_Angle(angle),
@@ -91,39 +92,39 @@ DeDistort::~DeDistort()
{
}
-DRect DeDistort::getDisplayArea(const DPoint& displayExtents)
+FRect DeDistort::getDisplayArea(const glm::vec2& displayExtents)
{
- return getActiveBlobArea(DRect(DPoint(0,0), displayExtents));
+ return getActiveBlobArea(FRect(glm::vec2(0,0), displayExtents));
}
-DRect DeDistort::getActiveBlobArea(const DRect& displayROI)
+FRect DeDistort::getActiveBlobArea(const FRect& displayROI)
{
- DRect activeRect;
- activeRect.tl = transformScreenToBlob(DPoint(displayROI.tl));
- activeRect.br = transformScreenToBlob(DPoint(displayROI.br));
+ FRect activeRect;
+ activeRect.tl = transformScreenToBlob(glm::dvec2(displayROI.tl));
+ activeRect.br = transformScreenToBlob(glm::dvec2(displayROI.br));
if (activeRect.height() < 1) {
- double temp = activeRect.tl.y;
+ float temp = activeRect.tl.y;
activeRect.tl.y = activeRect.br.y;
activeRect.br.y = temp;
}
if (activeRect.width() < 1) {
- double temp = activeRect.tl.x;
+ float temp = activeRect.tl.x;
activeRect.tl.x = activeRect.br.x;
activeRect.br.x = temp;
}
return activeRect;
}
-void DeDistort::load(const DPoint& camExtents, const TrackerConfig& config)
+void DeDistort::load(const glm::vec2& camExtents, const TrackerConfig& config)
{
- m_CamExtents = camExtents;
+ m_CamExtents = glm::dvec2(camExtents);
m_DistortionParams.clear();
- m_DistortionParams.push_back(config.getDoubleParam
- ("/transform/distortionparams/@p2"));
- m_DistortionParams.push_back(config.getDoubleParam
- ("/transform/distortionparams/@p3"));
- m_TrapezoidFactor = config.getDoubleParam("/transform/trapezoid/@value");
- m_Angle = config.getDoubleParam("/transform/angle/@value");
+ m_DistortionParams.push_back(double(config.getFloatParam
+ ("/transform/distortionparams/@p2")));
+ m_DistortionParams.push_back(double(config.getFloatParam
+ ("/transform/distortionparams/@p3")));
+ m_TrapezoidFactor = config.getFloatParam("/transform/trapezoid/@value");
+ m_Angle = config.getFloatParam("/transform/angle/@value");
m_DisplayOffset = config.getPointParam("/transform/displaydisplacement/");
m_DisplayScale = config.getPointParam("/transform/displayscale/");
@@ -173,59 +174,59 @@ void DeDistort::dump() const
cerr << " DisplayScale: " << m_DisplayScale << endl;
}
-DPoint DeDistort::transformScreenToBlob(const DPoint& pt)
+glm::dvec2 DeDistort::transformScreenToBlob(const glm::dvec2& pt)
{
// scale to blob image resolution and translate 0,0 to upper left corner.
- DPoint DestPt = pt-m_DisplayOffset;
- DestPt = DPoint(DestPt.x/m_DisplayScale.x, DestPt.y/m_DisplayScale.y);
+ glm::dvec2 DestPt = pt-m_DisplayOffset;
+ DestPt = glm::dvec2(DestPt.x/m_DisplayScale.x, DestPt.y/m_DisplayScale.y);
return DestPt;
}
-DPoint DeDistort::inverse_transform_point(const DPoint& pt)
+glm::dvec2 DeDistort::inverse_transform_point(const glm::dvec2& pt)
{
- DPoint destPt = pt-m_CamExtents/2;
- destPt = DPoint(2*destPt.x/m_CamExtents.x, 2*destPt.y/m_CamExtents.y);
+ glm::dvec2 destPt = pt - m_CamExtents/2.;
+ destPt = glm::dvec2(2*destPt.x/m_CamExtents.x, 2*destPt.y/m_CamExtents.y);
destPt = inv_trapezoid(m_TrapezoidFactor, destPt);
- destPt = destPt.getRotated(-m_Angle);
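+    // The angle is converted from radians to degrees before it is passed to glm::rotate.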
+ destPt = glm::rotate(destPt, -m_Angle*180/M_PI);
destPt *= m_RescaleFactor;
destPt = inverse_undistort(m_DistortionParams, destPt);
- destPt = DPoint(destPt.x*m_CamExtents.x/2, destPt.y*m_CamExtents.y/2);
- destPt += m_CamExtents/2;
+ destPt = glm::dvec2(destPt.x*m_CamExtents.x/2, destPt.y*m_CamExtents.y/2);
+ destPt += m_CamExtents/2.;
return destPt;
}
-DPoint DeDistort::transformBlobToScreen(const DPoint& pt)
+glm::dvec2 DeDistort::transformBlobToScreen(const glm::dvec2& pt)
{
- DPoint destPt = DPoint(m_DisplayScale.x*pt.x, m_DisplayScale.y*pt.y);
+ glm::dvec2 destPt(m_DisplayScale.x*pt.x, m_DisplayScale.y*pt.y);
destPt += m_DisplayOffset;
return destPt;
}
-DPoint DeDistort::transform_point(const DPoint& pt)
+glm::dvec2 DeDistort::transform_point(const glm::dvec2& pt)
{
- DPoint destPt = pt-m_CamExtents/2;
- destPt = DPoint(2*destPt.x/m_CamExtents.x, 2*destPt.y/m_CamExtents.y);
+ glm::dvec2 destPt = pt-m_CamExtents/2.;
+ destPt = glm::dvec2(2*destPt.x/m_CamExtents.x, 2*destPt.y/m_CamExtents.y);
destPt = undistort(m_DistortionParams, destPt);
destPt /= m_RescaleFactor;
- destPt = destPt.getRotated(m_Angle);
+ destPt = glm::rotate(destPt, m_Angle*180/M_PI);
destPt = trapezoid(m_TrapezoidFactor, destPt);
- destPt = DPoint(destPt.x*m_CamExtents.x/2, destPt.y*m_CamExtents.y/2);
- destPt += m_CamExtents/2;
+ destPt = glm::dvec2(destPt.x*m_CamExtents.x/2, destPt.y*m_CamExtents.y/2);
+ destPt += m_CamExtents/2.;
return destPt;
}
-DPoint DeDistort::inv_trapezoid(const double trapezoid_factor, const DPoint& pt)
+glm::dvec2 DeDistort::inv_trapezoid(const double trapezoid_factor, const glm::dvec2& pt)
{
// stretch x coord
double yn = pt.y;
- return DPoint(pt.x/(1+yn*trapezoid_factor), pt.y);
+ return glm::dvec2(pt.x/(1+yn*trapezoid_factor), pt.y);
}
-DPoint DeDistort::trapezoid(const double trapezoid_factor, const DPoint& pt)
+glm::dvec2 DeDistort::trapezoid(const double trapezoid_factor, const glm::dvec2& pt)
{
// stretch x coord
double yn = pt.y;
- return DPoint(pt.x*(1+yn*trapezoid_factor), pt.y);
+ return glm::dvec2(pt.x*(1+yn*trapezoid_factor), pt.y);
}
double distort_map(const vector<double>& params, double r)
@@ -265,12 +266,13 @@ double inv_distort_map(const vector<double>& params, double r)
}
#define EPSILON 0.00001
-DPoint DeDistort::inverse_undistort(const vector<double> &params, const DPoint &pt)
+glm::dvec2 DeDistort::inverse_undistort(const vector<double> &params,
+ const glm::dvec2 &pt)
{
if (params.empty()) {
return pt;
}
- DPoint pt_norm = pt;
+ glm::dvec2 pt_norm = pt;
double r_d = sqrt(pt_norm.x*pt_norm.x + pt_norm.y*pt_norm.y);
double S;
if (r_d < EPSILON) {
@@ -278,17 +280,17 @@ DPoint DeDistort::inverse_undistort(const vector<double> &params, const DPoint &
} else {
S = inv_distort_map(params, r_d)/r_d;
}
- DPoint result = pt_norm*(S);
+ glm::dvec2 result = pt_norm*(S);
return result;
}
-DPoint DeDistort::undistort(const vector<double>& params, const DPoint &pt)
+glm::dvec2 DeDistort::undistort(const vector<double>& params, const glm::dvec2 &pt)
{
std::vector<double>::const_iterator v = params.begin();
if (v == params.end()) {
return pt;
}
- DPoint pt_norm = pt;
+ glm::dvec2 pt_norm = pt;
double r_d = sqrt(pt_norm.x*pt_norm.x + pt_norm.y*pt_norm.y);
double S;
if (r_d < EPSILON) {
@@ -297,7 +299,7 @@ DPoint DeDistort::undistort(const vector<double>& params, const DPoint &pt)
S = distort_map(params, r_d)/r_d;
}
- DPoint result = pt_norm*(S);
+ glm::dvec2 result = pt_norm*(S);
return result;
}
diff --git a/src/imaging/DeDistort.h b/src/imaging/DeDistort.h
index ab804c6..1c2edc5 100644
--- a/src/imaging/DeDistort.h
+++ b/src/imaging/DeDistort.h
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -25,7 +25,7 @@
#include "../api.h"
#include "CoordTransformer.h"
-#include "../base/Point.h"
+#include "../base/GLMHelper.h"
#include "../base/Rect.h"
#include <boost/shared_ptr.hpp>
@@ -43,20 +43,21 @@ class TrackerConfig;
class AVG_API DeDistort: public CoordTransformer {
public:
DeDistort();
- DeDistort(const DPoint& camExtents, const DPoint& displayExtents);
- DeDistort(const DPoint& camExtents, const std::vector<double>& distortionParams,
- double angle, double trapezoidFactor, const DPoint& displayOffset,
- const DPoint& displayScale);
+ DeDistort(const glm::vec2& camExtents, const glm::vec2& displayExtents);
+ DeDistort(const glm::vec2& camExtents,
+ const std::vector<double>& distortionParams, double angle,
+ double trapezoidFactor, const glm::dvec2& displayOffset,
+ const glm::dvec2& displayScale);
virtual ~DeDistort();
- DPoint transformBlobToScreen(const DPoint& pt);
- DPoint transformScreenToBlob(const DPoint& pt);
- virtual DPoint transform_point(const DPoint& pt);
- virtual DPoint inverse_transform_point(const DPoint& pt);
- DRect getDisplayArea(const DPoint& displayExtents);
- DRect getActiveBlobArea(const DRect& displayROI);
+ glm::dvec2 transformBlobToScreen(const glm::dvec2& pt);
+ glm::dvec2 transformScreenToBlob(const glm::dvec2& pt);
+ virtual glm::dvec2 transform_point(const glm::dvec2& pt);
+ virtual glm::dvec2 inverse_transform_point(const glm::dvec2& pt);
+ FRect getDisplayArea(const glm::vec2& displayExtents);
+ FRect getActiveBlobArea(const FRect& displayROI);
- void load(const DPoint &CameraExtents, const TrackerConfig& config);
+ void load(const glm::vec2 &CameraExtents, const TrackerConfig& config);
void save(TrackerConfig& config);
bool operator ==(const DeDistort& other) const;
@@ -65,17 +66,18 @@ class AVG_API DeDistort: public CoordTransformer {
private:
double calc_rescale();
- DPoint inverse_undistort(const std::vector<double>& params, const DPoint& pt);
- DPoint undistort(const std::vector<double>& params, const DPoint& pt);
- DPoint trapezoid(const double trapezoid_factor, const DPoint& pt);
- DPoint inv_trapezoid(const double trapezoid_factor, const DPoint& pt);
+ glm::dvec2 inverse_undistort(const std::vector<double>& params,
+ const glm::dvec2& pt);
+ glm::dvec2 undistort(const std::vector<double>& params, const glm::dvec2& pt);
+ glm::dvec2 trapezoid(const double trapezoid_factor, const glm::dvec2& pt);
+ glm::dvec2 inv_trapezoid(const double trapezoid_factor, const glm::dvec2& pt);
- DPoint m_CamExtents;
+ glm::dvec2 m_CamExtents;
std::vector<double> m_DistortionParams;
double m_Angle;
double m_TrapezoidFactor;
- DPoint m_DisplayOffset;
- DPoint m_DisplayScale;
+ glm::dvec2 m_DisplayOffset;
+ glm::dvec2 m_DisplayScale;
double m_RescaleFactor;
};
diff --git a/src/imaging/FWCamera.cpp b/src/imaging/FWCamera.cpp
index d009b97..1fc62ff 100644
--- a/src/imaging/FWCamera.cpp
+++ b/src/imaging/FWCamera.cpp
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -37,16 +37,14 @@ namespace avg {
using namespace std;
-FWCamera::FWCamera(uint64_t guid, int unit, bool bFW800, IntPoint size,
- PixelFormat camPF, PixelFormat destPF, double frameRate)
- : Camera(camPF, destPF),
- m_Size(size),
- m_FrameRate(frameRate),
+FWCamera::FWCamera(unsigned long long guid, int unit, bool bFW800, IntPoint size,
+ PixelFormat camPF, PixelFormat destPF, float frameRate)
+ : Camera(camPF, destPF, size, frameRate),
m_WhitebalanceU(-1),
m_WhitebalanceV(-1)
{
#ifdef AVG_ENABLE_1394_2
- m_FrameRateConstant = getFrameRateConst(m_FrameRate);
+ m_FrameRateConstant = getFrameRateConst(getFrameRate());
if (camPF == I16) {
throw Exception(AVG_ERR_CAMERA_NONFATAL,
"I16 pixel format is not supported for firewire cameras.");
@@ -62,13 +60,12 @@ FWCamera::FWCamera(uint64_t guid, int unit, bool bFW800, IntPoint size,
int err = dc1394_camera_enumerate(m_pDC1394, &pCameraList);
if (err != DC1394_SUCCESS) {
- AVG_TRACE(Logger::ERROR, "Unable to look for cameras");
+ AVG_LOG_ERROR("Unable to look for cameras");
#ifdef linux
- AVG_TRACE(Logger::ERROR, "Please check");
- AVG_TRACE(Logger::ERROR,
- " - if the kernel modules `ieee1394',`raw1394' and `ohci1394' are loaded");
- AVG_TRACE(Logger::ERROR,
- " - if you have read/write access to /dev/raw1394.");
+ AVG_LOG_ERROR("Please check");
+        AVG_LOG_ERROR(" - if the kernel modules `ieee1394',`raw1394' and "
+                "`ohci1394' are loaded");
+ AVG_LOG_ERROR(" - if you have read/write access to /dev/raw1394.");
#endif
dc1394_free(m_pDC1394);
throw Exception(AVG_ERR_CAMERA_NONFATAL,"Firewire failure");
@@ -87,7 +84,7 @@ FWCamera::FWCamera(uint64_t guid, int unit, bool bFW800, IntPoint size,
}
}
if (id_to_use == -1) {
- AVG_TRACE(Logger::WARNING, "Firewire GUID=" << hex << guid << dec
+ AVG_LOG_WARNING("Firewire GUID=" << hex << guid << dec
<< " requested but not found on bus. Using first camera");
id_to_use = 0;
}
@@ -129,14 +126,14 @@ FWCamera::FWCamera(uint64_t guid, int unit, bool bFW800, IntPoint size,
}
}
if (!bFrameRateSupported) {
- AVG_TRACE(Logger::ERROR, "Camera does not support framerate " << m_FrameRate
+ AVG_LOG_ERROR("Camera does not support framerate " << getFrameRate()
<< " in the current video mode.");
dc1394_capture_stop(m_pCamera);
dc1394_video_set_transmission(m_pCamera, DC1394_OFF);
dc1394_camera_free(m_pCamera);
dc1394_free(m_pDC1394);
throw Exception(AVG_ERR_CAMERA_NONFATAL,
- string("Camera does not support framerate ")+toString(m_FrameRate)+
+ string("Camera does not support framerate ")+toString(getFrameRate())+
" in the current video mode.");
}
@@ -145,10 +142,9 @@ FWCamera::FWCamera(uint64_t guid, int unit, bool bFW800, IntPoint size,
err = dc1394_capture_setup(m_pCamera,8, DC1394_CAPTURE_FLAGS_DEFAULT);
if (err != DC1394_SUCCESS) {
- AVG_TRACE(Logger::ERROR, "Unable to setup camera. Make sure that");
- AVG_TRACE(Logger::ERROR, "video mode and framerate (" <<
- m_FrameRate << ") are");
- AVG_TRACE(Logger::ERROR, "supported by your camera.");
+ AVG_LOG_ERROR("Unable to setup camera. Make sure that");
+ AVG_LOG_ERROR("video mode and framerate (" << getFrameRate() << ") are");
+ AVG_LOG_ERROR("supported by your camera.");
dc1394_capture_stop(m_pCamera);
dc1394_video_set_transmission(m_pCamera, DC1394_OFF);
dc1394_camera_free(m_pCamera);
@@ -168,7 +164,7 @@ FWCamera::~FWCamera()
dc1394_camera_free(m_pCamera);
dc1394_free(m_pDC1394);
#endif
- AVG_TRACE(Logger::CONFIG, "Firewire camera closed.");
+ AVG_TRACE(Logger::category::CONFIG, Logger::severity::INFO, "Firewire camera closed.");
}
void FWCamera::startCapture()
@@ -197,7 +193,7 @@ void FWCamera::startCapture()
DC1394_FEATURE_MODE_MANUAL);
dc1394_feature_set_power(m_pCamera, DC1394_FEATURE_EXPOSURE, DC1394_OFF);
- AVG_TRACE(Logger::CONFIG, "Firewire camera opened.");
+ AVG_TRACE(Logger::category::CONFIG, Logger::severity::INFO, "Firewire camera opened.");
for (FeatureMap::iterator it=m_Features.begin(); it != m_Features.end(); it++) {
setFeature(it->first, it->second, true);
}
@@ -205,11 +201,11 @@ void FWCamera::startCapture()
if (getCamPF() == BAYER8) {
if (strcmp(m_pCamera->model, "DFx 31BF03") == 0) {
- AVG_TRACE(Logger::CONFIG,
+ AVG_TRACE(Logger::category::CONFIG, Logger::severity::INFO,
"Applying bayer pattern fixup for IS DFx31BF03 camera");
setCamPF(BAYER8_GRBG);
} else if (strcmp(m_pCamera->vendor, "Point Grey Research") == 0) {
- AVG_TRACE(Logger::CONFIG,
+ AVG_TRACE(Logger::category::CONFIG, Logger::severity::INFO,
"Applying bayer pattern fixup for PointGrey cameras");
enablePtGreyBayer();
}
@@ -217,18 +213,6 @@ void FWCamera::startCapture()
#endif
}
-IntPoint FWCamera::getImgSize()
-{
-#ifdef AVG_ENABLE_1394_2
- return m_Size;
-#else
- return IntPoint(0, 0);
-#endif
-}
-
-
-static ProfilingZoneID CameraConvertProfilingZone("FW Camera format conversion");
-
BitmapPtr FWCamera::getImage(bool bWait)
{
#ifdef AVG_ENABLE_1394_2
@@ -248,12 +232,12 @@ BitmapPtr FWCamera::getImage(bool bWait)
if (bGotFrame) {
int lineLen;
if (getCamPF() == YCbCr411) {
- lineLen = m_Size.x*1.5;
+ lineLen = getImgSize().x*1.5;
} else {
- lineLen = m_Size.x*getBytesPerPixel(getCamPF());
+ lineLen = getImgSize().x*getBytesPerPixel(getCamPF());
}
- BitmapPtr pCamBmp(new Bitmap(m_Size, getCamPF(), pCaptureBuffer, lineLen, false,
- "TempCameraBmp"));
+ BitmapPtr pCamBmp(new Bitmap(getImgSize(), getCamPF(), pCaptureBuffer, lineLen,
+ false, "TempCameraBmp"));
BitmapPtr pDestBmp = convertCamFrameToDestPF(pCamBmp);
// cerr << "CamBmp: " << pCamBmp->getPixelFormat() << ", DestBmp: "
// << pDestBmp->getPixelFormat() << endl;
@@ -290,11 +274,6 @@ const std::string& FWCamera::getDriverName() const
return sDriverName;
}
-double FWCamera::getFrameRate() const
-{
- return m_FrameRate;
-}
-
int FWCamera::getFeature(CameraFeature feature) const
{
#ifdef AVG_ENABLE_1394_2
@@ -336,8 +315,7 @@ void FWCamera::setFeature(CameraFeature feature, int value, bool bIgnoreOldValue
try {
setStrobeDuration(value);
} catch (Exception& e) {
- AVG_TRACE(Logger::WARNING,
- string("Camera: Setting strobe duration failed. ") +
+ AVG_LOG_WARNING(string("Camera: Setting strobe duration failed. ") +
e.getStr());
}
} else {
@@ -357,7 +335,7 @@ void FWCamera::setFeatureOneShot(CameraFeature feature)
dc1394error_t err = dc1394_feature_set_mode(m_pCamera, featureID,
DC1394_FEATURE_MODE_ONE_PUSH_AUTO);
if (err != DC1394_SUCCESS) {
- AVG_TRACE(Logger::WARNING, "Camera: Unable to set one-shot for "
+ AVG_LOG_WARNING("Camera: Unable to set one-shot for "
<< cameraFeatureToString(feature) << ". Error was " << err);
}
#endif
@@ -396,8 +374,7 @@ void FWCamera::setWhitebalance(int u, int v, bool bIgnoreOldValue)
err = dc1394_feature_whitebalance_set_value(m_pCamera, u, v);
}
if (err != DC1394_SUCCESS) {
- AVG_TRACE(Logger::WARNING,
- "Camera: Unable to set whitebalance. Error was " << err);
+ AVG_LOG_WARNING("Camera: Unable to set whitebalance. Error was " << err);
}
}
}
@@ -417,8 +394,7 @@ void FWCamera::setFeature(dc1394feature_t feature, int value)
err = dc1394_feature_set_value(m_pCamera, feature, value);
}
if (err != DC1394_SUCCESS) {
- AVG_TRACE(Logger::WARNING, "Camera: Unable to set " << feature <<
- ". Error was " << err);
+ AVG_LOG_WARNING("Camera: Unable to set " << feature << ". Error was " << err);
}
/*
dc1394feature_info_t featureInfo;
@@ -450,16 +426,17 @@ void FWCamera::setStrobeDuration(int microsecs)
} else {
        // Weird calculations: IIDC register values for time are non-linear. Translate
// the method parameter in microseconds to appropriate register values.
- double targetMillisecs = microsecs/1000.;
- const double realTimes[] = {1,2,4,6,8,12,16,24,32,48,63.93};
+ float targetMillisecs = microsecs/1000.f;
+ const float realTimes[] = {1,2,4,6,8,12,16,24,32,48,63.93};
const uint32_t regValues[] =
{0x400, 0x600, 0x800, 0x900, 0xA00, 0xB00, 0xC00, 0xD00,
0xE00, 0xF00, 0xFFF};
int len = sizeof(regValues)/sizeof(*regValues);
AVG_ASSERT(len == sizeof(realTimes)/sizeof(*realTimes));
int i;
- for (i = 1; realTimes[i] < targetMillisecs; ++i);
- double ratio = (targetMillisecs-realTimes[i])/(realTimes[i-1]-realTimes[i]);
+ for (i = 1; realTimes[i] < targetMillisecs; ++i) {
+ }
+ float ratio = (targetMillisecs-realTimes[i])/(realTimes[i-1]-realTimes[i]);
durationRegValue = ratio*regValues[i-1]+(1-ratio)*regValues[i];
}
@@ -479,8 +456,7 @@ void FWCamera::getWhitebalance(int* pU, int* pV) const
dc1394error_t err = dc1394_feature_whitebalance_get_value(m_pCamera,
(uint32_t*)pU, (uint32_t*)pV);
if (err != DC1394_SUCCESS) {
- AVG_TRACE(Logger::WARNING,
- "Camera: Unable to get whitebalance setting. Error was " << err);
+ AVG_LOG_WARNING("Camera: Unable to get whitebalance setting. Error was " << err);
}
#endif
}
@@ -508,45 +484,164 @@ void FWCamera::enablePtGreyBayer()
#endif
}
-void FWCamera::dumpCameras()
+int FWCamera::countCameras()
{
#ifdef AVG_ENABLE_1394_2
dc1394_t* pDC1394 = dc1394_new();
if (pDC1394 == 0) {
- return;
+ return 0;
}
dc1394camera_list_t * pCameraList;
int err=dc1394_camera_enumerate(pDC1394, &pCameraList);
if (err == DC1394_SUCCESS) {
- if (pCameraList->num != 0) {
- cerr << "Firewire cameras: " << endl;
- for (unsigned i=0; i<pCameraList->num;++i) {
- dc1394camera_id_t id = pCameraList->ids[i];
- dc1394camera_t * pCamera = dc1394_camera_new_unit(pDC1394, id.guid,
- id.unit);
- if (pCamera) {
- dc1394_camera_print_info(pCamera, stderr);
- dumpCameraInfo(pCamera);
- dc1394_camera_free(pCamera);
- cerr << endl;
- }
- }
+        int numCameras = pCameraList->num;
+        dc1394_camera_free_list(pCameraList);
+        dc1394_free(pDC1394);
+        return numCameras;
+    }
+    dc1394_free(pDC1394);
+#endif
+    return 0;
+}
+
+CameraInfo* FWCamera::getCameraInfos(int deviceNumber)
+{
+#ifdef AVG_ENABLE_1394_2
+ dc1394_t* pDC1394 = dc1394_new();
+ if (pDC1394 == 0) {
+ AVG_ASSERT(false);
+ return NULL;
+ }
+ dc1394camera_list_t * pCameraList;
+ int err=dc1394_camera_enumerate(pDC1394, &pCameraList);
+ if (err != DC1394_SUCCESS) {
+ AVG_ASSERT(false);
+ return NULL;
+ }
+ if (pCameraList->num != 0) {
+ dc1394camera_id_t id = pCameraList->ids[deviceNumber];
+ dc1394camera_t * pCamera = dc1394_camera_new_unit(pDC1394, id.guid,
+ id.unit);
+ if (pCamera) {
+ stringstream deviceID;
+            deviceID << hex << id.guid;
+ CameraInfo* camInfo = new CameraInfo("Firewire", deviceID.str());
+
+ getCameraControls(pCamera, camInfo);
+ getCameraImageFormats(pCamera, camInfo);
+
+ dc1394_camera_free(pCamera);
+ dc1394_camera_free_list(pCameraList);
+ dc1394_free(pDC1394);
+ return camInfo;
}
- dc1394_camera_free_list(pCameraList);
}
- dc1394_free(pDC1394);
#endif
+ return NULL;
}
#ifdef AVG_ENABLE_1394_2
-void FWCamera::dumpCameraInfo(dc1394camera_t * pCamera)
+void FWCamera::getCameraImageFormats(dc1394camera_t* pCamera, CameraInfo* camInfo)
{
- dc1394error_t err;
- dc1394featureset_t FeatureSet;
- err = dc1394_feature_get_all(pCamera, &FeatureSet);
- AVG_ASSERT(err == DC1394_SUCCESS);
- // TODO: do this using AVG_TRACE
- dc1394_feature_print_all(&FeatureSet, stderr);
+ dc1394video_modes_t videoModes;
+ dc1394framerates_t framerates;
+ dc1394error_t err = dc1394_video_get_supported_modes(pCamera, &videoModes);
+ if (err != DC1394_SUCCESS) {
+ AVG_ASSERT(false);
+ return;
+ }
+ for (unsigned i = 0; i < videoModes.num; i++) {
+ //Covers only libavg supported formats, other capabilities are ignored
+ if (videoModes.modes[i] >= DC1394_VIDEO_MODE_320x240_YUV422
+ && videoModes.modes[i] <= DC1394_VIDEO_MODE_1600x1200_MONO16){
+ PixelFormat pixFormat = getPFFromVideoMode(videoModes.modes[i]);
+ IntPoint size = getFrameSizeFromVideoMode(videoModes.modes[i]);
+ FrameratesVector framerateList;
+ err = dc1394_video_get_supported_framerates(pCamera, videoModes.modes[i],
+ &framerates);
+ if (err != DC1394_SUCCESS) {
+ AVG_LOG_WARNING("Camera: No framerates. Error was: " << err);
+ } else {
+                for (unsigned j = 0; j < framerates.num; j++) {
+ float rate = framerateToFloat(framerates.framerates[j]);
+ framerateList.push_back(rate);
+ }
+ }
+ CameraImageFormat format = CameraImageFormat(size,pixFormat,framerateList);
+ camInfo->addImageFormat(format);
+ }
+ }
+}
+
+void FWCamera::getCameraControls(dc1394camera_t* pCamera, CameraInfo* camInfo)
+{
+ dc1394featureset_t featureSet;
+ int err = dc1394_feature_get_all(pCamera, &featureSet);
+ if (err != DC1394_SUCCESS) {
+ AVG_ASSERT(false);
+ return;
+ }
+
+ for (int i = DC1394_FEATURE_MIN; i <= DC1394_FEATURE_MAX; i++) {
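+        // featureSet.feature[] is zero-based, while dc1394 feature IDs start at DC1394_FEATURE_MIN.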
+ dc1394feature_info_t featureInfo = featureSet.feature[i - DC1394_FEATURE_MIN];
+
+        dc1394bool_t bPresent;
+        dc1394_feature_is_present(pCamera, featureInfo.id, &bPresent);
+        if (bPresent != DC1394_TRUE) {
+ continue;
+ }
+
+ uint32_t min = -1;
+ uint32_t max = -1;
+ uint32_t actValue = -1;
+
+        //TODO: 428 (TRIGGER) doesn't have min/max
+ err = dc1394_feature_get_boundaries(pCamera, featureInfo.id, &min, &max);
+ if (err != DC1394_SUCCESS) {
+ continue;
+ }
+
+ switch(featureInfo.id) {
+ case DC1394_FEATURE_TEMPERATURE: {
+ uint32_t targetTemp = -1;
+ uint32_t currentTemp = -1;
+ err = dc1394_feature_temperature_get_value(pCamera,&targetTemp,&currentTemp);
+ if (err != DC1394_SUCCESS) {
+ continue;
+ }
+ actValue = currentTemp;
+ break;
+ }
+ //TODO: Think about a way to get this information into CameraInfo
+ case DC1394_FEATURE_WHITE_BALANCE: {
+ uint32_t ubValue = -1;
+ uint32_t vrValue = -1;
+ err = dc1394_feature_whitebalance_get_value(pCamera,&ubValue,&vrValue);
+ if (err != DC1394_SUCCESS) {
+ continue;
+ }
+ //actValue = ubValue; //vrValue;
+ //cout <<"UBlue Value: " << ubValue << " VRed Value: " << vrValue << endl;
+ break;
+ }
+ default: {
+ err = dc1394_feature_get_value(pCamera,featureInfo.id, &actValue);
+ if (err != DC1394_SUCCESS) {
+ continue;
+ }
+ break;
+ }
+ }
+ CameraFeature enumFeature = featureIDToEnum(featureInfo.id);
+ if (enumFeature == CAM_FEATURE_UNSUPPORTED) {
+ continue;
+ }
+ std::string controlName = cameraFeatureToString(enumFeature);
+
+ CameraControl control = CameraControl(controlName,
+ (int) min,
+ (int) max,
+                (int) actValue); //TODO: isn't really a default value!?
+ camInfo->addControl(control);
+ }
}
#endif
diff --git a/src/imaging/FWCamera.h b/src/imaging/FWCamera.h
index a5af3ad..c7badc0 100644
--- a/src/imaging/FWCamera.h
+++ b/src/imaging/FWCamera.h
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -29,7 +29,7 @@
#include "../graphics/Bitmap.h"
#include "../graphics/Pixel24.h"
-#include "../base/Point.h"
+#include "../base/GLMHelper.h"
#ifdef AVG_ENABLE_1394_2
#include <dc1394/control.h>
@@ -38,9 +38,6 @@
#ifndef AVG_ENABLE_1394_2
typedef unsigned int dc1394feature_t;
#endif
-#ifndef uint64_t
-#define uint64_t unsigned long long
-#endif
#include <string>
#include <map>
@@ -49,17 +46,15 @@ namespace avg {
class AVG_API FWCamera: public Camera {
public:
- FWCamera(uint64_t guid, int unit, bool bFW800, IntPoint size, PixelFormat camPF,
- PixelFormat destPF, double frameRate);
+ FWCamera(unsigned long long guid, int unit, bool bFW800, IntPoint size,
+ PixelFormat camPF, PixelFormat destPF, float frameRate);
virtual ~FWCamera();
virtual void startCapture();
- virtual IntPoint getImgSize();
virtual BitmapPtr getImage(bool bWait);
virtual const std::string& getDevice() const;
virtual const std::string& getDriverName() const;
- virtual double getFrameRate() const;
virtual int getFeature(CameraFeature feature) const;
bool hasFeature(CameraFeature feature);
@@ -69,7 +64,8 @@ public:
virtual int getWhitebalanceV() const;
virtual void setWhitebalance(int u, int v, bool bIgnoreOldValue=false);
- static void dumpCameras();
+ static int countCameras();
+ static CameraInfo* getCameraInfos(int deviceNumber);
static void resetBus();
private:
@@ -78,15 +74,14 @@ private:
void getWhitebalance(int* pU, int* pV) const;
void enablePtGreyBayer();
- IntPoint m_Size;
- double m_FrameRate;
-
#ifdef AVG_ENABLE_1394_2
+ static void getCameraImageFormats(dc1394camera_t* pCamera, CameraInfo* camInfo);
+ static void getCameraControls(dc1394camera_t* pCamera, CameraInfo* camInfo);
+
dc1394_t * m_pDC1394;
dc1394camera_t * m_pCamera;
dc1394framerate_t m_FrameRateConstant;
- dc1394video_mode_t m_Mode;
- static void dumpCameraInfo(dc1394camera_t * pCamera);
+ dc1394video_mode_t m_Mode;
#endif
FeatureMap m_Features;
diff --git a/src/imaging/FWCameraUtils.cpp b/src/imaging/FWCameraUtils.cpp
index 5e5a8e1..e8f37b8 100644
--- a/src/imaging/FWCameraUtils.cpp
+++ b/src/imaging/FWCameraUtils.cpp
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -111,13 +111,13 @@ dc1394video_mode_t getCamMode(IntPoint size, PixelFormat pf)
"), "+getPixelFormatString(pf)+"\" for camera mode.");
}
-dc1394framerate_t getFrameRateConst(double frameRate)
+dc1394framerate_t getFrameRateConst(float frameRate)
{
- if (frameRate == 1.875) {
+ if (frameRate == 1.875f) {
return DC1394_FRAMERATE_1_875;
- } else if (frameRate == 3.75) {
+ } else if (frameRate == 3.75f) {
return DC1394_FRAMERATE_3_75;
- } else if (frameRate == 7.5) {
+ } else if (frameRate == 7.5f) {
return DC1394_FRAMERATE_7_5;
} else if (frameRate == 15) {
return DC1394_FRAMERATE_15;
@@ -134,6 +134,55 @@ dc1394framerate_t getFrameRateConst(double frameRate)
+toString(frameRate)+" for camera framerate.");
}
}
+CameraFeature featureIDToEnum(dc1394feature_t feature)
+{
+ switch (feature) {
+ case DC1394_FEATURE_BRIGHTNESS:
+ return CAM_FEATURE_BRIGHTNESS;
+ case DC1394_FEATURE_EXPOSURE:
+ return CAM_FEATURE_EXPOSURE;
+ case DC1394_FEATURE_SHARPNESS:
+ return CAM_FEATURE_SHARPNESS;
+ case DC1394_FEATURE_WHITE_BALANCE:
+ return CAM_FEATURE_WHITE_BALANCE;
+ case DC1394_FEATURE_HUE:
+ return CAM_FEATURE_HUE;
+ case DC1394_FEATURE_SATURATION:
+ return CAM_FEATURE_SATURATION;
+ case DC1394_FEATURE_GAMMA:
+ return CAM_FEATURE_GAMMA;
+ case DC1394_FEATURE_SHUTTER:
+ return CAM_FEATURE_SHUTTER;
+ case DC1394_FEATURE_GAIN:
+ return CAM_FEATURE_GAIN;
+ case DC1394_FEATURE_IRIS:
+ return CAM_FEATURE_IRIS;
+ case DC1394_FEATURE_FOCUS:
+ return CAM_FEATURE_FOCUS;
+ case DC1394_FEATURE_TEMPERATURE:
+ return CAM_FEATURE_TEMPERATURE;
+ case DC1394_FEATURE_TRIGGER:
+ return CAM_FEATURE_TRIGGER;
+ case DC1394_FEATURE_TRIGGER_DELAY:
+ return CAM_FEATURE_TRIGGER_DELAY;
+ case DC1394_FEATURE_WHITE_SHADING:
+ return CAM_FEATURE_WHITE_SHADING;
+ case DC1394_FEATURE_ZOOM:
+ return CAM_FEATURE_ZOOM;
+ case DC1394_FEATURE_PAN:
+ return CAM_FEATURE_PAN;
+ case DC1394_FEATURE_TILT:
+ return CAM_FEATURE_TILT;
+ case DC1394_FEATURE_OPTICAL_FILTER:
+ return CAM_FEATURE_OPTICAL_FILTER;
+ case DC1394_FEATURE_CAPTURE_SIZE:
+ return CAM_FEATURE_CAPTURE_SIZE;
+ case DC1394_FEATURE_CAPTURE_QUALITY:
+ return CAM_FEATURE_CAPTURE_QUALITY;
+ default:
+ return CAM_FEATURE_UNSUPPORTED;
+ }
+}
dc1394feature_t getFeatureID(CameraFeature feature)
{
@@ -164,6 +213,10 @@ dc1394feature_t getFeatureID(CameraFeature feature)
return DC1394_FEATURE_TEMPERATURE;
case CAM_FEATURE_TRIGGER:
return DC1394_FEATURE_TRIGGER;
+ case CAM_FEATURE_TRIGGER_DELAY:
+ return DC1394_FEATURE_TRIGGER_DELAY;
+ case CAM_FEATURE_WHITE_SHADING:
+ return DC1394_FEATURE_WHITE_SHADING;
case CAM_FEATURE_ZOOM:
return DC1394_FEATURE_ZOOM;
case CAM_FEATURE_PAN:
@@ -182,4 +235,132 @@ dc1394feature_t getFeatureID(CameraFeature feature)
}
}
+IntPoint getFrameSizeFromVideoMode(dc1394video_mode_t mode)
+{
+ IntPoint point = IntPoint();
+ point.x = -1;
+ point.y = -1;
+ switch (mode) {
+ case DC1394_VIDEO_MODE_160x120_YUV444: {
+ point.x = 160;
+ point.y = 120;
+ return point;
+ }
+ case DC1394_VIDEO_MODE_320x240_YUV422: {
+ point.x = 320;
+ point.y = 240;
+ return point;
+ }
+ case DC1394_VIDEO_MODE_640x480_YUV411:
+ case DC1394_VIDEO_MODE_640x480_YUV422:
+ case DC1394_VIDEO_MODE_640x480_RGB8:
+ case DC1394_VIDEO_MODE_640x480_MONO8:
+ case DC1394_VIDEO_MODE_640x480_MONO16: {
+ point.x = 640;
+ point.y = 480;
+ return point;
+ }
+ case DC1394_VIDEO_MODE_800x600_YUV422:
+ case DC1394_VIDEO_MODE_800x600_RGB8:
+ case DC1394_VIDEO_MODE_800x600_MONO8:
+ case DC1394_VIDEO_MODE_800x600_MONO16: {
+ point.x = 800;
+ point.y = 600;
+ return point;
+ }
+ case DC1394_VIDEO_MODE_1024x768_YUV422:
+ case DC1394_VIDEO_MODE_1024x768_RGB8:
+ case DC1394_VIDEO_MODE_1024x768_MONO8:
+ case DC1394_VIDEO_MODE_1024x768_MONO16: {
+ point.x = 1024;
+ point.y = 768;
+ return point;
+ }
+ case DC1394_VIDEO_MODE_1280x960_YUV422:
+ case DC1394_VIDEO_MODE_1280x960_RGB8:
+ case DC1394_VIDEO_MODE_1280x960_MONO8:
+ case DC1394_VIDEO_MODE_1280x960_MONO16: {
+ point.x = 1280;
+ point.y = 960;
+ return point;
+ }
+ case DC1394_VIDEO_MODE_1600x1200_YUV422:
+ case DC1394_VIDEO_MODE_1600x1200_RGB8:
+ case DC1394_VIDEO_MODE_1600x1200_MONO8:
+ case DC1394_VIDEO_MODE_1600x1200_MONO16: {
+ point.x = 1600;
+ point.y = 1200;
+ return point;
+ }
+ default:
+ AVG_ASSERT(false);
+ return point;
+ }
+}
+
+PixelFormat getPFFromVideoMode(dc1394video_mode_t mode)
+{
+ switch (mode) {
+ case DC1394_VIDEO_MODE_640x480_YUV411:
+ return YCbCr411;
+ case DC1394_VIDEO_MODE_320x240_YUV422:
+ case DC1394_VIDEO_MODE_640x480_YUV422:
+ case DC1394_VIDEO_MODE_800x600_YUV422:
+ case DC1394_VIDEO_MODE_1024x768_YUV422:
+ case DC1394_VIDEO_MODE_1280x960_YUV422:
+ case DC1394_VIDEO_MODE_1600x1200_YUV422:
+ return YCbCr422;
+ case DC1394_VIDEO_MODE_640x480_RGB8:
+ case DC1394_VIDEO_MODE_800x600_RGB8:
+ case DC1394_VIDEO_MODE_1024x768_RGB8:
+ case DC1394_VIDEO_MODE_1280x960_RGB8:
+ case DC1394_VIDEO_MODE_1600x1200_RGB8:
+ return R8G8B8;
+ case DC1394_VIDEO_MODE_640x480_MONO8:
+ case DC1394_VIDEO_MODE_800x600_MONO8:
+ case DC1394_VIDEO_MODE_1024x768_MONO8:
+ case DC1394_VIDEO_MODE_1280x960_MONO8:
+ case DC1394_VIDEO_MODE_1600x1200_MONO8:
+ return I8;
+ case DC1394_VIDEO_MODE_640x480_MONO16:
+ case DC1394_VIDEO_MODE_800x600_MONO16:
+ case DC1394_VIDEO_MODE_1024x768_MONO16:
+ case DC1394_VIDEO_MODE_1280x960_MONO16:
+ case DC1394_VIDEO_MODE_1600x1200_MONO16:
+ return I16;
+ default:
+ AVG_ASSERT(false);
+ return PixelFormat(0);
+ }
+}
+
+float framerateToFloat(dc1394framerate_t framerate)
+{
+ switch (framerate) {
+ case DC1394_FRAMERATE_1_875:
+ return 1.875;
+ case DC1394_FRAMERATE_3_75:
+ return 3.75;
+ case DC1394_FRAMERATE_7_5:
+ return 7.5;
+ case DC1394_FRAMERATE_15:
+ return 15;
+ case DC1394_FRAMERATE_30:
+ return 30;
+ case DC1394_FRAMERATE_60:
+ return 60;
+ case DC1394_FRAMERATE_120:
+ return 120;
+ case DC1394_FRAMERATE_240:
+ return 240;
+ default:{
+ AVG_ASSERT(false);
+ return -1;
+ }
+ }
+}
+
+
}
diff --git a/src/imaging/FWCameraUtils.h b/src/imaging/FWCameraUtils.h
index 634838d..668f5bb 100644
--- a/src/imaging/FWCameraUtils.h
+++ b/src/imaging/FWCameraUtils.h
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -27,7 +27,7 @@
#include "../avgconfigwrapper.h"
-#include "../base/Point.h"
+#include "../base/GLMHelper.h"
#include <dc1394/control.h>
@@ -36,8 +36,12 @@
namespace avg {
dc1394video_mode_t getCamMode(IntPoint size, PixelFormat pf);
-dc1394framerate_t getFrameRateConst(double frameRate);
+dc1394framerate_t getFrameRateConst(float frameRate);
+CameraFeature featureIDToEnum(dc1394feature_t feature);
dc1394feature_t getFeatureID(CameraFeature feature);
+IntPoint getFrameSizeFromVideoMode(dc1394video_mode_t mode);
+PixelFormat getPFFromVideoMode(dc1394video_mode_t mode);
+float framerateToFloat(dc1394framerate_t framerate);
}
diff --git a/src/imaging/FakeCamera.cpp b/src/imaging/FakeCamera.cpp
index ae966f5..ffb1cb3 100644
--- a/src/imaging/FakeCamera.cpp
+++ b/src/imaging/FakeCamera.cpp
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -25,6 +25,7 @@
#include "../graphics/Filterfill.h"
#include "../graphics/Filterfillrect.h"
#include "../graphics/Filtergrayscale.h"
+#include "../graphics/BitmapLoader.h"
#include "../base/TimeSource.h"
#include "../base/Exception.h"
@@ -36,27 +37,25 @@ using namespace std;
namespace avg {
FakeCamera::FakeCamera(PixelFormat camPF, PixelFormat destPF)
- : Camera(camPF, destPF),
+ : Camera(camPF, destPF, IntPoint(640, 480), 60),
m_pBmpQ(new std::queue<BitmapPtr>()),
m_bIsOpen(false)
{
- m_ImgSize = IntPoint(640, 480);
}
FakeCamera::FakeCamera(std::vector<std::string>& pictures)
- : Camera(I8, I8),
+ : Camera(I8, I8, IntPoint(640,480), 60),
m_pBmpQ(new std::queue<BitmapPtr>()),
m_bIsOpen(false)
{
- m_ImgSize = IntPoint(640, 480);
for (vector<string>::iterator it = pictures.begin(); it != pictures.end(); ++it) {
try {
- BitmapPtr pBmp (new Bitmap(*it));
+ BitmapPtr pBmp = loadBitmap(*it);
FilterGrayscale().applyInPlace(pBmp);
- m_ImgSize = pBmp->getSize();
+ setImgSize(pBmp->getSize());
m_pBmpQ->push(pBmp);
} catch (Exception& ex) {
- AVG_TRACE(Logger::ERROR, ex.getStr());
+ AVG_LOG_ERROR(ex.getStr());
throw;
}
}
@@ -77,11 +76,6 @@ void FakeCamera::close()
}
-IntPoint FakeCamera::getImgSize()
-{
- return m_ImgSize;
-}
-
BitmapPtr FakeCamera::getImage(bool bWait)
{
if (bWait) {
@@ -114,11 +108,6 @@ const std::string& FakeCamera::getDriverName() const
return sDriverName;
}
-double FakeCamera::getFrameRate() const
-{
- return 60;
-}
-
const string& FakeCamera::getMode() const
{
static string sMode = "FakeCamera";
diff --git a/src/imaging/FakeCamera.h b/src/imaging/FakeCamera.h
index 7f444cc..4128043 100644
--- a/src/imaging/FakeCamera.h
+++ b/src/imaging/FakeCamera.h
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -43,13 +43,11 @@ public:
virtual void open();
virtual void close();
- virtual IntPoint getImgSize();
virtual BitmapPtr getImage(bool bWait);
virtual bool isCameraAvailable();
virtual const std::string& getDevice() const;
virtual const std::string& getDriverName() const;
- virtual double getFrameRate() const;
virtual const std::string& getMode() const;
virtual int getFeature(CameraFeature feature) const;
@@ -60,7 +58,6 @@ public:
virtual void setWhitebalance(int u, int v, bool bIgnoreOldValue=false);
private:
- IntPoint m_ImgSize;
BitmapQueuePtr m_pBmpQ;
bool m_bIsOpen;
};
diff --git a/src/imaging/FilterClearBorder.cpp b/src/imaging/FilterClearBorder.cpp
index 549a7bc..dba7c8b 100644
--- a/src/imaging/FilterClearBorder.cpp
+++ b/src/imaging/FilterClearBorder.cpp
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
diff --git a/src/imaging/FilterClearBorder.h b/src/imaging/FilterClearBorder.h
index 869d784..80c700b 100644
--- a/src/imaging/FilterClearBorder.h
+++ b/src/imaging/FilterClearBorder.h
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -27,7 +27,7 @@
#include "../graphics/Filter.h"
-#include "../base/Point.h"
+#include "../base/GLMHelper.h"
#include "../base/Rect.h"
#include <boost/shared_ptr.hpp>
diff --git a/src/imaging/FilterDistortion.cpp b/src/imaging/FilterDistortion.cpp
index 441dcd3..08b55bb 100644
--- a/src/imaging/FilterDistortion.cpp
+++ b/src/imaging/FilterDistortion.cpp
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -41,7 +41,7 @@ FilterDistortion::FilterDistortion(const IntPoint& srcSize,
m_pMap = new IntPoint[m_SrcSize.y*m_SrcSize.x];
for (int y = 0; y < m_SrcSize.y; ++y) {
for (int x = 0; x < m_SrcSize.x; ++x) {
- DPoint tmp = m_pTransformer->inverse_transform_point(DPoint(int(x),int(y)));
+ glm::dvec2 tmp = m_pTransformer->inverse_transform_point(glm::dvec2(x,y));
IntPoint tmp2(int(tmp.x+0.5),int(tmp.y+0.5));
if (tmp2.x < m_SrcSize.x && tmp2.y < m_SrcSize.y &&
tmp2.x >= 0 && tmp2.y >= 0)
diff --git a/src/imaging/FilterDistortion.h b/src/imaging/FilterDistortion.h
index a050796..25de14e 100644
--- a/src/imaging/FilterDistortion.h
+++ b/src/imaging/FilterDistortion.h
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -29,7 +29,7 @@
#include "../graphics/Filter.h"
-#include "../base/Point.h"
+#include "../base/GLMHelper.h"
#include "../base/Rect.h"
#include <boost/shared_ptr.hpp>
diff --git a/src/imaging/FilterWipeBorder.cpp b/src/imaging/FilterWipeBorder.cpp
index cfdef40..dd09068 100644
--- a/src/imaging/FilterWipeBorder.cpp
+++ b/src/imaging/FilterWipeBorder.cpp
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
diff --git a/src/imaging/FilterWipeBorder.h b/src/imaging/FilterWipeBorder.h
index 91a0505..7804560 100644
--- a/src/imaging/FilterWipeBorder.h
+++ b/src/imaging/FilterWipeBorder.h
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -27,7 +27,6 @@
#include "../graphics/Filter.h"
-#include "../base/Point.h"
#include "../base/Rect.h"
#include <boost/shared_ptr.hpp>
diff --git a/src/imaging/Makefile.am b/src/imaging/Makefile.am
index 41b5f61..93d1562 100644
--- a/src/imaging/Makefile.am
+++ b/src/imaging/Makefile.am
@@ -1,4 +1,4 @@
-INCLUDES = -I.. @PTHREAD_CFLAGS@ @XML2_CFLAGS@
+AM_CPPFLAGS = -I.. @PTHREAD_CFLAGS@ @XML2_CFLAGS@ @GDK_PIXBUF_CFLAGS@
if ENABLE_V4L2
V4L2_SOURCES = V4LCamera.cpp
@@ -12,7 +12,6 @@ if ENABLE_1394_2
DC1394_SOURCES = FWCameraUtils.cpp
DC1394_INCLUDES = FWCameraUtils.h
else
-
DC1394_SOURCES =
DC1394_INCLUDES =
endif
@@ -20,13 +19,13 @@ endif
ALL_H = Camera.h TrackerThread.h TrackerConfig.h Blob.h FWCamera.h Run.h \
FakeCamera.h CoordTransformer.h FilterDistortion.h $(DC1394_INCLUDES) \
DeDistort.h trackerconfigdtd.h FilterWipeBorder.h FilterClearBorder.h \
- $(V4L2_INCLUDES)
+ $(V4L2_INCLUDES) CameraInfo.h
ALL_CPP = Camera.cpp TrackerThread.cpp TrackerConfig.cpp Blob.cpp FWCamera.cpp Run.cpp \
FakeCamera.cpp CoordTransformer.cpp FilterDistortion.cpp $(DC1394_SOURCES) \
DeDistort.cpp trackerconfigdtd.cpp FilterWipeBorder.cpp FilterClearBorder.cpp \
- $(V4L2_SOURCES)
+ $(V4L2_SOURCES) CameraInfo.cpp
-TESTS=testimaging
+TESTS = testimaging
EXTRA_DIST = avgtrackerrc.minimal $(wildcard baseline/*.png) $(wildcard testfiles/*.png) \
CMUCamera.h CMUCamera.cpp DSCamera.cpp DSCamera.h DSHelper.h DSHelper.cpp \
@@ -34,12 +33,9 @@ EXTRA_DIST = avgtrackerrc.minimal $(wildcard baseline/*.png) $(wildcard testfile
noinst_LTLIBRARIES = libimaging.la
libimaging_la_SOURCES = $(ALL_CPP) $(ALL_H)
-noinst_PROGRAMS=testimaging checktracking
-testimaging_SOURCES=testimaging.cpp $(ALL_H)
+noinst_PROGRAMS = testimaging
+testimaging_SOURCES = testimaging.cpp $(ALL_H)
testimaging_LDADD = ./libimaging.la ../graphics/libgraphics.la ../base/libbase.la \
- @XML2_LIBS@ -l@BOOST_THREAD_LIB@ -lboost_system @PTHREAD_LIBS@ @GDK_PIXBUF_LIBS@
-
-checktracking_SOURCES=checktracking.cpp $(ALL_H)
-checktracking_LDADD = ./libimaging.la ../graphics/libgraphics.la ../base/libbase.la \
- @XML2_LIBS@ -l@BOOST_THREAD_LIB@ -lboost_system @PTHREAD_LIBS@ @GDK_PIXBUF_LIBS@
+ ../base/triangulate/libtriangulate.la \
+ @XML2_LIBS@ @BOOST_THREAD_LIBS@ @PTHREAD_LIBS@ @GDK_PIXBUF_LIBS@
diff --git a/src/imaging/Makefile.in b/src/imaging/Makefile.in
index 3f65abd..8ee4fe5 100644
--- a/src/imaging/Makefile.in
+++ b/src/imaging/Makefile.in
@@ -1,9 +1,9 @@
-# Makefile.in generated by automake 1.11.1 from Makefile.am.
+# Makefile.in generated by automake 1.11.3 from Makefile.am.
# @configure_input@
# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
-# 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation,
-# Inc.
+# 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software
+# Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
@@ -37,18 +37,19 @@ build_triplet = @build@
host_triplet = @host@
target_triplet = @target@
TESTS = testimaging$(EXEEXT)
-noinst_PROGRAMS = testimaging$(EXEEXT) checktracking$(EXEEXT)
+noinst_PROGRAMS = testimaging$(EXEEXT)
subdir = src/imaging
DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/m4/ac_cxx_namespaces.m4 \
$(top_srcdir)/m4/ac_path_generic.m4 \
$(top_srcdir)/m4/ax_boost_thread.m4 \
- $(top_srcdir)/m4/ax_check_gl.m4 $(top_srcdir)/m4/libtool.m4 \
- $(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \
- $(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \
- $(top_srcdir)/m4/pkg.m4 $(top_srcdir)/m4/python_dev.m4 \
- $(top_srcdir)/configure.in
+ $(top_srcdir)/m4/ax_check_gl.m4 \
+ $(top_srcdir)/m4/ax_python_devel.m4 \
+ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \
+ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \
+ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/pkg.m4 \
+ $(top_srcdir)/m4/avg_version.m4 $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/mkinstalldirs
@@ -61,44 +62,37 @@ am__libimaging_la_SOURCES_DIST = Camera.cpp TrackerThread.cpp \
TrackerConfig.cpp Blob.cpp FWCamera.cpp Run.cpp FakeCamera.cpp \
CoordTransformer.cpp FilterDistortion.cpp FWCameraUtils.cpp \
DeDistort.cpp trackerconfigdtd.cpp FilterWipeBorder.cpp \
- FilterClearBorder.cpp V4LCamera.cpp Camera.h TrackerThread.h \
- TrackerConfig.h Blob.h FWCamera.h Run.h FakeCamera.h \
- CoordTransformer.h FilterDistortion.h FWCameraUtils.h \
- DeDistort.h trackerconfigdtd.h FilterWipeBorder.h \
- FilterClearBorder.h V4LCamera.h
+ FilterClearBorder.cpp V4LCamera.cpp CameraInfo.cpp Camera.h \
+ TrackerThread.h TrackerConfig.h Blob.h FWCamera.h Run.h \
+ FakeCamera.h CoordTransformer.h FilterDistortion.h \
+ FWCameraUtils.h DeDistort.h trackerconfigdtd.h \
+ FilterWipeBorder.h FilterClearBorder.h V4LCamera.h \
+ CameraInfo.h
@ENABLE_1394_2_TRUE@am__objects_1 = FWCameraUtils.lo
@ENABLE_V4L2_TRUE@am__objects_2 = V4LCamera.lo
am__objects_3 = Camera.lo TrackerThread.lo TrackerConfig.lo Blob.lo \
FWCamera.lo Run.lo FakeCamera.lo CoordTransformer.lo \
FilterDistortion.lo $(am__objects_1) DeDistort.lo \
trackerconfigdtd.lo FilterWipeBorder.lo FilterClearBorder.lo \
- $(am__objects_2)
+ $(am__objects_2) CameraInfo.lo
am__objects_4 =
am__objects_5 = $(am__objects_4) $(am__objects_4)
am_libimaging_la_OBJECTS = $(am__objects_3) $(am__objects_5)
libimaging_la_OBJECTS = $(am_libimaging_la_OBJECTS)
-AM_V_lt = $(am__v_lt_$(V))
-am__v_lt_ = $(am__v_lt_$(AM_DEFAULT_VERBOSITY))
+AM_V_lt = $(am__v_lt_@AM_V@)
+am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@)
am__v_lt_0 = --silent
PROGRAMS = $(noinst_PROGRAMS)
-am__checktracking_SOURCES_DIST = checktracking.cpp Camera.h \
- TrackerThread.h TrackerConfig.h Blob.h FWCamera.h Run.h \
- FakeCamera.h CoordTransformer.h FilterDistortion.h \
- FWCameraUtils.h DeDistort.h trackerconfigdtd.h \
- FilterWipeBorder.h FilterClearBorder.h V4LCamera.h
-am_checktracking_OBJECTS = checktracking.$(OBJEXT) $(am__objects_5)
-checktracking_OBJECTS = $(am_checktracking_OBJECTS)
-checktracking_DEPENDENCIES = ./libimaging.la \
- ../graphics/libgraphics.la ../base/libbase.la
am__testimaging_SOURCES_DIST = testimaging.cpp Camera.h \
TrackerThread.h TrackerConfig.h Blob.h FWCamera.h Run.h \
FakeCamera.h CoordTransformer.h FilterDistortion.h \
FWCameraUtils.h DeDistort.h trackerconfigdtd.h \
- FilterWipeBorder.h FilterClearBorder.h V4LCamera.h
+ FilterWipeBorder.h FilterClearBorder.h V4LCamera.h \
+ CameraInfo.h
am_testimaging_OBJECTS = testimaging.$(OBJEXT) $(am__objects_5)
testimaging_OBJECTS = $(am_testimaging_OBJECTS)
testimaging_DEPENDENCIES = ./libimaging.la ../graphics/libgraphics.la \
- ../base/libbase.la
+ ../base/libbase.la ../base/triangulate/libtriangulate.la
DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir)/src
depcomp = $(SHELL) $(top_srcdir)/depcomp
am__depfiles_maybe = depfiles
@@ -109,18 +103,18 @@ LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \
$(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
$(AM_CXXFLAGS) $(CXXFLAGS)
-AM_V_CXX = $(am__v_CXX_$(V))
-am__v_CXX_ = $(am__v_CXX_$(AM_DEFAULT_VERBOSITY))
+AM_V_CXX = $(am__v_CXX_@AM_V@)
+am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@)
am__v_CXX_0 = @echo " CXX " $@;
-AM_V_at = $(am__v_at_$(V))
-am__v_at_ = $(am__v_at_$(AM_DEFAULT_VERBOSITY))
+AM_V_at = $(am__v_at_@AM_V@)
+am__v_at_ = $(am__v_at_@AM_DEFAULT_V@)
am__v_at_0 = @
CXXLD = $(CXX)
CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \
$(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@
-AM_V_CXXLD = $(am__v_CXXLD_$(V))
-am__v_CXXLD_ = $(am__v_CXXLD_$(AM_DEFAULT_VERBOSITY))
+AM_V_CXXLD = $(am__v_CXXLD_@AM_V@)
+am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@)
am__v_CXXLD_0 = @echo " CXXLD " $@;
COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
$(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
@@ -128,23 +122,21 @@ LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \
$(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
$(AM_CFLAGS) $(CFLAGS)
-AM_V_CC = $(am__v_CC_$(V))
-am__v_CC_ = $(am__v_CC_$(AM_DEFAULT_VERBOSITY))
+AM_V_CC = $(am__v_CC_@AM_V@)
+am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@)
am__v_CC_0 = @echo " CC " $@;
CCLD = $(CC)
LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
$(AM_LDFLAGS) $(LDFLAGS) -o $@
-AM_V_CCLD = $(am__v_CCLD_$(V))
-am__v_CCLD_ = $(am__v_CCLD_$(AM_DEFAULT_VERBOSITY))
+AM_V_CCLD = $(am__v_CCLD_@AM_V@)
+am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@)
am__v_CCLD_0 = @echo " CCLD " $@;
-AM_V_GEN = $(am__v_GEN_$(V))
-am__v_GEN_ = $(am__v_GEN_$(AM_DEFAULT_VERBOSITY))
+AM_V_GEN = $(am__v_GEN_@AM_V@)
+am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@)
am__v_GEN_0 = @echo " GEN " $@;
-SOURCES = $(libimaging_la_SOURCES) $(checktracking_SOURCES) \
- $(testimaging_SOURCES)
+SOURCES = $(libimaging_la_SOURCES) $(testimaging_SOURCES)
DIST_SOURCES = $(am__libimaging_la_SOURCES_DIST) \
- $(am__checktracking_SOURCES_DIST) \
$(am__testimaging_SOURCES_DIST)
ETAGS = etags
CTAGS = ctags
@@ -160,7 +152,7 @@ AUTOHEADER = @AUTOHEADER@
AUTOMAKE = @AUTOMAKE@
AWK = @AWK@
BOOST_PYTHON_LIBS = @BOOST_PYTHON_LIBS@
-BOOST_THREAD_LIB = @BOOST_THREAD_LIB@
+BOOST_THREAD_LIBS = @BOOST_THREAD_LIBS@
CC = @CC@
CCDEPMODE = @CCDEPMODE@
CFLAGS = @CFLAGS@
@@ -175,6 +167,7 @@ DC1394_2_CFLAGS = @DC1394_2_CFLAGS@
DC1394_2_LIBS = @DC1394_2_LIBS@
DEFS = @DEFS@
DEPDIR = @DEPDIR@
+DLLTOOL = @DLLTOOL@
DSYMUTIL = @DSYMUTIL@
DUMPBIN = @DUMPBIN@
ECHO_C = @ECHO_C@
@@ -215,6 +208,7 @@ LIPO = @LIPO@
LN_S = @LN_S@
LTLIBOBJS = @LTLIBOBJS@
MAKEINFO = @MAKEINFO@
+MANIFEST_TOOL = @MANIFEST_TOOL@
MKDIR_P = @MKDIR_P@
MTDEV_CFLAGS = @MTDEV_CFLAGS@
MTDEV_LIBS = @MTDEV_LIBS@
@@ -244,11 +238,13 @@ PTHREAD_CC = @PTHREAD_CC@
PTHREAD_CFLAGS = @PTHREAD_CFLAGS@
PTHREAD_LIBS = @PTHREAD_LIBS@
PYTHON = @PYTHON@
+PYTHON_CPPFLAGS = @PYTHON_CPPFLAGS@
PYTHON_EXEC_PREFIX = @PYTHON_EXEC_PREFIX@
-PYTHON_INCLUDES = @PYTHON_INCLUDES@
-PYTHON_LIBS = @PYTHON_LIBS@
+PYTHON_EXTRA_LIBS = @PYTHON_EXTRA_LIBS@
+PYTHON_LDFLAGS = @PYTHON_LDFLAGS@
PYTHON_PLATFORM = @PYTHON_PLATFORM@
PYTHON_PREFIX = @PYTHON_PREFIX@
+PYTHON_SITE_PKG = @PYTHON_SITE_PKG@
PYTHON_VERSION = @PYTHON_VERSION@
RANLIB = @RANLIB@
SDL_CFLAGS = @SDL_CFLAGS@
@@ -271,6 +267,7 @@ abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
abs_top_srcdir = @abs_top_srcdir@
+ac_ct_AR = @ac_ct_AR@
ac_ct_CC = @ac_ct_CC@
ac_ct_CXX = @ac_ct_CXX@
ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
@@ -305,7 +302,6 @@ libdir = @libdir@
libexecdir = @libexecdir@
localedir = @localedir@
localstatedir = @localstatedir@
-lt_ECHO = @lt_ECHO@
mandir = @mandir@
mkdir_p = @mkdir_p@
oldincludedir = @oldincludedir@
@@ -329,7 +325,7 @@ target_vendor = @target_vendor@
top_build_prefix = @top_build_prefix@
top_builddir = @top_builddir@
top_srcdir = @top_srcdir@
-INCLUDES = -I.. @PTHREAD_CFLAGS@ @XML2_CFLAGS@
+AM_CPPFLAGS = -I.. @PTHREAD_CFLAGS@ @XML2_CFLAGS@ @GDK_PIXBUF_CFLAGS@
@ENABLE_V4L2_FALSE@V4L2_SOURCES =
@ENABLE_V4L2_TRUE@V4L2_SOURCES = V4LCamera.cpp
@ENABLE_V4L2_FALSE@V4L2_INCLUDES =
@@ -341,12 +337,12 @@ INCLUDES = -I.. @PTHREAD_CFLAGS@ @XML2_CFLAGS@
ALL_H = Camera.h TrackerThread.h TrackerConfig.h Blob.h FWCamera.h Run.h \
FakeCamera.h CoordTransformer.h FilterDistortion.h $(DC1394_INCLUDES) \
DeDistort.h trackerconfigdtd.h FilterWipeBorder.h FilterClearBorder.h \
- $(V4L2_INCLUDES)
+ $(V4L2_INCLUDES) CameraInfo.h
ALL_CPP = Camera.cpp TrackerThread.cpp TrackerConfig.cpp Blob.cpp FWCamera.cpp Run.cpp \
FakeCamera.cpp CoordTransformer.cpp FilterDistortion.cpp $(DC1394_SOURCES) \
DeDistort.cpp trackerconfigdtd.cpp FilterWipeBorder.cpp FilterClearBorder.cpp \
- $(V4L2_SOURCES)
+ $(V4L2_SOURCES) CameraInfo.cpp
EXTRA_DIST = avgtrackerrc.minimal $(wildcard baseline/*.png) $(wildcard testfiles/*.png) \
CMUCamera.h CMUCamera.cpp DSCamera.cpp DSCamera.h DSHelper.h DSHelper.cpp \
@@ -356,11 +352,8 @@ noinst_LTLIBRARIES = libimaging.la
libimaging_la_SOURCES = $(ALL_CPP) $(ALL_H)
testimaging_SOURCES = testimaging.cpp $(ALL_H)
testimaging_LDADD = ./libimaging.la ../graphics/libgraphics.la ../base/libbase.la \
- @XML2_LIBS@ -l@BOOST_THREAD_LIB@ @PTHREAD_LIBS@ @GDK_PIXBUF_LIBS@
-
-checktracking_SOURCES = checktracking.cpp $(ALL_H)
-checktracking_LDADD = ./libimaging.la ../graphics/libgraphics.la ../base/libbase.la \
- @XML2_LIBS@ -l@BOOST_THREAD_LIB@ @PTHREAD_LIBS@ @GDK_PIXBUF_LIBS@
+ ../base/triangulate/libtriangulate.la \
+ @XML2_LIBS@ @BOOST_THREAD_LIBS@ @PTHREAD_LIBS@ @GDK_PIXBUF_LIBS@
all: all-am
@@ -405,7 +398,7 @@ clean-noinstLTLIBRARIES:
echo "rm -f \"$${dir}/so_locations\""; \
rm -f "$${dir}/so_locations"; \
done
-libimaging.la: $(libimaging_la_OBJECTS) $(libimaging_la_DEPENDENCIES)
+libimaging.la: $(libimaging_la_OBJECTS) $(libimaging_la_DEPENDENCIES) $(EXTRA_libimaging_la_DEPENDENCIES)
$(AM_V_CXXLD)$(CXXLINK) $(libimaging_la_OBJECTS) $(libimaging_la_LIBADD) $(LIBS)
clean-noinstPROGRAMS:
@@ -416,10 +409,7 @@ clean-noinstPROGRAMS:
list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \
echo " rm -f" $$list; \
rm -f $$list
-checktracking$(EXEEXT): $(checktracking_OBJECTS) $(checktracking_DEPENDENCIES)
- @rm -f checktracking$(EXEEXT)
- $(AM_V_CXXLD)$(CXXLINK) $(checktracking_OBJECTS) $(checktracking_LDADD) $(LIBS)
-testimaging$(EXEEXT): $(testimaging_OBJECTS) $(testimaging_DEPENDENCIES)
+testimaging$(EXEEXT): $(testimaging_OBJECTS) $(testimaging_DEPENDENCIES) $(EXTRA_testimaging_DEPENDENCIES)
@rm -f testimaging$(EXEEXT)
$(AM_V_CXXLD)$(CXXLINK) $(testimaging_OBJECTS) $(testimaging_LDADD) $(LIBS)
@@ -431,6 +421,7 @@ distclean-compile:
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/Blob.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/Camera.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/CameraInfo.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/CoordTransformer.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/DeDistort.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/FWCamera.Plo@am__quote@
@@ -443,33 +434,29 @@ distclean-compile:
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/TrackerConfig.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/TrackerThread.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/V4LCamera.Plo@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/checktracking.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/testimaging.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/trackerconfigdtd.Plo@am__quote@
.cpp.o:
@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
-@am__fastdepCXX_FALSE@ $(AM_V_CXX) @AM_BACKSLASH@
-@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
-@am__fastdepCXX_FALSE@ $(CXXCOMPILE) -c -o $@ $<
+@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $<
.cpp.obj:
@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'`
@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
-@am__fastdepCXX_FALSE@ $(AM_V_CXX) @AM_BACKSLASH@
-@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
-@am__fastdepCXX_FALSE@ $(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'`
+@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'`
.cpp.lo:
@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo
-@am__fastdepCXX_FALSE@ $(AM_V_CXX) @AM_BACKSLASH@
-@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
-@am__fastdepCXX_FALSE@ $(LTCXXCOMPILE) -c -o $@ $<
+@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $<
mostlyclean-libtool:
-rm -f *.lo
@@ -610,14 +597,15 @@ check-TESTS: $(TESTS)
fi; \
dashes=`echo "$$dashes" | sed s/./=/g`; \
if test "$$failed" -eq 0; then \
- echo "$$grn$$dashes"; \
+ col="$$grn"; \
else \
- echo "$$red$$dashes"; \
+ col="$$red"; \
fi; \
- echo "$$banner"; \
- test -z "$$skipped" || echo "$$skipped"; \
- test -z "$$report" || echo "$$report"; \
- echo "$$dashes$$std"; \
+ echo "$${col}$$dashes$${std}"; \
+ echo "$${col}$$banner$${std}"; \
+ test -z "$$skipped" || echo "$${col}$$skipped$${std}"; \
+ test -z "$$report" || echo "$${col}$$report$${std}"; \
+ echo "$${col}$$dashes$${std}"; \
test "$$failed" -eq 0; \
else :; fi
@@ -666,10 +654,15 @@ install-am: all-am
installcheck: installcheck-am
install-strip:
- $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
- install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
- `test -z '$(STRIP)' || \
- echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install
+ if test -z '$(STRIP)'; then \
+ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+ install; \
+ else \
+ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
+ fi
mostlyclean-generic:
clean-generic:
diff --git a/src/imaging/Run.cpp b/src/imaging/Run.cpp
index e965338..37cc97d 100644
--- a/src/imaging/Run.cpp
+++ b/src/imaging/Run.cpp
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -30,7 +30,7 @@ Run::Run(int row, int startCol, int endCol)
m_Row = row;
m_StartCol = startCol;
m_EndCol = endCol;
- m_Center = DPoint((m_StartCol + m_EndCol-1)/2., m_Row);
+ m_Center = glm::vec2((m_StartCol + m_EndCol-1)/2., m_Row);
}
}
diff --git a/src/imaging/Run.h b/src/imaging/Run.h
index d751490..f3375f2 100644
--- a/src/imaging/Run.h
+++ b/src/imaging/Run.h
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -25,7 +25,7 @@
#define _Run_H_
#include "../api.h"
-#include "../base/Point.h"
+#include "../base/GLMHelper.h"
#include <boost/weak_ptr.hpp>
@@ -42,7 +42,7 @@ struct Run
int m_Row;
int m_StartCol;
int m_EndCol;
- DPoint m_Center;
+ glm::vec2 m_Center;
int length() {
return m_EndCol-m_StartCol;
};
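
The Run.cpp and Run.h hunks above migrate the point type from the old DPoint to glm::vec2, pulled in through ../base/GLMHelper.h. A minimal sketch of the resulting center computation, assuming only GLM and the struct fields shown in the diff:

    #include <glm/glm.hpp>

    struct Run {
        int m_Row;
        int m_StartCol;
        int m_EndCol;        // one past the last column
        glm::vec2 m_Center;
        int length() { return m_EndCol - m_StartCol; }
    };

    // The center is the midpoint of [startCol, endCol-1] in x and the row in y,
    // mirroring the constructor change above.
    Run makeRun(int row, int startCol, int endCol)
    {
        Run r;
        r.m_Row = row;
        r.m_StartCol = startCol;
        r.m_EndCol = endCol;
        r.m_Center = glm::vec2((startCol + endCol - 1) / 2.0f, float(row));
        return r;
    }
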
diff --git a/src/imaging/TrackerConfig.cpp b/src/imaging/TrackerConfig.cpp
index 1e86f8e..be4c758 100644
--- a/src/imaging/TrackerConfig.cpp
+++ b/src/imaging/TrackerConfig.cpp
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -70,15 +70,17 @@ void TrackerConfig::loadConfigFile(const string& sFilename)
string sDTDFName = "trackerconfig.dtd";
dtd = xmlParseDTD(NULL, (const xmlChar*) sDTDFName.c_str());
if (!dtd) {
- AVG_TRACE(Logger::WARNING,
- "DTD not found at " << sDTDFName
+ AVG_LOG_WARNING("DTD not found at " << sDTDFName
<< ". Not validating trackerconfig files.");
}
- m_Doc = xmlParseFile(sFilename.c_str());
+ // xmlParseFile crashes for some reason under Lion.
+ string sFileContents;
+ readWholeFile(sFilename, sFileContents);
+ m_Doc = xmlParseMemory(sFileContents.c_str(), sFileContents.length());
if (!m_Doc) {
- AVG_TRACE(Logger::ERROR, "Could not open tracker config file "
- << sFilename << ". Using defaults which will probably not work.");
+ AVG_LOG_ERROR("Could not open tracker config file " << sFilename <<
+ ". Using defaults which will probably not work.");
return;
}
@@ -95,7 +97,8 @@ void TrackerConfig::loadConfigFile(const string& sFilename)
xmlFreeDtd(dtd);
m_sFilename = sFilename;
- AVG_TRACE(Logger::CONFIG, "Reading Tracker config file from " << sFilename);
+ AVG_TRACE(Logger::category::CONFIG, Logger::severity::INFO,
+ "Reading Tracker config file from " << sFilename);
}
void TrackerConfig::load()
@@ -117,13 +120,13 @@ xmlXPathObjectPtr TrackerConfig::findConfigNodes(const string& sXPathExpr) const
xpCtx = xmlXPathNewContext(m_Doc);
if(!xpCtx) {
- AVG_TRACE(Logger::ERROR, "Unable to create new XPath context");
+ AVG_LOG_ERROR("Unable to create new XPath context");
return NULL;
}
xpElement = xmlXPathEvalExpression(BAD_CAST sFullPath.c_str(), xpCtx);
if(!xpElement) {
- AVG_TRACE(Logger::ERROR, "Unable to evaluate XPath expression '"
+ AVG_LOG_ERROR("Unable to evaluate XPath expression '"
<< sFullPath << "'");
xmlXPathFreeContext(xpCtx);
return NULL;
@@ -163,7 +166,7 @@ string TrackerConfig::getParam(const string& sXPathExpr) const
throw (Exception(AVG_ERR_OPTION_UNKNOWN,
string("getParam(): cannot find requested element ")+sXPathExpr));
} else if (nodes->nodeNr > 1) {
- AVG_TRACE(Logger::WARNING,
+ AVG_LOG_WARNING(
"getParam(): expression selects more than one node. Returning the first.");
}
@@ -186,23 +189,23 @@ int TrackerConfig::getIntParam(const std::string& sXPathExpr) const
return stringToInt(getParam(sXPathExpr));
}
-double TrackerConfig::getDoubleParam(const std::string& sXPathExpr) const
+float TrackerConfig::getFloatParam(const std::string& sXPathExpr) const
{
- return stringToDouble(getParam(sXPathExpr));
+ return stringToFloat(getParam(sXPathExpr));
}
-DPoint TrackerConfig::getPointParam(const std::string& sXPathExpr) const
+glm::vec2 TrackerConfig::getPointParam(const std::string& sXPathExpr) const
{
- return DPoint(getDoubleParam(sXPathExpr+"@x"), getDoubleParam(sXPathExpr+"@y"));
+ return glm::vec2(getFloatParam(sXPathExpr+"@x"), getFloatParam(sXPathExpr+"@y"));
}
-DRect TrackerConfig::getRectParam(const std::string& sXPathExpr) const
+FRect TrackerConfig::getRectParam(const std::string& sXPathExpr) const
{
- DPoint pos1 = DPoint(getDoubleParam(sXPathExpr+"@x1"),
- getDoubleParam(sXPathExpr+"@y1"));
- DPoint pos2 = DPoint(getDoubleParam(sXPathExpr+"@x2"),
- getDoubleParam(sXPathExpr+"@y2"));
- return DRect(pos1, pos2);
+ glm::vec2 pos1 = glm::vec2(getFloatParam(sXPathExpr+"@x1"),
+ getFloatParam(sXPathExpr+"@y1"));
+ glm::vec2 pos2 = glm::vec2(getFloatParam(sXPathExpr+"@x2"),
+ getFloatParam(sXPathExpr+"@y2"));
+ return FRect(pos1, pos2);
}
xmlNodePtr TrackerConfig::getXmlNode(const std::string& sXPathExpr) const
@@ -214,7 +217,7 @@ xmlNodePtr TrackerConfig::getXmlNode(const std::string& sXPathExpr) const
throw (Exception(AVG_ERR_OPTION_UNKNOWN,
string("getParam(): cannot find requested element ")+sXPathExpr));
} else if (nodes->nodeNr > 1) {
- AVG_TRACE(Logger::WARNING,
+ AVG_LOG_WARNING(
"getXmlNode(): expression selects more than one node. Returning the first.");
}
return nodes->nodeTab[0];
@@ -222,7 +225,7 @@ xmlNodePtr TrackerConfig::getXmlNode(const std::string& sXPathExpr) const
DeDistortPtr TrackerConfig::getTransform() const
{
- DPoint CameraExtents = getPointParam("/camera/size/");
+ glm::vec2 CameraExtents = getPointParam("/camera/size/");
DeDistortPtr pDD(new DeDistort);
pDD->load(CameraExtents, *this);
return pDD;
@@ -243,15 +246,15 @@ void TrackerConfig::dump() const
void TrackerConfig::save()
{
- AVG_TRACE(Logger::CONFIG, "Saving tracker configuration to "
- << m_sFilename << ".");
+ AVG_TRACE(Logger::category::CONFIG, Logger::severity::INFO,
+ "Saving tracker configuration to " << m_sFilename << ".");
if (m_Doc) {
if (fileExists(m_sFilename)) {
string sBakFile = m_sFilename + ".bak";
unlink(sBakFile.c_str());
if (rename(m_sFilename.c_str(), sBakFile.c_str())) {
- AVG_TRACE(Logger::WARNING, "Cannot create tracker config backup. Backing "
+ AVG_LOG_WARNING("Cannot create tracker config backup. Backing "
"it up on current workdir.");
copyFile(m_sFilename, "avgtrackerrc.bak");
}
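
The TrackerConfig hunks switch the parameter accessors from double/DPoint/DRect to float/glm::vec2/FRect and route XML loading through xmlParseMemory. A short sketch of client code against the new interface; the XPath strings for threshold, bandpass, and camera size are taken from this diff, the config file name is hypothetical:

    TrackerConfig config;
    config.loadConfigFile("avgtrackerrc");  // hypothetical config file name

    int threshold  = config.getIntParam("/tracker/touch/threshold/@value");
    float bpMin    = config.getFloatParam("/tracker/touch/bandpass/@min");
    float bpMax    = config.getFloatParam("/tracker/touch/bandpass/@max");
    glm::vec2 size = config.getPointParam("/camera/size/");
    DeDistortPtr pTransform = config.getTransform();
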
diff --git a/src/imaging/TrackerConfig.h b/src/imaging/TrackerConfig.h
index 3eb10f7..557f78a 100644
--- a/src/imaging/TrackerConfig.h
+++ b/src/imaging/TrackerConfig.h
@@ -1,4 +1,7 @@
//
+// libavg - Media Playback Engine.
+// Copyright (C) 2003-2014 Ulrich von Zadow
+//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
@@ -47,9 +50,9 @@ public:
std::string getParam(const std::string& sXPathExpr) const;
bool getBoolParam(const std::string& sXPathExpr) const;
int getIntParam(const std::string& sXPathExpr) const;
- double getDoubleParam(const std::string& sXPathExpr) const;
- DPoint getPointParam(const std::string& sXPathExpr) const;
- DRect getRectParam(const std::string& sXPathExpr) const;
+ float getFloatParam(const std::string& sXPathExpr) const;
+ glm::vec2 getPointParam(const std::string& sXPathExpr) const;
+ FRect getRectParam(const std::string& sXPathExpr) const;
xmlNodePtr getXmlNode(const std::string& sXPathExpr) const;
DeDistortPtr getTransform() const;
diff --git a/src/imaging/TrackerThread.cpp b/src/imaging/TrackerThread.cpp
index 16c30e4..079a8d6 100644
--- a/src/imaging/TrackerThread.cpp
+++ b/src/imaging/TrackerThread.cpp
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -23,7 +23,7 @@
#include "FilterWipeBorder.h"
#include "../base/Logger.h"
-#include "../base/ProfilingZone.h"
+#include "../base/ProfilingZoneID.h"
#include "../base/TimeSource.h"
#include "../base/ScopeTimer.h"
#include "../base/Exception.h"
@@ -37,9 +37,10 @@
#include "../graphics/FilterBlur.h"
#include "../graphics/FilterGauss.h"
#include "../graphics/FilterMask.h"
-#include "../graphics/OGLImagingContext.h"
+#include "../graphics/GLContext.h"
#include "../graphics/GPUBandpassFilter.h"
#include "../graphics/GPUBlurFilter.h"
+#include "../graphics/BitmapLoader.h"
#include <iostream>
#include <stdlib.h>
@@ -102,12 +103,14 @@ TrackerThread::~TrackerThread()
bool TrackerThread::init()
{
try {
- m_pImagingContext = new OGLImagingContext();
+ m_pImagingContext = GLContext::create(
+ GLConfig(false, false, true, 1, GLConfig::AUTO, false));
createBandpassFilter();
- AVG_TRACE(Logger::CONFIG, "Using fragment shaders for imaging operations.");
+ AVG_TRACE(Logger::category::CONFIG, Logger::severity::INFO,
+ "Using fragment shaders for imaging operations.");
} catch (Exception& e) {
- AVG_TRACE(Logger::WARNING, e.getStr());
- AVG_TRACE(Logger::CONFIG,
+ AVG_LOG_WARNING(e.getStr());
+ AVG_TRACE(Logger::category::CONFIG, Logger::severity::WARNING,
"Using CPU for imaging operations (slow and inaccurate).");
m_pImagingContext = 0;
m_pBandpassFilter = FilterPtr(new FilterFastBandpass());
@@ -116,7 +119,7 @@ bool TrackerThread::init()
m_StartTime = TimeSource::get()->getCurrentMillisecs();
m_HistoryDelay = m_pConfig->getIntParam("/tracker/historydelay/@value");
} catch (Exception& e) {
- AVG_TRACE(Logger::WARNING, e.getStr());
+ AVG_LOG_WARNING(e.getStr());
}
// Done in TrackerInputDevice::ctor to work around Leopard/libdc1394 threading issue.
@@ -138,7 +141,7 @@ bool TrackerThread::work()
ScopeTimer timer(ProfilingZoneCapture);
pCamBmp = m_pCamera->getImage(true);
BitmapPtr pTempBmp1;
- while (pTempBmp1 = m_pCamera->getImage(false)) {
+ while ((pTempBmp1 = m_pCamera->getImage(false))) {
m_NumCamFramesDiscarded++;
m_NumFrames++;
pCamBmp = pTempBmp1;
@@ -153,7 +156,7 @@ bool TrackerThread::work()
FilterMask(m_pCameraMaskBmp).applyInPlace(pCamBmp);
}
if (m_bCreateDebugImages) {
- boost::mutex::scoped_lock lock(*m_pMutex);
+ lock_guard lock(*m_pMutex);
*(m_pBitmaps[TRACKER_IMG_CAMERA]) = *pCamBmp;
ScopeTimer timer(ProfilingZoneHistogram);
drawHistogram(m_pBitmaps[TRACKER_IMG_HISTOGRAM], pCamBmp);
@@ -171,7 +174,7 @@ bool TrackerThread::work()
}
BitmapPtr pCroppedBmp(new Bitmap(*pDistortedBmp, m_ROI));
if (m_bCreateDebugImages) {
- boost::mutex::scoped_lock lock(*m_pMutex);
+ lock_guard lock(*m_pMutex);
m_pBitmaps[TRACKER_IMG_DISTORTED]->copyPixels(*pCroppedBmp);
}
if (m_pHistoryPreProcessor) {
@@ -179,7 +182,7 @@ bool TrackerThread::work()
m_pHistoryPreProcessor->applyInPlace(pCroppedBmp);
}
if (m_bCreateDebugImages) {
- boost::mutex::scoped_lock lock(*m_pMutex);
+ lock_guard lock(*m_pMutex);
m_pBitmaps[TRACKER_IMG_NOHISTORY]->copyPixels(*pCroppedBmp);
FilterNormalize(2).applyInPlace(m_pBitmaps[TRACKER_IMG_NOHISTORY]);
}
@@ -191,7 +194,7 @@ bool TrackerThread::work()
pBmpBandpass = m_pBandpassFilter->apply(pCroppedBmp);
}
if (m_bCreateDebugImages) {
- boost::mutex::scoped_lock lock(*m_pMutex);
+ lock_guard lock(*m_pMutex);
*(m_pBitmaps[TRACKER_IMG_HIGHPASS]) = *pBmpBandpass;
}
}
@@ -205,8 +208,10 @@ bool TrackerThread::work()
void TrackerThread::deinit()
{
m_pCamera = CameraPtr();
- AVG_TRACE(Logger::PROFILE, "Total camera frames: " << m_NumFrames);
- AVG_TRACE(Logger::PROFILE, "Camera frames discarded: " << m_NumCamFramesDiscarded);
+ AVG_TRACE(Logger::category::PROFILE, Logger::severity::INFO,
+ "Total camera frames: " << m_NumFrames);
+ AVG_TRACE(Logger::category::PROFILE, Logger::severity::INFO,
+ "Camera frames discarded: " << m_NumCamFramesDiscarded);
if (m_pBandpassFilter) {
m_pBandpassFilter.reset();
}
@@ -218,7 +223,7 @@ void TrackerThread::deinit()
void TrackerThread::setConfig(TrackerConfig config, IntRect roi,
BitmapPtr ppBitmaps[NUM_TRACKER_IMAGES])
{
- boost::mutex::scoped_lock lock(*m_pMutex);
+ lock_guard lock(*m_pMutex);
try {
m_TouchThreshold = config.getIntParam("/tracker/touch/threshold/@value");
} catch (Exception&) {
@@ -272,10 +277,7 @@ void TrackerThread::setConfig(TrackerConfig config, IntRect roi,
if (sCameraMaskFName == "") {
m_pCameraMaskBmp = BitmapPtr();
} else {
- BitmapPtr pRGBXCameraMaskBmp = BitmapPtr(new Bitmap(sCameraMaskFName));
- m_pCameraMaskBmp = BitmapPtr(
- new Bitmap(pRGBXCameraMaskBmp->getSize(), I8));
- m_pCameraMaskBmp->copyPixels(*pRGBXCameraMaskBmp);
+ BitmapPtr pRGBXCameraMaskBmp = loadBitmap(sCameraMaskFName, I8);
}
}
m_pConfig = TrackerConfigPtr(new TrackerConfig(config));
@@ -309,10 +311,10 @@ void TrackerThread::setBitmaps(IntRect roi, BitmapPtr ppBitmaps[NUM_TRACKER_IMAG
void TrackerThread::createBandpassFilter()
{
if (m_TouchThreshold != 0) {
- double bandpassMin = m_pConfig->getDoubleParam("/tracker/touch/bandpass/@min");
- double bandpassMax = m_pConfig->getDoubleParam("/tracker/touch/bandpass/@max");
- double bandpassPostMult =
- m_pConfig->getDoubleParam("/tracker/touch/bandpasspostmult/@value");
+ float bandpassMin = m_pConfig->getFloatParam("/tracker/touch/bandpass/@min");
+ float bandpassMax = m_pConfig->getFloatParam("/tracker/touch/bandpass/@max");
+ float bandpassPostMult =
+ m_pConfig->getFloatParam("/tracker/touch/bandpasspostmult/@value");
if (m_pImagingContext) {
m_pBandpassFilter = FilterPtr(new GPUBandpassFilter(m_ROI.size(), I8,
bandpassMin, bandpassMax, bandpassPostMult, m_bTrackBrighter));
@@ -366,16 +368,16 @@ void TrackerThread::drawHistogram(BitmapPtr pDestBmp, BitmapPtr pSrcBmp)
}
}
-inline bool isInbetween(double x, double min, double max)
+inline bool isInbetween(float x, float min, float max)
{
return x >= min && x <= max;
}
bool TrackerThread::isRelevant(BlobPtr pBlob, int minArea, int maxArea,
- double minEccentricity, double maxEccentricity)
+ float minEccentricity, float maxEccentricity)
{
bool res;
- res = isInbetween(pBlob->getArea(), minArea, maxArea) &&
+ res = isInbetween(pBlob->getArea(), float(minArea), float(maxArea)) &&
isInbetween(pBlob->getEccentricity(), minEccentricity, maxEccentricity);
return res;
}
@@ -390,8 +392,10 @@ BlobVectorPtr TrackerThread::findRelevantBlobs(BlobVectorPtr pBlobs, bool bTouch
}
int minArea = m_pConfig->getIntParam(sConfigPrefix+"areabounds/@min");
int maxArea = m_pConfig->getIntParam(sConfigPrefix+"areabounds/@max");
- double minEccentricity = m_pConfig->getDoubleParam(sConfigPrefix+"eccentricitybounds/@min");
- double maxEccentricity = m_pConfig->getDoubleParam(sConfigPrefix+"eccentricitybounds/@max");
+ float minEccentricity = m_pConfig->getFloatParam(sConfigPrefix+
+ "eccentricitybounds/@min");
+ float maxEccentricity = m_pConfig->getFloatParam(sConfigPrefix+
+ "eccentricitybounds/@max");
BlobVectorPtr pRelevantBlobs(new BlobVector());
for(BlobVector::iterator it = pBlobs->begin(); it != pBlobs->end(); ++it) {
@@ -420,9 +424,9 @@ void TrackerThread::drawBlobs(BlobVectorPtr pBlobs, BitmapPtr pSrcBmp,
}
int minArea = m_pConfig->getIntParam(sConfigPrefix+"areabounds/@min");
int maxArea = m_pConfig->getIntParam(sConfigPrefix+"areabounds/@max");
- double minEccentricity = m_pConfig->getDoubleParam(
+ float minEccentricity = m_pConfig->getFloatParam(
sConfigPrefix+"eccentricitybounds/@min");
- double maxEccentricity = m_pConfig->getDoubleParam(
+ float maxEccentricity = m_pConfig->getFloatParam(
sConfigPrefix+"eccentricitybounds/@max");
// Get max. pixel value in Bitmap
@@ -466,9 +470,9 @@ void TrackerThread::calcContours(BlobVectorPtr pBlobs)
sConfigPrefix = "/tracker/track/";
int minArea = m_pConfig->getIntParam(sConfigPrefix+"areabounds/@min");
int maxArea = m_pConfig->getIntParam(sConfigPrefix+"areabounds/@max");
- double minEccentricity = m_pConfig->getDoubleParam(
+ float minEccentricity = m_pConfig->getFloatParam(
sConfigPrefix+"eccentricitybounds/@min");
- double maxEccentricity = m_pConfig->getDoubleParam(
+ float maxEccentricity = m_pConfig->getFloatParam(
sConfigPrefix+"eccentricitybounds/@max");
int ContourPrecision = m_pConfig->getIntParam("/tracker/contourprecision/@value");
@@ -510,7 +514,7 @@ void TrackerThread::calcBlobs(BitmapPtr pTrackBmp, BitmapPtr pTouchBmp, long lon
BlobVectorPtr pTouchComps;
{
ScopeTimer timer(ProfilingZoneComps);
- boost::mutex::scoped_lock lock(*m_pMutex);
+ lock_guard lock(*m_pMutex);
BitmapPtr pDestBmp;
if (m_bCreateFingerImage) {
Pixel32 Black(0x00, 0x00, 0x00, 0x00);
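
The TrackerThread hunks replace the old two-argument AVG_TRACE calls with category/severity logging (plus the AVG_LOG_* shorthands) and spell boost::mutex::scoped_lock as the lock_guard alias. A minimal sketch of both patterns, assuming lock_guard is libavg's alias for a scoped boost::mutex lock as used above:

    // Logging: category and severity are now explicit.
    AVG_TRACE(Logger::category::CONFIG, Logger::severity::INFO,
            "Using fragment shaders for imaging operations.");
    AVG_LOG_WARNING("V4L2: Timeout while waiting for image data");

    // Locking: guard shared debug bitmaps for the duration of the block.
    boost::shared_ptr<boost::mutex> pMutex(new boost::mutex);
    {
        lock_guard lock(*pMutex);
        // ... copy into m_pBitmaps ...
    }
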
diff --git a/src/imaging/TrackerThread.h b/src/imaging/TrackerThread.h
index 5372828..27d91ff 100644
--- a/src/imaging/TrackerThread.h
+++ b/src/imaging/TrackerThread.h
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -54,7 +54,7 @@ typedef enum {
} TrackerImageID;
typedef boost::shared_ptr<boost::mutex> MutexPtr;
-class OGLImagingContext;
+class GLContext;
class AVG_API IBlobTarget {
public:
@@ -90,7 +90,7 @@ class AVG_API TrackerThread: public WorkerThread<TrackerThread>
void drawHistogram(BitmapPtr pDestBmp, BitmapPtr pSrcBmp);
void calcBlobs(BitmapPtr pTrackBmp, BitmapPtr pTouchBmp, long long time);
bool isRelevant(BlobPtr pBlob, int minArea, int maxArea,
- double minEccentricity, double maxEccentricity);
+ float minEccentricity, float maxEccentricity);
BlobVectorPtr findRelevantBlobs(BlobVectorPtr pBlobs, bool bTouch);
void drawBlobs(BlobVectorPtr pBlobs, BitmapPtr pSrcBmp, BitmapPtr pDestBmp,
int Offset, bool bTouch);
@@ -125,7 +125,7 @@ class AVG_API TrackerThread: public WorkerThread<TrackerThread>
int m_NumFrames;
int m_NumCamFramesDiscarded;
- OGLImagingContext* m_pImagingContext;
+ GLContext* m_pImagingContext;
FilterPtr m_pBandpassFilter;
};
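
The header now forward-declares GLContext where it used to declare OGLImagingContext; the matching .cpp hunk constructs the context through a factory call. A one-line sketch with the GLConfig arguments copied from the diff above:

    GLContext* pImagingContext = GLContext::create(
            GLConfig(false, false, true, 1, GLConfig::AUTO, false));
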
diff --git a/src/imaging/V4LCamera.cpp b/src/imaging/V4LCamera.cpp
index 9452964..85d7d41 100644
--- a/src/imaging/V4LCamera.cpp
+++ b/src/imaging/V4LCamera.cpp
@@ -1,6 +1,6 @@
//
-// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// libavg - Media Playback Engine.
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -27,6 +27,7 @@
#include "../base/Logger.h"
#include "../base/Exception.h"
#include "../base/StringHelper.h"
+#include "../base/GLMHelper.h"
#include <sys/stat.h>
#include <sys/ioctl.h>
@@ -62,15 +63,14 @@ namespace {
}
}
+namespace avg {
-V4LCamera::V4LCamera(std::string sDevice, int channel, IntPoint size, PixelFormat camPF,
- PixelFormat destPF, double frameRate)
- : Camera(camPF, destPF),
+V4LCamera::V4LCamera(string sDevice, int channel, IntPoint size, PixelFormat camPF,
+ PixelFormat destPF, float frameRate)
+ : Camera(camPF, destPF, size, frameRate),
m_Fd(-1),
m_Channel(channel),
- m_sDevice(sDevice),
- m_ImgSize(size),
- m_FrameRate(frameRate)
+ m_sDevice(sDevice)
{
m_v4lPF = getV4LPF(camPF);
if (m_sDevice == "") {
@@ -87,27 +87,30 @@ V4LCamera::V4LCamera(std::string sDevice, int channel, IntPoint size, PixelForma
m_FeaturesNames[V4L2_CID_WHITENESS] = "whiteness";
m_FeaturesNames[V4L2_CID_GAMMA] = "gamma";
m_FeaturesNames[V4L2_CID_SATURATION] = "saturation";
-
- struct stat st;
+
+ struct stat st;
if (stat(m_sDevice.c_str(), &st) == -1) {
- fatalError(string("Unable to access v4l2 device '")+m_sDevice+"'." );
+ AVG_ASSERT_MSG(false, (string("Unable to access v4l2 device '" +
+ m_sDevice + "'.").c_str()));
}
if (!S_ISCHR (st.st_mode)) {
- fatalError(string("'")+m_sDevice+" is not a v4l2 device.");
+ AVG_ASSERT_MSG(false, (string("'" + m_sDevice +
+ " is not a v4l2 device.").c_str()));
}
m_Fd = ::open(m_sDevice.c_str(), O_RDWR /* required */ | O_NONBLOCK, 0);
if (m_Fd == -1) {
- fatalError(string("Unable to open v4l2 device '") + m_sDevice + "'.");
+ AVG_ASSERT_MSG(false, (string("Unable to open v4l2 device '" + m_sDevice
+ + "'.").c_str()));
}
-
+
initDevice();
- AVG_TRACE(Logger::CONFIG, "V4L2 Camera opened");
+ AVG_TRACE(Logger::category::CONFIG, Logger::severity::INFO, "V4L2 Camera opened");
}
-V4LCamera::~V4LCamera()
+V4LCamera::~V4LCamera()
{
close();
}
@@ -117,7 +120,7 @@ void V4LCamera::close()
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
int rc = xioctl(m_Fd, VIDIOC_STREAMOFF, &type);
if (rc == -1) {
- AVG_TRACE(Logger::ERROR, "VIDIOC_STREAMOFF");
+ AVG_LOG_ERROR("VIDIOC_STREAMOFF");
}
vector<Buffer>::iterator it;
for (it = m_vBuffers.begin(); it != m_vBuffers.end(); ++it) {
@@ -127,21 +130,22 @@ void V4LCamera::close()
m_vBuffers.clear();
::close(m_Fd);
- AVG_TRACE(Logger::CONFIG, "V4L2 Camera closed");
+ AVG_TRACE(Logger::category::CONFIG, Logger::severity::INFO, "V4L2 Camera closed");
m_Fd = -1;
}
-IntPoint V4LCamera::getImgSize()
-{
- return m_ImgSize;
-}
-
int V4LCamera::getV4LPF(PixelFormat pf)
{
switch (pf) {
case I8:
return V4L2_PIX_FMT_GREY;
+ case BAYER8:
+ case BAYER8_BGGR:
+ case BAYER8_GBRG:
+ case BAYER8_GRBG:
+ case BAYER8_RGGB:
+ return V4L2_PIX_FMT_GREY;
case YCbCr411:
return V4L2_PIX_FMT_Y41P;
case YCbCr422:
@@ -154,7 +158,7 @@ int V4LCamera::getV4LPF(PixelFormat pf)
return V4L2_PIX_FMT_BGR24;
default:
throw Exception(AVG_ERR_INVALID_ARGS,
- "Unsupported or illegal value for camera pixel format '"
+ "Unsupported or illegal value for camera pixel format '"
+ getPixelFormatString(pf) + "'.");
}
}
@@ -163,37 +167,37 @@ BitmapPtr V4LCamera::getImage(bool bWait)
{
struct v4l2_buffer buf;
CLEAR(buf);
-
+
// wait for incoming data blocking, timeout 2s
if (bWait) {
fd_set fds;
struct timeval tv;
int rc;
-
+
FD_ZERO(&fds);
FD_SET(m_Fd, &fds);
-
+
/* Timeout. */
tv.tv_sec = 2;
tv.tv_usec = 0;
-
+
rc = select (m_Fd+1, &fds, NULL, NULL, &tv);
- // caught signal or something else
+ // caught signal or something else
if (rc == -1) {
- AVG_TRACE(Logger::WARNING, "V4L2: select failed.");
+ AVG_LOG_WARNING("V4L2: select failed.");
return BitmapPtr();
}
// timeout
if (rc == 0) {
- AVG_TRACE(Logger::WARNING, "V4L2: Timeout while waiting for image data");
+ AVG_LOG_WARNING("V4L2: Timeout while waiting for image data");
return BitmapPtr();
}
}
-
+
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
-
+
// dequeue filled buffer
if (xioctl (m_Fd, VIDIOC_DQBUF, &buf) == -1) {
if (errno == EAGAIN) {
@@ -203,31 +207,30 @@ BitmapPtr V4LCamera::getImage(bool bWait)
AVG_ASSERT(false);
}
}
-
+
unsigned char * pCaptureBuffer = (unsigned char*)m_vBuffers[buf.index].start;
-
- double lineLen;
+
+ float lineLen;
switch (getCamPF()) {
case YCbCr411:
- lineLen = m_ImgSize.x*1.5;
+ lineLen = getImgSize().x*1.5f;
break;
case YCbCr420p:
- lineLen = m_ImgSize.x;
+ lineLen = getImgSize().x;
break;
default:
- lineLen = m_ImgSize.x*getBytesPerPixel(getCamPF());
+ lineLen = getImgSize().x*getBytesPerPixel(getCamPF());
}
- BitmapPtr pCamBmp(new Bitmap(m_ImgSize, getCamPF(), pCaptureBuffer, lineLen,
+ BitmapPtr pCamBmp(new Bitmap(getImgSize(), getCamPF(), pCaptureBuffer, lineLen,
false, "TempCameraBmp"));
BitmapPtr pDestBmp = convertCamFrameToDestPF(pCamBmp);
// enqueues free buffer for mmap
if (-1 == xioctl (m_Fd, VIDIOC_QBUF, &buf)) {
- throw(Exception(AVG_ERR_CAMERA_FATAL,
- "V4L Camera: failed to enqueue image buffer."));
+ AVG_ASSERT_MSG(false, "V4L Camera: failed to enqueue image buffer.");
}
-
+
return pDestBmp;
}
@@ -236,28 +239,23 @@ bool V4LCamera::isCameraAvailable()
return m_bCameraAvailable;
}
-const std::string& V4LCamera::getDevice() const
+const string& V4LCamera::getDevice() const
{
return m_sDevice;
}
-const std::string& V4LCamera::getDriverName() const
+const string& V4LCamera::getDriverName() const
{
return m_sDriverName;
}
-double V4LCamera::getFrameRate() const
-{
- return m_FrameRate;
-}
-
-std::string V4LCamera::getFeatureName(V4LCID_t v4lFeature)
+string V4LCamera::getFeatureName(V4LCID_t v4lFeature)
{
- std::string sName = m_FeaturesNames[v4lFeature];
+ string sName = m_FeaturesNames[v4lFeature];
if (sName == "") {
sName = "UNKNOWN";
}
-
+
return sName;
}
@@ -277,21 +275,21 @@ V4LCID_t V4LCamera::getFeatureID(CameraFeature feature) const
} else if (feature == CAM_FEATURE_SATURATION) {
v4lFeature = V4L2_CID_SATURATION;
} else {
- AVG_TRACE(Logger::WARNING, "feature " << cameraFeatureToString(feature)
+ AVG_LOG_WARNING("feature " << cameraFeatureToString(feature)
<< " not supported for V4L2.");
return -1;
}
-
+
return v4lFeature;
}
bool V4LCamera::isFeatureSupported(V4LCID_t v4lFeature) const
{
struct v4l2_queryctrl queryCtrl;
-
+
CLEAR(queryCtrl);
queryCtrl.id = v4lFeature;
-
+
if (ioctl (m_Fd, VIDIOC_QUERYCTRL, &queryCtrl) == -1) {
if (errno != EINVAL) {
cerr << "Got " << strerror(errno) << endl;
@@ -310,9 +308,9 @@ bool V4LCamera::isFeatureSupported(V4LCID_t v4lFeature) const
int V4LCamera::getFeature(CameraFeature feature) const
{
V4LCID_t v4lFeature = getFeatureID(feature);
-
+
FeatureMap::const_iterator it = m_Features.find(v4lFeature);
-
+
if (it == m_Features.end()) {
return 0;
} else {
@@ -323,191 +321,191 @@ int V4LCamera::getFeature(CameraFeature feature) const
void V4LCamera::setFeature(V4LCID_t v4lFeature, int value)
{
if (!m_bCameraAvailable) {
- AVG_TRACE(Logger::WARNING, "setFeature() called before opening device: ignored");
+ AVG_LOG_WARNING("setFeature() called before opening device: ignored");
return;
}
if (!isFeatureSupported(v4lFeature)) {
- AVG_TRACE(Logger::WARNING, "Camera feature " << getFeatureName(v4lFeature) <<
+ AVG_LOG_WARNING("Camera feature " << getFeatureName(v4lFeature) <<
" is not supported by hardware");
return;
}
-
+
struct v4l2_control control;
-
+
CLEAR(control);
control.id = v4lFeature;
control.value = value;
-// AVG_TRACE(Logger::APP, "Setting feature " << getFeatureName(v4lFeature) <<
+// AVG_TRACE(Logger::category::APP, "Setting feature " << getFeatureName(v4lFeature) <<
// " to "<< value);
if (ioctl(m_Fd, VIDIOC_S_CTRL, &control) == -1) {
- AVG_TRACE(Logger::ERROR, "Cannot set feature " <<
- m_FeaturesNames[v4lFeature]);
+ AVG_LOG_ERROR("Cannot set feature " << m_FeaturesNames[v4lFeature]);
}
}
void V4LCamera::setFeatureOneShot(CameraFeature feature)
{
- AVG_TRACE(Logger::WARNING, "setFeatureOneShot is not supported by V4L cameras.");
+ AVG_LOG_WARNING("setFeatureOneShot is not supported by V4L cameras.");
}
int V4LCamera::getWhitebalanceU() const
{
- AVG_TRACE(Logger::WARNING, "getWhitebalance is not supported by V4L cameras.");
+ AVG_LOG_WARNING("getWhitebalance is not supported by V4L cameras.");
return 0;
}
int V4LCamera::getWhitebalanceV() const
{
- AVG_TRACE(Logger::WARNING, "getWhitebalance is not supported by V4L cameras.");
+ AVG_LOG_WARNING("getWhitebalance is not supported by V4L cameras.");
return 0;
}
void V4LCamera::setWhitebalance(int u, int v, bool bIgnoreOldValue)
{
- setFeature(V4L2_CID_RED_BALANCE, u);
- setFeature(V4L2_CID_BLUE_BALANCE, v);
+ setFeature(V4L2_CID_RED_BALANCE, u);
+ setFeature(V4L2_CID_BLUE_BALANCE, v);
}
-int dumpCameras_open(int j)
+int V4LCamera::checkCamera(int j)
{
stringstream minorDeviceNumber;
minorDeviceNumber << j;
string address = "/dev/video";
- string result = address + minorDeviceNumber.str();
+ string result = address + minorDeviceNumber.str();
int fd = ::open(result.c_str(), O_RDWR /* required */ | O_NONBLOCK, 0);
return fd;
}
-v4l2_capability dumpCameraCapabilities(int fd)
+v4l2_capability getCamCapabilities(int fd)
{
v4l2_capability capability;
memset(&capability, 0, sizeof(capability));
- int rc = ioctl(fd, VIDIOC_QUERYCAP, &capability);
- if (rc != -1) {
- cout << capability.card << ":" << endl;
- cout << " Driver: " << capability.driver << endl;
- cout << " Location: " << capability.bus_info;
- cout << endl << endl;
- }
+ ioctl(fd, VIDIOC_QUERYCAP, &capability);
return capability;
}
-void dumpSupportedImgFormats(int fd)
+PixelFormat V4LCamera::intToPixelFormat(unsigned int pixelformat)
+{
+ switch (pixelformat) {
+ case v4l2_fourcc('Y','U','Y','V'):
+ return YUYV422;
+ case v4l2_fourcc('U','Y','V','Y'):
+ return YCbCr422;
+ case v4l2_fourcc('G','R','E','Y'):
+ return I8;
+ case v4l2_fourcc('Y','1','6',' '):
+ return I16;
+ case v4l2_fourcc('R','G','B','3'):
+ return R8G8B8;
+ case v4l2_fourcc('B','G','R','3'):
+ return B8G8R8;
+ default:
+ return NO_PIXELFORMAT;
+ }
+}
+
+int V4LCamera::countCameras()
+{
+ int numberOfCameras = 0;
+ for (int j = 0; j < 256; j++) {
+ int fd = checkCamera(j);
+ if (fd != -1) {
+ numberOfCameras++;
+ }
+ }
+ return numberOfCameras;
+}
+
+CameraInfo* V4LCamera::getCameraInfos(int deviceNumber)
+{
+ int fd = checkCamera(deviceNumber);
+ if (fd == -1) {
+ AVG_ASSERT(false);
+ return NULL;
+ }
+ stringstream ss;
+ ss << "/dev/video" << deviceNumber;
+ CameraInfo* camInfo = new CameraInfo("video4linux", ss.str());
+ v4l2_capability capability = getCamCapabilities(fd);
+ if (capability.capabilities & V4L2_CAP_VIDEO_CAPTURE) {
+ getCameraImageFormats(fd, camInfo);
+ getCameraControls(fd, camInfo);
+ }
+ return camInfo;
+}
+
+void V4LCamera::getCameraImageFormats(int fd, CameraInfo* camInfo)
{
- cout << "Suported Image Formats:" << endl;
for (int i = 0;; i++) {
+// cerr << i << endl;
v4l2_fmtdesc fmtDesc;
memset(&fmtDesc, 0, sizeof(fmtDesc));
fmtDesc.index = i;
fmtDesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
int rc = ioctl(fd, VIDIOC_ENUM_FMT, &fmtDesc);
if (rc == -1) {
- break;
+ break;
}
v4l2_frmsizeenum frmSizeEnum;
memset(&frmSizeEnum, 0, sizeof (frmSizeEnum));
frmSizeEnum.index = 0;
frmSizeEnum.pixel_format = fmtDesc.pixelformat;
- bool bSupported = false;
while (ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmSizeEnum) == 0) {
- string sAvgPixelformat;
- switch (fmtDesc.pixelformat) {
- case v4l2_fourcc('Y','U','Y','V'):
- sAvgPixelformat = "YUYV422";
- bSupported = true;
- break;
- case v4l2_fourcc('U','Y','V','Y'):
- sAvgPixelformat = "YUV422";
- bSupported = true;
- break;
- case v4l2_fourcc('G','R','E','Y'):
- sAvgPixelformat = "I8";
- bSupported = true;
- break;
- case v4l2_fourcc('Y','1','6',' '):
- sAvgPixelformat = "I16";
- bSupported = true;
- break;
- case v4l2_fourcc('R','G','B','3'):
- sAvgPixelformat = "RGB";
- bSupported = true;
- break;
- case v4l2_fourcc('B','G','R','3'):
- sAvgPixelformat = "BGR";
- bSupported = true;
- break;
- default:
- break;
- }
-
- if (bSupported) {
+/* fprintf(stdout, " pixelformat :%c%c%c%c\\n",
+ fmtDesc.pixelformat & 0xFF,
+ (fmtDesc.pixelformat >> 8) & 0xFF,
+ (fmtDesc.pixelformat >> 16) & 0xFF,
+ (fmtDesc.pixelformat >> 24) & 0xFF);
+*/
+ PixelFormat pixFormat = intToPixelFormat(fmtDesc.pixelformat);
+ if (pixFormat != NO_PIXELFORMAT) {
v4l2_frmivalenum frmIvalEnum;
- cout << " " << sAvgPixelformat << " ";
- cout << " (" << frmSizeEnum.discrete.width << ", ";
- cout << frmSizeEnum.discrete.height << ")";
- cout << " fps: ";
memset (&frmIvalEnum, 0, sizeof (frmIvalEnum));
- frmIvalEnum.index = 0;
- frmIvalEnum.pixel_format = frmSizeEnum.pixel_format;
- frmIvalEnum.width = frmSizeEnum.discrete.width;
+ frmIvalEnum.index = 0;
+ frmIvalEnum.pixel_format = frmSizeEnum.pixel_format;
+ frmIvalEnum.width = frmSizeEnum.discrete.width;
frmIvalEnum.height = frmSizeEnum.discrete.height;
+ IntPoint size;
+ size.x = frmSizeEnum.discrete.width;
+ size.y = frmSizeEnum.discrete.height;
+ std::vector<float> framerates;
while (ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frmIvalEnum) == 0) {
- cout << frmIvalEnum.discrete.denominator << "/";
- frmIvalEnum.index++;
+ framerates.push_back(frmIvalEnum.discrete.denominator);
+ frmIvalEnum.index++;
}
- cout << endl;
+ CameraImageFormat camImFormat = CameraImageFormat(size, pixFormat,
+ framerates);
+ camInfo->addImageFormat(camImFormat);
}
frmSizeEnum.index++;
}
}
}
-void dumpCameraControls(int fd)
+void V4LCamera::getCameraControls(int fd, CameraInfo* camInfo)
{
- cout << endl << "Camera Controls:" << endl;
v4l2_queryctrl queryCtrl;
for (queryCtrl.id = V4L2_CID_BASE; queryCtrl.id < V4L2_CID_LASTP1; queryCtrl.id++) {
int rc = ioctl (fd, VIDIOC_QUERYCTRL, &queryCtrl);
- if (rc != -1) {
- if (queryCtrl.flags & V4L2_CTRL_FLAG_DISABLED) {
- continue;
- }
- cout << " " << queryCtrl.name << ":" << endl;
- cout << " Min: " << queryCtrl.minimum << " | ";
- cout << "Max: " << queryCtrl.maximum << " | ";
- cout << "Default: "<< queryCtrl.default_value << endl;
- } else {
- if (errno != EINVAL) {
- perror("VIDIOC_QUERYCTRL");
- exit(EXIT_FAILURE);
- }
+ if (rc == -1) {
+ continue;
}
- }
-}
-
-void V4LCamera::dumpCameras()
-{
- for(int j = 0; j < 256; j++){
- int fd = dumpCameras_open(j);
- if (fd != -1) {
- cout << "------------------------Video4linux Camera-------------------------";
- cout << endl;
- cout << "/dev/video" << j << " ";
- v4l2_capability capability = dumpCameraCapabilities(fd);
- if (capability.capabilities & V4L2_CAP_VIDEO_CAPTURE) {
- dumpSupportedImgFormats(fd);
- dumpCameraControls(fd);
- }
- cout << "-------------------------------------------------------------------";
- cout << endl;
+ if (queryCtrl.flags & V4L2_CTRL_FLAG_DISABLED) {
+ continue;
}
+ stringstream ss;
+ ss << queryCtrl.name;
+ std::string sControlName = ss.str();
+ int min = queryCtrl.minimum;
+ int max = queryCtrl.maximum;
+ int defaultValue = queryCtrl.default_value;
+ CameraControl camControl = CameraControl(sControlName, min, max, defaultValue);
+ camInfo->addControl(camControl);
}
}
-
void V4LCamera::setFeature(CameraFeature feature, int value, bool bIgnoreOldValue)
{
// ignore -1 coming from default unbiased cameranode parameters
@@ -518,8 +516,6 @@ void V4LCamera::setFeature(CameraFeature feature, int value, bool bIgnoreOldValu
V4LCID_t v4lFeature = getFeatureID(feature);
m_Features[v4lFeature] = value;
-// AVG_TRACE(Logger::WARNING,"Setting feature " << sFeature <<
-// " to " << value);
if (m_bCameraAvailable) {
setFeature(v4lFeature, value);
}
@@ -527,7 +523,7 @@ void V4LCamera::setFeature(CameraFeature feature, int value, bool bIgnoreOldValu
void V4LCamera::startCapture()
{
-// AVG_TRACE(Logger::APP, "Entering startCapture()...");
+// AVG_TRACE(Logger::category::APP, "Entering startCapture()...");
unsigned int i;
enum v4l2_buf_type type;
@@ -544,7 +540,7 @@ void V4LCamera::startCapture()
int err = xioctl(m_Fd, VIDIOC_QBUF, &buf);
AVG_ASSERT(err != -1);
}
-
+
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
int err= xioctl (m_Fd, VIDIOC_STREAMON, &type);
AVG_ASSERT(err != -1);
@@ -552,8 +548,8 @@ void V4LCamera::startCapture()
void V4LCamera::initDevice()
{
-// AVG_TRACE(Logger::APP, "Entering initDevice()...");
-
+// AVG_TRACE(Logger::category::APP, "Entering initDevice()...");
+
struct v4l2_capability cap;
struct v4l2_cropcap CropCap;
struct v4l2_crop Crop;
@@ -562,17 +558,17 @@ void V4LCamera::initDevice()
if (xioctl(m_Fd, VIDIOC_QUERYCAP, &cap) == -1) {
close();
- fatalError(m_sDevice + " is not a valid V4L2 device.");
+ AVG_ASSERT_MSG(false, (m_sDevice + " is not a valid V4L2 device.").c_str());
}
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
close();
- fatalError(m_sDevice + " does not support capturing");
+ AVG_ASSERT_MSG(false, (m_sDevice + " does not support capturing").c_str());
}
if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
close();
- fatalError(m_sDevice + " does not support streaming i/os");
+ AVG_ASSERT_MSG(false, (m_sDevice + " does not support streaming i/os").c_str());
}
m_sDriverName = (const char *)cap.driver;
@@ -594,58 +590,65 @@ void V4LCamera::initDevice()
break;
}
}
- } else {
+ } else {
/* Errors ignored. */
}
CLEAR(fmt);
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- fmt.fmt.pix.width = m_ImgSize.x;
- fmt.fmt.pix.height = m_ImgSize.y;
+ fmt.fmt.pix.width = getImgSize().x;
+ fmt.fmt.pix.height = getImgSize().y;
fmt.fmt.pix.pixelformat = m_v4lPF;
fmt.fmt.pix.field = V4L2_FIELD_ANY;
int rc = xioctl(m_Fd, VIDIOC_S_FMT, &fmt);
- if (int(fmt.fmt.pix.width) != m_ImgSize.x || int(fmt.fmt.pix.height) != m_ImgSize.y
- || rc == -1)
+ if (int(fmt.fmt.pix.width) != getImgSize().x ||
+ int(fmt.fmt.pix.height) != getImgSize().y || rc == -1)
{
- throw(Exception(AVG_ERR_CAMERA_NONFATAL,
+ throw(Exception(AVG_ERR_CAMERA_NONFATAL,
string("Unable to set V4L camera image format: '")
+strerror(errno)
- +"'. Try using avg_showcamera.py --dump to find out what the device supports."));
+ +"'. Try using avg_showcamera.py --list to find out what the device supports."));
}
CLEAR(StreamParam);
-
StreamParam.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- StreamParam.parm.capture.timeperframe.numerator = 1;
- StreamParam.parm.capture.timeperframe.denominator = (int) m_FrameRate;
- rc = xioctl(m_Fd, VIDIOC_S_PARM, &StreamParam);
- if (m_FrameRate != StreamParam.parm.capture.timeperframe.denominator || rc == -1) {
- throw(Exception(AVG_ERR_CAMERA_NONFATAL,
- string("Unable to set V4L camera framerate: '")
- +strerror(errno)
- +"'. Try using avg_showcamera.py --dump to find out what the device supports."));
+ rc = xioctl(m_Fd, VIDIOC_G_PARM, &StreamParam);
+
+ if(StreamParam.parm.capture.capability == V4L2_CAP_TIMEPERFRAME) {
+ CLEAR(StreamParam);
+
+ StreamParam.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ StreamParam.parm.capture.timeperframe.numerator = 1;
+ StreamParam.parm.capture.timeperframe.denominator = (int)getFrameRate();
+ rc = xioctl(m_Fd, VIDIOC_S_PARM, &StreamParam);
+ if (getFrameRate() != StreamParam.parm.capture.timeperframe.denominator ||
+ rc == -1)
+ {
+ throw(Exception(AVG_ERR_CAMERA_NONFATAL,
+ string("Unable to set V4L camera framerate: '")
+ +strerror(errno)
+ +"'. Try using avg_showcamera.py --list to find out what the device supports."));
+ }
}
- m_FrameRate = (double)StreamParam.parm.capture.timeperframe.denominator / \
- (double)StreamParam.parm.capture.timeperframe.numerator;
- initMMap ();
-
+ initMMap();
+
// TODO: string channel instead of numeric
// select channel
if (xioctl(m_Fd, VIDIOC_S_INPUT, &m_Channel) == -1) {
close();
- fatalError(string("Cannot set MUX channel ")+toString(m_Channel));
+ AVG_ASSERT_MSG(false, (string("Cannot set MUX channel " +
+ toString(m_Channel))).c_str());
}
-
+
m_bCameraAvailable = true;
for (FeatureMap::iterator it=m_Features.begin(); it != m_Features.end(); it++) {
setFeature(it->first, it->second);
}
-
+
}
void V4LCamera::initMMap()
@@ -660,13 +663,14 @@ void V4LCamera::initMMap()
if (xioctl(m_Fd, VIDIOC_REQBUFS, &req) == -1) {
if (EINVAL == errno) {
close();
- fatalError(m_sDevice+" does not support memory mapping");
+ AVG_ASSERT_MSG(false, (m_sDevice +
+ " does not support memory mapping").c_str());
} else {
cerr << "errno: " << strerror(errno);
AVG_ASSERT(false);
}
}
-
+
if (req.count < 2) {
cerr << "Insufficient buffer memory on " << m_sDevice;
AVG_ASSERT(false);
@@ -689,7 +693,7 @@ void V4LCamera::initMMap()
}
tmp.length = buf.length;
-
+
tmp.start = mmap (NULL /* start anywhere */,
buf.length,
PROT_READ | PROT_WRITE /* required */,
@@ -699,10 +703,11 @@ void V4LCamera::initMMap()
if (MAP_FAILED == tmp.start) {
AVG_ASSERT(false);
}
-
+
m_vBuffers.push_back(tmp);
}
}
+}
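
The V4LCamera hunks drop the console-printing dumpCameras() path in favor of a CameraInfo-based enumeration API (countCameras(), getCameraInfos(), plus the per-device format and control queries). A short sketch of how a caller might enumerate devices with it; the dump() call is an assumption about CameraInfo, the rest follows the signatures in this diff:

    int numCams = V4LCamera::countCameras();
    for (int i = 0; i < numCams; ++i) {
        CameraInfo* pInfo = V4LCamera::getCameraInfos(i);
        if (pInfo) {
            pInfo->dump();   // assumed: CameraInfo exposes a dump/print method
            delete pInfo;    // the raw pointer returned above is caller-owned
        }
    }
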
diff --git a/src/imaging/V4LCamera.h b/src/imaging/V4LCamera.h
index 6a8ebaa..b779e8e 100644
--- a/src/imaging/V4LCamera.h
+++ b/src/imaging/V4LCamera.h
@@ -1,6 +1,6 @@
//
-// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// libavg - Media Playback Engine.
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -39,48 +39,42 @@ class AVG_API V4LCamera: public Camera {
void * start;
size_t length;
};
-
+
public:
V4LCamera(std::string sDevice, int channel, IntPoint size, PixelFormat camPF,
- PixelFormat destPF, double frameRate);
+ PixelFormat destPF, float frameRate);
virtual ~V4LCamera();
- virtual IntPoint getImgSize();
virtual BitmapPtr getImage(bool bWait);
virtual bool isCameraAvailable();
- virtual const std::string& getDevice() const;
- virtual const std::string& getDriverName() const;
- virtual double getFrameRate() const;
-
+ virtual const std::string& getDevice() const;
+ virtual const std::string& getDriverName() const;
+
virtual int getFeature(CameraFeature feature) const;
- virtual void setFeature(CameraFeature feature, int value,
+ virtual void setFeature(CameraFeature feature, int value,
bool bIgnoreOldValue=false);
virtual void setFeatureOneShot(CameraFeature feature);
virtual int getWhitebalanceU() const;
virtual int getWhitebalanceV() const;
virtual void setWhitebalance(int u, int v, bool bIgnoreOldValue=false);
-
- static void dumpCameras();
+
+ static CameraInfo* getCameraInfos(int deviceNumber);
+ static int countCameras();
private:
void initDevice();
void startCapture();
void initMMap();
virtual void close();
-
+
int getV4LPF(PixelFormat pf);
-
- int m_Fd;
- int m_Channel;
- std::string m_sDevice;
- std::string m_sDriverName;
- std::vector<Buffer> m_vBuffers;
- bool m_bCameraAvailable;
- int m_v4lPF;
- IntPoint m_ImgSize;
- double m_FrameRate;
-
+ static int checkCamera(int j);
+ static PixelFormat intToPixelFormat(unsigned int pixelformat);
+
+ static void getCameraImageFormats(int fd, CameraInfo* camInfo);
+ static void getCameraControls(int deviceNumber, CameraInfo* camInfo);
+
void setFeature(V4LCID_t v4lFeature, int value);
V4LCID_t getFeatureID(CameraFeature feature) const;
std::string getFeatureName(V4LCID_t v4lFeature);
@@ -88,9 +82,17 @@ private:
typedef std::map<V4LCID_t, unsigned int> FeatureMap;
typedef std::map<int, std::string> FeatureNamesMap;
FeatureMap m_Features;
- // TODO: Feature strings should really be handled by
+ // TODO: Feature strings should really be handled by
// Camera::cameraFeatureToString
- FeatureNamesMap m_FeaturesNames;
+ FeatureNamesMap m_FeaturesNames;
+
+ int m_Fd;
+ int m_Channel;
+ std::string m_sDevice;
+ std::string m_sDriverName;
+ std::vector<Buffer> m_vBuffers;
+ bool m_bCameraAvailable;
+ int m_v4lPF;
};
}
diff --git a/src/imaging/checktracking.cpp b/src/imaging/checktracking.cpp
deleted file mode 100644
index 86c3935..0000000
--- a/src/imaging/checktracking.cpp
+++ /dev/null
@@ -1,101 +0,0 @@
-//
-// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
-//
-// This library is free software; you can redistribute it and/or
-// modify it under the terms of the GNU Lesser General Public
-// License as published by the Free Software Foundation; either
-// version 2 of the License, or (at your option) any later version.
-//
-// This library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-// Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public
-// License along with this library; if not, write to the Free Software
-// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-//
-// Current versions can be found at www.libavg.de
-
-#include "TrackerThread.h"
-#include "FWCamera.h"
-#include "FakeCamera.h"
-
-#include "../graphics/Filtergrayscale.h"
-#include "../graphics/FilterGauss.h"
-#include "../graphics/FilterHighpass.h"
-#include "../graphics/FilterBandpass.h"
-#include "../graphics/FilterFastBandpass.h"
-#include "../graphics/FilterBlur.h"
-
-#include <sstream>
-#include <iostream>
-#include <math.h>
-
-using namespace avg;
-using namespace std;
-
-class TestTracker: public IBlobTarget {
-
-public:
-
-TestTracker()
-{
- m_FrameNum = 0;
- std::vector<std::string> p = std::vector<std::string>();
- for (int i=1; i<4; ++i) {
- stringstream s;
- s << "camimages/img" << i << "_nohistory.png";
- p.push_back(s.str());
- }
- CameraPtr pCam = CameraPtr(new FakeCamera(p));
- for (int i=0; i<NUM_TRACKER_IMAGES; i++) {
- m_pBitmaps[i] = BitmapPtr(new Bitmap(pCam->getImgSize(), I8));
- }
- MutexPtr pMutex(new boost::mutex);
- TrackerConfig Config;
- Config.setParam("/tracker/touch/threshold/@value", "131");
- m_pCmdQ = TrackerThread::CQueuePtr(new TrackerThread::CQueue);
- IntRect ROI(0,0,pCam->getImgSize().x, pCam->getImgSize().y);
- boost::thread Thread(
- TrackerThread(ROI, pCam, m_pBitmaps, pMutex, *m_pCmdQ, this,
- false, Config));
- Thread.join();
-}
-
-virtual ~TestTracker()
-{
-}
-
-virtual void update(BlobVectorPtr pBlobs, BitmapPtr)
-{
- m_FrameNum++;
- cerr << "Frame " << m_FrameNum << endl;
- for (int i=0; i<NUM_TRACKER_IMAGES; i++) {
- stringstream s;
- s << "camimages/img" << m_FrameNum << "_" << i << ".png";
- m_pBitmaps[i]->save(s.str());
- }
-}
-
-private:
- int m_FrameNum;
- TrackerThread::CQueuePtr m_pCmdQ;
- BitmapPtr m_pBitmaps[NUM_TRACKER_IMAGES];
-
-};
-
-void testBlur()
-{
- BitmapPtr pBitmap(new Bitmap("camimages/img1_nohistory.png"));
- FilterGrayscale().applyInPlace(pBitmap);
- BitmapPtr pBmpBandpass = FilterFastBandpass().apply(pBitmap);
- pBmpBandpass->save("camimages/img1_bandpass_test.png");
-}
-
-int main(int argc, char **argv)
-{
-// TestTracker t;
- testBlur();
-}
diff --git a/src/imaging/testfiles/filterwipeborder.png b/src/imaging/testfiles/filterwipeborder.png
deleted file mode 100644
index 58ac84d..0000000
--- a/src/imaging/testfiles/filterwipeborder.png
+++ /dev/null
Binary files differ
diff --git a/src/imaging/testimaging.cpp b/src/imaging/testimaging.cpp
index 85f7b07..9877c12 100644
--- a/src/imaging/testimaging.cpp
+++ b/src/imaging/testimaging.cpp
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
@@ -28,7 +28,7 @@
#include "../graphics/GraphicsTest.h"
#include "../graphics/Filtergrayscale.h"
-#include "../graphics/FilterId.h"
+#include "../graphics/BitmapLoader.h"
#include "../base/TestSuite.h"
#include "../base/Exception.h"
@@ -41,9 +41,10 @@
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
-
#include <sstream>
+#include <glib-object.h>
+
using namespace avg;
using namespace std;
@@ -99,68 +100,71 @@ public:
vector<double> params;
params.push_back(0);
params.push_back(0);
- DeDistort IdentityDistort = DeDistort(DPoint(1,1),
- params, 0.0, 0.0,
- DPoint(0,0), DPoint(1,1));
- TEST(almostEqual(IdentityDistort.transform_point(DPoint(0,0)), DPoint(0,0)));
- TEST(almostEqual(IdentityDistort.transform_point(DPoint(1,2)), DPoint(1,2)));
- TEST(almostEqual(IdentityDistort.transformBlobToScreen(DPoint(0,0)),
- DPoint(0,0)));
- TEST(almostEqual(IdentityDistort.transformBlobToScreen(DPoint(1,2)),
- DPoint(1,2)));
- TEST(almostEqual(IdentityDistort.inverse_transform_point(DPoint(0,0)),
- DPoint(0,0)));
- TEST(almostEqual(IdentityDistort.inverse_transform_point(DPoint(1,2)),
- DPoint(1,2)));
- TEST(almostEqual(IdentityDistort.transformScreenToBlob(DPoint(0,0)),
- DPoint(0,0)));
- TEST(almostEqual(IdentityDistort.transformScreenToBlob(DPoint(1,2)),
- DPoint(1,2)));
- TEST(IdentityDistort.getDisplayArea(DPoint(1280,720)) == DRect(0,0,1280,720));
-
- DeDistort scaler = DeDistort(DPoint(1,1), params, 0, 0.0, DPoint(0,0),
- DPoint(2,2));
- TEST(almostEqual(scaler.transform_point(DPoint(0,0)), DPoint(0,0)));
- TEST(almostEqual(scaler.transformBlobToScreen(DPoint(1,2)), DPoint(2,4)));
- TEST(almostEqual(scaler.inverse_transform_point(DPoint(0,0)), DPoint(0,0)));
- TEST(almostEqual(scaler.transformScreenToBlob(DPoint(1,2)), DPoint(0.5,1)));
-
- DeDistort shifter = DeDistort(DPoint(1,1), params, 0, 0.0, DPoint(1,1),
- DPoint(1,1));
- TEST(almostEqual(shifter.transformBlobToScreen(DPoint(0,0)), DPoint(1,1)));
- TEST(almostEqual(shifter.transformBlobToScreen(DPoint(1,2)), DPoint(2,3)));
- TEST(almostEqual(shifter.transformScreenToBlob(DPoint(0,0)), DPoint(-1,-1)));
- TEST(almostEqual(shifter.transformScreenToBlob(DPoint(1,2)), DPoint(0,1)));
- TEST(shifter.getDisplayArea(DPoint(1,1)) == DRect(-1, -1, 0, 0));
+ DeDistort IdentityDistort = DeDistort(glm::vec2(1,1),
+ params, 0.0, 0.0, glm::dvec2(0,0), glm::dvec2(1,1));
+ TEST(almostEqual(IdentityDistort.transform_point(glm::dvec2(0,0)),
+ glm::dvec2(0,0)));
+ TEST(almostEqual(IdentityDistort.transform_point(glm::dvec2(1,2)),
+ glm::dvec2(1,2)));
+ TEST(almostEqual(IdentityDistort.transformBlobToScreen(glm::dvec2(0,0)),
+ glm::dvec2(0,0)));
+ TEST(almostEqual(IdentityDistort.transformBlobToScreen(glm::dvec2(1,2)),
+ glm::dvec2(1,2)));
+ TEST(almostEqual(IdentityDistort.inverse_transform_point(glm::dvec2(0,0)),
+ glm::dvec2(0,0)));
+ TEST(almostEqual(IdentityDistort.inverse_transform_point(glm::dvec2(1,2)),
+ glm::dvec2(1,2)));
+ TEST(almostEqual(IdentityDistort.transformScreenToBlob(glm::dvec2(0,0)),
+ glm::dvec2(0,0)));
+ TEST(almostEqual(IdentityDistort.transformScreenToBlob(glm::dvec2(1,2)),
+ glm::dvec2(1,2)));
+ TEST(IdentityDistort.getDisplayArea(glm::vec2(1280,720)) == FRect(0,0,1280,720));
+
+ DeDistort scaler = DeDistort(glm::vec2(1,1), params, 0, 0.0, glm::dvec2(0,0),
+ glm::dvec2(2,2));
+ TEST(almostEqual(scaler.transform_point(glm::dvec2(0,0)), glm::dvec2(0,0)));
+ TEST(almostEqual(scaler.transformBlobToScreen(glm::dvec2(1,2)), glm::dvec2(2,4)));
+ TEST(almostEqual(scaler.inverse_transform_point(glm::dvec2(0,0)), glm::dvec2(0,0)));
+ TEST(almostEqual(scaler.transformScreenToBlob(glm::dvec2(1,2)), glm::dvec2(0.5,1)));
+
+ DeDistort shifter = DeDistort(glm::vec2(1,1), params, 0, 0.0, glm::dvec2(1,1),
+ glm::dvec2(1,1));
+ TEST(almostEqual(shifter.transformBlobToScreen(glm::dvec2(0,0)), glm::dvec2(1,1)));
+ TEST(almostEqual(shifter.transformBlobToScreen(glm::dvec2(1,2)), glm::dvec2(2,3)));
+ TEST(almostEqual(shifter.transformScreenToBlob(glm::dvec2(0,0)),
+ glm::dvec2(-1,-1)));
+ TEST(almostEqual(shifter.transformScreenToBlob(glm::dvec2(1,2)), glm::dvec2(0,1)));
+ TEST(shifter.getDisplayArea(glm::vec2(1,1)) == FRect(-1, -1, 0, 0));
vector<double> cubed;
cubed.push_back(0);
cubed.push_back(1);
- DeDistort barreler = DeDistort(DPoint(1,1), cubed, 0, 0.0, DPoint(0,0),
- DPoint(1,1));
+ DeDistort barreler = DeDistort(glm::vec2(1,1), cubed, 0, 0.0, glm::dvec2(0,0),
+ glm::dvec2(1,1));
for (double xp = 0; xp < 10; xp++) {
for(double yp = 0; yp < 10; yp++) {
QUIET_TEST(almostEqual(barreler.inverse_transform_point(
- barreler.transform_point(DPoint(xp,yp))), DPoint(xp,yp)));
+ barreler.transform_point(glm::dvec2(xp,yp))), glm::dvec2(xp,yp)));
}
}
- TEST(almostEqual(barreler.transform_point(DPoint(1,1)), DPoint(1,1)));
+ TEST(almostEqual(barreler.transform_point(glm::dvec2(1,1)), glm::dvec2(1,1)));
- DeDistort rotator = DeDistort(DPoint(1,1), params, 0, M_PI/2, DPoint(0,0),
- DPoint(1,1));
+ DeDistort rotator = DeDistort(glm::vec2(1,1), params, 0, M_PI/2, glm::dvec2(0,0),
+ glm::dvec2(1,1));
for (double xp = 0; xp < 10; xp++) {
for(double yp = 0; yp < 10; yp++) {
QUIET_TEST(almostEqual(rotator.inverse_transform_point(
- rotator.transform_point(DPoint(xp,yp))), DPoint(xp,yp)));
+ rotator.transform_point(glm::dvec2(xp,yp))), glm::dvec2(xp,yp)));
}
}
- DeDistort shifterScaler = DeDistort(DPoint(1,1), params, 0, 0.0, DPoint(1,1),
- DPoint(2,2));
+ DeDistort shifterScaler = DeDistort(glm::vec2(1,1), params, 0, 0.0,
+ glm::dvec2(1,1), glm::dvec2(2,2));
for (double xp = 0; xp < 10; xp++) {
for(double yp = 0; yp < 10; yp++) {
QUIET_TEST(almostEqual(shifterScaler.inverse_transform_point(
- shifterScaler.transform_point(DPoint(xp,yp))), DPoint(xp,yp)));
+ shifterScaler.transform_point(glm::dvec2(xp,yp))),
+ glm::dvec2(xp,yp)));
}
}
}
@@ -197,8 +201,8 @@ public:
TrackerConfig loadedConfig;
loadedConfig.load();
- DPoint scale = loadedConfig.getPointParam("/transform/displayscale/");
- TEST(almostEqual(scale, DPoint(2,2)));
+ glm::vec2 scale = loadedConfig.getPointParam("/transform/displayscale/");
+ TEST(almostEqual(scale, glm::vec2(2,2)));
unlink("avgtrackerrc.bak");
}
unlink("avgtrackerrc");
@@ -222,6 +226,7 @@ public:
int main(int nargs, char** args)
{
ImagingTestSuite Suite;
+ BitmapLoader::init(true);
Suite.runTests();
bool bOK = Suite.isOk();
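
The converted DeDistort tests above rely on the round-trip identity inverse_transform_point(transform_point(p)) == p. A standalone sketch of that check using the glm types from the hunk above (constructor arguments copied from the barrel-distortion test; the epsilon tolerance and function name are assumptions):

    // Round-trip sketch for DeDistort, mirroring the converted test above.
    #include "DeDistort.h"
    #include <glm/glm.hpp>
    #include <vector>
    #include <cmath>
    #include <cassert>

    void checkDeDistortRoundTrip()
    {
        std::vector<double> params;
        params.push_back(0);
        params.push_back(1);   // cubic term -> barrel distortion, as in the test above
        avg::DeDistort barreler(glm::vec2(1,1), params, 0, 0.0,
                glm::dvec2(0,0), glm::dvec2(1,1));
        for (double x = 0; x < 10; ++x) {
            for (double y = 0; y < 10; ++y) {
                glm::dvec2 p = barreler.inverse_transform_point(
                        barreler.transform_point(glm::dvec2(x, y)));
                // The round trip should recover the original point up to rounding error.
                assert(std::fabs(p.x - x) < 1e-5 && std::fabs(p.y - y) < 1e-5);
            }
        }
    }
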
diff --git a/src/imaging/trackerconfigdtd.cpp b/src/imaging/trackerconfigdtd.cpp
index 2d12406..d4185de 100644
--- a/src/imaging/trackerconfigdtd.cpp
+++ b/src/imaging/trackerconfigdtd.cpp
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
diff --git a/src/imaging/trackerconfigdtd.h b/src/imaging/trackerconfigdtd.h
index 20293b2..366b8d4 100644
--- a/src/imaging/trackerconfigdtd.h
+++ b/src/imaging/trackerconfigdtd.h
@@ -1,6 +1,6 @@
//
// libavg - Media Playback Engine.
-// Copyright (C) 2003-2011 Ulrich von Zadow
+// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public