//
// libavg - Media Playback Engine.
// Copyright (C) 2003-2014 Ulrich von Zadow
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
//
// Current versions can be found at www.libavg.de
//

#include "CameraNode.h"
#include "OGLSurface.h"
#include "TypeDefinition.h"

#include "../base/Logger.h"
#include "../base/Exception.h"
#include "../base/ScopeTimer.h"
#include "../base/XMLHelper.h"

#include "../graphics/Filterfill.h"
#include "../graphics/TextureMover.h"
#include "../graphics/GLTexture.h"
#include "../graphics/BitmapLoader.h"

#include "../imaging/Camera.h"
#include "../imaging/FWCamera.h"
#include "../imaging/FakeCamera.h"

#include <iostream>
#include <string>
#ifdef HAVE_UNISTD_H
#include <unistd.h>
#endif

using namespace std;

namespace avg {

void CameraNode::registerType()
{
    TypeDefinition def = TypeDefinition("camera", "rasternode",
            ExportedObject::buildObject<CameraNode>)
        .addArg(Arg<string>("driver", "firewire"))
        .addArg(Arg<string>("device", ""))
        .addArg(Arg<int>("unit", -1))
        .addArg(Arg<bool>("fw800", false))
        .addArg(Arg<float>("framerate", 15))
        .addArg(Arg<int>("capturewidth", 640))
        .addArg(Arg<int>("captureheight", 480))
        .addArg(Arg<string>("pixelformat", "RGB"))
        .addArg(Arg<int>("brightness", -1))
        .addArg(Arg<int>("exposure", -1))
        .addArg(Arg<int>("sharpness", -1))
        .addArg(Arg<int>("saturation", -1))
        .addArg(Arg<int>("camgamma", -1))
        .addArg(Arg<int>("shutter", -1))
        .addArg(Arg<int>("gain", -1))
        .addArg(Arg<int>("strobeduration", -1));
    TypeRegistry::get()->registerType(def);
}

CameraNode::CameraNode(const ArgList& args)
    : m_bIsPlaying(false),
      m_FrameNum(0),
      m_bIsAutoUpdateCameraImage(true),
      m_bNewBmp(false)
{
    args.setMembers(this);
    string sDriver = args.getArgVal<string>("driver");
    string sDevice = args.getArgVal<string>("device");
    int unit = args.getArgVal<int>("unit");
    bool bFW800 = args.getArgVal<bool>("fw800");
    float frameRate = args.getArgVal<float>("framerate");
    int width = args.getArgVal<int>("capturewidth");
    int height = args.getArgVal<int>("captureheight");
    string sPF = args.getArgVal<string>("pixelformat");

    PixelFormat camPF = stringToPixelFormat(sPF);
    if (camPF == NO_PIXELFORMAT) {
        throw Exception(AVG_ERR_INVALID_ARGS,
                "Unknown camera pixel format "+sPF+".");
    }
    // Choose the destination pixel format: color cameras are converted to a
    // 32-bit RGB layout matching the bitmap loader's channel order, grayscale
    // cameras stay 8-bit.
    PixelFormat destPF;
    if (pixelFormatIsColored(camPF)) {
        if (BitmapLoader::get()->isBlueFirst()) {
            destPF = B8G8R8X8;
        } else {
            destPF = R8G8B8X8;
        }
    } else {
        destPF = I8;
    }
//    cerr << "CameraNode ctor: " << camPF << "-->" << destPF << endl;
    m_pCamera = createCamera(sDriver, sDevice, unit, bFW800, IntPoint(width, height),
            camPF, destPF, frameRate);
    AVG_TRACE(Logger::category::CONFIG, Logger::severity::INFO,
            "Got Camera " << m_pCamera->getDevice() << " from driver: "
            << m_pCamera->getDriverName());

    m_pCamera->setFeature(CAM_FEATURE_BRIGHTNESS, args.getArgVal<int>("brightness"));
    m_pCamera->setFeature(CAM_FEATURE_EXPOSURE, args.getArgVal<int>("exposure"));
    m_pCamera->setFeature(CAM_FEATURE_SHARPNESS, args.getArgVal<int>("sharpness"));
    m_pCamera->setFeature(CAM_FEATURE_SATURATION, args.getArgVal<int>("saturation"));
    m_pCamera->setFeature(CAM_FEATURE_GAMMA, args.getArgVal<int>("camgamma"));
    m_pCamera->setFeature(CAM_FEATURE_SHUTTER, args.getArgVal<int>("shutter"));
    m_pCamera->setFeature(CAM_FEATURE_GAIN, args.getArgVal<int>("gain"));
    m_pCamera->setFeature(CAM_FEATURE_STROBE_DURATION,
            args.getArgVal<int>("strobeduration"));
}

CameraNode::~CameraNode()
{
    m_pCamera = CameraPtr();
}

void CameraNode::connectDisplay()
{
    RasterNode::connectDisplay();
    if (m_bIsPlaying) {
        open();
    }
}

void CameraNode::connect(CanvasPtr pCanvas)
{
    if (!m_pCamera) {
        throw Exception(AVG_ERR_UNSUPPORTED,
                "Can't use camera node after disconnect(True).");
    }
    RasterNode::connect(pCanvas);
}

void CameraNode::disconnect(bool bKill)
{
    if (bKill) {
        m_pCamera = CameraPtr();
    }
    RasterNode::disconnect(bKill);
}

void CameraNode::play()
{
    if (getState() == NS_CANRENDER) {
        open();
    }
    m_bIsPlaying = true;
}

void CameraNode::stop()
{
    m_bIsPlaying = false;
}

bool CameraNode::isAvailable()
{
    // A FakeCamera is substituted when no real device could be opened.
    if (!m_pCamera || boost::dynamic_pointer_cast<FakeCamera>(m_pCamera)) {
        return false;
    } else {
        return true;
    }
}

int CameraNode::getBrightness() const
{
    return getFeature(CAM_FEATURE_BRIGHTNESS);
}

void CameraNode::setBrightness(int value)
{
    setFeature(CAM_FEATURE_BRIGHTNESS, value);
}

int CameraNode::getSharpness() const
{
    return getFeature(CAM_FEATURE_SHARPNESS);
}

void CameraNode::setSharpness(int value)
{
    setFeature(CAM_FEATURE_SHARPNESS, value);
}

int CameraNode::getSaturation() const
{
    return getFeature(CAM_FEATURE_SATURATION);
}

void CameraNode::setSaturation(int value)
{
    setFeature(CAM_FEATURE_SATURATION, value);
}

int CameraNode::getCamGamma() const
{
    return getFeature(CAM_FEATURE_GAMMA);
}

void CameraNode::setCamGamma(int value)
{
    setFeature(CAM_FEATURE_GAMMA, value);
}

int CameraNode::getShutter() const
{
    return getFeature(CAM_FEATURE_SHUTTER);
}

void CameraNode::setShutter(int value)
{
    setFeature(CAM_FEATURE_SHUTTER, value);
}

int CameraNode::getGain() const
{
    return getFeature(CAM_FEATURE_GAIN);
}

void CameraNode::setGain(int value)
{
    setFeature(CAM_FEATURE_GAIN, value);
}

int CameraNode::getWhitebalanceU() const
{
    return m_pCamera->getWhitebalanceU();
}

int CameraNode::getWhitebalanceV() const
{
    return m_pCamera->getWhitebalanceV();
}

void CameraNode::setWhitebalance(int u, int v)
{
    m_pCamera->setWhitebalance(u, v);
}

void CameraNode::doOneShotWhitebalance()
{
    // The first line turns off auto white balance.
    m_pCamera->setWhitebalance(m_pCamera->getWhitebalanceU(),
            m_pCamera->getWhitebalanceV());
    m_pCamera->setFeatureOneShot(CAM_FEATURE_WHITE_BALANCE);
}

int CameraNode::getStrobeDuration() const
{
    return getFeature(CAM_FEATURE_STROBE_DURATION);
}

void CameraNode::setStrobeDuration(int value)
{
    setFeature(CAM_FEATURE_STROBE_DURATION, value);
}

IntPoint CameraNode::getMediaSize()
{
    return m_pCamera->getImgSize();
}

BitmapPtr CameraNode::getBitmap()
{
    if (m_pCurBmp) {
        return m_pCurBmp;
    } else {
        throw Exception(AVG_ERR_CAMERA_NONFATAL,
                "CameraNode.getBitmap: No camera image available.");
    }
}

CamerasInfosVector CameraNode::getCamerasInfos()
{
    CamerasInfosVector camInfos = avg::getCamerasInfos();
    return camInfos;
}

void CameraNode::resetFirewireBus()
{
    FWCamera::resetBus();
}

float CameraNode::getFPS() const
{
    return m_pCamera->getFrameRate();
}

void CameraNode::open()
{
    m_pCamera->startCapture();
    setViewport(-32767, -32767, -32767, -32767);
    PixelFormat pf = getPixelFormat();
    IntPoint size = getMediaSize();
    bool bMipmap = getMaterial().getUseMipmaps();
    m_pTex = GLTexturePtr(new GLTexture(size, pf, bMipmap));
    m_pTex->enableStreaming();
    getSurface()->create(pf, m_pTex);
    newSurface();

    // Fill the texture with black until the first camera frame arrives.
    BitmapPtr pBmp = m_pTex->lockStreamingBmp();
    if (pf == B8G8R8X8 || pf == B8G8R8A8) {
        FilterFill<Pixel32> Filter(Pixel32(0,0,0,255));
        Filter.applyInPlace(pBmp);
    } else if (pf == I8) {
        FilterFill<Pixel8> Filter(0);
        Filter.applyInPlace(pBmp);
    }
    m_pTex->unlockStreamingBmp(true);
    setupFX(true);
}

int CameraNode::getFeature(CameraFeature feature) const
{
    return m_pCamera->getFeature(feature);
}

void CameraNode::setFeature(CameraFeature feature, int value)
{
    m_pCamera->setFeature(feature, value);
}

int CameraNode::getFrameNum() const
{
    return m_FrameNum;
}

static ProfilingZoneID CameraFetchImage("Camera fetch image");
static ProfilingZoneID CameraDownloadProfilingZone("Camera tex download");

void CameraNode::preRender(const VertexArrayPtr& pVA, bool bIsParentActive,
        float parentEffectiveOpacity)
{
    Node::preRender(pVA, bIsParentActive, parentEffectiveOpacity);
    if (isAutoUpdateCameraImage()) {
        ScopeTimer Timer(CameraFetchImage);
        updateToLatestCameraImage();
    }
    if (m_bNewBmp && isVisible()) {
        // Upload the newest camera image into the streaming texture.
        ScopeTimer Timer(CameraDownloadProfilingZone);
        m_FrameNum++;
        BitmapPtr pBmp = m_pTex->lockStreamingBmp();
        if (pBmp->getPixelFormat() != m_pCurBmp->getPixelFormat()) {
            cerr << "Surface: " << pBmp->getPixelFormat() << ", CamDest: "
                    << m_pCurBmp->getPixelFormat() << endl;
        }
        AVG_ASSERT(pBmp->getPixelFormat() == m_pCurBmp->getPixelFormat());
        pBmp->copyPixels(*m_pCurBmp);
        m_pTex->unlockStreamingBmp(true);
        renderFX(getSize(), Pixel32(255, 255, 255, 255), false);
        m_bNewBmp = false;
    }
    calcVertexArray(pVA);
}

static ProfilingZoneID CameraProfilingZone("Camera::render");

void CameraNode::render()
{
    if (m_bIsPlaying) {
        ScopeTimer Timer(CameraProfilingZone);
        blt32(getTransform(), getSize(), getEffectiveOpacity(), getBlendMode());
    }
}

PixelFormat CameraNode::getPixelFormat()
{
    return m_pCamera->getDestPF();
}

void CameraNode::updateToLatestCameraImage()
{
    // Drain the camera's frame queue so only the most recent image is kept.
    BitmapPtr pTmpBmp = m_pCamera->getImage(false);
    while (pTmpBmp) {
        m_bNewBmp = true;
        m_pCurBmp = pTmpBmp;
        pTmpBmp = m_pCamera->getImage(false);
    }
}

void CameraNode::updateCameraImage()
{
    if (!isAutoUpdateCameraImage()) {
        m_pCurBmp = m_pCamera->getImage(false);
        blt32(getTransform(), getSize(), getEffectiveOpacity(), getBlendMode());
    }
}

bool CameraNode::isAutoUpdateCameraImage() const
{
    return m_bIsAutoUpdateCameraImage;
}

void CameraNode::setAutoUpdateCameraImage(bool bVal)
{
    m_bIsAutoUpdateCameraImage = bVal;
}

bool CameraNode::isImageAvailable() const
{
    return m_pCurBmp.get() != NULL;
}

}