root/OpenSceneGraph/trunk/src/osgViewer/View.cpp @ 13525

Revision 13525, 81.2 kB (checked in by robert, 9 days ago)

Added shaders to support experimental shader-based displacement mapping technique osgTerrain::ShaderTerrain.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
1/* -*-c++-*- OpenSceneGraph - Copyright (C) 1998-2006 Robert Osfield
2 *
3 * This library is open source and may be redistributed and/or modified under
4 * the terms of the OpenSceneGraph Public License (OSGPL) version 0.0 or
5 * (at your option) any later version.  The full license is in LICENSE file
6 * included with this distribution, and on the openscenegraph.org website.
7 *
8 * This library is distributed in the hope that it will be useful,
9 * but WITHOUT ANY WARRANTY; without even the implied warranty of
10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
11 * OpenSceneGraph Public License for more details.
12*/
13
14#include <osgViewer/Renderer>
15#include <osgViewer/View>
16#include <osgViewer/GraphicsWindow>
17
18#include <osg/io_utils>
19
20#include <osg/TextureCubeMap>
21#include <osg/TextureRectangle>
22#include <osg/Texture1D>
23#include <osg/TexMat>
24#include <osg/Stencil>
25#include <osg/PolygonStipple>
26#include <osg/ValueObject>
27
28#include <osgUtil/Optimizer>
29#include <osgUtil/ShaderGen>
30#include <osgUtil/IntersectionVisitor>
31
32#include <osgDB/ReadFile>
33#include <osgDB/WriteFile>
34
35// view configurations.
36#include <osgViewer/config/AcrossAllScreens>
37#include <osgViewer/config/SingleWindow>
38#include <osgViewer/config/SingleScreen>
39#include <osgViewer/config/SphericalDisplay>
40#include <osgViewer/config/PanoramicSphericalDisplay>
41#include <osgViewer/config/WoWVxDisplay>
42
43
44#include <iterator>
45
46using namespace osgViewer;
47
48osg::DisplaySettings* ViewConfig::getActiveDisplaySetting(osgViewer::View& view) const
49{
50    return view.getDisplaySettings()  ? view.getDisplaySettings() : osg::DisplaySettings::instance().get();
51}
52
53class CollectedCoordinateSystemNodesVisitor : public osg::NodeVisitor
54{
55public:
56
57    CollectedCoordinateSystemNodesVisitor():
58        NodeVisitor(osg::NodeVisitor::TRAVERSE_ACTIVE_CHILDREN) {}
59
60    META_NodeVisitor("osgViewer","CollectedCoordinateSystemNodesVisitor")
61
62    virtual void apply(osg::Node& node)
63    {
64        traverse(node);
65    }
66
67    virtual void apply(osg::CoordinateSystemNode& node)
68    {
69        if (_pathToCoordinateSystemNode.empty())
70        {
71            OSG_DEBUG<<"Found CoordinateSystemNode node"<<std::endl;
72            OSG_DEBUG<<"     CoordinateSystem = "<<node.getCoordinateSystem()<<std::endl;
73            _pathToCoordinateSystemNode = getNodePath();
74        }
75        else
76        {
77            OSG_DEBUG<<"Found additional CoordinateSystemNode node, but ignoring"<<std::endl;
78            OSG_DEBUG<<"     CoordinateSystem = "<<node.getCoordinateSystem()<<std::endl;
79        }
80        traverse(node);
81    }
82
83    osg::NodePath _pathToCoordinateSystemNode;
84};
85
86
87/** callback class to use to allow matrix manipulators to query the application for the local coordinate frame.*/
88class ViewerCoordinateFrameCallback : public osgGA::CameraManipulator::CoordinateFrameCallback
89{
90public:
91
92    ViewerCoordinateFrameCallback(osgViewer::View* view):
93        _view(view) {}
94
95    virtual osg::CoordinateFrame getCoordinateFrame(const osg::Vec3d& position) const
96    {
97        OSG_DEBUG<<"getCoordinateFrame("<<position<<")"<<std::endl;
98
99        osg::NodePath tmpPath = _view->getCoordinateSystemNodePath();
100
101        if (!tmpPath.empty())
102        {
103            osg::Matrixd coordinateFrame;
104
105            osg::CoordinateSystemNode* csn = dynamic_cast<osg::CoordinateSystemNode*>(tmpPath.back());
106            if (csn)
107            {
108                osg::Vec3 local_position = position*osg::computeWorldToLocal(tmpPath);
109
110                // get the coordinate frame in world coords.
111                coordinateFrame = csn->computeLocalCoordinateFrame(local_position)* osg::computeLocalToWorld(tmpPath);
112
113                // keep the position of the coordinate frame to reapply after rescale.
114                osg::Vec3d pos = coordinateFrame.getTrans();
115
116                // compensate for any scaling, so that the coordinate frame is a unit size
117                osg::Vec3d x(1.0,0.0,0.0);
118                osg::Vec3d y(0.0,1.0,0.0);
119                osg::Vec3d z(0.0,0.0,1.0);
120                x = osg::Matrixd::transform3x3(x,coordinateFrame);
121                y = osg::Matrixd::transform3x3(y,coordinateFrame);
122                z = osg::Matrixd::transform3x3(z,coordinateFrame);
123                coordinateFrame.preMultScale(osg::Vec3d(1.0/x.length(),1.0/y.length(),1.0/z.length()));
124
125                // reapply the position.
126                coordinateFrame.setTrans(pos);
127
128                OSG_DEBUG<<"csn->computeLocalCoordinateFrame(position)* osg::computeLocalToWorld(tmpPath)"<<coordinateFrame<<std::endl;
129
130            }
131            else
132            {
133                OSG_DEBUG<<"osg::computeLocalToWorld(tmpPath)"<<std::endl;
134                coordinateFrame =  osg::computeLocalToWorld(tmpPath);
135            }
136            return coordinateFrame;
137        }
138        else
139        {
140            OSG_DEBUG<<"   no coordinate system found, using default orientation"<<std::endl;
141            return osg::Matrixd::translate(position);
142        }
143    }
144
145protected:
146    virtual ~ViewerCoordinateFrameCallback() {}
147
148    osg::observer_ptr<osgViewer::View> _view;
149};
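// Note: the callback above rescales the axes of the computed coordinate frame back to unit
// length (while keeping its translation) so that camera manipulators receive a unit-sized
// frame even when the CoordinateSystemNode sits beneath scaled transforms.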
150
151
152View::View():
153    _fusionDistanceMode(osgUtil::SceneView::PROPORTIONAL_TO_SCREEN_DISTANCE),
154    _fusionDistanceValue(1.0f)
155{
156    // OSG_NOTICE<<"Constructing osgViewer::View"<<std::endl;
157
158    _startTick = 0;
159
160    _frameStamp = new osg::FrameStamp;
161    _frameStamp->setFrameNumber(0);
162    _frameStamp->setReferenceTime(0);
163    _frameStamp->setSimulationTime(0);
164
165    _scene = new Scene;
166
167    // make sure View is safe to reference multi-threaded.
168    setThreadSafeRefUnref(true);
169
170    // need to attach a Renderer to the master camera which has been default constructed
171    getCamera()->setRenderer(createRenderer(getCamera()));
172
173    setEventQueue(new osgGA::EventQueue);
174
175    setStats(new osg::Stats("View"));
176}
177
178
179View::View(const osgViewer::View& view, const osg::CopyOp& copyop):
180    osg::Object(true),
181    osg::View(view,copyop),
182    osgGA::GUIActionAdapter(),
183    _startTick(0),
184    _fusionDistanceMode(view._fusionDistanceMode),
185    _fusionDistanceValue(view._fusionDistanceValue)
186{
187    _scene = new Scene;
188
189    // need to attach a Renderer to the master camera which has been default constructed
190    getCamera()->setRenderer(createRenderer(getCamera()));
191
192    setEventQueue(new osgGA::EventQueue);
193
194    setStats(new osg::Stats("View"));
195}
196
197View::~View()
198{
199    OSG_INFO<<"Destructing osgViewer::View"<<std::endl;
200}
201
202void View::take(osg::View& rhs)
203{
204    osg::View::take(rhs);
205
206#if 1
207    osgViewer::View* rhs_osgViewer = dynamic_cast<osgViewer::View*>(&rhs);
208    if (rhs_osgViewer)
209    {
210
211        // copy across rhs
212        _startTick = rhs_osgViewer->_startTick;
213        _frameStamp = rhs_osgViewer->_frameStamp;
214
215        if (rhs_osgViewer->getSceneData())
216        {
217            _scene = rhs_osgViewer->_scene;
218        }
219
220        if (rhs_osgViewer->_cameraManipulator.valid())
221        {
222            _cameraManipulator = rhs_osgViewer->_cameraManipulator;
223        }
224
225        _eventHandlers.insert(_eventHandlers.end(), rhs_osgViewer->_eventHandlers.begin(), rhs_osgViewer->_eventHandlers.end());
226
227        _coordinateSystemNodePath = rhs_osgViewer->_coordinateSystemNodePath;
228
229        _displaySettings = rhs_osgViewer->_displaySettings;
230        _fusionDistanceMode = rhs_osgViewer->_fusionDistanceMode;
231        _fusionDistanceValue = rhs_osgViewer->_fusionDistanceValue;
232
233
234        // clear rhs
235        rhs_osgViewer->_frameStamp = 0;
236        rhs_osgViewer->_scene = 0;
237        rhs_osgViewer->_cameraManipulator = 0;
238        rhs_osgViewer->_eventHandlers.clear();
239
240        rhs_osgViewer->_coordinateSystemNodePath.clearNodePath();
241
242        rhs_osgViewer->_displaySettings = 0;
243    }
244#endif
245    computeActiveCoordinateSystemNodePath();
246    assignSceneDataToCameras();
247}
248
249osg::GraphicsOperation* View::createRenderer(osg::Camera* camera)
250{
251    Renderer* render = new Renderer(camera);
252    camera->setStats(new osg::Stats("Camera"));
253    return render;
254}
255
256
257void View::init()
258{
259    OSG_INFO<<"View::init()"<<std::endl;
260
261    osg::ref_ptr<osgGA::GUIEventAdapter> initEvent = _eventQueue->createEvent();
262    initEvent->setEventType(osgGA::GUIEventAdapter::FRAME);
263
264    if (_cameraManipulator.valid())
265    {
266        _cameraManipulator->init(*initEvent, *this);
267    }
268}
269
270void View::setStartTick(osg::Timer_t tick)
271{
272    _startTick = tick;
273   
274    for(Devices::iterator eitr = _eventSources.begin();
275        eitr != _eventSources.end();
276        ++eitr)
277    {
278        (*eitr)->getEventQueue()->setStartTick(_startTick);
279    }
280}
281
282void View::setSceneData(osg::Node* node)
283{
284    if (node==_scene->getSceneData()) return;
285
286    osg::ref_ptr<Scene> scene = Scene::getScene(node);
287
288    if (scene)
289    {
290        OSG_INFO<<"View::setSceneData() Sharing scene "<<scene.get()<<std::endl;
291        _scene = scene;
292    }
293    else
294    {
295        if (_scene->referenceCount()!=1)
296        {
297            // we are not the only reference to the Scene so we cannot reuse it.
298            _scene = new Scene;
299            OSG_INFO<<"View::setSceneData() Allocating new scene "<<_scene.get()<<std::endl;
300        }
301        else
302        {
303            OSG_INFO<<"View::setSceneData() Reusing existing scene "<<_scene.get()<<std::endl;
304        }
305
306        _scene->setSceneData(node);
307    }
308
309    if (getSceneData())
310    {
311        #if defined(OSG_GLES2_AVAILABLE)
312            osgUtil::ShaderGenVisitor sgv;
313            getSceneData()->getOrCreateStateSet();
314            getSceneData()->accept(sgv);
315        #endif
316
317        // now make sure the scene graph is set up with the correct DataVariance to protect the dynamic elements of
318        // the scene graph from being run in parallel.
319        osgUtil::Optimizer::StaticObjectDetectionVisitor sodv;
320        getSceneData()->accept(sodv);
321
322        // make sure that existing scene graph objects are allocated with thread safe ref/unref
323        if (getViewerBase() &&
324            getViewerBase()->getThreadingModel()!=ViewerBase::SingleThreaded)
325        {
326            getSceneData()->setThreadSafeRefUnref(true);
327        }
328
329        // update the scene graph so that it has enough GL object buffer memory for the graphics contexts that will be using it.
330        getSceneData()->resizeGLObjectBuffers(osg::DisplaySettings::instance()->getMaxNumberOfGraphicsContexts());
331    }
332
333    computeActiveCoordinateSystemNodePath();
334
335    assignSceneDataToCameras();
336}
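// Usage sketch (illustrative only): a typical application drives setSceneData() through an
// osgViewer::Viewer, which lets the passes above (ShaderGen for GLES2, static object
// detection, thread-safe ref/unref, GL object buffer resizing) run before the first frame:
//
//     #include <osgViewer/Viewer>
//     #include <osgDB/ReadFile>
//
//     osgViewer::Viewer viewer;
//     osg::ref_ptr<osg::Node> model = osgDB::readNodeFile("cow.osgt"); // any loadable model
//     if (model.valid()) viewer.setSceneData(model.get());
//     viewer.run();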
337
338void View::setDatabasePager(osgDB::DatabasePager* dp)
339{
340    _scene->setDatabasePager(dp);
341}
342
343osgDB::DatabasePager* View::getDatabasePager()
344{
345    return _scene->getDatabasePager();
346}
347
348const osgDB::DatabasePager* View::getDatabasePager() const
349{
350    return _scene->getDatabasePager();
351}
352
353
354void View::setImagePager(osgDB::ImagePager* dp)
355{
356    _scene->setImagePager(dp);
357}
358
359osgDB::ImagePager* View::getImagePager()
360{
361    return _scene->getImagePager();
362}
363
364const osgDB::ImagePager* View::getImagePager() const
365{
366    return _scene->getImagePager();
367}
368
369
370void View::setCameraManipulator(osgGA::CameraManipulator* manipulator, bool resetPosition)
371{
372    _cameraManipulator = manipulator;
373
374    if (_cameraManipulator.valid())
375    {
376        _cameraManipulator->setCoordinateFrameCallback(new ViewerCoordinateFrameCallback(this));
377
378        if (getSceneData()) _cameraManipulator->setNode(getSceneData());
379
380        if (resetPosition)
381        {
382            osg::ref_ptr<osgGA::GUIEventAdapter> dummyEvent = _eventQueue->createEvent();
383            _cameraManipulator->home(*dummyEvent, *this);
384        }
385    }
386}
387
388void View::home()
389{
390    if (_cameraManipulator.valid())
391    {
392        osg::ref_ptr<osgGA::GUIEventAdapter> dummyEvent = _eventQueue->createEvent();
393        _cameraManipulator->home(*dummyEvent, *this);
394    }
395}
396
397
398void View::addEventHandler(osgGA::GUIEventHandler* eventHandler)
399{
400    EventHandlers::iterator itr = std::find(_eventHandlers.begin(), _eventHandlers.end(), eventHandler);
401    if (itr == _eventHandlers.end())
402    {
403        _eventHandlers.push_back(eventHandler);
404    }
405}
406
407void View::removeEventHandler(osgGA::GUIEventHandler* eventHandler)
408{
409    EventHandlers::iterator itr = std::find(_eventHandlers.begin(), _eventHandlers.end(), eventHandler);
410    if (itr != _eventHandlers.end())
411    {
412        _eventHandlers.erase(itr);
413    }
414}
415
416void View::setCoordinateSystemNodePath(const osg::NodePath& nodePath)
417{
418    _coordinateSystemNodePath.setNodePath(nodePath);
419}
420
421osg::NodePath View::getCoordinateSystemNodePath() const
422{
423    osg::NodePath nodePath;
424    _coordinateSystemNodePath.getNodePath(nodePath);
425    return nodePath;
426}
427
428void View::computeActiveCoordinateSystemNodePath()
429{
430    // now search for CoordinateSystemNodes that we want to track.
431    osg::Node* subgraph = getSceneData();
432
433    if (subgraph)
434    {
435
436        CollectedCoordinateSystemNodesVisitor ccsnv;
437        subgraph->accept(ccsnv);
438
439        if (!ccsnv._pathToCoordinateSystemNode.empty())
440        {
441           setCoordinateSystemNodePath(ccsnv._pathToCoordinateSystemNode);
442           return;
443        }
444    }
445
446    // otherwise no node path found so reset to empty.
447    setCoordinateSystemNodePath(osg::NodePath());
448}
449
450
451void View::apply(ViewConfig* config)
452{
453    if (config)
454    {
455        OSG_INFO<<"Applying osgViewer::ViewConfig : "<<config->className()<<std::endl;
456        config->configure(*this);
457    }
458    _lastAppliedViewConfig = config;
459}
460
461void View::setUpViewAcrossAllScreens()
462{
463    apply(new osgViewer::AcrossAllScreens());
464}
465
466void View::setUpViewInWindow(int x, int y, int width, int height, unsigned int screenNum)
467{
468    apply(new osgViewer::SingleWindow(x, y, width, height, screenNum));
469}
470
471void View::setUpViewOnSingleScreen(unsigned int screenNum)
472{
473    apply(new osgViewer::SingleScreen(screenNum));
474}
475
476void View::setUpViewFor3DSphericalDisplay(double radius, double collar, unsigned int screenNum, osg::Image* intensityMap, const osg::Matrixd& projectorMatrix)
477{
478    apply(new osgViewer::SphericalDisplay(radius, collar, screenNum, intensityMap, projectorMatrix));
479}
480
481void View::setUpViewForPanoramicSphericalDisplay(double radius, double collar, unsigned int screenNum, osg::Image* intensityMap, const osg::Matrixd& projectorMatrix)
482{
483    apply(new osgViewer::PanoramicSphericalDisplay(radius, collar, screenNum, intensityMap, projectorMatrix));
484}
485
486void View::setUpViewForWoWVxDisplay(unsigned int screenNum, unsigned char wow_content, unsigned char wow_factor, unsigned char wow_offset, float wow_disparity_Zd, float wow_disparity_vz, float wow_disparity_M, float wow_disparity_C)
487{
488    apply(new osgViewer::WoWVxDisplay(screenNum, wow_content, wow_factor, wow_offset, wow_disparity_Zd,wow_disparity_vz, wow_disparity_M, wow_disparity_C));
489}
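// Usage sketch (illustrative only): the setUpView*() convenience methods above are thin
// wrappers around View::apply(ViewConfig*), so an application can equivalently write:
//
//     viewer.apply(new osgViewer::SingleWindow(100, 100, 800, 600, 0 /*screenNum*/));
//
// or pass any of the other config classes included at the top of this file
// (AcrossAllScreens, SingleScreen, SphericalDisplay, PanoramicSphericalDisplay, WoWVxDisplay).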
490
491DepthPartitionSettings::DepthPartitionSettings(DepthMode mode):
492    _mode(mode),
493    _zNear(1.0), _zMid(5.0), _zFar(1000.0)
494{}
495
496bool DepthPartitionSettings::getDepthRange(osg::View& view, unsigned int partition, double& zNear, double& zFar)
497{
498    switch(_mode)
499    {
500        case(FIXED_RANGE):
501        {
502            if (partition==0)
503            {
504                zNear = _zNear;
505                zFar = _zMid;
506                return true;
507            }
508            else if (partition==1)
509            {
510                zNear = _zMid;
511                zFar = _zFar;
512                return true;
513            }
514            return false;
515        }
516        case(BOUNDING_VOLUME):
517        {
518            osgViewer::View* view_withSceneData = dynamic_cast<osgViewer::View*>(&view);
519            const osg::Node* node = view_withSceneData ? view_withSceneData->getSceneData() : 0;
520            if (!node) return false;
521
522            const osg::Camera* masterCamera = view.getCamera();
523            if (!masterCamera) return false;
524
525            osg::BoundingSphere bs = node->getBound();
526            const osg::Matrixd& viewMatrix = masterCamera->getViewMatrix();
527            //osg::Matrixd& projectionMatrix = masterCamera->getProjectionMatrix();
528
529            osg::Vec3d lookVectorInWorldCoords = osg::Matrixd::transform3x3(viewMatrix,osg::Vec3d(0.0,0.0,-1.0));
530            lookVectorInWorldCoords.normalize();
531
532            osg::Vec3d nearPointInWorldCoords = bs.center() - lookVectorInWorldCoords*bs.radius();
533            osg::Vec3d farPointInWorldCoords = bs.center() + lookVectorInWorldCoords*bs.radius();
534
535            osg::Vec3d nearPointInEyeCoords = nearPointInWorldCoords * viewMatrix;
536            osg::Vec3d farPointInEyeCoords = farPointInWorldCoords * viewMatrix;
537
538#if 0
539            OSG_NOTICE<<std::endl;
540            OSG_NOTICE<<"viewMatrix = "<<viewMatrix<<std::endl;
541            OSG_NOTICE<<"lookVectorInWorldCoords = "<<lookVectorInWorldCoords<<std::endl;
542            OSG_NOTICE<<"nearPointInWorldCoords = "<<nearPointInWorldCoords<<std::endl;
543            OSG_NOTICE<<"farPointInWorldCoords = "<<farPointInWorldCoords<<std::endl;
544            OSG_NOTICE<<"nearPointInEyeCoords = "<<nearPointInEyeCoords<<std::endl;
545            OSG_NOTICE<<"farPointInEyeCoords = "<<farPointInEyeCoords<<std::endl;
546#endif
547            double minZNearRatio = 0.00001;
548
549
550            if (masterCamera->getDisplaySettings())
551            {
552                OSG_NOTICE<<"Has display settings"<<std::endl;
553            }
554
555            double scene_zNear = -nearPointInEyeCoords.z();
556            double scene_zFar = -farPointInEyeCoords.z();
557            if (scene_zNear<=0.0) scene_zNear = minZNearRatio * scene_zFar;
558
559            double scene_zMid = sqrt(scene_zFar*scene_zNear);
560
561#if 0
562            OSG_NOTICE<<"scene_zNear = "<<scene_zNear<<std::endl;
563            OSG_NOTICE<<"scene_zMid = "<<scene_zMid<<std::endl;
564            OSG_NOTICE<<"scene_zFar = "<<scene_zFar<<std::endl;
565#endif
566            if (partition==0)
567            {
568                zNear = scene_zNear;
569                zFar = scene_zMid;
570                return true;
571            }
572            else if (partition==1)
573            {
574                zNear = scene_zMid;
575                zFar = scene_zFar;
576                return true;
577            }
578
579            return false;
580        }
581        default: return false;
582    }
583}
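// Note on the BOUNDING_VOLUME mode above: the split point is the geometric mean of the
// scene's near and far eye-space distances, scene_zMid = sqrt(scene_zNear * scene_zFar),
// which divides the available depth-buffer precision roughly evenly between the two
// partitions.  For example, scene_zNear = 1 and scene_zFar = 1e6 give zMid = 1000, so the
// near camera covers [1, 1000] and the far camera covers [1000, 1e6].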
584
585namespace osgDepthPartition {
586
587struct MyUpdateSlaveCallback : public osg::View::Slave::UpdateSlaveCallback
588{
589    MyUpdateSlaveCallback(DepthPartitionSettings* dps, unsigned int partition):_dps(dps), _partition(partition) {}
590
591    virtual void updateSlave(osg::View& view, osg::View::Slave& slave)
592    {
593        slave.updateSlaveImplementation(view);
594
595        if (!_dps) return;
596
597        osg::Camera* camera = slave._camera.get();
598
599        double computed_zNear;
600        double computed_zFar;
601        if (!_dps->getDepthRange(view, _partition, computed_zNear, computed_zFar))
602        {
603            OSG_NOTICE<<"Switching off Camera "<<camera<<std::endl;
604            camera->setNodeMask(0x0);
605            return;
606        }
607        else
608        {
609            camera->setNodeMask(0xffffff);
610        }
611
612        if (camera->getProjectionMatrix()(0,3)==0.0 &&
613            camera->getProjectionMatrix()(1,3)==0.0 &&
614            camera->getProjectionMatrix()(2,3)==0.0)
615        {
616            double left, right, bottom, top, zNear, zFar;
617            camera->getProjectionMatrixAsOrtho(left, right, bottom, top, zNear, zFar);
618            camera->setProjectionMatrixAsOrtho(left, right, bottom, top, computed_zNear, computed_zFar);
619        }
620        else
621        {
622            double left, right, bottom, top, zNear, zFar;
623            camera->getProjectionMatrixAsFrustum(left, right, bottom, top, zNear, zFar);
624
625            double nr = computed_zNear / zNear;
626            camera->setProjectionMatrixAsFrustum(left * nr, right * nr, bottom * nr, top * nr, computed_zNear, computed_zFar);
627        }
628    }
629
630    osg::ref_ptr<DepthPartitionSettings> _dps;
631    unsigned int _partition;
632};
633
634
635typedef std::list< osg::ref_ptr<osg::Camera> > Cameras;
636
637Cameras getActiveCameras(osg::View& view)
638{
639    Cameras activeCameras;
640
641    if (view.getCamera() && view.getCamera()->getGraphicsContext())
642    {
643        activeCameras.push_back(view.getCamera());
644    }
645
646    for(unsigned int i=0; i<view.getNumSlaves(); ++i)
647    {
648        osg::View::Slave& slave = view.getSlave(i);
649        if (slave._camera.valid() && slave._camera->getGraphicsContext())
650        {
651            activeCameras.push_back(slave._camera.get());
652        }
653    }
654    return activeCameras;
655}
656
657}
658
659bool View::setUpDepthPartitionForCamera(osg::Camera* cameraToPartition, DepthPartitionSettings* incoming_dps)
660{
661    osg::ref_ptr<osg::GraphicsContext> context = cameraToPartition->getGraphicsContext();
662    if (!context) return false;
663
664    osg::ref_ptr<osg::Viewport> viewport = cameraToPartition->getViewport();
665    if (!viewport) return false;
666
667    osg::ref_ptr<DepthPartitionSettings> dps = incoming_dps;
668    if (!dps) dps = new DepthPartitionSettings;
669
670    bool useMastersSceneData = true;
671    osg::Matrixd projectionOffset;
672    osg::Matrixd viewOffset;
673
674    if (getCamera()==cameraToPartition)
675    {
676        // replace main camera with depth partition cameras
677        OSG_INFO<<"View::setUpDepthPartitionForCamera(..) Replacing main Camera"<<std::endl;
678    }
679    else
680    {
681        unsigned int i = findSlaveIndexForCamera(cameraToPartition);
682        if (i>=getNumSlaves()) return false;
683
684        osg::View::Slave& slave = getSlave(i);
685
686        useMastersSceneData = slave._useMastersSceneData;
687        projectionOffset = slave._projectionOffset;
688        viewOffset = slave._viewOffset;
689
690        OSG_NOTICE<<"View::setUpDepthPartitionForCamera(..) Replacing slave Camera "<<i<<std::endl;
691        removeSlave(i);
692    }
693
694    cameraToPartition->setGraphicsContext(0);
695    cameraToPartition->setViewport(0);
696
697    // far camera
698    {
699        osg::ref_ptr<osg::Camera> camera = new osg::Camera;
700        camera->setGraphicsContext(context.get());
701        camera->setViewport(viewport.get());
702
703        camera->setDrawBuffer(cameraToPartition->getDrawBuffer());
704        camera->setReadBuffer(cameraToPartition->getReadBuffer());
705
706        camera->setComputeNearFarMode(osg::Camera::DO_NOT_COMPUTE_NEAR_FAR);
707        camera->setCullingMode(osg::Camera::ENABLE_ALL_CULLING);
708
709        addSlave(camera.get());
710
711        osg::View::Slave& slave = getSlave(getNumSlaves()-1);
712
713        slave._useMastersSceneData = useMastersSceneData;
714        slave._projectionOffset = projectionOffset;
715        slave._viewOffset = viewOffset;
716        slave._updateSlaveCallback =  new osgDepthPartition::MyUpdateSlaveCallback(dps.get(), 1);
717    }
718
719    // near camera
720    {
721        osg::ref_ptr<osg::Camera> camera = new osg::Camera;
722        camera->setGraphicsContext(context.get());
723        camera->setViewport(viewport.get());
724
725        camera->setDrawBuffer(cameraToPartition->getDrawBuffer());
726        camera->setReadBuffer(cameraToPartition->getReadBuffer());
727
728        camera->setComputeNearFarMode(osg::Camera::DO_NOT_COMPUTE_NEAR_FAR);
729        camera->setCullingMode(osg::Camera::ENABLE_ALL_CULLING);
730        camera->setClearMask(GL_DEPTH_BUFFER_BIT);
731
732        addSlave(camera.get());
733
734        osg::View::Slave& slave = getSlave(getNumSlaves()-1);
735        slave._useMastersSceneData = useMastersSceneData;
736        slave._projectionOffset = projectionOffset;
737        slave._viewOffset = viewOffset;
738        slave._updateSlaveCallback =  new osgDepthPartition::MyUpdateSlaveCallback(dps.get(), 0);
739    }
740
741    return true;
742}
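// Note: the partitioned camera is detached from its graphics context and viewport, which
// are handed to the two slave cameras created above.  The far-partition camera is added
// first and the near-partition camera second; because the near camera clears only
// GL_DEPTH_BUFFER_BIT, the far partition's colour output is preserved while the depth
// buffer is reused at full precision for the near range.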
743
744
745
746bool View::setUpDepthPartition(DepthPartitionSettings* dsp)
747{
748    osgDepthPartition::Cameras originalCameras = osgDepthPartition::getActiveCameras(*this);
749    if (originalCameras.empty())
750    {
751        OSG_INFO<<"osgViewer::View::setUpDepthPartition(..), no windows assigned, doing view.setUpViewAcrossAllScreens()"<<std::endl;
752        setUpViewAcrossAllScreens();
753
754        originalCameras = osgDepthPartition::getActiveCameras(*this);
755        if (originalCameras.empty())
756        {
757            OSG_NOTICE<<"osgViewer::View::setUpDepthPartition(..) Unable to set up windows for viewer."<<std::endl;
758            return false;
759        }
760    }
761
762    bool threadsWereRunning = getViewerBase()->areThreadsRunning();
763    if (threadsWereRunning) getViewerBase()->stopThreading();
764
765    for(osgDepthPartition::Cameras::iterator itr = originalCameras.begin();
766        itr != originalCameras.end();
767        ++itr)
768    {
769        setUpDepthPartitionForCamera(itr->get(), dsp);
770    }
771
772    if (threadsWereRunning) getViewerBase()->startThreading();
773
774    return true;
775}
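// Usage sketch (illustrative only): depth partitioning is normally requested by the
// application before the first frame, e.g.
//
//     osg::ref_ptr<osgViewer::DepthPartitionSettings> dps =
//         new osgViewer::DepthPartitionSettings(osgViewer::DepthPartitionSettings::BOUNDING_VOLUME);
//     viewer.setUpDepthPartition(dps.get());
//
// Passing a null pointer is also valid; setUpDepthPartitionForCamera() then allocates a
// default-constructed DepthPartitionSettings (zNear=1, zMid=5, zFar=1000, see above).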
776
777
778void View::assignSceneDataToCameras()
779{
780    // OSG_NOTICE<<"View::assignSceneDataToCameras()"<<std::endl;
781
782    if (_scene.valid() && _scene->getDatabasePager() && getViewerBase())
783    {
784        _scene->getDatabasePager()->setIncrementalCompileOperation(getViewerBase()->getIncrementalCompileOperation());
785    }
786
787    osg::Node* sceneData = _scene.valid() ? _scene->getSceneData() : 0;
788
789    if (_cameraManipulator.valid())
790    {
791        _cameraManipulator->setNode(sceneData);
792
793        osg::ref_ptr<osgGA::GUIEventAdapter> dummyEvent = _eventQueue->createEvent();
794
795        _cameraManipulator->home(*dummyEvent, *this);
796    }
797
798    if (_camera.valid())
799    {
800        _camera->removeChildren(0,_camera->getNumChildren());
801        if (sceneData) _camera->addChild(sceneData);
802
803        Renderer* renderer = dynamic_cast<Renderer*>(_camera->getRenderer());
804        if (renderer) renderer->setCompileOnNextDraw(true);
805
806    }
807
808    for(unsigned i=0; i<getNumSlaves(); ++i)
809    {
810        Slave& slave = getSlave(i);
811        if (slave._camera.valid() && slave._useMastersSceneData)
812        {
813            slave._camera->removeChildren(0,slave._camera->getNumChildren());
814            if (sceneData) slave._camera->addChild(sceneData);
815
816            Renderer* renderer = dynamic_cast<Renderer*>(slave._camera->getRenderer());
817            if (renderer) renderer->setCompileOnNextDraw(true);
818        }
819    }
820}
821
822void View::requestRedraw()
823{
824    if (getViewerBase())
825    {
826        getViewerBase()->_requestRedraw = true;
827    }
828    else
829    {
830        OSG_INFO<<"View::requestRedraw(), No viewer base has been assigned yet."<<std::endl;
831    }
832}
833
834void View::requestContinuousUpdate(bool flag)
835{
836    if (getViewerBase())
837    {
838        getViewerBase()->_requestContinousUpdate = flag;
839    }
840    else
841    {
842        OSG_INFO<<"View::requestContinuousUpdate(), No viewer base has been assigned yet."<<std::endl;
843    }
844}
845
846void View::requestWarpPointer(float x,float y)
847{
848    OSG_INFO<<"View::requestWarpPointer("<<x<<","<<y<<")"<<std::endl;
849   
850    float local_x, local_y;
851    const osg::Camera* camera = getCameraContainingPosition(x, y, local_x, local_y);
852    if (camera)
853    {
854        const osgViewer::GraphicsWindow* gw = dynamic_cast<const osgViewer::GraphicsWindow*>(camera->getGraphicsContext());
855        if (gw)
856        {
857            getEventQueue()->mouseWarped(x,y);
858            if (gw->getEventQueue()->getCurrentEventState()->getMouseYOrientation()==osgGA::GUIEventAdapter::Y_INCREASING_DOWNWARDS)
859            {
860                local_y = gw->getTraits()->height - local_y;
861            }
862            const_cast<osgViewer::GraphicsWindow*>(gw)->getEventQueue()->mouseWarped(local_x,local_y);
863            const_cast<osgViewer::GraphicsWindow*>(gw)->requestWarpPointer(local_x, local_y);
864        }
865    }
866    else
867    {
868        OSG_INFO<<"View::requestWarpPointer failed, no camera contains the given pointer position."<<std::endl;
869    }
870}
871
872bool View::containsCamera(const osg::Camera* camera) const
873{
874    if (_camera == camera) return true;
875
876    for(unsigned i=0; i<getNumSlaves(); ++i)
877    {
878        const Slave& slave = getSlave(i);
879        if (slave._camera == camera) return true;
880    }
881    return false;
882}
883
884
885const osg::Camera* View::getCameraContainingPosition(float x, float y, float& local_x, float& local_y) const
886{
887    const osgGA::GUIEventAdapter* eventState = getEventQueue()->getCurrentEventState();
888    const osgViewer::GraphicsWindow* gw = dynamic_cast<const osgViewer::GraphicsWindow*>(eventState->getGraphicsContext());
889    bool view_invert_y = eventState->getMouseYOrientation()==osgGA::GUIEventAdapter::Y_INCREASING_DOWNWARDS;
890   
891    // OSG_NOTICE<<"getCameraContainingPosition("<<x<<", "<<y<<") view_invert_y = "<<view_invert_y<<", Xmin() = "<<eventState->getXmin()<<", Xmax() = "<<eventState->getXmax()<<", Ymin() = "<<eventState->getYmin()<<", Ymax() = "<<eventState->getYmax()<<std::endl;
892
893    double epsilon = 0.5;
894   
895       
896    // if the master camera has a graphics context and the event state's context matches it,
897    // then assume the coordinates refer to the master camera
898    bool masterActive = (_camera->getGraphicsContext()!=0 && _camera->getViewport());
899    bool eventStateMatchesMaster = (gw!=0) ? _camera->getGraphicsContext()==gw : false;
900   
901    if (masterActive && eventStateMatchesMaster)
902    {
903        // OSG_NOTICE<<"Event state matches master"<<std::endl;
904        const osg::Viewport* viewport = _camera->getViewport();
905       
906        // rescale mouse x,y first to 0 to 1 range
907        double new_x = (x-eventState->getXmin())/(eventState->getXmax()-eventState->getXmin());
908        double new_y = (y-eventState->getYmin())/(eventState->getYmax()-eventState->getYmin());
909       
910        // flip y if required
911        if (view_invert_y) new_y = 1.0f-new_y;
912       
913        // rescale mouse x, y to window dimensions so we can check against master Camera's viewport
914        new_x *= static_cast<double>(_camera->getGraphicsContext()->getTraits()->width);
915        new_y *= static_cast<double>(_camera->getGraphicsContext()->getTraits()->height);
916       
917        if (new_x >= (viewport->x()-epsilon) && new_y >= (viewport->y()-epsilon) &&
918            new_x < (viewport->x()+viewport->width()-1.0+epsilon) && new_y <= (viewport->y()+viewport->height()-1.0+epsilon) )
919        {
920            local_x = new_x;
921            local_y = new_y;
922
923            //OSG_NOTICE<<"Returning master camera"<<std::endl;
924
925            return _camera.get();
926        }
927        else
928        {
929            // OSG_NOTICE<<"master camera viewport not matched."<<std::endl;
930        }
931    }
932   
933    osg::Matrix masterCameraVPW = getCamera()->getViewMatrix() * getCamera()->getProjectionMatrix();
934
935    // convert to non-dimensional (normalized, -1 to 1) coordinates
936    x = (x - eventState->getXmin()) * 2.0 / (eventState->getXmax()-eventState->getXmin()) - 1.0;
937    y = (y - eventState->getYmin())* 2.0 / (eventState->getYmax()-eventState->getYmin()) - 1.0;
938
939    if (view_invert_y) y = - y;
940
941    for(int i=getNumSlaves()-1; i>=0; --i)
942    {
943        const Slave& slave = getSlave(i);
944        if (slave._camera.valid() &&
945            slave._camera->getAllowEventFocus() &&
946            slave._camera->getRenderTargetImplementation()==osg::Camera::FRAME_BUFFER)
947        {
948            OSG_INFO<<"Testing slave camera "<<slave._camera->getName()<<std::endl;
949
950            const osg::Camera* camera = slave._camera.get();
951            const osg::Viewport* viewport = camera ? camera->getViewport() : 0;
952
953            osg::Matrix localCameraVPW = camera->getViewMatrix() * camera->getProjectionMatrix();
954            if (viewport) localCameraVPW *= viewport->computeWindowMatrix();
955
956            osg::Matrix matrix( osg::Matrix::inverse(masterCameraVPW) * localCameraVPW );
957
958            osg::Vec3d new_coord = osg::Vec3d(x,y,0.0) * matrix;
959
960            //OSG_NOTICE<<"  x="<<x<<" y="<<y<<std::endl;;
961            //OSG_NOTICE<<"  eventState->getXmin()="<<eventState->getXmin()<<" eventState->getXmax()="<<eventState->getXmax()<<std::endl;;
962            //OSG_NOTICE<<"  new_coord "<<new_coord<<std::endl;;
963
964            if (viewport &&
965                new_coord.x() >= (viewport->x()-epsilon) && new_coord.y() >= (viewport->y()-epsilon) &&
966                new_coord.x() < (viewport->x()+viewport->width()-1.0+epsilon) && new_coord.y() <= (viewport->y()+viewport->height()-1.0+epsilon) )
967            {
968                // OSG_NOTICE<<"  in viewport "<<std::endl;;
969
970                local_x = new_coord.x();
971                local_y = new_coord.y();
972
973                return camera;
974            }
975            else
976            {
977                // OSG_NOTICE<<"  not in viewport "<<viewport->x()<<" "<<(viewport->x()+viewport->width())<<std::endl;;
978            }
979
980        }
981    }
982
983    local_x = x;
984    local_y = y;
985
986    return 0;
987}
988
989bool View::computeIntersections(float x,float y, osgUtil::LineSegmentIntersector::Intersections& intersections, osg::Node::NodeMask traversalMask)
990{
991    float local_x, local_y;
992    const osg::Camera* camera = getCameraContainingPosition(x, y, local_x, local_y);
993   
994    OSG_INFO<<"computeIntersections("<<x<<", "<<y<<") local_x="<<local_x<<", local_y="<<local_y<<std::endl;
995   
996    if (camera) return computeIntersections(camera, (camera->getViewport()==0)?osgUtil::Intersector::PROJECTION : osgUtil::Intersector::WINDOW, local_x, local_y, intersections, traversalMask);
997    else return false;
998}
999
1000bool View::computeIntersections(float x,float y, const osg::NodePath& nodePath, osgUtil::LineSegmentIntersector::Intersections& intersections, osg::Node::NodeMask traversalMask)
1001{
1002    float local_x, local_y;
1003    const osg::Camera* camera = getCameraContainingPosition(x, y, local_x, local_y);
1004   
1005    OSG_INFO<<"computeIntersections("<<x<<", "<<y<<") local_x="<<local_x<<", local_y="<<local_y<<std::endl;
1006
1007    if (camera) return computeIntersections(camera, (camera->getViewport()==0)?osgUtil::Intersector::PROJECTION : osgUtil::Intersector::WINDOW, local_x, local_y, nodePath, intersections, traversalMask);
1008    else return false;
1009}
1010
1011bool View::computeIntersections(const osgGA::GUIEventAdapter& ea, osgUtil::LineSegmentIntersector::Intersections& intersections,osg::Node::NodeMask traversalMask)
1012{
1013#if 1
1014    if (ea.getNumPointerData()>=1)
1015    {
1016        const osgGA::PointerData* pd = ea.getPointerData(ea.getNumPointerData()-1);
1017        const osg::Camera* camera = dynamic_cast<const osg::Camera*>(pd->object.get());
1018        if (camera)
1019        {
1020            return computeIntersections(camera, osgUtil::Intersector::PROJECTION, pd->getXnormalized(), pd->getYnormalized(), intersections, traversalMask);
1021        }
1022    }
1023#endif
1024    return computeIntersections(ea.getX(), ea.getY(), intersections, traversalMask);
1025}
1026
1027bool View::computeIntersections(const osgGA::GUIEventAdapter& ea, const osg::NodePath& nodePath, osgUtil::LineSegmentIntersector::Intersections& intersections,osg::Node::NodeMask traversalMask)
1028{
1029#if 1
1030    if (ea.getNumPointerData()>=1)
1031    {
1032        const osgGA::PointerData* pd = ea.getPointerData(ea.getNumPointerData()-1);
1033        const osg::Camera* camera = dynamic_cast<const osg::Camera*>(pd->object.get());
1034        if (camera)
1035        {
1036            return computeIntersections(camera, osgUtil::Intersector::PROJECTION, pd->getXnormalized(), pd->getYnormalized(), nodePath, intersections, traversalMask);
1037        }
1038    }
1039#endif
1040    return computeIntersections(ea.getX(), ea.getY(), nodePath, intersections, traversalMask);
1041}
1042
1043bool View::computeIntersections(const osg::Camera* camera, osgUtil::Intersector::CoordinateFrame cf, float x,float y, osgUtil::LineSegmentIntersector::Intersections& intersections, osg::Node::NodeMask traversalMask)
1044{
1045    if (!camera) return false;
1046
1047    osg::ref_ptr< osgUtil::LineSegmentIntersector > picker = new osgUtil::LineSegmentIntersector(cf, x, y);
1048    osgUtil::IntersectionVisitor iv(picker.get());
1049    iv.setTraversalMask(traversalMask);
1050
1051    const_cast<osg::Camera*>(camera)->accept(iv);
1052
1053    if (picker->containsIntersections())
1054    {
1055        intersections = picker->getIntersections();
1056        return true;
1057    }
1058    else
1059    {
1060        intersections.clear();
1061        return false;
1062    }
1063}
1064
1065bool View::computeIntersections(const osg::Camera* camera, osgUtil::Intersector::CoordinateFrame cf, float x,float y, const osg::NodePath& nodePath, osgUtil::LineSegmentIntersector::Intersections& intersections,osg::Node::NodeMask traversalMask)
1066{
1067    if (!camera || nodePath.empty()) return false;
1068
1069    osg::Matrixd matrix;
1070    if (nodePath.size()>1)
1071    {
1072        osg::NodePath prunedNodePath(nodePath.begin(),nodePath.end()-1);
1073        matrix = osg::computeLocalToWorld(prunedNodePath);
1074    }
1075
1076    matrix.postMult(camera->getViewMatrix());
1077    matrix.postMult(camera->getProjectionMatrix());
1078
1079    double zNear = -1.0;
1080    double zFar = 1.0;
1081    if (cf==osgUtil::Intersector::WINDOW && camera->getViewport())
1082    {
1083        matrix.postMult(camera->getViewport()->computeWindowMatrix());
1084        zNear = 0.0;
1085        zFar = 1.0;
1086    }
1087
1088    osg::Matrixd inverse;
1089    inverse.invert(matrix);
1090
1091    osg::Vec3d startVertex = osg::Vec3d(x,y,zNear) * inverse;
1092    osg::Vec3d endVertex = osg::Vec3d(x,y,zFar) * inverse;
1093
1094    osg::ref_ptr< osgUtil::LineSegmentIntersector > picker = new osgUtil::LineSegmentIntersector(osgUtil::Intersector::MODEL, startVertex, endVertex);
1095
1096    osgUtil::IntersectionVisitor iv(picker.get());
1097    iv.setTraversalMask(traversalMask);
1098    nodePath.back()->accept(iv);
1099
1100    if (picker->containsIntersections())
1101    {
1102        intersections = picker->getIntersections();
1103        return true;
1104    }
1105    else
1106    {
1107        intersections.clear();
1108        return false;
1109    }
1110}
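// Usage sketch (illustrative only): the computeIntersections() overloads above are
// typically called from a GUIEventHandler when picking on a mouse click, e.g.
//
//     osgUtil::LineSegmentIntersector::Intersections intersections;
//     if (view->computeIntersections(ea, intersections))
//     {
//         // intersections are ordered nearest-first
//         const osgUtil::LineSegmentIntersector::Intersection& hit = *intersections.begin();
//         osg::Vec3d worldPoint = hit.getWorldIntersectPoint();
//         // ... use worldPoint and hit.nodePath as required ...
//     }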
1111
1112void View::addDevice(osgGA::Device* eventSource)
1113{
1114    Devices::iterator itr = std::find( _eventSources.begin(), _eventSources.end(), eventSource );
1115    if (itr==_eventSources.end())
1116    {
1117        _eventSources.push_back(eventSource);
1118    }
1119   
1120    if (eventSource)
1121        eventSource->getEventQueue()->setStartTick(getStartTick());
1122}
1123
1124void View::removeDevice(osgGA::Device* eventSource)
1125{
1126    Devices::iterator itr = std::find( _eventSources.begin(), _eventSources.end(), eventSource );
1127    if (itr!=_eventSources.end())
1128    {
1129        _eventSources.erase(itr);
1130    }
1131}
1132
1133///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
1134//
1135// Methods that support Stereo and Keystone correction.
1136//
1137osg::Texture* View::createDistortionTexture(int width, int height)
1138{
1139    osg::ref_ptr<osg::TextureRectangle> texture = new osg::TextureRectangle;
1140
1141    texture->setTextureSize(width, height);
1142    texture->setInternalFormat(GL_RGB);
1143    texture->setFilter(osg::Texture::MIN_FILTER,osg::Texture::LINEAR);
1144    texture->setFilter(osg::Texture::MAG_FILTER,osg::Texture::LINEAR);
1145    texture->setWrap(osg::Texture::WRAP_S,osg::Texture::CLAMP_TO_EDGE);
1146    texture->setWrap(osg::Texture::WRAP_T,osg::Texture::CLAMP_TO_EDGE);
1147
1148    return texture.release();
1149}
1150
1151osg::Camera* View::assignRenderToTextureCamera(osg::GraphicsContext* gc, int width, int height, osg::Texture* texture)
1152{
1153    osg::ref_ptr<osg::Camera> camera = new osg::Camera;
1154    camera->setName("Render to texture camera");
1155    camera->setGraphicsContext(gc);
1156    camera->setViewport(new osg::Viewport(0,0,width, height));
1157    camera->setDrawBuffer(GL_FRONT);
1158    camera->setReadBuffer(GL_FRONT);
1159    camera->setAllowEventFocus(false);
1160    camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1161
1162    // attach the texture and use it as the color buffer.
1163    camera->attach(osg::Camera::COLOR_BUFFER, texture);
1164
1165    addSlave(camera.get(), osg::Matrixd(), osg::Matrixd());
1166
1167    return camera.release();
1168}
1169
1170osg::Camera* View::assignKeystoneDistortionCamera(osg::DisplaySettings* ds, osg::GraphicsContext* gc, int x, int y, int width, int height, GLenum buffer, osg::Texture* texture, Keystone* keystone)
1171{
1172    double screenDistance = ds->getScreenDistance();
1173    double screenWidth = ds->getScreenWidth();
1174    double screenHeight = ds->getScreenHeight();
1175    double fovy = osg::RadiansToDegrees(2.0*atan2(screenHeight/2.0,screenDistance));
1176    double aspectRatio = screenWidth/screenHeight;
1177
1178    osg::Geode* geode = keystone->createKeystoneDistortionMesh();
1179
1180    // now we need to add the texture to the mesh; we do so by creating a
1181    // StateSet to contain the Texture StateAttribute.
1182    osg::StateSet* stateset = geode->getOrCreateStateSet();
1183    stateset->setTextureAttributeAndModes(0, texture,osg::StateAttribute::ON);
1184    stateset->setMode(GL_LIGHTING,osg::StateAttribute::OFF);
1185
1186    osg::TexMat* texmat = new osg::TexMat;
1187    texmat->setScaleByTextureRectangleSize(true);
1188    stateset->setTextureAttributeAndModes(0, texmat, osg::StateAttribute::ON);
1189
1190    osg::ref_ptr<osg::Camera> camera = new osg::Camera;
1191    camera->setGraphicsContext(gc);
1192    camera->setClearMask(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT );
1193    camera->setClearColor( osg::Vec4(0.0,0.0,0.0,1.0) );
1194    camera->setViewport(new osg::Viewport(x, y, width, height));
1195    camera->setDrawBuffer(buffer);
1196    camera->setReadBuffer(buffer);
1197    camera->setReferenceFrame(osg::Camera::ABSOLUTE_RF);
1198    camera->setInheritanceMask(camera->getInheritanceMask() & ~osg::CullSettings::CLEAR_COLOR & ~osg::CullSettings::COMPUTE_NEAR_FAR_MODE);
1199    //camera->setComputeNearFarMode(osg::CullSettings::DO_NOT_COMPUTE_NEAR_FAR);
1200
1201    camera->setViewMatrix(osg::Matrix::identity());
1202    camera->setProjectionMatrixAsPerspective(fovy, aspectRatio, 0.1, 1000.0);
1203
1204    // add subgraph to render
1205    camera->addChild(geode);
1206
1207    camera->addChild(keystone->createGrid());
1208
1209    camera->setName("DistortionCorrectionCamera");
1210
1211    // camera->addEventCallback(new KeystoneHandler(keystone));
1212
1213    addSlave(camera.get(), osg::Matrixd(), osg::Matrixd(), false);
1214
1215    return camera.release();
1216}
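// Note: fovy in assignKeystoneDistortionCamera() is the full vertical field of view implied
// by the physical screen, fovy = 2 * atan2(screenHeight/2, screenDistance) converted to
// degrees; for example, a 0.26m high screen viewed from 0.5m gives roughly 29 degrees.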
1217
1218
1219
1220void View::StereoSlaveCallback::updateSlave(osg::View& view, osg::View::Slave& slave)
1221{
1222    osg::Camera* camera = slave._camera.get();
1223    osgViewer::View* viewer_view = dynamic_cast<osgViewer::View*>(&view);
1224
1225    if (_ds.valid() && camera && viewer_view)
1226    {
1227        // inherit any settings applied to the master Camera.
1228        camera->inheritCullSettings(*(view.getCamera()), camera->getInheritanceMask());
1229       
1230        if (_eyeScale<0.0)
1231        {
1232            camera->setCullMask(camera->getCullMaskLeft());
1233        }
1234        else
1235        {
1236            camera->setCullMask(camera->getCullMaskRight());
1237        }
1238           
1239        // set projection matrix
1240        if (_eyeScale<0.0)
1241        {
1242            camera->setProjectionMatrix(_ds->computeLeftEyeProjectionImplementation(view.getCamera()->getProjectionMatrix()));
1243        }
1244        else
1245        {
1246            camera->setProjectionMatrix(_ds->computeRightEyeProjectionImplementation(view.getCamera()->getProjectionMatrix()));
1247        }
1248
1249        double sd = _ds->getScreenDistance();
1250        double fusionDistance = sd;
1251        switch(viewer_view->getFusionDistanceMode())
1252        {
1253            case(osgUtil::SceneView::USE_FUSION_DISTANCE_VALUE):
1254                fusionDistance = viewer_view->getFusionDistanceValue();
1255                break;
1256            case(osgUtil::SceneView::PROPORTIONAL_TO_SCREEN_DISTANCE):
1257                fusionDistance *= viewer_view->getFusionDistanceValue();
1258                break;
1259        }
1260        double eyeScale = osg::absolute(_eyeScale) * (fusionDistance/sd);
1261
1262        if (_eyeScale<0.0)
1263        {
1264            camera->setViewMatrix(_ds->computeLeftEyeViewImplementation(view.getCamera()->getViewMatrix(), eyeScale));
1265        }
1266        else
1267        {
1268            camera->setViewMatrix(_ds->computeRightEyeViewImplementation(view.getCamera()->getViewMatrix(), eyeScale));
1269        }
1270    }
1271    else
1272    {
1273        slave.updateSlaveImplementation(view);
1274    }
1275}
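// Note: the callback above scales the eye separation by fusionDistance/screenDistance, so
// USE_FUSION_DISTANCE_VALUE treats the view's fusion distance value as an absolute distance
// while PROPORTIONAL_TO_SCREEN_DISTANCE (the View constructor default, with value 1.0)
// treats it as a multiplier of the DisplaySettings screen distance.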
1276
1277osg::Camera* View::assignStereoCamera(osg::DisplaySettings* ds, osg::GraphicsContext* gc, int x, int y, int width, int height, GLenum buffer, double eyeScale)
1278{
1279    osg::ref_ptr<osg::Camera> camera = new osg::Camera;
1280
1281    camera->setGraphicsContext(gc);
1282    camera->setViewport(new osg::Viewport(x,y, width, height));
1283    camera->setDrawBuffer(buffer);
1284    camera->setReadBuffer(buffer);
1285
1286    // add this slave camera to the viewer; the stereo view/projection offsets are applied by the update callback assigned below
1287    addSlave(camera.get(), osg::Matrixd::identity(), osg::Matrixd::identity());
1288
1289    // assign update callback to maintain the correct view and projection matrices
1290    osg::View::Slave& slave = getSlave(getNumSlaves()-1);
1291    slave._updateSlaveCallback =  new StereoSlaveCallback(ds, eyeScale);
1292
1293    return camera.release();
1294}
1295
1296static const GLubyte patternVertEven[] = {
1297    0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1298    0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1299    0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1300    0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1301    0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1302    0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1303    0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1304    0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1305    0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1306    0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1307    0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1308    0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1309    0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1310    0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1311    0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1312    0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55};
1313
1314static const GLubyte patternVertOdd[] = {
1315    0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA,
1316    0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA,
1317    0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA,
1318    0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA,
1319    0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA,
1320    0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA,
1321    0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA,
1322    0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA,
1323    0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA,
1324    0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA,
1325    0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA,
1326    0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA,
1327    0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA,
1328    0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA,
1329    0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA,
1330    0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA};
1331
1332static const GLubyte patternHorzEven[] = {
1333    0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1334    0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1335    0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1336    0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1337    0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1338    0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1339    0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1340    0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1341    0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1342    0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1343    0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1344    0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1345    0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1346    0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1347    0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1348    0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00};
1349
1350// 32 x 32 bit array: every row is a horizontal line of pixels
1351//  and the (bitwise) columns form vertical lines.
1352//  The following is a checkerboard pattern.
1353static const GLubyte patternCheckerboard[] = {
1354    0x55, 0x55, 0x55, 0x55,
1355    0xAA, 0xAA, 0xAA, 0xAA,
1356    0x55, 0x55, 0x55, 0x55,
1357    0xAA, 0xAA, 0xAA, 0xAA,
1358    0x55, 0x55, 0x55, 0x55,
1359    0xAA, 0xAA, 0xAA, 0xAA,
1360    0x55, 0x55, 0x55, 0x55,
1361    0xAA, 0xAA, 0xAA, 0xAA,
1362    0x55, 0x55, 0x55, 0x55,
1363    0xAA, 0xAA, 0xAA, 0xAA,
1364    0x55, 0x55, 0x55, 0x55,
1365    0xAA, 0xAA, 0xAA, 0xAA,
1366    0x55, 0x55, 0x55, 0x55,
1367    0xAA, 0xAA, 0xAA, 0xAA,
1368    0x55, 0x55, 0x55, 0x55,
1369    0xAA, 0xAA, 0xAA, 0xAA,
1370    0x55, 0x55, 0x55, 0x55,
1371    0xAA, 0xAA, 0xAA, 0xAA,
1372    0x55, 0x55, 0x55, 0x55,
1373    0xAA, 0xAA, 0xAA, 0xAA,
1374    0x55, 0x55, 0x55, 0x55,
1375    0xAA, 0xAA, 0xAA, 0xAA,
1376    0x55, 0x55, 0x55, 0x55,
1377    0xAA, 0xAA, 0xAA, 0xAA,
1378    0x55, 0x55, 0x55, 0x55,
1379    0xAA, 0xAA, 0xAA, 0xAA,
1380    0x55, 0x55, 0x55, 0x55,
1381    0xAA, 0xAA, 0xAA, 0xAA,
1382    0x55, 0x55, 0x55, 0x55,
1383    0xAA, 0xAA, 0xAA, 0xAA,
1384    0x55, 0x55, 0x55, 0x55,
1385    0xAA, 0xAA, 0xAA, 0xAA};
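// The stipple masks above select alternate pixel columns (patternVertEven/patternVertOdd),
// alternate pixel rows (patternHorzEven) and a pixel checkerboard (patternCheckerboard);
// they are intended for the interlaced and checkerboard stereo modes handled further down,
// applied via osg::PolygonStipple (included at the top of this file).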
1386
1387
1388void View::assignStereoOrKeystoneToCamera(osg::Camera* camera, osg::DisplaySettings* ds)
1389{
1390    if (!camera || camera->getGraphicsContext()==0) return;
1391    if (!ds->getStereo() && !ds->getKeystoneHint()) return;
1392   
1393    ds->setUseSceneViewForStereoHint(false);
1394
1395    typedef std::vector< osg::ref_ptr<Keystone> > Keystones;
1396    Keystones keystones;
1397    if (ds->getKeystoneHint() && !ds->getKeystones().empty())
1398    {
1399        for(osg::DisplaySettings::Objects::iterator itr = ds->getKeystones().begin();
1400            itr != ds->getKeystones().end();
1401            ++itr)
1402        {
1403            Keystone* keystone = dynamic_cast<Keystone*>(itr->get());
1404            if (keystone) keystones.push_back(keystone);
1405        }
1406    }
1407   
1408    if (ds->getKeystoneHint())
1409    {
1410        while(keystones.size()<2) keystones.push_back(new Keystone);
1411    }
1412
1413   
1414    // set up view's main camera
1415    {
1416        double height = ds->getScreenHeight();
1417        double width = ds->getScreenWidth();
1418        double distance = ds->getScreenDistance();
1419        double vfov = osg::RadiansToDegrees(atan2(height/2.0f,distance)*2.0);
1420
1421        camera->setProjectionMatrixAsPerspective( vfov, width/height, 1.0f,10000.0f);
1422    }
1423   
1424
1425    osg::ref_ptr<osg::GraphicsContext> gc = camera->getGraphicsContext();
1426   
1427    osg::ref_ptr<osg::GraphicsContext::Traits> traits = const_cast<osg::GraphicsContext::Traits*>(camera->getGraphicsContext()->getTraits());
1428
1429    if (!ds->getStereo())
1430    {
1431        // load or create a Keystone object
1432        osg::ref_ptr<osgViewer::Keystone> keystone = 0;
1433        if (!(ds->getKeystones().empty())) keystone = dynamic_cast<osgViewer::Keystone*>(ds->getKeystones().front().get());
1434        if (!keystone) keystone = new osgViewer::Keystone;
1435       
1436        // create distortion texture
1437        osg::ref_ptr<osg::Texture> texture = createDistortionTexture(traits->width, traits->height);
1438
1439        // create RTT Camera
1440        assignRenderToTextureCamera(gc.get(), traits->width, traits->height, texture.get());
1441
1442        // create Keystone distortion camera
1443        osg::ref_ptr<osg::Camera> distortion_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1444                                                                        0, 0, traits->width, traits->height,
1445                                                                        traits->doubleBuffer ? GL_BACK : GL_FRONT,
1446                                                                        texture.get(), keystone.get());
1447        // attach Keystone editing event handler.
1448        distortion_camera->addEventCallback(new KeystoneHandler(keystone.get()));
1449       
1450        camera->setGraphicsContext(0);
1451       
1452        return;
1453    }
1454   
1455    switch(ds->getStereoMode())
1456    {
1457        case(osg::DisplaySettings::QUAD_BUFFER):
1458        {
1459            // disconnect the camera from the graphics context.
1460            camera->setGraphicsContext(0);
1461
1462            // left Camera left buffer
1463            osg::ref_ptr<osg::Camera> left_camera = assignStereoCamera(ds, gc.get(), 0, 0, traits->width, traits->height, traits->doubleBuffer ? GL_BACK_LEFT : GL_FRONT_LEFT, -1.0);
1464            left_camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
1465            left_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 0);
1466
1467            // right Camera right buffer
1468            osg::ref_ptr<osg::Camera> right_camera = assignStereoCamera(ds, gc.get(), 0, 0, traits->width, traits->height, traits->doubleBuffer ? GL_BACK_RIGHT : GL_FRONT_RIGHT, 1.0);
1469            right_camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
1470            right_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 1);
1471
1472            // for keystone:
1473            // left camera to render to left texture
1474            // right camera to render to right texture
1475            // left keystone camera to render to left buffer
1476            // right keystone camera to render to right buffer
1477            // one keystone, with editing enabled, shared for the single window
1478           
1479            if (!keystones.empty())
1480            {
1481                // for keystone:
1482                // left camera to render to left texture using whole viewport of left texture
1483                // right camera to render to right texture using whole viewport of right texture
1484                // left keystone camera to render to the left buffer of the window
1485                // right keystone camera to render to the right buffer of the window
1486                // a single keystone shared by both eyes, with editing handled via the left keystone camera
1487               
1488                osg::ref_ptr<Keystone> keystone = keystones.front();
1489
1490                // create distortion texture
1491                osg::ref_ptr<osg::Texture> left_texture = createDistortionTexture(traits->width, traits->height);
1492
1493                // convert to RTT Camera
1494                left_camera->setViewport(0, 0, traits->width, traits->height);
1495                left_camera->setDrawBuffer(GL_FRONT);
1496                left_camera->setReadBuffer(GL_FRONT);
1497                left_camera->setAllowEventFocus(true);
1498                left_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1499
1500                // attach the texture and use it as the color buffer.
1501                left_camera->attach(osg::Camera::COLOR_BUFFER, left_texture.get());
1502
1503
1504                // create distortion texture
1505                osg::ref_ptr<osg::Texture> right_texture = createDistortionTexture(traits->width, traits->height);
1506
1507                // convert to RTT Camera
1508                right_camera->setViewport(0, 0, traits->width, traits->height);
1509                right_camera->setDrawBuffer(GL_FRONT);
1510                right_camera->setReadBuffer(GL_FRONT);
1511                right_camera->setAllowEventFocus(true);
1512                right_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1513
1514                // attach the texture and use it as the color buffer.
1515                right_camera->attach(osg::Camera::COLOR_BUFFER, right_texture.get());
1516               
1517
1518                // create Keystone left distortion camera
1519                keystone->setGridColor(osg::Vec4(1.0f,0.0f,0.0,1.0));
1520                osg::ref_ptr<osg::Camera> left_keystone_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1521                                                                                0, 0, traits->width, traits->height,
1522                                                                                traits->doubleBuffer ? GL_BACK_LEFT : GL_FRONT_LEFT,
1523                                                                                left_texture.get(), keystone.get());
1524
1525                left_keystone_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 2);
1526
1527                // attach Keystone editing event handler.
1528                left_keystone_camera->addEventCallback(new KeystoneHandler(keystone.get()));
1529
1530               
1531                // create Keystone right distortion camera
1532                osg::ref_ptr<osg::Camera> right_keystone_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1533                                                                                0, 0, traits->width, traits->height,
1534                                                                                traits->doubleBuffer ? GL_BACK_RIGHT : GL_FRONT_RIGHT,
1535                                                                                right_texture.get(), keystone.get());
1536
1537                right_keystone_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 3);
1538                right_keystone_camera->setAllowEventFocus(false);
1539               
1540            }
1541
1542            break;
1543        }
1544        case(osg::DisplaySettings::ANAGLYPHIC):
1545        {
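            // Anaglyphic stereo: both eyes share the same colour buffer, with ColorMask attributes
            // routing the left eye to the red channel and the right eye to the green/blue (cyan) channels.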
1546            // disconnect the camera from the graphics context.
1547            camera->setGraphicsContext(0);
1548
1549            // left Camera red
1550            osg::ref_ptr<osg::Camera> left_camera = assignStereoCamera(ds, gc.get(), 0, 0, traits->width, traits->height, traits->doubleBuffer ? GL_BACK : GL_FRONT, -1.0);
1551            left_camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
1552            left_camera->getOrCreateStateSet()->setAttribute(new osg::ColorMask(true, false, false, true));
1553            left_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 0);
1554
1555            // right Camera cyan
1556            osg::ref_ptr<osg::Camera> right_camera = assignStereoCamera(ds, gc.get(), 0, 0, traits->width, traits->height, traits->doubleBuffer ? GL_BACK : GL_FRONT, 1.0);
1557            right_camera->setClearMask(GL_DEPTH_BUFFER_BIT);
1558            right_camera->getOrCreateStateSet()->setAttribute(new osg::ColorMask(false, true, true, true));
1559            right_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 1);
1560
1561            if (!keystones.empty())
1562            {
1563                // for keystone:
1564                // left and right cameras render into distortion texture(s)
1565                // keystone camera(s) then present the result to the window; see useTwoTexture below
1566                // for whether the red/cyan colour masks stay on the eye cameras or move to the keystone cameras
1567                // one keystone and editing for the one window
1568
1569                osg::ref_ptr<Keystone> keystone = keystones.front();
1570               
1571                bool useTwoTexture = true;
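                // two approaches are implemented: with two textures each eye renders unmasked into its
                // own texture and the red/cyan ColorMasks are applied on the keystone cameras; with a
                // single texture both eyes keep their ColorMasks, render into the same texture, and one
                // keystone camera presents the combined image.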
1572               
1573                if (useTwoTexture)
1574                {
1575                   
1576                    // create left distortion texture
1577                    osg::ref_ptr<osg::Texture> left_texture = createDistortionTexture(traits->width, traits->height);
1578
1579                    // convert to RTT Camera
1580                    left_camera->setDrawBuffer(GL_FRONT);
1581                    left_camera->setReadBuffer(GL_FRONT);
1582                    left_camera->setAllowEventFocus(false);
1583                    left_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 0);
1584                    left_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1585                    left_camera->getOrCreateStateSet()->removeAttribute(osg::StateAttribute::COLORMASK);
1586                    left_camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
1587
1588                    // attach the texture and use it as the color buffer.
1589                    left_camera->attach(osg::Camera::COLOR_BUFFER, left_texture.get());
1590
1591                    // create right distortion texture
1592                    osg::ref_ptr<osg::Texture> right_texture = createDistortionTexture(traits->width, traits->height);
1593
1594                    // convert to RTT Camera
1595                    right_camera->setDrawBuffer(GL_FRONT);
1596                    right_camera->setReadBuffer(GL_FRONT);
1597                    right_camera->setAllowEventFocus(false);
1598                    right_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 1);
1599                    right_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1600                    right_camera->getOrCreateStateSet()->removeAttribute(osg::StateAttribute::COLORMASK);
1601                    right_camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
1602
1603                    // attach the texture and use it as the color buffer.
1604                    right_camera->attach(osg::Camera::COLOR_BUFFER, right_texture.get());
1605
1606                    // create Keystone left distortion camera
1607                    osg::ref_ptr<osg::Camera> left_keystone_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1608                                                                                    0, 0, traits->width, traits->height,
1609                                                                                    traits->doubleBuffer ? GL_BACK : GL_FRONT,
1610                                                                                    left_texture.get(), keystone.get());
1611
1612                    left_keystone_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 2);
1613                    left_keystone_camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
1614                    left_keystone_camera->getOrCreateStateSet()->setAttribute(new osg::ColorMask(true, false, false, true));
1615
1616
1617                    // create Keystone right distortion camera
1618                    osg::ref_ptr<osg::Camera> right_keystone_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1619                                                                                    0, 0, traits->width, traits->height,
1620                                                                                    traits->doubleBuffer ? GL_BACK : GL_FRONT,
1621                                                                                    right_texture.get(), keystone.get());
1622
1623                    right_keystone_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 3);
1624                    right_keystone_camera->setClearMask(GL_DEPTH_BUFFER_BIT);
1625                    right_keystone_camera->getOrCreateStateSet()->setAttribute(new osg::ColorMask(false, true, true, true));
1626
1627                    // attach Keystone editing event handler.
1628                    left_keystone_camera->addEventCallback(new KeystoneHandler(keystone.get()));
1629
1630                    camera->setAllowEventFocus(false);
1631                   
1632                }
1633                else
1634                {                   
1635                    // create distortion texture
1636                    osg::ref_ptr<osg::Texture> texture = createDistortionTexture(traits->width, traits->height);
1637
1638                    // convert to RTT Camera
1639                    left_camera->setDrawBuffer(GL_FRONT);
1640                    left_camera->setReadBuffer(GL_FRONT);
1641                    left_camera->setAllowEventFocus(false);
1642                    left_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 0);
1643                    left_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1644
1645                    // attach the texture and use it as the color buffer.
1646                    left_camera->attach(osg::Camera::COLOR_BUFFER, texture.get());
1647
1648
1649                    // convert to RTT Camera
1650                    right_camera->setDrawBuffer(GL_FRONT);
1651                    right_camera->setReadBuffer(GL_FRONT);
1652                    right_camera->setAllowEventFocus(false);
1653                    right_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 1);
1654                    right_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1655
1656                    // attach the texture and use it as the color buffer.
1657                    right_camera->attach(osg::Camera::COLOR_BUFFER, texture.get());
1658
1659
1660                    // create Keystone distortion camera
1661                    osg::ref_ptr<osg::Camera> distortion_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1662                                                                                    0, 0, traits->width, traits->height,
1663                                                                                    traits->doubleBuffer ? GL_BACK : GL_FRONT,
1664                                                                                    texture.get(), keystone.get());
1665
1666                    distortion_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 2);
1667                   
1668                    // attach Keystone editing event handler.
1669                    distortion_camera->addEventCallback(new KeystoneHandler(keystone.get()));
1670
1671                    camera->setAllowEventFocus(false);
1672                }
1673            }
1674
1675            break;
1676        }
1677        case(osg::DisplaySettings::HORIZONTAL_SPLIT):
1678        {
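            // Horizontal split stereo: the window is split into left and right halves, one eye per half;
            // which eye maps to which half is controlled by DisplaySettings::getSplitStereoHorizontalEyeMapping().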
1679            // disconnect the camera from the graphics context.
1680            camera->setGraphicsContext(0);
1681
1682            bool left_eye_left_viewport = ds->getSplitStereoHorizontalEyeMapping()==osg::DisplaySettings::LEFT_EYE_LEFT_VIEWPORT;
1683            int left_start = (left_eye_left_viewport) ? 0 : traits->width/2;
1684            int right_start = (left_eye_left_viewport) ? traits->width/2 : 0;
1685
1686            // left viewport Camera
1687            osg::ref_ptr<osg::Camera> left_camera = assignStereoCamera(ds, gc.get(),
1688                               left_start, 0, traits->width/2, traits->height, traits->doubleBuffer ? GL_BACK : GL_FRONT,
1689                               -1.0);
1690
1691            // right viewport Camera
1692            osg::ref_ptr<osg::Camera> right_camera = assignStereoCamera(ds, gc.get(),
1693                               right_start, 0, traits->width/2, traits->height, traits->doubleBuffer ? GL_BACK : GL_FRONT,
1694                               1.0);
1695
1696            if (!keystones.empty())
1697            {
1698                // for keystone:
1699                // left camera to render to left texture using whole viewport of left texture
1700                // right camera to render to right texture using whole viewport of right texture
1701                // left keystone camera to render to left viewport/window
1702                // right keystone camera to render to right viewport/window
1703                // two keystones, one for each of the left and right viewports/windows
1704               
1705                osg::ref_ptr<Keystone> left_keystone = keystones[0];
1706                osg::ref_ptr<Keystone> right_keystone = keystones[1];
1707
1708                // create distortion texture
1709                osg::ref_ptr<osg::Texture> left_texture = createDistortionTexture(traits->width/2, traits->height);
1710
1711                // convert to RTT Camera
1712                left_camera->setViewport(0, 0, traits->width/2, traits->height);
1713                left_camera->setDrawBuffer(GL_FRONT);
1714                left_camera->setReadBuffer(GL_FRONT);
1715                left_camera->setAllowEventFocus(true);
1716                left_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 0);
1717                left_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1718
1719                // attach the texture and use it as the color buffer.
1720                left_camera->attach(osg::Camera::COLOR_BUFFER, left_texture.get());
1721
1722
1723                // create distortion texture
1724                osg::ref_ptr<osg::Texture> right_texture = createDistortionTexture(traits->width/2, traits->height);
1725
1726                // convert to RTT Camera
1727                right_camera->setViewport(0, 0, traits->width/2, traits->height);
1728                right_camera->setDrawBuffer(GL_FRONT);
1729                right_camera->setReadBuffer(GL_FRONT);
1730                right_camera->setAllowEventFocus(true);
1731                right_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 1);
1732                right_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1733
1734                // attach the texture and use it as the color buffer.
1735                right_camera->attach(osg::Camera::COLOR_BUFFER, right_texture.get());
1736               
1737
1738                // create Keystone left distortion camera
1739                left_keystone->setGridColor(osg::Vec4(1.0f,0.0f,0.0,1.0));
1740                osg::ref_ptr<osg::Camera> left_keystone_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1741                                                                                left_start, 0, traits->width/2, traits->height,
1742                                                                                traits->doubleBuffer ? GL_BACK : GL_FRONT,
1743                                                                                left_texture.get(), left_keystone.get());
1744
1745                left_keystone_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 2);
1746
1747                // attach Keystone editing event handler.
1748                left_keystone_camera->addEventCallback(new KeystoneHandler(left_keystone.get()));
1749
1750
1751                // create Keystone right distortion camera
1752                right_keystone->setGridColor(osg::Vec4(0.0f,1.0f,0.0,1.0));
1753                osg::ref_ptr<osg::Camera> right_keystone_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1754                                                                                right_start, 0, traits->width/2, traits->height,
1755                                                                                traits->doubleBuffer ? GL_BACK : GL_FRONT,
1756                                                                                right_texture.get(), right_keystone.get());
1757
1758                right_keystone_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 3);
1759
1760                // attach Keystone editing event handler.
1761                right_keystone_camera->addEventCallback(new KeystoneHandler(right_keystone.get()));
1762
1763                camera->setAllowEventFocus(false);
1764               
1765            }
1766           
1767            break;
1768        }
1769        case(osg::DisplaySettings::VERTICAL_SPLIT):
1770        {
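            // Vertical split stereo: the window is split into bottom and top halves, one eye per half;
            // which eye maps to which half is controlled by DisplaySettings::getSplitStereoVerticalEyeMapping().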
1771            // disconnect the camera from the graphics context.
1772            camera->setGraphicsContext(0);
1773
1774            bool left_eye_bottom_viewport = ds->getSplitStereoVerticalEyeMapping()==osg::DisplaySettings::LEFT_EYE_BOTTOM_VIEWPORT;
1775            int left_start = (left_eye_bottom_viewport) ? 0 : traits->height/2;
1776            int right_start = (left_eye_bottom_viewport) ? traits->height/2 : 0;
1777           
1778            // bottom viewport Camera
1779            osg::ref_ptr<osg::Camera> left_camera = assignStereoCamera(ds, gc.get(),
1780                               0, left_start, traits->width, traits->height/2, traits->doubleBuffer ? GL_BACK : GL_FRONT,
1781                               -1.0);
1782
1783            // top viewport camera
1784            osg::ref_ptr<osg::Camera> right_camera = assignStereoCamera(ds, gc.get(),
1785                               0, right_start, traits->width, traits->height/2, traits->doubleBuffer ? GL_BACK : GL_FRONT,
1786                               1.0);
1787
1795            if (!keystones.empty())
1796            {
1797                // for keystone:
1798                // left camera to render to left texture using whole viewport of left texture
1799                // right camera to render to right texture using whole viewport of right texture
1800                // left keystone camera to render to left viewport/window
1801                // right keystone camera to render to right viewport/window
1802                // two keystones, one for each of the left and right viewports/windows
1803
1804                osg::ref_ptr<Keystone> left_keystone = keystones[0];
1805                osg::ref_ptr<Keystone> right_keystone = keystones[1];
1806
1807                // create distortion texture
1808                osg::ref_ptr<osg::Texture> left_texture = createDistortionTexture(traits->width, traits->height/2);
1809
1810                // convert to RTT Camera
1811                left_camera->setViewport(0, 0, traits->width, traits->height/2);
1812                left_camera->setDrawBuffer(GL_FRONT);
1813                left_camera->setReadBuffer(GL_FRONT);
1814                left_camera->setAllowEventFocus(true);
1815                left_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 0);
1816                left_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1817
1818                // attach the texture and use it as the color buffer.
1819                left_camera->attach(osg::Camera::COLOR_BUFFER, left_texture.get());
1820
1821
1822                // create distortion texture
1823                osg::ref_ptr<osg::Texture> right_texture = createDistortionTexture(traits->width, traits->height/2);
1824
1825                // convert to RTT Camera
1826                right_camera->setViewport(0, 0, traits->width, traits->height/2);
1827                right_camera->setDrawBuffer(GL_FRONT);
1828                right_camera->setReadBuffer(GL_FRONT);
1829                right_camera->setAllowEventFocus(true);
1830                right_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 1);
1831                right_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1832
1833                // attach the texture and use it as the color buffer.
1834                right_camera->attach(osg::Camera::COLOR_BUFFER, right_texture.get());
1835               
1836
1837                // create Keystone left distortion camera
1838                left_keystone->setGridColor(osg::Vec4(1.0f,0.0f,0.0,1.0));
1839                osg::ref_ptr<osg::Camera> left_keystone_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1840                                                                                0, left_start, traits->width, traits->height/2,
1841                                                                                traits->doubleBuffer ? GL_BACK : GL_FRONT,
1842                                                                                left_texture.get(), left_keystone.get());
1843
1844                left_keystone_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 2);
1845
1846                // attach Keystone editing event handler.
1847                left_keystone_camera->addEventCallback(new KeystoneHandler(left_keystone.get()));
1848
1849
1850                // create Keystone right distortion camera
1851                right_keystone->setGridColor(osg::Vec4(0.0f,1.0f,0.0,1.0));
1852                osg::ref_ptr<osg::Camera> right_keystone_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1853                                                                                0, right_start, traits->width, traits->height/2,
1854                                                                                traits->doubleBuffer ? GL_BACK : GL_FRONT,
1855                                                                                right_texture.get(), right_keystone.get());
1856
1857                right_keystone_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 3);
1858
1859                // attach Keystone editing event handler.
1860                right_keystone_camera->addEventCallback(new KeystoneHandler(right_keystone.get()));
1861
1862                camera->setAllowEventFocus(false);
1863               
1864            }
1865
1866            break;
1867        }
1868        case(osg::DisplaySettings::LEFT_EYE):
1869        {
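            // LEFT_EYE: monoscopic rendering of the whole window using only the left eye's
            // view/projection offsets (e.g. to drive one display of a passive stereo setup).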
1870            // disconnect the camera from the graphics context.
1871            camera->setGraphicsContext(0);
1872
1873            // single window, whole window, just left eye offsets
1874            osg::ref_ptr<osg::Camera> left_camera = assignStereoCamera(ds, gc.get(), 0, 0, traits->width, traits->height, traits->doubleBuffer ? GL_BACK : GL_FRONT, -1.0);
1875
1876            // for keystone:
1877            // treat as standard keystone correction.
1878            // left eye camera to render to texture
1879            // keystone camera then render to window
1880            // one keystone and editing for window
1881
1882            if (!keystones.empty())
1883            {
1884                // for keystone:
1885                // left eye camera renders to a texture
1886                // keystone camera then renders that texture to the whole window
1887                // (no colour masks are involved in this mode)
1888                // one keystone and editing for the one window
1889
1890                osg::ref_ptr<Keystone> keystone = keystones.front();
1891
1892                // create distortion texture
1893                osg::ref_ptr<osg::Texture> texture = createDistortionTexture(traits->width, traits->height);
1894
1895                // convert to RTT Camera
1896                left_camera->setDrawBuffer(GL_FRONT);
1897                left_camera->setReadBuffer(GL_FRONT);
1898                left_camera->setAllowEventFocus(false);
1899                left_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 0);
1900                left_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1901
1902                // attach the texture and use it as the color buffer.
1903                left_camera->attach(osg::Camera::COLOR_BUFFER, texture.get());
1904
1905
1906                // create Keystone distortion camera
1907                osg::ref_ptr<osg::Camera> distortion_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1908                                                                                0, 0, traits->width, traits->height,
1909                                                                                traits->doubleBuffer ? GL_BACK : GL_FRONT,
1910                                                                                texture.get(), keystone.get());
1911
1912                distortion_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 2);
1913               
1914                // attach Keystone editing event handler.
1915                distortion_camera->addEventCallback(new KeystoneHandler(keystone.get()));
1916            }
1917            break;
1918        }
1919        case(osg::DisplaySettings::RIGHT_EYE):
1920        {
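            // RIGHT_EYE: monoscopic rendering of the whole window using only the right eye's
            // view/projection offsets (e.g. to drive the other display of a passive stereo setup).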
1921            // disconnect the camera from the graphics context.
1922            camera->setGraphicsContext(0);
1923
1924            // single window, whole window, just right eye offsets
1925            osg::ref_ptr<osg::Camera> right_camera = assignStereoCamera(ds, gc.get(), 0, 0, traits->width, traits->height, traits->doubleBuffer ? GL_BACK : GL_FRONT, 1.0);
1926
1927            // for keystone:
1928            // treat as standard keystone correction.
1929            // right eye camera to render to texture
1930            // keystone camera then render to window
1931            // one keystone and editing for window
1932
1933            if (!keystones.empty())
1934            {
1935                // for keystone:
1936                // right eye camera renders to a texture
1937                // keystone camera then renders that texture to the whole window
1938                // (no colour masks are involved in this mode)
1939                // one keystone and editing for the one window
1940
1941                osg::ref_ptr<Keystone> keystone = keystones.front();
1942
1943                // create distortion texture
1944                osg::ref_ptr<osg::Texture> texture = createDistortionTexture(traits->width, traits->height);
1945
1946                // convert to RTT Camera
1947                right_camera->setDrawBuffer(GL_FRONT);
1948                right_camera->setReadBuffer(GL_FRONT);
1949                right_camera->setAllowEventFocus(false);
1950                right_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 0);
1951                right_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1952
1953                // attach the texture and use it as the color buffer.
1954                right_camera->attach(osg::Camera::COLOR_BUFFER, texture.get());
1955
1956                // create Keystone distortion camera
1957                osg::ref_ptr<osg::Camera> distortion_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1958                                                                                             0, 0, traits->width, traits->height,
1959                                                                                             traits->doubleBuffer ? GL_BACK : GL_FRONT,
1960                                                                                             texture.get(), keystone.get());
1961
1962                distortion_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 1);
1963               
1964                // attach Keystone editing event handler.
1965                distortion_camera->addEventCallback(new KeystoneHandler(keystone.get()));
1966            }
1967            break;
1968        }
1969        case(osg::DisplaySettings::HORIZONTAL_INTERLACE):
1970        case(osg::DisplaySettings::VERTICAL_INTERLACE):
1971        case(osg::DisplaySettings::CHECKERBOARD):
1972        {
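            // Interlaced/checkerboard stereo: a full-window quad is drawn once with a polygon stipple
            // pattern to tag alternate columns/rows/pixels in the stencil buffer; the left eye then
            // renders where the stencil is 0 and the right eye where it is non-zero.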
1973            // disconnect the camera from the graphics context.
1974            camera->setGraphicsContext(0);
1975
1976            // set up the stencil buffer
1977            {
1978                osg::ref_ptr<osg::Camera> camera = new osg::Camera;
1979                camera->setGraphicsContext(gc.get());
1980                camera->setViewport(0, 0, traits->width, traits->height);
1981                camera->setDrawBuffer(traits->doubleBuffer ? GL_BACK : GL_FRONT);
1982                camera->setReadBuffer(camera->getDrawBuffer());
1983                camera->setReferenceFrame(osg::Camera::ABSOLUTE_RF);
1984                camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT|GL_STENCIL_BUFFER_BIT);
1985                camera->setClearStencil(0);
1986                camera->setRenderOrder(osg::Camera::NESTED_RENDER, 0);
1987                addSlave(camera.get(), false);
1988
1989                osg::ref_ptr<osg::Geometry> geometry = osg::createTexturedQuadGeometry(osg::Vec3(-1.0f,-1.0f,0.0f), osg::Vec3(2.0f,0.0f,0.0f), osg::Vec3(0.0f,2.0f,0.0f), 0.0f, 0.0f, 1.0f, 1.0f);
1990                osg::ref_ptr<osg::Geode> geode = new osg::Geode;
1991                geode->addDrawable(geometry.get());
1992                camera->addChild(geode.get());
1993
1994                geode->setCullingActive(false);
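                // the quad is specified directly in normalized device coordinates (the camera's view and
                // projection matrices are left as identity), so culling is disabled to make sure it is never rejected.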
1995               
1996                osg::ref_ptr<osg::StateSet> stateset = geode->getOrCreateStateSet();
1997
1998                // set up stencil
1999                osg::ref_ptr<osg::Stencil> stencil = new osg::Stencil;
2000                stencil->setFunction(osg::Stencil::ALWAYS, 1, ~0u);
2001                stencil->setOperation(osg::Stencil::REPLACE, osg::Stencil::REPLACE, osg::Stencil::REPLACE);
2002                stencil->setWriteMask(~0u);
2003                stateset->setAttributeAndModes(stencil.get(), osg::StateAttribute::ON);
2004
2005                // set up polygon stipple
2006                if(ds->getStereoMode() == osg::DisplaySettings::VERTICAL_INTERLACE)
2007                {
2008                    stateset->setAttributeAndModes(new osg::PolygonStipple(patternVertEven), osg::StateAttribute::ON);
2009                }
2010                else if(ds->getStereoMode() == osg::DisplaySettings::HORIZONTAL_INTERLACE)
2011                {
2012                    stateset->setAttributeAndModes(new osg::PolygonStipple(patternHorzEven), osg::StateAttribute::ON);
2013                }
2014                else
2015                {
2016                    stateset->setAttributeAndModes(new osg::PolygonStipple(patternCheckerboard), osg::StateAttribute::ON);
2017                }
2018
2019                stateset->setMode(GL_LIGHTING, osg::StateAttribute::OFF);
2020                stateset->setMode(GL_DEPTH_TEST, osg::StateAttribute::OFF);
2021
2022            }
2023
2024            // left Camera
2025            {
2026                osg::ref_ptr<osg::Camera> left_camera = assignStereoCamera(ds, gc.get(), 0, 0, traits->width, traits->height, traits->doubleBuffer ? GL_BACK : GL_FRONT, -1.0);
2027                left_camera->setClearMask(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
2029                left_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 1);
2030
2031                osg::ref_ptr<osg::Stencil> stencil = new osg::Stencil;
2032                stencil->setFunction(osg::Stencil::EQUAL, 0, ~0u);
2033                stencil->setOperation(osg::Stencil::KEEP, osg::Stencil::KEEP, osg::Stencil::KEEP);
2034                left_camera->getOrCreateStateSet()->setAttributeAndModes(stencil.get(), osg::StateAttribute::ON);
2035            }
2036
2037            // right Camera
2038            {
2039                osg::ref_ptr<osg::Camera> right_camera = assignStereoCamera(ds, gc.get(), 0, 0, traits->width, traits->height, traits->doubleBuffer ? GL_BACK : GL_FRONT, 1.0);
2040                right_camera->setClearMask(GL_DEPTH_BUFFER_BIT);
2041                right_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 2);
2042
2043                osg::ref_ptr<osg::Stencil> stencil = new osg::Stencil;
2044                stencil->setFunction(osg::Stencil::NOTEQUAL, 0, ~0u);
2045                stencil->setOperation(osg::Stencil::KEEP, osg::Stencil::KEEP, osg::Stencil::KEEP);
2046                right_camera->getOrCreateStateSet()->setAttributeAndModes(stencil.get(), osg::StateAttribute::ON);
2047            }
2048            break;
2049        }
2050    }
2051}
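
// Usage sketch (not part of this file; 'viewer' is an assumed osgViewer::Viewer instance):
// the stereo and keystone paths above are driven entirely by osg::DisplaySettings, e.g.
//
//   osg::DisplaySettings* ds = osg::DisplaySettings::instance().get();
//   ds->setStereo(true);
//   ds->setStereoMode(osg::DisplaySettings::ANAGLYPHIC);
//   viewer.setUpViewAcrossAllScreens();
//
// Keystone objects are picked up from ds->getKeystones(); equivalent settings can typically
// also be supplied via environment variables (e.g. OSG_STEREO, OSG_STEREO_MODE) that
// DisplaySettings reads at startup.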