- From: abe@sci.utah.edu
- To: manta@sci.utah.edu
- Subject: [MANTA] r504 - in branches/AFR: Engine/Control Engine/ImageTraversers Engine/ImageTraversers/AFR StandAlone
- Date: Fri, 26 Aug 2005 01:48:20 -0600 (MDT)
Author: abe
Date: Fri Aug 26 01:48:19 2005
New Revision: 504
Modified:
branches/AFR/Engine/Control/AFRPipeline.cc
branches/AFR/Engine/ImageTraversers/AFImageTraverser.cc
branches/AFR/Engine/ImageTraversers/AFImageTraverser.h
branches/AFR/Engine/ImageTraversers/AFR/sample.h
branches/AFR/Engine/ImageTraversers/CMakeLists.txt
branches/AFR/StandAlone/manta.cc
Log:
Incremental change to AFRPipeline. More debugging is necessary.
M StandAlone/manta.cc
M Engine/Control/AFRPipeline.cc
M Engine/ImageTraversers/AFR/sample.h
M Engine/ImageTraversers/AFImageTraverser.cc
M Engine/ImageTraversers/CMakeLists.txt
M Engine/ImageTraversers/AFImageTraverser.h
Modified: branches/AFR/Engine/Control/AFRPipeline.cc
==============================================================================
--- branches/AFR/Engine/Control/AFRPipeline.cc (original)
+++ branches/AFR/Engine/Control/AFRPipeline.cc Fri Aug 26 01:48:19 2005
@@ -72,7 +72,6 @@
#include <dlfcn.h>
#include <stdio.h>
-
using namespace Manta;
using namespace Manta::Afr;
using namespace std;
@@ -401,7 +400,7 @@
}
// skipToRendering:
- for (;;) {
+ // for (;;) {
/////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////
@@ -439,7 +438,7 @@
// Determine how to break out of inner loop.
!!!!!!!!!!!!!!!!!!!!!!!!!!
- } // End of inner loop.
+ // } // End of inner loop.
} // End of outer loop.
}
@@ -629,50 +628,83 @@
///////////////////////////////////////////////////////////////////////////////
// SETUP PIPELINES
///////////////////////////////////////////////////////////////////////////////
-void AFRPipeline::setupPipelines(int numProcs)
-{
+void AFRPipeline::setupPipelines(int numProcs) {
+
int numChannels = static_cast<int>(channels.size());
- SetupContext globalcontext(this, numChannels, 0, numProcs,
+
+ std::cout << "Thread in setupPipelines" << std::endl;
+
+ // Create a setup context.
+ SetupContext globalcontext(this, numChannels, 0, numProcs,
currentLoadBalancer, currentPixelSampler,
currentRenderer);
+
+
/////////////////////////////////////////////////////////////////////////////
+ // Begin the image traverser setup.
currentImageTraverser->setupBegin(globalcontext, numChannels);
+
+
/////////////////////////////////////////////////////////////////////////////
+ // Invoke setup callbacks.
for(vector<SetupCallback*>::iterator iter = setupCallbacks.begin();
iter != setupCallbacks.end(); iter++)
- (*iter)->setupBegin(globalcontext, numChannels);
- for(int index = 0;index < static_cast<int>(channels.size());index++){
+ (*iter)->setupBegin(globalcontext, numChannels);
+
+
/////////////////////////////////////////////////////////////////////////////
+ // Setup each channel.
+ for(int index = 0;index < static_cast<int>(channels.size());index++){
Channel* channel = channels[index];
SetupContext context(this, index, numChannels, 0, numProcs,
channel->stereo, channel->xres, channel->yres,
- currentLoadBalancer, currentPixelSampler,
+                         currentLoadBalancer, currentPixelSampler,
currentRenderer);
+
+    // Try to setup the channels repeatedly until the context stops changing.
int iteration = 100;
do {
context.setChanged(false);
context.clearMasterWindow();
+
for(vector<SetupCallback*>::iterator iter = setupCallbacks.begin();
- iter != setupCallbacks.end(); iter++)
- (*iter)->setupDisplayChannel(context);
- currentImageTraverser->setupDisplayChannel(context);
+ iter != setupCallbacks.end(); iter++)
+ (*iter)->setupDisplayChannel(context);
+
+ // Setup the image traverser display channel....?
+ currentImageTraverser->setupDisplayChannel(context);
+
+ // Setup the image display.
channel->display->setupDisplayChannel(context);
} while(context.isChanged() && --iteration > 0);
+
+ // Check for setup error.
if(!iteration)
throw InternalError("Pipeline/resolution negotiation failed",
__FILE__, __LINE__);
+
+ // Set the channel resolution based on the context.
context.getResolution(channel->stereo, channel->xres, channel->yres);
+
+ // Check the resolution.
if(channel->xres <= 0)
throw IllegalValue<int>("Resolution should be positive",
channel->xres);
if(channel->yres <= 0)
throw IllegalValue<int>("Resolution should be positive",
channel->yres);
-
+
+
///////////////////////////////////////////////////////////////////////////
+ // Determine pipeline depth.
if(context.getMinDepth() > context.getMaxDepth())
throw InternalError("Pipeline depth negotiation failed",
__FILE__, __LINE__);
+
int depth = context.getMinDepth();
if(depth == 1 && context.getMaxDepth() > 1)
depth = 2; // Prefer double-buffering
- cerr << "AFRPipeline::setupPipelines:: depth = "<< depth << "\n";
- channel->pipelineDepth = depth;
- unsigned long osize = channel->images.size();
+
+ // cerr << "AFRPipeline::setupPipelines:: depth = "<< depth << "\n";
+
+
///////////////////////////////////////////////////////////////////////////
+ // Create images for each stage in the pipeline.
+ channel->pipelineDepth = depth;
+    unsigned long osize = channel->images.size(); // unsigned long would be a pretty long pipeline right?
for(unsigned long i=depth;i<osize;i++)
delete channel->images[i];
for(unsigned long i=osize;i<static_cast<unsigned long>(depth);i++)
@@ -680,7 +712,7 @@
channel->images.resize(depth);
}
}
-
+
///////////////////////////////////////////////////////////////////////////////
// RESIZE IMAGES
///////////////////////////////////////////////////////////////////////////////
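
Note on the setupPipelines() hunk above: the do/while block negotiates channel resolution and pipeline depth by re-running the setupDisplayChannel callbacks until the SetupContext stops reporting changes, with a hard cap of 100 passes. A minimal standalone sketch of that fixed-point pattern follows; Context and Callback here are simplified stand-ins for Manta's SetupContext and SetupCallback, not the real classes.

// Sketch of the negotiation loop used in AFRPipeline::setupPipelines().
// Only the control flow mirrors the diff above; the types are placeholders.
#include <cstddef>
#include <stdexcept>
#include <vector>

struct Context {
  bool changed;
  Context() : changed(false) {}
  void setChanged(bool c) { changed = c; }
  bool isChanged() const { return changed; }
};

struct Callback {
  virtual ~Callback() {}
  virtual void setupDisplayChannel(Context&) = 0;
};

void negotiate(std::vector<Callback*>& callbacks) {
  Context context;
  int iteration = 100;                       // bounded number of passes, as in the diff
  do {
    context.setChanged(false);               // assume this pass converges...
    for (std::size_t i = 0; i < callbacks.size(); ++i)
      callbacks[i]->setupDisplayChannel(context);  // ...unless a callback flips the flag
  } while (context.isChanged() && --iteration > 0);
  if (!iteration)
    throw std::runtime_error("Pipeline/resolution negotiation failed");
}
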
Modified: branches/AFR/Engine/ImageTraversers/AFImageTraverser.cc
==============================================================================
--- branches/AFR/Engine/ImageTraversers/AFImageTraverser.cc (original)
+++ branches/AFR/Engine/ImageTraversers/AFImageTraverser.cc Fri Aug 26 01:48:19 2005
@@ -17,7 +17,6 @@
#include <Core/XUtils/XHelper.h>
#include <SCIRun/Core/Thread/Time.h>
-
#include <cmath>
#ifdef __sgi
@@ -33,7 +32,6 @@
using namespace Manta;
using namespace Manta::Afr;
-
using SCIRun::IllegalValue;
using SCIRun::InternalError;
@@ -74,15 +72,16 @@
// confirm!
}
-void AFImageTraverser::setupBegin(SetupContext& context,
- int numChannels)
-{
+void AFImageTraverser::setupBegin(SetupContext& context, int numChannels) {
+
context.loadBalancer->setupBegin(context, numChannels);
}
-void AFImageTraverser::setupDisplayChannel(SetupContext& context)
-{
+void AFImageTraverser::setupDisplayChannel(SetupContext& context) {
+
+  // Specify that the rendering pipeline should use only one image buffer.
context.constrainPipelineDepth(1,1); // single buffer mode is set here
+
bool stereo; // get whether it is stereo, (shoot two rays for left
// and right eye)
int i;
@@ -90,34 +89,41 @@
samplingrate = 400000; // let us for now assume something realistic
samples_done = 0;
initpass = false;
+
// this will change when we can actually simulate
context.getResolution(stereo, xres, yres);
+
//---------------------------------------------------------
// initialize random number generators for each thread.
//---------------------------------------------------------
myRandomNumber = new MT_RNG[context.numProcs];
- for(i=0; i<context.numProcs; i++)
- {
+ for(i=0; i<context.numProcs; i++) {
myRandomNumber[i].seed_rng(1234*(i+1)); // just to begin give a simple seed
}
+
+
/////////////////////////////////////////////////////////////////////////////
+ // Create a kdtree for each client.
+
//---------------------------------------------------------
// make our kdtree, given the resolution of image/channel
//---------------------------------------------------------
kdtree.setAB(xres, yres, samplingrate);
int numlevels = kdtree.init(xres, yres, samplingrate); // initialize the tree
kdtree.setTileCut(numlevels/2, 0, samplingrate);
- // set a mean number of tiles, i.e. cut at middle of tree
+
+ // set a mean number of tiles, i.e. cut at middle of tree
kdtree.resetPseudoRandomSeed();
//--------------------------------------------------------------------
// allocate the queues, based on samplinrate and number of processors.
//--------------------------------------------------------------------
- if(context.numProcs<2)
- {
+ if(context.numProcs<2) {
throw IllegalValue<int>("numProcs must be > 1 for AFR, use -np 2 or more", context.numProcs);
}
+
num_clients = context.numProcs-1;
- // note that max fragment size is hard coded to 32 in fragment.h
+
+ // note that max fragment size is hard coded to 32 in fragment.h
// so not to exceed that limit.
fragment_size = 16; // ****** HARD CODED *******
chunk_size = num_clients*256; // this is done so that
@@ -130,7 +136,9 @@
sampleQ[ODD_FRAME] = new Sample[chunk_size];
samplesetQ[EVEN_FRAME] = new CQ<SampleSet>[num_clients];
samplesetQ[ODD_FRAME] = new CQ<SampleSet>[num_clients];
- for(i = 0; i<num_clients; i++)
+
+ // Initialize
+ for(i = 0; i<num_clients; i++)
{
temporalQ[i].init(qSize+1);
samplesetQ[EVEN_FRAME][i].init(chunk_size);
@@ -146,6 +154,7 @@
//--------------------------------------------------------------------
cout << "numFragments = " << numFragments << endl;
context.loadBalancer->setupDisplayChannel(context, numFragments);
+
// make fist set of fragments from uniform tiling
allocateFragments(EVEN_FRAME);
}
@@ -168,9 +177,10 @@
//cout << "done setup frame" << endl;
}
+// Called from setupDisplayChannel.
+// Called from masterThread.
+void AFImageTraverser::allocateFragments(const FrameType f) {
-void AFImageTraverser::allocateFragments(const FrameType f)
-{
int j;
int x, y;
Sample s;
@@ -285,11 +295,17 @@
//cout << "DONE" << endl;
}
+
+///////////////////////////////////////////////////////////////////////////////
+// Render all of samples in a fragment.
+// This is similar to a pixel sampler.
+///////////////////////////////////////////////////////////////////////////////
void AFImageTraverser::renderFragment(const RenderContext& context,
int assignment, Image* image,
int xres, int yres)
{
//cout << "inside renderFragment " << endl;
+
// renders the fragment by jittering it and stores result in temporalQ
int flags = RayPacket::HaveImageCoordinates | RayPacket::ConstantEye;
Fragment fragment;
@@ -297,17 +313,24 @@
fragment_size : (chunk_size-assignment*fragment_size);
fragment.setSize(fsize);
fragment.setFlags(Fragment::ConstantEye);
- int myID = context.proc;
+
+ int myID = context.proc;
+
+
/////////////////////////////////////////////////////////////////////////////
+ // Create ray packets.
for(int f=0;f<fsize;f+=RayPacket::MaxSize) {
- int size = RayPacket::MaxSize;
+
+ int size = RayPacket::MaxSize;
if(size<fragment_size) size = fsize;
if(size >= fsize-f)
size = fsize-f;
+
// Create a ray packet
int depth = 0;
RayPacketData raydata;
RayPacket rays(raydata, size, depth, flags);
+ // Copy samples from the sampleQ into the fragment.
for(int i=0;i<size;i++) {
Fragment::Element& fe = fragment.get(f+i);
float cx, cy;
@@ -320,6 +343,7 @@
fe.which_eye = 0;
// normalized
double px, py;
+
//cout << "raytracing: " << fe.x << ", " << fe.y << endl;
// we will jitter later, now just add 0.5 <TODO>
if(xres>yres) // let the smaller dimension be mapped to [-1,1]
@@ -332,15 +356,22 @@
px = (double)(-1.0 + 2.0*(double)(cx)/(double)xres);
py = (double)(-1.0 + 2.0*(double)(cy)/(double)xres);
}
- //printf("%f, %f\n", (float)px, (float)py);
+
+ // Specify the position and color pointer for the packet element.
rays.setPixel(i, 0, px, py, &fe.color);
}
+
///////////////////////////////////////////////////////////////////////////
// Trace the rays. The results will automatically go into the fragment
context.renderer->traceEyeRays(context, rays);
+
+ // Compute world space hit positions.
rays.computeHitPositions();
+
+
///////////////////////////////////////////////////////////////////////////
// okay now copy from fragment to temporalQ
for(int i=0;i<size;i++) {
+
int sind = fragment_size*assignment + f+i;
Fragment::Element& fe = fragment.get(f+i);
@@ -354,17 +385,20 @@
sampleQ[frametype[myID]][sind].worldCoord[2] = re.hitPosition.z();
}
}
- //cout << "setting image" << endl;
+
+
/////////////////////////////////////////////////////////////////////////////
+ // Skip reconstruction and set the image pixel.
image->set(fragment);
}
-void AFImageTraverser::renderImage(const RenderContext& context,
- Image* image)
-{
- /*
- basically do the following, if it is a master thread call master function, else
- call the client function
- */
+///////////////////////////////////////////////////////////////////////////////
+// Manta ImageTraverser callback.
+///////////////////////////////////////////////////////////////////////////////
+void AFImageTraverser::renderImage( const RenderContext& context, Image* image ) {
+
+  // Basically do the following, if it is a master thread call master function, else
+ // call the client function
+
if(context.proc == 0) // master thread
{
masterThread(context, image);
@@ -382,7 +416,11 @@
clientThread(context, image, assignment);
}
}
- // update the frame type
+
+ // update the frame type
+
+ // Do all of the threads call this??
+
if(frametype[context.proc]==EVEN_FRAME)
frametype[context.proc] = ODD_FRAME;
else
@@ -391,10 +429,13 @@
}
}
-void AFImageTraverser::masterThread(const RenderContext& context,
- Image* image)
-{
- //cout << "inside masterthread for frametype " << frametype[0] << endl;
+
+///////////////////////////////////////////////////////////////////////////////
+// This method implements the master thread functionality.
+///////////////////////////////////////////////////////////////////////////////
+void AFImageTraverser::masterThread(const RenderContext& context, Image* image) {
+
+ // cout << "inside masterthread for frametype " << frametype[0] << endl;
// update kdtree based on previous results from the client threads
bool stereo;
@@ -456,9 +497,11 @@
//cout << "masterThread done" << endl;
}
-void AFImageTraverser::clientThread(const RenderContext& context,
- Image* image, int assignment)
-{
+///////////////////////////////////////////////////////////////////////////////
+// This method implements the client thread functionality.
+///////////////////////////////////////////////////////////////////////////////
+void AFImageTraverser::clientThread(const RenderContext& context, Image* image, int assignment) {
+
//cout << "inside clientThread for frametype " << frametype[context.proc]
<< endl;
bool stereo;
int xres, yres;
@@ -472,26 +515,25 @@
// let us not write temporal samples to image
//cout << "now placing fragment in temporalQ" << endl;
- for(int i=0; i<fragment_size; i++)
- {
+ for(int i=0; i<fragment_size; i++) {
//cout << "placing id: " << frametype[myID] << ", " <<
fragment_size*assignment+i << endl;
temporalQ[myID-1].qInsert(&sampleQ[frametype[myID]][fragment_size*assignment+i]);
}
- //cout << "placement in temporalQ done" << endl;
+
+ //cout << "placement in temporalQ done" << endl;
/* place the fragment in the temporalQ, take fragment_size number of
* items from the temporalQ one by one, reproject them and complete
* a crosshair, placing it in samplesetQ.
*/
- if(initpass)
- {
+ if(initpass) {
+
int i;
Sample *sp;
//cout << "now making xhairs" << endl;
- for(i=0; i<fragment_size; i++)
- {
+ for(i=0; i<fragment_size; i++) {
+
sp = temporalQ[myID-1].seekLast();
- if(sp!=NULL)
- {
+ if(sp!=NULL) {
temporalQ[myID-1].qDelete();
Manta::Real px, py, pz;
px = sp->worldCoord[0];
@@ -531,6 +573,10 @@
//cout << "making xhairs done" << endl;
}
+///////////////////////////////////////////////////////////////////////////////
+// This method adjusts the kdtree-cut by either merging nodes along the cut
+// or splitting nodes and adding their children to the cut.
+///////////////////////////////////////////////////////////////////////////////
void AFImageTraverser::adjustTiles(Timestamp currenttime)
{
float minError, maxError;
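
Note on adjustTiles(): per the comment added above, the method adjusts the kdtree-cut by merging nodes along the cut or splitting nodes and adding their children to it. A rough sketch of that split/merge idea follows; the Node type, error fields, and thresholds are illustrative placeholders and not Manta's KDTree API.

// Simplified sketch of adjusting a kd-tree "cut": nodes with high error are
// replaced by their children (refine), nodes with low error fall back to their
// parent (coarsen). A real implementation would also deduplicate parents that
// both children push onto the new cut.
#include <cstddef>
#include <vector>

struct Node {
  float error;          // refinement priority, e.g. estimated from sample gradients
  Node* parent;
  Node* child[2];
  bool isLeaf() const { return child[0] == 0; }
};

void adjustCut(std::vector<Node*>& cut, float splitThresh, float mergeThresh) {
  std::vector<Node*> next;
  for (std::size_t i = 0; i < cut.size(); ++i) {
    Node* n = cut[i];
    if (n->error > splitThresh && !n->isLeaf()) {
      next.push_back(n->child[0]);   // refine: this region gets more samples
      next.push_back(n->child[1]);
    } else if (n->error < mergeThresh && n->parent) {
      next.push_back(n->parent);     // coarsen: move back up the tree
    } else {
      next.push_back(n);             // keep the node on the cut unchanged
    }
  }
  cut.swap(next);
}
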
Modified: branches/AFR/Engine/ImageTraversers/AFImageTraverser.h
==============================================================================
--- branches/AFR/Engine/ImageTraversers/AFImageTraverser.h (original)
+++ branches/AFR/Engine/ImageTraversers/AFImageTraverser.h Fri Aug 26 01:48:19 2005
@@ -18,45 +18,73 @@
namespace Manta {
using namespace std;
- namespace Afr {
- enum FrameType { EVEN_FRAME, ODD_FRAME };
- class AFImageTraverser : public ImageTraverser {
- public:
- AFImageTraverser(const vector<string>& args);
- virtual ~AFImageTraverser();
- virtual void setupBegin(SetupContext&, int numChannels);
- virtual void setupDisplayChannel(SetupContext&);
- virtual void setupFrame(const RenderContext& context);
- virtual void renderImage(const RenderContext& context, Image* image);
- void allocateFragments(const FrameType f);
- void renderFragment(const RenderContext& context,
- int assignment, Image* image,
- int xres, int yres);
- void renderCrossHair(const RenderContext& context, int myID,
- Image *image, int xres, int yres, Sample *s);
- void masterThread(const RenderContext& context, Image* image);
- void clientThread(const RenderContext& context, Image* image, int assignment);
- void adjustTiles(Timestamp currenttime);
- static ImageTraverser* create(const vector<string>& args);
- private:
- AFImageTraverser(const AFImageTraverser&);
- AFImageTraverser& operator=(const AFImageTraverser&);
- KDTree kdtree;
- int numFragments;
- CQ<Sample> *temporalQ;
- Sample *sampleQ[2];
- CQ<SampleSet> *samplesetQ[2];
- MT_RNG *myRandomNumber;
- int num_clients;
- int chunk_size, fragment_size;
- int samplingrate;
- unsigned int samples_done;
- int *client_done_counter;
- FrameType *frametype;
- double chunkTimeStamp;
- bool initpass;
- };
-}
-}
+
+  namespace Afr {
+
+    enum FrameType { EVEN_FRAME, ODD_FRAME };
+    class AFImageTraverser : public ImageTraverser {
+    public:
+      AFImageTraverser(const vector<string>& args);
+      virtual ~AFImageTraverser();
+      virtual void setupBegin(SetupContext&, int numChannels);
+      virtual void setupDisplayChannel(SetupContext&);
+      virtual void setupFrame(const RenderContext& context);
+      virtual void renderImage(const RenderContext& context, Image* image);
+      void allocateFragments(const FrameType f);
+
+      // Render fragments.
+      void renderFragment(const RenderContext& context, int assignment, Image* image, int xres, int yres);
+      void renderCrossHair(const RenderContext& context, int myID, Image *image, int xres, int yres, Sample *s);
+
+      // Master thread task.
+      void masterThread(const RenderContext& context, Image* image);
+
+      // Sampler/Renderer task.
+      void clientThread(const RenderContext& context, Image* image, int assignment);
+
+      // Update kdtree-cut.
+      void adjustTiles(Timestamp currenttime);
+
+      static ImageTraverser* create(const vector<string>& args);
+
+    private:
+      AFImageTraverser(const AFImageTraverser&);
+      AFImageTraverser& operator=(const AFImageTraverser&);
+
+      // Image kd-tree.
+      KDTree kdtree;
+
+      // Not sure ???
+      int numFragments;
+
+      //
+      CQ<Sample> *temporalQ;
+
+      // Double buffered: Sample queue.
+      // Samples are obtained from the kdtree and placed in the queue. Later samples
+      // are removed from the queue and added to fragments.
+      Sample *sampleQ[2];
+
+      // Double buffered ???
+      CQ<SampleSet> *samplesetQ[2];
+
+      // Random number generator array??
+      MT_RNG *myRandomNumber;
+
+      // ??????????????
+      int num_clients;
+      int chunk_size, fragment_size;
+      int samplingrate;
+      unsigned int samples_done;
+
+      // ??????????????
+      int *client_done_counter;
+      FrameType *frametype;
+      double chunkTimeStamp;
+      bool initpass;
+    };
+
+  };
+};
#endif
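
Note on the EVEN_FRAME/ODD_FRAME members declared above: the sample and sample-set queues are double buffered by frame parity, and each thread flips its own frametype entry at the end of renderImage() so one buffer is written while the other is read. A minimal sketch of that idea follows; the Sample type and the container are placeholders, not Manta's CQ class.

// Double buffering by frame parity, as suggested by the header comments above.
#include <vector>

enum FrameType { EVEN_FRAME, ODD_FRAME };

struct Sample { float color[3]; };

struct DoubleBufferedSamples {
  std::vector<Sample> buffer[2];          // one buffer per frame parity

  std::vector<Sample>& current(FrameType f)  { return buffer[f]; }
  std::vector<Sample>& previous(FrameType f) {
    return buffer[f == EVEN_FRAME ? ODD_FRAME : EVEN_FRAME];
  }
};

// Mirrors the per-thread parity flip at the end of renderImage() in the .cc diff.
inline FrameType flipFrame(FrameType f) {
  return (f == EVEN_FRAME) ? ODD_FRAME : EVEN_FRAME;
}
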
Modified: branches/AFR/Engine/ImageTraversers/AFR/sample.h
==============================================================================
--- branches/AFR/Engine/ImageTraversers/AFR/sample.h (original)
+++ branches/AFR/Engine/ImageTraversers/AFR/sample.h Fri Aug 26 01:48:19 2005
@@ -23,309 +23,310 @@
* from which we will reconstruct our image.
*/
namespace Manta {
-namespace Afr {
- typedef float Timestamp;
- typedef float FloatColor[3];
- const FloatColor UNDEFINED_SAMPLE_COLOR = {0.0,0.0,0.0};
- const Timestamp UNDEFINED_SAMPLE_TIMESTAMP = 0.0f;
- struct SamplePrintFormat
- {
- float worldCoord[3], vel[3];
- unsigned char rgb[3];
- float timestamp;
- };
-
- class Sample
- {
- public:
- /** color of the sample */
- FloatColor c;
- float viewCoord[3]; // floating point 2D coordinate of the sample on camera viewport
- /** time at which the sample was generated (in milliseconds) */
- float worldCoord[3]; // the 3d location of the sample
- Timestamp t; // timestamp of the sample.
- /**
- * @name Constructors
- * @{
- */
- Sample()
- {
- reset();
- }
-
- void reset()
- {
- c[0]= UNDEFINED_SAMPLE_COLOR[0];
- c[1]= UNDEFINED_SAMPLE_COLOR[1];
- c[2]= UNDEFINED_SAMPLE_COLOR[2];
- t = UNDEFINED_SAMPLE_TIMESTAMP;
- }
-
- void init(const float x, const float y, const Timestamp tstamp)
- {
- t = tstamp;
- viewCoord[0] = x;
- viewCoord[1] = y;
- // <TODO> find viewCoord[2] by eucledian dist from
- // eye and hit point
- }
-
- void set(const float x, const float y, const Timestamp tstamp,
- const float wx, const float wy, const float wz,
- const float r, const float g, const float b )
- {
- c[0]= UNDEFINED_SAMPLE_COLOR[0];
- c[1]= UNDEFINED_SAMPLE_COLOR[1];
- c[2]= UNDEFINED_SAMPLE_COLOR[2];
- t = tstamp;
- viewCoord[0] = x;
- viewCoord[1] = y;
- viewCoord[2] = 1.0;
- c[0] = r;
- c[1] = g;
- c[2] = b;
- worldCoord[0] = wx;
- worldCoord[1] = wy;
- worldCoord[2] = wz;
- }
-
- void operator=(const Sample &value)
- {
- int i;
- for(i=0; i<3; i++)
- {
- c[i] = value.c[i];
- viewCoord[i] = value.viewCoord[i];
- worldCoord[i] = value.worldCoord[i];
- }
- t = value.t;
- }
-
- void print_to_file(FILE *fp)
- {
- SamplePrintFormat spf;
- spf.worldCoord[0] = worldCoord[0];
- spf.worldCoord[1] = worldCoord[1];
- spf.worldCoord[2] = worldCoord[2];
- spf.vel[0] = 0;
- spf.vel[1] = 0;
- spf.vel[2] = 0;
- spf.timestamp = t;
- spf.rgb[0] = (unsigned char)(c[0]*255);
- spf.rgb[1] = (unsigned char)(c[1]*255);
- spf.rgb[2] = (unsigned char)(c[2]*255);
- fwrite(&spf, sizeof(spf), 1, fp);
- }
-
- void print()
- {
- /* cout << "worldCoord = " << worldCoord[0] << ", " << worldCoord[1] <<
", " << worldCoord[2]
- << "; viewCoord = " << viewCoord[0] << ", " << viewCoord[1] << endl
- << "timestamp = " << t << endl
- << "; color = " << c[0] << ", " << c[1] << ", " << c[2] << endl
- << endl;*/
- }
-
- inline float getRGBDistance(FloatColor &fc)
- {
- return sqrt((c[0]-fc[0])*(c[0]-fc[0])
- + (c[1]-fc[1])*(c[1]-fc[1])
- + (c[2]-fc[2])*(c[2]-fc[2]));
- }
- inline float getRGBMeasure()
- {
- return sqrt((c[0]-0.5)*(c[0]-0.5)
- + (c[1]-0.5)*(c[1]-0.5)
- + (c[2]-0.5)*(c[2]-0.5));
- }
- // Indicates whether the sample has been set to a valid value
- inline int isUndefined()
- {
- return (t == UNDEFINED_SAMPLE_TIMESTAMP);
- };
-
- // Returns the standard NTSC intensity (normalized) of the sample
- inline float intensity()
- {
- return NTSC_WT_R*c[0] + NTSC_WT_G*c[1] + NTSC_WT_B*c[2];
- };
- };
-
- enum CrosshairSampleLoc { CENTER_SAMPLE, LEFT_SAMPLE, RIGHT_SAMPLE,
- TOP_SAMPLE, BOTTOM_SAMPLE, TEMPORAL_SAMPLE};
- class SampleSet
- {
- public:
- Sample left, right, bottom, top, center, temporal;
- float tgrad, xgrad, ygrad;
- Timestamp timestamp;
- float viewX, viewY, viewZ;
-
- SampleSet()
- {
- reset();
- }
-
- void reset()
- {
- xgrad = ygrad = tgrad = 0.0;
- viewX = viewY = viewZ = 0;
- timestamp = 0.0;
- left.reset();
- right.reset();
- top.reset();
- bottom.reset();
- center.reset();
- temporal.reset();
- }
-
- void set(CrosshairSampleLoc sloc, const float x, const float y, const Timestamp tstamp,
- const float wx, const float wy, const float wz,
- const float r, const float g, const float b)
- {
- switch(sloc)
- {
- case CENTER_SAMPLE: center.set(x,y,tstamp,wx,wy,wz,r,g,b);
- break;
- case LEFT_SAMPLE: left.set(x,y,tstamp,wx,wy,wz,r,g,b);;
- break;
- case RIGHT_SAMPLE: right.set(x,y,tstamp,wx,wy,wz,r,g,b);;
- break;
- case BOTTOM_SAMPLE: bottom.set(x,y,tstamp,wx,wy,wz,r,g,b);;
- break;
- case TOP_SAMPLE: top.set(x,y,tstamp,wx,wy,wz,r,g,b);;
- break;
- case TEMPORAL_SAMPLE: temporal.set(x,y,tstamp,wx,wy,wz,r,g,b);;
- break;
- default: return;
- };
- timestamp = tstamp;
- }
-
- void computeGradients(Timestamp currenttime)
- {
- // float li, ri, bi, ti, ci, tmpi;
-
- viewX = center.viewCoord[0];
- viewY = center.viewCoord[1];
- viewZ = center.viewCoord[2];
- timestamp = currenttime;
-// li = left.intensity();
-// ri = right.intensity();
-// ti = top.intensity();
-// bi = bottom.intensity();
-// ci = center.intensity();
-// tmpi = temporal.intensity();
-
- if(fabsf(center.viewCoord[0]-left.viewCoord[0])>0.001
- && fabsf(center.viewCoord[0]-right.viewCoord[0])>0.001 )
- {
- //xgrad = fabsf(ci-li)/fabsf(center.viewCoord[0]-left.viewCoord[0])
- // + fabsf(ci-ri)/fabsf(center.viewCoord[0]-right.viewCoord[0]);
- xgrad = center.getRGBDistance(left.c)/fabsf(center.viewCoord[0]-left.viewCoord[0])
- + center.getRGBDistance(right.c)/fabsf(center.viewCoord[0]-right.viewCoord[0]);
- }
- else
- {
- //xgrad = (fabsf(ci-li) + fabsf(ci-ri))/2.0;
- xgrad = (center.getRGBDistance(left.c) + center.getRGBDistance(right.c))/2.0;
- }
-
- if(fabsf(center.viewCoord[0]-top.viewCoord[1])>0.001
- && fabsf(center.viewCoord[0]-bottom.viewCoord[1])>0.001 )
- {
- //ygrad = fabsf(ci-bi)/fabsf(center.viewCoord[1]-top.viewCoord[1])
- // + fabsf(ci-ti)/fabsf(center.viewCoord[1]-bottom.viewCoord[1]);
- ygrad = center.getRGBDistance(bottom.c)/fabsf(center.viewCoord[1]-top.viewCoord[1])
- + center.getRGBDistance(top.c)/fabsf(center.viewCoord[1]-bottom.viewCoord[1]);
- }
- else
- {
- //ygrad = (fabsf(ci-bi) + fabsf(ci-ti))/2.0;
- ygrad = (center.getRGBDistance(bottom.c) + center.getRGBDistance(top.c))/2.0;
- }
-
- if(fabsf(center.t-temporal.t)>0.001)
- {
- //tgrad = fabsf(ci-tmpi)/(center.t - temporal.t);
- tgrad = center.getRGBDistance(temporal.c)/(center.t - temporal.t);
- }
- else
- {
- //tgrad = fabsf(ci-tmpi);
- tgrad = center.getRGBDistance(bottom.c);
- }
-
- //<TODO> set occlusion status here
- }
-
- void print_to_file(FILE *fp)
- {
- left.print_to_file(fp);
- right.print_to_file(fp);
- top.print_to_file(fp);
- bottom.print_to_file(fp);
- center.print_to_file(fp);
- }
-
- void print()
- {
- /* cout << "sample set ---------------->" << endl;
- cout << "center: "; center.print();
- cout << "left: "; left.print();
- cout << "right: "; right.print();
- cout << "top: "; top.print();
- cout << "bottom: "; bottom.print();
- cout << "temporal: "; temporal.print();
- cout << "<---------------------------" << endl << endl;*/
- }
-
- float getIntensity(CrosshairSampleLoc sloc)
- {
- switch(sloc)
- {
- case CENTER_SAMPLE: return center.intensity();
- break;
- case LEFT_SAMPLE: return left.intensity();
- break;
- case RIGHT_SAMPLE: return right.intensity();
- break;
- case BOTTOM_SAMPLE: return bottom.intensity();
- break;
- case TOP_SAMPLE: return top.intensity();
- break;
- case TEMPORAL_SAMPLE: return temporal.intensity();
- break;
- default: return 0.0;
- };
- }
-
- Timestamp getSampleTimeStamp(CrosshairSampleLoc sloc)
- {
- switch(sloc)
- {
- case CENTER_SAMPLE: return center.t;
- break;
- case LEFT_SAMPLE: return left.t;
- break;
- case RIGHT_SAMPLE: return right.t;
- break;
- case BOTTOM_SAMPLE: return bottom.t;
- break;
- case TOP_SAMPLE: return top.t;
- break;
- case TEMPORAL_SAMPLE: return temporal.t;
- break;
- default: return 0.0;
- };
- }
-
- bool isOccluded()
- {
- return false; //<TODO>
- }
- };
-} // end namespace Afr
+  namespace Afr {
+
+    typedef float Timestamp;
+    typedef float FloatColor[3];
+    const FloatColor UNDEFINED_SAMPLE_COLOR = {0.0,0.0,0.0};
+    const Timestamp UNDEFINED_SAMPLE_TIMESTAMP = 0.0f;
+    struct SamplePrintFormat
+    {
+      float worldCoord[3], vel[3];
+      unsigned char rgb[3];
+      float timestamp;
+    };
+
+    class Sample
+    {
+    public:
+      /** color of the sample */
+      FloatColor c;
+      float viewCoord[3]; // floating point 2D coordinate of the sample on camera viewport
+      /** time at which the sample was generated (in milliseconds) */
+      float worldCoord[3]; // the 3d location of the sample
+      Timestamp t; // timestamp of the sample.
+      /**
+       * @name Constructors
+       * @{
+       */
+      Sample()
+      {
+        reset();
+      }
+
+      void reset()
+      {
+        c[0]= UNDEFINED_SAMPLE_COLOR[0];
+        c[1]= UNDEFINED_SAMPLE_COLOR[1];
+        c[2]= UNDEFINED_SAMPLE_COLOR[2];
+        t = UNDEFINED_SAMPLE_TIMESTAMP;
+      }
+
+      void init(const float x, const float y, const Timestamp tstamp)
+      {
+        t = tstamp;
+        viewCoord[0] = x;
+        viewCoord[1] = y;
+        // <TODO> find viewCoord[2] by eucledian dist from
+        // eye and hit point
+      }
+
+      void set(const float x, const float y, const Timestamp tstamp,
+               const float wx, const float wy, const float wz,
+               const float r, const float g, const float b )
+      {
+        c[0]= UNDEFINED_SAMPLE_COLOR[0];
+        c[1]= UNDEFINED_SAMPLE_COLOR[1];
+        c[2]= UNDEFINED_SAMPLE_COLOR[2];
+        t = tstamp;
+        viewCoord[0] = x;
+        viewCoord[1] = y;
+        viewCoord[2] = 1.0;
+        c[0] = r;
+        c[1] = g;
+        c[2] = b;
+        worldCoord[0] = wx;
+        worldCoord[1] = wy;
+        worldCoord[2] = wz;
+      }
+
+      void operator=(const Sample &value)
+      {
+        int i;
+        for(i=0; i<3; i++)
+        {
+          c[i] = value.c[i];
+          viewCoord[i] = value.viewCoord[i];
+          worldCoord[i] = value.worldCoord[i];
+        }
+        t = value.t;
+      }
+
+      void print_to_file(FILE *fp)
+      {
+        SamplePrintFormat spf;
+        spf.worldCoord[0] = worldCoord[0];
+        spf.worldCoord[1] = worldCoord[1];
+        spf.worldCoord[2] = worldCoord[2];
+        spf.vel[0] = 0;
+        spf.vel[1] = 0;
+        spf.vel[2] = 0;
+        spf.timestamp = t;
+        spf.rgb[0] = (unsigned char)(c[0]*255);
+        spf.rgb[1] = (unsigned char)(c[1]*255);
+        spf.rgb[2] = (unsigned char)(c[2]*255);
+        fwrite(&spf, sizeof(spf), 1, fp);
+      }
+
+      void print()
+      {
+        /* cout << "worldCoord = " << worldCoord[0] << ", " << worldCoord[1] << ", " << worldCoord[2]
+           << "; viewCoord = " << viewCoord[0] << ", " << viewCoord[1] << endl
+           << "timestamp = " << t << endl
+           << "; color = " << c[0] << ", " << c[1] << ", " << c[2] << endl
+           << endl;*/
+      }
+
+      inline float getRGBDistance(FloatColor &fc)
+      {
+        return sqrt((c[0]-fc[0])*(c[0]-fc[0])
+                    + (c[1]-fc[1])*(c[1]-fc[1])
+                    + (c[2]-fc[2])*(c[2]-fc[2]));
+      }
+      inline float getRGBMeasure()
+      {
+        return sqrt((c[0]-0.5)*(c[0]-0.5)
+                    + (c[1]-0.5)*(c[1]-0.5)
+                    + (c[2]-0.5)*(c[2]-0.5));
+      }
+      // Indicates whether the sample has been set to a valid value
+      inline int isUndefined()
+      {
+        return (t == UNDEFINED_SAMPLE_TIMESTAMP);
+      };
+
+      // Returns the standard NTSC intensity (normalized) of the sample
+      inline float intensity()
+      {
+        return NTSC_WT_R*c[0] + NTSC_WT_G*c[1] + NTSC_WT_B*c[2];
+      };
+    };
+
+    enum CrosshairSampleLoc { CENTER_SAMPLE, LEFT_SAMPLE, RIGHT_SAMPLE,
+                              TOP_SAMPLE, BOTTOM_SAMPLE, TEMPORAL_SAMPLE};
+    class SampleSet
+    {
+    public:
+      Sample left, right, bottom, top, center, temporal;
+      float tgrad, xgrad, ygrad;
+      Timestamp timestamp;
+      float viewX, viewY, viewZ;
+
+      SampleSet()
+      {
+        reset();
+      }
+
+      void reset()
+      {
+        xgrad = ygrad = tgrad = 0.0;
+        viewX = viewY = viewZ = 0;
+        timestamp = 0.0;
+        left.reset();
+        right.reset();
+        top.reset();
+        bottom.reset();
+        center.reset();
+        temporal.reset();
+      }
+
+      void set(CrosshairSampleLoc sloc, const float x, const float y, const Timestamp tstamp,
+               const float wx, const float wy, const float wz,
+               const float r, const float g, const float b)
+      {
+        switch(sloc)
+        {
+        case CENTER_SAMPLE: center.set(x,y,tstamp,wx,wy,wz,r,g,b);
+          break;
+        case LEFT_SAMPLE: left.set(x,y,tstamp,wx,wy,wz,r,g,b);;
+          break;
+        case RIGHT_SAMPLE: right.set(x,y,tstamp,wx,wy,wz,r,g,b);;
+          break;
+        case BOTTOM_SAMPLE: bottom.set(x,y,tstamp,wx,wy,wz,r,g,b);;
+          break;
+        case TOP_SAMPLE: top.set(x,y,tstamp,wx,wy,wz,r,g,b);;
+          break;
+        case TEMPORAL_SAMPLE: temporal.set(x,y,tstamp,wx,wy,wz,r,g,b);;
+          break;
+        default: return;
+        };
+        timestamp = tstamp;
+      }
+
+      void computeGradients(Timestamp currenttime)
+      {
+        // float li, ri, bi, ti, ci, tmpi;
+
+        viewX = center.viewCoord[0];
+        viewY = center.viewCoord[1];
+        viewZ = center.viewCoord[2];
+        timestamp = currenttime;
+        // li = left.intensity();
+        // ri = right.intensity();
+        // ti = top.intensity();
+        // bi = bottom.intensity();
+        // ci = center.intensity();
+        // tmpi = temporal.intensity();
+
+        if(fabsf(center.viewCoord[0]-left.viewCoord[0])>0.001
+           && fabsf(center.viewCoord[0]-right.viewCoord[0])>0.001 )
+        {
+          //xgrad = fabsf(ci-li)/fabsf(center.viewCoord[0]-left.viewCoord[0])
+          //  + fabsf(ci-ri)/fabsf(center.viewCoord[0]-right.viewCoord[0]);
+          xgrad = center.getRGBDistance(left.c)/fabsf(center.viewCoord[0]-left.viewCoord[0])
+            + center.getRGBDistance(right.c)/fabsf(center.viewCoord[0]-right.viewCoord[0]);
+        }
+        else
+        {
+          //xgrad = (fabsf(ci-li) + fabsf(ci-ri))/2.0;
+          xgrad = (center.getRGBDistance(left.c) + center.getRGBDistance(right.c))/2.0;
+        }
+
+        if(fabsf(center.viewCoord[0]-top.viewCoord[1])>0.001
+           && fabsf(center.viewCoord[0]-bottom.viewCoord[1])>0.001 )
+        {
+          //ygrad = fabsf(ci-bi)/fabsf(center.viewCoord[1]-top.viewCoord[1])
+          //  + fabsf(ci-ti)/fabsf(center.viewCoord[1]-bottom.viewCoord[1]);
+          ygrad = center.getRGBDistance(bottom.c)/fabsf(center.viewCoord[1]-top.viewCoord[1])
+            + center.getRGBDistance(top.c)/fabsf(center.viewCoord[1]-bottom.viewCoord[1]);
+        }
+        else
+        {
+          //ygrad = (fabsf(ci-bi) + fabsf(ci-ti))/2.0;
+          ygrad = (center.getRGBDistance(bottom.c) + center.getRGBDistance(top.c))/2.0;
+        }
+
+        if(fabsf(center.t-temporal.t)>0.001)
+        {
+          //tgrad = fabsf(ci-tmpi)/(center.t - temporal.t);
+          tgrad = center.getRGBDistance(temporal.c)/(center.t - temporal.t);
+        }
+        else
+        {
+          //tgrad = fabsf(ci-tmpi);
+          tgrad = center.getRGBDistance(bottom.c);
+        }
+
+        //<TODO> set occlusion status here
+      }
+
+      void print_to_file(FILE *fp)
+      {
+        left.print_to_file(fp);
+        right.print_to_file(fp);
+        top.print_to_file(fp);
+        bottom.print_to_file(fp);
+        center.print_to_file(fp);
+      }
+
+      void print()
+      {
+        /* cout << "sample set ---------------->" << endl;
+           cout << "center: "; center.print();
+           cout << "left: "; left.print();
+           cout << "right: "; right.print();
+           cout << "top: "; top.print();
+           cout << "bottom: "; bottom.print();
+           cout << "temporal: "; temporal.print();
+           cout << "<---------------------------" << endl << endl;*/
+      }
+
+      float getIntensity(CrosshairSampleLoc sloc)
+      {
+        switch(sloc)
+        {
+        case CENTER_SAMPLE: return center.intensity();
+          break;
+        case LEFT_SAMPLE: return left.intensity();
+          break;
+        case RIGHT_SAMPLE: return right.intensity();
+          break;
+        case BOTTOM_SAMPLE: return bottom.intensity();
+          break;
+        case TOP_SAMPLE: return top.intensity();
+          break;
+        case TEMPORAL_SAMPLE: return temporal.intensity();
+          break;
+        default: return 0.0;
+        };
+      }
+
+      Timestamp getSampleTimeStamp(CrosshairSampleLoc sloc)
+      {
+        switch(sloc)
+        {
+        case CENTER_SAMPLE: return center.t;
+          break;
+        case LEFT_SAMPLE: return left.t;
+          break;
+        case RIGHT_SAMPLE: return right.t;
+          break;
+        case BOTTOM_SAMPLE: return bottom.t;
+          break;
+        case TOP_SAMPLE: return top.t;
+          break;
+        case TEMPORAL_SAMPLE: return temporal.t;
+          break;
+        default: return 0.0;
+        };
+      }
+
+      bool isOccluded()
+      {
+        return false; //<TODO>
+      }
+    };
+  } // end namespace Afr
} // end namespace Manta
#endif
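
Note on SampleSet::computeGradients() above: the spatial and temporal gradients are RGB distances between neighboring crosshair samples divided by their separation in view space (or in time), with a fallback to a plain average when the samples are too close to divide safely. A condensed restatement of the horizontal case follows; the small struct and function names are placeholders for illustration only.

// Condensed restatement of the x-gradient logic from the diff above; the
// y and t gradients follow the same pattern.
#include <cmath>

struct S { float x, y, t, rgb[3]; };

static float rgbDist(const S& a, const S& b) {
  float dr = a.rgb[0]-b.rgb[0], dg = a.rgb[1]-b.rgb[1], db = a.rgb[2]-b.rgb[2];
  return std::sqrt(dr*dr + dg*dg + db*db);
}

float xGradient(const S& left, const S& center, const S& right) {
  float dl = std::fabs(center.x - left.x);
  float dr = std::fabs(center.x - right.x);
  if (dl > 0.001f && dr > 0.001f)
    return rgbDist(center, left)/dl + rgbDist(center, right)/dr;   // finite differences
  return (rgbDist(center, left) + rgbDist(center, right)) * 0.5f;  // degenerate spacing
}
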
Modified: branches/AFR/Engine/ImageTraversers/CMakeLists.txt
==============================================================================
--- branches/AFR/Engine/ImageTraversers/CMakeLists.txt (original)
+++ branches/AFR/Engine/ImageTraversers/CMakeLists.txt Fri Aug 26 01:48:19 2005
@@ -5,8 +5,17 @@
ImageTraversers/FramelessImageTraverser.cc
ImageTraversers/DissolveImageTraverser.cc
ImageTraversers/DissolveTiledImageTraverser.cc
+
ImageTraversers/AFImageTraverser.cc
+ ImageTraversers/AFR/CQ.h
+ ImageTraversers/AFR/evil.h
+ ImageTraversers/AFR/kdtree.cc
+ ImageTraversers/AFR/kdtree.h
+ ImageTraversers/AFR/sample.h
ImageTraversers/AFR/stats.cc
+ ImageTraversers/AFR/stats.h
ImageTraversers/AFR/tiles.cc
- ImageTraversers/AFR/kdtree.cc
- )
+ ImageTraversers/AFR/tiles.h
+ )
+
+# SOURCE_GROUP(ImageTraversers FILES ${Manta_ImageTraversers_SRCS})
\ No newline at end of file
Modified: branches/AFR/StandAlone/manta.cc
==============================================================================
--- branches/AFR/StandAlone/manta.cc (original)
+++ branches/AFR/StandAlone/manta.cc Fri Aug 26 01:48:19 2005
@@ -10,12 +10,15 @@
#include <Core/Thread/Time.h>
#include <strings.h>
+#include <Engine/Control/AFRPipeline.h>
+
#include <sgi_stl_warnings_off.h>
#include <iostream>
#include <sgi_stl_warnings_on.h>
using namespace std;
using namespace Manta;
+
using SCIRun::InternalError;
using SCIRun::Time;
@@ -74,15 +77,15 @@
BenchHelper(RTRTInterface* rtrt, long numFrames);
void start(int, int);
void stop(int, int);
-
+
private:
- RTRTInterface* rtrt;
+ RTRTInterface* rtrt;
double startTime;
long numFrames;
};
BenchHelper::BenchHelper(RTRTInterface* rtrt, long numFrames)
- : rtrt(rtrt), numFrames(numFrames)
+: rtrt(rtrt), numFrames(numFrames)
{
}
@@ -106,15 +109,16 @@
#if HAVE_IEEEFP_H
fpsetmask(FP_X_OFL|FP_X_DZ|FP_X_INV);
#endif
-
-
+
+
// Copy args into a vector<string>
vector<string> args;
for(int i=1;i<argc;i++)
args.push_back(argv[i]);
-
+
try {
- RTRTInterface* rtrt = createRTRT();
+ RTRTInterface* rtrt = Manta::Afr::createAFRPipeline();
+
if(getenv("MANTA_SCENEPATH"))
rtrt->setScenePath(getenv("MANTA_SCENEPATH"));
else
@@ -142,35 +146,35 @@
int xres = 512, yres = 512;
bool channelCreated=false;
bool haveUI = false;
-
+
int argc = static_cast<int>(args.size());
for(int i=0;i<argc;i++){
string arg = args[i];
if(arg == "-help"){
- usage(rtrt);
+ usage(rtrt);
} else if(arg == "-bench"){
- long numFrames = 100;
- long warmup = 10;
- if(getLongArg(i, args, numFrames)){
- getLongArg(i, args, warmup);
- }
- BenchHelper* b = new BenchHelper(rtrt, numFrames);
- // Ask for two callbacks, one at frame "warmup", and one at
- // frame warmup+numFrames
- rtrt->addOneShotCallback(RTRTInterface::Absolute, warmup,
- Callback::create(b, &BenchHelper::start));
- rtrt->addOneShotCallback(RTRTInterface::Absolute, warmup+numFrames,
- Callback::create(b, &BenchHelper::stop));
+ long numFrames = 100;
+ long warmup = 10;
+ if(getLongArg(i, args, numFrames)){
+ getLongArg(i, args, warmup);
+ }
+ BenchHelper* b = new BenchHelper(rtrt, numFrames);
+ // Ask for two callbacks, one at frame "warmup", and one at
+ // frame warmup+numFrames
+ rtrt->addOneShotCallback(RTRTInterface::Absolute, warmup,
+ Callback::create(b, &BenchHelper::start));
+ rtrt->addOneShotCallback(RTRTInterface::Absolute, warmup+numFrames,
+ Callback::create(b, &BenchHelper::stop));
} else if(arg == "-camera"){
- string s;
- if(!getStringArg(i, args, s))
- usage(rtrt);
- currentCamera = rtrt->createCamera(s);
- if(!currentCamera){
- cerr << "Error creating camera: " << s << ", available cameras are:\n";
- printList(cerr, rtrt->listCameras());
- exit(1);
- }
+ string s;
+ if(!getStringArg(i, args, s))
+ usage(rtrt);
+ currentCamera = rtrt->createCamera(s);
+ if(!currentCamera){
+ cerr << "Error creating camera: " << s << ", available cameras are:\n";
+ printList(cerr, rtrt->listCameras());
+ exit(1);
+ }
} else if(arg == "-idlemode"){
string s;
if(!getStringArg(i, args, s))
@@ -181,112 +185,112 @@
exit(1);
}
} else if(arg == "-imagedisplay"){
- string s;
- if(!getStringArg(i, args, s))
- usage(rtrt);
- if(!rtrt->createChannel(s, currentCamera, false, xres, yres)){
- cerr << "Invalid image display: " << s << ", available image displays are:\n";
- printList(cerr, rtrt->listImageDisplays());
- exit(1);
- }
- channelCreated=true;
+ string s;
+ if(!getStringArg(i, args, s))
+ usage(rtrt);
+ if(!rtrt->createChannel(s, currentCamera, false, xres, yres)){
+ cerr << "Invalid image display: " << s << ", available image displays are:\n";
+ printList(cerr, rtrt->listImageDisplays());
+ exit(1);
+ }
+ channelCreated=true;
} else if(arg == "-imagetraverser"){
- string s;
- if(!getStringArg(i, args, s))
- usage(rtrt);
- if(!rtrt->selectImageTraverser(s)){
- cerr << "Invalid image traverser: " << s << ", available image traversers are:\n";
- printList(cerr, rtrt->listImageTraversers());
- exit(1);
- }
+ string s;
+ if(!getStringArg(i, args, s))
+ usage(rtrt);
+ if(!rtrt->selectImageTraverser(s)){
+ cerr << "Invalid image traverser: " << s << ", available image traversers are:\n";
+ printList(cerr, rtrt->listImageTraversers());
+ exit(1);
+ }
} else if(arg == "-imagetype"){
- string s;
- if(!getStringArg(i, args, s))
- usage(rtrt);
- if(!rtrt->selectImageType(s)){
- cerr << "Invalid image type: " << s << ", available image types are:\n";
- printList(cerr, rtrt->listImageTypes());
- exit(1);
- }
+ string s;
+ if(!getStringArg(i, args, s))
+ usage(rtrt);
+ if(!rtrt->selectImageType(s)){
+ cerr << "Invalid image type: " << s << ", available image types are:\n";
+ printList(cerr, rtrt->listImageTypes());
+ exit(1);
+ }
} else if(arg == "-loadbalancer"){
- string s;
- if(!getStringArg(i, args, s))
- usage(rtrt);
- if(!rtrt->selectLoadBalancer(s)){
- cerr << "Invalid load balancer: " << s << ", available load balancers are:\n";
- printList(cerr, rtrt->listLoadBalancers());
- exit(1);
- }
+ string s;
+ if(!getStringArg(i, args, s))
+ usage(rtrt);
+ if(!rtrt->selectLoadBalancer(s)){
+ cerr << "Invalid load balancer: " << s << ", available load balancers are:\n";
+ printList(cerr, rtrt->listLoadBalancers());
+ exit(1);
+ }
} else if(arg == "-np"){
- long np;
- if(!getLongArg(i, args, np))
- usage(rtrt);
- rtrt->changeNumWorkers(static_cast<int>(np));
+ long np;
+ if(!getLongArg(i, args, np))
+ usage(rtrt);
+ rtrt->changeNumWorkers(static_cast<int>(np));
} else if(arg == "-pixelsampler"){
- string s;
- if(!getStringArg(i, args, s))
- usage(rtrt);
- if(!rtrt->selectPixelSampler(s)){
- cerr << "Invalid pixel sampler: " << s << ", available pixel samplers are:\n";
- printList(cerr, rtrt->listPixelSamplers());
- exit(1);
- }
+ string s;
+ if(!getStringArg(i, args, s))
+ usage(rtrt);
+ if(!rtrt->selectPixelSampler(s)){
+ cerr << "Invalid pixel sampler: " << s << ", available pixel samplers are:\n";
+ printList(cerr, rtrt->listPixelSamplers());
+ exit(1);
+ }
} else if(arg == "-renderer"){
- string s;
- if(!getStringArg(i, args, s))
- usage(rtrt);
- if(!rtrt->selectRenderer(s)){
- cerr << "Invalid renderer: " << s << ", available renderers are:\n";
- printList(cerr, rtrt->listRenderers());
- exit(1);
- }
+ string s;
+ if(!getStringArg(i, args, s))
+ usage(rtrt);
+ if(!rtrt->selectRenderer(s)){
+ cerr << "Invalid renderer: " << s << ", available renderers are:\n";
+ printList(cerr, rtrt->listRenderers());
+ exit(1);
+ }
} else if(arg == "-res"){
- if(!getResolutionArg(i, args, xres, yres)){
- cerr << "Error parsing resolution: " << args[i+1] << '\n';
- usage(rtrt);
- }
+ if(!getResolutionArg(i, args, xres, yres)){
+ cerr << "Error parsing resolution: " << args[i+1] << '\n';
+ usage(rtrt);
+ }
} else if(arg == "-scene"){
- if(rtrt->haveScene())
- cerr << "WARNING: multiple scenes specified, will use last one\n";
- string scene;
- if(!getStringArg(i, args, scene))
- usage(rtrt);
- if(!rtrt->readScene(scene)){
- cerr << "Error reading scene: " << scene << '\n';
- exit(1);
- }
+ if(rtrt->haveScene())
+ cerr << "WARNING: multiple scenes specified, will use last one\n";
+ string scene;
+ if(!getStringArg(i, args, scene))
+ usage(rtrt);
+ if(!rtrt->readScene(scene)){
+ cerr << "Error reading scene: " << scene << '\n';
+ exit(1);
+ }
} else if(arg == "-shadows"){
- string s;
- if(!getStringArg(i, args, s))
- usage(rtrt);
- if(!rtrt->selectShadowAlgorithm(s)){
- cerr << "Invalid shadow algorithm: " << s << ", available shadow algorithms are:\n";
- printList(cerr, rtrt->listShadowAlgorithms());
- exit(1);
- }
+ string s;
+ if(!getStringArg(i, args, s))
+ usage(rtrt);
+ if(!rtrt->selectShadowAlgorithm(s)){
+ cerr << "Invalid shadow algorithm: " << s << ", available shadow algorithms are:\n";
+ printList(cerr, rtrt->listShadowAlgorithms());
+ exit(1);
+ }
} else if(arg == "-ui"){
- string s;
- if(!getStringArg(i, args, s))
- usage(rtrt);
- UserInterface* ui = rtrt->createUserInterface(s);
- if(!ui){
- cerr << "Unknown user interface: " << s << ", available user interfaces are:\n";
- printList(cerr, rtrt->listUserInterfaces());
- exit(1);
- }
- ui->startup();
- haveUI = true;
+ string s;
+ if(!getStringArg(i, args, s))
+ usage(rtrt);
+ UserInterface* ui = rtrt->createUserInterface(s);
+ if(!ui){
+ cerr << "Unknown user interface: " << s << ", available user interfaces are:\n";
+ printList(cerr, rtrt->listUserInterfaces());
+ exit(1);
+ }
+ ui->startup();
+ haveUI = true;
} else {
- cerr << "Unknown argument: " << arg << '\n';
- usage(rtrt);
+ cerr << "Unknown argument: " << arg << '\n';
+ usage(rtrt);
}
}
if(!haveUI){
UserInterface* ui = rtrt->createUserInterface("X");
if(!ui){
- cerr << "Cannot find default user interface: X, available user interfaces are:\n";
- printList(cerr, rtrt->listUserInterfaces());
- exit(1);
+ cerr << "Cannot find default user interface: X, available user interfaces are:\n";
+ printList(cerr, rtrt->listUserInterfaces());
+ exit(1);
}
ui->startup();
}
@@ -303,11 +307,11 @@
if(e.stackTrace())
cerr << "Stack trace: " << e.stackTrace() << '\n';
exit(1);
-
+
} catch (std::exception e){
cerr << "Caught std exception: " << e.what() << '\n';
exit(1);
-
+
} catch(...){
cerr << "Caught unknown exception\n";
exit(1);
@@ -335,28 +339,28 @@
scene->setBackground(new ConstantBackground(ColorDB::getNamedColor("SkyBlue3")*0.5));
Material* red=new Phong(Color(RGBColor(.6,0,0)), Color(RGBColor(.6,.6,.6)), 32, 0.4);
-
+
Material* plane_matl = new Phong(new CheckerTexture<Color>(Color(RGBColor(.6,.6,.6)),
- Color(RGBColor(0,0,0)),
- Vector(1,0,0),
- Vector(0,1,0)),
- new Constant<Color>(Color(RGBColor(.6,.6,.6))),
- 32,
- new CheckerTexture<double>(0.2, 0.5,
- Vector(1,0,0),
- Vector(0,1,0)));
-
-
+ Color(RGBColor(0,0,0)),
+ Vector(1,0,0),
+ Vector(0,1,0)),
+ new Constant<Color>(Color(RGBColor(.6,.6,.6))),
+ 32,
+ new CheckerTexture<double>(0.2, 0.5,
+ Vector(1,0,0),
+ Vector(0,1,0)));
+
+
Group* world = new Group();
Primitive* floor = new Parallelogram(plane_matl, Point(-20,-20,0),
- Vector(40,0,0), Vector(0,40,0));
+ Vector(40,0,0), Vector(0,40,0));
// Setup world-space texture coordinates for the checkerboard floor
UniformMapper* uniformmap = new UniformMapper();
floor->setTexCoordMapper(uniformmap);
world->add(floor);
world->add(new Sphere(red, Point(0,0,1.2), 1.0));
scene->setObject(world);
-
+
LightSet* lights = new LightSet();
lights->add(new PointLight(Point(0,5,8), Color(RGBColor(.6,.1,.1))));
lights->add(new PointLight(Point(5,0,8), Color(RGBColor(.1,.6,.1))));