Attached is the basics of what is needed to get the gesture tracker to
listen to a port and react. For now, I've left out the XML server socket
bits and just cut it down to minimal code. The XML server socket works,
I just don't want to confuse that with the request to simply read a UDP
socket and react to a lookatPoint or gesture. (The XML server socket is
my reference criterion.)
GestureTracker::init( LLPumpIO* ) is the entry starter.
The questions I have so far are:
* Is this the correct way to set up and use mLookAt and mLookAt->setLookAt()?
Also, I'm not sure yet whether it can be initialized when mainLoop() starts,
or only after login.
* I haven't yet found which attention type the head motion should have, so
I picked free-look. If the position is updated, is the attention timer
reset, or does it still time out?
Everything is tested except portions of the action() method.
Cheers
Philippe Bossut (Merov Linden) wrote:
Hi all,
Gee! ....
#include "llviewerprecompiledheaders.h"
#include "lliopipe.h"
#include "llpumpio.h"
#include "llchainio.h"
#include "lliosocket.h"
#include "llbufferstream.h"
#include <boost/algorithm/string.hpp>
#include <boost/lexical_cast.hpp>
#include "llgesturemgr.h"
#include "llhudmanager.h"
#include "llhudeffectlookat.h"
#define MAGIC_PORT 30134
namespace Alpha
{
// called "alpha" because this namespace is not for production code, just example classes and stubs
//
// Pipe stage that drains the bytes an upstream LLIOSocketReader buffered
// from a UDP socket and forwards them, chunk by chunk, to a subclass via
// stream().  Example/stub code, not production (see namespace comment).
class UDPStreamReader : public LLIOPipe
{
    static const int CHUNK_SIZE = 1024;

protected:
    LLSocket::ptr_t mSocket;

    // LLIOPipe hook invoked by the pump.  Reads everything currently in the
    // channel buffer and hands it to stream(); a false return from stream()
    // stops the whole chain (STATUS_STOP), otherwise we report STATUS_OK.
    virtual EStatus process_impl( const LLChannelDescriptors& channels, buffer_ptr_t& iobuffer, bool& eos, LLSD& context, LLPumpIO* pump)
    {
        LLBufferStream istr(channels, iobuffer.get());
        while( istr.good() )
        {
            char buf[CHUNK_SIZE];
            istr.read( buf, CHUNK_SIZE );
            // gcount() may be less than CHUNK_SIZE on the final, partial read.
            if( ! stream( buf, istr.gcount() ) )
                return STATUS_STOP;
        }
        return STATUS_OK;
    }

    // Consumer hook: receives 'length' raw bytes; return false to stop the
    // pump chain.  Made pure virtual: the original declared this with no
    // definition anywhere, which links only by luck — '= 0' states the real
    // contract (subclasses must implement it) and makes the class abstract.
    virtual bool stream(const char* buffer, int length ) = 0;
};
}
namespace Snowglobe
{
class GestureTracker : public Alpha::UDPStreamReader
{
LLPointer<LLHUDEffectLookAt> mLookAt ;
std::string mInput ;
void action( std::string str )
{
std::cout << "input '" << str << "'" << std::endl ;
std::vector<std::string> s ;
boost::split( s, str, boost::is_any_of("/")) ;
if( s[0] == "gesture")
gGestureManager.triggerAndReviseString( s[1] ) ;
else
if( s[0] == "lookatPoint")
{
LLVector3 lookatPoint ;
std::vector<std::string> v ;
boost::split( v, s[1] , boost::is_any_of(",")) ;
try
{
lookatPoint.setVec( boost::lexical_cast<float>( v[0] ), boost::lexical_cast<float>( v[1] ), boost::lexical_cast<float>( v[2] ) ) ;
}
catch( boost::bad_lexical_cast& )
{
}
mLookAt->setLookAt( LOOKAT_TARGET_FREELOOK, NULL, lookatPoint ) ;
}
}
virtual bool stream( const char* buffer, int length )
{
int start = 0 ;
int delim ;
mInput.append( buffer, length ) ;
while( (delim = mInput.find("\n", start)) != std::string::npos )
{
action( std::string( mInput.data()+start, delim-start-1 ) ) ;
start = delim + 1 ;
}
if(start != 0)
mInput = mInput.substr(start) ;
return true ;
}
GestureTracker()
{
mLookAt = (LLHUDEffectLookAt *)LLHUDManager::getInstance()->createViewerEffect(LLHUDObject::LL_HUD_EFFECT_LOOKAT);
}
static void init(LLPumpIO* pump)
{
GestureTracker* reader = new GestureTracker() ;
reader->mSocket = LLSocket::create(gAPRPoolp, LLSocket::DATAGRAM_UDP, MAGIC_PORT );
LLPumpIO::chain_t readChain;
readChain.push_back(LLIOPipe::ptr_t(new LLIOSocketReader(reader->mSocket)));
readChain.push_back(LLIOPipe::ptr_t(reader));
pump->addChain(readChain, NEVER_CHAIN_EXPIRY_SECS);
}
};
}
_______________________________________________
Policies and (un)subscribe information available here:
http://wiki.secondlife.com/wiki/SLDev
Please read the policies before posting to keep unmoderated posting privileges