// vrpn 07.33 — Virtual Reality Peripheral Network
// vrpn_Tracker_ViewPoint.C
//
// Name: vrpn_Tracker_ViewPoint.C
//
// Author: David Borland
//
// EventLab at the University of Barcelona
//
// Description: VRPN server class for Arrington Research ViewPoint EyeTracker.
//
// The VRPN server connects to the eye tracker using the VPX_InterApp DLL.
// Whatever other control software is being used to connect to the eye tracker
// (e.g. the ViewPoint software that comes with the tracker) to perform
// calibration, etc. should link to the same copy of the DLL, so they can share
// information.
//
// -------------------------------------------------------------------------------
//
// Tracker:
//
// The tracker has two sensors, as the ViewPoint can optionally have binocular
// tracking. In the case of monocular tracking, only sensor 0 (EYE_A) will have
// valid information. Retrieving smoothed or raw tracking data is controlled by
// the smoothedData parameter.
//
// Position: The (x,y) gaze point in gaze space (smoothed or raw).
//
// Rotation: The (x,y) gaze angle as a quaternion (smoothed or raw).
//
// Velocity: The x- and y- components of the eye movement velocity in gaze space
// (always smoothed).
//
// -------------------------------------------------------------------------------
//
// Analog:
//
// There are a lot of additional data that can be retrieved from the tracker.
// These values are always calculated from the smoothed gaze point. Currently,
// the following are sent as analog values, but more can be added as needed.
// Please see the ViewPoint documentation regarding what other data are available.
//
// Because each channel needs to be duplicated in the case of a binocular tracker,
// the first n/2 values are for EYE_A, and the second n/2 values are for EYE_B.
//
// EYE_A:
//
// Channel 0: The pupil aspect ratio, from 0.0 to 1.0. Can be used to detect
// blinks when it falls below a given threshold.
//
// Channel 1: The total velocity (magnitude of eye movement velocity). Can be
// used to detect saccades.
//
// Channel 2: The fixation seconds (length of time below the velocity criterion
// used to detect saccades). 0 if saccade is occurring.
//
// EYE_B:
//
// Channels 3-5: See EYE_A.
//

#include "vrpn_Tracker_ViewPoint.h"

#ifdef VRPN_USE_VIEWPOINT

#include VRPN_VIEWPOINT_H
#include "quat.h"

69 vrpn_Tracker_ViewPoint::vrpn_Tracker_ViewPoint(const char* name, vrpn_Connection* c, bool smoothedData) :
70  vrpn_Tracker(name, c), vrpn_Analog(name, c), useSmoothedData(smoothedData)
71 {
72  // Check the DLL version
73  double version = VPX_GetDLLVersion();
74  if (VPX_VersionMismatch(VPX_SDK_VERSION)) {
75  fprintf(stderr, "vrpn_Tracker_ViewPoint::vrpn_Tracker_ViewPoint(): Warning, SDK version is %g, while DLL version is %g \n", version, VPX_SDK_VERSION);
76  }
77  else {
78  printf("vrpn_Tracker_ViewPoint::vrpn_Tracker_ViewPoint(): SDK version %g matches DLL version %g \n", version, VPX_SDK_VERSION);
79  }
80 
81  // Two sensors, one for each eye
83 
84  // Currently 3 analog channels per eye
85  const int channels_per_eye = 3;
86 
87  // Total number of channels is two times the number of channels per eye
88  vrpn_Analog::num_channel = channels_per_eye * 2;
89 
90  // VRPN stuff
92 }
93 
94 vrpn_Tracker_ViewPoint::~vrpn_Tracker_ViewPoint()
95 {
96 }
97 
98 void vrpn_Tracker_ViewPoint::mainloop()
99 {
100  // Call the server mainloop
101  server_mainloop();
102 
103  // Get data from the DLL
104  get_report();
105 }
106 
107 void vrpn_Tracker_ViewPoint::get_report()
108 {
109  // Get a time stamp
110  struct timeval current_time;
111  vrpn_gettimeofday(&current_time, NULL);
112 
113  // Set the time stamp for each device type
114  vrpn_Tracker::timestamp = current_time;
115  vrpn_Analog::timestamp = current_time;
116 
117  // Get tracker and analog data
118  get_tracker();
119  get_analog();
120 }
121 
122 
123 void vrpn_Tracker_ViewPoint::get_tracker()
124 {
125  // Get information for each eye
126  for (int i = 0; i < 2; i++) {
127  // The sensor
128  d_sensor = i;
129 
130 
131  // Which eye?
132  VPX_EyeType eye;
133  if (d_sensor == 0) eye = EYE_A;
134  else eye = EYE_B;
135 
136 
137  // Get tracker data from the DLL
138  VPX_RealPoint gp, cv, ga;
139  if (useSmoothedData) {
140  // Use smoothed data, when available
141  VPX_GetGazePointSmoothed2(eye, &gp);
142  VPX_GetComponentVelocity2(eye, &cv); // Always smoothed
143  VPX_GetGazeAngleSmoothed2(eye, &ga);
144  }
145  else {
146  // Use raw data
147  VPX_GetGazePoint2(eye, &gp);
148  VPX_GetComponentVelocity2(eye, &cv); // Always smoothed
149  VPX_GetGazeAngle2(eye, &ga);
150  }
151 
152 
153  // Set the tracker position from the gaze point
154  pos[0] = gp.x;
155  pos[1] = gp.y;
156  pos[2] = 0.0;
157 
158 
159  // Set the tracker velocity from the eye velocity
160  vel[0] = cv.x;
161  vel[1] = cv.y;
162  vel[2] = 0.0;
163 
164 
165  // Convert the gaze angle to a quaternion
166  q_from_euler(d_quat, 0.0, Q_DEG_TO_RAD(ga.y), Q_DEG_TO_RAD(ga.x));
167 
168 
169  // Send the data for this eye
170  send_report();
171  }
172 }
173 
174 void vrpn_Tracker_ViewPoint::get_analog()
175 {
176  // Get information for each eye
177  for (int i = 0; i < 2; i++) {
178  // Which eye?
179  VPX_EyeType eye;
180  if (i == 0) eye = EYE_A;
181  else eye = EYE_B;
182 
183 
184  // Analog channel index offset for second eye
185  unsigned int eyeOffset = i * vrpn_Analog::num_channel / 2;
186 
187 
188  // Get analog information from the DLL
189  double ar, tv, fs;
190  VPX_GetPupilAspectRatio2(eye, &ar);
191  VPX_GetTotalVelocity2(eye, &tv);
192  VPX_GetFixationSeconds2(eye, &fs);
193 
194 
195  // Set the analog channels
196  channel[0 + eyeOffset] = ar;
197  channel[1 + eyeOffset] = tv;
198  channel[2 + eyeOffset] = fs;
199  }
200 
201 
202  // Send all analog data
204 }
205 
206 
207 void vrpn_Tracker_ViewPoint::send_report()
208 {
209  // Send tracker data
210  if (d_connection) {
211  char msgbuf[1000];
212  int len = vrpn_Tracker::encode_to(msgbuf);
213  if (d_connection->pack_message(len, vrpn_Tracker::timestamp, position_m_id, d_sender_id, msgbuf,
215  fprintf(stderr,"vrpn_Tracker_ViewPoint: cannot write message: tossing\n");
216  }
217  len = vrpn_Tracker::encode_vel_to(msgbuf);
218  if (d_connection->pack_message(len, vrpn_Tracker::timestamp, velocity_m_id, d_sender_id, msgbuf,
220  fprintf(stderr,"vrpn_Tracker_ViewPoint: cannot write message: tossing\n");
221  }
222  }
223 }

#endif
// NOTE: Doxygen cross-reference residue removed. The symbols it listed are
// declared elsewhere in VRPN: vrpn_CONNECTION_LOW_LATENCY and
// vrpn_Connection (vrpn_Connection.h); vrpn_Analog::report_changes(),
// num_channel, and timestamp (vrpn_Analog.h/.C); and
// vrpn_Tracker::register_server_handlers(), num_sensors, timestamp,
// encode_to(), and encode_vel_to() (vrpn_Tracker.h/.C).