/* Tachyon (current) -- Current Main Branch
 * camera.c
 * (Doxygen navigation text retained as a comment; not part of the source.) */
1 /*
2  * camera.c - This file contains all of the functions for doing camera work.
3  *
4  * (C) Copyright 1994-2022 John E. Stone
5  * SPDX-License-Identifier: BSD-3-Clause
6  *
7  * $Id: camera.c,v 1.82 2022/02/18 17:55:28 johns Exp $
8  *
9  */
10 
11 #if defined(__linux)
12 #define _GNU_SOURCE 1 /* required for sincos()/sincosf() */
13 #endif
14 
15 #include <stdio.h>
16 #include <stdlib.h>
17 #include <string.h>
18 #include <math.h>
19 
20 #define TACHYON_INTERNAL 1
21 #include "tachyon.h"
22 #include "macros.h"
23 #include "vector.h"
24 #include "camera.h"
25 #include "util.h"
26 #include "intersect.h"
27 
28 /*
29  * camera_init()
30  * take camera parameters stored in scene definition and do all
31  * necessary initialization and whatever pre-calculation can be done.
32  */
33 void camera_init(scenedef *scene) {
34  flt sx, sy;
35  vector newupvec;
36  vector newviewvec;
37  vector newrightvec;
38 
39  /* recompute the camera vectors */
40  VCross(&scene->camera.upvec, &scene->camera.viewvec, &newrightvec);
41  VNorm(&newrightvec);
42 
43  VCross(&scene->camera.viewvec, &newrightvec, &newupvec);
44  VNorm(&newupvec);
45 
46  newviewvec=scene->camera.viewvec;
47  VNorm(&newviewvec);
48  scene->camera.rightvec=newrightvec;
49  scene->camera.upvec=newupvec;
50 
51  sx = (flt) scene->hres;
52  sy = (flt) scene->vres;
53 
54  /* calculate the width and height of the image plane in world coords */
55  /* given the aspect ratio, image resolution, and zoom factor */
56  scene->camera.px=((sx / sy) / scene->aspectratio) / scene->camera.camzoom;
57  scene->camera.py=1.0 / scene->camera.camzoom;
58  scene->camera.psx = scene->camera.px / sx;
59  scene->camera.psy = scene->camera.py / sy;
60 
61  if (scene->camera.frustumcalc == RT_CAMERA_FRUSTUM_AUTO) {
62  scene->camera.left = -0.5 * scene->camera.px;
63  scene->camera.right = 0.5 * scene->camera.px;
64  scene->camera.bottom = -0.5 * scene->camera.py;
65  scene->camera.top = 0.5 * scene->camera.py;
66  }
67 
68  /* setup function pointer for camera ray generation */
69  switch (scene->camera.projection) {
71  if (scene->antialiasing > 0) {
72  scene->camera.cam_ray = (color (*)(void *,flt,flt)) cam_perspective_aa_ray;
73  } else {
74  scene->camera.cam_ray = (color (*)(void *,flt,flt)) cam_perspective_ray;
75  }
76  break;
77 
79  scene->camera.cam_ray = (color (*)(void *,flt,flt)) cam_perspective_aa_dof_ray;
80  break;
81 
83  if (scene->antialiasing > 0) {
84  scene->camera.cam_ray = (color (*)(void *,flt,flt)) cam_orthographic_aa_ray;
85  } else {
86  scene->camera.cam_ray = (color (*)(void *,flt,flt)) cam_orthographic_ray;
87  }
88  break;
89 
91  scene->camera.cam_ray = (color (*)(void *,flt,flt)) cam_orthographic_aa_dof_ray;
92  break;
93 
95  if (scene->antialiasing > 0) {
96  scene->camera.cam_ray = (color (*)(void *,flt,flt)) cam_equirectangular_aa_stereo_ray;
97  } else {
98  scene->camera.cam_ray = (color (*)(void *,flt,flt)) cam_equirectangular_stereo_ray;
99  }
100  break;
101 
103  if (scene->antialiasing > 0) {
104  scene->camera.cam_ray = (color (*)(void *,flt,flt)) cam_fisheye_aa_ray;
105  } else {
106  scene->camera.cam_ray = (color (*)(void *,flt,flt)) cam_fisheye_ray;
107  }
108  break;
109  }
110 
111 
112  /* assuming viewvec is a unit vector, then the center of the */
113  /* image plane is the camera center + vievec */
114  switch (scene->camera.projection) {
116  scene->camera.projcent.x = scene->camera.center.x + scene->camera.viewvec.x;
117  scene->camera.projcent.y = scene->camera.center.y + scene->camera.viewvec.y;
118  scene->camera.projcent.z = scene->camera.center.z + scene->camera.viewvec.z;
119  break;
120 
122  scene->camera.projcent = scene->camera.center;
123 
124  /* assuming viewvec is a unit vector, then the lower left */
125  /* corner of the image plane is calculated below */
126  scene->camera.lowleft.x = scene->camera.projcent.x +
127  (scene->camera.left * scene->camera.rightvec.x) +
128  (scene->camera.bottom * scene->camera.upvec.x);
129  scene->camera.lowleft.y = scene->camera.projcent.y +
130  (scene->camera.left * scene->camera.rightvec.y) +
131  (scene->camera.bottom * scene->camera.upvec.y);
132  scene->camera.lowleft.z = scene->camera.projcent.z +
133  (scene->camera.left * scene->camera.rightvec.z) +
134  (scene->camera.bottom * scene->camera.upvec.z);
135  break;
136 
139  default:
140  scene->camera.projcent.x = scene->camera.center.x + scene->camera.viewvec.x;
141  scene->camera.projcent.y = scene->camera.center.y + scene->camera.viewvec.y;
142  scene->camera.projcent.z = scene->camera.center.z + scene->camera.viewvec.z;
143 
144  /* assuming viewvec is a unit vector, then the lower left */
145  /* corner of the image plane is calculated below */
146  /* for normal perspective rays, we are really storing the */
147  /* direction to the lower left, not the lower left itself, */
148  /* since this allows us to eliminate a subtraction per pixel */
149  scene->camera.lowleft.x = scene->camera.projcent.x +
150  (scene->camera.left * scene->camera.rightvec.x) +
151  (scene->camera.bottom * scene->camera.upvec.x)
152  - scene->camera.center.x;
153  scene->camera.lowleft.y = scene->camera.projcent.y +
154  (scene->camera.left * scene->camera.rightvec.y) +
155  (scene->camera.bottom * scene->camera.upvec.y)
156  - scene->camera.center.y;
157  scene->camera.lowleft.z = scene->camera.projcent.z +
158  (scene->camera.left * scene->camera.rightvec.z) +
159  (scene->camera.bottom * scene->camera.upvec.z)
160  - scene->camera.center.z;
161  break;
162  }
163 
164  /* size of image plane */
165  scene->camera.px = scene->camera.right - scene->camera.left;
166  scene->camera.py = scene->camera.top - scene->camera.bottom;
167  scene->camera.psx = scene->camera.px / scene->hres;
168  scene->camera.psy = scene->camera.py / scene->vres;
169 
170  scene->camera.iplaneright.x = scene->camera.px * scene->camera.rightvec.x / sx;
171  scene->camera.iplaneright.y = scene->camera.px * scene->camera.rightvec.y / sx;
172  scene->camera.iplaneright.z = scene->camera.px * scene->camera.rightvec.z / sx;
173 
174  scene->camera.iplaneup.x = scene->camera.py * scene->camera.upvec.x / sy;
175  scene->camera.iplaneup.y = scene->camera.py * scene->camera.upvec.y / sy;
176  scene->camera.iplaneup.z = scene->camera.py * scene->camera.upvec.z / sy;
177 }
178 
179 
180 /*
181  * camray_init()
182  * initializes a camera ray which will be reused over and over
183  * by the current worker thread. This includes attaching thread-specific
184  * data to this ray.
185  */
186 void camray_init(scenedef *scene, ray *primary, unsigned long serial,
187  unsigned long * mbox,
188  unsigned int aarandval,
189  unsigned int aorandval) {
190  /* setup the right function pointer depending on what features are in use */
191  if (scene->flags & RT_SHADE_CLIPPING) {
192  primary->add_intersection = add_clipped_intersection;
193  } else {
194  primary->add_intersection = add_regular_intersection;
195  }
196 
197  primary->serial = serial;
198  primary->mbox = mbox;
199  primary->scene = scene;
200  primary->depth = scene->raydepth; /* set to max ray depth */
201  primary->transcnt = scene->transcount; /* set to max trans surf cnt */
202  primary->randval = aarandval; /* AA random number seed */
203 #if defined(RT_ACCUMULATE_ON)
204  rng_frand_seed(&primary->frng, aorandval); /* seed 32-bit FP RNG */
205 #else
206  rng_frand_init(&primary->frng); /* seed 32-bit FP RNG */
207 #endif
208 
209  /* orthographic ray direction is always coaxial with view direction */
210  primary->d = scene->camera.viewvec;
211 
212  /* for perspective rendering without depth of field */
213  primary->o = scene->camera.center;
214 }
215 
216 
217 #if 0
218 /*
219  * cam_perspective_aa_dof_maxvariance_ray()
220  * Generate a perspective camera ray incorporating
221  * antialiasing and depth-of-field.
222  */
223 color cam_perspective_aa_dof_maxvariance_ray(ray * ry, flt x, flt y) {
224  color col, sample;
225  color colsum, colsumsq, colvar;
226  int samples, samplegroup;
227  scenedef * scene=ry->scene;
228  flt dx, dy, rnsamples, ftmp;
229  flt maxvar;
230 
231  sample=cam_perspective_dof_ray(ry, x, y); /* generate primary ray */
232  colsum = sample; /* accumulate first sample */
233  colsumsq.r = colsum.r * colsum.r; /* accumulate first squared sample */
234  colsumsq.g = colsum.g * colsum.g;
235  colsumsq.b = colsum.b * colsum.b;
236 
237  /* perform antialiasing if enabled. */
238  /* samples are run through a very simple box filter averaging */
239  /* each of the sample pixel colors to produce a final result */
240  /* No special weighting is done based on the jitter values in */
241  /* the circle of confusion nor for the jitter within the */
242  /* pixel in the image plane. */
243 
244  samples = 1; /* only one ray cast so far */
245  while (samples < scene->antialiasing) {
246 #if 0
247  samplegroup = scene->antialiasing;
248 #else
249  if (samples > 32) {
250  samplegroup = samples + 1;
251  } else {
252  samplegroup = samples + 32;
253  }
254 #endif
255 
256  for (; samples <= samplegroup; samples++) {
257  /* calculate random eye aperture offset */
258  float jxy[2];
259  jitter_offset2f(&ry->randval, jxy);
260  dx = jxy[0] * ry->scene->camera.aperture * ry->scene->hres;
261  dy = jxy[1] * ry->scene->camera.aperture * ry->scene->vres;
262 
263  /* perturb the eye center by the random aperture offset */
264  ry->o.x = ry->scene->camera.center.x +
265  dx * ry->scene->camera.iplaneright.x +
266  dy * ry->scene->camera.iplaneup.x;
267  ry->o.y = ry->scene->camera.center.y +
268  dx * ry->scene->camera.iplaneright.y +
269  dy * ry->scene->camera.iplaneup.y;
270  ry->o.z = ry->scene->camera.center.z +
271  dx * ry->scene->camera.iplaneright.z +
272  dy * ry->scene->camera.iplaneup.z;
273 
274  /* shoot the ray, jittering the pixel position in the image plane */
275  jitter_offset2f(&ry->randval, jxy);
276  sample=cam_perspective_dof_ray(ry, x + jxy[0], y + jxy[1]);
277 
278  colsum.r += sample.r; /* accumulate samples */
279  colsum.g += sample.g;
280  colsum.b += sample.b;
281 
282  colsumsq.r += sample.r * sample.r; /* accumulate squared samples */
283  colsumsq.g += sample.g * sample.g;
284  colsumsq.b += sample.b * sample.b;
285  }
286 
287  /* calculate the variance for the color samples we've taken so far */
288  rnsamples = 1.0 / samples;
289  ftmp = colsum.r * rnsamples;
290  colvar.r = ((colsumsq.r * rnsamples) - ftmp*ftmp) * rnsamples;
291  ftmp = colsum.g * rnsamples;
292  colvar.g = ((colsumsq.g * rnsamples) - ftmp*ftmp) * rnsamples;
293  ftmp = colsum.b * rnsamples;
294  colvar.b = ((colsumsq.b * rnsamples) - ftmp*ftmp) * rnsamples;
295 
296  maxvar = 0.002; /* default maximum color variance to accept */
297 
298  /* early exit antialiasing if we're below maximum allowed variance */
299  if ((colvar.r < maxvar) && (colvar.g < maxvar) && (colvar.b < maxvar)) {
300  break; /* no more samples should be needed, we are happy now */
301  }
302  }
303 
304  /* average sample colors, back to range 0.0 - 1.0 */
305  col.r = colsum.r * rnsamples;
306  col.g = colsum.g * rnsamples;
307  col.b = colsum.b * rnsamples;
308 
309  return col;
310 }
311 
312 #endif
313 
314 /*
315  * cam_perspective_aa_dof_ray()
316  * Generate a perspective camera ray incorporating
317  * antialiasing and depth-of-field.
318  */
319 color cam_perspective_aa_dof_ray(ray * ry, flt x, flt y) {
320  color col, avcol;
321  int alias;
322  scenedef * scene=ry->scene;
323  float scale;
324 #if defined(RT_ACCUMULATE_ON)
325  col.r = col.g = col.b = 0.0f;
326  cam_prep_perspective_ray(ry, x, y); /* compute ray, but don't shoot */
327 #else
328  col=cam_perspective_ray(ry, x, y); /* shoot centered ray */
329 #endif
330 
331  /* perform antialiasing if enabled. */
332  /* samples are run through a very simple box filter averaging */
333  /* each of the sample pixel colors to produce a final result */
334  /* No special weighting is done based on the jitter values in */
335  /* the circle of confusion nor for the jitter within the */
336  /* pixel in the image plane. */
337 #if defined(RT_ACCUMULATE_ON)
338  for (alias=0; alias <= scene->antialiasing; alias++) {
339 #else
340  for (alias=1; alias <= scene->antialiasing; alias++) {
341 #endif
342  float jxy[2];
343  jitter_offset2f(&ry->randval, jxy);
344  avcol=cam_perspective_dof_ray(ry, x + jxy[0], y + jxy[1]);
345 
346  col.r += avcol.r; /* accumulate antialiasing samples */
347  col.g += avcol.g;
348  col.b += avcol.b;
349  }
350 
351  /* average sample colors, back to range 0.0 - 1.0 */
352  scale = 1.0f / (scene->antialiasing + 1.0f);
353  col.r *= scale;
354  col.g *= scale;
355  col.b *= scale;
356 
357  return col;
358 }
359 
360 
/*
 * cam_perspective_dof_ray()
 * Generate a perspective camera ray for depth-of-field rendering.
 * The unjittered ray is aimed at the requested pixel, its intersection
 * with the focal plane is recorded, then the ray origin is jittered
 * within the aperture disc and re-aimed at that focal point, so
 * geometry on the focal plane stays sharp while the rest defocuses.
 * Traces the ray and returns the shaded color; ry->o is restored on
 * return (ry->d and the serial number are left modified).
 */
color cam_perspective_dof_ray(ray * ry, flt x, flt y) {
  flt rdx, rdy, rdz, invlen;
  color col;
  float jxy[2];
  vector focuspoint;
  vector oldorigin=ry->o; /* save un-jittered ray origin */
  scenedef * scene=ry->scene;

  /* starting from the lower left corner of the image plane, */
  /* we move to the center of the pel we're calculating: */
  /* lowerleft + (rightvec * X_distance) + (upvec * Y_distance) */
  /* rdx/y/z are the ray directions (unnormalized) */
  rdx = scene->camera.lowleft.x +
        (x * scene->camera.iplaneright.x) +
        (y * scene->camera.iplaneup.x);

  rdy = scene->camera.lowleft.y +
        (x * scene->camera.iplaneright.y) +
        (y * scene->camera.iplaneup.y);

  rdz = scene->camera.lowleft.z +
        (x * scene->camera.iplaneright.z) +
        (y * scene->camera.iplaneup.z);

  /* normalize the ray direction vector */
  invlen = 1.0 / SQRT(rdx*rdx + rdy*rdy + rdz*rdz);
  ry->d.x = rdx * invlen;
  ry->d.y = rdy * invlen;
  ry->d.z = rdz * invlen;

  /* compute center of rotation point in focal plane */
  focuspoint.x = ry->o.x + ry->d.x * scene->camera.dof_focaldist;
  focuspoint.y = ry->o.y + ry->d.y * scene->camera.dof_focaldist;
  focuspoint.z = ry->o.z + ry->d.z * scene->camera.dof_focaldist;

  /* jitter ray origin within aperture disc */
  jitter_disc2f(&ry->randval, jxy);
  jxy[0] *= scene->camera.dof_aperture_rad;
  jxy[1] *= scene->camera.dof_aperture_rad;

  /* offset the origin along the camera right/up basis vectors */
  ry->o.x += jxy[0] * scene->camera.rightvec.x +
             jxy[1] * scene->camera.upvec.x;
  ry->o.y += jxy[0] * scene->camera.rightvec.y +
             jxy[1] * scene->camera.upvec.y;
  ry->o.z += jxy[0] * scene->camera.rightvec.z +
             jxy[1] * scene->camera.upvec.z;

  /* compute new ray direction from jittered origin */
  ry->d.x = focuspoint.x - ry->o.x;
  ry->d.y = focuspoint.y - ry->o.y;
  ry->d.z = focuspoint.z - ry->o.z;
  VNorm(&ry->d);

  /* initialize ray attributes for a primary ray */
  ry->maxdist = FHUGE; /* unbounded ray */
  ry->opticdist = 0.0; /* ray is just starting */

  /* camera only generates primary rays */
  ry->flags = RT_RAY_PRIMARY | RT_RAY_REGULAR;

  ry->serial++;           /* increment the ray serial number */
  intersect_objects(ry);  /* trace the ray */
  col=scene->shader(ry);  /* shade the hit point */

  ry->o = oldorigin; /* restore previous ray origin */

  return col;
}
433 
434 
435 /*
436  * cam_perspective_aa_ray()
437  * Generate a perspective camera ray incorporating antialiasing.
438  */
439 color cam_perspective_aa_ray(ray * ry, flt x, flt y) {
440  color col, avcol;
441  int alias;
442  scenedef * scene=ry->scene;
443  float scale;
444 #if defined(RT_ACCUMULATE_ON)
445  col.r = col.g = col.b = 0.0f;
446  cam_prep_perspective_ray(ry, x, y); /* generate ray */
447 #else
448  col=cam_perspective_ray(ry, x, y); /* generate ray */
449 #endif
450 
451  /* perform antialiasing if enabled. */
452  /* samples are run through a very simple box filter averaging */
453  /* each of the sample pixel colors to produce a final result */
454 #if defined(RT_ACCUMULATE_ON)
455  for (alias=0; alias <= scene->antialiasing; alias++) {
456 #else
457  for (alias=1; alias <= scene->antialiasing; alias++) {
458 #endif
459  float jxy[2];
460  jitter_offset2f(&ry->randval, jxy);
461  avcol=cam_perspective_ray(ry, x + jxy[0], y + jxy[1]);
462 
463  col.r += avcol.r; /* accumulate antialiasing samples */
464  col.g += avcol.g;
465  col.b += avcol.b;
466  }
467 
468  /* average sample colors, back to range 0.0 - 1.0 */
469  scale = 1.0f / (scene->antialiasing + 1.0f);
470  col.r *= scale;
471  col.g *= scale;
472  col.b *= scale;
473 
474  return col;
475 }
476 
477 
478 /*
479  * cam_prep_perspective_ray()
480  * Prep, but don't trace a perspective camera ray, no antialiasing
481  */
482 void cam_prep_perspective_ray(ray * ry, flt x, flt y) {
483  flt rdx, rdy, rdz, len;
484  scenedef * scene=ry->scene;
485 
486  /* starting from the lower left corner of the image plane, */
487  /* we move to the center of the pel we're calculating: */
488  /* lowerleft + (rightvec * X_distance) + (upvec * Y_distance) */
489  /* rdx/y/z are the ray directions (unnormalized) */
490  rdx = scene->camera.lowleft.x +
491  (x * scene->camera.iplaneright.x) +
492  (y * scene->camera.iplaneup.x);
493 
494  rdy = scene->camera.lowleft.y +
495  (x * scene->camera.iplaneright.y) +
496  (y * scene->camera.iplaneup.y);
497 
498  rdz = scene->camera.lowleft.z +
499  (x * scene->camera.iplaneright.z) +
500  (y * scene->camera.iplaneup.z);
501 
502  /* normalize the ray direction vector */
503  len = 1.0 / SQRT(rdx*rdx + rdy*rdy + rdz*rdz);
504  ry->d.x = rdx * len;
505  ry->d.y = rdy * len;
506  ry->d.z = rdz * len;
507 
508  /* initialize ray attributes for a primary ray */
509  ry->maxdist = FHUGE; /* unbounded ray */
510  ry->opticdist = 0.0; /* ray is just starting */
511 
512  /* camera only generates primary rays */
513  ry->flags = RT_RAY_PRIMARY | RT_RAY_REGULAR;
514 
515  ry->serial++; /* increment the ray serial number */
516 }
517 
518 
519 /*
520  * cam_perspective_ray()
521  * Generate and trace a perspective camera ray, no antialiasing
522  */
523 color cam_perspective_ray(ray * ry, flt x, flt y) {
524  scenedef * scene=ry->scene;
525  cam_prep_perspective_ray(ry, x, y);
526  intersect_objects(ry); /* trace the ray */
527  return scene->shader(ry); /* shade the hit point */
528 }
529 
530 
531 /*
532  * cam_orthographic_aa_dof_ray()
533  * Generate an orthographic camera ray, potentially incorporating
534  * antialiasing.
535  */
536 color cam_orthographic_aa_dof_ray(ray * ry, flt x, flt y) {
537  color col, avcol;
538  int alias;
539  scenedef * scene=ry->scene;
540  float scale;
541 
542  col=cam_orthographic_ray(ry, x, y); /* generate ray */
543 
544  /* perform antialiasing if enabled. */
545  /* samples are run through a very simple box filter averaging */
546  /* each of the sample pixel colors to produce a final result */
547  for (alias=1; alias <= scene->antialiasing; alias++) {
548  float jxy[2];
549  jitter_offset2f(&ry->randval, jxy);
550  avcol=cam_orthographic_dof_ray(ry, x + jxy[0], y + jxy[1]);
551 
552  col.r += avcol.r; /* accumulate antialiasing samples */
553  col.g += avcol.g;
554  col.b += avcol.b;
555  }
556 
557  /* average sample colors, back to range 0.0 - 1.0 */
558  scale = 1.0f / (scene->antialiasing + 1.0f);
559  col.r *= scale;
560  col.g *= scale;
561  col.b *= scale;
562 
563  return col;
564 }
565 
566 
/*
 * cam_orthographic_dof_ray()
 * Generate an orthographic camera ray for depth-of-field rendering.
 * The origin is placed on the image plane at the requested pixel, the
 * focal point along the (orthographic) view direction is recorded, then
 * the origin is jittered within the aperture disc and the ray re-aimed
 * at the focal point. Traces the ray and returns the shaded color;
 * ry->d is restored on return (ry->o and the serial are left modified).
 */
color cam_orthographic_dof_ray(ray * ry, flt x, flt y) {
  color col;
  float jxy[2];
  vector focuspoint;
  vector olddir=ry->d; /* save un-jittered ray direction */
  scenedef * scene=ry->scene;

  /* starting from the lower left corner of the image plane, */
  /* we move to the center of the pel we're calculating: */
  /* lowerleft + (rightvec * X_distance) + (upvec * Y_distance) */
  ry->o.x = scene->camera.lowleft.x +
            (x * scene->camera.iplaneright.x) +
            (y * scene->camera.iplaneup.x);

  ry->o.y = scene->camera.lowleft.y +
            (x * scene->camera.iplaneright.y) +
            (y * scene->camera.iplaneup.y);

  ry->o.z = scene->camera.lowleft.z +
            (x * scene->camera.iplaneright.z) +
            (y * scene->camera.iplaneup.z);

  /* compute center of rotation point in focal plane; ry->d here is   */
  /* still the previous (view-aligned) direction saved in olddir      */
  focuspoint.x = ry->o.x + ry->d.x * scene->camera.dof_focaldist;
  focuspoint.y = ry->o.y + ry->d.y * scene->camera.dof_focaldist;
  focuspoint.z = ry->o.z + ry->d.z * scene->camera.dof_focaldist;

  /* jitter ray origin within aperture disc */
  jitter_disc2f(&ry->randval, jxy);
  jxy[0] *= scene->camera.dof_aperture_rad;
  jxy[1] *= scene->camera.dof_aperture_rad;

  /* offset the origin along the camera right/up basis vectors */
  ry->o.x += jxy[0] * scene->camera.rightvec.x +
             jxy[1] * scene->camera.upvec.x;
  ry->o.y += jxy[0] * scene->camera.rightvec.y +
             jxy[1] * scene->camera.upvec.y;
  ry->o.z += jxy[0] * scene->camera.rightvec.z +
             jxy[1] * scene->camera.upvec.z;

  /* compute new ray direction from jittered origin */
  ry->d.x = focuspoint.x - ry->o.x;
  ry->d.y = focuspoint.y - ry->o.y;
  ry->d.z = focuspoint.z - ry->o.z;
  VNorm(&ry->d);

  /* initialize ray attributes for a primary ray */
  ry->maxdist = FHUGE; /* unbounded ray */
  ry->opticdist = 0.0; /* ray is just starting */

  /* camera only generates primary rays */
  ry->flags = RT_RAY_PRIMARY | RT_RAY_REGULAR;

  ry->serial++;           /* increment the ray serial number */
  intersect_objects(ry);  /* trace the ray */
  col=scene->shader(ry);  /* shade the hit point */

  ry->d = olddir; /* restore previous ray direction */

  return col;
}
631 
632 
633 /*
634  * cam_orthographic_aa_ray()
635  * Generate an orthographic camera ray, potentially incorporating
636  * antialiasing.
637  */
638 color cam_orthographic_aa_ray(ray * ry, flt x, flt y) {
639  color col, avcol;
640  int alias;
641  scenedef * scene=ry->scene;
642  float scale;
643 
644  col=cam_orthographic_ray(ry, x, y); /* generate ray */
645 
646  /* perform antialiasing if enabled. */
647  /* samples are run through a very simple box filter averaging */
648  /* each of the sample pixel colors to produce a final result */
649  for (alias=1; alias <= scene->antialiasing; alias++) {
650  float jxy[2];
651  jitter_offset2f(&ry->randval, jxy);
652  avcol=cam_orthographic_ray(ry, x + jxy[0], y + jxy[1]);
653 
654  col.r += avcol.r; /* accumulate antialiasing samples */
655  col.g += avcol.g;
656  col.b += avcol.b;
657  }
658 
659  /* average sample colors, back to range 0.0 - 1.0 */
660  scale = 1.0f / (scene->antialiasing + 1.0f);
661  col.r *= scale;
662  col.g *= scale;
663  col.b *= scale;
664 
665  return col;
666 }
667 
668 
669 /*
670  * cam_orthographic_ray()
671  * Generate an orthographic camera ray, no antialiasing
672  */
673 color cam_orthographic_ray(ray * ry, flt x, flt y) {
674  scenedef * scene=ry->scene;
675 
676  /* starting from the lower left corner of the image plane, */
677  /* we move to the center of the pel we're calculating: */
678  /* lowerleft + (rightvec * X_distance) + (upvec * Y_distance) */
679  ry->o.x = scene->camera.lowleft.x +
680  (x * scene->camera.iplaneright.x) +
681  (y * scene->camera.iplaneup.x);
682 
683  ry->o.y = scene->camera.lowleft.y +
684  (x * scene->camera.iplaneright.y) +
685  (y * scene->camera.iplaneup.y);
686 
687  ry->o.z = scene->camera.lowleft.z +
688  (x * scene->camera.iplaneright.z) +
689  (y * scene->camera.iplaneup.z);
690 
691  /* initialize ray attributes for a primary ray */
692  ry->maxdist = FHUGE; /* unbounded ray */
693  ry->opticdist = 0.0; /* ray is just starting */
694 
695  /* camera only generates primary rays */
696  ry->flags = RT_RAY_PRIMARY | RT_RAY_REGULAR;
697 
698  ry->serial++; /* increment the ray serial number */
699  intersect_objects(ry); /* trace the ray */
700  return scene->shader(ry); /* shade the hit point */
701 }
702 
/*
 * cam_equirectangular_ray()
 * Generate an omnidirectional latitude-longitude or 'equirectangular'
 * camera ray, no antialiasing. Pixel X maps to longitude over a full
 * 2*pi sweep, pixel Y to latitude over pi; the direction is built from
 * the orthonormal camera basis and the ray originates at the camera
 * center. Traces the ray and returns the shaded color.
 */
color cam_equirectangular_ray(ray * ry, flt x, flt y) {
  flt sin_ax, cos_ax, sin_ay, cos_ay;
  flt rdx, rdy, rdz, invlen;
  scenedef * scene=ry->scene;

  /* compute mx and my, the midpoint coords of the viewport subimage */
  flt mx = scene->hres * 0.5;          /* X spans width of full image */
  flt my = scene->vres * 0.5;          /* Y spans width of full image */

  /* compute lat-long radians per pixel in the resulting viewport subimage */
  /* NOTE(review): 3.1415926 is a low-precision pi constant (shared with */
  /* cam_equirectangular_stereo_ray); a named constant would be clearer  */
  flt radperpix_x = (3.1415926 / scene->hres) * 2.0;
  flt radperpix_y = (3.1415926 / scene->vres);

  /* compute the lat-long angles the pixel/sample's viewport coords */
  flt ax = (x - mx) * radperpix_x; /* longitude */
  flt ay = (y - my) * radperpix_y; /* latitude */

  SINCOS(ax, &sin_ax, &cos_ax);
  SINCOS(ay, &sin_ay, &cos_ay);

  /* compute the ray direction vector components from the */
  /* lat-long angles and the camera orthogonal basis vectors */
  rdx = cos_ay * (cos_ax * scene->camera.viewvec.x +
                  sin_ax * scene->camera.rightvec.x) +
        sin_ay * scene->camera.upvec.x;

  rdy = cos_ay * (cos_ax * scene->camera.viewvec.y +
                  sin_ax * scene->camera.rightvec.y) +
        sin_ay * scene->camera.upvec.y;

  rdz = cos_ay * (cos_ax * scene->camera.viewvec.z +
                  sin_ax * scene->camera.rightvec.z) +
        sin_ay * scene->camera.upvec.z;

  /* normalize the ray direction vector */
  invlen = 1.0 / SQRT(rdx*rdx + rdy*rdy + rdz*rdz);
  ry->d.x = rdx * invlen;
  ry->d.y = rdy * invlen;
  ry->d.z = rdz * invlen;

  /* all equirectangular rays originate at the camera center */
  ry->o.x = scene->camera.center.x;
  ry->o.y = scene->camera.center.y;
  ry->o.z = scene->camera.center.z;

  /* initialize ray attributes for a primary ray */
  ry->maxdist = FHUGE; /* unbounded ray */
  ry->opticdist = 0.0; /* ray is just starting */

  /* camera only generates primary rays */
  ry->flags = RT_RAY_PRIMARY | RT_RAY_REGULAR;

  ry->serial++;             /* increment the ray serial number */
  intersect_objects(ry);    /* trace the ray */
  return scene->shader(ry); /* shade the hit point */
}
763 
764 /*
765  * cam_equirectangular_aa_ray()
766  * Generate an omnidirectional latitude-longitude or 'equirectangular'
767  * camera ray, potentially incorporating antialiasing
768  */
769 color cam_equirectangular_aa_ray(ray * ry, flt x, flt y) {
770  color col, avcol;
771  int alias;
772  scenedef * scene=ry->scene;
773  float scale;
774 
775  col=cam_equirectangular_ray(ry, x, y); /* generate ray */
776 
777  /* perform antialiasing if enabled. */
778  /* samples are run through a very simple box filter averaging */
779  /* each of the sample pixel colors to produce a final result */
780  for (alias=1; alias <= scene->antialiasing; alias++) {
781  float jxy[2];
782  jitter_offset2f(&ry->randval, jxy);
783  avcol=cam_equirectangular_ray(ry, x + jxy[0], y + jxy[1]);
784 
785  col.r += avcol.r; /* accumulate antialiasing samples */
786  col.g += avcol.g;
787  col.b += avcol.b;
788  }
789 
790  /* average sample colors, back to range 0.0 - 1.0 */
791  scale = 1.0f / (scene->antialiasing + 1.0f);
792  col.r *= scale;
793  col.g *= scale;
794  col.b *= scale;
795 
796  return col;
797 }
798 
799 
/*
 * cam_equirectangular_stereo_ray()
 * Generate an omnidirectional equirectangular camera ray, with or without
 * modulation of stereo eye separation by latitude angle, no antialiasing.
 * This camera splits the image into over/under stereoscopic viewports
 * that contain the left eye view in the top subimage,
 * and the right eye view in the bottom subimage.
 * Traces the ray and returns the shaded color.
 */
color cam_equirectangular_stereo_ray(ray * ry, flt x, flt y) {
  flt sin_ax, cos_ax, sin_ay, cos_ay; /* lat-long angle sin/cos values */
  flt rdx, rdy, rdz;                  /* ray direction vector components */
  flt invlen;                         /* unit vector normalization factor */
  flt vpx, vpy, eyeshift;             /* viewport width/height and half-IOD */
  vector eye_axial;                   /* rightward stereo eye shift direction */
  scenedef * scene=ry->scene;         /* global scene data and attributes */

  /* compute stereo subimage viewport coordinates from image coordinates */
  flt vpszy = scene->vres * 0.5; /* two vertically stacked viewports in Y */
  vpx = x; /* X coordinate is also viewport X coord */
  if (y >= vpszy) {
    vpy = y - vpszy;                    /* left eye viewport subimage on top */
    eyeshift = -scene->camera.eyeshift; /* shift left, by half of IOD */
  } else {
    vpy = y;                            /* right eye viewport subimage on bottom */
    eyeshift = scene->camera.eyeshift;  /* shift right, by half of IOD */
  }

  /* compute mx and my, the midpoint coords of the viewport subimage */
  flt mx = scene->hres * 0.5; /* viewport spans width of full image */
  flt my = vpszy * 0.5;       /* two vertically stacked viewports in Y */

  /* compute lat-long radians per pixel in the resulting viewport subimage */
  flt radperpix_x = (3.1415926 / scene->hres) * 2.0;
  flt radperpix_y = (3.1415926 / vpszy);

  /* compute the lat-long angles the pixel/sample's viewport (VP) coords */
  flt ax = (vpx - mx) * radperpix_x; /* X angle from mid VP, longitude */
  flt ay = (vpy - my) * radperpix_y; /* Y angle from mid VP, latitude */

  SINCOS(ax, &sin_ax, &cos_ax);
  SINCOS(ay, &sin_ay, &cos_ay);

  /* compute the ray direction vector components (rd[xyz]) from the */
  /* lat-long angles and orthogonal camera basis vectors */
  rdx = cos_ay * (cos_ax * scene->camera.viewvec.x +
                  sin_ax * scene->camera.rightvec.x) +
        sin_ay * scene->camera.upvec.x;

  rdy = cos_ay * (cos_ax * scene->camera.viewvec.y +
                  sin_ax * scene->camera.rightvec.y) +
        sin_ay * scene->camera.upvec.y;

  rdz = cos_ay * (cos_ax * scene->camera.viewvec.z +
                  sin_ax * scene->camera.rightvec.z) +
        sin_ay * scene->camera.upvec.z;

  /* normalize the ray direction vector to unit length, */
  /* and set the new ray direction */
  invlen = 1.0 / SQRT(rdx*rdx + rdy*rdy + rdz*rdz);
  ry->d.x = rdx * invlen;
  ry->d.y = rdy * invlen;
  ry->d.z = rdz * invlen;

  /* eye_axial: cross-product of up and ray direction, a unit "right" */
  /* vector used to shift the stereoscopic eye position (ray origin) */
  VCross(&scene->camera.upvec, &ry->d, &eye_axial);

  /* optionally modulate eyeshift by latitude angle to */
  /* prevent backward-stereo when looking at the poles */
  if (scene->camera.modulate_eyeshift) {
    /* modulate eyeshift by latitude angle and cosine-power scale factor */
    eyeshift *= powf(fabsf(cos_ay), scene->camera.modulate_eyeshift_pow);
  }

  /* shift the eye (ray origin) by the eyeshift */
  /* distance (half of the IOD) */
  ry->o.x = scene->camera.center.x + eyeshift * eye_axial.x;
  ry->o.y = scene->camera.center.y + eyeshift * eye_axial.y;
  ry->o.z = scene->camera.center.z + eyeshift * eye_axial.z;

  /* initialize ray attributes for a primary ray */
  ry->maxdist = FHUGE; /* unbounded ray */
  ry->opticdist = 0.0; /* ray is just starting */

  /* camera only generates primary rays */
  ry->flags = RT_RAY_PRIMARY | RT_RAY_REGULAR;

  ry->serial++;             /* increment the ray serial number */
  intersect_objects(ry);    /* trace the ray */
  return scene->shader(ry); /* shade the hit point */
}
891 
892 
893 /*
894  * cam_equirectangular_aa_stereo_ray()
895  * Generate an omnidirectional latitude-longitude or 'equirectangular'
896  * camera ray, potentially incorporating antialiasing
897  * This camera splits the image into over/under stereoscopic viewports
898  * that contain the left eye view in the top subimage,
899  * and the right eye view in the bottom subimage.
900  */
902  color col, avcol;
903  int alias;
904  scenedef * scene=ry->scene;
905  float scale;
906 
907  col=cam_equirectangular_stereo_ray(ry, x, y); /* trace un-jittered ray */
908 
909  /* perform antialiasing if enabled. */
910  /* samples are run through a very simple box filter averaging */
911  /* each of the sample pixel colors to produce a final result */
912  for (alias=1; alias <= scene->antialiasing; alias++) {
913  float jxy[2];
914  jitter_offset2f(&ry->randval, jxy);
915  avcol=cam_equirectangular_stereo_ray(ry, x + jxy[0], y + jxy[1]);
916 
917  col.r += avcol.r; /* accumulate antialiasing samples */
918  col.g += avcol.g;
919  col.b += avcol.b;
920  }
921 
922  /* average sample colors, back to range 0.0 - 1.0 */
923  scale = 1.0f / (scene->antialiasing + 1.0f);
924  col.r *= scale;
925  col.g *= scale;
926  col.b *= scale;
927 
928  return col;
929 }
930 
931 
932 /*
933  * cam_fisheye_ray()
934  * Generate a perspective camera ray, no antialiasing
935  */
936 color cam_fisheye_ray(ray * ry, flt x, flt y) {
937  flt ax, ay;
938  scenedef * scene=ry->scene;
939 
940  ax = scene->camera.left + x * scene->camera.psx;
941  ay = scene->camera.bottom + y * scene->camera.psy;
942 
943  ry->d.x = COS(ay) * (COS(ax) * scene->camera.viewvec.x +
944  SIN(ax) * scene->camera.rightvec.x) +
945  SIN(ay) * scene->camera.upvec.x;
946 
947  ry->d.y = COS(ay) * (COS(ax) * scene->camera.viewvec.y +
948  SIN(ax) * scene->camera.rightvec.y) +
949  SIN(ay) * scene->camera.upvec.y;
950 
951  ry->d.z = COS(ay) * (COS(ax) * scene->camera.viewvec.z +
952  SIN(ax) * scene->camera.rightvec.z) +
953  SIN(ay) * scene->camera.upvec.z;
954 
955  /* initialize ray attributes for a primary ray */
956  ry->maxdist = FHUGE; /* unbounded ray */
957  ry->opticdist = 0.0; /* ray is just starting */
958 
959  /* camera only generates primary rays */
960  ry->flags = RT_RAY_PRIMARY | RT_RAY_REGULAR;
961 
962  ry->serial++; /* increment the ray serial number */
963  intersect_objects(ry); /* trace the ray */
964  return scene->shader(ry); /* shade the hit point */
965 }
966 
967 /*
968  * cam_fisheye_aa_ray()
969  * Generate a fisheye camera ray, potentially incorporating
970  * antialiasing.
971  */
972 color cam_fisheye_aa_ray(ray * ry, flt x, flt y) {
973  color col, avcol;
974  int alias;
975  scenedef * scene=ry->scene;
976  float scale;
977 
978  col=cam_fisheye_ray(ry, x, y); /* generate ray */
979 
980  /* perform antialiasing if enabled. */
981  /* samples are run through a very simple box filter averaging */
982  /* each of the sample pixel colors to produce a final result */
983  for (alias=1; alias <= scene->antialiasing; alias++) {
984  float jxy[2];
985  jitter_offset2f(&ry->randval, jxy);
986  avcol=cam_fisheye_ray(ry, x + jxy[0], y + jxy[1]);
987 
988  col.r += avcol.r; /* accumulate antialiasing samples */
989  col.g += avcol.g;
990  col.b += avcol.b;
991  }
992 
993  /* average sample colors, back to range 0.0 - 1.0 */
994  scale = 1.0f / (scene->antialiasing + 1.0f);
995  col.r *= scale;
996  col.g *= scale;
997  col.b *= scale;
998 
999  return col;
1000 }
1001 
1002 
1003 void cameraprojection(camdef * camera, int mode) {
1004  camera->projection=mode;
1005 }
1006 
1007 
1015 void camerafrustum(camdef * camera, flt left, flt right, flt bottom, flt top) {
1016  camera->frustumcalc = RT_CAMERA_FRUSTUM_USER;
1017  camera->left = left;
1018  camera->right = right;
1019  camera->bottom = bottom;
1020  camera->top = top;
1021 }
1022 
1023 
1024 void cameradof(camdef * camera, flt focaldist, flt aperture) {
1025  camera->dof_focaldist=focaldist;
1026 
1027  /* we have to mult by an extra factor of two to get the */
1028  /* disc samples from -0.5:0.5 to -rad:rad */
1029  /* this should probably also incorporate camera zoom... */
1030  if (aperture < 0.01)
1031  aperture=0.01;
1032 
1033  camera->dof_aperture_rad=focaldist / aperture;
1034 }
1035 
1036 
1037 /*
1038  * When the user specifies zoom factor, we are implicitly expecting to
1039  * auto-calculate the resulting view frustum.
1040  */
1041 void camerazoom(camdef * camera, flt zoom) {
1042  camera->frustumcalc = RT_CAMERA_FRUSTUM_AUTO;
1043  camera->camzoom = zoom;
1044 }
1045 
1046 /*
1047  * Don't use this anymore
1048  */
1049 void cameradefault(camdef * camera) {
1050  camerazoom(camera, 1.0);
1051  camera->dof_focaldist = 1.0;
1052  camera->dof_aperture_rad = 0.0;
1053 }
1054 
1055 
1056 void cameraposition(camdef * camera, vector center, vector viewvec,
1057  vector upvec) {
1058  vector newupvec;
1059  vector newviewvec;
1060  vector newrightvec;
1061 
1062  /* recompute the camera vectors */
1063  VCross(&upvec, &viewvec, &newrightvec);
1064  VNorm(&newrightvec);
1065 
1066  VCross(&viewvec, &newrightvec, &newupvec);
1067  VNorm(&newupvec);
1068 
1069  newviewvec=viewvec;
1070  VNorm(&newviewvec);
1071 
1072  camera->center=center;
1073  camera->viewvec=newviewvec;
1074  camera->rightvec=newrightvec;
1075  camera->upvec=newupvec;
1076 }
1077 
1078 
1079 void getcameraposition(camdef * camera, vector * center, vector * viewvec,
1080  vector * upvec, vector * rightvec) {
1081  *center = camera->center;
1082  *viewvec = camera->viewvec;
1083  *upvec = camera->upvec;
1084  *rightvec = camera->rightvec;
1085 }
1086 
1087 
color cam_equirectangular_stereo_ray(ray *ry, flt x, flt y)
Definition: camera.c:808
static __device__ __inline__ void jitter_offset2f(unsigned int &pval, float2 &xy)
color cam_equirectangular_aa_ray(ray *ry, flt x, flt y)
Definition: camera.c:769
color cam_orthographic_aa_dof_ray(ray *ry, flt x, flt y)
Definition: camera.c:536
color cam_perspective_dof_ray(ray *ry, flt x, flt y)
Definition: camera.c:365
void cameradefault(camdef *camera)
Definition: camera.c:1049
void rng_frand_seed(rng_frand_handle *rngh, unsigned int seed)
Definition: util.c:564
void VNorm(apivector *)
color cam_perspective_aa_ray(ray *ry, flt x, flt y)
Definition: camera.c:439
color cam_equirectangular_aa_stereo_ray(ray *ry, flt x, flt y)
Definition: camera.c:901
__host__ __device__ float3 fabsf(const float3 &a)
color cam_equirectangular_ray(ray *ry, flt x, flt y)
Definition: camera.c:708
color cam_perspective_aa_dof_ray(ray *ry, flt x, flt y)
Definition: camera.c:319
color cam_orthographic_ray(ray *ry, flt x, flt y)
Definition: camera.c:673
#define RT_PROJECTION_FISHEYE
Fisheye projection mode.
Definition: tachyon.h:505
static __device__ __inline__ void jitter_disc2f(unsigned int &pval, float2 &xy, float radius)
#define RT_PROJECTION_STEREO_EQUIRECTANGULAR
over/under omnistereo equirectangular
Definition: tachyon.h:506
void cameraprojection(camdef *camera, int mode)
Definition: camera.c:1003
double flt
generic floating point number, using double
Definition: tachyon.h:47
#define SIN(x)
Definition: util.h:30
void camerazoom(camdef *camera, flt zoom)
Definition: camera.c:1041
void VCross(apivector *a, apivector *b, apivector *c)
#define RT_PROJECTION_PERSPECTIVE
Perspective projection mode.
Definition: tachyon.h:500
#define COS(x)
Definition: util.h:26
color cam_fisheye_ray(ray *ry, flt x, flt y)
Definition: camera.c:936
void add_clipped_intersection(flt t, const object *obj, ray *ry)
Definition: intersect.c:90
Tachyon cross-platform timers, special math function wrappers, and RNGs.
void cameradof(camdef *camera, flt focaldist, flt aperture)
Definition: camera.c:1024
color cam_orthographic_aa_ray(ray *ry, flt x, flt y)
Definition: camera.c:638
color cam_fisheye_aa_ray(ray *ry, flt x, flt y)
Definition: camera.c:972
#define SQRT(x)
Definition: util.h:31
void camera_init(scenedef *scene)
Definition: camera.c:33
#define SINCOS(ax, sx, cx)
Definition: util.h:39
void cam_prep_perspective_ray(ray *ry, flt x, flt y)
Definition: camera.c:482
#define RT_PROJECTION_PERSPECTIVE_DOF
Perspective projection mode.
Definition: tachyon.h:502
void camray_init(scenedef *scene, ray *primary, unsigned long serial, unsigned long *mbox, unsigned int aarandval, unsigned int aorandval)
Definition: camera.c:186
void getcameraposition(camdef *camera, vector *center, vector *viewvec, vector *upvec, vector *rightvec)
Definition: camera.c:1079
color cam_perspective_ray(ray *ry, flt x, flt y)
Definition: camera.c:523
void add_regular_intersection(flt t, const object *obj, ray *ry)
Definition: intersect.c:77
void cameraposition(camdef *camera, vector center, vector viewvec, vector upvec)
Definition: camera.c:1056
#define RT_PROJECTION_ORTHOGRAPHIC
Orthographic projection mode.
Definition: tachyon.h:501
Tachyon public API function prototypes and declarations used to drive the ray tracing engine...
color cam_orthographic_dof_ray(ray *ry, flt x, flt y)
Definition: camera.c:571
#define RT_PROJECTION_ORTHOGRAPHIC_DOF
Orthographic projection mode.
Definition: tachyon.h:503
void rng_frand_init(rng_frand_handle *rngh)
Definition: util.c:556
void intersect_objects(ray *ry)
Definition: intersect.c:47
void camerafrustum(camdef *camera, flt left, flt right, flt bottom, flt top)
When the user directly specifies the world coordinates of the view frustum, it overrides the normal c...
Definition: camera.c:1015