File: AORenderer.ispc

// Copyright 2009 Intel Corporation
// SPDX-License-Identifier: Apache-2.0

#include "camera/Camera.ih"
#include "camera/CameraDispatch.ih"
#include "common/DeviceRT.ih"
#include "common/FeatureFlagsEnum.h"
#include "fb/FrameBuffer.ih"
#include "fb/FrameBufferDispatch.ih"
#include "fb/RenderTaskDesc.ih"
#include "math/random.ih"
#include "pf/PixelFilterDispatch.ih"
#include "render/Renderer.ih"
#include "render/ScreenSample.ih"
// AO renderer
#include "surfaces.ih"
#include "volumes.ih"
// c++ shared
#include "AORendererShared.h"

OSPRAY_BEGIN_ISPC_NAMESPACE

static void AORenderer_renderSample(Renderer *uniform _self,
    FrameBuffer *uniform fb,
    World *uniform world,
    varying ScreenSample &sample,
    const uniform FeatureFlagsHandler &ffh)
{
  uniform AORenderer *uniform self = (uniform AORenderer * uniform) _self;

  const uniform FeatureFlags ff = getFeatureFlags(ffh);

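  // Per-pixel random sampler, seeded from the linear pixel index and the
  // sample/accumulation index carried in sampleID.z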
  RandomSampler randomSampler;
  RandomSampler_init(&randomSampler,
      sample.sampleID.x + fb->size.x * sample.sampleID.y,
      sample.sampleID.z);

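  // Remember the primary ray's original extent so it can be re-extended past
  // each transparent hit when continuing the loop below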
  uniform bool firstHit = true;
  const float originalRayTFar = sample.ray.t;

#ifdef OSPRAY_ENABLE_VOLUMES
#ifdef OSPRAY_TARGET_SYCL
  // We only support a single volume interval on the GPU
  VolumeInterval vInterval;
  VolumeIntervals volumeIntervals;
  volumeIntervals.numVolumeIntervals = 0;
  volumeIntervals.numAllocated = 1;
  volumeIntervals.intervals = &vInterval;
#else
  // Allocate memory for volume intervals
  VolumeIntervals volumeIntervals;
  allocVolumeIntervals(volumeIntervals);
#endif
#endif

  // This is our main ray
  Ray &ray = sample.ray;
  RayCone &rayCone = sample.rayCone;

  // First trace the ray across the clipping scene to calculate ray intervals;
  // this step should leave the ray structure unchanged
  RayIntervals rayIntervals;
  traceClippingRay(world, ray, rayIntervals, ffh);

  // Iterate over all translucent geometry until the accumulated color is fully opaque
  vec4f outputColor = make_vec4f(0.f);
  while (outputColor.w < 0.99f) {
    // Then trace normal geometry using the calculated ray intervals;
    // on a hit, ray.t will be updated
    if (ff.geometry) {
      traceGeometryRayIntervals(world, ray, rayIntervals, ffh);
    }

#ifdef OSPRAY_ENABLE_VOLUMES
    if (ff.other & FFO_VOLUME_IN_SCENE) {
      // Determine volume intervals by tracing the ray in the volume scene
      Ray volumeRay = ray;
      traceVolumeRay(world, volumeRay, volumeIntervals);

      // Sample volumes across volume intervals (in front of the geometry hit)
      if (volumeIntervals.numVolumeIntervals > 0) {
        vec4f volumeColor = integrateVolumeIntervals(volumeIntervals,
            rayIntervals,
            volumeRay,
            randomSampler,
            self->volumeSamplingRate,
            ffh);
        outputColor = outputColor + (1.f - outputColor.w) * volumeColor;
      }
    }
#endif

    // If any geometry has been hit
    vec4f blendedColor;
    const bool rayHadHit = hadHit(ray);
    if (rayHadHit && ff.geometry) {
      // Prepare differential geometry structure
      DifferentialGeometry dg;
      computeDG(world, self, ray, rayCone, dg, ffh);

      // Shade geometry
      SSI surfaceShading;
      surfaceShading = AORenderer_computeShading(self, world, dg, sample, ffh);

      // Use shaded color for blending
      blendedColor = surfaceShading.shadedColor;

      // Initialize other per-sample data with first-hit values
      if (firstHit) {
        sample.z = ray.t;
        sample.albedo = surfaceShading.albedo;
        sample.normal = dg.Ns;
        sample.instID = dg.instID;
        sample.geomID = dg.objID;
        sample.primID = ray.primID;
      }

      // Prepare the ray for the next loop iteration: start just past the last
      // geometry hit and extend to the initial tfar
      setRay(ray, ray.t + dg.epsilon, originalRayTFar);
    } else {
      blendedColor = Renderer_getBackground(&self->super, sample.pos, ffh);
      // Initialize other per-sample data with first-hit values
      if (firstHit) {
        sample.z = ray.t;
        sample.albedo = make_vec3f(blendedColor);
        sample.normal = sample.ray.dir;
      }
    }

    // Blend into the final output color
    outputColor = outputColor + (1.f - outputColor.w) * blendedColor;
    firstHit = false;
    if (!rayHadHit) {
      break;
    }
  }

#if defined(OSPRAY_ENABLE_VOLUMES) && !defined(OSPRAY_TARGET_SYCL)
  freeVolumeIntervals(volumeIntervals);
#endif
  sample.rgb = make_vec3f(outputColor);
  sample.alpha = outputColor.w;
}

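// Instantiate the shared per-task rendering loop from RendererRenderTaskFn.inl,
// using AORenderer_renderSample as the per-sample shading function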
#define renderSampleFn AORenderer_renderSample
#include "render/RendererRenderTaskFn.inl"
#undef renderSampleFn

inline void AORenderer_renderTask(const uniform vec3ui itemIndex,
    Renderer *uniform self,
    FrameBuffer *uniform fb,
    Camera *uniform camera,
    World *uniform world,
    const uint32 *uniform taskIDs,
    const uniform FeatureFlagsHandler &ffh)
{
  Renderer_default_renderTask(itemIndex, self, fb, camera, world, taskIDs, ffh);
}

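// Generate the launcher used to schedule AORenderer_renderTask on the device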
DEFINE_RENDERER_KERNEL_LAUNCHER(AORenderer_renderTask);

OSPRAY_END_ISPC_NAMESPACE