Welcome to mirror list, hosted at ThFree Co, Russian Federation.

eevee_film_lib.glsl « shaders « eevee « engines « draw « blender « source - git.blender.org/blender.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
blob: 82f184128eb8fa1ed0922f454a8055427191c9af (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148

/**
 * Film accumulation utility functions.
 */

#pragma BLENDER_REQUIRE(eevee_camera_lib.glsl)

/**
 * True when the film pass stores color values (data types ordered before
 * FILM_DATA_FLOAT), i.e. data that must be filter-weighted and sanitized.
 */
bool film_is_color_data(FilmData film_data)
{
  return (film_data.data_type < FILM_DATA_FLOAT);
}

/**
 * Prepare one raw sample for accumulation into the film buffer.
 * Color data gets sanitized, its alpha converted from transmittance to
 * opacity, and is pre-multiplied by the filter weight. Log-color passes are
 * additionally log2-encoded; motion vectors are rescaled from camera UV
 * space to pixel units.
 */
vec4 film_data_encode(FilmData film_data, vec4 data, float weight)
{
  bool is_color = film_is_color_data(film_data);

  if (is_color) {
    /* Could we assume safe color from earlier pass? */
    data = safe_color(data);
    /* Alpha is stored as transmittance by the renderer; flip to opacity. */
    data.a = saturate(1.0 - data.a);
  }

  if (film_data.data_type == FILM_DATA_COLOR_LOG) {
    /* TODO(fclem) Pre-expose. */
    data.rgb = log2(1.0 + data.rgb);
  }
  else if (film_data.data_type == FILM_DATA_DEPTH) {
    /* TODO(fclem) Depth should be converted to radial depth in panoramic projection. */
  }
  else if (film_data.data_type == FILM_DATA_MOTION) {
    /* Motion vectors are in camera uv space. But final motion vectors are in pixel units. */
    data *= film_data.uv_scale_inv.xyxy;
  }

  /* Weighted accumulation only applies to color data. */
  if (is_color) {
    data *= weight;
  }
  return data;
}

/**
 * Inverse of film_data_encode(): divide color data by the accumulated filter
 * weight and undo the log2 encoding of log-color passes.
 */
vec4 film_data_decode(FilmData film_data, vec4 data, float weight)
{
  vec4 result = data;

  /* Undo the filter-weight pre-multiplication done at accumulation time. */
  if (film_is_color_data(film_data)) {
    result *= safe_rcp(weight);
  }

  if (film_data.data_type == FILM_DATA_COLOR_LOG) {
    /* TODO(fclem) undo Pre-expose. */
    result.rgb = exp2(result.rgb) - 1.0;
  }
  return result;
}

/**
 * Returns uv's position in the previous frame.
 * NOTE: Reprojection is not implemented yet; the disabled path below sketches
 * it but needs a `history_mat` (previous-frame transform) to be plumbed in.
 * Until then this is an identity mapping.
 */
vec2 film_uv_history_get(CameraData cam, CameraData camera_history, vec2 uv)
{
#if 0 /* TODO reproject history */
  vec3 V = camera_view_from_uv(cam, uv);
  /* Fixed typo: was `hitory_mat`. `history_mat` still needs to be declared/passed. */
  vec3 V_prev = transform_point(history_mat, V);
  vec2 uv_history = camera_uv_from_view(camera_history, V_prev);
  return uv_history;
#endif
  return uv;
}

/* -------------------------------------------------------------------- */
/** \name Filter
 * \{ */

/**
 * Reconstruction-filter weight of a sample at `offset` (in pixels) from the
 * pixel center, for a filter of radius `cam.filter_size`.
 * The enabled path is a Gaussian fitted to the Blackman-Harris window; the
 * disabled path is the exact (slower) Blackman-Harris evaluation.
 */
float film_filter_weight(CameraData cam, vec2 offset)
{
#if 1 /* Faster */
  /* Gaussian fitted to Blackman-Harris. */
  float r = len_squared(offset) / sqr(cam.filter_size);
  const float sigma = 0.284;
  const float fac = -0.5 / (sigma * sigma);
  float weight = exp(fac * r);
#else
  /* Blackman-Harris filter. */
  /* Fixed: was `camera.filter_size`, but the parameter is named `cam`
   * (would not compile if this path were enabled). */
  float r = M_2PI * saturate(0.5 + length(offset) / (2.0 * cam.filter_size));
  float weight = 0.35875 - 0.48829 * cos(r) + 0.14128 * cos(2.0 * r) - 0.01168 * cos(3.0 * r);
#endif
  /* Always return a weight above 0 to avoid blind spots between samples. */
  return max(weight, 1e-6);
}

/* Camera UV is the full-frame UV. Film uv is after cropping from render border. */
vec2 film_sample_from_camera_uv(FilmData film_data, vec2 sample_uv)
{
  /* Inverse of film_sample_to_camera_uv(): remove the crop bias, then rescale. */
  vec2 unbiased = sample_uv - film_data.uv_bias;
  return unbiased * film_data.uv_scale_inv;
}

/* Map a film sample coordinate back to full-frame camera UV space. */
vec2 film_sample_to_camera_uv(FilmData film_data, vec2 sample_co)
{
  vec2 scaled = sample_co * film_data.uv_scale;
  return scaled + film_data.uv_bias;
}

/**
 * Accumulate one input-texture sample into the weighted film sum.
 *
 * Projects the destination fragment (offset by `sample_offset`) into the
 * input texture using the input frame's projection matrices, snaps to the
 * nearest input texel center, reprojects that texel back into destination
 * space to get a correct filter distance, then adds the encoded sample and
 * its filter weight to `data` / `weight`.
 *
 * Early-outs (leaving `data`/`weight` untouched) when the sample falls
 * outside the camera projection range or outside the input texture.
 */
void film_process_sample(CameraData cam,
                         FilmData film_data,
                         mat4 input_persmat,
                         mat4 input_persinv,
                         sampler2D input_tx,
                         vec2 sample_offset,
                         inout vec4 data,
                         inout float weight)
{
  /* Project sample from destination space to source texture. */
  vec2 sample_center = gl_FragCoord.xy;
  vec2 sample_uv = film_sample_to_camera_uv(film_data, sample_center + sample_offset);
  vec3 vV_dst = camera_view_from_uv(cam, sample_uv);
  /* Pixels outside of projection range. */
  if (vV_dst == vec3(0.0)) {
    return;
  }

  bool is_persp = cam.type != CAMERA_ORTHO;
  vec2 uv_src = camera_uv_from_view(input_persmat, is_persp, vV_dst);
  /* Snap to sample actual location (pixel center). */
  vec2 input_size = vec2(textureSize(input_tx, 0));
  vec2 texel_center_src = floor(uv_src * input_size) + 0.5;
  uv_src = texel_center_src / input_size;
  /* Discard pixels outside of input range. */
  if (any(greaterThan(abs(uv_src - 0.5), vec2(0.5)))) {
    return;
  }

  /* Reproject sample location in destination space to have correct distance metric. */
  vec3 vV_src = camera_view_from_uv(input_persinv, uv_src);
  vec2 uv_cam = camera_uv_from_view(cam, vV_src);
  vec2 sample_dst = film_sample_from_camera_uv(film_data, uv_cam);

  /* Equirectangular projection might wrap and have more than one point mapping to the same
   * original coordinate. We need to get the closest pixel center.
   * NOTE: This is wrong for projection outside the main frame. */
  if (cam.type == CAMERA_PANO_EQUIRECT) {
    sample_center = film_sample_to_camera_uv(film_data, sample_center);
    vec3 vV_center = camera_view_from_uv(cam, sample_center);
    sample_center = camera_uv_from_view(cam, vV_center);
    sample_center = film_sample_from_camera_uv(film_data, sample_center);
  }
  /* Compute filter weight and add to weighted sum. */
  vec2 offset = sample_dst - sample_center;
  float sample_weight = film_filter_weight(cam, offset);
  vec4 sample_data = textureLod(input_tx, uv_src, 0.0);
  /* Fixed: was `film_data_encode(film, ...)` but no `film` symbol exists in
   * this scope; the parameter is named `film_data`. */
  data += film_data_encode(film_data, sample_data, sample_weight);
  weight += sample_weight;
}

/** \} */