// Copyright 2016 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "gpu/command_buffer/service/indexed_buffer_binding_host.h"

#include <algorithm>

#include "gpu/command_buffer/service/buffer_manager.h"

namespace gpu {
namespace gles2 {
IndexedBufferBindingHost::IndexedBufferBinding::IndexedBufferBinding()
: type(IndexedBufferBindingType::kBindBufferNone),
offset(0),
size(0),
effective_full_buffer_size(0) {}
IndexedBufferBindingHost::IndexedBufferBinding::IndexedBufferBinding(
const IndexedBufferBindingHost::IndexedBufferBinding& other)
: type(other.type),
buffer(other.buffer.get()),
offset(other.offset),
size(other.size),
effective_full_buffer_size(other.effective_full_buffer_size) {
}
IndexedBufferBindingHost::IndexedBufferBinding::~IndexedBufferBinding() =
default;
bool IndexedBufferBindingHost::IndexedBufferBinding::operator==(
const IndexedBufferBindingHost::IndexedBufferBinding& other) const {
if (type == IndexedBufferBindingType::kBindBufferNone &&
other.type == IndexedBufferBindingType::kBindBufferNone) {
// This should be the most common case so an early out.
return true;
}
return (type == other.type &&
buffer.get() == other.buffer.get() &&
offset == other.offset &&
size == other.size &&
effective_full_buffer_size == other.effective_full_buffer_size);
}
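
// Records a BindBufferBase-style binding of |_buffer| to this slot; a null
// buffer simply clears the slot.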
void IndexedBufferBindingHost::IndexedBufferBinding::SetBindBufferBase(
Buffer* _buffer) {
if (!_buffer) {
Reset();
return;
}
type = IndexedBufferBindingType::kBindBufferBase;
buffer = _buffer;
offset = 0;
size = 0;
effective_full_buffer_size = 0;
}
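
// Records a BindBufferRange-style binding, remembering the buffer's size at
// bind time so that later reallocations can be detected (see OnBufferData
// and SetIsBound).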
void IndexedBufferBindingHost::IndexedBufferBinding::SetBindBufferRange(
Buffer* _buffer, GLintptr _offset, GLsizeiptr _size) {
if (!_buffer) {
Reset();
return;
}
type = IndexedBufferBindingType::kBindBufferRange;
buffer = _buffer;
offset = _offset;
size = _size;
  effective_full_buffer_size = _buffer->size();
}
void IndexedBufferBindingHost::IndexedBufferBinding::Reset() {
type = IndexedBufferBindingType::kBindBufferNone;
buffer = nullptr;
offset = 0;
size = 0;
effective_full_buffer_size = 0;
}
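
// |max_bindings| is the number of indexed slots tracked for |target|.
// |needs_emulation| enables clamping of ranges to the buffer's actual size
// before they reach the driver (see DoAdjustedBindBufferRange); it is
// currently required to be true.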
IndexedBufferBindingHost::IndexedBufferBindingHost(
uint32_t max_bindings,
GLenum target,
bool needs_emulation,
bool round_down_uniform_bind_buffer_range_size)
: is_bound_(false),
needs_emulation_(needs_emulation),
round_down_uniform_bind_buffer_range_size_(
round_down_uniform_bind_buffer_range_size),
max_non_null_binding_index_plus_one_(0u),
target_(target) {
DCHECK(needs_emulation);
buffer_bindings_.resize(max_bindings);
}
IndexedBufferBindingHost::~IndexedBufferBindingHost() {
SetIsBound(false);
}
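
// Binds |buffer| (or 0 if null) to indexed binding point |index| with
// glBindBufferBase, updates the shadowed binding state, and notifies the old
// and new buffers of the unbind/bind when this host is itself bound.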
void IndexedBufferBindingHost::DoBindBufferBase(GLuint index, Buffer* buffer) {
DCHECK_LT(index, buffer_bindings_.size());
GLuint service_id = buffer ? buffer->service_id() : 0;
glBindBufferBase(target_, index, service_id);
if (buffer_bindings_[index].buffer && is_bound_) {
buffer_bindings_[index].buffer->OnUnbind(target_, true);
}
buffer_bindings_[index].SetBindBufferBase(buffer);
if (buffer && is_bound_) {
buffer->OnBind(target_, true);
}
UpdateMaxNonNullBindingIndex(index);
}
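
// Like DoBindBufferBase, but binds the range [offset, offset + size). When
// emulation is needed, the range passed to the driver is clamped to the
// buffer's current size.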
void IndexedBufferBindingHost::DoBindBufferRange(GLuint index,
Buffer* buffer,
GLintptr offset,
GLsizeiptr size) {
DCHECK_LT(index, buffer_bindings_.size());
GLuint service_id = buffer ? buffer->service_id() : 0;
if (buffer && needs_emulation_) {
DoAdjustedBindBufferRange(target_, index, service_id, offset, size,
buffer->size(),
round_down_uniform_bind_buffer_range_size_);
} else {
glBindBufferRange(target_, index, service_id, offset, size);
}
if (buffer_bindings_[index].buffer && is_bound_) {
buffer_bindings_[index].buffer->OnUnbind(target_, true);
}
buffer_bindings_[index].SetBindBufferRange(buffer, offset, size);
if (buffer && is_bound_) {
buffer->OnBind(target_, true);
}
UpdateMaxNonNullBindingIndex(index);
}
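
// Issues glBindBufferRange with a range clamped to |full_buffer_size|,
// falling back to glBindBufferBase (or unbinding the slot) when no valid
// range can be expressed; see the per-situation comments below.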
// static
void IndexedBufferBindingHost::DoAdjustedBindBufferRange(
GLenum target,
GLuint index,
GLuint service_id,
GLintptr offset,
GLsizeiptr size,
GLsizeiptr full_buffer_size,
bool round_down_uniform_bind_buffer_range_size) {
GLsizeiptr adjusted_size = size;
if (offset >= full_buffer_size) {
// Situation 1: We can't really call glBindBufferRange with reasonable
// offset/size without triggering a GL error because size == 0 isn't
// valid.
    // TODO(zmo): it's ambiguous in the GL 4.1 spec whether BindBufferBase
    // generates a GL error in such a case. In reality, no error is generated
    // on MacOSX with AMD/4.1.
glBindBufferBase(target, index, service_id);
return;
}
if (offset + size > full_buffer_size) {
adjusted_size = full_buffer_size - offset;
// size needs to be a multiple of 4.
adjusted_size = adjusted_size & ~3;
if (adjusted_size == 0) {
// Situation 2: The original size is valid, but the adjusted size
// is 0 and isn't valid. Handle it the same way as situation 1.
glBindBufferBase(target, index, service_id);
return;
}
}
if (round_down_uniform_bind_buffer_range_size) {
adjusted_size = adjusted_size & ~3;
if (adjusted_size == 0) {
// This case is invalid and we shouldn't call the driver.
// Without rounding, this would generate INVALID_OPERATION
// at draw time because the size is not enough to fill the smallest
// possible uniform block (4 bytes).
// The size of the range is set in DoBindBufferRange and validated in
// BufferManager::RequestBuffersAccess. It is fine to not bind the buffer
// because any draw call with this buffer range binding will generate
// INVALID_OPERATION.
// Clear the buffer binding because it will not be used.
glBindBufferBase(target, index, 0);
return;
}
}
glBindBufferRange(target, index, service_id, offset, adjusted_size);
}
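
// Called when |buffer|'s data store has been respecified. When emulating,
// any range binding of |buffer| whose recorded size no longer matches is
// re-issued with a freshly clamped range.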
void IndexedBufferBindingHost::OnBufferData(Buffer* buffer) {
DCHECK(buffer);
if (needs_emulation_) {
    // If |buffer| is attached to any range binding and its size has changed,
    // the clamped range previously sent to the driver may be stale, so the
    // binding needs to be re-issued.
for (size_t ii = 0; ii < buffer_bindings_.size(); ++ii) {
if (buffer_bindings_[ii].buffer.get() != buffer)
continue;
if (buffer_bindings_[ii].type ==
IndexedBufferBindingType::kBindBufferRange &&
buffer_bindings_[ii].effective_full_buffer_size != buffer->size()) {
DoAdjustedBindBufferRange(target_, ii, buffer->service_id(),
buffer_bindings_[ii].offset,
buffer_bindings_[ii].size, buffer->size(),
round_down_uniform_bind_buffer_range_size_);
buffer_bindings_[ii].effective_full_buffer_size = buffer->size();
}
}
}
}
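
// Clears every indexed slot that currently holds |buffer| (for instance when
// the buffer is being destroyed). glBindBufferBase also resets the generic
// binding point for |target|, so the previously bound generic buffer, if
// any, is restored afterwards.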
void IndexedBufferBindingHost::RemoveBoundBuffer(
GLenum target,
Buffer* buffer,
Buffer* target_generic_bound_buffer,
bool have_context) {
DCHECK(buffer);
bool need_to_recover_generic_binding = false;
for (size_t ii = 0; ii < buffer_bindings_.size(); ++ii) {
if (buffer_bindings_[ii].buffer.get() == buffer) {
buffer_bindings_[ii].Reset();
UpdateMaxNonNullBindingIndex(ii);
if (have_context) {
glBindBufferBase(target, ii, 0);
need_to_recover_generic_binding = true;
}
}
}
if (need_to_recover_generic_binding && target_generic_bound_buffer)
glBindBuffer(target, target_generic_bound_buffer->service_id());
}
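
// Tracks whether the owning container (e.g. a transform feedback object) is
// currently bound. On bind, stale emulated ranges are refreshed; bind/unbind
// notifications are forwarded to all attached buffers on any change.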
void IndexedBufferBindingHost::SetIsBound(bool is_bound) {
if (is_bound && needs_emulation_) {
    // If any bound buffer changed size since the last time this transform
    // feedback was bound, the emulated ranges may need to be re-issued.
for (size_t ii = 0; ii < buffer_bindings_.size(); ++ii) {
Buffer* buffer = buffer_bindings_[ii].buffer.get();
if (buffer &&
buffer_bindings_[ii].type ==
IndexedBufferBindingType::kBindBufferRange &&
buffer_bindings_[ii].effective_full_buffer_size != buffer->size()) {
DoAdjustedBindBufferRange(target_, ii, buffer->service_id(),
buffer_bindings_[ii].offset,
buffer_bindings_[ii].size, buffer->size(),
round_down_uniform_bind_buffer_range_size_);
buffer_bindings_[ii].effective_full_buffer_size = buffer->size();
}
}
}
if (is_bound != is_bound_) {
is_bound_ = is_bound;
for (auto& bb : buffer_bindings_) {
if (bb.buffer) {
if (is_bound_) {
bb.buffer->OnBind(target_, true);
} else {
bb.buffer->OnUnbind(target_, true);
}
}
}
}
}
Buffer* IndexedBufferBindingHost::GetBufferBinding(GLuint index) const {
DCHECK_LT(index, buffer_bindings_.size());
return buffer_bindings_[index].buffer.get();
}
GLsizeiptr IndexedBufferBindingHost::GetBufferSize(GLuint index) const {
DCHECK_LT(index, buffer_bindings_.size());
return buffer_bindings_[index].size;
}
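
// Returns the number of bytes actually accessible through binding |index|:
// the whole buffer for BindBufferBase, the requested range clamped to the
// buffer's current size for BindBufferRange, and 0 if nothing is bound.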
GLsizeiptr IndexedBufferBindingHost::GetEffectiveBufferSize(
GLuint index) const {
DCHECK_LT(index, buffer_bindings_.size());
const IndexedBufferBinding& binding = buffer_bindings_[index];
if (!binding.buffer.get())
return 0;
GLsizeiptr full_buffer_size = binding.buffer->size();
switch (binding.type) {
case IndexedBufferBindingType::kBindBufferBase:
return full_buffer_size;
case IndexedBufferBindingType::kBindBufferRange:
if (binding.offset + binding.size > full_buffer_size)
return full_buffer_size - binding.offset;
return binding.size;
case IndexedBufferBindingType::kBindBufferNone:
return 0;
}
return buffer_bindings_[index].size;
}
GLintptr IndexedBufferBindingHost::GetBufferStart(GLuint index) const {
DCHECK_LT(index, buffer_bindings_.size());
return buffer_bindings_[index].offset;
}
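
// Re-establishes this host's bindings on the underlying context after a
// context switch, skipping slots whose state already matches |prev| (the
// host that was current before the switch).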
void IndexedBufferBindingHost::RestoreBindings(
IndexedBufferBindingHost* prev) {
// This is used only for UNIFORM_BUFFER bindings in context switching.
DCHECK(target_ == GL_UNIFORM_BUFFER && (!prev || prev->target_ == target_));
size_t limit = max_non_null_binding_index_plus_one_;
if (prev && prev->max_non_null_binding_index_plus_one_ > limit) {
limit = prev->max_non_null_binding_index_plus_one_;
}
for (size_t ii = 0; ii < limit; ++ii) {
if (prev && buffer_bindings_[ii] == prev->buffer_bindings_[ii]) {
continue;
}
switch (buffer_bindings_[ii].type) {
case IndexedBufferBindingType::kBindBufferBase:
case IndexedBufferBindingType::kBindBufferNone:
DoBindBufferBase(ii, buffer_bindings_[ii].buffer.get());
break;
case IndexedBufferBindingType::kBindBufferRange:
DoBindBufferRange(ii, buffer_bindings_[ii].buffer.get(),
buffer_bindings_[ii].offset,
buffer_bindings_[ii].size);
break;
}
}
}
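
// Maintains |max_non_null_binding_index_plus_one_| as an upper bound on the
// range of slots that may hold a non-null buffer, so that RestoreBindings
// only needs to walk the slots that can matter.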
void IndexedBufferBindingHost::UpdateMaxNonNullBindingIndex(
size_t changed_index) {
size_t plus_one = changed_index + 1;
DCHECK_LT(changed_index, buffer_bindings_.size());
if (buffer_bindings_[changed_index].buffer.get()) {
max_non_null_binding_index_plus_one_ =
std::max(max_non_null_binding_index_plus_one_, plus_one);
} else {
if (plus_one == max_non_null_binding_index_plus_one_) {
for (size_t ii = changed_index; ii > 0; --ii) {
if (buffer_bindings_[ii - 1].buffer.get()) {
max_non_null_binding_index_plus_one_ = ii;
break;
}
}
}
}
}
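
// Returns true if |buffer| is attached to any of the first
// |used_binding_count| indexed slots.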
bool IndexedBufferBindingHost::UsesBuffer(
size_t used_binding_count, const Buffer* buffer) const {
DCHECK(buffer);
DCHECK_LE(used_binding_count, buffer_bindings_.size());
for (size_t ii = 0; ii < used_binding_count; ++ii) {
if (buffer == buffer_bindings_[ii].buffer)
return true;
}
return false;
}
} // namespace gles2
} // namespace gpu