1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
// media/base/mac/video_frame_mac.mm
// Copyright 2012 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifdef UNSAFE_BUFFERS_BUILD
// TODO(crbug.com/40285824): Remove this and convert code to safer constructs.
#pragma allow_unsafe_buffers
#endif
#include "media/base/mac/video_frame_mac.h"
#include <stddef.h>
#include <stdint.h>
#include <algorithm>
#include "base/logging.h"
#include "base/strings/sys_string_conversions.h"
#include "media/base/mac/color_space_util_mac.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
#include "ui/gfx/gpu_memory_buffer.h"
namespace media {
namespace {
// Maximum number of planes supported by this implementation.
const int kMaxPlanes = 3;
// CVPixelBuffer release callback. See |WrapVideoFrameInCVPixelBuffer()|.
// Invoked by CoreVideo when the wrapping CVPixelBuffer is destroyed: frees
// the heap-allocated plane descriptor passed to
// CVPixelBufferCreateWithPlanarBytes() and balances the manual AddRef()
// taken on the wrapped VideoFrame.
void CvPixelBufferReleaseCallback(void* frame_ref,
                                  const void* data,
                                  size_t size,
                                  size_t num_planes,
                                  const void* planes[]) {
  // |data| is the calloc'd dummy plane descriptor (see the caller).
  free(const_cast<void*>(data));
  // static_cast, not reinterpret_cast, is the correct named cast for
  // converting a void* back to its original pointer type.
  static_cast<const VideoFrame*>(frame_ref)->Release();
}
// Maps each supported VideoPixelFormat to the CVPixelFormat values we accept
// for it. If we start supporting RGB frame encoding this mapping will need
// to be extended.
bool IsAcceptableCvPixelFormat(VideoPixelFormat format, OSType cv_format) {
  switch (format) {
    case PIXEL_FORMAT_I420:
      return cv_format == kCVPixelFormatType_420YpCbCr8Planar ||
             cv_format == kCVPixelFormatType_420YpCbCr8PlanarFullRange;
    case PIXEL_FORMAT_NV12:
      return cv_format == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange ||
             cv_format == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
    case PIXEL_FORMAT_NV12A:
      return cv_format ==
             kCVPixelFormatType_420YpCbCr8VideoRange_8A_TriPlanar;
    default:
      return false;
  }
}
// Returns true iff |pixel_buffer| already carries the complete set of
// colorimetric attachments: color primaries, transfer function, and YCbCr
// matrix. Used to decide whether default Rec. 709 attachments are needed.
bool CvPixelBufferHasColorSpace(CVPixelBufferRef pixel_buffer) {
  if (@available(macOS 12, iOS 15, *)) {
    // Modern API: CVBufferHasAttachment() checks presence without fetching.
    return CVBufferHasAttachment(pixel_buffer,
                                 kCVImageBufferColorPrimariesKey) &&
           CVBufferHasAttachment(pixel_buffer,
                                 kCVImageBufferTransferFunctionKey) &&
           CVBufferHasAttachment(pixel_buffer, kCVImageBufferYCbCrMatrixKey);
  } else {
#if !defined(__IPHONE_15_0) || __IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_15_0
    // Older OS fallback: probe each attachment via CVBufferGetAttachment()
    // (deprecated in favor of CVBufferHasAttachment on newer SDKs).
    return CVBufferGetAttachment(pixel_buffer, kCVImageBufferColorPrimariesKey,
                                 nullptr) &&
           CVBufferGetAttachment(pixel_buffer,
                                 kCVImageBufferTransferFunctionKey, nullptr) &&
           CVBufferGetAttachment(pixel_buffer, kCVImageBufferYCbCrMatrixKey,
                                 nullptr);
#else
    // With a deployment target of iOS 15+, the @available branch above is
    // always taken at runtime; this arm exists only so the code compiles.
    return false;
#endif
  }
}
// Attaches colorimetric metadata (primaries, transfer function, YCbCr
// matrix) to |pixel_buffer|. If |frame_cs| is valid and maps to CoreVideo
// color values, those are applied; otherwise, if the buffer has no color
// attachments at all, ITU-R Rec. 709 defaults are applied. A buffer that
// already has attachments and an invalid |frame_cs| is left untouched.
void SetCvPixelBufferColorSpace(const gfx::ColorSpace& frame_cs,
                                CVPixelBufferRef pixel_buffer) {
  // Apply required colorimetric attachments.
  CFStringRef primary, transfer, matrix;
  if (frame_cs.IsValid() &&
      GetImageBufferColorValues(frame_cs, &primary, &transfer, &matrix)) {
    // The frame's color space could be expressed as CoreVideo constants:
    // propagate all three attachments onto the buffer.
    CVBufferSetAttachment(pixel_buffer, kCVImageBufferColorPrimariesKey,
                          primary, kCVAttachmentMode_ShouldPropagate);
    CVBufferSetAttachment(pixel_buffer, kCVImageBufferTransferFunctionKey,
                          transfer, kCVAttachmentMode_ShouldPropagate);
    CVBufferSetAttachment(pixel_buffer, kCVImageBufferYCbCrMatrixKey, matrix,
                          kCVAttachmentMode_ShouldPropagate);
  } else if (!CvPixelBufferHasColorSpace(pixel_buffer)) {
    // No usable color space on the frame and none on the buffer: fall back
    // to Rec. 709 so downstream consumers have well-defined colorimetry.
    CVBufferSetAttachment(pixel_buffer, kCVImageBufferColorPrimariesKey,
                          kCVImageBufferColorPrimaries_ITU_R_709_2,
                          kCVAttachmentMode_ShouldPropagate);
    CVBufferSetAttachment(pixel_buffer, kCVImageBufferTransferFunctionKey,
                          kCVImageBufferTransferFunction_ITU_R_709_2,
                          kCVAttachmentMode_ShouldPropagate);
    CVBufferSetAttachment(pixel_buffer, kCVImageBufferYCbCrMatrixKey,
                          kCVImageBufferYCbCrMatrix_ITU_R_709_2,
                          kCVAttachmentMode_ShouldPropagate);
  }
}
} // namespace
// Wraps |frame| in a CVPixelBuffer without copying pixel data.
//
// Fast path: if the frame is an uncropped IOSurface-backed GPU memory buffer,
// the CVPixelBuffer is created directly over the IOSurface. Otherwise the
// frame is (if needed) mapped into CPU memory and the CVPixelBuffer is
// created over the frame's planes via CVPixelBufferCreateWithPlanarBytes,
// with the frame kept alive by a manual ref released in
// CvPixelBufferReleaseCallback.
//
// Returns a null ScopedCFTypeRef on failure (null frame, unsupported pixel
// format, or CoreVideo error).
MEDIA_EXPORT base::apple::ScopedCFTypeRef<CVPixelBufferRef>
WrapVideoFrameInCVPixelBuffer(scoped_refptr<VideoFrame> frame) {
  base::apple::ScopedCFTypeRef<CVPixelBufferRef> pixel_buffer;
  if (!frame) {
    return pixel_buffer;
  }

  const gfx::Rect& visible_rect = frame->visible_rect();
  bool crop_needed = visible_rect != gfx::Rect(frame->coded_size());
  if (!crop_needed) {
    // If the frame has a GMB, yank out its IOSurface if possible.
    if (frame->HasMappableGpuBuffer()) {
      auto handle = frame->GetGpuMemoryBufferHandle();
      if (handle.type == gfx::GpuMemoryBufferType::IO_SURFACE_BUFFER) {
        gfx::ScopedIOSurface io_surface = handle.io_surface;
        if (io_surface) {
          CVReturn cv_return = CVPixelBufferCreateWithIOSurface(
              nullptr, io_surface.get(), nullptr,
              pixel_buffer.InitializeInto());
          if (cv_return != kCVReturnSuccess) {
            DLOG(ERROR) << "CVPixelBufferCreateWithIOSurface failed: "
                        << cv_return;
            pixel_buffer.reset();
          } else if (!IsAcceptableCvPixelFormat(
                         frame->format(),
                         CVPixelBufferGetPixelFormatType(
                             pixel_buffer.get()))) {
            // Note: this check must only run when creation succeeded;
            // previously it queried the pixel format of a null buffer after
            // a failed CVPixelBufferCreateWithIOSurface.
            DLOG(ERROR) << "Dropping CVPixelBuffer w/ incorrect format.";
            pixel_buffer.reset();
          } else {
            SetCvPixelBufferColorSpace(frame->ColorSpace(),
                                       pixel_buffer.get());
          }
          // Success or failure, the IOSurface path is terminal: either we
          // return the wrapped buffer or a null one.
          return pixel_buffer;
        }
      }
    }
  }

  // If the frame is backed by a GPU buffer, but needs cropping, map it and
  // handle like a software frame. There is no memcpy here.
  if (frame->HasMappableGpuBuffer()) {
    frame = ConvertToMemoryMappedFrame(std::move(frame));
  }
  if (!frame) {
    return pixel_buffer;
  }

  // VideoFrame only supports YUV formats and most of them are 'YVU' ordered,
  // which CVPixelBuffer does not support. This means we effectively can only
  // represent I420 and NV12 frames. In addition, VideoFrame does not carry
  // colorimetric information, so this function assumes standard video range
  // and ITU Rec 709 primaries.
  const VideoPixelFormat video_frame_format = frame->format();
  const bool is_full_range =
      frame->ColorSpace().GetRangeID() == gfx::ColorSpace::RangeID::FULL;
  OSType cv_format;
  if (video_frame_format == PIXEL_FORMAT_I420) {
    cv_format = is_full_range ? kCVPixelFormatType_420YpCbCr8PlanarFullRange
                              : kCVPixelFormatType_420YpCbCr8Planar;
  } else if (video_frame_format == PIXEL_FORMAT_NV12) {
    cv_format = is_full_range ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
                              : kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
  } else if (video_frame_format == PIXEL_FORMAT_NV12A) {
    if (is_full_range) {
      DVLOG(1) << "Full range NV12A is not supported by the OS.";
    }
    cv_format = kCVPixelFormatType_420YpCbCr8VideoRange_8A_TriPlanar;
  } else {
    DLOG(ERROR) << "Unsupported frame format: " << video_frame_format;
    return pixel_buffer;
  }
  DCHECK(IsAcceptableCvPixelFormat(video_frame_format, cv_format));

  int num_planes = VideoFrame::NumPlanes(video_frame_format);
  DCHECK_LE(num_planes, kMaxPlanes);

  // Build arrays for each plane's data pointer, dimensions and byte alignment.
  void* plane_ptrs[kMaxPlanes];
  size_t plane_widths[kMaxPlanes];
  size_t plane_heights[kMaxPlanes];
  size_t plane_bytes_per_row[kMaxPlanes];
  for (int plane_i = 0; plane_i < num_planes; ++plane_i) {
    plane_ptrs[plane_i] = const_cast<uint8_t*>(frame->visible_data(plane_i));
    gfx::Size plane_size =
        VideoFrame::PlaneSize(video_frame_format, plane_i, visible_rect.size());
    plane_widths[plane_i] = plane_size.width();
    plane_heights[plane_i] = plane_size.height();
    plane_bytes_per_row[plane_i] = frame->stride(plane_i);
  }

  // CVPixelBufferCreateWithPlanarBytes needs a dummy plane descriptor or the
  // release callback will not execute. The descriptor is freed in the callback.
  void* descriptor =
      calloc(1, std::max(sizeof(CVPlanarPixelBufferInfo_YCbCrPlanar),
                         sizeof(CVPlanarPixelBufferInfo_YCbCrBiPlanar)));

  // Wrap the frame's data in a CVPixelBuffer. Because this is a C API, we can't
  // give it a smart pointer to the frame, so instead pass a raw pointer and
  // increment the frame's reference count manually.
  CVReturn result = CVPixelBufferCreateWithPlanarBytes(
      kCFAllocatorDefault, visible_rect.width(), visible_rect.height(),
      cv_format, descriptor, 0, num_planes, plane_ptrs, plane_widths,
      plane_heights, plane_bytes_per_row, &CvPixelBufferReleaseCallback,
      frame.get(), nullptr, pixel_buffer.InitializeInto());
  if (result != kCVReturnSuccess) {
    DLOG(ERROR) << " CVPixelBufferCreateWithPlanarBytes failed: " << result;
    return base::apple::ScopedCFTypeRef<CVPixelBufferRef>(nullptr);
  }

  // The CVPixelBuffer now references the data of the frame, so increment its
  // reference count manually. The release callback set on the pixel buffer will
  // release the frame.
  frame->AddRef();

  SetCvPixelBufferColorSpace(frame->ColorSpace(), pixel_buffer.get());
  return pixel_buffer;
}
// Returns true when |io_surface|'s pixel format is one of the formats this
// helper accepts as WebGPU-compatible.
MEDIA_EXPORT bool IOSurfaceIsWebGPUCompatible(IOSurfaceRef io_surface) {
  // Accepted pixel formats (identical set to the original switch).
  static constexpr OSType kCompatibleFormats[] = {
      kCVPixelFormatType_64RGBAHalf,
      kCVPixelFormatType_TwoComponent16Half,
      kCVPixelFormatType_OneComponent16Half,
      kCVPixelFormatType_ARGB2101010LEPacked,
      kCVPixelFormatType_32RGBA,
      kCVPixelFormatType_32BGRA,
      kCVPixelFormatType_TwoComponent8,
      kCVPixelFormatType_OneComponent8,
      kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
  };
  const OSType format = IOSurfaceGetPixelFormat(io_surface);
  for (OSType compatible : kCompatibleFormats) {
    if (format == compatible) {
      return true;
    }
  }
  return false;
}
} // namespace media