Compare commits
No commits in common. "a7aa2fa59826bf154ea977d331014afaaafdc8cd" and "96cb1af807da40290bebe33bf450b2010f5167e4" have entirely different histories.
a7aa2fa598...96cb1af807
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -12,18 +12,14 @@ namespace godot {
 
 namespace {
 
-constexpr size_t WIDTH = 1280;
-constexpr size_t HEIGHT = 720;
-
-#define CLIP(X) ( (X) > 255 ? 255 : (X) < 0 ? 0 : X)
-
-void YUVToRGB(int Y, int U, int V, uint8_t& R, uint8_t& G, uint8_t& B) {
-    R = CLIP((Y + (91881 * V >> 16) - 179));
-    G = CLIP((Y - ((22544 * U + 46793 * V) >> 16) + 135));
-    B = CLIP((Y + (116129 * U >> 16) - 226));
+inline void YUVToRGB(int Y, int U, int V, BYTE& R, BYTE& G, BYTE& B) {
+    R = std::max(0, std::min(255, static_cast<int>(Y + 1.402 * (V - 128))));
+    G = std::max(0, std::min(255, static_cast<int>(Y - 0.344136 * (U - 128) - 0.714136 * (V - 128))));
+    B = std::max(0, std::min(255, static_cast<int>(Y + 1.772 * (U - 128))));
 }
 
-void ConvertToRGB420(unsigned char* planes[4], int stride[4], godot::Ref<godot::Image> m_imageBuffer, int width, int height) {
+inline void ConvertToRGB420(unsigned char* planes[4], int stride[4], godot::Ref<godot::Image> m_imageBuffer, int width, int height) {
+    constexpr float inv_255 = 1.0f / 255.0f;
     auto data = m_imageBuffer->get_data().ptr();
 
     for (int y = 0; y < height; ++y) {
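Both versions of YUVToRGB compute the same BT.601 YUV-to-RGB conversion: the removed fixed-point constants are the floating-point factors scaled by 2^16 (91881/65536 ≈ 1.402, 22544/65536 ≈ 0.344, 46793/65536 ≈ 0.714, 116129/65536 ≈ 1.772), and the -179/+135/-226 offsets fold in the ±128 chroma bias. A standalone check (hypothetical snippet, not part of either commit):

    #include <algorithm>
    #include <cstdio>

    int main() {
        const int Y = 128, U = 64, V = 192;  // arbitrary sample values
        // removed fixed-point form (a7aa2fa598 side)
        int r_fixed = std::max(0, std::min(255, Y + (91881 * V >> 16) - 179));
        // added floating-point form (96cb1af807 side)
        int r_float = std::max(0, std::min(255, static_cast<int>(Y + 1.402 * (V - 128))));
        std::printf("%d %d\n", r_fixed, r_float);  // prints 218 217: equal up to rounding
    }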
@@ -33,7 +29,7 @@ void ConvertToRGB420(unsigned char* planes[4], int stride[4], godot::Ref<godot::
             int U = planes[VPX_PLANE_U][uv_row + (x / 2)];
             int V = planes[VPX_PLANE_V][uv_row + (x / 2)];
 
-            uint8_t R, G, B;
+            BYTE R, G, B;
             YUVToRGB(Y, U, V, R, G, B);
 
             const auto index = (y * width + x) * 3;
@@ -62,8 +58,8 @@ Obs::Obs()
     res = vpx_codec_enc_config_default(vpx_codec_vp9_cx(), &m_cfg, 0);
     ERR_FAIL_COND_MSG(res != VPX_CODEC_OK, "VP9 cfg fail");
 
-    m_cfg.g_w = WIDTH;
-    m_cfg.g_h = HEIGHT;
+    m_cfg.g_w = 1920;
+    m_cfg.g_h = 1080;
     m_cfg.rc_target_bitrate = 1500;
     m_cfg.g_timebase.num = 1000;
     m_cfg.g_timebase.den = 30001;
@@ -82,7 +78,7 @@ Obs::Obs()
     res = vpx_codec_dec_init(&m_decoder, vpx_codec_vp9_dx(), nullptr, 0);
     ERR_FAIL_COND_MSG(res != VPX_CODEC_OK, "Could not intitialize decoder");
 
-    m_imageBuffer = Image::create(WIDTH, HEIGHT, false, godot::Image::FORMAT_RGB8); // TODO: adjust resolution
+    m_imageBuffer = Image::create(1920, 1080, false, godot::Image::FORMAT_RGB8); // TODO: adjust resolution
 }
 
 Obs::~Obs()
@@ -100,13 +96,13 @@ PackedByteArray Obs::getEncodedScreenFrame(size_t id)
     vpx_codec_err_t res{};
     ERR_FAIL_COND_V_MSG(!&m_encoder, {}, "VP9 encoder is not initialized");
 
-    auto frame = m_capturer.capture(id, WIDTH, HEIGHT);
+    auto frame = m_capturer.capture(id);
     if (frame.data.empty()) {
         return {};
     }
 
     vpx_image_t img;
-    vpx_img_wrap(&img, VPX_IMG_FMT_I420, WIDTH, HEIGHT, 1, frame.data.data());
+    vpx_img_wrap(&img, VPX_IMG_FMT_I420, 1920, 1080, 1, frame.data.data());
 
     res = vpx_codec_encode(&m_encoder, &img, 0, 1, 0, VPX_DL_REALTIME);
     ERR_FAIL_COND_V_MSG(res != VPX_CODEC_OK, {}, "Could not encode frame");
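vpx_img_wrap with VPX_IMG_FMT_I420 expects frame.data to already hold a planar I420 image: a full-resolution Y plane followed by U and V planes subsampled by two in each dimension, width * height * 3 / 2 bytes in total (the same yuvSize the capturer computes further down). For the hard-coded 1920x1080 the sizes work out as follows (arithmetic sketch, not code from the repository):

    constexpr size_t w = 1920, h = 1080;
    constexpr size_t ySize  = w * h;              // 2,073,600 bytes, one per pixel
    constexpr size_t uvSize = (w / 2) * (h / 2);  //   518,400 bytes per chroma plane (4:2:0)
    static_assert(ySize + 2 * uvSize == w * h * 3 / 2);  // 3,110,400 bytes total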
@@ -152,7 +148,7 @@ void Obs::renderFrameToMesh(PackedByteArray frame, Ref<StandardMaterial3D> mat)
     vpx_codec_iter_t iter = NULL;
     vpx_image_t* img = NULL;
     while ((img = vpx_codec_get_frame(&m_decoder, &iter)) != NULL) {
-        ConvertToRGB420(img->planes, img->stride, m_imageBuffer, WIDTH, HEIGHT);
+        ConvertToRGB420(img->planes, img->stride, m_imageBuffer, 1920, 1080);
 
         if (m_imageTexture.is_null()) {
             m_imageTexture = ImageTexture::create_from_image(m_imageBuffer);

@@ -89,7 +89,7 @@ public:
 
     }
 
-    Frame nextFrame(size_t outputWidth, size_t outputHeight)
+    Frame nextFrame()
     {
         IDXGIResource* desktopResource = nullptr;
         DXGI_OUTDUPL_FRAME_INFO frameInfo;
@@ -103,7 +103,7 @@ public:
         if (FAILED(hr)) {
             if (hr == DXGI_ERROR_WAIT_TIMEOUT) {
                 // TODO: maybe adjust this value?
-                //std::this_thread::sleep_for(std::chrono::milliseconds(33));
+                std::this_thread::sleep_for(std::chrono::milliseconds(33));
                 return {};
             }
 
@@ -151,40 +151,13 @@ public:
             return { m_width, m_height, m_buffer };
         }
 
-        /*
-        // Rescale if necessary
-        if (m_width != outputWidth || m_height != outputHeight) {
-            m_rescaleBuffer.resize(outputWidth * outputHeight * 4);
-
-            rescale(mappedResource, m_width, m_height, outputWidth, outputHeight);
-
-            // Convert BGRA to YUV420
-            const auto yuvSize = outputWidth * outputHeight * 3 / 2;
-            if (m_buffer.size() != yuvSize) {
-                m_buffer.resize(yuvSize);
-            }
-
-            rgbToYuv2(outputWidth, outputHeight);
-
-            // Unmap and release the staging texture
-            m_context->Unmap(m_stagingTexture.Get(), 0);
-
-            return { m_width, m_height, m_buffer };
-        }
-        */
-
-        // Convert BGRA to YUV420
-        const auto yuvSize = outputWidth * outputHeight * 3 / 2;
-        if (m_buffer.size() != yuvSize) {
-            m_buffer.resize(yuvSize);
-        }
-
-        if (m_width != outputWidth || m_height != outputHeight) {
-            rgbToYuvRescale(mappedResource, m_width, m_height, outputWidth, outputHeight);
-        }
-        else {
-            rgbToYuv(mappedResource, outputWidth, outputHeight);
-        }
+        // Convert BGRA to YUV420
+        const auto yuvSize = m_width * m_height * 3 / 2;
+        if (m_buffer.size() != yuvSize) {
+            m_buffer.resize(yuvSize);
+        }
+
+        rgbToYuv(mappedResource, m_width, m_height);
 
         // Unmap and release the staging texture
         m_context->Unmap(m_stagingTexture.Get(), 0);
@@ -192,55 +165,6 @@ public:
         return {m_width, m_height, m_buffer};
     }
 
-    struct Color {
-        uint8_t r, g, b, a;
-    };
-
-    void rescale(D3D11_MAPPED_SUBRESOURCE mappedResource, size_t inputWidth, size_t inputHeight, size_t outputWidth, size_t outputHeight)
-    {
-        auto rgb = static_cast<uint8_t*>(mappedResource.pData);
-
-
-        for (size_t y = 0; y < outputHeight; ++y) {
-            for (size_t x = 0; x < outputWidth; ++x) {
-                // Calculate corresponding position in the input image
-                float srcX = static_cast<float>(x) / outputWidth * inputWidth;
-                float srcY = static_cast<float>(y) / outputHeight * inputHeight;
-
-                // Calculate the four surrounding pixels in the input image
-                size_t x1 = static_cast<size_t>(std::floor(srcX));
-                size_t x2 = std::min(x1 + 1, inputWidth - 1);
-                size_t y1 = static_cast<size_t>(std::floor(srcY));
-                size_t y2 = std::min(y1 + 1, inputHeight - 1);
-
-                // Calculate interpolation weights
-                float dx = srcX - x1;
-                float dy = srcY - y1;
-
-                // Interpolate RGBA values using bilinear interpolation
-                Color c11 = { rgb[(y1 * inputWidth + x1) * 4], rgb[(y1 * inputWidth + x1) * 4 + 1], rgb[(y1 * inputWidth + x1) * 4 + 2], rgb[(y1 * inputWidth + x1) * 4 + 3] };
-                Color c12 = { rgb[(y1 * inputWidth + x2) * 4], rgb[(y1 * inputWidth + x2) * 4 + 1], rgb[(y1 * inputWidth + x2) * 4 + 2], rgb[(y1 * inputWidth + x2) * 4 + 3] };
-                Color c21 = { rgb[(y2 * inputWidth + x1) * 4], rgb[(y2 * inputWidth + x1) * 4 + 1], rgb[(y2 * inputWidth + x1) * 4 + 2], rgb[(y2 * inputWidth + x1) * 4 + 3] };
-                Color c22 = { rgb[(y2 * inputWidth + x2) * 4], rgb[(y2 * inputWidth + x2) * 4 + 1], rgb[(y2 * inputWidth + x2) * 4 + 2], rgb[(y2 * inputWidth + x2) * 4 + 3] };
-
-                Color result = {
-                    static_cast<uint8_t>((1.0f - dx) * (1.0f - dy) * c11.r + dx * (1.0f - dy) * c12.r + (1.0f - dx) * dy * c21.r + dx * dy * c22.r),
-                    static_cast<uint8_t>((1.0f - dx) * (1.0f - dy) * c11.g + dx * (1.0f - dy) * c12.g + (1.0f - dx) * dy * c21.g + dx * dy * c22.g),
-                    static_cast<uint8_t>((1.0f - dx) * (1.0f - dy) * c11.b + dx * (1.0f - dy) * c12.b + (1.0f - dx) * dy * c21.b + dx * dy * c22.b),
-                    static_cast<uint8_t>((1.0f - dx) * (1.0f - dy) * c11.a + dx * (1.0f - dy) * c12.a + (1.0f - dx) * dy * c21.a + dx * dy * c22.a)
-                };
-
-                // Set the result in the output buffer
-                m_rescaleBuffer[(y * outputWidth + x) * 4] = result.r;
-                m_rescaleBuffer[(y * outputWidth + x) * 4 + 1] = result.g;
-                m_rescaleBuffer[(y * outputWidth + x) * 4 + 2] = result.b;
-                m_rescaleBuffer[(y * outputWidth + x) * 4 + 3] = result.a;
-            }
-        }
-
-
-    }
-
     void rgbToYuv(D3D11_MAPPED_SUBRESOURCE mappedResource, size_t width, size_t height)
     {
         auto rgb = static_cast<uint8_t*>(mappedResource.pData);
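The removed rescale() is a plain bilinear resampler over the BGRA staging data: each output pixel is mapped back to a source position, and the four neighbouring texels are blended with weights derived from the fractional offsets dx and dy. The per-channel blend it spells out four times reduces to the following (hypothetical helper, shown only to restate the removed arithmetic):

    #include <cstdint>

    inline uint8_t lerp2d(uint8_t c11, uint8_t c12, uint8_t c21, uint8_t c22,
                          float dx, float dy) {
        // the four neighbour weights sum to 1
        return static_cast<uint8_t>((1.0f - dx) * (1.0f - dy) * c11
                                  + dx          * (1.0f - dy) * c12
                                  + (1.0f - dx) * dy          * c21
                                  + dx          * dy          * c22);
    }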
@@ -267,8 +191,7 @@ public:
 
                     m_buffer[i++] = ((66 * r + 129 * g + 25 * b) >> 8) + 16;
                 }
-            }
-            else {
+            } else {
                 for (size_t x = 0; x < width; x += 1) {
                     uint8_t b = rgb[4 * i];
                     uint8_t g = rgb[4 * i + 1];
@@ -280,89 +203,6 @@ public:
         }
     }
 
-    void rgbToYuv2(size_t width, size_t height)
-    {
-        auto rgb = m_rescaleBuffer.data();
-
-        size_t upos = width * height;
-        size_t vpos = upos + upos / 4;
-        size_t i = 0;
-
-        for (size_t line = 0; line < height; ++line) {
-            if (!(line % 2)) {
-                for (size_t x = 0; x < width; x += 2) {
-                    uint8_t b = rgb[4 * i];
-                    uint8_t g = rgb[4 * i + 1];
-                    uint8_t r = rgb[4 * i + 2];
-
-                    m_buffer[i++] = ((66 * r + 129 * g + 25 * b) >> 8) + 16;
-
-                    m_buffer[upos++] = ((-38 * r + -74 * g + 112 * b) >> 8) + 128;
-                    m_buffer[vpos++] = ((112 * r + -94 * g + -18 * b) >> 8) + 128;
-
-                    b = rgb[4 * i];
-                    g = rgb[4 * i + 1];
-                    r = rgb[4 * i + 2];
-
-                    m_buffer[i++] = ((66 * r + 129 * g + 25 * b) >> 8) + 16;
-                }
-            }
-            else {
-                for (size_t x = 0; x < width; x += 1) {
-                    uint8_t b = rgb[4 * i];
-                    uint8_t g = rgb[4 * i + 1];
-                    uint8_t r = rgb[4 * i + 2];
-
-                    m_buffer[i++] = ((66 * r + 129 * g + 25 * b) >> 8) + 16;
-                }
-            }
-        }
-    }
-
-    void rgbToYuvRescale(D3D11_MAPPED_SUBRESOURCE mappedResource, size_t width, size_t height, size_t outputWidth, size_t outputHeight)
-    {
-        auto rgb = static_cast<uint8_t*>(mappedResource.pData);
-
-        size_t upos = outputWidth * outputHeight;
-        size_t vpos = upos + upos / 4;
-        size_t i = 0;
-
-        for (size_t line = 0; line < outputHeight; ++line) {
-            size_t originalLine = static_cast<size_t>((static_cast<double>(line) / outputHeight) * height);
-
-            if (!(line % 2)) {
-                for (size_t x = 0; x < outputWidth; x += 2) {
-                    size_t originalX = static_cast<size_t>((static_cast<double>(x) / outputWidth) * width);
-                    uint8_t b = rgb[4 * (originalLine * width + originalX)];
-                    uint8_t g = rgb[4 * (originalLine * width + originalX) + 1];
-                    uint8_t r = rgb[4 * (originalLine * width + originalX) + 2];
-
-                    m_buffer[i++] = ((66 * r + 129 * g + 25 * b) >> 8) + 16;
-
-                    m_buffer[upos++] = ((-38 * r + -74 * g + 112 * b) >> 8) + 128;
-                    m_buffer[vpos++] = ((112 * r + -94 * g + -18 * b) >> 8) + 128;
-
-                    originalX = static_cast<size_t>((static_cast<double>(x + 1) / outputWidth) * width);
-                    b = rgb[4 * (originalLine * width + originalX)];
-                    g = rgb[4 * (originalLine * width + originalX) + 1];
-                    r = rgb[4 * (originalLine * width + originalX) + 2];
-
-                    m_buffer[i++] = ((66 * r + 129 * g + 25 * b) >> 8) + 16;
-                }
-            }
-            else {
-                for (size_t x = 0; x < outputWidth; x += 1) {
-                    size_t originalX = static_cast<size_t>((static_cast<double>(x) / outputWidth) * width);
-                    uint8_t b = rgb[4 * (originalLine * width + originalX)];
-                    uint8_t g = rgb[4 * (originalLine * width + originalX) + 1];
-                    uint8_t r = rgb[4 * (originalLine * width + originalX) + 2];
-
-                    m_buffer[i++] = ((66 * r + 129 * g + 25 * b) >> 8) + 16;
-                }
-            }
-        }
-    }
-
     void reset()
     {
         m_stagingTexture.Reset();
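The removed rgbToYuv2 and rgbToYuvRescale, like the surviving rgbToYuv, use the same integer video-range BT.601 coefficients: luma is written for every pixel, while U and V are written only on even lines and for every second pixel, giving the 4:2:0 layout with the U plane starting at width * height and the V plane a quarter of a plane later. Spot-checking the constants (hypothetical snippet, not part of the diff):

    // video-range BT.601: white maps to Y = 235, any neutral grey to U = V = 128
    constexpr int r = 255, g = 255, b = 255;
    constexpr int y = ((66 * r + 129 * g + 25 * b) >> 8) + 16;     // 235
    constexpr int u = ((-38 * r + -74 * g + 112 * b) >> 8) + 128;  // 128
    constexpr int v = ((112 * r + -94 * g + -18 * b) >> 8) + 128;  // 128
    static_assert(y == 235 && u == 128 && v == 128);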
@@ -385,7 +225,6 @@ private:
     Microsoft::WRL::ComPtr<ID3D11Texture2D> m_stagingTexture;
 
     std::vector<uint8_t> m_buffer;
-    std::vector<uint8_t> m_rescaleBuffer;
     bool m_frameAcquired{ false };
 
     size_t m_width{};
@@ -402,9 +241,9 @@ WindowCapturer::~WindowCapturer()
 {
 }
 
-Frame WindowCapturer::capture(size_t id, size_t width, size_t height)
+Frame WindowCapturer::capture(size_t id)
 {
-    return m_impl->nextFrame(width, height);
+    return m_impl->nextFrame();
 }
 
 }

@@ -20,7 +20,7 @@ public:
     WindowCapturer();
     ~WindowCapturer();
 
-    Frame capture(size_t id, size_t width, size_t height);
+    Frame capture(size_t id = 0);
 
 private:
     std::unique_ptr<AcceleratedWindowCapturer> m_impl;
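With the trimmed interface the caller no longer passes a target resolution: capture() now defaults its id argument to 0, and nextFrame() returns the frame at the native size of the duplicated output, which the Obs changes above simply assume to be 1920x1080. A minimal usage sketch (hypothetical caller, not in the diff):

    WindowCapturer capturer;
    Frame frame = capturer.capture();   // default id = 0, native resolution
    if (!frame.data.empty()) {
        // frame.data holds the I420 pixels filled in by nextFrame()
    }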