Video streaming PoC

This commit is contained in:
weil 2024-01-29 10:56:48 +01:00
parent 6fd700419b
commit 18d2488c42
4 changed files with 404 additions and 0 deletions

173
src/GodotObs.cpp Normal file
View File

@@ -0,0 +1,173 @@
#include "GodotObs.h"
#include <godot_cpp/variant/utility_functions.hpp>
#include <Windows.h>
#include "platform/win32/capture_window.h"
//#define PROFILER_ENABLED
#include<chrono>
#pragma comment(lib, "User32.lib")
#pragma comment(lib, "Gdi32.lib")
namespace godot {
namespace {
inline void YUVToRGB(int Y, int U, int V, BYTE& R, BYTE& G, BYTE& B) {
R = std::max(0, std::min(255, static_cast<int>(Y + 1.402 * (V - 128))));
G = std::max(0, std::min(255, static_cast<int>(Y - 0.344136 * (U - 128) - 0.714136 * (V - 128))));
B = std::max(0, std::min(255, static_cast<int>(Y + 1.772 * (U - 128))));
}
inline void ConvertToRGB420(unsigned char* planes[4], int stride[4], godot::Ref<godot::Image> m_imageBuffer, int width, int height) {
constexpr float inv_255 = 1.0f / 255.0f;
auto data = m_imageBuffer->get_data().ptr();
for (int y = 0; y < height; ++y) {
int uv_row = (y / 2) * stride[VPX_PLANE_U];
for (int x = 0; x < width; ++x) {
int Y = planes[VPX_PLANE_Y][y * stride[VPX_PLANE_Y] + x];
int U = planes[VPX_PLANE_U][uv_row + (x / 2)];
int V = planes[VPX_PLANE_V][uv_row + (x / 2)];
BYTE R, G, B;
YUVToRGB(Y, U, V, R, G, B);
const auto index = (y * width + x) * 3;
*(uint8_t*)&data[index] = R;
*(uint8_t*)&data[index+1] = G;
*(uint8_t*)&data[index+2] = B;
}
}
}
}
// Registers the script-facing API with Godot's ClassDB.
// FIX: D_METHOD now lists the argument names so the editor/GDScript sees
// named parameters instead of anonymous ones.
void Obs::_bind_methods()
{
    ClassDB::bind_method(D_METHOD("get_screen_frame", "id"), &Obs::getEncodedScreenFrame);
    ClassDB::bind_method(D_METHOD("render_frame", "frame", "material"), &Obs::renderFrameToMesh);
}
// Sets up the VP9 encoder (1920x1080 @ 30fps timebase, realtime settings),
// the VP9 decoder, and the RGB8 image buffer the decoded frames are
// converted into.
Obs::Obs()
{
    vpx_codec_err_t res{};
    // --- encoder ---
    res = vpx_codec_enc_config_default(vpx_codec_vp9_cx(), &m_cfg, 0);
    ERR_FAIL_COND_MSG(res != VPX_CODEC_OK, "VP9 cfg fail");
    m_cfg.g_w = 1920;               // TODO: expose capture resolution
    m_cfg.g_h = 1080;
    m_cfg.rc_target_bitrate = 1500; // kbit/s
    m_cfg.g_timebase.num = 1;
    m_cfg.g_timebase.den = 30;      // TODO: expose frame rate
    m_cfg.g_lag_in_frames = 0;      // no lookahead: required for realtime streaming
    m_cfg.g_threads = 4;
    res = vpx_codec_enc_init(&m_encoder, vpx_codec_vp9_cx(), &m_cfg, 0);
    ERR_FAIL_COND_MSG(res != VPX_CODEC_OK, "VP9 could not be initialized");
    // BUG FIX: VP9 cpu-used is clamped to [-9, 9]; the previous value 13 is
    // rejected by libvpx. 9 = fastest encoding / lowest quality.
    vpx_codec_control(&m_encoder, VP8E_SET_CPUUSED, 9);
    // --- decoder ---
    res = vpx_codec_dec_init(&m_decoder, vpx_codec_vp9_dx(), nullptr, 0);
    ERR_FAIL_COND_MSG(res != VPX_CODEC_OK, "Could not initialize decoder");
    // Destination buffer for decoded frames (see renderFrameToMesh).
    m_imageBuffer = Image::create(1920, 1080, false, godot::Image::FORMAT_RGB8);
}
// BUG FIX: the codec contexts were never released, leaking encoder/decoder
// state. vpx_codec_destroy is safe to call on a zero-initialized context
// (it returns an error code instead of crashing), so no init flag is needed.
Obs::~Obs()
{
    vpx_codec_destroy(&m_encoder);
    vpx_codec_destroy(&m_decoder);
}
// Captures monitor `id`, encodes it as one VP9 frame, and returns the
// resulting bitstream (empty array on failure).
PackedByteArray Obs::getEncodedScreenFrame(size_t id)
{
#ifdef PROFILER_ENABLED
    // Only measured when profiling; the old unconditional `start` produced an
    // unused-variable warning in normal builds.
    const auto start = std::chrono::high_resolution_clock::now();
#endif
    auto capturer = microtaur::WindowCapturer{};
    auto frame = capturer.capture(id);
    // BUG FIX: the old check `!&m_encoder` took the address of a member and
    // was therefore always false. Guard against a failed capture instead.
    ERR_FAIL_COND_V_MSG(frame.data.empty(), {}, "Screen capture failed");
    // Wrap the captured I420 buffer without copying it.
    vpx_image_t img;
    vpx_img_wrap(&img, VPX_IMG_FMT_I420, 1920, 1080, 1, frame.data.data());
    // BUG FIX: pass a monotonically increasing pts. The old code always passed
    // 0 (a non-advancing timestamp) and instead accumulated packet pts into
    // m_pts, which was never fed back to the encoder.
    vpx_codec_err_t res = vpx_codec_encode(&m_encoder, &img, m_pts, 1, 0, VPX_DL_REALTIME);
    ERR_FAIL_COND_V_MSG(res != VPX_CODEC_OK, {}, "Could not encode frame");
    m_pts += 1;
    vpx_codec_iter_t iter{nullptr};
    const vpx_codec_cx_pkt_t* pkt;
    PackedByteArray out;
    while ((pkt = vpx_codec_get_cx_data(&m_encoder, &iter))) {
        if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
            // BUG FIX: append packets. One encode call can emit several
            // packets; the old `resize(sz)` + memcpy to &out[0] overwrote
            // everything produced so far.
            const auto offset = out.size();
            out.resize(offset + pkt->data.frame.sz);
            std::memcpy(out.ptrw() + offset, pkt->data.frame.buf, pkt->data.frame.sz);
            m_duration += pkt->data.frame.duration;
        }
    }
#ifdef PROFILER_ENABLED
    auto end = std::chrono::high_resolution_clock::now();
    auto encodeTime = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
    m_totalEncodeTime += encodeTime;
    m_encodeCount++;
    m_avgEncodeTime = static_cast<double>(m_totalEncodeTime) / m_encodeCount;
    if (m_encodeCount % 10 == 0) {
        UtilityFunctions::print("Average Encode time: ", m_avgEncodeTime);
    }
#endif
    return out;
}
// Decodes a VP9 bitstream chunk, converts each decoded frame to RGB, and
// applies it as the albedo texture of `mat`.
void Obs::renderFrameToMesh(PackedByteArray frame, Ref<StandardMaterial3D> mat)
{
#ifdef PROFILER_ENABLED
    const auto start = std::chrono::high_resolution_clock::now();
#endif
    // BUG FIX: guard before touching frame[0] — indexing an empty
    // PackedByteArray is undefined. Also reject a null material up front.
    ERR_FAIL_COND_MSG(frame.is_empty(), "Empty frame passed to render_frame");
    ERR_FAIL_COND_MSG(mat.is_null(), "Material is null");
    vpx_codec_err_t res = vpx_codec_decode(&m_decoder, frame.ptr(), frame.size(), nullptr, 0);
    ERR_FAIL_COND_MSG(res != VPX_CODEC_OK, "VP9 decode failed");
    vpx_codec_iter_t iter = nullptr;
    vpx_image_t* img = nullptr;
    while ((img = vpx_codec_get_frame(&m_decoder, &iter)) != nullptr) {
        // BUG FIX: use the decoded frame's real dimensions instead of
        // hard-coded 1920x1080, and verify they match the preallocated
        // RGB buffer before converting.
        ERR_CONTINUE_MSG(static_cast<int>(img->d_w) != m_imageBuffer->get_width() ||
                         static_cast<int>(img->d_h) != m_imageBuffer->get_height(),
                         "Decoded frame size does not match image buffer");
        ConvertToRGB420(img->planes, img->stride, m_imageBuffer, img->d_w, img->d_h);
        if (m_imageTexture.is_null()) {
            // First frame: create the texture once, then update it in place.
            m_imageTexture = ImageTexture::create_from_image(m_imageBuffer);
        } else {
            m_imageTexture->update(m_imageBuffer);
        }
        mat->set_texture(godot::StandardMaterial3D::TEXTURE_ALBEDO, m_imageTexture);
#ifdef PROFILER_ENABLED
        auto end = std::chrono::high_resolution_clock::now();
        auto decodeTime = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
        m_totalDecodeTime += decodeTime;
        m_decodeCount++;
        m_avgDecodeTime = static_cast<double>(m_totalDecodeTime) / m_decodeCount;
        if (m_decodeCount % 10 == 0) {
            UtilityFunctions::print("Average Decode time: ", m_avgDecodeTime);
        }
#endif
    }
}
}

55
src/GodotObs.h Normal file
View File

@@ -0,0 +1,55 @@
#pragma once
#include <godot_cpp/classes/node.hpp>
#include <godot_cpp/core/class_db.hpp>
#include <godot_cpp/classes/standard_material3d.hpp>
#include <godot_cpp/classes/image.hpp>
#include <godot_cpp/classes/image_texture.hpp>
#include <vpx/vpx_encoder.h>
#include <vpx/vpx_decoder.h>
#include <vpx/vp8cx.h>
#include <vpx/vp8dx.h>
#include <vpx/vpx_decoder.h>
#include <vpx/vpx_codec.h>
namespace godot {
// Godot node wrapping a libvpx VP9 encoder/decoder pair: captures a monitor,
// encodes it to a VP9 bitstream, and can decode such a bitstream back onto a
// material's albedo texture (see GodotObs.cpp).
class Obs : public Node
{
GDCLASS(Obs, Node);
protected:
// Exposed to scripts as "get_screen_frame" and "render_frame".
static void _bind_methods();
public:
Obs();
~Obs();
// Captures monitor `id` and returns one encoded VP9 frame (empty on failure).
PackedByteArray getEncodedScreenFrame(size_t id);
// Decodes `frame` and applies the result as `mat`'s albedo texture.
void renderFrameToMesh(PackedByteArray frame, Ref<StandardMaterial3D> mat);
private:
// NOTE(review): these two flags are never written in the visible code —
// confirm whether they are still needed.
bool m_initialized{false};
bool m_decInitialized{false};
vpx_codec_enc_cfg_t m_cfg;       // encoder configuration (set in the ctor)
vpx_codec_ctx_t m_encoder{};     // VP9 encoder context
vpx_codec_ctx_t m_decoder{};     // VP9 decoder context
vpx_codec_pts_t m_pts{1};        // presentation timestamp counter
vpx_codec_pts_t m_duration{1};   // accumulated packet durations
Ref<Image> m_imageBuffer{};      // RGB8 destination for decoded frames
Ref<ImageTexture> m_imageTexture{}; // texture pushed onto the material
// Profiler accumulators (only updated when PROFILER_ENABLED is defined).
size_t m_avgEncodeTime{};
size_t m_encodeCount{};
size_t m_totalEncodeTime{};
size_t m_avgDecodeTime{};
size_t m_decodeCount{};
size_t m_totalDecodeTime{};
};
}

View File

@@ -0,0 +1,156 @@
#include "capture_window.h"
#include "Windows.h"
#include <chrono>
namespace microtaur
{
namespace
{
// A captured screen bitmap plus its pixel dimensions. The HBITMAP is owned
// by the caller (WindowCapturer::capture deletes it after conversion).
struct Bitmap {
HBITMAP bmp;
int width;
int height;
};
// State threaded through EnumDisplayMonitors to find the Nth monitor.
struct MonitorEnumData {
int targetMonitorIndex;  // monitor index we are looking for
int currentMonitorIndex; // index of the monitor currently being enumerated
HMONITOR hMonitor;       // result; nullptr if the target index was not reached
};
// EnumDisplayMonitors callback: walks monitors until the target index is
// reached, records its handle, and stops the enumeration.
BOOL CALLBACK MonitorEnumProc(HMONITOR hMonitor, HDC hdcMonitor, LPRECT lprcMonitor, LPARAM dwData) {
    auto* state = reinterpret_cast<MonitorEnumData*>(dwData);
    if (state->currentMonitorIndex != state->targetMonitorIndex) {
        ++state->currentMonitorIndex;
        return TRUE; // keep enumerating
    }
    state->hMonitor = hMonitor;
    return FALSE; // found it — stop
}
// Captures the contents of monitor `screenId` into a device-compatible
// bitmap. Returns { NULL, 0, 0 } if the monitor does not exist or any GDI
// step fails. The returned HBITMAP must be freed with DeleteObject.
Bitmap CaptureScreen(int screenId) {
    MonitorEnumData med;
    med.targetMonitorIndex = screenId;
    med.currentMonitorIndex = 0;
    med.hMonitor = nullptr;
    EnumDisplayMonitors(NULL, NULL, MonitorEnumProc, reinterpret_cast<LPARAM>(&med));
    if (med.hMonitor == nullptr) {
        // No monitor found with the given ID
        return { NULL, 0, 0 };
    }
    MONITORINFOEX mi;
    mi.cbSize = sizeof(mi);
    // FIX: every GDI call below is now checked; the old code ignored all
    // failures and leaked DCs on partial failure.
    if (!GetMonitorInfo(med.hMonitor, &mi)) {
        return { NULL, 0, 0 };
    }
    HDC hMonitorDC = CreateDC(TEXT("DISPLAY"), mi.szDevice, NULL, NULL);
    if (!hMonitorDC) {
        return { NULL, 0, 0 };
    }
    HDC hMemoryDC = CreateCompatibleDC(hMonitorDC);
    if (!hMemoryDC) {
        DeleteDC(hMonitorDC);
        return { NULL, 0, 0 };
    }
    int width = mi.rcMonitor.right - mi.rcMonitor.left;
    int height = mi.rcMonitor.bottom - mi.rcMonitor.top;
    HBITMAP hBitmap = CreateCompatibleBitmap(hMonitorDC, width, height);
    if (!hBitmap) {
        DeleteDC(hMemoryDC);
        DeleteDC(hMonitorDC);
        return { NULL, 0, 0 };
    }
    HBITMAP hOldBitmap = static_cast<HBITMAP>(SelectObject(hMemoryDC, hBitmap));
    const BOOL blitted = BitBlt(hMemoryDC, 0, 0, width, height, hMonitorDC, 0, 0, SRCCOPY);
    SelectObject(hMemoryDC, hOldBitmap);
    DeleteDC(hMemoryDC);
    DeleteDC(hMonitorDC);
    if (!blitted) {
        DeleteObject(hBitmap);
        return { NULL, 0, 0 };
    }
    return { hBitmap, width, height };
}
// Converts one 8-bit RGB pixel to YUV (BT.601-style coefficients),
// clamping each component to [0, 255]. Chroma is offset by +128.
void RGBtoYUV(BYTE R, BYTE G, BYTE B, BYTE& Y, BYTE& U, BYTE& V) {
    // Clamp an int into the valid byte range.
    const auto clamp8 = [](int value) -> BYTE {
        if (value < 0) return 0;
        if (value > 255) return 255;
        return static_cast<BYTE>(value);
    };
    Y = clamp8(static_cast<int>(0.299 * R + 0.587 * G + 0.114 * B));
    U = clamp8(static_cast<int>(-0.14713 * R - 0.28886 * G + 0.436 * B + 128));
    V = clamp8(static_cast<int>(0.615 * R - 0.51498 * G - 0.10001 * B + 128));
}
// Function to create YUV frame_data from HBITMAP
// Converts a screen bitmap into a tightly packed I420 (YUV 4:2:0) buffer:
// full-resolution Y plane (w*h bytes) followed by the 2x2-subsampled U and
// V planes (w*h/4 bytes each). Returns a zero-filled buffer on failure.
std::vector<uint8_t> CreateYUVFrameFromHBITMAP(HBITMAP hBitmap, int width, int height) {
    // Y plane + U plane + V plane.
    std::vector<uint8_t> data(width * height + (width * height) / 4 + (width * height) / 4);
    // FIX: guard against a null bitmap (e.g. failed capture) instead of
    // handing it to GetDIBits.
    if (!hBitmap || width <= 0 || height <= 0) {
        return data;
    }
    HDC hdcScreen = GetDC(NULL);
    HDC hdcMem = CreateCompatibleDC(hdcScreen);
    BITMAPINFOHEADER bi;
    memset(&bi, 0, sizeof(bi));
    bi.biSize = sizeof(BITMAPINFOHEADER);
    bi.biWidth = width;
    bi.biHeight = -height; // negative height => top-down row order
    bi.biPlanes = 1;
    bi.biBitCount = 24;    // request 24-bit RGB regardless of screen depth
    bi.biCompression = BI_RGB;
    // Bitmap scanlines are padded to 4-byte boundaries; compute the size
    // ourselves instead of relying on GetDIBits filling in biSizeImage
    // (driver-dependent for BI_RGB).
    const int stride = ((width * 24 + 31) / 32) * 4;
    // FIX: RAII buffer replaces the old raw new[]/delete[] pair, which
    // leaked on any early exit.
    std::vector<BYTE> rgbData(static_cast<size_t>(stride) * height);
    const int copied = GetDIBits(hdcMem, hBitmap, 0, height, rgbData.data(), (BITMAPINFO*)&bi, DIB_RGB_COLORS);
    DeleteDC(hdcMem);
    ReleaseDC(NULL, hdcScreen);
    if (copied == 0) {
        return data; // conversion failed; return the zero-filled frame
    }
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            // Index into the padded BGR scanline.
            const int i = (y * stride) + (x * 3);
            const BYTE B = rgbData[i];
            const BYTE G = rgbData[i + 1];
            const BYTE R = rgbData[i + 2];
            BYTE Y, U, V;
            RGBtoYUV(R, G, B, Y, U, V);
            data[y * width + x] = Y;
            // 4:2:0 subsampling: one U/V sample per 2x2 pixel block.
            if (x % 2 == 0 && y % 2 == 0) {
                const int uvIndex = (y / 2) * (width / 2) + (x / 2);
                data[width * height + uvIndex] = U;                            // U plane
                data[width * height + (width * height / 4) + uvIndex] = V;     // V plane
            }
        }
    }
    return data;
}
}
// Captures monitor `id` and returns it as an I420 frame.
// Returns an empty Frame (width/height 0, empty data) if the capture fails.
Frame WindowCapturer::capture(size_t id)
{
    // FIX: removed the unconditional chrono `start` that was never read.
    auto bitmap = CaptureScreen(static_cast<int>(id));
    // FIX: bail out instead of converting a null bitmap when the monitor id
    // is invalid or the GDI capture failed.
    if (!bitmap.bmp) {
        return Frame{0, 0, {}};
    }
    auto out = Frame{
        static_cast<size_t>(bitmap.width),
        static_cast<size_t>(bitmap.height),
        CreateYUVFrameFromHBITMAP(bitmap.bmp, bitmap.width, bitmap.height)
    };
    // The bitmap is owned by this function; release it once converted.
    DeleteObject(bitmap.bmp);
    return out;
}
}

View File

@@ -0,0 +1,20 @@
#pragma once
#include <stdint.h>
#include <vector>
namespace microtaur {
// One captured screen frame. `data` holds tightly packed I420 (YUV 4:2:0)
// bytes: Y plane (width*height), then U and V planes (width*height/4 each).
struct Frame
{
size_t width;
size_t height;
std::vector<uint8_t> data;
};
// Captures a monitor's contents as an I420 Frame (Win32/GDI implementation
// in platform/win32/capture_window.cpp).
class WindowCapturer
{
public:
// `id` is the zero-based monitor index; returns an empty Frame on failure.
Frame capture(size_t id = 0);
};
}