// HumanRender/human_render/LiveRoom/SharedMemory.cpp

#include "LiveRoom/SharedMemory.h"
#include <Windows.h>
#include <iostream>
#include <stdio.h>
#include "LiveRoom/Const.h"
DWORD nINdexFrame = 0; // Index of the frame currently being read out of the 25*5 frames in shared memory; shared by audio and video, maintained while reading the audio data
// Fetch the audio data that corresponds to one video frame.
// Returns 0 when the read succeeds.
int Audio_share_memory_get_oneframe(unsigned char* pAudiobuffer_per, LPVOID pAudio_Memory)
{
    // There is a memory copy here, which may cost some performance; using the
    // pointer directly would risk the writer modifying the data while it is
    // still being played.
    LONGLONG dwFileOffset = static_cast<LONGLONG>(nINdexFrame) * Audio_buffer_size_per_get;
    unsigned char* pcontent = static_cast<unsigned char*>(pAudio_Memory);
    pcontent = pcontent + dwFileOffset;
    printf("read audio %d %d %d %d.\n", *pcontent, *(pcontent + 1), *(pcontent + 2), *(pcontent + 3));
    ZeroMemory(pAudiobuffer_per, Audio_buffer_size_per_get);
    CopyMemory(pAudiobuffer_per, pcontent, Audio_buffer_size_per_get);
    return 0;
}
// Fetch one frame of video data.
int Video_share_memory_get_oneframe(unsigned char* pVideobuffer_per, LPVOID pVideo_Memory)
{
    //printf("read %d video data.\n", nINdexFrame);
    LONGLONG dwFileOffset = static_cast<LONGLONG>(nINdexFrame) * Video_buffer_size_per_get;
    if (nINdexFrame == Share_memory_frame_count - 1)
    {
        // Reached the last frame; wrap back to the start of the ring
        nINdexFrame = 0;
    }
    else
    {
        nINdexFrame = nINdexFrame + 1;
    }
    unsigned char* pcontent = static_cast<unsigned char*>(pVideo_Memory);
    pcontent = pcontent + dwFileOffset;
    //printf("read %d frame video data.\n", nINdexFrame);
    ZeroMemory(pVideobuffer_per, Video_buffer_size_per_get);
    CopyMemory(pVideobuffer_per, pcontent, Video_buffer_size_per_get);
    return 0;
}
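// The _Ex variants below return a pointer straight into the shared memory
// (no copy) and, unlike Video_share_memory_get_oneframe above, do not advance
// nINdexFrame; the caller advances it explicitly via Memory_read_index_add().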
// Fetch one frame of video image data (or mask image data) without a memory copy.
unsigned char* Video_share_memory_get_oneframe_Ex(LPVOID pVideo_Memory)
{
    //printf("read %d video data.\n", nINdexFrame);
    // char szbuffer[64] = { 0 };
    // sprintf_s(szbuffer, "play video speechID :%d", nINdexFrame);
    // WriteLog(szbuffer);
    LONGLONG dwFileOffset = static_cast<LONGLONG>(nINdexFrame) * Video_buffer_size_per_get;
    unsigned char* pcontent = static_cast<unsigned char*>(pVideo_Memory);
    pcontent = pcontent + dwFileOffset;
    return pcontent;
}
// Fetch the mask image data that corresponds to one video frame, without a memory copy.
unsigned char* Mask_share_memory_get_oneframe_Ex(LPVOID pMask_Memory)
{
    //printf("read %d video data.\n", nINdexFrame);
    LONGLONG dwFileOffset = static_cast<LONGLONG>(nINdexFrame) * Video_buffer_size_per_get;
    unsigned char* pcontent = static_cast<unsigned char*>(pMask_Memory);
    pcontent = pcontent + dwFileOffset;
    //
    // //printf("read %d frame video data.\n", nINdexFrame);
    // char szbuffer[64] = { 0 };
    // sprintf_s(szbuffer, "read index :%d", nINdexFrame);
    // WriteLog(szbuffer);
    return pcontent;
}
unsigned long long nTotalFrameCnt = 0; // Total number of video frames played so far
// Advance the read index by one frame, wrapping around the ring.
void Memory_read_index_add()
{
    nTotalFrameCnt = nTotalFrameCnt + 1;
    nINdexFrame = (nINdexFrame + 1) % Share_memory_frame_count;
}
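// A typical per-frame read sequence, inferred from the helpers in this file
// (the real caller lives elsewhere and may differ; the p*_Memory names below
// are placeholders for the mapped shared-memory views):
//   DWORD memIndex = Index_share_memory_get_cur_index(pIndex_Memory);   // writer's progress
//   if (!Server_is_slow(get_cur_play_video_index(), memIndex)) {
//       unsigned char* video = Video_share_memory_get_oneframe_Ex(pVideo_Memory);
//       unsigned char* mask  = Mask_share_memory_get_oneframe_Ex(pMask_Memory);
//       unsigned char* audio = Audio_share_memory_get_oneframe_Ex(pAudio_Memory, pEndFlag_Memory);
//       // ...render video/mask and queue audio...
//       Memory_read_index_add();                                        // move to the next frame
//   }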
// Get the audio data for the video frame at the current read index, without a memory copy.
unsigned char* Audio_share_memory_get_oneframe_Ex(LPVOID pAudio_Memory, LPVOID pEndFlag_Memory)
{
    // char szbuffer[64] = { 0 };
    LONGLONG dwFileOffset = static_cast<LONGLONG>(nINdexFrame) * Audio_buffer_size_per_get;
    unsigned char* pcontent = static_cast<unsigned char*>(pAudio_Memory);
    pcontent = pcontent + dwFileOffset;
    //
    // sprintf_s(szbuffer, "play audio :%d %lld", nINdexFrame, dwFileOffset);
    // WriteLog(szbuffer);
    static int s_uiEndFlag = 1;
    unsigned char* pcontentFlag = static_cast<unsigned char*>(pEndFlag_Memory);
    /*memset(szbuffer, 0, 256);
    sprintf_s(szbuffer, "r0:pcontentFlag: %d\n", pcontentFlag);
    WriteLog(szbuffer);*/
    // End-flag entries appear to be stored per 5-frame chunk, hence the division by 5.
    UINT dwFileOffsetFlag = nINdexFrame * End_flag_buffer_size_per_get / 5;
    pcontentFlag = pcontentFlag + dwFileOffsetFlag;
    // A validity check should probably be added here.
    UINT uiEndFlagData = *pcontentFlag;
    /*memset(szbuffer, 0, 256);
    sprintf_s(szbuffer, "r1:pcontentFlag: %d dwFileOffsetFlag:%d uiEndFlagData=%d w: s_uiEndFlag=%d\n", pcontentFlag, dwFileOffsetFlag, uiEndFlagData, s_uiEndFlag);
    WriteLog(szbuffer);*/
    if (s_uiEndFlag != uiEndFlagData)
    {
        s_uiEndFlag = uiEndFlagData;
        if (1 == s_uiEndFlag)
        {
            // // Send a notification that playback has gone silent
            // udpSocket.SendData("127.0.0.1", 15497, g_strPushEnd.c_str(), g_strPushEnd.length());
            // char szbuffer[64] = { 0 };
            // sprintf_s(szbuffer, "sending silence notification\n");
            // WriteLog(szbuffer);
        }
    }
    return pcontent;
}
std::string Speech_Id_Cache_get_oneframe_Ex(LPVOID pSpeech_Id_Cache_Memory)
{
    // char szbuffer[64] = { 0 };
    // sprintf_s(szbuffer, "play audio speechID :%d", nINdexFrame);
    // WriteLog(szbuffer);
    LONGLONG dwFileOffset = static_cast<LONGLONG>(nINdexFrame) * Speech_Id_Cache_buffer_size_per_get / 5;
    char* pcontent = static_cast<char*>(pSpeech_Id_Cache_Memory);
    pcontent = pcontent + dwFileOffset;
    // Note: constructing the std::string this way relies on the speech-id entry
    // in shared memory being NUL-terminated.
    return std::string(pcontent);
}
// Read the writer's current index from shared memory. Dividing the stored value
// by 6400 tells which 0.2-second chunk is complete (a value of 2 means the second
// 0.2-second chunk of audio and video is ready); the stored value should be a
// multiple of 6400.
DWORD Index_share_memory_get_cur_index(LPVOID pIndex_Memory)
{
    int* pcontent = static_cast<int*>(pIndex_Memory);
    // A validity check should probably be added here.
    int indexdata = *pcontent;
    int tmpIndex = indexdata / 1280;
    if (tmpIndex == 0)
    {
        // 0 means the 25th 0.2-second chunk is ready, i.e. audio and video for
        // frames 120 to 124 are available.
        return Share_memory_frame_count;
    }
    //
    // char szbuffer[64] = { 0 };
    // sprintf_s(szbuffer, "memory index : %d, data: %d", tmpIndex, indexdata);
    // WriteLog(szbuffer);
    return tmpIndex;
}
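// Worked example for Index_share_memory_get_cur_index, using the server log
// values quoted in the Server_is_slow comments below: a stored value of 6400
// gives 6400 / 1280 = 5, i.e. the first 0.2-second chunk (frames 0..4) is ready;
// a stored value of 160000 gives 160000 / 1280 = 125, i.e. the whole ring has
// been written.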
// The curIndex passed in here has already been corrected by the caller.
void Index_share_memory_set_cur_index(DWORD curIndex)
{
    // Move the read pointer back 5 frames relative to the given index.
    nINdexFrame = curIndex - 5;
}
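// Example: curIndex = 10 sets nINdexFrame to 5, so the reader starts 5 frames
// (0.2 s at 40 ms per frame) behind the write index.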
// Whether the shared-memory data referred to by the current index has finished
// playing. The logic here may be questionable.
BOOL current_index_data_is_played_over(DWORD curIndex)
{
    int remainder = curIndex % 5;
    if (remainder == 0)
        return TRUE;
    else
        return FALSE;
}
DWORD get_cur_play_video_index()
{
    return nINdexFrame;
}
// Write the index of the currently playing video frame into the "read" shared memory.
// The index runs 0, 1, 2, ... 124; each frame corresponds to 40 ms of playback.
void Bota_Video_Read_share_memory_set_cur_index(LPVOID pRead_Memory)
{
    // char szbuffer[64] = { 0 };
    // sprintf_s(szbuffer, "bota read index : %llu", nTotalFrameCnt);
    // WriteLog(szbuffer);
    UINT* pcontent = static_cast<UINT*>(pRead_Memory);
    *pcontent = static_cast<UINT>(nTotalFrameCnt);
}
// Determine whether the current playback position has caught up with (or passed)
// the position written in shared memory; if so, the server is falling behind.
// Assumes the memory index covers blocks of 5 frames.
// The server side logs e.g. "put:120.0 6400 153600 160000"; the value written to
// shared memory there is 160000.
// memIndex takes values 5, 10, ... 125 (the write index).
// curIndex takes values 0, 1, 2, ... 124 (the read index).
// (curIndex + Share_memory_frame_count - a configured gap such as 10) % Share_memory_frame_count
/*
 * memindex 5 120-4
 */
BOOL Server_is_slow(DWORD curIndex, DWORD memIndex)
{
    if (curIndex < memIndex)
    {
        return FALSE;
    }
    else
    {
        if (curIndex == Share_memory_frame_count - 1)
        {
            return FALSE;
        }
        return TRUE;
    }
}
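// Example (following the ranges documented above): curIndex = 10, memIndex = 10
// means the reader has caught up with the writer, so Server_is_slow() returns
// TRUE; curIndex = 9, memIndex = 10 returns FALSE.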