#include "QtCameraHardWareCopilot.h"
#include "Timer.h"
#include "json.hpp"
// FIX(review): the original fourth include lost its target when the file's
// angle-bracket text was stripped ("#include" with no header name).
// <thread> is restored because std::thread is used below -- confirm against VCS.
#include <thread>

// Master switch polled by the background processing loops
// (ThreadProcessAll / ThreadProcessAllGpu); clearing it ends those threads.
bool QtCameraHardWareCopilot::s_bSwitchProcess = true;

// Camera sensor dimensions for the two mounting orientations.
// Units are not recoverable from this file -- presumably millimetres; verify.
const float VERTICAL_CAMERA_WIDTH = 3.76963;
const float VERTICAL_CAMERA_HEIGHT = 5.45;
const float HORIZONTAL_CAMERA_WIDTH = 6;
const float HORIZONTAL_CAMERA_HEIGHT = 4.5;

/// Main-window constructor: wires up the UI widgets, zero-initializes all
/// pano/camera state, and launches the detached background stitching thread.
/// @param parent  standard Qt parent widget (may be null).
QtCameraHardWareCopilot::QtCameraHardWareCopilot(QWidget *parent)
	: QMainWindow(parent)
{
	ui.setupUi(this);

	// init timer: 40 ms interval => ~25 UI refreshes per second
	m_pUpdateTimer = new QTimer(this);
	m_pUpdateTimer->setInterval(40);
	SetConnect();

	// init the QtlineEditors (initial text, numeric-only flag, maximum value)
	ui.HorizontalOffsetEdit->init("0", true, PANO_IMAGE_BOUNDING_WIDTH);
	ui.VerticalOffsetEdit->init("0", true, PANO_IMAGE_BOUNDING_WIDTH);
	ui.LightEdit->init("0", true, 255);

	// init the CameraQueueScene
	ui.OffsetOverviewView->setScene(&m_CameraQueueScene);
	//init the CameraQueueScene rect
	//m_CameraQueueScene.setSceneRect(0, 0, ui.OffsetOverviewView->width(), ui.OffsetOverviewView->height());

	// init the combo box of the modify model (entries: single camera / camera group)
	ui.comboBoxSelectModel->insertItem(0, QString::fromLocal8Bit("单个相机"));
	ui.comboBoxSelectModel->insertItem(1, QString::fromLocal8Bit("相机组"));
	ui.comboBoxSelectModel->setCurrentIndex(0);
	emit ui.comboBoxSelectModel->currentIndexChanged(0);

	ui.OffsetOverviewView->setMouseTracking(true);
	// NOTE(review): QWidget::rect() returns a QRect BY VALUE, so this setRect
	// mutates a temporary and has no effect. Left as-is because the intended
	// call (setSceneRect? viewport geometry?) cannot be confirmed from here.
	ui.OffsetOverviewView->viewport()->rect().setRect(0, 0, CAMERA_SCENE_WIDTH, CAMERA_SCENE_HEIGHT);
	ui.OffsetOverviewView->setDragMode(ui.OffsetOverviewView->NoDrag);
	ui.OffsetOverviewView->setHorizontalScrollBarPolicy(Qt::ScrollBarAlwaysOff);
	ui.OffsetOverviewView->setVerticalScrollBarPolicy(Qt::ScrollBarAlwaysOff);

	//init the current Selected CameraItem pointer
	m_pCurrentCameraItem = nullptr;
	//init the Camerainfo pointer
	m_pCurrentCameraInfo = nullptr;
	m_CurrentFramePtr = nullptr;

	//init the pano image (allocated later by ResetPanoImageBuffer)
	m_pPanoImageBuffer = nullptr;
	m_nPanoWidth = 0;
	m_nPanoHeight = 0;
	m_nPanoPitch = 0;

	m_pUpdateTimer->start();
	m_PanoPainter.SetPixmap(&m_PanoPixmap);

	// Launch the CPU processing loop. NOTE(review): the thread is detached and
	// the std::thread object leaks; it can only be stopped via s_bSwitchProcess.
	// Kept as-is to preserve the existing shutdown behavior.
	m_pRefreshProcessThread = new std::thread(ThreadProcessAll, this);
	m_pRefreshProcessThread->detach();

	m_CurShowMode = ShowMode::Fit;
	m_bMousePressOrNot = false;
	m_bCameraItemModified = false;
	m_bBlockThread = false;
	m_StitchingTailor.bUseYUV = false;
	m_bCameraItemMoveModel = true;
	CameraItem::pCameraItemModified = &m_bCameraItemModified;

	//init the m_grouplist
	m_CameraItemList.clear();
	m_CameraQueueScene.SetGripper(&m_CameraGripper);

	// default to the vertical-orientation sensor dimensions
	m_fCameraSubStreamToMainStreamHeight = VERTICAL_CAMERA_HEIGHT;
	m_fCameraSubStreamToMainStreamWidth = VERTICAL_CAMERA_WIDTH;
}

QtCameraHardWareCopilot::~QtCameraHardWareCopilot()
{
}

/// Pull the newest decoded frame from every active camera, apply the per-camera
/// brightness offset in place, swap it in as the camera's current frame, and
/// enqueue a stitching work order for it.
/// @param pClass  the main-window instance whose camera list is processed
///                (static-style call; matches the thread-entry convention).
/// @return always true.
bool QtCameraHardWareCopilot::reFreshAllCameraFrame(QtCameraHardWareCopilot* pClass)
{
	//use all the CameraItem to update the image
	for (auto& group : pClass->m_CameraItemList)
	{
		for (auto& item : group.CameraItemGroup)
		{
			//prepare the image demands param
			int nWidth;
			int nHeight;
			int nPicth;
			unsigned char* pImage = NULL;

			//use the CameraItem to update the image
			if (!item.bActive)
				continue;
			cv::Mat& matFromI420 = item.m_CurrentFrame;
			if (item.m_VideoStreamCatcher.m_pVideoDataManager->GetDecoderDataCount() <= 0)
			{
				continue;
			}
			item.m_VideoStreamCatcher.GetVideoImage(pImage, nWidth, nHeight, nPicth);

			// Apply the brightness offset in place.
			// NOTE(review): the buffer is wrapped as CV_8UC3 here even though the
			// YUV branch below treats the same buffer as CV_8UC1 -- when bUseYUV
			// is set this touches 3x the intended bytes. Confirm buffer layout.
			{
				cv::Mat YUVImageYChannel = cv::Mat(nHeight, nWidth, CV_8UC3, pImage);
				cv::add(YUVImageYChannel, cv::Scalar(item.m_CameraInfo.nLightOffset), YUVImageYChannel);
			}

			g_CurFrameMutex.lock();
			// Re-wrap the freshly fetched decoder buffer and release the previous one.
			matFromI420.deallocate();
			if (pClass->m_StitchingTailor.bUseYUV)
			{
				matFromI420 = cv::Mat(nHeight, nWidth, CV_8UC1, pImage);
				//libyuv::CopyRow_Any_SSE2(pImage, matFromI420.data, matFromI420.step * matFromI420.rows);
			}
			else
			{
				matFromI420 = cv::Mat(nHeight, nWidth, CV_8UC3, pImage);
			}
			item.m_VideoStreamCatcher.m_pVideoDataManager->ReleaseDecoderData(item.pCurrentFrame);
			item.pCurrentFrame = pImage;
			g_CurFrameMutex.unlock();

			//calculate the lefttop pt in the PanoImage
			int StitchingLeft = PANO_IMAGE_BOUNDING_WIDTH + item.nItemColIndex * nWidth + item.m_CameraInfo.nHorizonOffset;
			int StitchingTop = PANO_IMAGE_BOUNDING_WIDTH + item.nItemRowIndex * nHeight + item.m_CameraInfo.nVerticalOffset;
			pClass->m_StitchingTailor.AddWorkOrderInqueue(cv::Point(StitchingLeft, StitchingTop),
				//item->pCurrentFrame,nWidth,nHeight,nPicth,
				&item.m_CurrentFrame,
				&item.m_vvOverlapBounding);
		}
	}
	return true;
}

/// GPU variant of reFreshAllCameraFrame: swaps each camera's device-side frame
/// for the newest decoded one and stitches it directly onto the pano YUV surface.
void QtCameraHardWareCopilot::reFreshAllCameraFrameGpu()
{
	//use all the CameraItem to update the image
	for (auto& group : m_CameraItemList)
	{
		for (auto& item : group.CameraItemGroup)
		{
			//prepare the image demands param
			int nWidth;
			int nHeight;
			int nPicth;
			unsigned char* pImage = nullptr;

			//use the CameraItem to update the image
			GPU_DATA& CurGpuData = item.m_CurrentGpuData;
			if (item.m_VideoStreamCatcher.m_pVideoDataManager->GetDecoderDataCount() <= 0)
				continue;
			item.m_VideoStreamCatcher.GetVideoImage(pImage, nWidth, nHeight, nPicth);
			//{
			//	cv::Mat YUVImageYChannel = cv::Mat(nHeight, nWidth, CV_8UC1, pImage);
			//	cv::add(YUVImageYChannel, cv::Scalar(item->m_CameraInfo.nLightOffset), YUVImageYChannel);
			//}

			// Free the previous device buffer and adopt the new one.
			// NOTE(review): the cudaFree result is not checked anywhere.
			cudaError Err = cudaFree(CurGpuData.pData);
			CurGpuData.pData = pImage;
			CurGpuData.nWidth = nWidth;
			CurGpuData.nHeight = nHeight;
			CurGpuData.nPitch = nPicth;

			//calculate the lefttop pt in the PanoImage
			int StitchingLeft = PANO_IMAGE_BOUNDING_WIDTH + item.nItemColIndex * nWidth + item.m_CameraInfo.nHorizonOffset;
			int StitchingTop = PANO_IMAGE_BOUNDING_WIDTH + item.nItemRowIndex * nHeight + item.m_CameraInfo.nVerticalOffset;
			m_StitchingTailor.StitchingOnPanoYUVByGPU(cv::Point(StitchingLeft, StitchingTop),
				item.m_CurrentGpuData.pData,
				item.m_CurrentGpuData.nWidth,
				item.m_CurrentGpuData.nHeight,
				item.m_CurrentGpuData.nPitch,
				&item.m_vvOverlapBounding);
			/* m_StitchingTailor.AddWorkOrderInQueueGpu( cv::Point(StitchingLeft, StitchingTop), item->m_CurrentGpuData.pData, item->m_CurrentGpuData.nWidth, item->m_CurrentGpuData.nHeight, item->m_CurrentGpuData.nPitch, &item->m_vvOverlapBounding);*/
		}
	}
}

/// Set a function to achieve image
/// fusion (Laplacian-pyramid variant). The entire implementation is commented
/// out -- retained for reference; the function is currently a no-op.
/// @param pClass  pointer to the main-window class.
void QtCameraHardWareCopilot::LaplaceImageFusion(QtCameraHardWareCopilot* pClass)
{
	////use opencv to achieve the image fusion
	////first to get the panoImage size
	//int nPanoWidth = pClass->m_nPanoWidth;
	//int nPanoHeight = pClass->m_nPanoHeight;
	//int nPanoPicth = pClass->m_nPanoPitch;
	////set a iter to get the CameraItem
	//std::list::iterator Iter = pClass->m_CameraItemList.begin();
	////Calculate the Current Stitching Rect
	//int nListSize = int(pClass->m_CameraItemList.size());
	//if (nListSize < 2)
	//	return;
	//for (int i = 0; i < nListSize; i++)
	//{
	//	if ((*Iter)->pIntersectInstance == NULL)
	//		continue;
	//	//Use quote to get the CameraItem
	//	CameraItem& CameraItem1 = **(Iter);
	//	CameraItem& CameraItem2 = *((*Iter)->pIntersectInstance);
	//	//Widen the overlap Rect
	//	MfcLabel::fRect OverlapRect = CameraItem1.IntersectRect;
	//	/*int OverLapLeft = PANO_IMAGE_BOUNDING_WIDTH + CameraItem1.m_CameraInfo.nHorizonOffset + CameraItem1.m_VideoStreamCatcher.m_nVideoWidth * CameraItem1.nItemIndex - 10;
	//	int OverLapRight = OverLapLeft + CameraItem1.IntersectRect.width() + 20;
	//	int OverLapTop = PANO_IMAGE_BOUNDING_WIDTH + CameraItem1.m_CameraInfo.nVerticalOffset - 10;
	//	*/
	//	//OverlapRect.Left = OverlapRect.Left-10 < 0 ? 0 : OverlapRect.Left-10;
	//	// OverlapRect.Right = OverlapRect.Right+10 > pClass->m_nPanoWidth ? pClass->m_nPanoWidth : OverlapRect.Right+10;
	//	//OverlapRect.Top += PANO_IMAGE_BOUNDING_WIDTH;
	//	//OverlapRect.Bottom += PANO_IMAGE_BOUNDING_WIDTH;
	//	//get target image point
	//	//unsigned char* pTargetImage = pClass->m_pPanoImageBuffer + OverlapRect.Top * nPanoPicth + OverlapRect.Left * 3;
	//	//calculate the laplace fusion in widen intersect
	//	pClass->m_ImageFusion.FusionImageByLaplacePyramid(
	//		pClass->m_pPanoImageBuffer,
	//		cv::Rect(OverlapRect.Left, OverlapRect.Top, OverlapRect.Width(), OverlapRect.Height()),
	//		NULL,
	//		cv::Size(0,0),
	//		3,
	//		3,
	//		0.5,
	//		cv::Point(0,0)
	//	);
	//}
}

/// Per-camera-pair additive color balancing. The entire implementation is
/// commented out -- retained for reference; the function is currently a no-op.
/// @param pClass  pointer to the main-window class.
void QtCameraHardWareCopilot::AddWeightColorBalance(QtCameraHardWareCopilot* pClass)
{
	// //calculate the weight of each camera
	// //define a iter to get the CameraItem
	// std::list::iterator Iter = pClass->m_CameraItemList.begin();
	// //define a int to get the list size
	// int nListSize = int(pClass->m_CameraItemList.size());
	// //if size is less than 2,its dont have a intersect, return
	// if (nListSize < 2)
	//	return;
	// //calculate the rect need to be balanced
	// for (int Item = 0; Item < nListSize; Item++)
	// {
	//	CameraItem& CameraInfer = **Iter;
	//	if ((*Iter)->pIntersectInstance == NULL)
	//		continue;
	//	CameraItem& CameraTarget = *((*Iter)->pIntersectInstance);
	//	//get the camera1 intersect rect
	//	MfcLabel::fRect IntersectRect = CameraInfer.IntersectRect;
	//	//get the camera2 intersect rect
	//	MfcLabel::fRect IntersectRect2 = CameraInfer.IntersectRect;
	//	IntersectRect2.Left = 0;
	//	IntersectRect2.Right = CameraInfer.IntersectRect.width();//IntersectRect2.Right - CameraTarget.nItemIndex* CameraTarget.m_VideoStreamCatcher.m_nVideoWidth - CameraTarget.m_CameraInfo.nHorizonOffset - PANO_IMAGE_BOUNDING_WIDTH;
	//	IntersectRect2.Top = CameraInfer.m_CameraInfo.nVerticalOffset < 0 ? -CameraInfer.m_CameraInfo.nVerticalOffset : 0;
	//	IntersectRect2.Bottom = IntersectRect2.Top + CameraInfer.IntersectRect.height();
	//	//get the target image
	//	unsigned char* pTargetImage = NULL;
	//	int nTargetWidth = 0;
	//	int nTargetHeight = 0;
	//	int nTargetPitch = 0;
	//	cv::Mat& matTarget = CameraTarget.m_CurrentFrame;
	//	nTargetWidth = matTarget.cols;
	//	nTargetHeight = matTarget.rows;
	//	nTargetPitch = matTarget.step;
	//	pTargetImage = matTarget.data;
	//	pClass->m_ImageColorBalance.FusionImageByBorderColor(
	//		pClass->m_pPanoImageBuffer,
	//		cv::Rect(IntersectRect.Left, IntersectRect.Top, IntersectRect.Width(), IntersectRect.Height()),
	//		pTargetImage,
	//		cv::Rect(IntersectRect2.Left, IntersectRect2.Top, IntersectRect2.Width(), IntersectRect2.Height())
	//	);
	//	int X = 10;
	// }
	//check current frequency count
	//int& FrequencyCount = pClass->m_ImageColorBalance.GetRefreshFrequency();
	//if(FrequencyCount == 0)
	//{
	//	FrequencyCount = pClass->m_ImageColorBalance.GetStdRefreshFrequency();
	//	//Complete once balance weight calculate
	//}
	//else
	//{
	//	FrequencyCount--;
	//}
}

/// Background CPU processing loop (thread entry point; started detached from
/// the constructor, terminated by clearing s_bSwitchProcess).
/// Normal pass: refresh every camera frame and (re)initialize the fusion and
/// color-balance engines against the current pano buffer. When the camera list
/// was modified (or the thread is blocked) it instead waits for the tailor,
/// rebuilds the pano buffer and recomputes the intersect rectangles.
/// @param pClass  owning main-window instance.
void QtCameraHardWareCopilot::ThreadProcessAll(QtCameraHardWareCopilot* pClass)
{
	//set a while to process all data from decoder
	while (s_bSwitchProcess)
	{
		if (!pClass->m_bBlockThread && !pClass->m_bCameraItemModified)
		{
			// No pano buffer allocated yet -- spin until one exists.
			// NOTE(review): busy-wait with no sleep/yield; burns a core.
			if (pClass->m_pPanoImageBuffer == NULL)
				continue;
			reFreshAllCameraFrame(pClass);
			//pClass->MeanWeightBalanceImage();
			//pClass->PushImageToQueue();
			pClass->m_ImageFusion.Init(pClass->m_pPanoImageBuffer, pClass->m_nPanoWidth, pClass->m_nPanoHeight, pClass->m_nPanoPitch);
			if (pClass->m_CameraItemList.size() != 0)
				pClass->m_ImageColorBalance.Init(pClass->m_pPanoImageBuffer, pClass->m_nPanoWidth, pClass->m_nPanoHeight, pClass->m_nPanoPitch,
					pClass->m_CameraItemList.front().CameraItemGroup.front().m_VideoStreamCatcher.m_nOutputImageWidth,
					pClass->m_CameraItemList.front().CameraItemGroup.front().m_VideoStreamCatcher.m_nOutputImageHeight,
					pClass->m_CameraItemList.front().CameraItemGroup.front().m_VideoStreamCatcher.m_nOutputImagePitch,
					25);
			//calculate overlap rect
			//AddWeightColorBalance(pClass);
			//pClass->ThinRectWindowMeanImage(pClass);
			// pClass->BlendingGradientImage(pClass);
		}
		else
		{
			// Wait for the tailor thread to finish its current work order.
			// NOTE(review): spin-wait on a flag written by another thread --
			// confirm bTailorThreadWork is declared volatile/atomic.
			while (pClass->m_StitchingTailor.bTailorThreadWork)
			{
				continue;
			}
			g_PanoMutex.lock();
			pClass->m_StitchingTailor.ClearWorkOrder();
			if (pClass->m_bCameraItemModified)
			{
				pClass->ResetPanoImageBuffer();
			}
			pClass->m_StitchingTailor.Init(pClass->m_pPanoImageBuffer, pClass->m_nPanoWidth, pClass->m_nPanoHeight, cv::Point(0,0));
			pClass->CalculateAllItemIntersectRectWithOutBlender();
			g_PanoMutex.unlock();
		}
	}
}

/// GPU counterpart of ThreadProcessAll: refreshes device-side frames, runs the
/// GPU gradient blend, and re-initializes the GPU tailor after list changes.
/// @param pClass  owning main-window instance.
void QtCameraHardWareCopilot::ThreadProcessAllGpu(QtCameraHardWareCopilot* pClass)
{
	//set a while to process all data from decoder
	while (s_bSwitchProcess)
	{
		if (!pClass->m_bBlockThread && !pClass->m_bCameraItemModified)
		{
			// Busy-wait until a pano buffer exists (see note in ThreadProcessAll).
			if (pClass->m_pPanoImageBuffer == NULL)
				continue;
			TimerCounter Timer;
			pClass->reFreshAllCameraFrameGpu();
			pClass->m_ImageFusion.Init(pClass->m_pPanoImageBuffer, pClass->m_nPanoWidth, pClass->m_nPanoHeight, pClass->m_nPanoPitch);
			pClass->m_ImageFusion.Init_Gpu(pClass->m_pPanoImageBufferGPU, pClass->m_nPanoWidth, pClass->m_nPanoHeight, pClass->m_nPanoPitch);
			Timer.Start();
			//calculate overlap rect
			//AddWeightColorBalance(pClass);
			//pClass->ThinRectWindowMeanImage(pClass);
			pClass->BlendingGradientImageGpu();
			Timer.Stop();
			// Debug leftovers: timing result is computed but not consumed.
			double nTime = Timer.GetTime();
			int x = 0;
		}
		else
		{
			// Wait for the tailor thread (same spin-wait caveat as above).
			while (pClass->m_StitchingTailor.bTailorThreadWork)
			{
				continue;
			}
			g_PanoMutex.lock();
			pClass->m_StitchingTailor.ClearWorkOrder();
			if (pClass->m_bCameraItemModified)
			{
				pClass->ResetPanoImageBuffer();
			}
			pClass->m_StitchingTailor.Init_Gpu(pClass->m_pPanoImageBuffer, pClass->m_pPanoImageBufferGPU, pClass->m_nPanoWidth, pClass->m_nPanoHeight, cv::Point(0, 0));
			pClass->CalculateAllItemIntersectRect();
			g_PanoMutex.unlock();
		}
	}
}

/// CPU thin-rect sliding-window mean fusion across every overlapping pair.
/// @param pClass  unused inside; kept for signature parity with the other helpers.
void QtCameraHardWareCopilot::ThinRectWindowMeanImage(QtCameraHardWareCopilot* pClass)
{
	//first to get the
panoImage size int nPanoWidth = m_nPanoWidth; int nPanoHeight = m_nPanoHeight; int nPanoPicth = m_nPanoPitch; //define a list iterator to get the group of CameraItem std::list::iterator Group = m_CameraItemList.begin(); for (int Row = 0; Row ::iterator Iter = (Group)->CameraItemGroup.begin(); //Calculate the Current Stitching Rect int nListSize = int((Group++)->CameraItemGroup.size()); if (nListSize < 2) continue; for (int i = 0; i < nListSize; i++) { if ((*Iter).pIntersectInstance == NULL) continue; //Use quote to get the CameraItem CameraItem& CameraItem1 = *(Iter); CameraItem& CameraItem2 = *((*Iter).pIntersectInstance); //Widen the overlap Rect MfcLabel::fRect OverlapRect = CameraItem1.IntersectRect; //扩展窗口 OverlapRect.Left -= 10; OverlapRect.Right += 10; QRectF CurCameraItemRect( PANO_IMAGE_BOUNDING_WIDTH + CameraItem1.m_CameraInfo.nHorizonOffset + CameraItem1.m_VideoStreamCatcher.m_nVideoWidth * CameraItem1.nItemColIndex, PANO_IMAGE_BOUNDING_WIDTH + CameraItem1.m_CameraInfo.nVerticalOffset, CameraItem1.m_VideoStreamCatcher.m_nVideoWidth, CameraItem1.m_VideoStreamCatcher.m_nVideoHeight ); QRectF NextCameraItemRect( PANO_IMAGE_BOUNDING_WIDTH + CameraItem2.m_CameraInfo.nHorizonOffset + CameraItem2.m_VideoStreamCatcher.m_nVideoWidth * CameraItem2.nItemColIndex, PANO_IMAGE_BOUNDING_WIDTH + CameraItem2.m_CameraInfo.nVerticalOffset, CameraItem2.m_VideoStreamCatcher.m_nVideoWidth, CameraItem2.m_VideoStreamCatcher.m_nVideoHeight ); QRect InferImageRect = CameraItem1.IntersectRect; InferImageRect.setLeft(InferImageRect.left() - PANO_IMAGE_BOUNDING_WIDTH - CameraItem1.m_CameraInfo.nHorizonOffset - CameraItem1.m_VideoStreamCatcher.m_nVideoWidth * CameraItem1.nItemColIndex - 10); InferImageRect.setTop(InferImageRect.top() - PANO_IMAGE_BOUNDING_WIDTH - CameraItem1.m_CameraInfo.nVerticalOffset); InferImageRect.setWidth(CameraItem1.IntersectRect.width() + 10); InferImageRect.setHeight(CameraItem1.IntersectRect.height()); QRect TargetImageRect = CameraItem2.PreIntersectRect; 
TargetImageRect.setLeft(TargetImageRect.left() - PANO_IMAGE_BOUNDING_WIDTH - CameraItem2.m_CameraInfo.nHorizonOffset - CameraItem2.m_VideoStreamCatcher.m_nVideoWidth * CameraItem2.nItemColIndex - 10); TargetImageRect.setTop(TargetImageRect.top() - PANO_IMAGE_BOUNDING_WIDTH - CameraItem2.m_CameraInfo.nVerticalOffset); TargetImageRect.setWidth(CameraItem2.PreIntersectRect.width() + 10); TargetImageRect.setHeight(CameraItem2.PreIntersectRect.height()); m_ImageFusion.FusionImageByThinRectWindowSlideMean( m_pPanoImageBuffer, cv::Rect(OverlapRect.Left, OverlapRect.Top, OverlapRect.Width(), OverlapRect.Height()), CameraItem1.m_CurrentFrame, cv::Rect(InferImageRect.x(), InferImageRect.y(), InferImageRect.width(), InferImageRect.height()), CameraItem2.m_CurrentFrame, cv::Rect(TargetImageRect.x(), TargetImageRect.y(), CameraItem2.PreIntersectRect.width(), CameraItem2.PreIntersectRect.height()), 5, 0, 1.5); Iter++; } } } void QtCameraHardWareCopilot::PushImageToQueue() { int nCurIndex = 0; for (auto& Group : m_CameraItemList) { for (auto& item : Group.CameraItemGroup) { //prepare the image demands param int nWidth; int nHeight; int nPicth; unsigned char* pImage = NULL; //use the CameraItem to update the image cv::Mat& matFromI420 = item.m_CurrentFrame; nWidth = matFromI420.cols; nHeight = matFromI420.rows; nPicth = matFromI420.step[0]; if (nWidth == 0 || nHeight == 0) continue; //calculate the lefttop pt in the PanoImage int StitchingLeft = PANO_IMAGE_BOUNDING_WIDTH + item.nItemColIndex * nWidth + item.m_CameraInfo.nHorizonOffset; int StitchingTop = PANO_IMAGE_BOUNDING_WIDTH + 0 + item.m_CameraInfo.nVerticalOffset; m_StitchingTailor.AddWorkOrderInqueue(cv::Point(StitchingLeft, StitchingTop), //item->pCurrentFrame,nWidth,nHeight,nPicth, &item.m_CurrentFrame, &item.m_vvOverlapBounding); } } } void QtCameraHardWareCopilot::BlendingGradientImage(QtCameraHardWareCopilot* pClass) { //first to get the panoImage size int nPanoWidth = m_nPanoWidth; int nPanoHeight = m_nPanoHeight; int 
nPanoPicth = m_nPanoPitch; //define a list iterator to get the group of CameraItem std::list::iterator Group = m_CameraItemList.begin(); for (int Row = 0; Row ::iterator Iter = (Group) ->CameraItemGroup.begin(); //Calculate the Current Stitching Rect int nListSize = int((Group++)->CameraItemGroup.size()); if (nListSize < 2) continue; for (int i = 0; i < nListSize; i++) { if (!(*Iter).bActive) continue; if ((*Iter).pIntersectInstance == NULL) continue; //Use quote to get the CameraItem CameraItem& CameraItem1 = *(Iter); CameraItem& CameraItem2 = *((*Iter).pIntersectInstance); //Widen the overlap Rect MfcLabel::fRect OverlapRect = CameraItem1.IntersectRect; //扩展窗口 OverlapRect.Left -= 0; OverlapRect.Right += 0; QRectF CurCameraItemRect( PANO_IMAGE_BOUNDING_WIDTH + CameraItem1.m_CameraInfo.nHorizonOffset + CameraItem1.m_VideoStreamCatcher.m_nVideoWidth * CameraItem1.nItemColIndex, PANO_IMAGE_BOUNDING_WIDTH + CameraItem1.m_CameraInfo.nVerticalOffset, CameraItem1.m_VideoStreamCatcher.m_nVideoWidth, CameraItem1.m_VideoStreamCatcher.m_nVideoHeight ); QRectF NextCameraItemRect( PANO_IMAGE_BOUNDING_WIDTH + CameraItem2.m_CameraInfo.nHorizonOffset + CameraItem2.m_VideoStreamCatcher.m_nVideoWidth * CameraItem2.nItemColIndex, PANO_IMAGE_BOUNDING_WIDTH + CameraItem2.m_CameraInfo.nVerticalOffset, CameraItem2.m_VideoStreamCatcher.m_nVideoWidth, CameraItem2.m_VideoStreamCatcher.m_nVideoHeight ); QRect InferImageRect = CameraItem1.IntersectRect; InferImageRect.setLeft(InferImageRect.left() - PANO_IMAGE_BOUNDING_WIDTH - CameraItem1.m_CameraInfo.nHorizonOffset - CameraItem1.m_VideoStreamCatcher.m_nVideoWidth * CameraItem1.nItemColIndex); InferImageRect.setTop(InferImageRect.top() - PANO_IMAGE_BOUNDING_WIDTH - CameraItem1.m_CameraInfo.nVerticalOffset); InferImageRect.setWidth(CameraItem1.IntersectRect.width()); InferImageRect.setHeight(CameraItem1.IntersectRect.height()); QRect TargetImageRect = CameraItem2.PreIntersectRect; TargetImageRect.setLeft(TargetImageRect.left() - 
PANO_IMAGE_BOUNDING_WIDTH - CameraItem2.m_CameraInfo.nHorizonOffset - CameraItem2.m_VideoStreamCatcher.m_nVideoWidth * CameraItem2.nItemColIndex); TargetImageRect.setTop(TargetImageRect.top() - PANO_IMAGE_BOUNDING_WIDTH - CameraItem2.m_CameraInfo.nVerticalOffset); TargetImageRect.setWidth(CameraItem2.PreIntersectRect.width()); TargetImageRect.setHeight(CameraItem2.PreIntersectRect.height()); if (m_StitchingTailor.bUseYUV) { m_ImageFusion.FusionImageByBlendingGradientYUV( m_pPanoImageBuffer, cv::Rect(OverlapRect.Left, OverlapRect.Top, OverlapRect.Width(), OverlapRect.Height()), CameraItem1.m_CurrentFrame.data, CameraItem1.m_CurrentFrame.cols, CameraItem1.m_CurrentFrame.rows, CameraItem1.m_CurrentFrame.step, cv::Rect(InferImageRect.x(), InferImageRect.y(), InferImageRect.width(), InferImageRect.height()), CameraItem2.m_CurrentFrame.data, CameraItem2.m_CurrentFrame.cols, CameraItem2.m_CurrentFrame.rows, CameraItem2.m_CurrentFrame.step, cv::Rect(TargetImageRect.x(), TargetImageRect.y(), CameraItem2.PreIntersectRect.width(), CameraItem2.PreIntersectRect.height()), false ); } else { m_ImageFusion.FusionImageByBlendingGradient( m_pPanoImageBuffer, cv::Rect(OverlapRect.Left, OverlapRect.Top, OverlapRect.Width(), OverlapRect.Height()), CameraItem1.m_CurrentFrame, cv::Rect(InferImageRect.x(), InferImageRect.y(), InferImageRect.width(), InferImageRect.height()), CameraItem2.m_CurrentFrame, cv::Rect(TargetImageRect.x(), TargetImageRect.y(), CameraItem2.PreIntersectRect.width(), CameraItem2.PreIntersectRect.height())); } Iter++; } } } void QtCameraHardWareCopilot::BlendingGradientImageGpu() { //first to get the panoImage size int nPanoWidth = m_nPanoWidth; int nPanoHeight = m_nPanoHeight; int nPanoPicth = m_nPanoPitch; //define a list iterator to get the group of CameraItem std::list::iterator Group = m_CameraItemList.begin(); for (int Row = 0; Row ::iterator Iter = Group->CameraItemGroup.begin(); //Calculate the Current Stitching Rect int nListSize = 
int((Group++)->CameraItemGroup.size()); if (nListSize < 2) continue; for (int i = 0; i < nListSize; i++) { if ((*Iter).pIntersectInstance == NULL) continue; //Use quote to get the CameraItem CameraItem& CameraItem1 = *(Iter); CameraItem& CameraItem2 = *((*Iter).pIntersectInstance); //Widen the overlap Rect MfcLabel::fRect OverlapRect = CameraItem1.IntersectRect; //扩展窗口 OverlapRect.Left -= 0; OverlapRect.Right += 0; /*QRectF CurCameraItemRect( PANO_IMAGE_BOUNDING_WIDTH + CameraItem1.m_CameraInfo.nHorizonOffset + CameraItem1.m_VideoStreamCatcher.m_nVideoWidth * CameraItem1.nItemIndex, PANO_IMAGE_BOUNDING_WIDTH + CameraItem1.m_CameraInfo.nVerticalOffset, CameraItem1.m_VideoStreamCatcher.m_nVideoWidth, CameraItem1.m_VideoStreamCatcher.m_nVideoHeight ); QRectF NextCameraItemRect( PANO_IMAGE_BOUNDING_WIDTH + CameraItem2.m_CameraInfo.nHorizonOffset + CameraItem2.m_VideoStreamCatcher.m_nVideoWidth * CameraItem2.nItemIndex, PANO_IMAGE_BOUNDING_WIDTH + CameraItem2.m_CameraInfo.nVerticalOffset, CameraItem2.m_VideoStreamCatcher.m_nVideoWidth, CameraItem2.m_VideoStreamCatcher.m_nVideoHeight );*/ QRect InferImageRect = CameraItem1.IntersectRect; InferImageRect.setLeft(InferImageRect.left() - PANO_IMAGE_BOUNDING_WIDTH - CameraItem1.m_CameraInfo.nHorizonOffset - CameraItem1.m_VideoStreamCatcher.m_nVideoWidth * CameraItem1.nItemColIndex); InferImageRect.setTop(InferImageRect.top() - PANO_IMAGE_BOUNDING_WIDTH - CameraItem1.m_CameraInfo.nVerticalOffset); InferImageRect.setWidth(CameraItem1.IntersectRect.width()); InferImageRect.setHeight(CameraItem1.IntersectRect.height()); QRect TargetImageRect = CameraItem2.PreIntersectRect; TargetImageRect.setLeft(TargetImageRect.left() - PANO_IMAGE_BOUNDING_WIDTH - CameraItem2.m_CameraInfo.nHorizonOffset - CameraItem2.m_VideoStreamCatcher.m_nVideoWidth * CameraItem2.nItemColIndex); TargetImageRect.setTop(TargetImageRect.top() - PANO_IMAGE_BOUNDING_WIDTH - CameraItem2.m_CameraInfo.nVerticalOffset); 
TargetImageRect.setWidth(CameraItem2.PreIntersectRect.width()); TargetImageRect.setHeight(CameraItem2.PreIntersectRect.height()); if (m_StitchingTailor.bUseYUV) { m_ImageFusion.FusionImageByBlendingGradientYUVByGpu( m_pPanoImageBufferGPU, cv::Rect(OverlapRect.Left, OverlapRect.Top, OverlapRect.Width(), OverlapRect.Height()), CameraItem1.m_CurrentGpuData.pData, CameraItem1.m_CurrentGpuData.nWidth, CameraItem1.m_CurrentGpuData.nHeight, CameraItem1.m_CurrentGpuData.nPitch, cv::Rect(InferImageRect.x(), InferImageRect.y(), InferImageRect.width(), InferImageRect.height()), CameraItem2.m_CurrentGpuData.pData, CameraItem2.m_CurrentGpuData.nWidth, CameraItem2.m_CurrentGpuData.nHeight, CameraItem2.m_CurrentGpuData.nPitch, cv::Rect(TargetImageRect.x(), TargetImageRect.y(), CameraItem2.PreIntersectRect.width(), CameraItem2.PreIntersectRect.height()) ); } else { m_ImageFusion.FusionImageByBlendingGradient( m_pPanoImageBuffer, cv::Rect(OverlapRect.Left, OverlapRect.Top, OverlapRect.Width(), OverlapRect.Height()), CameraItem1.m_CurrentFrame, cv::Rect(InferImageRect.x(), InferImageRect.y(), InferImageRect.width(), InferImageRect.height()), CameraItem2.m_CurrentFrame, cv::Rect(TargetImageRect.x(), TargetImageRect.y(), CameraItem2.PreIntersectRect.width(), CameraItem2.PreIntersectRect.height())); } Iter++; } } } void QtCameraHardWareCopilot::MeanWeightBalanceImage() { //first to get the panoImage size int nPanoWidth = m_nPanoWidth; int nPanoHeight = m_nPanoHeight; int nPanoPicth = m_nPanoPitch; std::list::iterator Iter = m_CameraItemList.begin(); for (int RowIndex = 0; RowIndex < int(m_CameraItemList.size()); RowIndex++) { //define a iterator to get the CameraItem std::list::iterator IterRow = Iter->CameraItemGroup.begin(); int nListSize = int(Iter->CameraItemGroup.size()); if (nListSize < 2) continue; for (int i = 0; i < nListSize; i++) { if ((*IterRow).pIntersectInstance == NULL) continue; //Use quote to get the CameraItem CameraItem& CameraItem1 = *(IterRow); CameraItem& 
CameraItem2 = *((*IterRow).pIntersectInstance); //Widen the overlap Rect MfcLabel::fRect OverlapRect = CameraItem1.IntersectRect; if (CameraItem1.IntersectRect.isEmpty()) continue; //扩展窗口 OverlapRect.Left -= 0; OverlapRect.Right += 0; QRect InferImageRect = CameraItem1.IntersectRect; InferImageRect.setLeft(InferImageRect.left() - PANO_IMAGE_BOUNDING_WIDTH - CameraItem1.m_CameraInfo.nHorizonOffset - CameraItem1.m_VideoStreamCatcher.m_nVideoWidth * CameraItem1.nItemColIndex); InferImageRect.setTop(InferImageRect.top() - PANO_IMAGE_BOUNDING_WIDTH - CameraItem1.m_CameraInfo.nVerticalOffset); InferImageRect.setWidth(CameraItem1.IntersectRect.width()); InferImageRect.setHeight(CameraItem1.IntersectRect.height()); QRect TargetImageRect = CameraItem2.PreIntersectRect; TargetImageRect.setLeft(TargetImageRect.left() - PANO_IMAGE_BOUNDING_WIDTH - CameraItem2.m_CameraInfo.nHorizonOffset - CameraItem2.m_VideoStreamCatcher.m_nVideoWidth * CameraItem2.nItemColIndex); TargetImageRect.setTop(TargetImageRect.top() - PANO_IMAGE_BOUNDING_WIDTH - CameraItem2.m_CameraInfo.nVerticalOffset); TargetImageRect.setWidth(CameraItem2.PreIntersectRect.width()); TargetImageRect.setHeight(CameraItem2.PreIntersectRect.height()); g_CurFrameMutex.lock(); cv::Rect cvInferRc = cv::Rect(InferImageRect.left(), InferImageRect.right(), InferImageRect.width(), InferImageRect.height()); cv::Mat InferCurrentFrameROI = CameraItem1.m_CurrentFrame(cvInferRc); cv::Rect cvTargetRc = cv::Rect(TargetImageRect.left(), TargetImageRect.right(), TargetImageRect.width(), TargetImageRect.height()); cv::Mat TargetCurrentFrameROI = CameraItem2.m_CurrentFrame(cvTargetRc); float fParams = 0.5, fMeans = 0.0; m_ImageColorBalance.InterectRectDiff(TargetCurrentFrameROI, InferCurrentFrameROI, fParams, fMeans); CameraItem2.m_CurrentFrame = CameraItem2.m_CurrentFrame * fParams; g_CurFrameMutex.unlock(); Iter++; } } } void QtCameraHardWareCopilot::CalculateAllItemIntersectRect() { //to get the intersect rect of all the CameraItem 
	// NOTE(review): template arguments were stripped from this file during
	// extraction ("std::list::iterator" below was presumably
	// std::list<TAG_CAM_ROW_GROUP>::iterator / std::list<CameraItem>::iterator).
	// Code is kept byte-identical; only comments were added.
	//first to get the cameralist size
	//this part is calculate the horizontal intersect rect of all the cameraitem
	//set a iter to get the TAG... group
	std::list::iterator GroupIter = m_CameraItemList.begin();
	for (int RowNum = 0; RowNum < int(m_CameraItemList.size()); RowNum++)
	{
		TAG_CAM_ROW_GROUP& CurGroup = *(GroupIter++);
		int nCameraNum = int(CurGroup.CameraItemGroup.size());
		if (nCameraNum < 2)
			continue;
		//then to get the intersect rect of all the CameraItem
		std::list::iterator Iter = (CurGroup.CameraItemGroup.begin());
		std::list::iterator InLoopIter = (CurGroup.CameraItemGroup.begin());
		for (int CamNum = 0; CamNum < nCameraNum; CamNum++)
		{
			if (!Iter->bActive)
				continue; // NOTE(review): skips without advancing Iter -- confirm intended
			InLoopIter = Iter;
			CameraItem* pPreCameraItem = nullptr;
			CameraItem* pNextCameraItem = nullptr;
			// pPreCameraItem = left neighbour (if any); Iter is then advanced so
			// pNextCameraItem = right neighbour (if any)
			if (Iter != CurGroup.CameraItemGroup.begin())
				pPreCameraItem = &(*(--InLoopIter));
			CameraItem& CurrentCameraItem = (*(Iter++));
			if (Iter != CurGroup.CameraItemGroup.end())
				pNextCameraItem = &(*Iter);
			//if the current CameraItem is the first one, select the pre one
			CurrentCameraItem.nItemColIndex = CamNum;
			// pano-space rect of the current camera frame (bounding border + offsets)
			QRectF CurCameraItemRect(
				PANO_IMAGE_BOUNDING_WIDTH + CurrentCameraItem.m_CameraInfo.nHorizonOffset + CurrentCameraItem.m_VideoStreamCatcher.m_nVideoWidth * CurrentCameraItem.nItemColIndex,
				PANO_IMAGE_BOUNDING_WIDTH + CurrentCameraItem.m_CameraInfo.nVerticalOffset,
				CurrentCameraItem.m_VideoStreamCatcher.m_nVideoWidth,
				CurrentCameraItem.m_VideoStreamCatcher.m_nVideoHeight
			);
			CurrentCameraItem.ImageRect = CurCameraItemRect.toRect();
			QRectF NextCameraItemRect;
			if (pNextCameraItem != nullptr)
				NextCameraItemRect = QRectF(
					PANO_IMAGE_BOUNDING_WIDTH + pNextCameraItem->m_CameraInfo.nHorizonOffset + pNextCameraItem->m_VideoStreamCatcher.m_nVideoWidth * pNextCameraItem->nItemColIndex,
					PANO_IMAGE_BOUNDING_WIDTH + pNextCameraItem->m_CameraInfo.nVerticalOffset,
					pNextCameraItem->m_VideoStreamCatcher.m_nVideoWidth,
					pNextCameraItem->m_VideoStreamCatcher.m_nVideoHeight
				);
			//get the intersect rect
			QRectF IntersectRect = CurCameraItemRect.intersected(NextCameraItemRect);
			IntersectRect.setLeft(IntersectRect.left());   // no-op, kept as-is
			IntersectRect.setRight(IntersectRect.right()); // no-op, kept as-is
			QRectF PreItemInterRect;
			if (pPreCameraItem != nullptr)
				PreItemInterRect = pPreCameraItem->IntersectRect;
			//get the relative overlap rect
			if (IntersectRect.width() == 0 || IntersectRect.height() == 0)
			{
				CurrentCameraItem.pIntersectInstance = NULL;
			}
			// overlap rect expressed in the current item's image coordinates
			QRect CurInterRect;
			CurInterRect.setTop(//IntersectRect.top() < CurCameraItemRect.top() ?
				IntersectRect.top() - CurCameraItemRect.top());
			// NOTE(review): sign here is (item.left - intersect.left), the
			// opposite of the Pre-item conversion below -- confirm intended.
			CurInterRect.setLeft(CurCameraItemRect.left() - IntersectRect.left());
			CurInterRect.setWidth(IntersectRect.width());
			CurInterRect.setHeight(IntersectRect.height());
			QRectF PreItemIntersectRectInCurItemCoordinate;
			if (PreItemInterRect.width() != 0 || PreItemInterRect.height() != 0)
			{
				PreItemIntersectRectInCurItemCoordinate.setTop(
					//PreItemInterRect.top() < CurCameraItemRect.top() ?
					PreItemInterRect.top() - CurCameraItemRect.top());
				PreItemIntersectRectInCurItemCoordinate.setLeft(
					//PreItemInterRect.left() < CurCameraItemRect.left() ?
					PreItemInterRect.left() - CurCameraItemRect.left());
				PreItemIntersectRectInCurItemCoordinate.setWidth(PreItemInterRect.width());
				PreItemIntersectRectInCurItemCoordinate.setHeight(PreItemInterRect.height());
			}
			//flag the intersect instance
			CurrentCameraItem.pIntersectInstance = pNextCameraItem;
			//according to intersect rect to calculate the overlap vector
			// m_vvOverlapBounding[row] = {left usable bound, right usable bound}
			CurrentCameraItem.m_vvOverlapBounding.resize(CurrentCameraItem.m_VideoStreamCatcher.m_nVideoHeight);
			for (int i = 0; i < CurrentCameraItem.m_VideoStreamCatcher.m_nVideoHeight; i++)
			{
				CurrentCameraItem.m_vvOverlapBounding[i].resize(2);
				//judge this line is overlap or not
				if (pPreCameraItem != nullptr)
				{
					if (PreItemIntersectRectInCurItemCoordinate.top() > i || i > PreItemIntersectRectInCurItemCoordinate.bottom())
					{
						CurrentCameraItem.m_vvOverlapBounding[i][0] = 0;
					}
					else
					{
						CurrentCameraItem.m_vvOverlapBounding[i][0] = PreItemIntersectRectInCurItemCoordinate.width();
					}
				}
				else
					CurrentCameraItem.m_vvOverlapBounding[i][0] = 0;
				if (pNextCameraItem != nullptr)
				{
					if (CurInterRect.top() > i || i > CurInterRect.bottom())
					{
						CurrentCameraItem.m_vvOverlapBounding[i][1] = CurCameraItemRect.width();
					}
					else
					{
						CurrentCameraItem.m_vvOverlapBounding[i][1] = CurCameraItemRect.width() - CurrentCameraItem.IntersectRect.width();
					}
				}
				else
					CurrentCameraItem.m_vvOverlapBounding[i][1] = CurCameraItemRect.width();
			}
			//todo: after complete the main process this program has to be changed
			CurrentCameraItem.IntersectRect = IntersectRect.toRect();
			CurrentCameraItem.PreIntersectRect = PreItemInterRect.toRect();
			// if (CurrentCameraItem.IntersectRect.left() < 50)
			// {
			//     CurrentCameraItem.IntersectRect.setLeft(50);
			// }
		}
		m_bBlockThread = false;
		//calculate the row group max available rect
		CalculateHorItemMaxAvailableRect(CurGroup.CameraItemGroup, CurGroup.AvailableRect);
	}
	if (int(m_CameraItemList.size()) < 2)
		return;
	//calculate the intersect rect group rect
	GroupIter = m_CameraItemList.begin();
	for (int GroupIndex = 0; GroupIndex < int(m_CameraItemList.size()); GroupIndex++)
	{
		TAG_CAM_ROW_GROUP* pPreGroup = nullptr;
		TAG_CAM_ROW_GROUP* pNextGroup = nullptr;
		if (GroupIter->AvailableRect.Width == 0 || GroupIter->AvailableRect.Height == 0)
			continue; // NOTE(review): continues without advancing GroupIter -- confirm
		if (GroupIter != m_CameraItemList.begin())
		{
			pPreGroup = &(*(--GroupIter));
			++GroupIter;
		}
		TAG_CAM_ROW_GROUP& CurrentCameraGroup = (*(GroupIter++));
		if (GroupIter != m_CameraItemList.end())
			pNextGroup = &(*GroupIter);
		else
			continue;
		// NOTE(review): CurGroupRect is built from GroupIter, which at this
		// point already refers to the NEXT group -- confirm intended.
		QRectF CurGroupRect
		(
			GroupIter->AvailableRect.Left,
			GroupIter->AvailableRect.Top,
			GroupIter->AvailableRect.Width,
			GroupIter->AvailableRect.Height
		);
		QRectF NextGroupRect(
			pNextGroup->AvailableRect.Left,
			pNextGroup->AvailableRect.Top,
			pNextGroup->AvailableRect.Width,
			pNextGroup->AvailableRect.Height
		);
		QRectF CurIntersectNextRect = CurGroupRect.intersected(NextGroupRect);
		if (CurIntersectNextRect.width() == 0 || CurIntersectNextRect.height() == 0)
		{
			CurrentCameraGroup.pNextIntersectGroup = nullptr;
			continue;
		}
		else
		{
			CurrentCameraGroup.pNextIntersectGroup = pNextGroup;
		}
		QRect CurInterRect;
		CurInterRect.setTop(//IntersectRect.top() < CurCameraItemRect.top() ?
			CurIntersectNextRect.top() - CurGroupRect.top());
		CurInterRect.setLeft(CurIntersectNextRect.left() - CurGroupRect.left());
		CurInterRect.setWidth(CurIntersectNextRect.width());
		CurInterRect.setHeight(CurIntersectNextRect.height());
		// per-line overlap bounds for the whole row group
		CurrentCameraGroup.vvGroupOverlapBounding.resize(CurrentCameraGroup.AvailableRect.Height);
		for (int i = 0; i < CurrentCameraGroup.AvailableRect.Height; i++)
		{
			CurrentCameraGroup.vvGroupOverlapBounding[i].resize(2);
			//judge this line is overlap or not
			/* if (ppregroup != nullptr)
			{
				if (PreItemIntersectRectInCurItemCoordinate.top() > i || i > PreItemIntersectRectInCurItemCoordinate.bottom())
				{
					CurrentCameraGroup.m_vvOverlapBounding[i][0] = 0;
				}
				else
				{
					CurrentCameraGroup.m_vvOverlapBounding[i][0] = PreItemIntersectRectInCurItemCoordinate.width();
				}
			}
			else*/
			CurrentCameraGroup.vvGroupOverlapBounding[i][0] = 0;
			if (CurrentCameraGroup.pNextIntersectGroup != nullptr)
			{
				if (CurInterRect.top() > i || i > CurInterRect.bottom())
				{
					CurrentCameraGroup.vvGroupOverlapBounding[i][1] = CurGroupRect.width();
				}
				else
				{
					CurrentCameraGroup.vvGroupOverlapBounding[i][1] = 0;
				}
			}
			else
				CurrentCameraGroup.vvGroupOverlapBounding[i][1] = CurGroupRect.width();
		}
	}
}
void QtCameraHardWareCopilot::CalculateAllItemIntersectRectWithOutBlender()
{
	//to get the intersect rect of all the CameraItem
	//first to get the cameralist size
	//this part is calculate the horizontal intersect rect of all the cameraitem
	//set a iter to get the TAG...
	// group
	// (same per-row pass as CalculateAllItemIntersectRect, but the left-side
	// overlap contribution is disabled; iterator types were template-stripped
	// during extraction -- code kept byte-identical, comments only.)
	std::list::iterator GroupIter = m_CameraItemList.begin();
	for (int RowNum = 0; RowNum < int(m_CameraItemList.size()); RowNum++)
	{
		TAG_CAM_ROW_GROUP& CurGroup = *(GroupIter);
		// advance unless already on the last group
		if (GroupIter != (--m_CameraItemList.end()))
			GroupIter++;
		int nCameraNum = int(CurGroup.CameraItemGroup.size());
		if (nCameraNum < 2)
			continue;
		//then to get the intersect rect of all the CameraItem
		std::list::iterator Iter = (CurGroup.CameraItemGroup.begin());
		std::list::iterator InLoopIter = (CurGroup.CameraItemGroup.begin());
		for (int CamNum = 0; CamNum < nCameraNum; CamNum++)
		{
			if (!Iter->bActive)
				continue; // NOTE(review): skips without advancing Iter -- confirm intended
			InLoopIter = Iter;
			CameraItem* pPreCameraItem = nullptr;
			CameraItem* pNextCameraItem = nullptr;
			if (Iter != CurGroup.CameraItemGroup.begin())
				pPreCameraItem = &(*(--InLoopIter));
			CameraItem& CurrentCameraItem = (*(Iter++));
			if (Iter != CurGroup.CameraItemGroup.end())
				pNextCameraItem = &(*Iter);
			//if the current CameraItem is the first one, select the pre one
			CurrentCameraItem.nItemColIndex = CamNum;
			// pano-space rect of the current camera frame
			QRectF CurCameraItemRect(
				PANO_IMAGE_BOUNDING_WIDTH + CurrentCameraItem.m_CameraInfo.nHorizonOffset + CurrentCameraItem.m_VideoStreamCatcher.m_nVideoWidth * CurrentCameraItem.nItemColIndex,
				PANO_IMAGE_BOUNDING_WIDTH + CurrentCameraItem.m_CameraInfo.nVerticalOffset,
				CurrentCameraItem.m_VideoStreamCatcher.m_nVideoWidth,
				CurrentCameraItem.m_VideoStreamCatcher.m_nVideoHeight
			);
			CurrentCameraItem.ImageRect = CurCameraItemRect.toRect();
			QRectF NextCameraItemRect;
			if (pNextCameraItem != nullptr)
				NextCameraItemRect = QRectF(
					PANO_IMAGE_BOUNDING_WIDTH + pNextCameraItem->m_CameraInfo.nHorizonOffset + pNextCameraItem->m_VideoStreamCatcher.m_nVideoWidth * pNextCameraItem->nItemColIndex,
					PANO_IMAGE_BOUNDING_WIDTH + pNextCameraItem->m_CameraInfo.nVerticalOffset,
					pNextCameraItem->m_VideoStreamCatcher.m_nVideoWidth,
					pNextCameraItem->m_VideoStreamCatcher.m_nVideoHeight
				);
			//get the intersect rect
			QRectF IntersectRect = CurCameraItemRect.intersected(NextCameraItemRect);
			IntersectRect.setLeft(IntersectRect.left());   // no-op, kept as-is
			IntersectRect.setRight(IntersectRect.right()); // no-op, kept as-is
			QRectF PreItemInterRect;
			if (pPreCameraItem != nullptr)
				PreItemInterRect = pPreCameraItem->IntersectRect;
			//get the relative overlap rect
			if (IntersectRect.width() == 0 || IntersectRect.height() == 0)
			{
				CurrentCameraItem.pIntersectInstance = NULL;
			}
			QRect CurInterRect;
			CurInterRect.setTop(//IntersectRect.top() < CurCameraItemRect.top() ?
				IntersectRect.top() - CurCameraItemRect.top());
			CurInterRect.setLeft(CurCameraItemRect.left() - IntersectRect.left());
			CurInterRect.setWidth(IntersectRect.width());
			CurInterRect.setHeight(IntersectRect.height());
			QRectF PreItemIntersectRectInCurItemCoordinate;
			if (PreItemInterRect.width() != 0 || PreItemInterRect.height() != 0)
			{
				PreItemIntersectRectInCurItemCoordinate.setTop(
					//PreItemInterRect.top() < CurCameraItemRect.top() ?
					PreItemInterRect.top() - CurCameraItemRect.top());
				PreItemIntersectRectInCurItemCoordinate.setLeft(
					//PreItemInterRect.left() < CurCameraItemRect.left() ?
					PreItemInterRect.left() - CurCameraItemRect.left());
				PreItemIntersectRectInCurItemCoordinate.setWidth(PreItemInterRect.width());
				PreItemIntersectRectInCurItemCoordinate.setHeight(PreItemInterRect.height());
			}
			//flag the intersect instance
			CurrentCameraItem.pIntersectInstance = pNextCameraItem;
			//according to intersect rect to calculate the overlap vector
			CurrentCameraItem.m_vvOverlapBounding.resize(CurrentCameraItem.m_VideoStreamCatcher.m_nVideoHeight);
			for (int i = 0; i < CurrentCameraItem.m_VideoStreamCatcher.m_nVideoHeight; i++)
			{
				CurrentCameraItem.m_vvOverlapBounding[i].resize(2);
				//judge this line is overlap or not
				//if (pPreCameraItem != nullptr)
				//{
				//	if (PreItemIntersectRectInCurItemCoordinate.top() > i || i > PreItemIntersectRectInCurItemCoordinate.bottom())
				//	{
				//		CurrentCameraItem.m_vvOverlapBounding[i][0] = 0;
				//	}
				//	else
				//	{
				//		CurrentCameraItem.m_vvOverlapBounding[i][0] = PreItemIntersectRectInCurItemCoordinate.width();
				//	}
				//}
				//else
				// (left-side overlap contribution disabled in this no-blender variant)
				CurrentCameraItem.m_vvOverlapBounding[i][0] = 0;
				if (pNextCameraItem != nullptr)
				{
					if (CurInterRect.top() > i || i > CurInterRect.bottom())
					{
						CurrentCameraItem.m_vvOverlapBounding[i][1] = CurCameraItemRect.width();
					}
					else
					{
						CurrentCameraItem.m_vvOverlapBounding[i][1] = CurCameraItemRect.width() - CurrentCameraItem.IntersectRect.width();
					}
				}
				else
					CurrentCameraItem.m_vvOverlapBounding[i][1] = CurCameraItemRect.width();
			}
			//todo: after complete the main process this program has to be changed
			CurrentCameraItem.IntersectRect = IntersectRect.toRect();
			CurrentCameraItem.PreIntersectRect = PreItemInterRect.toRect();
			// if (CurrentCameraItem.IntersectRect.left() < 50)
			// {
			//     CurrentCameraItem.IntersectRect.setLeft(50);
			// }
		}
		m_bBlockThread = false;
		//calculate the row group max available rect
		CalculateHorItemMaxAvailableRect(CurGroup.CameraItemGroup, CurGroup.AvailableRect);
	}
	if (int(m_CameraItemList.size()) < 2)
		return;
	//calculate the intersect rect group rect
	GroupIter = m_CameraItemList.begin();
	for (int GroupIndex = 0; GroupIndex < int(m_CameraItemList.size()); GroupIndex++)
	{
		TAG_CAM_ROW_GROUP* pPreGroup = nullptr;
		TAG_CAM_ROW_GROUP* pNextGroup = nullptr;
		if (GroupIter->AvailableRect.Width == 0 || GroupIter->AvailableRect.Height == 0)
			continue; // NOTE(review): continues without advancing GroupIter -- confirm
		if (GroupIter != m_CameraItemList.begin())
		{
			pPreGroup = &(*(--GroupIter));
			++GroupIter;
		}
		TAG_CAM_ROW_GROUP& CurrentCameraGroup = (*(GroupIter++));
		if (GroupIter != m_CameraItemList.end())
			pNextGroup = &(*GroupIter);
		else
			continue;
		//now we have confirm the current group and the next group
		//we have to calculate the intersect rect of item in upper group to lower group
		for (auto& Upper : CurrentCameraGroup.CameraItemGroup)
		{
			// row-aware pano-space rect of the upper item
			QRectF CurCameraItemRect(
				PANO_IMAGE_BOUNDING_WIDTH + Upper.m_CameraInfo.nHorizonOffset + Upper.m_VideoStreamCatcher.m_nVideoWidth * Upper.nItemColIndex,
				PANO_IMAGE_BOUNDING_WIDTH + Upper.m_VideoStreamCatcher.m_nVideoHeight * Upper.nItemRowIndex + Upper.m_CameraInfo.nVerticalOffset,
				Upper.m_VideoStreamCatcher.m_nVideoWidth,
				Upper.m_VideoStreamCatcher.m_nVideoHeight
			);
			for (auto& lower : pNextGroup->CameraItemGroup)
			{
				QRectF NextCameraItemRect;
				NextCameraItemRect = QRectF(
					PANO_IMAGE_BOUNDING_WIDTH + lower.m_CameraInfo.nHorizonOffset + lower.m_VideoStreamCatcher.m_nVideoWidth * lower.nItemColIndex,
					PANO_IMAGE_BOUNDING_WIDTH + lower.m_VideoStreamCatcher.m_nVideoHeight * lower.nItemRowIndex + lower.m_CameraInfo.nVerticalOffset,
					lower.m_VideoStreamCatcher.m_nVideoWidth,
					lower.m_VideoStreamCatcher.m_nVideoHeight
				);
				QRectF IntersectRect = CurCameraItemRect.intersected(NextCameraItemRect);
				//turn the intersect rect to current item coordinate
				QRectF PreItemIntersectRectInCurItemCoordinate;
				PreItemIntersectRectInCurItemCoordinate.setTop(
					//PreItemInterRect.top() < CurCameraItemRect.top() ?
					IntersectRect.top() - CurCameraItemRect.top());
				PreItemIntersectRectInCurItemCoordinate.setLeft(
					IntersectRect.left() - CurCameraItemRect.left());
				PreItemIntersectRectInCurItemCoordinate.setWidth(IntersectRect.width());
				PreItemIntersectRectInCurItemCoordinate.setHeight(IntersectRect.height());
				//last we have to calculate the overlap vector
				// clamp this item's per-line usable bounds against the vertical overlap
				for (int i = 0; i < Upper.m_vvOverlapBounding.size(); i++)
				{
					if (i < PreItemIntersectRectInCurItemCoordinate.top() || i > PreItemIntersectRectInCurItemCoordinate.bottom())
						continue;
					if (Upper.m_vvOverlapBounding[i][0] < PreItemIntersectRectInCurItemCoordinate.left())
					{
						if (Upper.m_vvOverlapBounding[i][1] >= PreItemIntersectRectInCurItemCoordinate.left())
							Upper.m_vvOverlapBounding[i][1] = PreItemIntersectRectInCurItemCoordinate.left();
						else
							Upper.m_vvOverlapBounding[i][1] = Upper.m_vvOverlapBounding[i][0];
					}
					else if (Upper.m_vvOverlapBounding[i][0] >= PreItemIntersectRectInCurItemCoordinate.left())
					{
						if (Upper.m_vvOverlapBounding[i][1] >= PreItemIntersectRectInCurItemCoordinate.right())
							Upper.m_vvOverlapBounding[i][0] = PreItemIntersectRectInCurItemCoordinate.right();
						else
							Upper.m_vvOverlapBounding[i][0] = Upper.m_vvOverlapBounding[i][1];
					}
					//if ()
					//	Upper.m_vvOverlapBounding[i][0] = PreItemIntersectRectInCurItemCoordinate.right();
					//if()
				}
			}
		}
	}
}
// Compute the usable rectangle of one camera row: horizontal union of the
// item rects, but the vertical INTERSECTION (Top = max, Bottom = min) -- the
// band every camera in the row can cover. NOTE(review): the MAX/MIN mix looks
// intentional, but confirm against callers.
void QtCameraHardWareCopilot::CalculateHorItemMaxAvailableRect(std::list& RowGroup, MfcLabel::Rect& AvailableRect)
{
	int nCameraNum = int(RowGroup.size());
	std::list::iterator Iter = RowGroup.begin();
	AvailableRect.Left = INT_MAX;
	AvailableRect.Top = INT_MIN;
	AvailableRect.Right = INT_MIN;
	AvailableRect.Bottom = INT_MAX;
	for (; Iter != RowGroup.end(); Iter++)
	{
		AvailableRect.Left = MIN(AvailableRect.Left, Iter->ImageRect.left());
		AvailableRect.Right = MAX(AvailableRect.Right, Iter->ImageRect.right());
		AvailableRect.Top = MAX(AvailableRect.Top, Iter->ImageRect.top());
		AvailableRect.Bottom = MIN(AvailableRect.Bottom, Iter->ImageRect.bottom());
	}
	AvailableRect.Width = AvailableRect.Right -
AvailableRect.Left; AvailableRect.Height = AvailableRect.Bottom - AvailableRect.Top; } void QtCameraHardWareCopilot::SetConnect() { //connect the VecInc and VecDec to the slot connect(ui.VecIncOffsetButton, &QPushButton::pressed, this, &QtCameraHardWareCopilot::OnVecIncBtnClicked); connect(ui.VecDecOffsetButton, &QPushButton::pressed, this, &QtCameraHardWareCopilot::OnVecDecBtnClicked); //connect the HorInc and HorDec to the slot connect(ui.HorIncOffsetButton, &QPushButton::pressed, this, &QtCameraHardWareCopilot::OnHorIncBtnClicked); connect(ui.HorDecOffsetButton, &QPushButton::pressed, this, &QtCameraHardWareCopilot::OnHorDecBtnClicked); //connect the Menu to the slot connect(ui.menuCamera,&QMenu::triggered, this, &QtCameraHardWareCopilot::OnMenuBtnClicked); connect(ui.menuShowMode,&QMenu::triggered, this, &QtCameraHardWareCopilot::OnMenuBtnClicked); //connect the CameraQueueScene to the slot connect(&m_CameraQueueScene, &CameraModifyScene::CameraItemModified, this, &QtCameraHardWareCopilot::OnCameraItemModified); //connect the graphicsView mousepress event to scene mousepress event //connect the qtimer to window refresh connect(m_pUpdateTimer, &QTimer::timeout, this, &QtCameraHardWareCopilot::RefreshAllSource); //connect the INI file to the slot connect(ui.INIFilemenu,&QMenu::triggered, this, &QtCameraHardWareCopilot::OnMenuBtnClicked); //connect the comboSelectModel to the slot connect(ui.comboBoxSelectModel, QOverload::of(&QComboBox::activated), this, &QtCameraHardWareCopilot::OnComboxSelectModel); } void QtCameraHardWareCopilot::paintEvent(QPaintEvent* event) { //this; ui.PanoramaImglabel->setPixmap(m_PanoPixmap); ui.PanoramaImglabel->show(); } void QtCameraHardWareCopilot::OnVecDecBtnClicked() { //modify the CameraInfo m_CameraGripper.MoveVertical(-1); //updata the lineedit in mainwindow ui.VerticalOffsetEdit->setText(QString::number(m_pCurrentCameraItem->m_CameraInfo.nVerticalOffset)); //modify the CameraItem 
m_pCurrentCameraItem->setPos(m_pCurrentCameraItem->pos().x(), m_pCurrentCameraItem->pos().y() - 1); //emit the signal to CameraQueueScene emit m_CameraQueueScene.CameraItemModified(); } void QtCameraHardWareCopilot::OnVecIncBtnClicked() { //modify the CameraInfo m_CameraGripper.MoveVertical(1); //updata the lineedit in mainwindow ui.VerticalOffsetEdit->setText(QString::number(m_pCurrentCameraItem->m_CameraInfo.nVerticalOffset)); //modify the CameraItem m_pCurrentCameraItem->setPos(m_pCurrentCameraItem->pos().x(), m_pCurrentCameraItem->pos().y() + 1); //emit the signal to CameraQueueScene emit m_CameraQueueScene.CameraItemModified(); } void QtCameraHardWareCopilot::OnHorDecBtnClicked() { //modify the CameraInfo m_CameraGripper.MoveHorizontal(-1); //updata the lineedit in mainwindow ui.HorizontalOffsetEdit->setText(QString::number(m_pCurrentCameraItem->m_CameraInfo.nHorizonOffset)); //modify the CameraItem m_pCurrentCameraItem->setPos(m_pCurrentCameraItem->pos().x() - 1, m_pCurrentCameraItem->pos().y()); //emit the signal to CameraQueueScene emit m_CameraQueueScene.CameraItemModified(); } void QtCameraHardWareCopilot::OnHorIncBtnClicked() { //modify the CameraInfo m_CameraGripper.MoveHorizontal(1); //updata the lineedit in mainwindow ui.HorizontalOffsetEdit->setText(QString::number(m_pCurrentCameraItem->m_CameraInfo.nHorizonOffset)); //modify the CameraItem m_pCurrentCameraItem->setPos(m_pCurrentCameraItem->pos().x() + 1, m_pCurrentCameraItem->pos().y()); //emit the signal to CameraQueueScene emit m_CameraQueueScene.CameraItemModified(); } void QtCameraHardWareCopilot::OnCameraItemModified() { m_StitchingTailor.Init(m_pPanoImageBuffer, m_nPanoWidth, m_nPanoHeight, cv::Point(0, 0)); //according to the CameraItem Index to get the CameraInfo QList pItemList = m_CameraQueueScene.items(); CameraItem* pItem = nullptr; for (auto& iter : pItemList) { pItem = (CameraItem*)iter; if (pItem->bSelected) break; } int nCameraNum = (int)pItemList.size(); //recalculate the overlap 
bounding //CalculateAllItemIntersectRect(); m_bBlockThread = true; //set the current CameraItem pointer m_pCurrentCameraItem = pItem; //set the current CameraInfo pointer m_pCurrentCameraInfo = &pItem->m_CameraInfo; //updata the lineedit in mainwindow ui.HorizontalOffsetEdit->SetEditTarget(&pItem->m_CameraInfo.nHorizonOffset); ui.HorizontalOffsetEdit->setText(QString::number(pItem->m_CameraInfo.nHorizonOffset)); ui.VerticalOffsetEdit->SetEditTarget(&pItem->m_CameraInfo.nVerticalOffset); ui.VerticalOffsetEdit->setText(QString::number(pItem->m_CameraInfo.nVerticalOffset)); ui.LightEdit->SetEditTarget(&pItem->m_CameraInfo.nLightOffset); ui.LightEdit->setText(QString::number(pItem->m_CameraInfo.nLightOffset)); ui.CurCameraAddresslabel->setText(pItem->m_CameraInfo.GetCompleteIpAddress()); m_bBlockThread = false; m_CameraQueueScene.update(); } void QtCameraHardWareCopilot::OnMenuBtnClicked(QAction* pAction) { if (pAction == ui.actionIncCamear) { ////create a IncCamera //IncCamera* pIncCamera = new IncCamera(); ////set the IncCamera block the main thread //pIncCamera->setWindowModality(Qt::ApplicationModal); ////Show IncCamera //pIncCamera->show(); ////block the main thread until the IncCamera close //while (pIncCamera->isVisible()) //{ // QCoreApplication::processEvents(); //} ////if the IncCamera is closed by push the cancel button break the function //if (pIncCamera->GetIsOKBtnClicked() != true) //{ // return; //} ////wait the IncCamera close,get the information from the IncCamera,and push the information to the CameraInfoList ////m_CameraItemList.push_back(pIncCamera->GetCameraInfo()); ////first check the number of the CameraItem //int CameraItemNum = m_CameraItemList.size(); ////create a CameraItem //CameraItem* pCameraItem = new CameraItem(nullptr); //pCameraItem->m_CameraInfo = pIncCamera->GetCameraInfo(); ////init the CameraItem //if (!pCameraItem->Init(CameraItemNum, QPointF(0, 0))) //{ // //if the CameraItem init failed,delete the CameraItem and return // 
QString Title = "错误"; // QString Text = "当前输入的链接无法初始化解码器"; // QMessageBox box; // box.setWindowTitle(Title); // box.setText(Text); // box.addButton(QMessageBox::Cancel); // delete pCameraItem; // return; //} ////add the CameraItem to the list //m_CameraItemList.push_back(pCameraItem); ////add the CameraItem to the scene //m_CameraQueueScene.AddCameraItem(pCameraItem); ////reset the pano buffer ////ResetPanoImageBuffer(); //m_bCameraItemModified = true; ////update the scene //m_CameraQueueScene.update(); ////according to the buffer left top is (0,0),this is dual to(应该是‘影响’才正确) the offset //m_StitchingTailor.Init(m_pPanoImageBuffer, m_nPanoWidth, m_nPanoHeight, cv::Point(0, 0)); ////emit the signal to the CameraItem bas been selected //emit m_CameraQueueScene.CameraItemSelected(pCameraItem); ////open the thread catch stream //pCameraItem->m_VideoStreamCatcher.Start(); //delete pIncCamera; } else if (pAction == ui.actionDecCamera) { } else if (pAction == ui.actionCreateDeviceTemplate) { DeviceTempleteCreator * pDeviceTempleteCreator = new DeviceTempleteCreator(this); pDeviceTempleteCreator->m_CameraRowGroupList.swap(m_CameraItemList); m_CameraItemList.clear(); if (pDeviceTempleteCreator->m_CameraRowGroupList.size() == 0) { TAG_CAM_ROW_GROUP tagCamRowGroup; pDeviceTempleteCreator->m_CameraRowGroupList.push_back(tagCamRowGroup); } pDeviceTempleteCreator->ChangeCurrentGroup(&pDeviceTempleteCreator->m_CameraRowGroupList.back()); pDeviceTempleteCreator->setWindowModality(Qt::ApplicationModal); pDeviceTempleteCreator->SetScene(&m_CameraQueueScene); pDeviceTempleteCreator->show(); while (pDeviceTempleteCreator->isVisible()) { QCoreApplication::processEvents(); } m_CameraItemList.swap(pDeviceTempleteCreator->m_CameraRowGroupList); if (pDeviceTempleteCreator->ui.CheckCameraPoseHor->checkState()) { m_fCameraSubStreamToMainStreamHeight = HORIZONTAL_CAMERA_HEIGHT; m_fCameraSubStreamToMainStreamHeight = HORIZONTAL_CAMERA_WIDTH; } if 
(pDeviceTempleteCreator->ui.CheckCameraPoseVec->checkState()) { m_fCameraSubStreamToMainStreamHeight = VERTICAL_CAMERA_HEIGHT; m_fCameraSubStreamToMainStreamWidth = VERTICAL_CAMERA_WIDTH; } delete pDeviceTempleteCreator; } else if (pAction == ui.actionFit) { m_CurShowMode = ShowMode::Fit; //sdl init { //m_SdlProc.Init((void*)ui.PanoramaImglabel->winId(), m_nPanoWidth, m_nPanoHeight, false); //m_SdlProc.SetRect(0, 0, ui.PanoramaImglabel->rect().width(), ui.PanoramaImglabel->rect().height()); } } else if (pAction == ui.actionFree) { m_CurShowMode = ShowMode::Free; } else if (pAction == ui.actionReadINI) { //use qfiledialog to get the ini file load path QString strIniFilePath = QFileDialog::getOpenFileName(this, "选择ini文件", "", "ini文件(*.ini)"); //if the path is empty return if (strIniFilePath.isEmpty()) return; ReadSerializeIniFile(strIniFilePath); } else if (pAction == ui.actionSAVEINI) { //use qfiledialog to get the ini file save path QString strIniFilePath = QFileDialog::getSaveFileName(this, "选择ini文件", "", "ini文件(*.ini)"); //if the path is empty return if (strIniFilePath.isEmpty()) return; WriteSerializeIniFile(strIniFilePath); } else if (pAction == ui.DetailParamSetAction) { //create a DetailParamSet ui ParamSetUI *pParamSetUI = new ParamSetUI(); //set show mode //set the IncCamera block the main thread pParamSetUI->setWindowModality(Qt::ApplicationModal); //Show IncCamera pParamSetUI->show(); //block the main thread until the IncCamera close while (pParamSetUI->isVisible()) { QCoreApplication::processEvents(); } m_ParamSet = pParamSetUI->m_ParamSet; } } void QtCameraHardWareCopilot::ResetPanoImageBuffer() { //reset the Pano IMage buffer //first check the item number int nCameraNum = 0; for (auto& group : m_CameraItemList) { nCameraNum = MAX(nCameraNum, int(group.CameraItemGroup.size())); } //QList pList = m_CameraQueueScene.items(); //nCameraNum = int(pList.size()); //if nCameranum ==0 out if (nCameraNum <= 0) return; //according to this allocate buffer //check 
now panobuffer is null or not if (m_pPanoImageBufferGPU != nullptr) { cudaError Error = cudaFree(m_pPanoImageBufferGPU); delete[] m_pPanoImageBuffer; m_pPanoImageBuffer = nullptr; } CameraItem* pFirstItem =(CameraItem*)&(m_CameraItemList.front().CameraItemGroup.front()); int nCurFrameImageWidth = pFirstItem->m_VideoStreamCatcher.m_nOutputImageWidth; int nCurFrameImageHeight = pFirstItem->m_VideoStreamCatcher.m_nOutputImageHeight; int nCurFrameImagePitch; if (m_StitchingTailor.bUseYUV) { nCurFrameImagePitch = ((nCurFrameImageWidth * nCameraNum + PANO_IMAGE_BOUNDING_WIDTH * 2)*3 + 3) / 4 * 4; //由于yuv图像的特殊性,所以这里需要对长度和宽度做出修正对齐,保证最终的图像能够显示 nCurFrameImageHeight = ((nCurFrameImageHeight * int(m_CameraItemList.size()) + PANO_IMAGE_BOUNDING_WIDTH * 2 + 1) / 2) * 2; m_pPanoImageBuffer = new unsigned char[nCurFrameImagePitch * (nCurFrameImageHeight) * 3 / 2]; unsigned char* GpuBUffer = nullptr; cudaError Error = cudaMalloc(&GpuBUffer, nCurFrameImagePitch * (nCurFrameImageHeight) * 3 / 2); m_pPanoImageBufferGPU = GpuBUffer; } else { nCurFrameImagePitch = ((nCurFrameImageWidth * nCameraNum + PANO_IMAGE_BOUNDING_WIDTH * 2) * 3 + 3) / 4 * 4; nCurFrameImageHeight = (nCurFrameImageHeight * int(m_CameraItemList.size()) + PANO_IMAGE_BOUNDING_WIDTH * 2 + 1) / 2 * 2; m_pPanoImageBuffer = new unsigned char[nCurFrameImagePitch * (nCurFrameImageHeight)]; } m_nPanoWidth = nCurFrameImageWidth * nCameraNum + PANO_IMAGE_BOUNDING_WIDTH * 2; m_nPanoHeight = nCurFrameImageHeight; m_nPanoPitch = nCurFrameImagePitch; m_StitchingTailor.nPanoPitch = nCurFrameImagePitch; m_bCameraItemModified = false; } void QtCameraHardWareCopilot::RefreshAllSource() { //if (!m_bReFreshPrepared) // return; if (!m_bBlockThread && !m_bCameraItemModified) { //use Qt to display the image // create a QImage to show the unsigned char data m_PanoImage = QImage(m_pPanoImageBuffer, m_nPanoWidth, m_nPanoHeight, QImage::Format_BGR888); m_PanoPixmap = QPixmap::fromImage(m_PanoImage); m_PanoPainter.SetYUVShowOrNot(false); 
m_PanoPainter.SetOrginImage(&m_PanoImage); m_PanoPainter.SetLabelRect(ui.PanoramaImglabel->rect()); m_PanoPainter.EditScaleOrNot = true; m_PanoPainter.ImgRect = m_PanoImage.rect(); m_PanoPainter.ShowClientRect = ui.PanoramaImglabel->rect(); m_PanoPainter.Draw(m_CurShowMode); ////use sdl to display the image //{ // //if Show Mode is fit , init is in select func not here //if (m_CurShowMode == ShowMode::Fit) //{ // QByteArray QYuvImage(m_nPanoPitch * m_nPanoHeight * 1.5, 0); // memcpy(QYuvImage.data(), m_pPanoImageBuffer, m_nPanoPitch * m_nPanoHeight * 1.5); // m_PanoPainter.SetOrginImage(&QYuvImage, m_nPanoWidth, m_nPanoHeight); // m_PanoPainter.SetYUVShowOrNot(true); // m_PanoPainter.SetLabelRect(ui.PanoramaImglabel->rect()); // m_PanoPainter.Draw(ShowMode::Fit); // /* cudaMemcpy(m_pPanoImageBuffer, m_pPanoImageBufferGPU, m_nPanoPitch * m_nPanoHeight * 1.5, cudaMemcpyDeviceToHost); // cv::Mat PanoImage(m_nPanoHeight * 3 / 2, m_nPanoWidth, CV_8UC1, m_pPanoImageBuffer); // m_SdlProc.DisplayFrame(m_pPanoImageBuffer, m_nPanoWidth, m_nPanoHeight);*/ //} //} } } void QtCameraHardWareCopilot::mousePressEvent(QMouseEvent* event) { m_bMousePressOrNot = true; QPoint Pt = event->pos(); MfcLabel::fRect DetailShowRect, ViewPortRect; DetailShowRect = ui.PanoramaImglabel->geometry(); if (DetailShowRect.JudgePtInRectBoxOrNot(fPoint(Pt.x(), Pt.y()))) { m_PreMousePt = Pt; } } void QtCameraHardWareCopilot::mouseMoveEvent(QMouseEvent* event) { QPoint Pt = event->pos(); MfcLabel::fRect DetailShowRect, ViewPortRect;// GraphicsRect, GraphicsRect2; DetailShowRect = ui.PanoramaImglabel->geometry(); if (m_bMousePressOrNot && DetailShowRect.JudgePtInRectBoxOrNot(fPoint(Pt.x(), Pt.y()))) { QPoint CurOffset; CurOffset.rx() = Pt.x() - m_PreMousePt.x(); CurOffset.ry() = Pt.y() - m_PreMousePt.y(); m_PanoPainter.Offset.offset(CurOffset.x(), CurOffset.y()); m_PreMousePt = Pt; } } void QtCameraHardWareCopilot::mouseReleaseEvent(QMouseEvent* event) { m_bMousePressOrNot = false; m_PreMousePt.setX(0); 
m_PreMousePt.setY(0); } void QtCameraHardWareCopilot::wheelEvent(QWheelEvent* event) { QPoint Pt = event->pos(); MfcLabel::fRect DetailShowRect, ViewPortRect;// GraphicsRect, GraphicsRect2; DetailShowRect = ui.PanoramaImglabel->geometry(); if (DetailShowRect.JudgePtInRectBoxOrNot(fPoint(Pt.x(), Pt.y()))) { DetailShowRect = ui.PanoramaImglabel->geometry(); Pt.rx() = Pt.x() - DetailShowRect.Left; Pt.ry() = Pt.y() - DetailShowRect.Top; if (event->delta() > 0) { m_PanoPainter.CalculateWheelZoomOffsetAndScale(Pt, true, NULL); } else { m_PanoPainter.CalculateWheelZoomOffsetAndScale(Pt, false, NULL); } } } void QtCameraHardWareCopilot::OnComboxSelectModel(int index) { if (m_CameraItemList.size() == 0) { QMessageBox Warning; Warning.setText("Please add camera item first!"); //Warning.exec(); return; } //if the index is 0, it means the current model is single item to modify if (index == 0) { m_bCameraItemMoveModel = true; emit m_CameraQueueScene.CameraItemModified(); m_CameraGripper.EmptyMagazine(); m_CameraGripper.fillingGripper(m_pCurrentCameraItem); } else { m_bCameraItemMoveModel = false; emit m_CameraQueueScene.CameraItemModified(); m_CameraGripper.EmptyMagazine(); int nCurGroupRowIndex = m_pCurrentCameraItem->nItemRowIndex; std::list::iterator Group = m_CameraItemList.begin(); for (; Group->nRowCameraIndex != nCurGroupRowIndex; Group++); for (auto& item : Group->CameraItemGroup) m_CameraGripper.fillingGripper(&(item)); } m_CameraQueueScene.SetSelectModel(index==0?true:false); } void QtCameraHardWareCopilot::closeEvent(QCloseEvent* event) { s_bSwitchProcess = false; } void QtCameraHardWareCopilot::ReadSerializeIniFile(QString strfilepath) { //open file QFile file(strfilepath); if (!file.open(QIODevice::ReadOnly | QIODevice::Text)) { QMessageBox::warning(this, tr("Read File"), tr("Cannot open file:\n%1").arg(strfilepath)); return; } //use QDataStream to read the file QDataStream in(&file); //read the data int nItemRowNum = 0; in >> nItemRowNum; for (int i = 0; i < 
nItemRowNum; i++) { int nCurrentItemGroupNum = 0; in >> nCurrentItemGroupNum; m_CameraItemList.push_back(TAG_CAM_ROW_GROUP()); TAG_CAM_ROW_GROUP& CurrentRowGroup = m_CameraItemList.back(); CurrentRowGroup.nRowCameraCount = nCurrentItemGroupNum; m_CameraItemList.back().nRowCameraIndex = i; CurrentRowGroup.CameraItemGroup.resize(nCurrentItemGroupNum); std::list::iterator it = CurrentRowGroup.CameraItemGroup.begin(); for (int j = 0; j < nCurrentItemGroupNum; j++) { CameraItem* pCameraItem = &(*it); pCameraItem->nItemRowIndex = i; SerializeCameraInfo(in, &pCameraItem->m_CameraInfo, false); pCameraItem->m_CameraInfo.nStreamType = 1; //init the CameraItem if (!pCameraItem->Init(i, QPointF(0, 0))) { //if the CameraItem init failed,delete the CameraItem and return QString Title = "错误"; QString Text = "the input curl cant init the decoder"; QMessageBox box; box.setWindowTitle(Title); box.setText(Text); box.addButton(QMessageBox::Cancel); box.exec(); return; } //add the CameraItem to the scene m_CameraQueueScene.AddCameraItem(pCameraItem); //update the scene m_CameraQueueScene.update(); //reset the pano buffer //ResetPanoImageBuffer(); m_bCameraItemModified = true; //according to the buffer left top is (0,0),this is dual to(应该是‘影响’才正确) the offset m_StitchingTailor.Init(m_pPanoImageBuffer, m_nPanoWidth, m_nPanoHeight, cv::Point(0, 0)); //emit the signal to the CameraItem bas been selected emit m_CameraQueueScene.CameraItemModified(); //open the thread catch stream pCameraItem->m_VideoStreamCatcher.Start(); //get the next CameraItem it++; } } } void QtCameraHardWareCopilot::WriteSerializeIniFile(QString strfilepath) { QFile file(strfilepath); if (!file.open(QIODevice::WriteOnly | QIODevice::Text)) { QMessageBox::warning(this, tr("Read File"), tr("Cannot open file:\n%1").arg(strfilepath)); return; } //use QDataStream to read the file QDataStream out(&file); //read the data int nRowNum = m_CameraItemList.size(); //set a iter to the list std::list::iterator iter = 
m_CameraItemList.begin(); //first write the row num out << nRowNum; for (int i = 0; i < nRowNum; i++) { int nItemNum = (*iter).CameraItemGroup.size(); out << nItemNum; std::list::iterator ItemIter = (*iter).CameraItemGroup.begin(); for (int j = 0; j < nItemNum; j++) { SerializeCameraInfo(out, &((*ItemIter++).m_CameraInfo), true); } iter++; } std::ofstream ofs; std::string testString = strfilepath.toStdString(); testString.copy((char*)testString.c_str(), testString.length() - 4, 0); testString = testString + ".dat"; ofs.open(testString, std::ios::out); nlohmann::json Json; //ofs << nItemNum; iter = m_CameraItemList.begin(); Json.push_back({ {"RowNum",nRowNum} }); Json.push_back({ {"640UpTo4kWidth",m_fCameraSubStreamToMainStreamWidth}, {"640UpTo4kHeight",m_fCameraSubStreamToMainStreamHeight} }); for (int i = 0; i < nRowNum; i++) { int nGroupNum = (*iter).CameraItemGroup.size(); Json.push_back({ {"ItemNum",nGroupNum } }); std::list::iterator ItemIter = (*iter).CameraItemGroup.begin(); for (int j = 0; j < nGroupNum; j++) { Json.push_back({ {"IpAddress1",((*ItemIter).m_CameraInfo).szIpAddress1.toStdString()}, {"IpAddress2",((*ItemIter).m_CameraInfo).szIpAddress2.toStdString()}, {"IpAddress3",((*ItemIter).m_CameraInfo).szIpAddress3.toStdString()}, {"IpAddress4",((*ItemIter).m_CameraInfo).szIpAddress4.toStdString()}, {"UserName",((*ItemIter).m_CameraInfo).szUserName.toStdString()}, {"Password",((*ItemIter).m_CameraInfo).szPassword.toStdString()}, {"Port",((*ItemIter).m_CameraInfo).nPort}, {"StreamType",0},//((*ItemIter).m_CameraInfo).nStreamType}, {"Channel",((*ItemIter).m_CameraInfo).nChannel}, {"IsCheckRight",((*ItemIter).m_CameraInfo).bIsCheckRight}, {"HorizonOffset",((*ItemIter).m_CameraInfo).nHorizonOffset}, {"VerticalOffset",((*ItemIter).m_CameraInfo).nVerticalOffset}, {"CameraMatrixColIndex",((*ItemIter)).nItemColIndex}, {"CameraMatrixRowIndex",(*ItemIter).nItemRowIndex}, }); ItemIter++; } iter++; } ofs << Json; ofs.close(); } void 
QtCameraHardWareCopilot::SerializeCameraInfo(QDataStream& Stream, CAMERA_INFO* pCameraInfo, bool StoreOrLoad) { if (StoreOrLoad) { //store the data Stream << pCameraInfo->szIpAddress1; Stream << pCameraInfo->szIpAddress2; Stream << pCameraInfo->szIpAddress3; Stream << pCameraInfo->szIpAddress4; Stream << pCameraInfo->szUserName; Stream << pCameraInfo->szPassword; Stream << pCameraInfo->nPort; Stream << pCameraInfo->nStreamType; Stream << pCameraInfo->nChannel; Stream << pCameraInfo->bIsCheckRight; Stream << pCameraInfo->nHorizonOffset; Stream << pCameraInfo->nVerticalOffset; Stream << pCameraInfo->nColIndex; Stream << pCameraInfo->nRowIndex; } else { //load the data Stream >> pCameraInfo->szIpAddress1; Stream >> pCameraInfo->szIpAddress2; Stream >> pCameraInfo->szIpAddress3; Stream >> pCameraInfo->szIpAddress4; Stream >> pCameraInfo->szUserName; Stream >> pCameraInfo->szPassword; Stream >> pCameraInfo->nPort; Stream >> pCameraInfo->nStreamType; Stream >> pCameraInfo->nChannel; Stream >> pCameraInfo->bIsCheckRight; Stream >> pCameraInfo->nHorizonOffset; Stream >> pCameraInfo->nVerticalOffset; Stream >> pCameraInfo->nColIndex; Stream >> pCameraInfo->nRowIndex; } }