Implementing the VENC Function
To encode multiple images into a video, first create an encoder instance by calling CreateVenc, then call RunVenc repeatedly on the same instance to encode each frame, and finally call DestroyVenc to release the instance.
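The following sketch distills that lifecycle to its three steps. It is illustrative only and not part of the sample: EncodeOneDummyFrame and SimpleCallBack are hypothetical names, the codingType and yuvStoreType values are placeholders for the real enumerations, and the header that declares the VENC structures and functions must be included as in your project. The structure fields and API calls themselves are the same ones used in the full example below.

// Minimal lifecycle sketch (illustrative only): encode a single zero-filled
// YUV420 frame. EncodeOneDummyFrame and SimpleCallBack are hypothetical names;
// the codingType/yuvStoreType values are placeholders for the real enum values.
#include <cstdint>
#include <cstdlib>
#include <cstring>

void SimpleCallBack(struct VencOutMsg* vencOutMsg, void* userData)
{
    // A real callback would persist vencOutMsg->outputData
    // (vencOutMsg->outputDataSize bytes), for example by writing it to a file.
    (void)vencOutMsg;
    (void)userData;
}

void EncodeOneDummyFrame(uint32_t width, uint32_t height)
{
    struct VencConfig vencConfig;
    vencConfig.width = width;
    vencConfig.height = height;
    vencConfig.codingType = 0;                      // placeholder: H.264/H.265 selector
    vencConfig.yuvStoreType = 0;                    // placeholder: YUV storage layout
    vencConfig.keyFrameInterval = 16;
    vencConfig.vencOutMsgCallBack = SimpleCallBack;
    vencConfig.userData = nullptr;

    int32_t vencHandle = CreateVenc(&vencConfig);   // 1. create the encoder instance
    if (vencHandle == -1) {
        return;
    }

    uint32_t frameSize = width * height * 3 / 2;    // one YUV420 frame
    char* frame = static_cast<char*>(malloc(frameSize));
    if (frame != nullptr) {
        memset(frame, 0, frameSize);                // zero-filled dummy frame

        struct VencInMsg vencInMsg;
        vencInMsg.inputData = frame;
        vencInMsg.inputDataSize = frameSize;
        vencInMsg.keyFrameInterval = 16;
        vencInMsg.forceIFrame = 0;
        vencInMsg.eos = 0;
        RunVenc(vencHandle, &vencInMsg);            // 2. encode one frame (repeat per frame)
        free(frame);
    }

    DestroyVenc(vencHandle);                        // 3. release the encoder instance
}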
In this example, the VENC API is called to encode YUV images into H.265 or H.264 streams.
// Standard headers required by this sample. The HiAI engine log header and the
// VENC API header (which declare HIAI_ENGINE_LOG, VencConfig, VencInMsg,
// CreateVenc, RunVenc, and DestroyVenc), as well as the g_* globals and the
// error codes, are assumed to be provided elsewhere in the sample project.
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <chrono>
#include <memory>
#include <string>

std::string vencOutFileName("venc.bin");
std::shared_ptr<FILE> vencOutFile(nullptr);

void VencCallBackDumpFile(struct VencOutMsg* vencOutMsg, void* userData)
{
    if (vencOutFile.get() == nullptr) {
        HIAI_ENGINE_LOG(HIAI_VENC_CTL_ERROR, "get venc out file fail!");
        return;
    }
    // Append the encoded stream data delivered by the encoder to the output file.
    fwrite(vencOutMsg->outputData, 1, vencOutMsg->outputDataSize, vencOutFile.get());
    fflush(vencOutFile.get());
}

/*
 * Use the VENC interface to implement the basic encoding flow:
 * CreateVenc -> RunVenc (once per frame) -> DestroyVenc.
 */
void TEST_VENC()
{
    // Close the files only if they were actually opened (fclose(nullptr) is undefined behavior).
    auto fileCloser = [](FILE* fp) { if (fp != nullptr) { fclose(fp); } };
    std::shared_ptr<FILE> fpIn(fopen(g_inFileName, "rb"), fileCloser);
    vencOutFile.reset(fopen(vencOutFileName.c_str(), "wb"), fileCloser);
    if (fpIn.get() == nullptr || vencOutFile.get() == nullptr) {
        HIAI_ENGINE_LOG(HIAI_OPEN_FILE_ERROR, "open venc in/out file failed.");
        return;
    }

    // Determine the total size of the input YUV file.
    fseek(fpIn.get(), 0, SEEK_END);
    uint32_t fileLen = ftell(fpIn.get());
    fseek(fpIn.get(), 0, SEEK_SET);

    // Configure the encoder and create an encoder instance.
    struct VencConfig vencConfig;
    vencConfig.width = g_width;
    vencConfig.height = g_high;
    vencConfig.codingType = g_format;
    vencConfig.yuvStoreType = g_yuvStoreType;
    vencConfig.keyFrameInterval = 16;
    vencConfig.vencOutMsgCallBack = VencCallBackDumpFile;
    vencConfig.userData = nullptr;
    int32_t vencHandle = CreateVenc(&vencConfig);
    if (vencHandle == -1) {
        HIAI_ENGINE_LOG(HIAI_VENC_CTL_ERROR, "CreateVenc fail!");
        return;
    }

    // Read one YUV420 frame (width * height * 3 / 2 bytes) per RunVenc call.
    uint32_t inDataLenMaxOnce = g_width * g_high * 3 / 2;
    std::shared_ptr<char> inBuffer(static_cast<char*>(malloc(inDataLenMaxOnce)), free);
    if (inBuffer.get() == nullptr) {
        HIAI_ENGINE_LOG(HIAI_OPEN_FILE_ERROR, "alloc input buffer failed");
        DestroyVenc(vencHandle);
        return;
    }

    uint32_t inDataUnhandledLen = fileLen;
    auto start = std::chrono::system_clock::now();
    uint32_t frameCount = 0;
    while (inDataUnhandledLen > 0) {
        uint32_t inDataLen = inDataUnhandledLen;
        if (inDataUnhandledLen > inDataLenMaxOnce) {
            inDataLen = inDataLenMaxOnce;
        }
        inDataUnhandledLen -= inDataLen;
        uint32_t readLen = fread(inBuffer.get(), 1, inDataLen, fpIn.get());
        if (readLen != inDataLen) {
            HIAI_ENGINE_LOG(HIAI_OPEN_FILE_ERROR, "error in read input file");
            DestroyVenc(vencHandle);
            return;
        }

        // Submit one frame to the encoder; encoded output is delivered to the callback.
        struct VencInMsg vencInMsg;
        vencInMsg.inputData = inBuffer.get();
        vencInMsg.inputDataSize = inDataLen;
        vencInMsg.keyFrameInterval = 16;
        vencInMsg.forceIFrame = 0;
        vencInMsg.eos = 0;
        if (RunVenc(vencHandle, &vencInMsg) == -1) {
            HIAI_ENGINE_LOG(HIAI_VENC_CTL_ERROR, "call video encode fail");
            break;
        }
        ++frameCount;
    }
    auto end = std::chrono::system_clock::now();
    auto duration = std::chrono::duration_cast<std::chrono::microseconds>(end - start);
    size_t timeCount = static_cast<size_t>(duration.count());
    HIAI_ENGINE_LOG("Total frame count: %u", frameCount);
    HIAI_ENGINE_LOG("Time cost: %lu us", timeCount);
    HIAI_ENGINE_LOG("FPS: %lf", frameCount * 1.0 / (timeCount / 1000000.0));

    // Release the encoder instance.
    DestroyVenc(vencHandle);
    return;
}
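The example above leaves forceIFrame at 0 for every frame and relies on keyFrameInterval (16) to place key frames. If a key frame is wanted at a specific position, for example at a scene change, the forceIFrame field of VencInMsg can presumably be used for that frame; the accepted values and exact behavior are assumptions here, so verify them against the VENC API reference. A hedged variation of the per-frame setup inside the loop might look like this, where sceneChangeFrame is a hypothetical frame index:

// Hypothetical variation of the per-frame setup in the loop above.
// Assumption: a non-zero forceIFrame requests that this frame be encoded
// as an I-frame regardless of keyFrameInterval.
struct VencInMsg vencInMsg;
vencInMsg.inputData = inBuffer.get();
vencInMsg.inputDataSize = inDataLen;
vencInMsg.keyFrameInterval = 16;
vencInMsg.forceIFrame = (frameCount == sceneChangeFrame) ? 1 : 0;  // sceneChangeFrame: hypothetical index
vencInMsg.eos = 0;
if (RunVenc(vencHandle, &vencInMsg) == -1) {
    HIAI_ENGINE_LOG(HIAI_VENC_CTL_ERROR, "call video encode fail");
}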