diff --git a/CMakeLists.txt b/CMakeLists.txt
index 9dd7dab9..62486669 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -580,13 +580,18 @@ elseif (SORA_TARGET_OS STREQUAL "jetson")
       PUBLIC
       nvv4l2
       nvv4lconvert
-      nvbuf_utils
       #nvbuf_fdmap
       #nvddk_vic
       #nvddk_2d_v2
       nvjpeg
       nvbufsurface
       nvbufsurftransform
+
+      # When migrating from nvbuf_utils to NvUtils, the documentation said to
+      # reference libnvbuf_utils.so, but no such .so file exists, so linking failed.
+      # Adding nvbuf_fdmap made the link succeed.
+      # https://developer.nvidia.com/sites/default/files/akamai/embedded/nvbuf_utils_to_nvutils_migration_guide.pdf
+      nvbuf_fdmap
       #nvos
     )
   endif(USE_JETSON_ENCODER)
diff --git a/src/hwenc_jetson/jetson_video_decoder.cpp b/src/hwenc_jetson/jetson_video_decoder.cpp
index 9bae83ea..1a8300da 100644
--- a/src/hwenc_jetson/jetson_video_decoder.cpp
+++ b/src/hwenc_jetson/jetson_video_decoder.cpp
@@ -402,7 +402,8 @@ void JetsonVideoDecoder::CaptureLoop() {
         memcpy(dst_data + j * dst_stride, (char*)src_data + j * params.pitch,
                params.width);
       }
-      NvBufferMemUnMap(dst_dma_fd_, i, &src_data);
+      // NvBufferMemUnMap(dst_dma_fd_, i, &src_data);
+      NvBufSurfaceUnMap(dst_surface, 0, i);
     }
 
     webrtc::VideoFrame decoded_image =
@@ -443,21 +444,26 @@ int JetsonVideoDecoder::SetCapture() {
                    << "x" << format.fmt.pix_mp.height;
 
   if (dst_dma_fd_ != -1) {
-    // TODO: Convert to NvBufSurface and then destroy?
+    // TODO: Convert to NvBufSurface and destroy it in the class?
     // NvBufferDestroy(dst_dma_fd_);
     // dst_dma_fd_ = -1;
   }
 
-  NvBufferCreateParams input_params = {0};
-  input_params.payloadType = NvBufferPayload_SurfArray;
-  input_params.width = capture_crop_->c.width;
-  input_params.height = capture_crop_->c.height;
-  input_params.layout = NvBufferLayout_Pitch;
-  input_params.colorFormat = NvBufferColorFormat_YUV420;
-  input_params.nvbuf_tag = NvBufferTag_VIDEO_DEC;
+  NvBufSurfaceAllocateParams input_params = {0};
+  input_params.params.width = capture_crop_->c.width;
+  input_params.params.height = capture_crop_->c.height;
+  input_params.params.layout = NVBUF_LAYOUT_PITCH;
+  input_params.params.colorFormat = NVBUF_COLOR_FORMAT_YUV420;
+  input_params.memtag = NvBufSurfaceTag_VIDEO_DEC;
+
+  NvBufSurface* dst_surface = new NvBufSurface;
+  dst_surface->memType = NVBUF_MEM_SURFACE_ARRAY;
+
+  NvBufSurface* dst_surfaces[] = {dst_surface};
 
-  ret = NvBufferCreateEx(&dst_dma_fd_, &input_params);
-  INIT_ERROR(ret == -1, "create dmabuf failed");
+  // TODO: Keep dst_surface as a class member
+  ret = NvBufSurfaceAllocate(dst_surfaces, 1, &input_params);
+  INIT_ERROR(ret == -1, "failed to NvBufSurfaceAllocate");
 
   decoder_->capture_plane.deinitPlane();
 
diff --git a/src/hwenc_jetson/jetson_video_encoder.cpp b/src/hwenc_jetson/jetson_video_encoder.cpp
index 98332cb7..be87cba5 100644
--- a/src/hwenc_jetson/jetson_video_encoder.cpp
+++ b/src/hwenc_jetson/jetson_video_encoder.cpp
@@ -748,11 +748,19 @@ int32_t JetsonVideoEncoder::Encode(
       input_frame.timestamp_us() % rtc::kNumMicrosecsPerSec;
 
   for (int i = 0; i < MAX_PLANES; i++) {
-    if (NvBufferMemSyncForDevice(buffer->planes[i].fd, i,
-                                 (void**)&buffer->planes[i].data) < 0) {
-      RTC_LOG(LS_ERROR) << "Failed to NvBufferMemSyncForDevice";
+    NvBufSurface* surface = NULL;
+    if (-1 == NvBufSurfaceFromFd(buffer->planes[i].fd, (void**)(&surface))) {
+      RTC_LOG(LS_ERROR) << __FUNCTION__
+                        << " Failed to get NvBufSurface from FD";
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+
+    if (NvBufSurfaceSyncForDevice(surface, 0, i) < 0) {
+      RTC_LOG(LS_ERROR) << "Failed to NvBufSurfaceSyncForDevice";
       return WEBRTC_VIDEO_CODEC_ERROR;
     }
+
+    // TODO: Release the NvBufSurface
   }
 
   if (encoder_->output_plane.qBuffer(v4l2_buf, nullptr) < 0) {
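
For reference, below is a minimal, self-contained sketch of the full NvBufSurface lifecycle this patch migrates toward (allocate, map, sync, unmap, destroy), reusing the allocation pattern from SetCapture() above. It assumes the JetPack 5 Jetson Multimedia API (nvbufsurface.h); the fixed 1280x720 YUV420 size and the AllocateFillAndRelease() helper are illustrative only, and the closing NvBufSurfaceDestroy() call is the release step the in-diff TODOs still leave open.

#include <cstring>

#include <nvbufsurface.h>

bool AllocateFillAndRelease() {
  // Same allocation parameters as SetCapture(), with a fixed size for the sketch.
  NvBufSurfaceAllocateParams input_params = {0};
  input_params.params.width = 1280;
  input_params.params.height = 720;
  input_params.params.layout = NVBUF_LAYOUT_PITCH;
  input_params.params.colorFormat = NVBUF_COLOR_FORMAT_YUV420;
  input_params.memtag = NvBufSurfaceTag_VIDEO_DEC;

  NvBufSurface* surface = new NvBufSurface;
  surface->memType = NVBUF_MEM_SURFACE_ARRAY;

  NvBufSurface* surfaces[] = {surface};
  if (NvBufSurfaceAllocate(surfaces, 1, &input_params) == -1) {
    delete surface;
    return false;
  }
  surface->numFilled = 1;

  // Map plane 0 of batch entry 0 for CPU access, write into it, then sync
  // the CPU writes back to the device before unmapping: the same
  // map/sync/unmap trio the decoder and encoder paths use.
  if (NvBufSurfaceMap(surface, 0, 0, NVBUF_MAP_READ_WRITE) == 0) {
    NvBufSurfaceParams& p = surface->surfaceList[0];
    std::memset(p.mappedAddr.addr[0], 0,
                (size_t)p.planeParams.pitch[0] * p.planeParams.height[0]);
    NvBufSurfaceSyncForDevice(surface, 0, 0);
    NvBufSurfaceUnMap(surface, 0, 0);
  }

  // NvBufSurfaceDestroy is the NvUtils counterpart of NvBufferDestroy in the
  // linked migration guide; this is the release step the TODOs above point at.
  NvBufSurfaceDestroy(surface);
  return true;
}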