//
// libtgvoip is free and unencumbered public domain software.
// For more information, see http://unlicense.org or the UNLICENSE file
// you should have received with this source code distribution.
//

#include <VideoToolbox/VideoToolbox.h>
#if TARGET_OS_IPHONE
#include <UIKit/UIKit.h>
#endif
#include <algorithm>
#include "SampleBufferDisplayLayerRenderer.h"
#include "../../PrivateDefines.h"
#include "../../logging.h"
#include "TGVVideoRenderer.h"

using namespace tgvoip;
using namespace tgvoip::video;

SampleBufferDisplayLayerRenderer::SampleBufferDisplayLayerRenderer(TGVVideoRenderer* renderer) : renderer(renderer){

}

SampleBufferDisplayLayerRenderer::~SampleBufferDisplayLayerRenderer(){

}

void SampleBufferDisplayLayerRenderer::Reset(uint32_t codec, unsigned int width, unsigned int height, std::vector<Buffer>& csd){
	LOGI("video renderer reset: %d x %d", width, height);
	if(formatDesc){
		CFRelease(formatDesc);
	}
	if(codec==CODEC_AVC){
		if(csd.size()!=2){
			LOGE("H264 requires exactly 2 CSD buffers");
			return;
		}
		// Skip the 4-byte Annex B start codes in front of the SPS and PPS.
		const uint8_t* params[]={*csd[0]+4, *csd[1]+4};
		size_t paramSizes[]={csd[0].Length()-4, csd[1].Length()-4};
		OSStatus status=CMVideoFormatDescriptionCreateFromH264ParameterSets(NULL, 2, params, paramSizes, 4, &formatDesc);
		if(status!=noErr){
			LOGE("CMVideoFormatDescriptionCreateFromH264ParameterSets failed: %d", status);
			return;
		}
		CGRect rect=CMVideoFormatDescriptionGetCleanAperture(formatDesc, true);
		LOGI("size from formatDesc: %f x %f", rect.size.width, rect.size.height);
	}else if(codec==CODEC_HEVC){
		if(@available(iOS 11.0, *)){
			if(csd.size()!=1){
				LOGE("HEVC requires exactly 1 CSD buffer");
				return;
			}
			// The single CSD buffer holds the VPS, SPS and PPS back to back, each preceded
			// by a 00 00 00 01 start code. Find where each parameter set's payload begins.
			int offsets[]={0, 0, 0};
			Buffer& buf=csd[0];
			int currentNAL=0;
			for(int i=0;i<(int)buf.Length()-4 && currentNAL<3;i++){
				if(buf[i]==0 && buf[i+1]==0 && buf[i+2]==0 && buf[i+3]==1){
					offsets[currentNAL]=i+4;
					currentNAL++;
				}
			}
			if(currentNAL!=3){
				LOGE("HEVC CSD buffer does not contain 3 parameter sets");
				return;
			}
			const uint8_t* params[]={*buf+offsets[0], *buf+offsets[1], *buf+offsets[2]};
			size_t paramSizes[]={(size_t)(offsets[1]-offsets[0]-4), (size_t)(offsets[2]-offsets[1]-4), buf.Length()-(size_t)offsets[2]};
			OSStatus status=CMVideoFormatDescriptionCreateFromHEVCParameterSets(NULL, 3, params, paramSizes, 4, NULL, &formatDesc);
			if(status!=noErr){
				LOGE("CMVideoFormatDescriptionCreateFromHEVCParameterSets failed: %d", status);
				return;
			}
		}
	}
	needReset=true;
}

void SampleBufferDisplayLayerRenderer::DecodeAndDisplay(Buffer frame, uint32_t pts){
	// Convert the Annex B frame (NALs separated by 00 00 00 01 start codes) into the
	// length-prefixed layout that CMSampleBuffer expects. First find each NAL's payload offset.
	std::vector<size_t> nalStartOffsets;
	uint8_t* _data=*frame;
	for(uint32_t offset=0;offset<frame.Length()-4;offset++){
		if(_data[offset]==0 && _data[offset+1]==0 && _data[offset+2]==0 && _data[offset+3]==1){
			nalStartOffsets.push_back(offset+4);
			offset+=3;
		}
	}
	BufferOutputStream out(frame.Length());
	for(size_t i=0;i<nalStartOffsets.size();i++){
		// Payload length of this NAL, excluding the next NAL's start code.
		size_t length=(i==nalStartOffsets.size()-1 ? frame.Length() : nalStartOffsets[i+1]-4)-nalStartOffsets[i];
		uint8_t lenBytes[]={(uint8_t)(length >> 24), (uint8_t)(length >> 16), (uint8_t)(length >> 8), (uint8_t)length};
		out.WriteBytes(lenBytes, 4);
		out.WriteBytes(frame, nalStartOffsets[i], length);
	}
	// The block buffer wraps out's memory without copying (kCFAllocatorNull), so the
	// sample buffer must be enqueued before out goes out of scope.
	CMBlockBufferRef blockBuffer;
	OSStatus status=CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, out.GetBuffer(), out.GetLength(), kCFAllocatorNull, NULL, 0, out.GetLength(), 0, &blockBuffer);
	if(status!=noErr){
		LOGE("CMBlockBufferCreateWithMemoryBlock failed: %d", status);
		return;
	}
	CMSampleBufferRef sampleBuffer;
	status=CMSampleBufferCreate(kCFAllocatorDefault, blockBuffer, true, NULL, NULL, formatDesc, 1, 0, NULL, 0, NULL, &sampleBuffer);
	if(status!=noErr){
		LOGE("CMSampleBufferCreate failed: %d", status);
		return;
	}
	CFRelease(blockBuffer);
	CFArrayRef attachments=CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true);
	CFMutableDictionaryRef dict=(CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);
	CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
	[renderer _enqueueBuffer:sampleBuffer reset:needReset];
	needReset=false;
	CFRelease(sampleBuffer);
}

void SampleBufferDisplayLayerRenderer::SetStreamEnabled(bool enabled){
	if(enabled!=streamEnabled){
		streamEnabled=enabled;
		if(enabled){
			[renderer _setResumed];
		}else{
			[renderer _setStopped];
		}
	}
}

int SampleBufferDisplayLayerRenderer::GetMaximumResolution(){
#if TARGET_OS_IPHONE
	CGRect screenSize=[UIScreen mainScreen].nativeBounds;
	CGFloat minSize=std::min(screenSize.size.width, screenSize.size.height);
	if(minSize>720.f){
		return INIT_VIDEO_RES_1080;
	}else if(minSize>480.f){
		return INIT_VIDEO_RES_720;
	}else{
		return INIT_VIDEO_RES_480;
	}
#else // OS X
	// TODO support OS X
#endif
	return INIT_VIDEO_RES_1080;
}

void SampleBufferDisplayLayerRenderer::SetRotation(uint16_t rotation){
	[renderer _setRotation:rotation];
}
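// Remote-side pause differs from SetStreamEnabled: the stream stays enabled while
// paused, so the layer is only resumed if the stream has not been stopped meanwhile.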
void SampleBufferDisplayLayerRenderer::SetStreamPaused(bool paused){
	if(paused){
		[renderer _setPaused];
	}else if(streamEnabled){
		[renderer _setResumed];
	}
}

std::vector<uint32_t> SampleBufferDisplayLayerRenderer::GetAvailableDecoders(){
	std::vector<uint32_t> res;
	res.push_back(CODEC_AVC);
	if(@available(iOS 11.0, *)){
		if(VTIsHardwareDecodeSupported(kCMVideoCodecType_HEVC)){
			res.push_back(CODEC_HEVC);
		}
	}
	return res;
}