Add support for PIP (picture-in-picture) feature

Add a new overlay state and the required implementation to support
playback of two videos using the overlay.
Add a new class, VideoPIP, to handle the PIP feature.

Change-Id: Ibce7044bc1216cba77bd952e191aed5472ed6737
Authored by Prashant Surana on 2012-08-13 11:49:01 +05:30; committed by Andrew Sutherland
parent 77e0c7b380
commit cc5cb1781c
8 changed files with 459 additions and 1 deletion
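
For orientation, the per-frame flow this change adds can be summarized as below. This is an illustrative sketch using the names introduced in the diffs that follow, not the literal hwc.cpp code:

// Sketch of the per-frame PIP flow (simplified; names are from this commit).
// In hwc_prepare():
VideoPIP::reset();                    // clear the per-frame static state
getLayerStats(ctx, list);             // counts YUV layers, records yuvLayerIndex and
                                      // pipLayerIndex, then calls VideoPIP::setStats()
if (VideoPIP::prepare(ctx, list))     // two YUV layers and no HDMI -> PIP overlay state
    ctx->overlayInUse = true;         // both video layers get marked HWC_OVERLAY
// In hwc_set():
VideoPIP::draw(ctx, list);            // queue both buffers: OV_PIPE0 (primary), OV_PIPE1 (PIP)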


@@ -13,6 +13,7 @@ LOCAL_CFLAGS := $(common_flags) -DLOG_TAG=\"hwcomposer\"
LOCAL_ADDITIONAL_DEPENDENCIES := $(common_deps)
LOCAL_SRC_FILES := hwc.cpp \
hwc_video.cpp \
hwc_pip.cpp \
hwc_utils.cpp \
hwc_uimirror.cpp \
hwc_uevents.cpp \


@@ -28,6 +28,7 @@
#include "hwc_utils.h"
#include "hwc_qbuf.h"
#include "hwc_video.h"
#include "hwc_pip.h"
#include "hwc_uimirror.h"
#include "hwc_copybit.h"
#include "hwc_external.h"
@@ -86,6 +87,7 @@ static int hwc_prepare(hwc_composer_device_t *dev, hwc_layer_list_t* list)
if (LIKELY(list)) {
//reset for this draw round
VideoOverlay::reset();
VideoPIP::reset();
ExtOnly::reset();
getLayerStats(ctx, list);
@@ -94,6 +96,8 @@ static int hwc_prepare(hwc_composer_device_t *dev, hwc_layer_list_t* list)
if(VideoOverlay::prepare(ctx, list)) {
ctx->overlayInUse = true;
//Nothing here
} else if(VideoPIP::prepare(ctx, list)) {
ctx->overlayInUse = true;
} else if(ExtOnly::prepare(ctx, list)) {
ctx->overlayInUse = true;
} else if(UIMirrorOverlay::prepare(ctx, list)) {
@@ -171,6 +175,7 @@ static int hwc_set(hwc_composer_device_t *dev,
hwc_context_t* ctx = (hwc_context_t*)(dev);
if (LIKELY(list)) {
VideoOverlay::draw(ctx, list);
VideoPIP::draw(ctx,list);
ExtOnly::draw(ctx, list);
CopyBit::draw(ctx, list, (EGLDisplay)dpy, (EGLSurface)sur);
MDPComp::draw(ctx, list);

libhwcomposer/hwc_pip.cpp (new file, 331 lines)

@@ -0,0 +1,331 @@
/*
* Copyright (C) 2010 The Android Open Source Project
* Copyright (C) 2012, Code Aurora Forum. All rights reserved.
* Not a Contribution, Apache license notifications and license are retained
* for attribution purposes only.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define VIDEOPIP_DEBUG 0
#include <overlay.h>
#include "hwc_qbuf.h"
#include "hwc_external.h"
#include "hwc_pip.h"
namespace qhwc {
#define FINAL_TRANSFORM_MASK 0x000F
//Static Members
ovutils::eOverlayState VideoPIP::sState = ovutils::OV_CLOSED;
int VideoPIP::sYuvCount = 0;
int VideoPIP::sYuvLayerIndex = -1;
bool VideoPIP::sIsYuvLayerSkip = false;
int VideoPIP::sPIPLayerIndex = -1;
bool VideoPIP::sIsModeOn = false;
//Cache stats, figure out the state, config overlay
bool VideoPIP::prepare(hwc_context_t *ctx, hwc_layer_list_t *list) {
sIsModeOn = false;
if(!ctx->mMDP.hasOverlay) {
ALOGD_IF(VIDEOPIP_DEBUG,"%s, this hw doesn't support overlay", __FUNCTION__);
return false;
}
if(sYuvLayerIndex == -1 || sPIPLayerIndex == -1) {
return false;
}
chooseState(ctx);
//if the state chosen above is CLOSED, skip this block.
if(sState != ovutils::OV_CLOSED) {
hwc_layer_t *yuvLayer = &list->hwLayers[sYuvLayerIndex];
hwc_layer_t *pipLayer = NULL;
if(sPIPLayerIndex != -1) {
pipLayer = &list->hwLayers[sPIPLayerIndex];
}
if(configure(ctx, yuvLayer, pipLayer)) {
markFlags(&list->hwLayers[sYuvLayerIndex]);
if(sPIPLayerIndex != -1) {
//Mark PIP layer as HWC_OVERLAY
markFlags(&list->hwLayers[sPIPLayerIndex]);
}
sIsModeOn = true;
}
}
ALOGD_IF(VIDEOPIP_DEBUG, "%s: stats: yuvCount = %d, yuvIndex = %d,"
"IsYuvLayerSkip = %d, pipLayerIndex = %d, IsModeOn = %d",
__FUNCTION__, sYuvCount, sYuvLayerIndex,
sIsYuvLayerSkip, sPIPLayerIndex, sIsModeOn);
return sIsModeOn;
}
void VideoPIP::chooseState(hwc_context_t *ctx) {
ALOGD_IF(VIDEOPIP_DEBUG, "%s: old state = %s", __FUNCTION__,
ovutils::getStateString(sState));
ovutils::eOverlayState newState = ovutils::OV_CLOSED;
//Support two video layers (PIP)
if(sYuvCount == 2 && !ctx->mExtDisplay->getExternalDisplay()) {
/* PIP: Picture in picture
If HDMI is not connected as secondary and there are two videos
we can use two VG pipes for video playback. */
newState = ovutils::OV_2D_PIP_VIDEO_ON_PANEL;
}
sState = newState;
ALOGD_IF(VIDEOPIP_DEBUG, "%s: new chosen state = %s", __FUNCTION__,
ovutils::getStateString(sState));
}
void VideoPIP::markFlags(hwc_layer_t *layer) {
switch(sState) {
case ovutils::OV_2D_PIP_VIDEO_ON_PANEL:
layer->compositionType = HWC_OVERLAY;
break;
default:
break;
}
}
/* Helpers */
bool configPrimaryVideo(hwc_context_t *ctx, hwc_layer_t *layer) {
overlay::Overlay& ov = *(ctx->mOverlay);
private_handle_t *hnd = (private_handle_t *)layer->handle;
ovutils::Whf info(hnd->width, hnd->height, hnd->format, hnd->size);
ovutils::eMdpFlags mdpFlags = ovutils::OV_MDP_FLAGS_NONE;
if (hnd->flags & private_handle_t::PRIV_FLAGS_SECURE_BUFFER) {
ovutils::setMdpFlags(mdpFlags,
ovutils::OV_MDP_SECURE_OVERLAY_SESSION);
}
ovutils::eIsFg isFgFlag = ovutils::IS_FG_OFF;
if (ctx->numHwLayers == 1) {
isFgFlag = ovutils::IS_FG_SET;
}
ovutils::PipeArgs parg(mdpFlags,
info,
ovutils::ZORDER_0,
isFgFlag,
ovutils::ROT_FLAG_DISABLED);
ovutils::PipeArgs pargs[ovutils::MAX_PIPES] = { parg, parg, parg };
ov.setSource(pargs, ovutils::OV_PIPE0);
hwc_rect_t sourceCrop = layer->sourceCrop;
// x,y,w,h
ovutils::Dim dcrop(sourceCrop.left, sourceCrop.top,
sourceCrop.right - sourceCrop.left,
sourceCrop.bottom - sourceCrop.top);
ovutils::Dim dpos;
hwc_rect_t displayFrame = layer->displayFrame;
dpos.x = displayFrame.left;
dpos.y = displayFrame.top;
dpos.w = (displayFrame.right - displayFrame.left);
dpos.h = (displayFrame.bottom - displayFrame.top);
//Calculate the rect for primary based on whether the supplied position
//is within or outside bounds.
const int fbWidth =
ovutils::FrameBufferInfo::getInstance()->getWidth();
const int fbHeight =
ovutils::FrameBufferInfo::getInstance()->getHeight();
if( displayFrame.left < 0 ||
displayFrame.top < 0 ||
displayFrame.right > fbWidth ||
displayFrame.bottom > fbHeight) {
calculate_crop_rects(sourceCrop, displayFrame, fbWidth, fbHeight);
//Update calculated width and height
dcrop.w = sourceCrop.right - sourceCrop.left;
dcrop.h = sourceCrop.bottom - sourceCrop.top;
dpos.x = displayFrame.left;
dpos.y = displayFrame.top;
dpos.w = displayFrame.right - displayFrame.left;
dpos.h = displayFrame.bottom - displayFrame.top;
}
//Only for Primary
ov.setCrop(dcrop, ovutils::OV_PIPE0);
int transform = layer->transform & FINAL_TRANSFORM_MASK;
ovutils::eTransform orient =
static_cast<ovutils::eTransform>(transform);
ov.setTransform(orient, ovutils::OV_PIPE0);
ov.setPosition(dpos, ovutils::OV_PIPE0);
if (!ov.commit(ovutils::OV_PIPE0)) {
ALOGE("%s: commit fails", __FUNCTION__);
return false;
}
return true;
}
// Configure the second video in pip scenario
bool configPIPVideo(hwc_context_t *ctx, hwc_layer_t *layer) {
overlay::Overlay& ov = *(ctx->mOverlay);
private_handle_t *hnd = (private_handle_t *)layer->handle;
ovutils::Whf info(hnd->width, hnd->height, hnd->format, hnd->size);
ovutils::eMdpFlags mdpFlags = ovutils::OV_MDP_FLAGS_NONE;
if (hnd->flags & private_handle_t::PRIV_FLAGS_SECURE_BUFFER) {
ovutils::setMdpFlags(mdpFlags,
ovutils::OV_MDP_SECURE_OVERLAY_SESSION);
}
ovutils::eIsFg isFgFlag = ovutils::IS_FG_OFF;
//Set z-order 1 since this video is on top of the
//primary video
ovutils::PipeArgs parg(mdpFlags,
info,
ovutils::ZORDER_1,
isFgFlag,
ovutils::ROT_FLAG_DISABLED);
ovutils::PipeArgs pargs[ovutils::MAX_PIPES] = { parg, parg, parg };
// Use pipe 1, pipe 0 is used for primary video
ov.setSource(pargs, ovutils::OV_PIPE1);
hwc_rect_t sourceCrop = layer->sourceCrop;
// x,y,w,h
ovutils::Dim dcrop(sourceCrop.left, sourceCrop.top,
sourceCrop.right - sourceCrop.left,
sourceCrop.bottom - sourceCrop.top);
ovutils::Dim dpos;
hwc_rect_t displayFrame = layer->displayFrame;
dpos.x = displayFrame.left;
dpos.y = displayFrame.top;
dpos.w = (displayFrame.right - displayFrame.left);
dpos.h = (displayFrame.bottom - displayFrame.top);
//Calculate the rect for the PIP video based on whether the supplied
//position is within or outside bounds.
const int fbWidth =
ovutils::FrameBufferInfo::getInstance()->getWidth();
const int fbHeight =
ovutils::FrameBufferInfo::getInstance()->getHeight();
if( displayFrame.left < 0 ||
displayFrame.top < 0 ||
displayFrame.right > fbWidth ||
displayFrame.bottom > fbHeight) {
calculate_crop_rects(sourceCrop, displayFrame, fbWidth, fbHeight);
//Update calculated width and height
dcrop.w = sourceCrop.right - sourceCrop.left;
dcrop.h = sourceCrop.bottom - sourceCrop.top;
dpos.x = displayFrame.left;
dpos.y = displayFrame.top;
dpos.w = displayFrame.right - displayFrame.left;
dpos.h = displayFrame.bottom - displayFrame.top;
}
//Crop for the PIP video pipe
ov.setCrop(dcrop, ovutils::OV_PIPE1);
int transform = layer->transform & FINAL_TRANSFORM_MASK;
ovutils::eTransform orient =
static_cast<ovutils::eTransform>(transform);
ov.setTransform(orient, ovutils::OV_PIPE1);
ov.setPosition(dpos, ovutils::OV_PIPE1);
if (!ov.commit(ovutils::OV_PIPE1)) {
ALOGE("%s: commit fails", __FUNCTION__);
return false;
}
return true;
}
bool VideoPIP::configure(hwc_context_t *ctx, hwc_layer_t *yuvLayer,
hwc_layer_t *pipLayer) {
bool ret = true;
if (LIKELY(ctx->mOverlay)) {
overlay::Overlay& ov = *(ctx->mOverlay);
// Set overlay state
ov.setState(sState);
switch(sState) {
case ovutils::OV_2D_PIP_VIDEO_ON_PANEL:
//Configure the primary or background video
ret &= configPrimaryVideo(ctx, yuvLayer);
//Configure the PIP video
ret &= configPIPVideo(ctx, pipLayer);
break;
default:
return false;
}
} else {
//Ov null
return false;
}
return ret;
}
bool VideoPIP::draw(hwc_context_t *ctx, hwc_layer_list_t *list)
{
if(!sIsModeOn || sYuvLayerIndex == -1 || sPIPLayerIndex == -1) {
return true;
}
private_handle_t *hnd = (private_handle_t *)
list->hwLayers[sYuvLayerIndex].handle;
// Lock this buffer for read.
ctx->qbuf->lockAndAdd(hnd);
private_handle_t *piphnd = NULL;
piphnd = (private_handle_t *)list->hwLayers[sPIPLayerIndex].handle;
ctx->qbuf->lockAndAdd(piphnd);
bool ret = true;
overlay::Overlay& ov = *(ctx->mOverlay);
ovutils::eOverlayState state = ov.getState();
switch (state) {
case ovutils::OV_2D_PIP_VIDEO_ON_PANEL:
// Play first video (background)
if (!ov.queueBuffer(hnd->fd, hnd->offset, ovutils::OV_PIPE0)) {
ALOGE("%s: queueBuffer failed for primary video", __FUNCTION__);
ret = false;
}
//Play pip video
if (piphnd && !ov.queueBuffer(piphnd->fd, piphnd->offset,
ovutils::OV_PIPE1)) {
ALOGE("%s: queueBuffer failed for pip video", __FUNCTION__);
ret = false;
}
break;
default:
ALOGE("%s Unused state %s", __FUNCTION__,
ovutils::getStateString(state));
break;
}
return ret;
}
}; //namespace qhwc
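
Note: both configPrimaryVideo() and configPIPVideo() above call calculate_crop_rects() when the destination rectangle lies partly outside the framebuffer. The standalone sketch below only illustrates the general idea (clamp the destination to the framebuffer and trim the source crop proportionally); it is an assumed simplification, not the actual calculate_crop_rects() from hwc_utils.

// Simplified off-screen clipping sketch (assumed behaviour, not the real helper).
struct Rect { int l, t, r, b; };

static void clipToFb(Rect &crop, Rect &dst, int fbW, int fbH) {
    // Source pixels per destination pixel in each axis.
    const float sx = float(crop.r - crop.l) / float(dst.r - dst.l);
    const float sy = float(crop.b - crop.t) / float(dst.b - dst.t);
    if (dst.l < 0)   { crop.l += int(-dst.l * sx);        dst.l = 0;   }
    if (dst.t < 0)   { crop.t += int(-dst.t * sy);        dst.t = 0;   }
    if (dst.r > fbW) { crop.r -= int((dst.r - fbW) * sx); dst.r = fbW; }
    if (dst.b > fbH) { crop.b -= int((dst.b - fbH) * sy); dst.b = fbH; }
}
// Example: on a 1280x720 panel, dst = {-100, 0, 1180, 720} loses 100 destination
// pixels (and the corresponding source pixels) from its left edge.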

libhwcomposer/hwc_pip.h (new file, 83 lines)

@@ -0,0 +1,83 @@
/*
* Copyright (C) 2010 The Android Open Source Project
* Copyright (C) 2012, Code Aurora Forum. All rights reserved.
* Not a Contribution, Apache license notifications and license are retained
* for attribution purposes only.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HWC_PIP_H
#define HWC_PIP_H
#include "hwc_utils.h"
#define LIKELY( exp ) (__builtin_expect( (exp) != 0, true ))
#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false ))
namespace qhwc {
//Feature for using the overlay to display two videos (picture-in-picture).
class VideoPIP {
public:
//Sets up members and prepares overlay if conditions are met
static bool prepare(hwc_context_t *ctx, hwc_layer_list_t *list);
//Draws layer if this feature is on
static bool draw(hwc_context_t *ctx, hwc_layer_list_t *list);
//Receives data from hwc
static void setStats(int yuvCount, int yuvLayerIndex, bool isYuvLayerSkip,
int pipLayerIndex);
//resets values
static void reset();
private:
//Choose an appropriate overlay state based on conditions
static void chooseState(hwc_context_t *ctx);
//Configures overlay pipes for the primary and PIP videos
static bool configure(hwc_context_t *ctx, hwc_layer_t *yuvlayer,
hwc_layer_t *pipLayer);
//Marks layer flags if this feature is used
static void markFlags(hwc_layer_t *layer);
//returns yuv count
static int getYuvCount();
//The chosen overlay state.
static ovutils::eOverlayState sState;
//Number of yuv layers in this drawing round
static int sYuvCount;
//Index of the primary (background) YUV layer
static int sYuvLayerIndex;
//Flags if a yuv layer is animating or below something that is animating
static bool sIsYuvLayerSkip;
//Holds the PIP layer index in case of two videos, -1 by default
static int sPIPLayerIndex;
//Flags if this feature is on.
static bool sIsModeOn;
};
inline void VideoPIP::setStats(int yuvCount, int yuvLayerIndex,
bool isYuvLayerSkip, int pipLayerIndex) {
sYuvCount = yuvCount;
sYuvLayerIndex = yuvLayerIndex;
sIsYuvLayerSkip = isYuvLayerSkip;
sPIPLayerIndex = pipLayerIndex;
}
inline int VideoPIP::getYuvCount() { return sYuvCount; }
inline void VideoPIP::reset() {
sYuvCount = 0;
sYuvLayerIndex = -1;
sIsYuvLayerSkip = false;
sPIPLayerIndex = -1;
sIsModeOn = false;
sState = ovutils::OV_CLOSED;
}
}; //namespace qhwc
#endif //HWC_PIP_H


@@ -22,6 +22,7 @@
#include "hwc_utils.h"
#include "mdp_version.h"
#include "hwc_video.h"
#include "hwc_pip.h"
#include "hwc_qbuf.h"
#include "hwc_copybit.h"
#include "hwc_external.h"
@@ -114,6 +115,7 @@ void getLayerStats(hwc_context_t *ctx, const hwc_layer_list_t *list)
//Video specific stats
int yuvCount = 0;
int yuvLayerIndex = -1;
int pipLayerIndex = -1; //2nd video in pip scenario
bool isYuvLayerSkip = false;
int skipCount = 0;
int ccLayerIndex = -1; //closed caption
@@ -127,7 +129,16 @@ void getLayerStats(hwc_context_t *ctx, const hwc_layer_list_t *list)
if (UNLIKELY(isYuvBuffer(hnd))) {
yuvCount++;
yuvLayerIndex = i;
if(yuvCount==1) {
//Set the primary video to the video layer in
//lower z-order
yuvLayerIndex = i;
}
if(yuvCount == 2) {
//In case of two videos, set the pipLayerIndex to the
//second video
pipLayerIndex = i;
}
//Animating
if (isSkipLayer(&list->hwLayers[i])) {
isYuvLayerSkip = true;
@@ -153,6 +164,8 @@ void getLayerStats(hwc_context_t *ctx, const hwc_layer_list_t *list)
VideoOverlay::setStats(yuvCount, yuvLayerIndex, isYuvLayerSkip,
ccLayerIndex);
VideoPIP::setStats(yuvCount, yuvLayerIndex, isYuvLayerSkip,
pipLayerIndex);
ExtOnly::setStats(extCount, extLayerIndex, isExtBlockPresent);
CopyBit::setStats(yuvCount, yuvLayerIndex, isYuvLayerSkip);
MDPComp::setStats(skipCount);


@@ -51,6 +51,7 @@ bool isStateValid(const utils::eOverlayState& st) {
case utils::OV_3D_VIDEO_ON_3D_PANEL:
case utils::OV_3D_VIDEO_ON_3D_TV:
case utils::OV_3D_VIDEO_ON_2D_PANEL_2D_TV:
case utils::OV_2D_PIP_VIDEO_ON_PANEL:
case utils::OV_UI_MIRROR:
case utils::OV_2D_TRUE_UI_MIRROR:
case utils::OV_BYPASS_1_LAYER:


@@ -197,6 +197,19 @@ template <> struct StateTraits<utils::OV_3D_VIDEO_ON_2D_PANEL_2D_TV>
typedef overlay::OverlayImpl<pipe0, pipe1> ovimpl;
};
template <> struct StateTraits<utils::OV_2D_PIP_VIDEO_ON_PANEL>
{
typedef overlay::GenericPipe<utils::PRIMARY> pipe0; //prim video
typedef overlay::GenericPipe<utils::PRIMARY> pipe1; //PIP video
typedef overlay::NullPipe pipe2; // place holder
typedef Rotator rot0;
typedef Rotator rot1;
typedef NullRotator rot2;
typedef overlay::OverlayImpl<pipe0, pipe1> ovimpl;
};
template <> struct StateTraits<utils::OV_UI_MIRROR>
{
typedef overlay::UIMirrorPipe pipe0;
@@ -329,6 +342,9 @@ inline OverlayImplBase* OverlayState::handleEvent(utils::eOverlayState toState,
newov = handle_from<utils::OV_3D_VIDEO_ON_2D_PANEL_2D_TV>(toState,
ov);
break;
case utils::OV_2D_PIP_VIDEO_ON_PANEL:
newov = handle_from<utils::OV_2D_PIP_VIDEO_ON_PANEL>(toState,ov);
break;
case utils::OV_UI_MIRROR:
newov = handle_from<utils::OV_UI_MIRROR>(toState, ov);
break;
@@ -385,6 +401,9 @@ inline OverlayImplBase* OverlayState::handle_from(utils::eOverlayState toState,
ov = handle_from_to<FROM_STATE,
utils::OV_3D_VIDEO_ON_2D_PANEL_2D_TV>(ov);
break;
case utils::OV_2D_PIP_VIDEO_ON_PANEL:
ov = handle_from_to<FROM_STATE, utils::OV_2D_PIP_VIDEO_ON_PANEL>(ov);
break;
case utils::OV_UI_MIRROR:
ov = handle_from_to<FROM_STATE, utils::OV_UI_MIRROR>(ov);
break;


@@ -389,6 +389,9 @@ enum eOverlayState{
/* 3D Video on two displays (panel and TV) */
OV_3D_VIDEO_ON_2D_PANEL_2D_TV,
/* PIP: two videos on the primary panel */
OV_2D_PIP_VIDEO_ON_PANEL,
/* UI Mirroring */
OV_UI_MIRROR,
OV_2D_TRUE_UI_MIRROR,
@@ -631,6 +634,8 @@ inline const char* getStateString(eOverlayState state){
return "OV_3D_VIDEO_ON_3D_TV";
case OV_3D_VIDEO_ON_2D_PANEL_2D_TV:
return "OV_3D_VIDEO_ON_2D_PANEL_2D_TV";
case OV_2D_PIP_VIDEO_ON_PANEL:
return "OV_2D_PIP_VIDEO_ON_PANEL";
case OV_UI_MIRROR:
return "OV_UI_MIRROR";
case OV_2D_TRUE_UI_MIRROR: