hwc: Add VideoOverlay class

Add a VideoOverlay class for displaying videos via the overlay.

Change-Id: I161d1732fdf8336d0b6dcf5326e6adb7514a2d02
Saurabh Shah 2012-07-10 18:33:17 -07:00 committed by Naseer Ahmed
parent ac01712091
commit 5704501325
7 changed files with 412 additions and 330 deletions
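The new class is driven from hwc in three steps: hwc_utils.cpp feeds it per-frame layer statistics, hwc_prepare() asks it to configure the overlay, and hwc_set() asks it to queue buffers. A condensed sketch of that call flow, pieced together from the hwc.cpp and hwc_utils.cpp hunks below (illustration only, not code from the commit):

    // hwc_prepare(): gather stats, then let the feature claim the frame
    getLayerStats(ctx, list);               // calls VideoOverlay::setStats(yuvCount, yuvLayerIndex, isYuvLayerSkip)
    if (VideoOverlay::prepare(ctx, list)) { // picks an overlay state, configures pipes, marks HWC_OVERLAY
        ctx->overlayInUse = true;
    }

    // hwc_set(): queue the YUV buffer to whichever pipes were configured
    VideoOverlay::draw(ctx, list);
    if (!ctx->overlayInUse)
        ctx->mOverlay->setState(ovutils::OV_CLOSED);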

libhwcomposer/Android.mk

@@ -9,6 +9,6 @@ LOCAL_SHARED_LIBRARIES := $(common_libs) libEGL liboverlay libgenlock \
libqdutils
LOCAL_CFLAGS := $(common_flags) -DLOG_TAG=\"hwcomposer\"
LOCAL_ADDITIONAL_DEPENDENCIES := $(common_deps)
LOCAL_SRC_FILES := hwc.cpp hwc_overlay.cpp hwc_utils.cpp
LOCAL_SRC_FILES := hwc.cpp hwc_video.cpp hwc_utils.cpp
include $(BUILD_SHARED_LIBRARY)

libhwcomposer/hwc.cpp

@@ -23,6 +23,7 @@
#include <EGL/egl.h>
#include "hwc_utils.h"
#include "hwc_video.h"
using namespace qhwc;
@@ -65,19 +66,19 @@ static void hwc_registerProcs(struct hwc_composer_device* dev,
static int hwc_prepare(hwc_composer_device_t *dev, hwc_layer_list_t* list)
{
hwc_context_t* ctx = (hwc_context_t*)(dev);
ctx->overlayInUse = false;
//Prepare is called after a vsync, so unlock previous buffers here.
ctx->qbuf->unlockAllPrevious();
if (LIKELY(list)) {
getLayerStats(ctx, list);
cleanOverlays(ctx);
for (int i=list->numHwLayers-1; i >= 0 ; i--) {
private_handle_t *hnd =
(private_handle_t *)list->hwLayers[i].handle;
if (isSkipLayer(&list->hwLayers[i])) {
break;
} else if(isYuvBuffer(hnd)) {
handleYUV(ctx,&list->hwLayers[i]);
} else {
list->hwLayers[i].compositionType = HWC_FRAMEBUFFER;
}
if(VideoOverlay::prepare(ctx, list)) {
ctx->overlayInUse = true;
//Nothing here
} else if (0) {
//Other features
ctx->overlayInUse = true;
}
}
return 0;
@@ -91,21 +92,18 @@ static int hwc_set(hwc_composer_device_t *dev,
int ret = 0;
hwc_context_t* ctx = (hwc_context_t*)(dev);
if (LIKELY(list)) {
for (size_t i=0; i<list->numHwLayers; i++) {
if (list->hwLayers[i].flags & HWC_SKIP_LAYER) {
continue;
} else if (list->hwLayers[i].compositionType == HWC_OVERLAY) {
drawLayerUsingOverlay(ctx, &(list->hwLayers[i]));
}
}
VideoOverlay::draw(ctx, list);
//XXX: Handle vsync with FBIO_WAITFORVSYNC ioctl
//All other operations (including pan display) should be NOWAIT
EGLBoolean success = eglSwapBuffers((EGLDisplay)dpy, (EGLSurface)sur);
} else {
//XXX: put in a wrapper for non overlay targets
setOverlayState(ctx, ovutils::OV_CLOSED);
ctx->mOverlay->setState(ovutils::OV_CLOSED);
ctx->qbuf->unlockAllPrevious();
}
ctx->qbuf->unlockAllPrevious();
if(!ctx->overlayInUse)
ctx->mOverlay->setState(ovutils::OV_CLOSED);
return ret;
}

libhwcomposer/hwc_overlay.cpp (deleted)

@@ -1,265 +0,0 @@
/*
* Copyright (C) 2010 The Android Open Source Project
* Copyright (C) 2012, Code Aurora Forum. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "hwc_utils.h"
#define FINAL_TRANSFORM_MASK 0x000F
namespace qhwc {
// Determine overlay state based on decoded video info
static ovutils::eOverlayState determineOverlayState(hwc_context_t* ctx,
uint32_t bypassLayer,
uint32_t format)
{
ovutils::eOverlayState state = ovutils::OV_CLOSED;
// Sanity check
if (!ctx) {
ALOGE("%s: NULL ctx", __FUNCTION__);
return state;
}
overlay::Overlay& ov = *(ctx->mOverlay);
state = ov.getState();
// If there are any bypassLayers, state is based on number of layers
if ((bypassLayer > 0) && (ctx->hdmiEnabled == EXT_TYPE_NONE)) {
if (bypassLayer == 1) {
state = ovutils::OV_BYPASS_1_LAYER;
} else if (bypassLayer == 2) {
state = ovutils::OV_BYPASS_2_LAYER;
} else if (bypassLayer == 3) {
state = ovutils::OV_BYPASS_3_LAYER;
}
return state;
}
// RGB is ambiguous for determining overlay state
if (ovutils::isRgb(ovutils::getMdpFormat(format))) {
return state;
}
// Content type is either 2D or 3D
uint32_t fmt3D = 0;//XXX: 3D - ovutils::getS3DFormat(format);
// Determine state based on the external display, content type, and hw type
if (ctx->hdmiEnabled == EXT_TYPE_HDMI) {
// External display is HDMI
if (fmt3D) {
// Content type is 3D
if (ovutils::is3DTV()) {
// TV panel type is 3D
state = ovutils::OV_3D_VIDEO_ON_3D_TV;
} else {
// TV panel type is 2D
state = ovutils::OV_3D_VIDEO_ON_2D_PANEL_2D_TV;
}
} else {
// Content type is 2D
if (ovutils::FrameBufferInfo::getInstance()->supportTrueMirroring()) {
// True UI mirroring is supported
state = ovutils::OV_2D_TRUE_UI_MIRROR;
} else {
// True UI mirroring is not supported
state = ovutils::OV_2D_VIDEO_ON_PANEL_TV;
}
}
} else if (ctx->hdmiEnabled == EXT_TYPE_WIFI) {
// External display is Wifi (currently unsupported)
ALOGE("%s: WIFI external display is unsupported", __FUNCTION__);
return state;
} else {
// No external display (primary panel only)
if (fmt3D) {
// Content type is 3D
if (ovutils::usePanel3D()) {
// Primary panel type is 3D
state = ovutils::OV_3D_VIDEO_ON_3D_PANEL;
} else {
// Primary panel type is 2D
state = ovutils::OV_3D_VIDEO_ON_2D_PANEL;
}
} else {
// Content type is 2D
state = ovutils::OV_2D_VIDEO_ON_PANEL;
}
}
return state;
}
void setOverlayState(hwc_context_t *ctx, ovutils::eOverlayState state)
{
if (!ctx) {
ALOGE("%s: NULL ctx", __FUNCTION__);
return;
}
overlay::Overlay *ov = ctx->mOverlay;
if (!ov) {
ALOGE("%s: NULL OV object", __FUNCTION__);
return;
}
ov->setState(state);
}
bool prepareOverlay(hwc_context_t *ctx, hwc_layer_t *layer)
{
bool ret = false;
if (LIKELY(ctx->mOverlay)) {
private_handle_t *hnd = (private_handle_t *)layer->handle;
overlay::Overlay& ov = *(ctx->mOverlay);
ovutils::Whf info(hnd->width, hnd->height, hnd->format, hnd->size);
// Set overlay state
ovutils::eOverlayState state = determineOverlayState(ctx, 0, info.format);
setOverlayState(ctx, state);
ovutils::eDest dest = ovutils::OV_PIPE_ALL;
// In the true UI mirroring case, video needs to go to OV_PIPE0 (for
// primary) and OV_PIPE1 (for external)
if (state == ovutils::OV_2D_TRUE_UI_MIRROR) {
dest = static_cast<ovutils::eDest>(
ovutils::OV_PIPE0 | ovutils::OV_PIPE1);
}
ovutils::eMdpFlags mdpFlags = ovutils::OV_MDP_FLAGS_NONE;
if (hnd->flags & private_handle_t::PRIV_FLAGS_SECURE_BUFFER) {
ovutils::setMdpFlags(mdpFlags,
ovutils::OV_MDP_SECURE_OVERLAY_SESSION);
}
// FIXME: Use source orientation for TV when source is portrait
int transform = layer->transform & FINAL_TRANSFORM_MASK;
ovutils::eTransform orient =
static_cast<ovutils::eTransform>(transform);
ovutils::eWait waitFlag = ovutils::NO_WAIT;
if (ctx->skipComposition == true) {
waitFlag = ovutils::WAIT;
}
ovutils::eIsFg isFgFlag = ovutils::IS_FG_OFF;
if (ctx->numHwLayers == 1) {
isFgFlag = ovutils::IS_FG_SET;
}
ovutils::PipeArgs parg(mdpFlags,
info,
waitFlag,
ovutils::ZORDER_0,
isFgFlag,
ovutils::ROT_FLAG_DISABLED);
ovutils::PipeArgs pargs[ovutils::MAX_PIPES] = { parg, parg, parg };
ov.setSource(pargs, dest);
hwc_rect_t sourceCrop = layer->sourceCrop;
// x,y,w,h
ovutils::Dim dcrop(sourceCrop.left, sourceCrop.top, // x, y
sourceCrop.right - sourceCrop.left, // w
sourceCrop.bottom - sourceCrop.top);// h
ov.setCrop(dcrop, dest);
ov.setTransform(orient, dest);
int orientation = 0;
ovutils::Dim dim;
hwc_rect_t displayFrame = layer->displayFrame;
dim.x = displayFrame.left;
dim.y = displayFrame.top;
dim.w = (displayFrame.right - displayFrame.left);
dim.h = (displayFrame.bottom - displayFrame.top);
dim.o = orientation;
ov.setPosition(dim, dest);
if (!ov.commit(dest)) {
ALOGE("%s: commit fails", __FUNCTION__);
return false;
}
}
return true;
}
bool drawLayerUsingOverlay(hwc_context_t *ctx, hwc_layer_t *layer)
{
private_handle_t *hnd = (private_handle_t *)layer->handle;
// Lock this buffer for read.
ctx->qbuf->lockAndAdd(hnd);
bool ret = true;
overlay::Overlay& ov = *(ctx->mOverlay);
ovutils::eOverlayState state = ov.getState();
// Differentiate between states that need to wait for vsync
switch (state) {
case ovutils::OV_2D_VIDEO_ON_PANEL_TV:
case ovutils::OV_3D_VIDEO_ON_2D_PANEL_2D_TV:
case ovutils::OV_2D_TRUE_UI_MIRROR:
// If displaying on both primary and external, must play each
// pipe individually since wait for vsync needs to be done at
// the end. Do the following:
// - Play external
// - Play primary
// - Wait for external vsync to be done
// NOTE: In these states
// - primary VG = OV_PIPE0
// - external VG = OV_PIPE1
// - external RGB = OV_PIPE2
// - Only in true UI mirroring case, played by fb
// Play external
if (!ov.queueBuffer(hnd->fd, hnd->offset, ovutils::OV_PIPE1)) {
ALOGE("%s: queueBuffer failed for external", __FUNCTION__);
ret = false;
}
// Play primary
if (!ov.queueBuffer(hnd->fd, hnd->offset, ovutils::OV_PIPE0)) {
ALOGE("%s: queueBuffer failed for primary", __FUNCTION__);
ret = false;
}
// Wait for external vsync to be done
if (!ov.waitForVsync(ovutils::OV_PIPE1)) {
ALOGE("%s: waitForVsync failed for external", __FUNCTION__);
ret = false;
}
break;
default:
// In most cases, displaying only to one (primary or external)
// so use OV_PIPE_ALL since overlay will ignore NullPipes
if (!ov.queueBuffer(hnd->fd, hnd->offset, ovutils::OV_PIPE_ALL)) {
ALOGE("%s: queueBuffer failed", __FUNCTION__);
ret = false;
}
break;
}
if (!ret) {
ALOGE("%s: failed", __FUNCTION__);
}
return ret;
}
void cleanOverlays(hwc_context_t *ctx )
{
//XXX: handle for HDMI
if(0 == ctx->yuvBufferCount)
setOverlayState(ctx, ovutils::OV_CLOSED);
}
}; //namespace qhwc

libhwcomposer/hwc_utils.cpp

@@ -17,6 +17,7 @@
#include "hwc_utils.h"
#include "mdp_version.h"
#include "hwc_video.h"
namespace qhwc {
void initContext(hwc_context_t *ctx)
@@ -37,6 +38,7 @@ void closeContext(hwc_context_t *ctx)
delete ctx->mOverlay;
ctx->mOverlay = NULL;
}
if(ctx->fbDev) {
framebuffer_close(ctx->fbDev);
ctx->fbDev = NULL;
@@ -73,28 +75,90 @@ void dumpLayer(hwc_layer_t const* l)
void getLayerStats(hwc_context_t *ctx, const hwc_layer_list_t *list)
{
int yuvBufCount = 0;
int layersNotUpdatingCount = 0;
for (size_t i=0 ; i<list->numHwLayers; i++) {
private_handle_t *hnd = (private_handle_t *)list->hwLayers[i].handle;
//Video specific stats
int yuvCount = 0;
int yuvLayerIndex = -1;
bool isYuvLayerSkip = false;
for (size_t i = 0; i < list->numHwLayers; i++) {
private_handle_t *hnd =
(private_handle_t *)list->hwLayers[i].handle;
if (isYuvBuffer(hnd)) {
yuvBufCount++;
yuvCount++;
yuvLayerIndex = i;
//Animating
if (isSkipLayer(&list->hwLayers[i])) {
isYuvLayerSkip = true;
}
} else if (isSkipLayer(&list->hwLayers[i])) { //Popups
//If video layer is below a skip layer
if(yuvLayerIndex != -1 && yuvLayerIndex < (ssize_t)i) {
isYuvLayerSkip = true;
}
}
}
// Number of video/camera layers drawable with overlay
ctx->yuvBufferCount = yuvBufCount;
VideoOverlay::setStats(yuvCount, yuvLayerIndex, isYuvLayerSkip);
ctx->numHwLayers = list->numHwLayers;
return;
}
void handleYUV(hwc_context_t *ctx, hwc_layer_t *layer)
{
private_handle_t *hnd =
(private_handle_t *)layer->handle;
//XXX: Handle targets not using overlay
if(prepareOverlay(ctx, layer)) {
layer->compositionType = HWC_OVERLAY;
layer->hints |= HWC_HINT_CLEAR_FB;
//Crops source buffer against destination and FB boundaries
void calculate_crop_rects(hwc_rect_t& crop, hwc_rect_t& dst,
const int fbWidth, const int fbHeight) {
int& crop_x = crop.left;
int& crop_y = crop.top;
int& crop_r = crop.right;
int& crop_b = crop.bottom;
int crop_w = crop.right - crop.left;
int crop_h = crop.bottom - crop.top;
int& dst_x = dst.left;
int& dst_y = dst.top;
int& dst_r = dst.right;
int& dst_b = dst.bottom;
int dst_w = dst.right - dst.left;
int dst_h = dst.bottom - dst.top;
if(dst_x < 0) {
float scale_x = crop_w * 1.0f / dst_w;
float diff_factor = (scale_x * abs(dst_x));
crop_x = crop_x + (int)diff_factor;
crop_w = crop_r - crop_x;
dst_x = 0;
dst_w = dst_r - dst_x;
}
if(dst_r > fbWidth) {
float scale_x = crop_w * 1.0f / dst_w;
float diff_factor = scale_x * (dst_r - fbWidth);
crop_r = crop_r - diff_factor;
crop_w = crop_r - crop_x;
dst_r = fbWidth;
dst_w = dst_r - dst_x;
}
if(dst_y < 0) {
float scale_y = crop_h * 1.0f / dst_h;
float diff_factor = scale_y * abs(dst_y);
crop_y = crop_y + diff_factor;
crop_h = crop_b - crop_y;
dst_y = 0;
dst_h = dst_b - dst_y;
}
if(dst_b > fbHeight) {
float scale_y = crop_h * 1.0f / dst_h;
float diff_factor = scale_y * (dst_b - fbHeight);
crop_b = crop_b - diff_factor;
crop_h = crop_b - crop_y;
dst_b = fbHeight;
dst_h = dst_b - dst_y;
}
}
};//namespace
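To make the clamping arithmetic in calculate_crop_rects() above concrete, here is a worked example with hypothetical numbers (a 1280x720 framebuffer and a destination rect hanging 160 px off the left edge; not part of the commit):

    hwc_rect_t crop = {0, 0, 1280, 720};    // source: left, top, right, bottom
    hwc_rect_t dst  = {-160, 0, 1120, 720}; // destination starts off-screen on the left
    calculate_crop_rects(crop, dst, 1280, 720);
    // dst_x < 0 path: scale_x = 1280/1280 = 1.0, diff_factor = 160,
    // so 160 source columns are cropped away and the destination is clamped:
    // crop -> {160, 0, 1280, 720}, dst -> {0, 0, 1120, 720}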

libhwcomposer/hwc_utils.h

@@ -47,10 +47,12 @@ enum external_display_type {
// Utility functions - implemented in hwc_utils.cpp
void dumpLayer(hwc_layer_t const* l);
void getLayerStats(hwc_context_t *ctx, const hwc_layer_list_t *list);
void handleYUV(hwc_context_t *ctx, hwc_layer_t *layer);
void initContext(hwc_context_t *ctx);
void closeContext(hwc_context_t *ctx);
void openFramebufferDevice(hwc_context_t *ctx);
//Crops source buffer against destination and FB boundaries
void calculate_crop_rects(hwc_rect_t& crop, hwc_rect_t& dst,
const int fbWidth, const int fbHeight);
// Inline utility functions
static inline bool isSkipLayer(const hwc_layer_t* l) {
@@ -67,27 +69,7 @@ static inline bool isBufferLocked(const private_handle_t* hnd) {
return (hnd && (private_handle_t::PRIV_FLAGS_HWC_LOCK & hnd->flags));
}
// -----------------------------------------------------------------------------
// Overlay specific functions - inline or implemented in hwc_overlay.cpp
bool prepareOverlay(hwc_context_t *ctx, hwc_layer_t *layer);
//XXX: Refine draw functions
bool drawLayerUsingOverlay(hwc_context_t *ctx, hwc_layer_t *layer);
//XXX: Refine
void cleanOverlays(hwc_context_t *ctx );
void setOverlayState(hwc_context_t* ctx, ovutils::eOverlayState state);
// -----------------------------------------------------------------------------
// Copybit specific functions - inline or implemented in hwc_copybit.cpp
// -----------------------------------------------------------------------------
// HDMI specific functions - inline or implemented in hwc_hdmi.cpp
} //qhwc namespace
}; //qhwc namespace
// -----------------------------------------------------------------------------
@@ -95,13 +77,11 @@ void setOverlayState(hwc_context_t* ctx, ovutils::eOverlayState state);
// This structure contains overall state
struct hwc_context_t {
hwc_composer_device_t device;
// Layer variables
int yuvBufferCount;
int hdmiEnabled;
int numHwLayers;
int mdpVersion;
bool hasOverlay;
bool skipComposition;
int overlayInUse;
//Framebuffer device
framebuffer_device_t *fbDev;
@@ -113,7 +93,4 @@ struct hwc_context_t {
qhwc::QueuedBufferStore *qbuf;
};
#endif //HWC_UTILS_H

libhwcomposer/hwc_video.cpp (new file, 241 lines)

@@ -0,0 +1,241 @@
/*
* Copyright (C) 2010 The Android Open Source Project
* Copyright (C) 2012, Code Aurora Forum. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "hwc_video.h"
namespace qhwc {
#define FINAL_TRANSFORM_MASK 0x000F
#define VIDEO_DEBUG 0
//Static Members
ovutils::eOverlayState VideoOverlay::sState = ovutils::OV_CLOSED;
int VideoOverlay::sYuvCount = 0;
int VideoOverlay::sYuvLayerIndex = -1;
bool VideoOverlay::sIsModeOn = false;
bool VideoOverlay::sIsLayerSkip = false;
//Cache stats, figure out the state, config overlay
bool VideoOverlay::prepare(hwc_context_t *ctx, hwc_layer_list_t *list) {
sIsModeOn = false;
chooseState(ctx);
//if the state chosen above is CLOSED, skip this block.
if(sState != ovutils::OV_CLOSED) {
if(configure(ctx, &list->hwLayers[sYuvLayerIndex])) {
markFlags(&list->hwLayers[sYuvLayerIndex]);
}
}
ALOGD_IF(VIDEO_DEBUG, "%s: stats: yuvCount = %d, yuvIndex = %d,"
"IsModeOn = %d, IsSkipLayer = %d", __FUNCTION__, sYuvCount,
sYuvLayerIndex, sIsModeOn, sIsLayerSkip);
return sIsModeOn;
}
void VideoOverlay::chooseState(hwc_context_t *ctx) {
ALOGD_IF(VIDEO_DEBUG, "%s: old state = %s", __FUNCTION__,
ovutils::getStateString(sState));
ovutils::eOverlayState newState = ovutils::OV_CLOSED;
//TODO check if device supports overlay and hdmi
//Support 1 video layer
if(sYuvCount == 1) {
if(sIsLayerSkip && ctx->hdmiEnabled) { //Skip on primary, display on ext.
//TODO
//VIDEO_ON_TV_ONLY
} else if(sIsLayerSkip) { //skip on primary, no ext
newState = ovutils::OV_CLOSED;
} else if(ctx->hdmiEnabled) { //display on both
newState = ovutils::OV_2D_VIDEO_ON_PANEL_TV;
} else { //display on primary only
newState = ovutils::OV_2D_VIDEO_ON_PANEL;
}
}
sState = newState;
ALOGD_IF(VIDEO_DEBUG, "%s: new chosen state = %s", __FUNCTION__,
ovutils::getStateString(sState));
}
void VideoOverlay::markFlags(hwc_layer_t *layer) {
switch(sState) {
case ovutils::OV_2D_VIDEO_ON_PANEL:
case ovutils::OV_2D_VIDEO_ON_PANEL_TV:
layer->compositionType = HWC_OVERLAY;
layer->hints |= HWC_HINT_CLEAR_FB;
break;
//TODO
//case ovutils::OV_2D_VIDEO_ON_TV:
//just break, dont update flags.
default:
break;
}
}
bool VideoOverlay::configure(hwc_context_t *ctx, hwc_layer_t *layer)
{
if (LIKELY(ctx->mOverlay)) {
overlay::Overlay& ov = *(ctx->mOverlay);
// Set overlay state
ov.setState(sState);
private_handle_t *hnd = (private_handle_t *)layer->handle;
ovutils::Whf info(hnd->width, hnd->height, hnd->format, hnd->size);
//TODO change this based on state.
ovutils::eDest dest = ovutils::OV_PIPE_ALL;
ovutils::eMdpFlags mdpFlags = ovutils::OV_MDP_FLAGS_NONE;
if (hnd->flags & private_handle_t::PRIV_FLAGS_SECURE_BUFFER) {
ovutils::setMdpFlags(mdpFlags,
ovutils::OV_MDP_SECURE_OVERLAY_SESSION);
}
ovutils::eWait waitFlag = ovutils::NO_WAIT;
ovutils::eIsFg isFgFlag = ovutils::IS_FG_OFF;
if (ctx->numHwLayers == 1) {
isFgFlag = ovutils::IS_FG_SET;
}
ovutils::PipeArgs parg(mdpFlags,
info,
waitFlag,
ovutils::ZORDER_0,
isFgFlag,
ovutils::ROT_FLAG_DISABLED);
ovutils::PipeArgs pargs[ovutils::MAX_PIPES] = { parg, parg, parg };
ov.setSource(pargs, dest);
hwc_rect_t sourceCrop = layer->sourceCrop;
// x,y,w,h
ovutils::Dim dcrop(sourceCrop.left, sourceCrop.top,
sourceCrop.right - sourceCrop.left,
sourceCrop.bottom - sourceCrop.top);
//Only for External
ov.setCrop(dcrop, ovutils::OV_PIPE1);
// FIXME: Use source orientation for TV when source is portrait
//Only for External
ov.setTransform(0, dest);
ovutils::Dim dpos;
hwc_rect_t displayFrame = layer->displayFrame;
dpos.x = displayFrame.left;
dpos.y = displayFrame.top;
dpos.w = (displayFrame.right - displayFrame.left);
dpos.h = (displayFrame.bottom - displayFrame.top);
//Only for External
ov.setPosition(dpos, ovutils::OV_PIPE1);
//Calculate the rect for primary based on whether the supplied position
//is within or outside bounds.
const int fbWidth =
ovutils::FrameBufferInfo::getInstance()->getWidth();
const int fbHeight =
ovutils::FrameBufferInfo::getInstance()->getHeight();
if( displayFrame.left < 0 ||
displayFrame.top < 0 ||
displayFrame.right > fbWidth ||
displayFrame.bottom > fbHeight) {
calculate_crop_rects(sourceCrop, displayFrame, fbWidth, fbHeight);
//Update calculated width and height
dcrop.w = sourceCrop.right - sourceCrop.left;
dcrop.h = sourceCrop.bottom - sourceCrop.top;
dpos.w = displayFrame.right - displayFrame.left;
dpos.h = displayFrame.bottom - displayFrame.top;
}
//Only for Primary
ov.setCrop(dcrop, ovutils::OV_PIPE0);
int transform = layer->transform & FINAL_TRANSFORM_MASK;
ovutils::eTransform orient =
static_cast<ovutils::eTransform>(transform);
ov.setTransform(orient, ovutils::OV_PIPE0);
ov.setPosition(dpos, ovutils::OV_PIPE0);
//Both prim and external
if (!ov.commit(dest)) {
ALOGE("%s: commit fails", __FUNCTION__);
return false;
}
sIsModeOn = true;
}
return sIsModeOn;
}
bool VideoOverlay::draw(hwc_context_t *ctx, hwc_layer_list_t *list)
{
if(!sIsModeOn || sYuvLayerIndex == -1) {
return true;
}
private_handle_t *hnd =
(private_handle_t *)list->hwLayers[sYuvLayerIndex].handle;
// Lock this buffer for read.
ctx->qbuf->lockAndAdd(hnd);
bool ret = true;
overlay::Overlay& ov = *(ctx->mOverlay);
ovutils::eOverlayState state = ov.getState();
switch (state) {
case ovutils::OV_2D_VIDEO_ON_PANEL_TV:
case ovutils::OV_3D_VIDEO_ON_2D_PANEL_2D_TV:
// Play external
if (!ov.queueBuffer(hnd->fd, hnd->offset, ovutils::OV_PIPE1)) {
ALOGE("%s: queueBuffer failed for external", __FUNCTION__);
ret = false;
}
// Play primary
if (!ov.queueBuffer(hnd->fd, hnd->offset, ovutils::OV_PIPE0)) {
ALOGE("%s: queueBuffer failed for primary", __FUNCTION__);
ret = false;
}
// Wait for external vsync to be done
if (!ov.waitForVsync(ovutils::OV_PIPE1)) {
ALOGE("%s: waitForVsync failed for external", __FUNCTION__);
ret = false;
}
break;
default:
// In most cases, displaying only to one (primary or external)
// so use OV_PIPE_ALL since overlay will ignore NullPipes
if (!ov.queueBuffer(hnd->fd, hnd->offset, ovutils::OV_PIPE_ALL)) {
ALOGE("%s: queueBuffer failed", __FUNCTION__);
ret = false;
}
break;
}
return ret;
}
}; //namespace qhwc

libhwcomposer/hwc_video.h (new file, 67 lines)

@@ -0,0 +1,67 @@
/*
* Copyright (C) 2010 The Android Open Source Project
* Copyright (C) 2012, Code Aurora Forum. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HWC_VIDEO_H
#define HWC_VIDEO_H
#include "hwc_utils.h"
#define LIKELY( exp ) (__builtin_expect( (exp) != 0, true ))
#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false ))
namespace qhwc {
//Feature for using overlay to display videos.
class VideoOverlay {
public:
//Sets up members and prepares overlay if conditions are met
static bool prepare(hwc_context_t *ctx, hwc_layer_list_t *list);
//Draws layer if this feature is on
static bool draw(hwc_context_t *ctx, hwc_layer_list_t *list);
//Receives data from hwc
static void setStats(int yuvCount, int yuvLayerIndex, bool isYuvLayerSkip);
private:
//Choose an appropriate overlay state based on conditions
static void chooseState(hwc_context_t *ctx);
//Configures overlay
static bool configure(hwc_context_t *ctx, hwc_layer_t *layer);
//Marks layer flags if this feature is used
static void markFlags(hwc_layer_t *layer);
//returns yuv count
static int getYuvCount();
//The chosen overlay state.
static ovutils::eOverlayState sState;
//Number of yuv layers in this drawing round
static int sYuvCount;
//Index of YUV layer, relevant only if count is 1
static int sYuvLayerIndex;
//Flags if a yuv layer is animating or below something that is animating
static bool sIsLayerSkip;
//Flags if this feature is on.
static bool sIsModeOn;
};
inline void VideoOverlay::setStats(int yuvCount, int yuvLayerIndex,
bool isYuvLayerSkip) {
sYuvCount = yuvCount;
sYuvLayerIndex = yuvLayerIndex;
sIsLayerSkip = isYuvLayerSkip;
}
inline int VideoOverlay::getYuvCount() { return sYuvCount; }
}; //namespace qhwc
#endif //HWC_VIDEO_H