0

Clean up broken revert of 87790.

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@87794 0039d316-1c4b-4281-b951-d872f2087c98
This commit is contained in:
scherkus@chromium.org
2011-06-03 16:47:16 +00:00
parent ef03875e8f
commit ad75cf2077
14 changed files with 2550 additions and 0 deletions

159
media/omx/mock_omx.cc Normal file

@ -0,0 +1,159 @@
// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/omx/mock_omx.h"
#include "base/logging.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace media {
// Global singleton set by the MockOmx constructor and cleared by its
// destructor; the file-static stubs below route every OpenMAX call through it.
MockOmx* MockOmx::instance_ = NULL;

// Static stub methods. They redirect method calls back to the mock object.
// File-static trampolines installed into the OMX_COMPONENTTYPE vtable.
// Each one checks that the call is aimed at the mocked component and then
// forwards to the matching gmock method on the MockOmx singleton.
static OMX_ERRORTYPE MockSendCommand(OMX_HANDLETYPE component,
                                     OMX_COMMANDTYPE command,
                                     OMX_U32 param1,
                                     OMX_PTR command_data) {
  MockOmx* mock = MockOmx::get();
  CHECK(mock->component() == reinterpret_cast<OMX_COMPONENTTYPE*>(component));
  return mock->SendCommand(command, param1, command_data);
}

static OMX_ERRORTYPE MockGetParameter(OMX_HANDLETYPE component,
                                      OMX_INDEXTYPE param_index,
                                      OMX_PTR structure) {
  MockOmx* mock = MockOmx::get();
  CHECK(mock->component() == reinterpret_cast<OMX_COMPONENTTYPE*>(component));
  return mock->GetParameter(param_index, structure);
}

static OMX_ERRORTYPE MockSetParameter(OMX_HANDLETYPE component,
                                      OMX_INDEXTYPE param_index,
                                      OMX_PTR structure) {
  MockOmx* mock = MockOmx::get();
  CHECK(mock->component() == reinterpret_cast<OMX_COMPONENTTYPE*>(component));
  return mock->SetParameter(param_index, structure);
}

static OMX_ERRORTYPE MockGetConfig(OMX_HANDLETYPE component,
                                   OMX_INDEXTYPE index,
                                   OMX_PTR structure) {
  MockOmx* mock = MockOmx::get();
  CHECK(mock->component() == reinterpret_cast<OMX_COMPONENTTYPE*>(component));
  return mock->GetConfig(index, structure);
}

static OMX_ERRORTYPE MockSetConfig(OMX_HANDLETYPE component,
                                   OMX_INDEXTYPE index,
                                   OMX_PTR structure) {
  MockOmx* mock = MockOmx::get();
  CHECK(mock->component() == reinterpret_cast<OMX_COMPONENTTYPE*>(component));
  return mock->SetConfig(index, structure);
}

static OMX_ERRORTYPE MockAllocateBuffer(OMX_HANDLETYPE component,
                                        OMX_BUFFERHEADERTYPE** buffer,
                                        OMX_U32 port_index,
                                        OMX_PTR app_private,
                                        OMX_U32 size_bytes) {
  MockOmx* mock = MockOmx::get();
  CHECK(mock->component() == reinterpret_cast<OMX_COMPONENTTYPE*>(component));
  return mock->AllocateBuffer(buffer, port_index, app_private, size_bytes);
}

static OMX_ERRORTYPE MockUseBuffer(OMX_HANDLETYPE component,
                                   OMX_BUFFERHEADERTYPE** buffer,
                                   OMX_U32 port_index,
                                   OMX_PTR app_private,
                                   OMX_U32 size_bytes,
                                   OMX_U8* pBuffer) {
  MockOmx* mock = MockOmx::get();
  CHECK(mock->component() == reinterpret_cast<OMX_COMPONENTTYPE*>(component));
  return mock->UseBuffer(buffer, port_index, app_private, size_bytes, pBuffer);
}

static OMX_ERRORTYPE MockFreeBuffer(OMX_HANDLETYPE component,
                                    OMX_U32 port_index,
                                    OMX_BUFFERHEADERTYPE* buffer) {
  MockOmx* mock = MockOmx::get();
  CHECK(mock->component() == reinterpret_cast<OMX_COMPONENTTYPE*>(component));
  return mock->FreeBuffer(port_index, buffer);
}

static OMX_ERRORTYPE MockEmptyThisBuffer(OMX_HANDLETYPE component,
                                         OMX_BUFFERHEADERTYPE* buffer) {
  MockOmx* mock = MockOmx::get();
  CHECK(mock->component() == reinterpret_cast<OMX_COMPONENTTYPE*>(component));
  return mock->EmptyThisBuffer(buffer);
}

static OMX_ERRORTYPE MockFillThisBuffer(OMX_HANDLETYPE component,
                                        OMX_BUFFERHEADERTYPE* buffer) {
  MockOmx* mock = MockOmx::get();
  CHECK(mock->component() == reinterpret_cast<OMX_COMPONENTTYPE*>(component));
  return mock->FillThisBuffer(buffer);
}
// Stub methods to export symbols used for OpenMAX.
// These replace the real OpenMAX core entry points at link time so tests can
// set gmock expectations on core-level calls (Init/Deinit/handle management).
extern "C" {

OMX_ERRORTYPE OMX_Init() {
  return MockOmx::get()->Init();
}

OMX_ERRORTYPE OMX_Deinit() {
  return MockOmx::get()->Deinit();
}

OMX_ERRORTYPE OMX_GetHandle(
    OMX_HANDLETYPE* handle, OMX_STRING name, OMX_PTR app_private,
    OMX_CALLBACKTYPE* callbacks) {
  return MockOmx::get()->GetHandle(handle, name, app_private, callbacks);
}

OMX_ERRORTYPE OMX_FreeHandle(OMX_HANDLETYPE handle) {
  return MockOmx::get()->FreeHandle(handle);
}

OMX_ERRORTYPE OMX_GetComponentsOfRole(OMX_STRING name, OMX_U32* roles,
                                      OMX_U8** component_names) {
  return MockOmx::get()->GetComponentsOfRole(name, roles, component_names);
}

}  // extern "C"
// Builds the fake component vtable and registers this object as the global
// mock target.  Only one MockOmx may be alive at a time because the static
// stubs route through a single global instance.
MockOmx::MockOmx() {
  memset(&callbacks_, 0, sizeof(callbacks_));
  memset(&component_, 0, sizeof(component_));

  // Setup the function pointers to the static methods. They will redirect back
  // to this mock object.
  component_.SendCommand = &MockSendCommand;
  component_.GetParameter = &MockGetParameter;
  component_.SetParameter = &MockSetParameter;
  component_.GetConfig = &MockGetConfig;
  component_.SetConfig = &MockSetConfig;
  component_.AllocateBuffer = &MockAllocateBuffer;
  component_.UseBuffer = &MockUseBuffer;
  component_.FreeBuffer = &MockFreeBuffer;
  component_.EmptyThisBuffer = &MockEmptyThisBuffer;
  component_.FillThisBuffer = &MockFillThisBuffer;

  // Save this instance to static member.
  CHECK(!instance_);
  instance_ = this;
}

// Unregisters the global instance so a subsequent test can create a new mock.
MockOmx::~MockOmx() {
  CHECK(instance_);
  instance_ = NULL;
}

// static
// Accessor for the singleton; NULL when no MockOmx is currently alive.
MockOmx* MockOmx::get() {
  return instance_;
}
} // namespace media

101
media/omx/mock_omx.h Normal file

@ -0,0 +1,101 @@
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef MEDIA_OMX_MOCK_OMX_H_
#define MEDIA_OMX_MOCK_OMX_H_
#include "base/basictypes.h"
#include "base/memory/scoped_ptr.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "third_party/openmax/il/OMX_Component.h"
#include "third_party/openmax/il/OMX_Core.h"
namespace media {
// Mock of the OpenMAX IL API.  Tests create one MockOmx (it registers itself
// as a process-wide singleton) and set gmock expectations on it; the
// accompanying .cc file exports stub OMX_* entry points and a fake component
// vtable that forward every call here.
class MockOmx {
 public:
  MockOmx();
  virtual ~MockOmx();

  // The following mock methods are component specific.
  MOCK_METHOD3(SendCommand, OMX_ERRORTYPE(
      OMX_COMMANDTYPE command,
      OMX_U32 param1,
      OMX_PTR command_data));

  MOCK_METHOD2(GetParameter, OMX_ERRORTYPE(
      OMX_INDEXTYPE param_index,
      OMX_PTR structure));

  MOCK_METHOD2(SetParameter, OMX_ERRORTYPE(
      OMX_INDEXTYPE param_index,
      OMX_PTR structure));

  MOCK_METHOD2(GetConfig, OMX_ERRORTYPE(
      OMX_INDEXTYPE index,
      OMX_PTR structure));

  MOCK_METHOD2(SetConfig, OMX_ERRORTYPE(
      OMX_INDEXTYPE index,
      OMX_PTR structure));

  MOCK_METHOD4(AllocateBuffer, OMX_ERRORTYPE(
      OMX_BUFFERHEADERTYPE** buffer,
      OMX_U32 port_index,
      OMX_PTR app_private,
      OMX_U32 size_bytes));

  MOCK_METHOD5(UseBuffer, OMX_ERRORTYPE(
      OMX_BUFFERHEADERTYPE** buffer,
      OMX_U32 port_index,
      OMX_PTR app_private,
      OMX_U32 size_bytes,
      OMX_U8* pBuffer));

  MOCK_METHOD2(FreeBuffer, OMX_ERRORTYPE(
      OMX_U32 port_index,
      OMX_BUFFERHEADERTYPE* buffer));

  MOCK_METHOD1(EmptyThisBuffer, OMX_ERRORTYPE(
      OMX_BUFFERHEADERTYPE* buffer));

  MOCK_METHOD1(FillThisBuffer, OMX_ERRORTYPE(
      OMX_BUFFERHEADERTYPE* buffer));

  // The following mock methods are defined global.
  MOCK_METHOD0(Init, OMX_ERRORTYPE());
  MOCK_METHOD0(Deinit, OMX_ERRORTYPE());

  MOCK_METHOD4(GetHandle, OMX_ERRORTYPE(
      OMX_HANDLETYPE* handle,
      OMX_STRING name,
      OMX_PTR app_private,
      OMX_CALLBACKTYPE* callbacks));

  MOCK_METHOD1(FreeHandle, OMX_ERRORTYPE(
      OMX_HANDLETYPE handle));

  MOCK_METHOD3(GetComponentsOfRole, OMX_ERRORTYPE(
      OMX_STRING name,
      OMX_U32* roles,
      OMX_U8** component_names));

  // Callback table captured from the client during OMX_GetHandle().
  OMX_CALLBACKTYPE* callbacks() { return &callbacks_; }
  // The fake component whose vtable points at the static stubs.
  OMX_COMPONENTTYPE* component() { return &component_; }

  // Getter for the global instance of MockOmx.
  static MockOmx* get();

 private:
  static MockOmx* instance_;

  OMX_CALLBACKTYPE callbacks_;
  OMX_COMPONENTTYPE component_;

  DISALLOW_COPY_AND_ASSIGN(MockOmx);
};
} // namespace media
#endif // MEDIA_OMX_MOCK_OMX_H_

@ -0,0 +1,483 @@
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#define _CRT_SECURE_NO_WARNINGS
#if 0
#include <deque>
#include "base/callback.h"
#include "base/message_loop.h"
#include "base/task.h"
#include "media/base/buffers.h"
#include "media/base/mock_filters.h"
#include "media/base/mock_task.h"
#include "media/ffmpeg/ffmpeg_common.h"
#include "media/video/omx_video_decode_engine.h"
#include "media/video/video_decode_engine.h"
#include "media/omx/mock_omx.h"
#include "testing/gtest/include/gtest/gtest.h"
using ::testing::_;
using ::testing::DoAll;
using ::testing::InSequence;
using ::testing::IsNull;
using ::testing::NotNull;
using ::testing::Return;
using ::testing::SaveArg;
using ::testing::SetArgumentPointee;
using ::testing::StrEq;
using ::testing::StrictMock;
namespace media {
// Buffer geometry reported by the fake component.
static const int kBufferCount = 3;
static const int kBufferSize = 4096;
// Component name returned for the decoder role query.
static const char* kComponentName = "OMX.google.video_decoder.avc";

// Writes kComponentName into the caller-supplied name array (arg2 of
// GetComponentsOfRole).
ACTION(ReturnComponentName) {
  strcpy(((char**)arg2)[0], kComponentName);
}

// Simulates OMX_GetHandle(): hands out the mock component (arg0), stashes the
// client context (arg2) and copies the client's callback table (arg3).
ACTION(GetHandle) {
  *arg0 = MockOmx::get()->component();
  MockOmx::get()->component()->pApplicationPrivate = arg2;
  memcpy(MockOmx::get()->callbacks(), arg3, sizeof(OMX_CALLBACKTYPE));
}

// Fills OMX_IndexParamVideoInit: ports start at index 0.
ACTION(GetParameterVideoInit) {
  ((OMX_PORT_PARAM_TYPE*)arg1)->nStartPortNumber = 0;
}

// Fills OMX_IndexParamPortDefinition: port 0 is input, port 1 is output,
// both with kBufferCount buffers of kBufferSize bytes.
ACTION(GetParameterPortDefinition) {
  OMX_PARAM_PORTDEFINITIONTYPE* port_format =
      (OMX_PARAM_PORTDEFINITIONTYPE*)arg1;
  CHECK(port_format->nPortIndex == 0 || port_format->nPortIndex == 1);
  if (port_format->nPortIndex == 0)
    port_format->eDir = OMX_DirInput;
  else
    port_format->eDir = OMX_DirOutput;
  port_format->nBufferCountMin = kBufferCount;
  port_format->nBufferCountActual = kBufferCount;
  port_format->nBufferSize = kBufferSize;
}

// Simulates OMX_AllocateBuffer for the output port (component-owned memory).
// The paired FreeBuffer action below releases both header and payload.
ACTION(AllocateBuffer) {
  *arg0 = new OMX_BUFFERHEADERTYPE();
  memset(*arg0, 0, sizeof(OMX_BUFFERHEADERTYPE));
  (*arg0)->nAllocLen = arg3;
  (*arg0)->pBuffer = new uint8[arg3];
  (*arg0)->nOutputPortIndex = 1;
  (*arg0)->nInputPortIndex = OMX_ALL;
}

// Simulates OMX_UseBuffer for the input port (client-owned memory, so no
// payload is allocated here).
ACTION(UseBuffer) {
  *arg0 = new OMX_BUFFERHEADERTYPE();
  memset(*arg0, 0, sizeof(OMX_BUFFERHEADERTYPE));
  (*arg0)->nOutputPortIndex = OMX_ALL;
  (*arg0)->nInputPortIndex = 0;
}

// Releases a buffer header; the payload is deleted only for output-port
// buffers because those were allocated by AllocateBuffer above.
ACTION(FreeBuffer) {
  if (1 == arg1->nOutputPortIndex)
    delete [] arg1->pBuffer;
  delete arg1;
}

// Fires the client's EventHandler callback with the given event triple.
ACTION_P3(SendEvent, event, data1, data2) {
  (*MockOmx::get()->callbacks()->EventHandler)(
      MockOmx::get()->component(),
      MockOmx::get()->component()->pApplicationPrivate,
      event, static_cast<OMX_U32>(data1), static_cast<OMX_U32>(data2), NULL);
}

// Completes an EmptyThisBuffer request and immediately "decodes" it: pops an
// output buffer from the pool, propagates the EOS flag, and reports it filled.
ACTION_P(EmptyBufferDone, output_pool_ptr) {
  (*MockOmx::get()->callbacks()->EmptyBufferDone)(
      MockOmx::get()->component(),
      MockOmx::get()->component()->pApplicationPrivate,
      arg0);
  OMX_BUFFERHEADERTYPE* out_buffer = output_pool_ptr->front();
  output_pool_ptr->pop_front();
  if (arg0->nFlags & OMX_BUFFERFLAG_EOS)
    out_buffer->nFlags |= OMX_BUFFERFLAG_EOS;
  out_buffer->nFilledLen = kBufferSize;
  (*MockOmx::get()->callbacks()->FillBufferDone)(
      MockOmx::get()->component(),
      MockOmx::get()->component()->pApplicationPrivate,
      out_buffer);
}

// Parks a FillThisBuffer request in the shared output pool.
ACTION_P(EnqueueOutputBuffer, output_pool_ptr) {
  output_pool_ptr->push_back(arg0);
}

// Returns the buffer immediately as an empty end-of-stream frame.
ACTION(FillEosBuffer) {
  arg0->nFlags = OMX_BUFFERFLAG_EOS;
  arg0->nFilledLen = 0;
  (*MockOmx::get()->callbacks()->FillBufferDone)(
      MockOmx::get()->component(),
      MockOmx::get()->component()->pApplicationPrivate,
      arg0);
}
// Minimal in-memory media::Buffer used as decoder input in these tests.
// A default-constructed TestBuffer has no data and therefore acts as the
// end-of-stream marker.
class TestBuffer : public media::Buffer {
 public:
  TestBuffer() : size_(0), data_(NULL) { }
  explicit TestBuffer(int size) : size_(size) {
    if (size)
      data_.reset(new uint8[size]);
    else
      data_.reset(NULL);
  }

  virtual const uint8* GetData() const {
    return data_.get();
  }

  virtual size_t GetDataSize() const {
    return size_;
  }

 private:
  // Ref-counted: destroyed only via Release(), hence the private destructor.
  virtual ~TestBuffer() { }

  int size_;
  scoped_array<uint8> data_;

  DISALLOW_COPY_AND_ASSIGN(TestBuffer);
};
// Fixture driving OmxVideoDecodeEngine against the MockOmx component.
// The Expect* helpers stage gmock expectations for each OpenMAX state
// transition; tests then pump |message_loop_| to let the engine run.
class OmxCodecTest : public testing::Test {
 public:
  OmxCodecTest ()
      : input_buffer_count_(0),
        got_eos_(false),
        omx_engine_(new OmxVideoDecodeEngine()) {
    av_stream_.codec = &av_codec_context_;
    av_codec_context_.width = 16;
    av_codec_context_.height = 16;
    feed_done_cb_ =
        NewCallback(this, &OmxCodecTest::EmptyBufferDoneCallback);
    decode_done_cb_ =
        NewCallback(this, &OmxCodecTest::FillBufferDoneCallback);
  }

  ~OmxCodecTest() {
  }

 protected:
  // Expectations for role/parameter negotiation during initialization.
  void ExpectSettings() {
    // Return the component name.
    EXPECT_CALL(*MockOmx::get(), GetComponentsOfRole(_, _, IsNull()))
        .WillOnce(DoAll(SetArgumentPointee<1>(1),
                        Return(OMX_ErrorNone)));
    EXPECT_CALL(*MockOmx::get(), GetComponentsOfRole(_, _, NotNull()))
        .WillOnce(DoAll(SetArgumentPointee<1>(1),
                        ReturnComponentName(),
                        Return(OMX_ErrorNone)));

    // Handle get parameter calls.
    EXPECT_CALL(*MockOmx::get(),
                GetParameter(OMX_IndexParamVideoInit, NotNull()))
        .WillRepeatedly(DoAll(GetParameterVideoInit(), Return(OMX_ErrorNone)));
    EXPECT_CALL(*MockOmx::get(),
                GetParameter(OMX_IndexParamPortDefinition, NotNull()))
        .WillRepeatedly(DoAll(GetParameterPortDefinition(),
                              Return(OMX_ErrorNone)));

    // Ignore all set parameter calls.
    EXPECT_CALL(*MockOmx::get(), SetParameter(_, _))
        .WillRepeatedly(Return(OMX_ErrorNone));
  }

  // Expectations for OMX_Init() and handle acquisition.
  void ExpectToLoaded() {
    InSequence s;

    // Expect initialization.
    EXPECT_CALL(*MockOmx::get(), Init())
        .WillOnce(Return(OMX_ErrorNone));

    // Return the handle.
    EXPECT_CALL(*MockOmx::get(),
                GetHandle(NotNull(), StrEq(kComponentName),
                          NotNull(), NotNull()))
        .WillOnce(DoAll(GetHandle(),
                        Return(OMX_ErrorNone)));
  }

  // Expectations for the Loaded -> Idle transition plus buffer allocation
  // (UseBuffer on input port 0, AllocateBuffer on output port 1).
  void ExpectLoadedToIdle() {
    InSequence s;

    // Expect transition to idle.
    EXPECT_CALL(*MockOmx::get(),
                SendCommand(OMX_CommandStateSet, OMX_StateIdle, _))
        .WillOnce(
            DoAll(
                SendEvent(OMX_EventCmdComplete, OMX_CommandStateSet,
                          OMX_StateIdle),
                Return(OMX_ErrorNone)));

    // Expect allocation of buffers.
    EXPECT_CALL(*MockOmx::get(),
                UseBuffer(NotNull(), 0, NotNull(), kBufferSize, _))
        .Times(kBufferCount)
        .WillRepeatedly(DoAll(UseBuffer(), Return(OMX_ErrorNone)));

    // Expect allocation of output buffers and send command complete.
    EXPECT_CALL(*MockOmx::get(),
                AllocateBuffer(NotNull(), 1, IsNull(), kBufferSize))
        .Times(kBufferCount)
        .WillRepeatedly(DoAll(AllocateBuffer(), Return(OMX_ErrorNone)));
  }

  // Expectations for the Idle -> Executing transition and the initial
  // round of FillThisBuffer() calls that prime the output pool.
  void ExpectToExecuting() {
    InSequence s;

    // Expect transition to executing.
    EXPECT_CALL(*MockOmx::get(),
                SendCommand(OMX_CommandStateSet, OMX_StateExecuting, _))
        .WillOnce(DoAll(
            SendEvent(OMX_EventCmdComplete, OMX_CommandStateSet,
                      OMX_StateExecuting),
            Return(OMX_ErrorNone)));

    // Expect initial FillThisBuffer() calls.
    EXPECT_CALL(*MockOmx::get(), FillThisBuffer(NotNull()))
        .Times(kBufferCount)
        .WillRepeatedly(DoAll(EnqueueOutputBuffer(&output_pool_),
                              Return(OMX_ErrorNone)));
  }

  void ExpectToIdle() {
    // Expect going to idle
    EXPECT_CALL(*MockOmx::get(),
                SendCommand(OMX_CommandStateSet, OMX_StateIdle, _))
        .WillOnce(DoAll(
            SendEvent(OMX_EventCmdComplete, OMX_CommandStateSet, OMX_StateIdle),
            Return(OMX_ErrorNone)));
  }

  // Expectations for the Idle -> Loaded transition and freeing all buffers.
  void ExpectIdleToLoaded() {
    InSequence s;

    // Expect transition to loaded.
    EXPECT_CALL(*MockOmx::get(),
                SendCommand(OMX_CommandStateSet, OMX_StateLoaded, _))
        .WillOnce(DoAll(
            SendEvent(OMX_EventCmdComplete, OMX_CommandStateSet,
                      OMX_StateLoaded),
            Return(OMX_ErrorNone)));

    // Expect free buffer for input port.
    EXPECT_CALL(*MockOmx::get(), FreeBuffer(0, NotNull()))
        .Times(kBufferCount)
        .WillRepeatedly(DoAll(FreeBuffer(), Return(OMX_ErrorNone)));
    EXPECT_CALL(*MockOmx::get(), FreeBuffer(1, NotNull()))
        .Times(kBufferCount)
        .WillRepeatedly(DoAll(FreeBuffer(), Return(OMX_ErrorNone)));
  }

  // Expectations for handle release and OMX_Deinit().
  void ExpectToEmpty() {
    InSequence s;

    EXPECT_CALL(*MockOmx::get(), FreeHandle(MockOmx::get()->component()))
        .WillOnce(Return(OMX_ErrorNone));
    EXPECT_CALL(*MockOmx::get(), Deinit())
        .WillOnce(Return(OMX_ErrorNone));
  }

  // TODO(hclam): Make a more generic about when to stop.
  void ExpectStart() {
    ExpectToLoaded();
    ExpectLoadedToIdle();
    ExpectToExecuting();
    EXPECT_CALL(init_done_cb_task_, Run());
  }

  void ExpectStop() {
    EXPECT_CALL(stop_task_, Run());
    ExpectToIdle();
    ExpectIdleToLoaded();
    ExpectToEmpty();
  }

  // Engine callback: an input buffer was consumed.  A NULL |buffer| is the
  // engine asking for more input, answered with a fresh TestBuffer whose size
  // encodes its sequence number (checked by RecycleInputBuffers).
  void EmptyBufferDoneCallback(scoped_refptr<Buffer> buffer) {
    if (buffer.get()) {
      input_units_.push_back(buffer);
    } else {
      input_buffer_count_++;
      scoped_refptr<Buffer> buffer_ref = new TestBuffer(input_buffer_count_);
      input_units_.push_back(buffer_ref);
    }
  }

  // Engine callback: a decoded frame was produced.
  void FillBufferDoneCallback(scoped_refptr<VideoFrame> frame) {
    output_units_.push_back(frame);
    if (frame->IsEndOfStream())
      got_eos_ = true;
  }

  // Feeds the oldest queued input buffer to the engine.
  void MakeEmptyBufferRequest() {
    scoped_refptr<Buffer> buffer = input_units_.front();
    input_units_.pop_front();
    omx_engine_->EmptyThisBuffer(buffer);
  }

  // Replaces the next input with an empty (EOS) buffer and feeds it.
  void SendEOSInputBuffer() {
    input_units_.pop_front();
    scoped_refptr<Buffer> buffer_ref = new TestBuffer();
    input_units_.push_front(buffer_ref);
    EXPECT_CALL(*MockOmx::get(), EmptyThisBuffer(NotNull()))
        .WillOnce(DoAll(EmptyBufferDone(&output_pool_), Return(OMX_ErrorNone)))
        .RetiresOnSaturation();
    MakeEmptyBufferRequest();
    message_loop_.RunAllPending();
  }

  int input_buffer_count_;
  std::deque<scoped_refptr<Buffer> > input_units_;
  std::deque<scoped_refptr<VideoFrame> > output_units_;
  std::deque<OMX_BUFFERHEADERTYPE*> fill_this_buffer_received_;
  std::deque<OMX_BUFFERHEADERTYPE*> output_pool_;

  // Constructing the mock registers it as the global MockOmx singleton.
  MockOmx mock_omx_;

  bool got_eos_;
  MessageLoop message_loop_;

  scoped_refptr<OmxVideoDecodeEngine> omx_engine_;

  AVStream av_stream_;
  AVCodecContext av_codec_context_;

  VideoDecodeEngine::ProduceVideoSampleCallback* feed_done_cb_;
  VideoDecodeEngine::ConsumeVideoFrameCallback* decode_done_cb_;
  TaskMocker init_done_cb_task_;
  TaskMocker stop_task_;

 private:
  DISALLOW_COPY_AND_ASSIGN(OmxCodecTest);
};
// Initializes the engine, verifies it reaches the normal state with all
// input buffers requested, then shuts it down cleanly.
TEST_F(OmxCodecTest, SimpleStartAndStop) {
  ExpectSettings();
  ExpectStart();
  omx_engine_->Initialize(&message_loop_,
                          &av_stream_,
                          feed_done_cb_,
                          decode_done_cb_,
                          init_done_cb_task_.CreateTask());
  message_loop_.RunAllPending();
  EXPECT_EQ(kBufferCount, input_buffer_count_);
  EXPECT_EQ(VideoDecodeEngine::kNormal, omx_engine_->state());

  ExpectStop();
  omx_engine_->Stop(stop_task_.CreateTask());
  message_loop_.RunAllPending();
}

// Decodes kBufferCount frames, then sends end-of-stream and verifies the
// EOS frame is delivered.
TEST_F(OmxCodecTest, NormalFlow) {
  ExpectSettings();
  ExpectStart();
  omx_engine_->Initialize(&message_loop_,
                          &av_stream_,
                          feed_done_cb_,
                          decode_done_cb_,
                          init_done_cb_task_.CreateTask());
  message_loop_.RunAllPending();
  EXPECT_EQ(kBufferCount, input_buffer_count_);
  EXPECT_EQ(VideoDecodeEngine::kNormal, omx_engine_->state());

  // Make emptybuffer requests.
  EXPECT_EQ(0u, output_units_.size());
  int count = output_pool_.size();
  for (int i = 0; i < kBufferCount; ++i) {
    // Give input buffers to OmxVideoDecodeEngine. OmxVideoDecodeEngine will
    // make a new FillThisBuffer() call for each read.
    EXPECT_CALL(*MockOmx::get(), EmptyThisBuffer(NotNull()))
        .WillOnce(DoAll(EmptyBufferDone(&output_pool_), Return(OMX_ErrorNone)))
        .RetiresOnSaturation();
    EXPECT_CALL(*MockOmx::get(), FillThisBuffer(NotNull()))
        .WillOnce(DoAll(EnqueueOutputBuffer(&output_pool_),
                        Return(OMX_ErrorNone)))
        .RetiresOnSaturation();
    MakeEmptyBufferRequest();
  }
  message_loop_.RunAllPending();
  EXPECT_EQ(kBufferCount, static_cast<int>(input_units_.size()));
  EXPECT_EQ(kBufferCount, static_cast<int>(output_units_.size()));
  EXPECT_EQ(count, static_cast<int>(output_pool_.size()));
  output_units_.clear();

  // Send EndOfStream, expect eos flag.
  SendEOSInputBuffer();
  EXPECT_EQ(kBufferCount - 1, static_cast<int>(input_units_.size()));
  EXPECT_EQ(1, static_cast<int>(output_units_.size()));
  EXPECT_EQ(count - 1, static_cast<int>(output_pool_.size()));
  EXPECT_TRUE(got_eos_);

  // Shutdown.
  ExpectStop();
  omx_engine_->Stop(stop_task_.CreateTask());
  message_loop_.RunAllPending();
}

// Decodes 2 * kBufferCount frames to prove input buffers are recycled;
// the TestBuffer sizes encode the recycle sequence and are checked each turn.
TEST_F(OmxCodecTest, RecycleInputBuffers) {
  ExpectSettings();
  ExpectStart();
  omx_engine_->Initialize(&message_loop_,
                          &av_stream_,
                          feed_done_cb_,
                          decode_done_cb_,
                          init_done_cb_task_.CreateTask());
  message_loop_.RunAllPending();
  EXPECT_EQ(kBufferCount, input_buffer_count_);
  EXPECT_EQ(VideoDecodeEngine::kNormal, omx_engine_->state());

  // Make emptybuffer requests, also recycle input buffers
  EXPECT_EQ(0u, output_units_.size());
  int count = output_pool_.size();
  int repeat_count = kBufferCount * 2;
  for (int i = 0; i < repeat_count; ++i) {
    // Give input buffers to OmxVideoDecodeEngine. OmxVideoDecodeEngine will
    // make a new FillThisBuffer() call for each read.
    EXPECT_CALL(*MockOmx::get(), EmptyThisBuffer(NotNull()))
        .WillOnce(DoAll(EmptyBufferDone(&output_pool_), Return(OMX_ErrorNone)))
        .RetiresOnSaturation();
    EXPECT_CALL(*MockOmx::get(), FillThisBuffer(NotNull()))
        .WillOnce(DoAll(EnqueueOutputBuffer(&output_pool_),
                        Return(OMX_ErrorNone)))
        .RetiresOnSaturation();
    MakeEmptyBufferRequest();
    message_loop_.RunAllPending();
    CHECK(kBufferCount == static_cast<int>(input_units_.size()));
    CHECK(((i % kBufferCount) + 1) ==
          static_cast<int>(input_units_.back()->GetDataSize()));
  }
  message_loop_.RunAllPending();
  EXPECT_EQ(kBufferCount, static_cast<int>(input_units_.size()));
  EXPECT_EQ(repeat_count, static_cast<int>(output_units_.size()));
  EXPECT_EQ(count, static_cast<int>(output_pool_.size()));
  output_units_.clear();

  // Send EndOfStream, expect eos flag.
  SendEOSInputBuffer();
  EXPECT_EQ(kBufferCount - 1, static_cast<int>(input_units_.size()));
  EXPECT_EQ(1, static_cast<int>(output_units_.size()));
  EXPECT_EQ(count - 1, static_cast<int>(output_pool_.size()));
  EXPECT_TRUE(got_eos_);

  // Shutdown.
  ExpectStop();
  omx_engine_->Stop(stop_task_.CreateTask());
  message_loop_.RunAllPending();
}

// TODO(hclam): Add test case for dynamic port config.
// TODO(hclam): Create a more complicated test case so that read
// requests and reply from FillThisBuffer() arrives out of order.
} // namespace media
#endif

@ -0,0 +1,168 @@
// Copyright (c) 2011 The Chromium Authors. All rights reserved. Use of this
// source code is governed by a BSD-style license that can be found in the
// LICENSE file.
#include "media/omx/omx_configurator.h"
#include "base/logging.h"
namespace media {
// Maps a codec enum to the suffix used when building an OpenMAX role name
// (e.g. "video_decoder.avc").  Hitting an unmapped codec is a programming
// error.
static std::string GetCodecName(OmxConfigurator::Codec codec) {
  switch (codec) {
    case OmxConfigurator::kCodecH264:
      return "avc";
    case OmxConfigurator::kCodecH263:
      return "h263";
    case OmxConfigurator::kCodecMpeg4:
      return "mpeg4";
    case OmxConfigurator::kCodecVc1:
      return "vc1";
    default: {
      NOTREACHED();
      return "";
    }
  }
}
// Builds the decoder role string from the input (compressed) format,
// e.g. "video_decoder.avc".
std::string OmxDecoderConfigurator::GetRoleName() const {
  return "video_decoder." + GetCodecName(input_format().codec);
}
// Configures the decoder's input (compressed) port.  Only the input port
// definition is written; |output_port_def| is intentionally untouched here.
// Returns false if the codec is unset or the component rejects the settings.
bool OmxDecoderConfigurator::ConfigureIOPorts(
    OMX_COMPONENTTYPE* component,
    OMX_PARAM_PORTDEFINITIONTYPE* input_port_def,
    OMX_PARAM_PORTDEFINITIONTYPE* output_port_def) const {
  // Configure the input port.
  if (input_format().codec == kCodecNone) {
    LOG(ERROR) << "Unsupported codec " << input_format().codec;
    return false;
  }
  // NOTE(review): codecs other than the four below (e.g. kCodecRaw) fall
  // through without setting eCompressionFormat — confirm that is intended.
  if (input_format().codec == kCodecH264)
    input_port_def->format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
  else if (input_format().codec == kCodecMpeg4)
    input_port_def->format.video.eCompressionFormat = OMX_VIDEO_CodingMPEG4;
  else if (input_format().codec == kCodecH263)
    input_port_def->format.video.eCompressionFormat = OMX_VIDEO_CodingH263;
  else if (input_format().codec == kCodecVc1)
    input_port_def->format.video.eCompressionFormat = OMX_VIDEO_CodingWMV;

  // Assumes 480P.
  input_port_def->format.video.nFrameWidth = 720;
  input_port_def->format.video.nFrameHeight = 480;

  OMX_ERRORTYPE omxresult = OMX_ErrorNone;
  omxresult = OMX_SetParameter(component,
                               OMX_IndexParamPortDefinition,
                               input_port_def);
  if (omxresult != OMX_ErrorNone) {
    LOG(ERROR) << "SetParameter(OMX_IndexParamPortDefinition) "
                  "for input port failed";
    return false;
  }
  return true;
}
// Builds the encoder role string from the output (compressed) format,
// e.g. "video_encoder.mpeg4".
std::string OmxEncoderConfigurator::GetRoleName() const {
  return "video_encoder." + GetCodecName(output_format().codec);
}
// Configures the encoder component: frame geometry on both ports, then
// MPEG4 codec parameters, target bitrate, and encode frame rate.
// Returns false as soon as any OMX_Get/SetParameter or OMX_Get/SetConfig
// call fails (the failing call is logged).
//
// Fix: the error messages for the OMX_IndexConfigVideoFramerate calls said
// "GetParameter"/"SetParameter" although the failing calls are actually
// OMX_GetConfig/OMX_SetConfig; the log text now names the right API.
bool OmxEncoderConfigurator::ConfigureIOPorts(
    OMX_COMPONENTTYPE* component,
    OMX_PARAM_PORTDEFINITIONTYPE* input_port_def,
    OMX_PARAM_PORTDEFINITIONTYPE* output_port_def) const {
  // TODO(jiesun): Add support for other format than MPEG4.
  DCHECK_EQ(kCodecMpeg4, output_format().codec);

  // Configure the input port.
  input_port_def->format.video.nFrameWidth =
      input_format().video_header.width;
  input_port_def->format.video.nFrameHeight =
      input_format().video_header.height;
  OMX_ERRORTYPE omxresult = OMX_ErrorNone;
  omxresult = OMX_SetParameter(component,
                               OMX_IndexParamPortDefinition,
                               input_port_def);
  if (omxresult != OMX_ErrorNone) {
    LOG(ERROR) << "SetParameter(OMX_IndexParamPortDefinition) "
                  "for input port failed";
    return false;
  }

  // Configure the output port.
  output_port_def->format.video.nFrameWidth =
      input_format().video_header.width;
  output_port_def->format.video.nFrameHeight =
      input_format().video_header.height;
  omxresult = OMX_SetParameter(component,
                               OMX_IndexParamPortDefinition,
                               output_port_def);
  if (omxresult != OMX_ErrorNone) {
    LOG(ERROR) << "SetParameter(OMX_IndexParamPortDefinition) "
                  "for output port failed";
    return false;
  }

  if (output_format().codec == kCodecMpeg4) {
    // NOTE(review): mp4_type is passed to OMX_GetParameter without its
    // nSize/nVersion header being initialized (the OMX IL spec requires
    // them) — confirm against the target component.
    OMX_VIDEO_PARAM_MPEG4TYPE mp4_type;
    omxresult = OMX_GetParameter(component,
                                 OMX_IndexParamVideoMpeg4,
                                 &mp4_type);
    if (omxresult != OMX_ErrorNone) {
      LOG(ERROR) << "GetParameter(OMX_IndexParamVideoMpeg4) failed";
      return false;
    }
    // TODO(jiesun): verify if other vendors had the same definition.
    // Specify the frame rate.
    mp4_type.nTimeIncRes = output_format().video_header.frame_rate * 2;
    // Specify how many P frames between adjacent intra frames.
    mp4_type.nPFrames = output_format().video_header.i_dist - 1;
    omxresult = OMX_SetParameter(component,
                                 OMX_IndexParamVideoMpeg4,
                                 &mp4_type);
    if (omxresult != OMX_ErrorNone) {
      LOG(ERROR) << "SetParameter(OMX_IndexParamVideoMpeg4) failed";
      return false;
    }
  }

  OMX_VIDEO_PARAM_BITRATETYPE bitrate;
  omxresult = OMX_GetParameter(component,
                               OMX_IndexParamVideoBitrate,
                               &bitrate);
  if (omxresult != OMX_ErrorNone) {
    LOG(ERROR) << "GetParameter(OMX_IndexParamVideoBitrate) failed";
    return false;
  }
  // TODO(jiesun): expose other rate control method that matters.
  bitrate.eControlRate = OMX_Video_ControlRateConstant;
  bitrate.nTargetBitrate = output_format().video_header.bit_rate;
  omxresult = OMX_SetParameter(component,
                               OMX_IndexParamVideoBitrate,
                               &bitrate);
  if (omxresult != OMX_ErrorNone) {
    LOG(ERROR) << "SetParameter(OMX_IndexParamVideoBitrate) failed";
    return false;
  }

  OMX_CONFIG_FRAMERATETYPE framerate;
  omxresult = OMX_GetConfig(component,
                            OMX_IndexConfigVideoFramerate,
                            &framerate);
  if (omxresult != OMX_ErrorNone) {
    LOG(ERROR) << "GetConfig(OMX_IndexConfigVideoFramerate) failed";
    return false;
  }
  framerate.xEncodeFramerate =
      output_format().video_header.frame_rate << 16;  // Q16 format.
  omxresult = OMX_SetConfig(component,
                            OMX_IndexConfigVideoFramerate,
                            &framerate);
  if (omxresult != OMX_ErrorNone) {
    LOG(ERROR) << "SetConfig(OMX_IndexConfigVideoFramerate) failed";
    return false;
  }
  return true;
}
} // namespace media

@ -0,0 +1,154 @@
// Copyright (c) 2010 The Chromium Authors. All rights reserved. Use of this
// source code is governed by a BSD-style license that can be found in the
// LICENSE file.
#ifndef MEDIA_OMX_OMX_CONFIGURATOR_H_
#define MEDIA_OMX_OMX_CONFIGURATOR_H_
#include <string>
#include "base/basictypes.h"
#include "third_party/openmax/il/OMX_Component.h"
#include "third_party/openmax/il/OMX_Core.h"
#include "third_party/openmax/il/OMX_Video.h"
namespace media {
// Abstract strategy used by OmxCodec to pick an OpenMAX component role and
// program its I/O ports.  Subclasses (decoder/encoder) supply the role name
// and the port-configuration logic; this base holds the negotiated input and
// output media formats.
//
// Cleanup: merged the duplicated `private:` access specifier into one
// private section (no behavior change — both sections were already private).
class OmxConfigurator {
 public:
  enum Codec {
    kCodecNone,
    kCodecH264,
    kCodecMpeg4,
    kCodecH263,
    kCodecVc1,
    kCodecRaw,
  };

  // TODO(jiesun): figure out what other surface formats are.
  enum SurfaceFormat {
    kSurfaceFormatNV21,
    kSurfaceFormatNV21Tiled,
    kSurfaceFormatNV12,
  };

  // Common video parameters shared by all codecs.
  struct MediaFormatVideoHeader {
    int width;
    int height;
    int stride;      // n/a to compressed stream.
    int frame_rate;
    int bit_rate;    // n/a to raw stream.
    int profile;     // n/a to raw stream.
    int level;       // n/a to raw stream.
    int i_dist;      // i frame distance; >0 if p frame is enabled.
    int p_dist;      // p frame distance; >0 if b frame is enabled.
  };

  struct MediaFormatVideoRaw {
    SurfaceFormat color_space;
  };

  struct MediaFormatVideoH264 {
    int slice_enable;
    int max_ref_frames;
    int num_ref_l0, num_ref_l1;
    int cabac_enable;
    int cabac_init_idc;
    int deblock_enable;
    int frame_mbs_only_flags;
    int mbaff_enable;
    int bdirect_spatial_temporal;
  };

  struct MediaFormatVideoMPEG4 {
    int ac_pred_enable;
    int time_inc_res;
    int slice_enable;
  };

  // A codec identifier plus its shared header and codec-specific parameters.
  struct MediaFormat {
    // TODO(jiesun): instead of codec type, we should have media format.
    Codec codec;
    MediaFormatVideoHeader video_header;
    union {
      MediaFormatVideoRaw raw;
      MediaFormatVideoH264 h264;
      MediaFormatVideoMPEG4 mpeg4;
    };
  };

  OmxConfigurator(const MediaFormat& input,
                  const MediaFormat& output)
      : input_format_(input),
        output_format_(output) {
  }

  virtual ~OmxConfigurator() {}

  // Returns the role name for this configuration.
  virtual std::string GetRoleName() const = 0;

  // Called by OmxCodec on the message loop given to it during
  // transition to idle state.
  // OmxCodec reads the current IO port definitions and pass it to this
  // method.
  // Returns true if configuration has completed successfully.
  virtual bool ConfigureIOPorts(
      OMX_COMPONENTTYPE* component,
      OMX_PARAM_PORTDEFINITIONTYPE* input_port_def,
      OMX_PARAM_PORTDEFINITIONTYPE* output_port_def) const = 0;

  const MediaFormat& input_format() const { return input_format_; }
  const MediaFormat& output_format() const { return output_format_; }

 private:
  MediaFormat input_format_;
  MediaFormat output_format_;

  DISALLOW_COPY_AND_ASSIGN(OmxConfigurator);
};
// Configurator for a video decode component: role "video_decoder.<codec>",
// programs only the input (compressed) port.
class OmxDecoderConfigurator : public OmxConfigurator {
 public:
  OmxDecoderConfigurator(const MediaFormat& input,
                         const MediaFormat& output)
      : OmxConfigurator(input, output) {
  }

  virtual ~OmxDecoderConfigurator() {}

  virtual std::string GetRoleName() const;

  virtual bool ConfigureIOPorts(
      OMX_COMPONENTTYPE* component,
      OMX_PARAM_PORTDEFINITIONTYPE* input_port_def,
      OMX_PARAM_PORTDEFINITIONTYPE* output_port_def) const;

 private:
  DISALLOW_COPY_AND_ASSIGN(OmxDecoderConfigurator);
};

// Configurator for a video encode component: role "video_encoder.<codec>",
// programs both ports plus codec, bitrate and frame-rate parameters.
class OmxEncoderConfigurator : public OmxConfigurator {
 public:
  OmxEncoderConfigurator(const MediaFormat& input,
                         const MediaFormat& output)
      : OmxConfigurator(input, output) {
  }

  virtual ~OmxEncoderConfigurator() {}

  virtual std::string GetRoleName() const;

  virtual bool ConfigureIOPorts(
      OMX_COMPONENTTYPE* component,
      OMX_PARAM_PORTDEFINITIONTYPE* input_port_def,
      OMX_PARAM_PORTDEFINITIONTYPE* output_port_def) const;

 private:
  DISALLOW_COPY_AND_ASSIGN(OmxEncoderConfigurator);
};
} // namespace media
#endif // MEDIA_OMX_OMX_CONFIGURATOR_H_

421
media/omx/omx_unittest.cc Normal file

@ -0,0 +1,421 @@
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/command_line.h"
#include "base/logging.h"
#include "base/synchronization/waitable_event.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/openmax/il/OMX_Component.h"
#include "third_party/openmax/il/OMX_Core.h"
namespace media {
// Defines the maximum number of buffers created for I/O ports.
static const int kMaxBufferNum = 256;

// Zeroes an OpenMAX parameter structure and fills in the two header fields
// (nSize and nVersion.nVersion) that every OMX_*TYPE structure carries.
// |version| defaults to OpenMAX IL 1.1 (0x00000101); callers talking to a
// component that reports a different spec version can override it.
// (Resolves the old TODO about making the version number configurable.)
template <typename T>
static void ResetHeader(T* param, unsigned int version = 0x00000101) {
  memset(param, 0, sizeof(T));
  param->nVersion.nVersion = version;
  param->nSize = sizeof(T);
}
class OmxTest : public testing::Test {
public:
// Starts with no component handle and all buffer slots cleared; the
// WaitableEvents are created unsignaled (auto-reset per base API usage here —
// constructed with (false, false)).
OmxTest()
    : handle_(NULL),
      event_(false, false),
      empty_buffer_(false, false),
      fill_buffer_(false, false),
      last_event_type_(OMX_EventMax),
      last_event_data1_(0),
      last_event_data2_(0) {
  memset(input_buffers_, 0, sizeof(input_buffers_));
  memset(output_buffers_, 0, sizeof(output_buffers_));
}
protected:
// Brings the real OpenMAX core up before each test...
virtual void SetUp() {
  // Initialize OpenMAX.
  EXPECT_EQ(OMX_ErrorNone, OMX_Init());
}

// ...and tears it down afterwards.
virtual void TearDown() {
  EXPECT_EQ(OMX_ErrorNone, OMX_Deinit());
}
// Acquires a handle to |component_name|, wiring the three static callbacks
// with |this| as the client context.  Fails the test if the handle cannot
// be obtained.
void InitComponent(std::string component_name) {
  // TODO(hclam): Remove static when bug in driver is fixed.
  static OMX_CALLBACKTYPE callback = { &EventHandler,
                                       &EmptyBufferCallback,
                                       &FillBufferCallback };

  OMX_ERRORTYPE omxresult = OMX_GetHandle(
      (void**)&handle_,
      const_cast<OMX_STRING>(component_name.c_str()),
      this, &callback);

  EXPECT_EQ(OMX_ErrorNone, omxresult);
  CHECK(handle_);
}

// Releases the component handle if one was acquired.
// NOTE(review): handle_ is not reset to NULL here, so calling this twice
// would free the same handle twice — confirm callers only invoke it once.
void DeinitComponent() {
  if (handle_)
    OMX_FreeHandle(handle_);
}
// Allocates the configured number of buffers on |port| (must be the input
// or output port) and records the headers in the matching buffer array.
void AllocateBuffers(int port) {
  int count = 0;
  int size = 0;
  OMX_BUFFERHEADERTYPE** buffers = NULL;
  if (port == input_port_) {
    count = input_buffer_count_;
    size = input_buffer_size_;
    buffers = input_buffers_;
  } else if (port == output_port_) {
    count = output_buffer_count_;
    size = output_buffer_size_;
    buffers = output_buffers_;
  } else {
    NOTREACHED() << "Not a valid port";
  }
  for (int i = 0; i < count; ++i) {
    EXPECT_EQ(OMX_ErrorNone,
              OMX_AllocateBuffer(handle_, buffers + i,
                                 port, NULL, size));
  }
}

// Frees every buffer previously allocated on |port| and clears the
// corresponding array slots.
void ReleaseBuffers(int port) {
  int count = 0;
  OMX_BUFFERHEADERTYPE** buffers = NULL;
  if (port == input_port_) {
    count = input_buffer_count_;
    buffers = input_buffers_;
  } else if (port == output_port_) {
    count = output_buffer_count_;
    buffers = output_buffers_;
  } else {
    NOTREACHED() << "Not a valid port";
  }
  for (int i = 0; i < count; ++i) {
    CHECK(buffers[i]);
    EXPECT_EQ(OMX_ErrorNone,
              OMX_FreeBuffer(handle_, port, buffers[i]));
    buffers[i] = NULL;
  }
}
void TransitionLoadedToIdle() {
EXPECT_EQ(OMX_ErrorNone,
OMX_SendCommand(handle_, OMX_CommandStateSet,
OMX_StateIdle, 0));
AllocateBuffers(input_port_);
AllocateBuffers(output_port_);
event_.Wait();
EXPECT_EQ(OMX_EventCmdComplete, last_event_type_);
EXPECT_EQ(OMX_CommandStateSet, last_event_data1_);
EXPECT_EQ(OMX_StateIdle, last_event_data2_);
}
void TransitionIdleToLoaded() {
EXPECT_EQ(OMX_ErrorNone,
OMX_SendCommand(handle_, OMX_CommandStateSet,
OMX_StateLoaded, 0));
ReleaseBuffers(input_port_);
ReleaseBuffers(output_port_);
event_.Wait();
EXPECT_EQ(OMX_EventCmdComplete, last_event_type_);
EXPECT_EQ(OMX_CommandStateSet, last_event_data1_);
EXPECT_EQ(OMX_StateLoaded, last_event_data2_);
}
void TransitionIdleToExecuting() {
EXPECT_EQ(OMX_ErrorNone,
OMX_SendCommand(handle_, OMX_CommandStateSet,
OMX_StateExecuting, 0));
event_.Wait();
EXPECT_EQ(OMX_EventCmdComplete, last_event_type_);
EXPECT_EQ(OMX_CommandStateSet, last_event_data1_);
EXPECT_EQ(OMX_StateExecuting, last_event_data2_);
}
void TransitionExecutingToIdle() {
EXPECT_EQ(OMX_ErrorNone,
OMX_SendCommand(handle_, OMX_CommandStateSet,
OMX_StateIdle, 0));
event_.Wait();
EXPECT_EQ(OMX_EventCmdComplete, last_event_type_);
EXPECT_EQ(OMX_CommandStateSet, last_event_data1_);
EXPECT_EQ(OMX_StateIdle, last_event_data2_);
}
void GetComponentsOfRole(std::string role) {
OMX_U32 roles = 0;
OMX_U8** component_names = NULL;
const int kSize = 256;
LOG(INFO) << "GetComponentsOfRole: " << role;
EXPECT_EQ(OMX_ErrorNone, OMX_GetComponentsOfRole(
const_cast<OMX_STRING>(role.c_str()), &roles, 0));
// TODO(hclam): Should assert the component number.
LOG(INFO) << "Components: " << roles;
if (roles) {
component_names = new OMX_U8*[roles];
for (size_t i = 0; i < roles; ++i)
component_names[i] = new OMX_U8[kSize];
OMX_U32 roles_backup = roles;
EXPECT_EQ(OMX_ErrorNone,
OMX_GetComponentsOfRole(
const_cast<OMX_STRING>(role.c_str()),
&roles, component_names));
ASSERT_EQ(roles_backup, roles);
for (size_t i = 0; i < roles; ++i) {
LOG(INFO) << "Component name: " << component_names[i];
delete [] component_names[i];
}
delete [] component_names;
}
}
OMX_ERRORTYPE EventHandlerInternal(
OMX_HANDLETYPE component, OMX_EVENTTYPE event,
OMX_U32 data1, OMX_U32 data2, OMX_PTR event_data) {
last_event_type_ = event;
last_event_data1_ = static_cast<int>(data1);
last_event_data2_ = static_cast<int>(data2);
// TODO(hclam): Save |event_data|.
event_.Signal();
return OMX_ErrorNone;
}
OMX_ERRORTYPE EmptyBufferCallbackInternal(
OMX_HANDLETYPE component, OMX_BUFFERHEADERTYPE* buffer) {
// TODO(hclam): Add code here.
empty_buffer_.Signal();
return OMX_ErrorNone;
}
OMX_ERRORTYPE FillBufferCallbackInternal(
OMX_HANDLETYPE component, OMX_BUFFERHEADERTYPE* buffer) {
// TODO(hclam): Add code here.
fill_buffer_.Signal();
return OMX_ErrorNone;
}
// Static callback methods.
static OMX_ERRORTYPE EventHandler(
OMX_HANDLETYPE component, OMX_PTR priv_data,
OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2,
OMX_PTR event_data) {
return static_cast<OmxTest*>(priv_data)
->EventHandlerInternal(component,
event, data1, data2, event_data);
}
static OMX_ERRORTYPE EmptyBufferCallback(
OMX_HANDLETYPE component, OMX_PTR priv_data,
OMX_BUFFERHEADERTYPE* buffer) {
return static_cast<OmxTest*>(priv_data)
->EmptyBufferCallbackInternal(component, buffer);
}
static OMX_ERRORTYPE FillBufferCallback(
OMX_HANDLETYPE component, OMX_PTR priv_data,
OMX_BUFFERHEADERTYPE* buffer) {
return static_cast<OmxTest*>(priv_data)
->FillBufferCallbackInternal(component, buffer);
}
OMX_COMPONENTTYPE* handle_;
int input_port_;
int output_port_;
int input_buffer_count_;
int input_buffer_size_;
int output_buffer_count_;
int output_buffer_size_;
OMX_BUFFERHEADERTYPE* input_buffers_[kMaxBufferNum];
OMX_BUFFERHEADERTYPE* output_buffers_[kMaxBufferNum];
base::WaitableEvent event_;
base::WaitableEvent empty_buffer_;
base::WaitableEvent fill_buffer_;
OMX_EVENTTYPE last_event_type_;
int last_event_data1_;
int last_event_data2_;
};
// Fixture for tests driving a video decoder component. The component
// name and codec come from command-line switches so the same binary can
// exercise different vendor implementations.
class OmxVideoDecoderTest : public OmxTest {
 protected:
  // Queries the component's port IDs, applies |codec|/|width|/|height|
  // to the input port and records the buffer requirements of both ports
  // into the fixture members used by AllocateBuffers().
  void Configure(OMX_VIDEO_CODINGTYPE codec,
                 int width, int height) {
    // Obtain port IDs.
    OMX_PORT_PARAM_TYPE port_param;
    ResetHeader(&port_param);
    EXPECT_EQ(OMX_ErrorNone,
              OMX_GetParameter(handle_,
                               OMX_IndexParamVideoInit,
                               &port_param));
    // Assumes the decoder exposes input/output as two consecutive
    // ports, as is conventional for OMX IL video decoders.
    input_port_ = port_param.nStartPortNumber;
    output_port_ = port_param.nStartPortNumber + 1;
    LOG(INFO) << "Input port number: " << input_port_;
    LOG(INFO) << "Output port number: " << output_port_;

    // Get and set parameters for input port.
    LOG(INFO) << "Input port width: " << width;
    LOG(INFO) << "Input port height: " << height;
    LOG(INFO) << "Input port codec: " << codec;
    OMX_PARAM_PORTDEFINITIONTYPE port_format;
    ResetHeader(&port_format);
    port_format.nPortIndex = input_port_;
    EXPECT_EQ(OMX_ErrorNone,
              OMX_GetParameter(handle_,
                               OMX_IndexParamPortDefinition,
                               &port_format));
    EXPECT_EQ(OMX_DirInput, port_format.eDir);
    port_format.format.video.eCompressionFormat = codec;
    port_format.format.video.nFrameWidth = width;
    port_format.format.video.nFrameHeight = height;
    EXPECT_EQ(OMX_ErrorNone,
              OMX_SetParameter(handle_,
                               OMX_IndexParamPortDefinition,
                               &port_format));

    // TODO(hclam): Add configurations to output port.

    // Get Parameters for input port. Re-read after SetParameter since
    // the component may have adjusted the buffer requirements.
    ResetHeader(&port_format);
    port_format.nPortIndex = input_port_;
    EXPECT_EQ(OMX_ErrorNone,
              OMX_GetParameter(handle_,
                               OMX_IndexParamPortDefinition,
                               &port_format));
    EXPECT_EQ(OMX_DirInput, port_format.eDir);
    input_buffer_count_ = port_format.nBufferCountMin;
    input_buffer_size_ = port_format.nBufferSize;
    CHECK(input_buffer_count_ < kMaxBufferNum);

    // Get parameters for output port.
    ResetHeader(&port_format);
    port_format.nPortIndex = output_port_;
    EXPECT_EQ(OMX_ErrorNone,
              OMX_GetParameter(handle_,
                               OMX_IndexParamPortDefinition,
                               &port_format));
    EXPECT_EQ(OMX_DirOutput, port_format.eDir);
    output_buffer_count_ = port_format.nBufferCountMin;
    output_buffer_size_ = port_format.nBufferSize;
    CHECK(output_buffer_count_ < kMaxBufferNum);

    LOG(INFO) << "Input buffer count: " << input_buffer_count_;
    LOG(INFO) << "Input buffer size: " << input_buffer_size_;
    LOG(INFO) << "Output buffer count: " << output_buffer_count_;
    LOG(INFO) << "Output buffer size: " << output_buffer_size_;
  }

  // Component name supplied via --video-decoder-component.
  std::string component() {
    return CommandLine::ForCurrentProcess()
        ->GetSwitchValueASCII("video-decoder-component");
  }

  // Codec selected via --video-decoder-codec; only "h264" maps to a
  // specific coding type, anything else means auto-detect.
  OMX_VIDEO_CODINGTYPE codec() {
    std::string codec = CommandLine::ForCurrentProcess()
        ->GetSwitchValueASCII("video-decoder-codec");
    if (codec == "h264")
      return OMX_VIDEO_CodingAVC;
    return OMX_VIDEO_CodingAutoDetect;
  }
};
// Verifies OMX_Init()/OMX_Deinit() performed by SetUp()/TearDown()
// succeed without any component being created.
TEST_F(OmxTest, SimpleInit) {
  // An empty test case will test basic init/deinit of OpenMAX library.
}
// Enumerates the components registered for common decoder roles; the
// results are only logged, not asserted.
TEST_F(OmxTest, GetComponentsOfRole) {
  // Roles video decoders.
  GetComponentsOfRole("video_decoder.avc");
  GetComponentsOfRole("video_decoder.mpeg4");
  GetComponentsOfRole("video_decoder.vc1");

  // TODO(hclam): Add roles of encoders.
}
// Creates and destroys a component handle without configuring it.
TEST_F(OmxVideoDecoderTest, GetHandle) {
  // TODO(hclam): Should use GetComponentsOfRole instead.
  InitComponent(component());
  DeinitComponent();
}
// Exercises port configuration while staying in the Loaded state.
TEST_F(OmxVideoDecoderTest, Configuration) {
  InitComponent(component());
  // TODO(hclam): Make resolution configurable.
  Configure(codec(), 1024, 768);
  DeinitComponent();
}
// Full round-trip Loaded -> Idle -> Loaded, including buffer
// allocation and release.
TEST_F(OmxVideoDecoderTest, TransitionToIdle) {
  InitComponent(component());
  Configure(codec(), 1024, 768);
  TransitionLoadedToIdle();
  TransitionIdleToLoaded();
  DeinitComponent();
}
// Frees the handle while still in Idle: the component must clean up
// its own resources without an explicit transition back to Loaded.
TEST_F(OmxVideoDecoderTest, FreeHandleWhenIdle) {
  InitComponent(component());
  Configure(codec(), 1024, 768);
  TransitionLoadedToIdle();
  DeinitComponent();
}
// Full state cycle Loaded -> Idle -> Executing and back.
TEST_F(OmxVideoDecoderTest, TransitionToExecuting) {
  InitComponent(component());
  Configure(codec(), 1024, 768);
  TransitionLoadedToIdle();
  TransitionIdleToExecuting();
  TransitionExecutingToIdle();
  TransitionIdleToLoaded();
  DeinitComponent();
}
// Frees the handle while Executing: stresses component teardown from a
// running state.
TEST_F(OmxVideoDecoderTest, FreeHandleWhenExecuting) {
  InitComponent(component());
  Configure(codec(), 1024, 768);
  TransitionLoadedToIdle();
  TransitionIdleToExecuting();
  DeinitComponent();
}
// The IL spec requires OMX_GetHandle to copy the callback table; this
// test zeroes the caller's copy after GetHandle and checks the
// component still delivers events during the subsequent transitions.
TEST_F(OmxVideoDecoderTest, CallbacksAreCopied) {
  // Allocate a callback struct on stack and clear it with zero.
  // This make sure OpenMAX library will copy the content of the
  // struct.
  OMX_CALLBACKTYPE callback = { &EventHandler,
                                &EmptyBufferCallback,
                                &FillBufferCallback };
  OMX_ERRORTYPE omxresult = OMX_GetHandle(
      (void**)&handle_,
      const_cast<OMX_STRING>(component().c_str()),
      this, &callback);
  EXPECT_EQ(OMX_ErrorNone, omxresult);
  CHECK(handle_);
  memset(&callback, 0, sizeof(callback));

  // Then configure the component as usual.
  Configure(codec(), 1024, 768);
  TransitionLoadedToIdle();
  TransitionIdleToExecuting();
  TransitionExecutingToIdle();
  TransitionIdleToLoaded();
  DeinitComponent();
}
} // namespace media

@ -0,0 +1,17 @@
// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/file_path.h"
#include "base/logging.h"
#include "base/test/test_suite.h"
#include "media/base/media.h"
int main(int argc, char** argv) {
// Load the OpenMAX library.
if (!media::InitializeOpenMaxLibrary(FilePath())) {
LOG(ERROR) << "Unable to initialize OpenMAX library.";
return -1;
}
return base::TestSuite(argc, argv).Run();
}

@ -0,0 +1,61 @@
// Copyright (c) 2010 The Chromium Authors. All rights reserved. Use of this
// source code is governed by a BSD-style license that can be found in the
// LICENSE file.
#include "media/tools/omx_test/color_space_util.h"
namespace media {
void NV21toIYUV(const uint8* nv21, uint8* iyuv, int width, int height) {
memcpy(iyuv, nv21, width * height);
iyuv += width * height;
nv21 += width * height;
uint8* u = iyuv;
uint8* v = iyuv + width * height / 4;
for (int i = 0; i < width * height / 4; ++i) {
*v++ = *nv21++;
*u++ = *nv21++;
}
}
void NV21toYV12(const uint8* nv21, uint8* yv12, int width, int height) {
memcpy(yv12, nv21, width * height);
yv12 += width * height;
nv21 += width * height;
uint8* v = yv12;
uint8* u = yv12 + width * height / 4;
for (int i = 0; i < width * height / 4; ++i) {
*v++ = *nv21++;
*u++ = *nv21++;
}
}
// Converts IYUV/I420 (planar Y, U, V) into NV21 (planar Y plus
// interleaved V/U). Inverse of NV21toIYUV.
void IYUVtoNV21(const uint8* iyuv, uint8* nv21, int width, int height) {
  // Luma plane is byte-identical in both formats.
  memcpy(nv21, iyuv, width * height);
  iyuv += width * height;
  nv21 += width * height;
  const uint8* u = iyuv;
  const uint8* v = iyuv + width * height / 4;
  // Interleave as V then U, which is the NV21 chroma ordering.
  for (int i = 0; i < width * height / 4; ++i) {
    *nv21++ = *v++;
    *nv21++ = *u++;
  }
}
// Converts YV12 (planar Y, V, U) into NV21. Inverse of NV21toYV12.
void YV12toNV21(const uint8* yv12, uint8* nv21, int width, int height) {
  // Luma plane is byte-identical in both formats.
  memcpy(nv21, yv12, width * height);
  yv12 += width * height;
  nv21 += width * height;
  // YV12 stores V before U, so no swap is needed when interleaving.
  const uint8* v = yv12;
  const uint8* u = yv12 + width * height / 4;
  for (int i = 0; i < width * height / 4; ++i) {
    *nv21++ = *v++;
    *nv21++ = *u++;
  }
}
} // namespace media

@ -0,0 +1,24 @@
// Copyright (c) 2010 The Chromium Authors. All rights reserved. Use of this
// source code is governed by a BSD-style license that can be found in the
// LICENSE file.
// Color space conversion methods, they are for testing purpose and are
// not optimized for production use.
#ifndef MEDIA_TOOLS_OMX_TEST_COLOR_SPACE_UTIL_H_
#define MEDIA_TOOLS_OMX_TEST_COLOR_SPACE_UTIL_H_
#include "base/basictypes.h"
namespace media {

// First parameter is the input buffer, second parameter is the output
// buffer. All conversions assume 4:2:0 subsampling, so each buffer is
// width * height * 3 / 2 bytes; the caller owns both buffers.
void NV21toIYUV(const uint8* nv21, uint8* iyuv, int width, int height);
void NV21toYV12(const uint8* nv21, uint8* yv12, int width, int height);
void IYUVtoNV21(const uint8* iyuv, uint8* nv21, int width, int height);
void YV12toNV21(const uint8* yv12, uint8* nv21, int width, int height);

}  // namespace media
#endif // MEDIA_TOOLS_OMX_TEST_COLOR_SPACE_UTIL_H_

@ -0,0 +1,296 @@
// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/tools/omx_test/file_reader_util.h"
#include <stdio.h>
#include <string>
#include "base/file_util.h"
#include "base/logging.h"
#include "base/string_util.h"
#include "media/ffmpeg/ffmpeg_common.h"
#include "media/ffmpeg/file_protocol.h"
#include "media/filters/bitstream_converter.h"
#include "media/tools/omx_test/color_space_util.h"
namespace media {
//////////////////////////////////////////////////////////////////////////////
// BasicFileReader
// Stores the path only; the file is not opened until Initialize().
BasicFileReader::BasicFileReader(const FilePath& path)
    : path_(path),
      file_(NULL) {
}
// Opens the file for binary reading. ScopedStdioHandle closes the
// FILE* automatically when this reader is destroyed.
bool BasicFileReader::Initialize() {
  FILE* stream = file_util::OpenFile(path_, "rb");
  file_.Set(stream);
  if (!stream)
    LOG(ERROR) << "unable to open " << path_.value();
  return stream != NULL;
}
//////////////////////////////////////////////////////////////////////////////
// YuvFileReader
// |loop_count| is the number of passes over the input file; when
// |enable_csc| is set each I420 frame read is converted to NV21 before
// being handed to the caller.
YuvFileReader::YuvFileReader(const FilePath& path,
                             int width,
                             int height,
                             int loop_count,
                             bool enable_csc)
    : BasicFileReader(path),
      width_(width),
      height_(height),
      loop_count_(loop_count),
      output_nv21_(enable_csc) {
}

YuvFileReader::~YuvFileReader() {}
// Reads one whole I420 frame into a newly allocated buffer that the
// caller takes ownership of. When |output_nv21_| is set the frame is
// converted to NV21 first. On end of stream (|loop_count_| exhausted)
// |*size| is 0; otherwise the file is rewound and read again.
void YuvFileReader::Read(uint8** output, int* size) {
  if (!file()) {
    *size = 0;
    *output = NULL;
    return;
  }

  while (true) {
    scoped_array<uint8> data;
    int bytes_read = 0;

    // OMX require encoder input are delivered in frames (or planes).
    // Assume the input file is I420 YUV file.
    const int kFrameSize = width_ * height_ * 3 / 2;
    data.reset(new uint8[kFrameSize]);

    if (output_nv21_) {
      if (!csc_buf_.get())
        csc_buf_.reset(new uint8[kFrameSize]);
      bytes_read = fread(csc_buf_.get(), 1, kFrameSize, file());

      // We do not convert partial frames.
      if (bytes_read == kFrameSize)
        IYUVtoNV21(csc_buf_.get(), data.get(), width_, height_);
      else
        bytes_read = 0;  // force cleanup or loop around.
    } else {
      bytes_read = fread(data.get(), 1, kFrameSize, file());
    }

    if (bytes_read) {
      *size = bytes_read;
      *output = data.release();
      break;
    }

    // Encounter the end of file.
    if (loop_count_ == 1) {
      // Signal end of stream and stop. Without this return the loop
      // would rewind the file, read another frame and overwrite (and
      // leak) the end-of-stream buffer, so the stream would never end.
      *size = 0;
      *output = data.release();
      return;
    }

    --loop_count_;
    fseek(file(), 0, SEEK_SET);
  }
}
//////////////////////////////////////////////////////////////////////////////
// BlockFileReader
// Reads the file in fixed-size chunks of |block_size| bytes.
BlockFileReader::BlockFileReader(const FilePath& path,
                                 int block_size)
    : BasicFileReader(path),
      block_size_(block_size) {
}

// Allocates a block the caller owns; |*size| may be short (or zero) at
// end of file.
void BlockFileReader::Read(uint8** output, int* size) {
  CHECK(file());
  *output = new uint8[block_size_];
  *size = fread(*output, 1, block_size_, file());
}
//////////////////////////////////////////////////////////////////////////////
// FFmpegFileReader
FFmpegFileReader::FFmpegFileReader(const FilePath& path)
    : path_(path),
      format_context_(NULL),
      codec_context_(NULL),
      target_stream_(-1),
      converter_(NULL) {
}

FFmpegFileReader::~FFmpegFileReader() {
  // Releases the demuxer and all codec contexts it owns;
  // |codec_context_| is a borrowed pointer into |format_context_|.
  if (format_context_)
    av_close_input_file(format_context_);
}
bool FFmpegFileReader::Initialize() {
// av_open_input_file wants a char*, which can't work with wide paths.
// So we assume ASCII on Windows. On other platforms we can pass the
// path bytes through verbatim.
#if defined(OS_WIN)
std::string string_path = WideToASCII(path_.value());
#else
const std::string& string_path = path_.value();
#endif
int result = av_open_input_file(&format_context_, string_path.c_str(),
NULL, 0, NULL);
if (result < 0) {
switch (result) {
case AVERROR_NOFMT:
LOG(ERROR) << "Error: File format not supported "
<< path_.value() << std::endl;
break;
default:
LOG(ERROR) << "Error: Could not open input for "
<< path_.value() << std::endl;
break;
}
return false;
}
if (av_find_stream_info(format_context_) < 0) {
LOG(ERROR) << "can't use FFmpeg to parse stream info";
return false;
}
for (size_t i = 0; i < format_context_->nb_streams; ++i) {
codec_context_ = format_context_->streams[i]->codec;
// Find the video stream.
if (codec_context_->codec_type == CODEC_TYPE_VIDEO) {
target_stream_ = i;
break;
}
}
if (target_stream_ == -1) {
LOG(ERROR) << "no video in the stream";
return false;
}
// Initialize the bitstream filter if needed.
// TODO(hclam): find a better way to identify mp4 container.
if (codec_context_->codec_id == CODEC_ID_H264) {
converter_.reset(new media::FFmpegBitstreamConverter(
"h264_mp4toannexb", codec_context_));
} else if (codec_context_->codec_id == CODEC_ID_MPEG4) {
converter_.reset(new media::FFmpegBitstreamConverter(
"mpeg4video_es", codec_context_));
} else if (codec_context_->codec_id == CODEC_ID_WMV3) {
converter_.reset(new media::FFmpegBitstreamConverter(
"vc1_asftorcv", codec_context_));
} else if (codec_context_->codec_id == CODEC_ID_VC1) {
converter_.reset(new media::FFmpegBitstreamConverter(
"vc1_asftoannexg", codec_context_));
}
if (converter_.get() && !converter_->Initialize()) {
converter_.reset();
LOG(ERROR) << "failed to initialize h264_mp4toannexb filter";
return false;
}
return true;
}
// Returns the next packet of the selected video stream in a buffer the
// caller owns; |*output| is NULL and |*size| 0 at end of stream or if
// Initialize() has not succeeded.
void FFmpegFileReader::Read(uint8** output, int* size) {
  if (!format_context_ || !codec_context_ || target_stream_ == -1) {
    *size = 0;
    *output = NULL;
    return;
  }

  AVPacket packet;
  bool found = false;
  while (!found) {
    int result = av_read_frame(format_context_, &packet);
    if (result < 0) {
      *output = NULL;
      *size = 0;
      return;
    }
    // Skip packets belonging to other streams (e.g. audio).
    if (packet.stream_index == target_stream_) {
      if (converter_.get() && !converter_->ConvertPacket(&packet)) {
        // NOTE(review): on conversion failure the unconverted packet is
        // still delivered below - presumably best-effort; confirm.
        LOG(ERROR) << "failed to convert AVPacket";
      }
      *output = new uint8[packet.size];
      *size = packet.size;
      memcpy(*output, packet.data, packet.size);
      found = true;
    }
    av_free_packet(&packet);
  }
}
//////////////////////////////////////////////////////////////////////////////
// H264FileReader
const int kH264ReadSize = 1024 * 1024;
// Maintains a sliding window over the file: |current_| is the read
// cursor and |used_| the number of valid bytes in |read_buf_|.
H264FileReader::H264FileReader(const FilePath& path)
    : BasicFileReader(path),
      read_buf_(new uint8[kH264ReadSize]),
      current_(0),
      used_(0) {
}

H264FileReader::~H264FileReader() {}
// Returns one Annex-B NAL unit (delimited by the 00 00 01 start code)
// in a buffer the caller owns. Keeps a sliding window over the file in
// |read_buf_|: bytes [current_, used_) are valid and unconsumed.
void H264FileReader::Read(uint8** output, int *size) {
  // Fill the buffer when it's less than half full.
  int read = 0;
  if (used_ < kH264ReadSize / 2) {
    // Append after the existing valid bytes. Reading at the buffer
    // start (as before) clobbered the unconsumed data in
    // [current_, used_) and corrupted the stream.
    read = fread(read_buf_.get() + used_, 1, kH264ReadSize - used_, file());
    CHECK(read >= 0);
    used_ += read;
  }

  // If we failed to read.
  if (current_ == used_) {
    *output = NULL;
    *size = 0;
    return;
  }

  // Try to find start code of 0x00, 0x00, 0x01. Skip the start code of
  // the current NALU by beginning the scan at current_ + 3.
  bool found = false;
  int pos = current_ + 3;
  for (; pos < used_ - 2; ++pos) {
    if (read_buf_[pos] == 0 &&
        read_buf_[pos+1] == 0 &&
        read_buf_[pos+2] == 1) {
      found = true;
      break;
    }
  }

  // If next NALU is found.
  if (found) {
    CHECK(pos > current_);
    *size = pos - current_;
    *output = new uint8[*size];
    memcpy(*output, read_buf_.get() + current_, *size);
    current_ = pos;

    // If we have used_ more than half of the available buffer.
    // Then move the unused_ buffer to the front to give space
    // for more incoming output.
    if (current_ > used_ / 2) {
      CHECK(used_ > current_);
      memcpy(read_buf_.get(),
             read_buf_.get() + current_,
             used_ - current_);
      used_ = used_ - current_;
      current_ = 0;
    }
    return;
  }

  // If next NALU is not found, assume the remaining data is a NALU
  // and return the data.
  CHECK(used_ > current_);
  *size = used_ - current_;
  *output = new uint8[*size];
  memcpy(*output, read_buf_.get() + current_, *size);
  current_ = used_;
}
} // namespace media

@ -0,0 +1,123 @@
// Copyright (c) 2011 The Chromium Authors. All rights reserved. Use of this
// source code is governed by a BSD-style license that can be found in the
// LICENSE file.
#ifndef MEDIA_TOOLS_OMX_TEST_FILE_READER_UTIL_H_
#define MEDIA_TOOLS_OMX_TEST_FILE_READER_UTIL_H_
#include <string>
#include "base/basictypes.h"
#include "base/file_path.h"
#include "base/memory/scoped_handle.h"
#include "base/memory/scoped_ptr.h"
struct AVCodecContext;
struct AVFormatContext;
namespace media {
class BitstreamConverter;
// A class to help reading and parsing input file for use in omx_test.
// A class to help reading and parsing input file for use in omx_test.
// Implementations return newly allocated buffers; ownership passes to
// the caller.
class FileReader {
 public:
  virtual ~FileReader() {}

  // Initialize FileReader object, returns true if successful.
  virtual bool Initialize() = 0;

  // Read the file into |output|, and output the number of bytes read to
  // |size|. A zero |size| signals end of stream.
  virtual void Read(uint8** output, int* size) = 0;
};
// Common base holding the stdio stream; Initialize() opens |path| for
// binary reading and subclasses implement the framing in Read().
class BasicFileReader : public FileReader {
 public:
  explicit BasicFileReader(const FilePath& path);
  virtual bool Initialize();
  virtual void Read(uint8** output, int* size) = 0;

 protected:
  // NULL until Initialize() succeeds.
  FILE* file() const { return file_.get(); }

 private:
  FilePath path_;
  ScopedStdioHandle file_;

  DISALLOW_COPY_AND_ASSIGN(BasicFileReader);
};
class YuvFileReader : public BasicFileReader {
 public:
  // Construct a YUV file reader with looping and color space conversion
  // ability. |loop_count| specifies the number of times the input file
  // is read. If |enable_csc| is true, input in YV420 is converted to
  // NV21.
  // TODO(jiesun): Make color space more generic not a hard coded color
  // space conversion.
  YuvFileReader(const FilePath& path,
                int width,
                int height,
                int loop_count,
                bool output_nv21);
  virtual ~YuvFileReader();
  virtual void Read(uint8** output, int* size);

 private:
  int width_;
  int height_;
  int loop_count_;
  bool output_nv21_;
  // Scratch buffer for the I420 frame before conversion; allocated
  // lazily on first Read().
  scoped_array<uint8> csc_buf_;

  DISALLOW_COPY_AND_ASSIGN(YuvFileReader);
};
// Reads the input file in fixed-size blocks of |block_size| bytes.
class BlockFileReader : public BasicFileReader {
 public:
  BlockFileReader(const FilePath& path,
                  int block_size);
  virtual void Read(uint8** output, int* size);

 private:
  int block_size_;

  DISALLOW_COPY_AND_ASSIGN(BlockFileReader);
};
// Demuxes a container file with FFmpeg and returns packets of the
// first video stream, optionally run through a bitstream converter.
class FFmpegFileReader : public FileReader {
 public:
  explicit FFmpegFileReader(const FilePath& path);
  virtual ~FFmpegFileReader();
  virtual bool Initialize();
  virtual void Read(uint8** output, int* size);

 private:
  FilePath path_;
  AVFormatContext* format_context_;
  // Borrowed from |format_context_|; not owned.
  AVCodecContext* codec_context_;
  // Index of the video stream, or -1 before Initialize() succeeds.
  int target_stream_;
  scoped_ptr<media::BitstreamConverter> converter_;

  DISALLOW_COPY_AND_ASSIGN(FFmpegFileReader);
};
// Splits a raw Annex-B H.264 elementary stream into NAL units using
// the 00 00 01 start code.
class H264FileReader : public BasicFileReader {
 public:
  explicit H264FileReader(const FilePath& path);
  virtual ~H264FileReader();
  virtual void Read(uint8** output, int* size);

 private:
  scoped_array<uint8> read_buf_;
  // Read cursor and count of valid bytes within |read_buf_|.
  int current_;
  int used_;

  DISALLOW_COPY_AND_ASSIGN(H264FileReader);
};
} // namespace media
#endif // MEDIA_TOOLS_OMX_TEST_FILE_READER_UTIL_H_

@ -0,0 +1,69 @@
// Copyright (c) 2010 The Chromium Authors. All rights reserved. Use of this
// source code is governed by a BSD-style license that can be found in the
// LICENSE file.
#include "media/tools/omx_test/file_sink.h"
#include "base/file_util.h"
#include "base/logging.h"
#include "media/tools/omx_test/color_space_util.h"
namespace media {
// |simulate_copy| forces a memcpy of every decoded buffer even when no
// file is written, to measure the cost of copying decoder output.
FileSink::FileSink(const FilePath& output_path,
                   bool simulate_copy,
                   bool enable_csc)
    : output_path_(output_path),
      simulate_copy_(simulate_copy),
      enable_csc_(enable_csc),
      width_(0),
      height_(0),
      copy_buf_size_(0),
      csc_buf_size_(0) {
}

FileSink::~FileSink() {}
// Receives one decoded buffer: optionally copies it, optionally
// converts NV21 -> IYUV, and appends it to the output file if one was
// opened. Scratch buffers grow on demand and are reused.
void FileSink::BufferReady(int size, uint8* buffer) {
  if (size > copy_buf_size_) {
    copy_buf_.reset(new uint8[size]);
    copy_buf_size_ = size;
  }
  if (size > csc_buf_size_) {
    csc_buf_.reset(new uint8[size]);
    csc_buf_size_ = size;
  }

  // Copy the output of the decoder to user memory.
  if (simulate_copy_ || output_file_.get())  // Implies a copy.
    memcpy(copy_buf_.get(), buffer, size);

  uint8* out_buffer = copy_buf_.get();
  if (enable_csc_) {
    // Now assume the raw output is NV21.
    // NOTE(review): if neither |simulate_copy_| nor an output file is
    // set, copy_buf_ was not filled above and this converts stale
    // data; harmless today since nothing is written, but confirm.
    media::NV21toIYUV(copy_buf_.get(), csc_buf_.get(), width_, height_);
    out_buffer = csc_buf_.get();
  }

  if (output_file_.get())
    fwrite(out_buffer, sizeof(uint8), size, output_file_.get());
}
bool FileSink::Initialize() {
// Opens the output file for writing.
if (!output_path_.empty()) {
output_file_.Set(file_util::OpenFile(output_path_, "wb"));
if (!output_file_.get()) {
LOG(ERROR) << "can't open dump file %s" << output_path_.value();
return false;
}
}
return true;
}
// Records the frame dimensions used by the color space conversion in
// BufferReady(); called when the decoder reports its output format.
void FileSink::UpdateSize(int width, int height) {
  width_ = width;
  height_ = height;
}
} // namespace media

@ -0,0 +1,60 @@
// Copyright (c) 2011 The Chromium Authors. All rights reserved. Use of this
// source code is governed by a BSD-style license that can be found in the
// LICENSE file.
#ifndef MEDIA_TOOLS_OMX_TEST_FILE_SINK_H_
#define MEDIA_TOOLS_OMX_TEST_FILE_SINK_H_
#include <map>
#include <string>
#include "base/basictypes.h"
#include "base/file_path.h"
#include "base/memory/scoped_handle.h"
#include "base/memory/scoped_ptr.h"
namespace media {
// This class writes output of a frame decoded by OmxCodec and save it to
// a file.
// This class writes output of a frame decoded by OmxCodec and save it to
// a file.
class FileSink {
 public:
  FileSink(const FilePath& output_path,
           bool simulate_copy,
           bool enable_csc);

  virtual ~FileSink();

  virtual void BufferReady(int size, uint8* buffer);

  // Initialize this object. Returns true if successful.
  bool Initialize();

  // Update the output frame size.
  // (Parameter name fixed from the original "wdith" typo.)
  void UpdateSize(int width, int height);

  // Write the frame buffer reference by |buffer|.
  // NOTE(review): no definition for Write() is visible in this file's
  // implementation; confirm it is still needed.
  void Write(uint8* buffer, int size);

 private:
  FilePath output_path_;
  bool simulate_copy_;
  bool enable_csc_;
  ScopedStdioHandle output_file_;

  // Image properties.
  int width_;
  int height_;

  // Buffers for copying and color space conversion.
  scoped_array<uint8> copy_buf_;
  int copy_buf_size_;
  scoped_array<uint8> csc_buf_;
  int csc_buf_size_;

  DISALLOW_COPY_AND_ASSIGN(FileSink);
};
} // namespace media
#endif // MEDIA_TOOLS_OMX_TEST_FILE_SINK_H_

@ -0,0 +1,414 @@
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// A test program that drives an OpenMAX video decoder module. This program
// will take video in elementary stream and read into the decoder.
//
// Run the following command to see usage:
// ./omx_test
#include "base/at_exit.h"
#include "base/callback.h"
#include "base/command_line.h"
#include "base/memory/scoped_ptr.h"
#include "base/message_loop.h"
#include "base/string_number_conversions.h"
#include "base/time.h"
#include "base/utf_string_conversions.h"
#include "media/base/data_buffer.h"
#include "media/base/media.h"
#include "media/base/video_frame.h"
#include "media/ffmpeg/ffmpeg_common.h"
#include "media/ffmpeg/file_protocol.h"
#include "media/filters/bitstream_converter.h"
#include "media/tools/omx_test/color_space_util.h"
#include "media/tools/omx_test/file_reader_util.h"
#include "media/tools/omx_test/file_sink.h"
#include "media/video/omx_video_decode_engine.h"
using media::BlockFileReader;
using media::Buffer;
using media::DataBuffer;
using media::FFmpegFileReader;
using media::FileReader;
using media::FileSink;
using media::H264FileReader;
using media::OmxConfigurator;
using media::OmxDecoderConfigurator;
using media::OmxEncoderConfigurator;
using media::OmxVideoDecodeEngine;
using media::PipelineStatistics;
using media::VideoFrame;
using media::YuvFileReader;
// This is the driver object to feed the decoder with data from a file.
// It also provides callbacks for the decoder to receive events from the
// decoder.
// TODO(wjia): AVStream should be replaced with a new structure which is
// neutral to any video decoder. Also change media.gyp correspondingly.
// This is the driver object to feed the decoder with data from a file.
// It also provides callbacks for the decoder to receive events from the
// decoder.
// TODO(wjia): AVStream should be replaced with a new structure which is
// neutral to any video decoder. Also change media.gyp correspondingly.
class TestApp : public base::RefCountedThreadSafe<TestApp>,
                public media::VideoDecodeEngine::EventHandler {
 public:
  // Takes ownership of all three arguments.
  TestApp(AVStream* av_stream,
          FileSink* file_sink,
          FileReader* file_reader)
      : av_stream_(av_stream),
        file_reader_(file_reader),
        file_sink_(file_sink),
        stopped_(false),
        error_(false) {
  }

  // Prepares reader and sink; returns false (and logs) on failure.
  bool Initialize() {
    if (!file_reader_->Initialize()) {
      file_reader_.reset();
      LOG(ERROR) << "can't initialize file reader";
      return false;  // was "return false;;" - stray semicolon removed.
    }

    if (!file_sink_->Initialize()) {
      LOG(ERROR) << "can't initialize output writer";
      return false;
    }
    return true;
  }

  virtual void OnInitializeComplete(const media::VideoCodecInfo& info) {}

  virtual void OnUninitializeComplete() {
    // If this callback is received, mark the |stopped_| flag so that we don't
    // feed more buffers into the decoder.
    // We need to exit the current message loop because we have no more work
    // to do on the message loop. This is done by calling
    // message_loop_.Quit().
    stopped_ = true;
    message_loop_.Quit();
  }

  virtual void OnError() {
    // In case of error, this method is called. Mark the error flag and
    // exit the message loop because we have no more work to do.
    LOG(ERROR) << "Error callback received!";
    error_ = true;
    message_loop_.Quit();
  }

  virtual void OnFlushComplete() {
    NOTIMPLEMENTED();
  }

  virtual void OnSeekComplete() {
    NOTIMPLEMENTED();
  }

  virtual void OnFormatChange(media::VideoStreamInfo stream_info) {
    NOTIMPLEMENTED();
  }

  void FormatCallback(
      const OmxConfigurator::MediaFormat& input_format,
      const OmxConfigurator::MediaFormat& output_format) {
    // This callback will be called when port reconfiguration is done.
    // Input format and output format will be used in the codec.
    DCHECK_EQ(input_format.video_header.width,
              output_format.video_header.width);
    DCHECK_EQ(input_format.video_header.height,
              output_format.video_header.height);
    file_sink_->UpdateSize(input_format.video_header.width,
                           input_format.video_header.height);
  }

  virtual void ProduceVideoSample(scoped_refptr<Buffer> buffer) {
    // We receive this callback when the decoder has consumed an input buffer.
    // In this case, delete the previous buffer and enqueue a new one.
    // There are some conditions we don't want to enqueue, for example when
    // the last buffer is an end-of-stream buffer, when we have stopped, and
    // when we have received an error.
    bool eos = buffer.get() && buffer->IsEndOfStream();
    if (!eos && !stopped_ && !error_)
      FeedInputBuffer();
  }

  virtual void ConsumeVideoFrame(scoped_refptr<VideoFrame> frame,
                                 const PipelineStatistics& statistics) {
    // This callback is received when the decoder has completed a decoding
    // task and given us some output data. The frame is owned by the decoder.
    if (stopped_ || error_)
      return;

    if (!frame_count_)
      first_sample_delivered_time_ = base::TimeTicks::HighResNow();

    // If we are reading to the end, then stop.
    if (frame->IsEndOfStream()) {
      engine_->Uninitialize();
      return;
    }

    if (file_sink_.get()) {
      for (size_t i = 0; i < frame->planes(); i++) {
        int plane_size = frame->width() * frame->height();
        if (i > 0) plane_size >>= 2;
        file_sink_->BufferReady(plane_size, frame->data(i));
      }
    }

    // Could OMX IL return partial sample for decoder?
    frame_count_++;
  }

  void FeedInputBuffer() {
    uint8* data;
    int read;
    file_reader_->Read(&data, &read);
    engine_->ConsumeVideoSample(new DataBuffer(data, read));
  }

  // Builds the decoder config from |av_stream_|, starts the engine and
  // runs the message loop until the engine reports stop or error.
  void Run() {
    StartProfiler();

    media::VideoDecoderConfig config(
        media::CodecIDToVideoCodec(av_stream_->codec->codec_id),
        av_stream_->codec->coded_width,
        av_stream_->codec->coded_height,
        media::GetSurfaceWidth(av_stream_.get()),
        media::GetSurfaceHeight(av_stream_.get()),
        av_stream_->r_frame_rate.num,
        av_stream_->r_frame_rate.den,
        av_stream_->codec->extradata,
        av_stream_->codec->extradata_size);

    engine_.reset(new OmxVideoDecodeEngine());
    engine_->Initialize(&message_loop_, this, NULL, config);

    // Execute the message loop so that we can run tasks on it. This call
    // will return when we call message_loop_.Quit().
    message_loop_.Run();

    StopProfiler();
  }

  void StartProfiler() {
    start_time_ = base::TimeTicks::HighResNow();
    frame_count_ = 0;
  }

  // Prints frame count, wall time, fps and first-frame latency.
  void StopProfiler() {
    base::TimeDelta duration = base::TimeTicks::HighResNow() - start_time_;
    int64 duration_ms = duration.InMilliseconds();
    int64 fps = 0;
    if (duration_ms) {
      fps = (static_cast<int64>(frame_count_) *
             base::Time::kMillisecondsPerSecond) / duration_ms;
    }
    base::TimeDelta delay = first_sample_delivered_time_ - start_time_;
    printf("\n<<< frame delivered : %d >>>", frame_count_);
    printf("\n<<< time used(ms) : %d >>>", static_cast<int>(duration_ms));
    printf("\n<<< fps : %d >>>", static_cast<int>(fps));
    printf("\n<<< initial delay used(us): %d >>>",
           static_cast<int>(delay.InMicroseconds()));
    printf("\n");
  }

  scoped_ptr<OmxVideoDecodeEngine> engine_;
  MessageLoop message_loop_;
  scoped_ptr<AVStream> av_stream_;
  scoped_ptr<FileReader> file_reader_;
  scoped_ptr<FileSink> file_sink_;

  // Internal states for execution.
  bool stopped_;
  bool error_;

  // Counters for performance.
  base::TimeTicks start_time_;
  base::TimeTicks first_sample_delivered_time_;
  int frame_count_;
};
// Returns the value of command-line switch |name| ("" if absent).
static std::string GetStringSwitch(const char* name) {
  return CommandLine::ForCurrentProcess()->GetSwitchValueASCII(name);
}
static bool HasSwitch(const char* name) {
return CommandLine::ForCurrentProcess()->HasSwitch(name);
}
// Returns the integer value of command-line switch |name|, or 0 when the
// switch is absent or its value does not parse as an integer.
static int GetIntSwitch(const char* name) {
  if (!HasSwitch(name))
    return 0;
  // Initialize so a failed parse deterministically yields 0 instead of
  // returning an uninitialized value (base::StringToInt's output is not
  // guaranteed when parsing fails).
  int value = 0;
  base::StringToInt(GetStringSwitch(name), &value);
  return value;
}
// Maps the --codec switch onto |av_stream|'s codec id. Returns false
// (after logging) when the name is not one of h264/mpeg4/h263/vc1.
static bool PrepareDecodeFormats(AVStream *av_stream) {
  const std::string codec_name = GetStringSwitch("codec");
  av_stream->codec->codec_id = CODEC_ID_NONE;
  if (codec_name == "h264")
    av_stream->codec->codec_id = CODEC_ID_H264;
  else if (codec_name == "mpeg4")
    av_stream->codec->codec_id = CODEC_ID_MPEG4;
  else if (codec_name == "h263")
    av_stream->codec->codec_id = CODEC_ID_H263;
  else if (codec_name == "vc1")
    av_stream->codec->codec_id = CODEC_ID_VC1;
  if (av_stream->codec->codec_id == CODEC_ID_NONE) {
    LOG(ERROR) << "Unknown codec.";
    return false;
  }
  return true;
}
// Fills |av_stream| with the encoder parameters taken from the command
// line: frame size, frame rate, codec id and target bitrate. Returns
// false (after logging) when --codec names an unknown codec.
static bool PrepareEncodeFormats(AVStream *av_stream) {
  av_stream->codec->width = GetIntSwitch("width");
  av_stream->codec->height = GetIntSwitch("height");
  av_stream->avg_frame_rate.num = GetIntSwitch("framerate");
  av_stream->avg_frame_rate.den = 1;
  // Reuse the decode-side --codec parsing; this removes the codec-id
  // if/else chain that was previously duplicated verbatim here.
  if (!PrepareDecodeFormats(av_stream))
    return false;
  // TODO(jiesun): assume constant bitrate now.
  av_stream->codec->bit_rate = GetIntSwitch("bitrate");
  // TODO(wjia): add more configurations needed by encoder
  return true;
}
// Loads and initializes the FFmpeg libraries; must succeed before any
// FFmpeg-based demuxing is attempted. Returns false when the media
// library cannot be loaded.
static bool InitFFmpeg() {
  // Empty FilePath() — presumably searches the default library locations;
  // NOTE(review): confirm against InitializeMediaLibrary().
  if (!media::InitializeMediaLibrary(FilePath()))
    return false;
  // Order matters: codec init, then format/codec registration, then the
  // custom file protocol used by the FFmpeg-based reader.
  avcodec_init();
  av_register_all();
  av_register_protocol2(&kFFmpegFileProtocol, sizeof(kFFmpegFileProtocol));
  return true;
}
// Prints command-line usage for both decode and encode modes to stdout.
static void PrintHelp() {
  // None of these lines carry format arguments, so plain fputs suffices.
  fputs("Using for decoding...\n", stdout);
  fputs("\n", stdout);
  fputs("Usage: omx_test --input-file=FILE --codec=CODEC"
        " [--output-file=FILE] [--enable-csc]"
        " [--copy] [--use-ffmpeg]\n", stdout);
  fputs(" CODEC: h264/mpeg4/h263/vc1\n", stdout);
  fputs("\n", stdout);
  fputs("Optional Arguments\n", stdout);
  fputs(" --output-file Dump raw OMX output to file.\n", stdout);
  fputs(" --enable-csc Dump the CSCed output to file.\n", stdout);
  fputs(" --copy Simulate a memcpy from the output.\n", stdout);
  fputs(" --use-ffmpeg Use ffmpeg demuxer\n", stdout);
  fputs("\n", stdout);
  fputs("Using for encoding...\n", stdout);
  fputs("\n", stdout);
  fputs("Usage: omx_test --encoder --input-file=FILE --codec=CODEC"
        " --width=PIXEL_WIDTH --height=PIXEL_HEIGHT"
        " --bitrate=BIT_PER_SECOND --framerate=FRAME_PER_SECOND"
        " [--output-file=FILE] [--enable-csc]"
        " [--copy]\n", stdout);
  fputs(" CODEC: h264/mpeg4/h263/vc1\n", stdout);
  fputs("\n", stdout);
  fputs("Optional Arguments\n", stdout);
  fputs(" --output-file Dump raw OMX output to file.\n", stdout);
  fputs(" --enable-csc Dump the CSCed input from file.\n", stdout);
  fputs(" --copy Simulate a memcpy from the output.\n", stdout);
  fputs(" --loop=COUNT loop input streams\n", stdout);
}
// Entry point for the OpenMAX decode/encode test harness. Parses the
// command line, initializes the OpenMAX (and optionally FFmpeg)
// libraries, builds a FileReader/FileSink pair and runs TestApp until
// EOS or an error. Returns 0 on success, -1 on any failure.
int main(int argc, char** argv) {
  base::AtExitManager at_exit_manager;
  CommandLine::Init(argc, argv);

  // Print help if there is not enough arguments.
  if (argc == 1) {
    PrintHelp();
    return -1;
  }

  const CommandLine& cmd_line = *CommandLine::ForCurrentProcess();

  // Read a bunch of parameters.
  FilePath input_path = cmd_line.GetSwitchValuePath("input-file");
  FilePath output_path = cmd_line.GetSwitchValuePath("output-file");
  bool encoder = HasSwitch("encoder");
  bool copy = HasSwitch("copy");
  bool enable_csc = HasSwitch("enable-csc");
  bool use_ffmpeg = HasSwitch("use-ffmpeg");
  int loop_count = GetIntSwitch("loop");
  if (loop_count == 0)
    loop_count = 1;
  DCHECK_GE(loop_count, 1);

  // Initialize OpenMAX.
  if (!media::InitializeOpenMaxLibrary(FilePath())) {
    LOG(ERROR) << "Unable to initialize OpenMAX library.";
    // Fixed: this used to |return false;|, i.e. exit code 0, which
    // reported success to the shell despite the failure.
    return -1;
  }

  // If FFmpeg should be used for demuxing load the library here and do
  // the initialization.
  if (use_ffmpeg && !InitFFmpeg()) {
    LOG(ERROR) << "Unable to initialize the media library.";
    return -1;
  }

  // Create AVStream. Both structs are allocated outside FFmpeg, so zero
  // them by hand before use.
  AVStream *av_stream = new AVStream;
  AVCodecContext *av_codec_context = new AVCodecContext;
  memset(av_stream, 0, sizeof(AVStream));
  memset(av_codec_context, 0, sizeof(AVCodecContext));
  scoped_ptr<AVCodecContext> av_codec_context_deleter(av_codec_context);
  av_stream->codec = av_codec_context;
  av_codec_context->width = 320;
  av_codec_context->height = 240;

  // Fixed: the return values were previously ignored, so an unknown
  // --codec silently continued with CODEC_ID_NONE.
  if (encoder) {
    if (!PrepareEncodeFormats(av_stream))
      return -1;
  } else {
    if (!PrepareDecodeFormats(av_stream))
      return -1;
  }

  // Creates the FileReader to read input file.
  FileReader* file_reader;
  if (encoder) {
    file_reader = new YuvFileReader(
        input_path, av_stream->codec->width,
        av_stream->codec->height, loop_count, enable_csc);
  } else if (use_ffmpeg) {
    // Use ffmepg for reading.
    file_reader = new FFmpegFileReader(input_path);
  } else if (input_path.Extension() == FILE_PATH_LITERAL(".264")) {
    file_reader = new H264FileReader(input_path);
  } else {
    // Creates a reader that reads in blocks of 32KB.
    const int kReadSize = 32768;
    file_reader = new BlockFileReader(input_path, kReadSize);
  }

  // Create a file sink. TestApp holds the reader and sink in scoped_ptr
  // members, so it takes ownership of both.
  FileSink* file_sink = new FileSink(output_path, copy, enable_csc);

  // Create a test app object and initialize it.
  scoped_refptr<TestApp> test = new TestApp(av_stream, file_sink, file_reader);
  if (!test->Initialize()) {
    LOG(ERROR) << "can't initialize this application";
    return -1;
  }

  // This will run the decoder until EOS is reached or an error
  // is encountered.
  test->Run();
  return 0;
}