author     prasanth kamuju <prasanth.kamuju@linaro.org>  2014-09-04 10:12:30 +0530
committer  prasanth kamuju <prasanth.kamuju@linaro.org>  2014-09-04 10:12:30 +0530
commit     5f873f609b8af0a1bc53fffeb9b417d9cac79c1c (patch)
tree       4c5491621fa9ce37f86b2e7a836f61ede563680c
parent     70d5968f09766c733e4e868864254b0eb379a567 (diff)
Add support for finish_frame
Push the decoded buffer to the next element using the GstVideoDecoder base class APIs.
-rw-r--r--  src/gstqcvideodec.c  378
-rw-r--r--  src/gstqcvideodec.h    4
2 files changed, 288 insertions, 94 deletions
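
The patch routes decoded output through the GstVideoDecoder base class rather than pushing buffers on the source pad by hand. Below is a minimal sketch of that pattern with illustrative names only (it is not the patch code): look up the pending GstVideoCodecFrame for the decoded buffer, let the base class allocate frame->output_buffer from the negotiated pool, copy the decoded data in, and hand the frame to gst_video_decoder_finish_frame(), which timestamps the buffer and pushes it downstream.

#include <string.h>
#include <gst/video/gstvideodecoder.h>

/* Sketch only: assumes the subclass knows which system_frame_number the
 * hardware buffer belongs to. All names here are hypothetical. */
static GstFlowReturn
push_decoded_buffer (GstVideoDecoder * decoder, guint frame_number,
    const guint8 * data, gsize len)
{
  GstVideoCodecFrame *frame;
  GstMapInfo map;
  GstFlowReturn ret;

  /* Look up the pending frame that matches this decoded buffer. */
  frame = gst_video_decoder_get_frame (decoder, frame_number);
  if (frame == NULL)
    return GST_FLOW_ERROR;

  /* Let the base class allocate frame->output_buffer from the pool that
   * was negotiated after gst_video_decoder_set_output_state(). */
  ret = gst_video_decoder_allocate_output_frame (decoder, frame);
  if (ret != GST_FLOW_OK) {
    gst_video_codec_frame_unref (frame);
    return ret;
  }

  /* Copy the decoded pixels into the output buffer. */
  if (gst_buffer_map (frame->output_buffer, &map, GST_MAP_WRITE)) {
    memcpy (map.data, data, MIN (len, map.size));
    gst_buffer_unmap (frame->output_buffer, &map);
  }

  /* Hand the frame back to the base class; it timestamps the buffer and
   * pushes it on the source pad, consuming the frame reference. */
  return gst_video_decoder_finish_frame (decoder, frame);
}

This assumes gst_video_decoder_set_output_state() and gst_video_decoder_negotiate() have already run in set_format(), as the patch does before allocating its output buffers.
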
diff --git a/src/gstqcvideodec.c b/src/gstqcvideodec.c
index 0ed76b2..5653c0f 100644
--- a/src/gstqcvideodec.c
+++ b/src/gstqcvideodec.c
@@ -114,6 +114,7 @@ static gboolean gst_qcvideodec_set_format (GstVideoDecoder * decoder,GstVideoCod
static gboolean gst_qcvideodec_close (GstVideoDecoder * decoder);
static GstFlowReturn gst_qcvideodec_handle_frame (GstVideoDecoder * decoder,GstVideoCodecFrame * frame);
static GstFlowReturn gst_qcvideodec_finish (GstVideoDecoder * decoder);
+static gboolean gst_qcvideodec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query);
static void* video_thread (Gstqcvideodec *);
static void* async_thread (Gstqcvideodec *);
@@ -128,7 +129,8 @@ int reconfig =0;
static struct vdec_bufferpayload *temp_input_buffer = NULL;
static int cnt = 0;
-
+FILE * outputBufferFile;
+GstVideoCodecFrame *g_frame,*g_frame1, *g_frame2,*g_frame3,*g_frame4;
static GstFlowReturn gst_qcvideodec_finish (GstVideoDecoder * decoder)
{
@@ -136,6 +138,7 @@ static GstFlowReturn gst_qcvideodec_finish (GstVideoDecoder * decoder)
return GST_FLOW_OK;
}
+#if 0
static GstStateChangeReturn gst_qcvideodec_change_state (GstElement * element,GstStateChange transition)
{
Gstqcvideodec *self;
@@ -212,6 +215,7 @@ static GstStateChangeReturn gst_qcvideodec_change_state (GstElement * element,Gs
return ret;
}
+#endif
static gboolean gst_qcvideodec_open (GstVideoDecoder * decoder)
{
pthread_mutexattr_t init_values;
@@ -233,6 +237,8 @@ static gboolean gst_qcvideodec_open (GstVideoDecoder * decoder)
g_print("\n ERROR : /dev/msm_vidc_dev Open failed ------\n");
}
+ outputBufferFile = fopen ("output11.yuv","wb");
+
/*Create Queue related data structures*/
queue_ptr = &dec->queue_context;
queue_ptr->commandq_size = 50;
@@ -241,6 +247,7 @@ static gboolean gst_qcvideodec_open (GstVideoDecoder * decoder)
sem_init(&queue_ptr->sem_message,0, 0);
sem_init(&dec->sem_synchronize,0, 0);
sem_init(&dec->sem_input_buf_done,0, 0);
+ sem_init(&dec->sem_output_buf_done,0, 0);
pthread_mutexattr_init (&init_values);
pthread_mutex_init (&queue_ptr->mutex,&init_values);
@@ -277,7 +284,17 @@ static gboolean gst_qcvideodec_start (GstVideoDecoder * decoder)
}
static gboolean gst_qcvideodec_stop (GstVideoDecoder * decoder)
{
+ Gstqcvideodec *dec = GST_QCVIDEODEC (decoder);
g_print("\n gst_qcvideodec_stop ### \n");
+
+ if (dec->input_state) {
+ gst_video_codec_state_unref (dec->input_state);
+ dec->input_state = NULL;
+ }
+ if (dec->output_state) {
+ gst_video_codec_state_unref (dec->output_state);
+ dec->output_state = NULL;
+ }
return TRUE;
}
static gboolean gst_qcvideodec_set_format (GstVideoDecoder * decoder,GstVideoCodecState * state)
@@ -290,14 +307,30 @@ static gboolean gst_qcvideodec_set_format (GstVideoDecoder * decoder,GstVideoCod
GstMapInfo minfo;
struct vdec_input_frameinfo frameinfo;
GstVideoInfo *Vinfo;
+ GstCaps *caps;
+ GstStructure *gststru;
+ gchar *caps_data;
+ // GstVideoCodecState *state;
+ GstVideoFormat fmt = GST_VIDEO_FORMAT_NV12_64Z32;
+ GstVideoInfo *info;
g_print("\n gst_qcvideodec_set_format Enter: ### \n");
+ info = &dec->input_state->info;
+ /* Keep a copy of the input state */
+ if (dec->input_state)
+ gst_video_codec_state_unref(dec->input_state);
+ dec->input_state = gst_video_codec_state_ref (state);
+
+ caps = state->caps;
buffer = state->codec_data;
Vinfo = &state->info;
gst_buffer_map (buffer, &minfo, GST_MAP_READ);
-
- // GST_VIDEO_FORMAT_INFO_NAME(Vinfo->finfo);
-
+ caps_data = gst_caps_to_string(caps);
+ caps = gst_caps_make_writable (gst_pad_query_caps (dec->srcpad, NULL));
+ GST_VIDEO_FORMAT_INFO_NAME(Vinfo->finfo);
+ gst_pad_set_caps (dec->srcpad, caps);
+ gst_caps_unref (caps);
+ g_print("\n gst_qcvideodec_set_format Enter: caps_data : %s \n",caps_data);
g_print("\n gst_qcvideodec_set_format Enter: data_size : %d \n",minfo.size);
g_print("\n gst_qcvideodec_set_format Enter: format : %s \n",GST_VIDEO_FORMAT_INFO_NAME(Vinfo->finfo));
g_print("\n gst_qcvideodec_set_format Enter: width : %d , Height : %d \n",Vinfo->width,Vinfo->height);
@@ -411,6 +444,13 @@ static gboolean gst_qcvideodec_set_format (GstVideoDecoder * decoder,GstVideoCod
{
g_print("\n Error in output Buffer allocation");
}
+ g_print("\n Going to set output_state ----------------------\n");
+ g_print("\n Going to set output_state :%d , %d , %d \n", fmt,Vinfo->width,Vinfo->height);
+ /* Create the output state */
+ // memcpy(&dec->output_state->info,&Vinfo,sizeof(GstVideoInfo));
+ dec->output_state = gst_video_decoder_set_output_state (decoder, fmt,Vinfo->width,Vinfo->height,dec->input_state);
+ //dec->output_state = gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec), fmt,info->width,info->height,dec->input_state);
+ gst_video_decoder_negotiate (GST_VIDEO_DECODER (dec));
gst_qcvideodec_set_buffers (dec);
/* send the header info */
@@ -432,7 +472,7 @@ static gboolean gst_qcvideodec_set_format (GstVideoDecoder * decoder,GstVideoCod
g_print("\n Decoder frame failed");
return -1;
}
- total_frames++;
+ // total_frames++;
sem_wait(&dec->sem_input_buf_done);
return TRUE;
}
@@ -512,6 +552,7 @@ static gboolean gst_qcvideodec_close (GstVideoDecoder *decoder)
}
sem_destroy (&dec->queue_context.sem_message);
sem_destroy (&dec->sem_input_buf_done);
+ sem_destroy (&dec->sem_output_buf_done);
sem_destroy (&dec->sem_synchronize);
pthread_mutex_destroy(&dec->queue_context.mutex);
@@ -525,19 +566,38 @@ static GstFlowReturn gst_qcvideodec_handle_frame (GstVideoDecoder *decoder_cxt,G
{
GstFlowReturn res;
Gstqcvideodec *dec = GST_QCVIDEODEC (decoder_cxt);
- g_print("\n gst_qcvideodec_handle_frame Enter: ### \n");
+ GST_VIDEO_DECODER_STREAM_LOCK (decoder_cxt);
+ g_print("\n main_thread gst_qcvideodec_handle_frame Enter: ### \n");
+ GST_VIDEO_DECODER_STREAM_UNLOCK (decoder_cxt);
+ // g_print("\n11 >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
+ // g_print("\n main_thread system_frame_number : %d , %d, %d, %d \n",frame->system_frame_number,frame->decode_frame_number,frame->presentation_frame_number,frame->distance_from_sync);
+ // g_print("\n main_thread time : %lld , %lld , %lld \n",frame->dts,frame->pts,frame->duration);
+ // g_print("\n main_thread address : %p , %p \n",frame->input_buffer,frame->output_buffer);
+ // g_print("\n11 >>>>>>>>>>>>>>>>>>>>>>>>>>>>> end\n");
+
res = gst_qcvideodec_decode_buffer (dec, frame->input_buffer, frame);
- switch (res) {
+ g_print("\n11 returning >>>>>>>>>>>>>>>>>>>>>>>>>>>>> done\n");
+ GST_VIDEO_DECODER_STREAM_UNLOCK (decoder_cxt);
+
+/* if(frame->system_frame_number >= 3)
+ sem_wait(&dec->sem_output_buf_done);
+
+
+ // g_print("\n decode_frame_number : %d\n",g_frame1->decode_frame_number);
+ // g_print("\n presentation_frame_number : %d\n",g_frame1->presentation_frame_number);
+ // g_print("\n distance_from_sync : %d\n",g_frame1->distance_from_sync);
+/* switch (res) {
case GST_FLOW_OK:
res = gst_video_decoder_finish_frame (decoder_cxt, frame);
break;
-/* case GST_CUSTOM_FLOW_DROP:
+ case GST_CUSTOM_FLOW_DROP:
res = gst_video_decoder_drop_frame (decoder_cxt, frame);
break;
-*/ default:
+ default:
gst_video_codec_frame_unref (frame);
break;
- }
+ }*/
+ sem_wait(&dec->sem_input_buf_done);
return res;
}
@@ -551,6 +611,7 @@ static GstFlowReturn gst_qcvideodec_decode_buffer(Gstqcvideodec *dec, GstBuffer
struct vdec_fillbuffer_cmd fillbuffer;
unsigned int data_len =0;
GstMapInfo in_map_info = GST_MAP_INFO_INIT;
+ GstFlowReturn result;
decode_context = GST_QCVIDEODEC (dec);
size = gst_buffer_get_size (buf);
@@ -615,28 +676,9 @@ static GstFlowReturn gst_qcvideodec_decode_buffer(Gstqcvideodec *dec, GstBuffer
sem_post (&decode_context->sem_synchronize);
}
- g_print("\n Input buffer done send next buffer current value = %d",\
- total_frames);
-
}
gst_buffer_unmap(buf, &in_map_info);
-
- for(j=0;j<8;j++)
- {
- g_print("\n hex value is : 0x%x \n",((char *)frameinfo.bufferaddr)[j]);
- }
-// g_print ("\n Wait for EOS");
- /*Wait for EOS or Error condition*/
-// sem_wait (&decode_context->sem_synchronize);
-// g_print ("\n Reached EOS");
- total_frames++;
-// Should wait till get the msg from vcd driver, i.e INPUT_DONE
- sem_wait(&decode_context->sem_input_buf_done);
-// sem_wait (&decode_context->sem_synchronize);
-
return GST_FLOW_OK;
- /* just push out the incoming buffer without touching it */
-// return gst_pad_push (filter->srcpad, buf);
}
int free_buffer ( enum vdec_buffer buffer_dir,Gstqcvideodec *decoder_context)
@@ -1122,10 +1164,18 @@ static void* video_thread (Gstqcvideodec *decoder_cxt)
GstMapInfo info;
GstFlowReturn ret = GST_FLOW_OK;
int leng = 0;
+ GstVideoCodecFrame out_frame;
+ GstVideoFrame vframe;
+ int first_frm = 0;
+ int input_header = 0;
+ GstVideoCodecFrame *frame;
+ GstMapInfo map = GST_MAP_INFO_INIT;
+ GstCaps *caps;
+ gchar *caps_data;
// guint8 *ptr;
Gstqcvideodec *decode_context = GST_QCVIDEODEC (decoder_cxt);
- g_print("\n video thread created successfully");
+ g_print("\n video thread created successfully");
if (decode_context == NULL)
{
g_print("\n video thread recieved NULL context");
@@ -1152,12 +1202,12 @@ static void* video_thread (Gstqcvideodec *decoder_cxt)
case VDEC_MSG_RESP_OUTPUT_FLUSHED:
break;
case VDEC_MSG_RESP_START_DONE:
- g_print("\n recived start done command");
+ g_print("\n recived start done command");
sem_post (&decode_context->sem_synchronize);
break;
case VDEC_MSG_RESP_STOP_DONE:
- g_print("\n recieved stop done");
+ g_print("\n recieved stop done");
sem_post (&decode_context->sem_synchronize);
break;
@@ -1171,13 +1221,20 @@ static void* video_thread (Gstqcvideodec *decoder_cxt)
break;
}
/*To-do read the next frame*/
-
+ g_print("\n VDEC_MSG_RESP_INPUT_BUFFER_DONE Input buffer done for index : = %d", total_frames);
+ if(input_header != 0)
+ {
+ total_frames++;
+ }
+ else
+ input_header = 1;
+
sem_post(&decode_context->sem_input_buf_done);
break;
case VDEC_MSG_EVT_CONFIG_CHANGED:
reconfig =1;
- g_print("\nGot VDEC_MSG_EVT_CONFIG_CHANGED \n");
+ g_print("\n Got VDEC_MSG_EVT_CONFIG_CHANGED \n");
ioctl_msg.in = &decode_context->output_buffer;
ioctl_msg.out = NULL;
if (ioctl(decode_context->video_driver_fd, VDEC_IOCTL_CMD_FLUSH, &ioctl_msg) < 0)
@@ -1188,8 +1245,9 @@ static void* video_thread (Gstqcvideodec *decoder_cxt)
break;
case VDEC_MSG_RESP_OUTPUT_BUFFER_DONE:
- // g_print("\n\nVDEC_MSG_RESP_OUTPUT_BUFFER_DONE in video thread # 1\n");
-
+ // g_print("\n\n VDEC_MSG_RESP_OUTPUT_BUFFER_DONE in video thread # 1\n");
+ GST_VIDEO_DECODER_STREAM_LOCK (decoder_cxt);
+ pthread_mutex_lock(&read_lock);
outputbuffer = (struct vdec_output_frameinfo *)\
queueitem->clientdata;
if (outputbuffer == NULL || outputbuffer->bufferaddr == NULL ||
@@ -1209,15 +1267,15 @@ static void* video_thread (Gstqcvideodec *decoder_cxt)
break;
}
- g_print("\n\nVDEC_MSG_RESP_OUTPUT_BUFFER_DONE in video thread outputbuffer->len: %d\n",outputbuffer->len);
+ // g_print("\n\n video_thread VDEC_MSG_RESP_OUTPUT_BUFFER_DONE in video thread outputbuffer->len: %d\n",outputbuffer->len);
if (outputbuffer->len == 0)
{
- g_print("\n Filled Length is zero Close decoding");
+ g_print("\n Filled Length is zero Close decoding");
sem_post (&decode_context->sem_synchronize);
break;
}
// if(outbuf == NULL)
- if(gst_pad_is_linked (decode_context->srcpad) != TRUE)
+ /* if(gst_pad_is_linked (decode_context->srcpad) != TRUE)
{
g_print("\n ----- pad is not linked ----\n");
}
@@ -1230,74 +1288,137 @@ static void* video_thread (Gstqcvideodec *decoder_cxt)
}
gst_buffer_map (outbuf, &info, GST_MAP_WRITE);
memcpy (info.data, outputbuffer->bufferaddr, info.size);
- leng = gst_buffer_get_size(outbuf);
- gst_buffer_ref(outbuf);
- ret = gst_pad_push (decode_context->srcpad, outbuf);
+ // leng = gst_buffer_get_size(outbuf);
+ // gst_buffer_ref(outbuf);
- gst_buffer_unmap(outbuf, &info);
+ /* if (outputBufferFile != NULL)
{
- g_print("\n--------------------------22222 GST_FLOW_NOT_OK : %d and leng = %d\n",ret,leng);
+ g_print("\nwriting data to output11.yuv file\n");
+ fwrite (info.data,1,info.size,outputBufferFile);
+ // fwrite (outputbuffer->bufferaddr,1,outputbuffer->len,outputBufferFile);
}
- if(ret == GST_FLOW_CUSTOM_SUCCESS_2)
+ */
+ // g_frame2 = gst_video_decoder_allocate_output_frame();
+ if(first_frm !=0)
{
- g_print("\n-------------------------- GST_FLOW_CUSTOM_SUCCESS_2 \n");
- }
-if(ret == GST_FLOW_CUSTOM_SUCCESS_1)
+
+ g_frame = gst_video_decoder_get_frame(decode_context,first_frm);
+ g_print("\n ************************************************* : %d\n",first_frm);
+/* if(g_frame->output_buffer)
+ {
+ g_print("\n--------- g_frame->output_buffer is not null ---------\n");
+ gst_buffer_unref (g_frame1->output_buffer);
+ g_frame1->output_buffer = NULL;
+ }
+
+
+ if(first_frm == 1)
+ { g_frame = g_frame1 ;}
+ if(first_frm == 2)
+ { g_frame = g_frame2 ;}
+ if(first_frm == 3)
+ { g_frame = g_frame3 ;}
+
+ g_print("\n ************************************************* : %d\n",first_frm);
+ g_print("\n video_thread writing data to output11.yuv file\n");
+ g_print("\n video_thread system_frame_number : %d , %d, %d, %d \n",g_frame->system_frame_number,g_frame->decode_frame_number,g_frame->presentation_frame_number,g_frame->distance_from_sync);
+ g_print("\n video_thread frame time : %lld , %lld , %lld , %lld \n",g_frame->dts,g_frame->pts,g_frame->duration,g_frame->deadline);
+ g_print("\n video_thread input address : %p , %p \n",g_frame->input_buffer,g_frame->output_buffer);
+ g_print("\n video_thread input size : %ld \n",decode_context->output_state->info.size);
+ g_print("\n video_thread par_n : %d ,par_d : %d , fps_n : %d , fps_d : %d \n",\
+ decode_context->output_state->info.par_n,\
+ decode_context->output_state->info.par_d,\
+ decode_context->output_state->info.fps_n,\
+ decode_context->output_state->info.fps_d);
+
+ caps = decode_context->output_state->caps;
+ caps_data = gst_caps_to_string(caps);
+ g_print("\n caps_data : %s \n",caps_data);
+ g_print("\n ************************************************* **\n");
+*/
+ ret = gst_video_decoder_allocate_output_frame(decode_context,g_frame);
+ // outbuf = gst_video_decoder_allocate_output_buffer(GST_VIDEO_DECODER (decoder_cxt));
+ /* if(outbuf !=NULL)
+ {
+ g_print("\n memory allocated for outbuf \n");
+ }
+ if(ret != GST_FLOW_OK)
+ {
+ g_print("\n not able to allocate memory GST_FLOW_NOT_OK \n");
+ }
+ else
+ g_print("\n !!!!!!! able to allocate memory GST_FLOW_OK : %p \n",g_frame->output_buffer);
+ */
+ /* if (!gst_video_frame_map (&vframe,&decode_context->output_state->info, g_frame->output_buffer,GST_MAP_READWRITE))
+ {
+ g_print("\n not able to map \n");
+ }
+ else
+ g_print("\n mapped successfully \n");
+
+ */
+ if (!gst_buffer_map (g_frame->output_buffer, &map, GST_MAP_WRITE))
{
- g_print("\n-------------------------- GST_FLOW_CUSTOM_SUCCESS_1 \n");
+ g_print("\n not able to mapped successfully \n");
+ }
+ memcpy (map.data,outputbuffer->bufferaddr, map.size);
+ gst_buffer_unmap (g_frame->output_buffer, &map);
+ // g_print("\n data copied to output buffer\n");
+ ret = gst_video_decoder_finish_frame(decode_context,g_frame);
+
+ /* if(ret == GST_FLOW_CUSTOM_SUCCESS_2)
+ {
+ g_print("\nfinish_frame -------------------------- GST_FLOW_CUSTOM_SUCCESS_2 \n");
}
-if(ret == GST_FLOW_CUSTOM_SUCCESS)
+ if(ret == GST_FLOW_CUSTOM_SUCCESS_1)
{
- g_print("\n-------------------------- GST_FLOW_CUSTOM_SUCCESS \n");
+ g_print("\nfinish_frame -------------------------- GST_FLOW_CUSTOM_SUCCESS_1 \n");
}
-if(ret == GST_FLOW_OK)
+ if(ret == GST_FLOW_CUSTOM_SUCCESS)
{
- g_print("\n-------------------------- GST_FLOW_OK \n");
+ g_print("\nfinish_frame -------------------------- GST_FLOW_CUSTOM_SUCCESS \n");
}
-if(ret == GST_FLOW_NOT_LINKED)
+ if(ret == GST_FLOW_OK)
{
- g_print("\n-------------------------- GST_FLOW_NOT_LINKED \n");
+ g_print("\nfinish_frame-------------------------- GST_FLOW_OK \n");
}
-if(ret == GST_FLOW_FLUSHING)
+ if(ret == GST_FLOW_NOT_LINKED)
{
- g_print("\n-------------------------- GST_FLOW_FLUSHING \n");
+ g_print("\nfinish_frame -------------------------- GST_FLOW_NOT_LINKED \n");
}
-if(ret == GST_FLOW_EOS)
+ if(ret == GST_FLOW_FLUSHING)
{
- g_print("\n-------------------------- GST_FLOW_EOS \n");
+ g_print("\nfinish_frame -------------------------- GST_FLOW_FLUSHING \n");
}
-if(ret == GST_FLOW_NOT_NEGOTIATED)
+ if(ret == GST_FLOW_EOS)
{
- g_print("\n-------------------------- GST_FLOW_NOT_NEGOTIATED \n");
+ g_print("\nfinish_frame -------------------------- GST_FLOW_EOS \n");
}
-if(ret == GST_FLOW_ERROR)
+ if(ret == GST_FLOW_NOT_NEGOTIATED)
{
- g_print("\n-------------------------- GST_FLOW_ERROR \n");
+ g_print("\nfinish_frame -------------------------- GST_FLOW_NOT_NEGOTIATED \n");
}
-if(ret == GST_FLOW_NOT_SUPPORTED)
+ if(ret == GST_FLOW_ERROR)
{
- g_print("\n-------------------------- GST_FLOW_NOT_SUPPORTED \n");
+ g_print("\nfinish_frame -------------------------- GST_FLOW_ERROR \n");
}
- gst_buffer_unref (outbuf);
- /* We need to copy the size and offset of the buffer at a minimum. */
-// GST_BUFFER_SIZE (outbuf) = outputbuffer->len;
-// GST_BUFFER_OFFSET (outbuf) = 0;
-
- /* Then allocate the memory for the new buffer */
- // GST_BUFFER_MALLOCDATA(outbuf) = (guchar *)g_malloc (GST_BUFFER_SIZE (outbuf));
- // ptr = (guchar *)g_malloc(GST_BUFFER_SIZE (outbuf));
- // GST_BUFFER_MALLOCDATA(outbuf) = ptr;
-
- // GST_BUFFER_DATA (outbuf) = (guchar *)g_malloc (GST_BUFFER_SIZE (outbuf));
- /* Then copy the data in the incoming buffer into the new buffer. */
- // memcpy (GST_BUFFER_DATA (outbuf), outputbuffer->bufferaddr, outputbuffer->len);
-
- /* if (decode_context->outputBufferFile != NULL)
+ if(ret == GST_FLOW_NOT_SUPPORTED)
{
- fwrite (outputbuffer->bufferaddr,1,outputbuffer->len,
- decode_context->outputBufferFile);
+ g_print("\nfinish_frame -------------------------- GST_FLOW_NOT_SUPPORTED \n");
}
+ //gst_video_frame_unmap (&vframe);
+ //gst_buffer_unref (g_frame1->output_buffer);
*/
+ //usleep(500*1000);
+ // sem_post (&decode_context->sem_output_buf_done);
+ }
+ else
+ {
+ g_print("\n*************************************************: %d\n",first_frm);
+ // g_print("\nThis is the first frame..ignore\n");
+ // g_print("\n*************************************************\n");
+ }
+ first_frm++;
tempbuffer = (struct vdec_bufferpayload *)\
outputbuffer->client_data;
@@ -1309,14 +1430,15 @@ if(ret == GST_FLOW_NOT_SUPPORTED)
ioctl_msg.in = &fillbuffer;
ioctl_msg.out = NULL;
- g_print("\n\nVDEC_MSG_RESP_OUTPUT_BUFFER_DONE in video thread # 4\n");
+ //g_print("\n\nVDEC_MSG_RESP_OUTPUT_BUFFER_DONE in video thread # 4\n");
if (ioctl (decode_context->video_driver_fd,
VDEC_IOCTL_FILL_OUTPUT_BUFFER,&ioctl_msg) < 0)
{
g_print("\n Decoder frame failed");
return NULL;
}
-
+ pthread_mutex_unlock(&read_lock);
+ GST_VIDEO_DECODER_STREAM_UNLOCK (decoder_cxt);
break;
case VDEC_MSG_RESP_FLUSH_INPUT_DONE:
@@ -1383,7 +1505,7 @@ static void* async_thread (Gstqcvideodec *decoder_cxt)
}
else
{
- g_print("\n\nioctl read next msg code : %d",vdec_msg.msgcode);
+ g_print("\n\nasync_thread ioctl read next msg code : %d",vdec_msg.msgcode);
switch (vdec_msg.msgcode)
{
case VDEC_MSG_RESP_FLUSH_INPUT_DONE:
@@ -1456,6 +1578,72 @@ static void* async_thread (Gstqcvideodec *decoder_cxt)
}
}
+static gboolean
+gst_qcvideodec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
+{
+ GstBufferPool *pool;
+ GstStructure *config;
+#if 0
+#if defined (USE_OMX_TARGET_RPI) && defined (HAVE_GST_GL)
+ {
+ GstCaps *caps;
+ gint i, n;
+ GstVideoInfo info;
+
+ gst_query_parse_allocation (query, &caps, NULL);
+ if (caps && gst_video_info_from_caps (&info, caps)
+ && info.finfo->format == GST_VIDEO_FORMAT_RGBA) {
+ gboolean found = FALSE;
+ GstCapsFeatures *feature = gst_caps_get_features (caps, 0);
+ /* Prefer an EGLImage allocator if available and we want to use it */
+ n = gst_query_get_n_allocation_params (query);
+ for (i = 0; i < n; i++) {
+ GstAllocator *allocator;
+ GstAllocationParams params;
+
+ gst_query_parse_nth_allocation_param (query, i, &allocator, &params);
+ if (allocator
+ && g_strcmp0 (allocator->mem_type,
+ GST_EGL_IMAGE_MEMORY_TYPE) == 0) {
+ found = TRUE;
+ gst_query_set_nth_allocation_param (query, 0, allocator, &params);
+ while (gst_query_get_n_allocation_params (query) > 1)
+ gst_query_remove_nth_allocation_param (query, 1);
+ break;
+ }
+ }
+
+ /* if try to negotiate with caps feature memory:EGLImage
+ * and if allocator is not of type memory EGLImage then fails */
+ if (feature
+ && gst_caps_features_contains (feature,
+ GST_CAPS_FEATURE_MEMORY_EGL_IMAGE) && !found) {
+ return FALSE;
+ }
+ }
+ }
+#endif
+#endif
+ g_print("\n gst_qcvideodec_decide_allocation \n");
+ if (!GST_VIDEO_DECODER_CLASS(gst_qcvideodec_parent_class)->decide_allocation (decoder, query))
+ return FALSE;
+
+ g_assert (gst_query_get_n_allocation_pools (query) > 0);
+ gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
+ g_assert (pool != NULL);
+
+ config = gst_buffer_pool_get_config (pool);
+ if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
+ gst_buffer_pool_config_add_option (config,GST_BUFFER_POOL_OPTION_VIDEO_META);
+ }
+ gst_buffer_pool_set_config (pool, config);
+ gst_object_unref (pool);
+ g_print("\n gst_qcvideodec_decide_allocation return TRUE\n");
+ return TRUE;
+}
+
+
+
/* GObject vmethod implementations */
@@ -1490,6 +1678,7 @@ gst_qcvideodec_class_init (GstqcvideodecClass * klass)
video_decoder_class->close = GST_DEBUG_FUNCPTR (gst_qcvideodec_close);
video_decoder_class->handle_frame = GST_DEBUG_FUNCPTR (gst_qcvideodec_handle_frame);
video_decoder_class->finish = GST_DEBUG_FUNCPTR (gst_qcvideodec_finish);
+ video_decoder_class->decide_allocation = GST_DEBUG_FUNCPTR (gst_qcvideodec_decide_allocation);
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&src_factory));
@@ -1510,7 +1699,7 @@ static void
gst_qcvideodec_init (Gstqcvideodec * filter)
{
g_print("\nqcvideodec_init Enter ----\n");
- filter->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
+ filter->sinkpad = gst_pad_new_from_static_template (&sink_factory, "qcsink");
gst_pad_set_event_function (filter->sinkpad,
GST_DEBUG_FUNCPTR(gst_qcvideodec_sink_event));
gst_pad_set_chain_function (filter->sinkpad,
@@ -1518,10 +1707,12 @@ gst_qcvideodec_init (Gstqcvideodec * filter)
GST_PAD_SET_PROXY_CAPS (filter->sinkpad);
gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);
- filter->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
+ filter->srcpad = gst_pad_new_from_static_template (&src_factory, "qcsrc");
+
+ gst_pad_set_active(filter->srcpad, TRUE);
GST_PAD_SET_PROXY_CAPS (filter->srcpad);
gst_element_add_pad (GST_ELEMENT (filter), filter->srcpad);
-
+ gst_pad_set_active(filter->srcpad, TRUE);
//gst_qcvideodec_open (filter);
//gst_qcvideodec_set_format (filter,NULL);
@@ -1683,21 +1874,20 @@ gst_qcvideodec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
sem_post (&decode_context->sem_synchronize);
}
- g_print("\n Input buffer done send next buffer current value = %d",\
- total_frames);
+ g_print("\n %d == Input buffer done send next buffer current value \n",total_frames);
}
- gst_buffer_unmap(buf, &in_map_info);
+ // gst_buffer_unmap(buf, &in_map_info);
- for(j=0;j<8;j++)
+/* for(j=0;j<8;j++)
{
g_print("\n hex value is : 0x%x \n",((char *)frameinfo.bufferaddr)[j]);
- }
+ }*/
// g_print ("\n Wait for EOS");
/*Wait for EOS or Error condition*/
// sem_wait (&decode_context->sem_synchronize);
// g_print ("\n Reached EOS");
- total_frames++;
+ // total_frames++;
// Should wait till get the msg from vcd driver, i.e INPUT_DONE
sem_wait(&decode_context->sem_input_buf_done);
// sem_wait (&decode_context->sem_synchronize);
diff --git a/src/gstqcvideodec.h b/src/gstqcvideodec.h
index 62aed13..3281a27 100644
--- a/src/gstqcvideodec.h
+++ b/src/gstqcvideodec.h
@@ -120,6 +120,10 @@ struct _Gstqcvideodec
pthread_t asyncthread_id;
sem_t sem_synchronize;
sem_t sem_input_buf_done;
+ sem_t sem_output_buf_done;
+ GstVideoCodecState *input_state;
+ GstVideoCodecState *output_state;
+ GstVideoCodecFrame frame;
};
struct _GstqcvideodecClass