/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.example.android.basicmediadecoder;


import android.animation.TimeAnimator;
import android.app.Activity;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.net.Uri;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.widget.TextView;

import com.example.android.common.media.MediaCodecWrapper;

import java.io.IOException;

/**
 * This activity uses a {@link android.view.TextureView} to render the frames of a video decoded
 * using the {@link android.media.MediaCodec} API.
 */
public class MainActivity extends Activity {

    private TextureView mPlaybackView;
    private TimeAnimator mTimeAnimator = new TimeAnimator();

    // A utility that wraps up the underlying input and output buffer processing operations
    // into an easy-to-use API.
    private MediaCodecWrapper mCodecWrapper;
    private MediaExtractor mExtractor = new MediaExtractor();
    TextView mAttribView = null;


    /**
     * Called when the activity is first created.
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.sample_main);
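        // Look up the TextureView that will display the decoded video frames and the TextView
        // that shows the content attribution.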
        mPlaybackView = (TextureView) findViewById(R.id.PlaybackView);
        mAttribView = (TextView) findViewById(R.id.AttribView);

    }

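    // Inflate the action menu; it contains the single Play item used to start playback.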
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.action_menu, menu);
        return true;
    }

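    // When the activity is paused, stop the animator so no further frames are processed, and
    // release the codec and extractor so their resources are freed.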
    @Override
    protected void onPause() {
        super.onPause();
        if (mTimeAnimator != null && mTimeAnimator.isRunning()) {
            mTimeAnimator.end();
        }

        if (mCodecWrapper != null) {
            mCodecWrapper.stopAndRelease();
            mExtractor.release();
        }
    }

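    // Handle selection of the Play action: reveal the attribution view, start decoding and
    // rendering the video, and disable the item so playback can only be started once.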
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        if (item.getItemId() == R.id.menu_play) {
            mAttribView.setVisibility(View.VISIBLE);
            startPlayback();
            item.setEnabled(false);
        }
        return true;
    }


    public void startPlayback() {

        // Construct a URI that points to the video resource that we want to play
        Uri videoUri = Uri.parse("android.resource://"
                + getPackageName() + "/"
                + R.raw.vid_bigbuckbunny);

        try {
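            // Point the extractor at the packaged video resource so we can inspect and read
            // its tracks.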
            mExtractor.setDataSource(this, videoUri, null);
            int nTracks = mExtractor.getTrackCount();

            // Begin by unselecting all of the tracks in the extractor, so we won't see
            // any tracks that we haven't explicitly selected.
            for (int i = 0; i < nTracks; ++i) {
                mExtractor.unselectTrack(i);
            }


            // Find the first video track in the stream. In a real-world application
            // it's possible that the stream would contain multiple tracks, but this
            // sample assumes that we just want to play the first one.
            for (int i = 0; i < nTracks; ++i) {
                // Try to create a video codec for this track. This call will return null if the
                // track is not a video track, or not a recognized video format. Once it returns
                // a valid MediaCodecWrapper, we can break out of the loop.
                mCodecWrapper = MediaCodecWrapper.fromVideoFormat(mExtractor.getTrackFormat(i),
                        new Surface(mPlaybackView.getSurfaceTexture()));
                if (mCodecWrapper != null) {
                    mExtractor.selectTrack(i);
                    break;
                }
            }

            // By using a {@link TimeAnimator}, we can sync our media rendering commands with
            // the system display frame rendering. The animator ticks as the {@link Choreographer}
            // receives VSYNC events.
            mTimeAnimator.setTimeListener(new TimeAnimator.TimeListener() {
                @Override
                public void onTimeUpdate(final TimeAnimator animation,
                                         final long totalTime,
                                         final long deltaTime) {

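                    // Check whether the extractor has reached the end of the stream, i.e. the
                    // current sample's flags include the end-of-stream flag.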
                    boolean isEos = ((mExtractor.getSampleFlags() & MediaCodec
                            .BUFFER_FLAG_END_OF_STREAM) == MediaCodec.BUFFER_FLAG_END_OF_STREAM);

                    if (!isEos) {
                        // Try to submit the sample to the codec and if successful advance the
                        // extractor to the next available sample to read.
                        boolean result = mCodecWrapper.writeSample(mExtractor, false,
                                mExtractor.getSampleTime(), mExtractor.getSampleFlags());

                        if (result) {
                            // Advancing the extractor is a blocking operation and it MUST be
                            // executed outside the main thread in real applications.
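                            // (For example, a real application might move extraction onto a
                            // HandlerThread or a background executor.)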
                            mExtractor.advance();
                        }
                    }

                    // Examine the sample at the head of the queue to see if it's ready to be
                    // rendered and is not a zero-sized end-of-stream record.
                    MediaCodec.BufferInfo out_bufferInfo = new MediaCodec.BufferInfo();
                    mCodecWrapper.peekSample(out_bufferInfo);

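                    // If the buffer is empty and the end of the stream has been reached, playback
                    // is finished, so stop the animator and release the codec and extractor.
                    // Otherwise render the sample once its presentation time has been reached.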
                    if (out_bufferInfo.size <= 0 && isEos) {
                        mTimeAnimator.end();
                        mCodecWrapper.stopAndRelease();
                        mExtractor.release();
                    } else if (out_bufferInfo.presentationTimeUs / 1000 < totalTime) {
                        // Pop the sample off the queue and send it to the {@link Surface}
                        mCodecWrapper.popSample(true);
                    }

                }
            });

            // We're all set. Kick off the animator to process buffers and render video frames as
            // they become available.
            mTimeAnimator.start();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}