Hi. I have updated my code for streaming, but I am still not able to display the content. Please find the code below. Can anyone tell me where I made a mistake? Is the code below correct?
Code for getting data (working perfectly):
/**
 * Background thread that listens on a UDP port and publishes the latest
 * received datagram as a String, notifying the UI via updateTextMessage2.
 *
 * NOTE(review): this receiver delivers text messages; the MediaCodec display
 * code expects raw H264 NAL units via getCSD()/nextFrame(), which this class
 * does not provide — that mismatch is the likely reason nothing is displayed.
 */
public class myVideoReceiver extends Thread {
    // volatile: written from the UI thread via kill(), read by this thread's loop
    public volatile boolean bKeepRunning2 = true;
    public String lastMessage2 = "";
    // Held in a field so kill() can close it and unblock the blocking receive()
    private DatagramSocket socket2;

    public void run() {
        byte[] lmessage2 = new byte[MAX_UDP_DATAGRAM_LEN2];
        DatagramPacket packet2 = new DatagramPacket(lmessage2, lmessage2.length);
        try {
            socket2 = new DatagramSocket(UDP_SERVER_PORT2);
            while (bKeepRunning2) {
                socket2.receive(packet2);
                // Only packet2.getLength() bytes of the buffer are valid for this datagram
                lastMessage2 = new String(lmessage2, 0, packet2.getLength());
                runOnUiThread(updateTextMessage2);
            }
        } catch (Throwable e) {
            // A SocketException lands here when kill() closes the socket — expected on shutdown
            e.printStackTrace();
        } finally {
            // BUG FIX: the original closed "mysocket" (an unrelated outer field) and
            // leaked the socket actually opened here; close it unconditionally.
            if (socket2 != null) {
                socket2.close();
            }
        }
    }

    /** Stops the receive loop; closing the socket unblocks a pending receive(). */
    public void kill() {
        bKeepRunning2 = false;
        if (socket2 != null) {
            socket2.close();
        }
    }

    /** Returns the most recently received message (may be "" before the first packet). */
    public String getLastMessage() {
        return lastMessage2;
    }
}
// Posted to the UI thread by the receiver: shows the newest message in the TextView.
public Runnable updateTextMessage2 = new Runnable() {
    @Override
    public void run() {
        // Skip silently if the receiver has not been created yet
        if (myVideoReceiver != null) {
            VIDEO_RESPONSE.setText(myVideoReceiver.getLastMessage());
        }
    }
};
Code for Displaying streamed data using TextureView:
/**
 * Activity that decodes an incoming H264 stream into a TextureView using MediaCodec.
 * Decoding starts when the SurfaceTexture becomes available and stops on
 * onStop()/surface destruction.
 */
public class MainActivity extends AppCompatActivity implements TextureView.SurfaceTextureListener {
    private TextureView m_surface;        // View hosting the SurfaceTexture the decoder renders into
    private myVideoReceiver provider;     // Object that connects to our server and gets H264 frames
    private MediaCodec m_codec;           // H264 decoder
    private DecodeFramesTask m_frameTask; // Background task feeding frames to the decoder

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // Get a reference to the TextureView in the UI
        m_surface = (TextureView) findViewById(R.id.textureView);
        // Register for SurfaceTexture lifecycle events; decoding starts when it is ready
        m_surface.setSurfaceTextureListener(this);
    }

    @Override
    // Invoked when the TextureView's SurfaceTexture is ready for use
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        provider = new myVideoReceiver();
        // BUG FIX: myVideoReceiver extends Thread but was never started, so no data
        // could ever arrive. (The constructor does NOT start anything by itself.)
        provider.start();
        // NOTE(review): for the code below to work the provider must expose
        // getCSD()/nextFrame()/release() returning raw H264 NAL units — the posted
        // receiver class only produces Strings; that mismatch must be resolved.
        MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, 1920, 1080);
        // csd-0 carries the SPS/PPS configuration data the decoder needs before the first frame
        format.setByteBuffer("csd-0", ByteBuffer.wrap(provider.getCSD()));
        // Upper bound on a single encoded frame, in bytes
        format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 100000);
        try {
            m_codec = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
            // Render decoded output directly into the TextureView's surface
            m_codec.configure(format, new Surface(m_surface.getSurfaceTexture()), null, 0);
            m_codec.start();
            m_frameTask = new DecodeFramesTask();
            m_frameTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @Override
    // Invoked when the SurfaceTexture's buffer size changes
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
    }

    @Override
    // Invoked when the SurfaceTexture is about to be destroyed
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        // BUG FIX: stop decoding before the surface goes away, and return true so
        // the system releases the SurfaceTexture (the original returned false while
        // never calling release() itself, leaking it).
        if (m_frameTask != null) {
            m_frameTask.cancel(true);
        }
        return true;
    }

    @Override
    // Invoked when the SurfaceTexture is updated through updateTexImage()
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
    }

    /** Pulls encoded frames from the provider and feeds them through the decoder. */
    private class DecodeFramesTask extends AsyncTask<String, String, String> {
        @Override
        protected String doInBackground(String... data) {
            while (!isCancelled()) {
                byte[] frame = provider.nextFrame();
                // Guard against the provider having no frame ready yet
                if (frame != null) {
                    // -1: block until an input buffer is free (no playback-time reference here)
                    int inputIndex = m_codec.dequeueInputBuffer(-1);
                    if (inputIndex >= 0) {
                        ByteBuffer buffer = m_codec.getInputBuffer(inputIndex);
                        buffer.put(frame);
                        // Hand the frame to the decoder (timestamp 0: no A/V sync needed)
                        m_codec.queueInputBuffer(inputIndex, 0, frame.length, 0, 0);
                    }
                    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
                    int outputIndex = m_codec.dequeueOutputBuffer(info, 0);
                    if (outputIndex >= 0) {
                        // true: render this buffer to the configured surface
                        m_codec.releaseOutputBuffer(outputIndex, true);
                    }
                }
                // The server produces a frame every 250 ms; pace the loop accordingly
                try { Thread.sleep(250); } catch (Exception e) { e.printStackTrace(); }
            }
            return "";
        }

        @Override
        protected void onPostExecute(String result) {
            try {
                m_codec.stop();
                m_codec.release();
            } catch (Exception e) {
                e.printStackTrace();
            }
            provider.release();
        }
    }

    @Override
    public void onStop() {
        super.onStop();
        // BUG FIX: null-guard — onSurfaceTextureAvailable may never have run
        // (e.g. the activity stopped before the surface was ready)
        if (m_frameTask != null) {
            m_frameTask.cancel(true);
        }
        if (provider != null) {
            provider.release();
        }
    }
}