package seu.mcs.client.media;
/**
 * Captures the local audio/video data and transmits it to the specified
 * address (address:port) over RTP.
 *
 * @author leeo
 */
import javax.media.rtp.*;
import javax.media.*;
import javax.media.protocol.*;
import javax.media.control.FormatControl;
import javax.media.control.TrackControl;
import javax.media.format.*;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.*;
public class TransmitController {
private CaptureDeviceInfo captureVideoDevice = null;
private CaptureDeviceInfo captureAudioDevice = null;
private Processor videoProcessor = null;
private Processor audioProcessor = null;
private DataSource videoDataSource = null;
private DataSource audioDataSource = null;
//private SessionManager videoRtpsm = null;
//private SessionManager audioRtpsm = null;
private RTPManager videortpManager = null;
private RTPManager audiortpManager = null;
private SendStream videoRtpstream = null;
private SendStream audioRtpstream = null;
//private SessionManager mgr = null;
private Vector sendplayerlist = new Vector();
private boolean terminatedbyClose_sender = false;
private SessionAddress remoteAddress1 = null;
private SessionAddress remoteAddress2 = null;
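/**
 * Device discovery: keeps the first capture device whose name starts with
 * "vfw:" as the video source (if none matches, the last device in the list
 * is left selected), and the first device that supports linear 8000 Hz,
 * 8-bit, mono audio as the audio source.
 */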
private void getDeviceInfo() {
// Get the VideoCaptureDeviceInfo for the live video capture device
// initialize the CaptureDeviceInfo
Vector deviceList = null;
deviceList = CaptureDeviceManager.getDeviceList(null);
for (int i = 0; i < deviceList.size(); i++) {
captureVideoDevice = (CaptureDeviceInfo)deviceList.elementAt(i);
String name = captureVideoDevice.getName();
if(name.startsWith("vfw:")) break;
}
// Get the AudioCaptureDeviceInfo for the live audio capture device
deviceList=null;
deviceList = CaptureDeviceManager.getDeviceList(new AudioFormat("linear", 8000, 8, 1));
if (deviceList.size() > 0)
captureAudioDevice = (CaptureDeviceInfo)deviceList.elementAt(0);
else {
// Exit if we can't find a device that does linear, 8000 Hz, 8-bit, mono audio.
System.err.println("Device initializing failure!");
System.exit(-1);
}
}
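/**
 * Builds the two capture Processors: the video processor is programmed to
 * output JPEG/RTP video and the audio processor to output 8000 Hz, 8-bit,
 * mono ULAW/RTP audio. Their RAW outputs become the DataSources that are
 * later wrapped into RTP send streams.
 */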
private void setDataSource() throws IOException, NoProcessorException,
CannotRealizeException, NoDataSinkException {
DataSource ds = null;
TrackControl track[] = null;
//Create a video processor for capturevideodevice & exit if we cannot create it
try {
ds = Manager.createDataSource(captureVideoDevice.getLocator());
} catch (Exception e) { e.printStackTrace(); }
try {
videoProcessor = Manager.createProcessor(ds);
} catch (Exception e) { e.printStackTrace(); }
videoProcessor.configure();
// Poll until the processor has been configured.
while (videoProcessor.getState() != Processor.Configured) {
try {
Thread.sleep(100); // this.wait() would need a held monitor, so sleep instead
} catch (InterruptedException e) {}
}
videoProcessor.setContentDescriptor(new ContentDescriptor(ContentDescriptor.RAW));
track = videoProcessor.getTrackControls();
boolean encodingOk = false;
// Go through the tracks and try to program one of them to
// output JPEG/RTP video data.
for (int i = 0; i < track.length; i++) {
if (!encodingOk && track[i] instanceof FormatControl) {
if (((FormatControl) track[i]).setFormat(new VideoFormat(VideoFormat.JPEG_RTP)) == null) {
track[i].setEnabled(false);
}
else {
encodingOk = true;
}
} else {
// We could not set this track to JPEG/RTP, so disable it.
track[i].setEnabled(false);
}
}
videoProcessor.realize();
// Poll until the processor has been realized.
while (videoProcessor.getState() != Processor.Realized) {
try {
Thread.sleep(100);
} catch (InterruptedException e) {}
}
try {
videoDataSource = videoProcessor.getDataOutput();
} catch (NotRealizedError e) {
System.exit(-1);
}
// Create an audio processor for captureAudioDevice & exit if we
// cannot create it
try {
ds = Manager.createDataSource(captureAudioDevice.getLocator());
} catch (Exception e) { e.printStackTrace(); }
try {
audioProcessor = Manager.createProcessor(ds);
} catch (Exception e) { e.printStackTrace(); }
audioProcessor.configure();
// Poll until the processor has been configured.
while (audioProcessor.getState() != Processor.Configured) {
try {
Thread.sleep(100);
} catch (InterruptedException e) {}
}
audioProcessor.setContentDescriptor(new ContentDescriptor(ContentDescriptor.RAW));
track = audioProcessor.getTrackControls();
boolean encodingOk1 = false;
// Go through the tracks and try to program one of them to
// output ULAW/RTP audio data.
for (int i = 0; i < track.length; i++) {
if (!encodingOk1 && track[i] instanceof FormatControl) {
if (((FormatControl) track[i]).setFormat(new AudioFormat(AudioFormat.ULAW_RTP,
8000,
8,
1)) == null) {
track[i].setEnabled(false);
}
else {
encodingOk1 = true;
}
} else {
// We could not set this track to ULAW/RTP, so disable it.
track[i].setEnabled(false);
}
}
// Realize it and poll until it has been realized.
audioProcessor.realize();
while (audioProcessor.getState() != Processor.Realized) {
try {
Thread.sleep(100);
} catch (InterruptedException e) {}
}
try {
audioDataSource = audioProcessor.getDataOutput();
} catch (NotRealizedError e) {
System.exit(-1);
}
}
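/**
 * Opens two unicast RTP sessions to the target: video on targetPort and
 * audio on targetPort + 2. Each SessionAddress built from (address, port)
 * uses port + 1 for RTCP, so the receiver is expected to listen on
 * targetPort/targetPort+1 for video and targetPort+2/targetPort+3 for audio.
 */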
public void transmitStart(String targetAddress, String targetPort) {
videortpManager = RTPManager.newInstance();
audiortpManager = RTPManager.newInstance();
SessionAddress localAddress1 = null;
SessionAddress localAddress2 = null;
// create the local endpoint for the local interface on any local port
localAddress1= new SessionAddress();
localAddress2= new SessionAddress();
try {
videortpManager.initialize(localAddress1);
audiortpManager.initialize(localAddress2);
} catch (IOException e) {
System.out.println(e.getMessage());
e.printStackTrace();
} catch (InvalidSessionAddressException e) {
e.printStackTrace();
}
// specify the remote endpoint of this unicast session
try {
InetAddress ipAddress = InetAddress.getByName(targetAddress);
remoteAddress1 = new SessionAddress(ipAddress, Integer.parseInt(targetPort));
videortpManager.addTarget(remoteAddress1);
remoteAddress2 = new SessionAddress(ipAddress, Integer.parseInt(targetPort)+2);
audiortpManager.addTarget(remoteAddress2);
System.out.println();
System.out.println("data address " + localAddress1.getDataAddress());
System.out.println("contorl address " + localAddress1.getControlAddress());
System.out.println("data port " + localAddress1.getDataPort());
System.out.println("control port " + localAddress1.getControlPort());
System.out.println();
System.out.println("data address " + localAddress2.getDataAddress());
System.out.println("contorl address " + localAddress2.getControlAddress());
System.out.println("data port " + localAddress2.getDataPort());
System.out.println("control port " + localAddress2.getControlPort());
System.out.println();
} catch (UnknownHostException e) {
e.printStackTrace();
} catch (InvalidSessionAddressException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
try {
videoRtpstream = videortpManager.createSendStream(videoDataSource, 0);
audioRtpstream = audiortpManager.createSendStream(audioDataSource, 0);
} catch (IOException e) {
e.printStackTrace();
} catch (UnsupportedFormatException e) {
e.printStackTrace();
}
try {
videoProcessor.start();
audioProcessor.start();
videoRtpstream.start();
audioRtpstream.start();
} catch (IOException e) {
e.printStackTrace();
}
}
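/**
 * Stops and closes both send streams, tears down the RTP sessions, and
 * releases the processors and capture data sources.
 */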
public void transmitStop() throws IOException {
try {
videoRtpstream.stop();
videoRtpstream.close();
videoProcessor.stop();
audioRtpstream.stop();
audioRtpstream.close();
audioProcessor.stop(); // stop (not close) so the later deallocate() call is still valid
// close the connection if no longer needed.
videortpManager.removeTarget(remoteAddress1, "client disconnected.");
audiortpManager.removeTarget(remoteAddress2, "client disconnected.");
// call dispose at the end of the life-cycle of this RTPManager so
// it is prepared to be garbage-collected.
videortpManager.dispose();
audiortpManager.dispose();
System.out.println("Now releasing the resource!");
videoProcessor.deallocate();
audioProcessor.deallocate();
videoDataSource.disconnect();
audioDataSource.disconnect();
} catch (Exception e) { e.printStackTrace(); }
}
public TransmitController() {
// Initialize the devices and get ready to transmit.
getDeviceInfo();
try {
setDataSource();
} catch (IOException e) {
e.printStackTrace();
} catch (NoProcessorException e) {
e.printStackTrace();
} catch (CannotRealizeException e) {
e.printStackTrace();
} catch (NoDataSinkException e) {
e.printStackTrace();
}
}
}
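/*
 * A minimal usage sketch (not part of the original class): it assumes a
 * receiver is already listening at the chosen address and ports. The demo
 * class name, the target address "127.0.0.1" and the port "22222" are
 * illustrative values only.
 */
class TransmitDemo {
    public static void main(String[] args) throws Exception {
        // Discover the capture devices and build the JPEG/RTP and ULAW/RTP processors.
        TransmitController controller = new TransmitController();
        // Send video to 127.0.0.1:22222 and audio to 127.0.0.1:22224.
        controller.transmitStart("127.0.0.1", "22222");
        // Transmit for 30 seconds, then release everything.
        Thread.sleep(30000);
        controller.transmitStop();
    }
}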