locator = device.getLocator(); format = createFormat(viewSize); converter = new BufferToImage(format);
/**
 * Returns the human-readable name of the wrapped capture device.
 *
 * @return the name reported by the underlying <tt>CaptureDeviceInfo</tt>
 */
@Override
public String getName()
{
    return cdi.getName();
}
/**
 * Returns the list of RGB resolutions supported by the wrapped capture
 * device, sorted by ascending pixel count. The list is computed lazily on
 * the first call and cached in the <tt>dimensions</tt> field.
 *
 * NOTE(review): the lazy initialization is not thread-safe — concurrent
 * first calls may build the cache twice; confirm single-threaded use.
 *
 * @return the supported RGB resolutions in ascending pixel-count order
 */
@Override
public Dimension[] getResolutions()
{
    if (dimensions == null)
    {
        dimensions = new ArrayList<Dimension>();
        for (Format format : cdi.getFormats())
        {
            // Guard the cast: only VideoFormat instances carry a size; the
            // original cast anything reporting an "RGB" encoding.
            if ((format instanceof VideoFormat)
                    && "RGB".equalsIgnoreCase(format.getEncoding()))
            {
                Dimension size = ((VideoFormat) format).getSize();

                // VideoFormat.getSize() may return null when the size is
                // unspecified; a null entry would NPE in the comparator.
                if (size != null)
                    dimensions.add(size);
            }
        }
        Collections.sort(dimensions, new Comparator<Dimension>()
        {
            @Override
            public int compare(Dimension a, Dimension b)
            {
                int apx = a.width * a.height;
                int bpx = b.width * b.height;

                if (apx > bpx)
                    return 1;
                else if (apx < bpx)
                    return -1;
                else
                    return 0;
            }
        });
    }
    return dimensions.toArray(new Dimension[dimensions.size()]);
}
captureDeviceInfo.getName(), captureDeviceInfo.getLocator(), captureDeviceInfo.getFormats(), uid, transportType,
/**
 * Initializes a new <tt>IVFMediaDevice</tt> instance which will read
 * the IVF file located at <tt>filename</tt>.
 *
 * @param filename the location of the IVF file the <tt>IVFStream</tt>
 * will read.
 */
public IVFMediaDevice(String filename)
{
    // The "ivffile:" locator protocol routes the device to the IVF file
    // DataSource; the filename doubles as the device's display name.
    super(new CaptureDeviceInfo(
              filename,
              new MediaLocator("ivffile:"+filename),
              IVFMediaDevice.SUPPORTED_FORMATS),
          MediaType.VIDEO);
}
}
if (captureDeviceInfo.equals(cdiIter.next()))
/**
 * Returns a human-readable <tt>String</tt> representation of this
 * instance, delegating to the associated <tt>CaptureDeviceInfo</tt> when
 * one is available.
 *
 * @return a <tt>String</tt> providing a human-readable representation of
 * this instance
 */
@Override
public String toString()
{
    CaptureDeviceInfo info = getCaptureDeviceInfo();

    if (info == null)
        return super.toString();
    return info.toString();
}
}
/**
 * Creates an audio <tt>MediaDevice</tt> which reads the rtpdump file at
 * <tt>filePath</tt> and exposes its payload with the encoding
 * <tt>format</tt>.
 *
 * Note: for proper function, <tt>format</tt> has to implement correctly
 * the <tt>computeDuration(long)</tt> method, because FMJ insists on using
 * this to compute its own RTP timestamps.
 *
 * Note: The RtpdumpStream instance needs to know the RTP clock rate to
 * correctly interpret the RTP timestamps. We use the sampleRate field of
 * AudioFormat, or the frameRate field of VideoFormat, to piggyback the RTP
 * clock rate. See
 * {@link RtpdumpStream#RtpdumpStream(DataSource, javax.media.control.FormatControl)}
 * TODO: Avoid this hack...
 *
 * @param filePath the location of the rtpdump file
 * @param format the <tt>AudioFormat</tt> of the data contained in the
 * payload of the recorded rtp packet in the rtpdump file.
 * @return a <tt>MediaDevice</tt> that will read the rtpdump file given.
 */
public static MediaDevice createRtpdumpAudioMediaDevice(
        String filePath,
        AudioFormat format)
{
    // The "rtpdumpfile:" locator protocol routes the device to the
    // rtpdump DataSource; the single advertised format is the caller's.
    CaptureDeviceInfo deviceInfo
        = new CaptureDeviceInfo(
                "Audio rtpdump file",
                new MediaLocator("rtpdumpfile:" + filePath),
                new Format[] { format });

    return new MyAudioMediaDeviceImpl(deviceInfo);
}
&& captureDeviceInfo.equals( ((MediaDeviceImpl) device) .getCaptureDeviceInfo()))
Format format = new RGBFormat(); MediaLocator cameraLocator = null; // get device list Vector deviceList = CaptureDeviceManager.getDeviceList(format); // if devices available if(deviceList != null && deviceList.size() > 0) { // pick first CaptureDeviceInfo device = (CaptureDeviceInfo) deviceList.get(0); cameraLocator = device.getLocator(); }
import javax.media.*;
import java.util.*;

/**
 * Lists every capture device currently registered with the
 * <tt>CaptureDeviceManager</tt>, printing the total count followed by each
 * device's name.
 *
 * @param args unused command-line arguments
 */
public static void main(String[] args)
{
    // Presumably constructing VFWAuto registers the VFW capture devices
    // with the CaptureDeviceManager as a side effect — TODO confirm.
    VFWAuto vfwObj = new VFWAuto();

    Vector devices = CaptureDeviceManager.getDeviceList(null);
    System.out.println("Device count : " + devices.size());

    // Enhanced for-loop replaces the manual Enumeration idiom.
    for (Object element : devices)
    {
        CaptureDeviceInfo cdi = (CaptureDeviceInfo) element;
        System.out.println("Device : " + cdi.getName());
    }
}
/**
 * Finds the RGB-encoded <tt>VideoFormat</tt> of this device whose size
 * matches <tt>size</tt> exactly.
 *
 * @param size the exact resolution to look for
 * @return the first matching <tt>VideoFormat</tt>, or <tt>null</tt> if the
 * device advertises no RGB format of that size
 */
private VideoFormat getSizedVideoFormat(Dimension size)
{
    for (Format candidate : device.getFormats())
    {
        // Only RGB-encoded video formats are of interest.
        if ((candidate instanceof VideoFormat)
                && "RGB".equalsIgnoreCase(candidate.getEncoding()))
        {
            VideoFormat videoFormat = (VideoFormat) candidate;
            Dimension formatSize = videoFormat.getSize();

            if ((formatSize.width == size.width)
                    && (formatSize.height == size.height))
                return videoFormat;
        }
    }
    return null;
}
/**
 * Initializes a new <tt>VideoGreyFadingMediaDevice</tt> with the given
 * framerate and dimension.
 *
 * @param framerate the framerate of the <tt>CaptureDevice</tt> behind this
 * <tt>MediaDevice</tt>.
 * @param dimension the dimension (width and height) of the
 * <tt>CaptureDevice</tt> behind this <tt>MediaDevice</tt>.
 */
public VideoGreyFadingMediaDevice(int framerate, Dimension dimension)
{
    // The "greyfading:" locator protocol routes the device to the
    // grey-fading video DataSource. The single advertised format is a
    // 32-bit RGB format at the requested size and framerate.
    super(new CaptureDeviceInfo(
            "GreyFadingVideo",
            new MediaLocator("greyfading:"),
            new Format[]
            {
                new RGBFormat(
                     dimension, // size
                     Format.NOT_SPECIFIED, // maxDataLength
                     Format.byteArray, // dataType
                     framerate, // frameRate
                     32, // bitsPerPixel
                     // NOTE(review): 2/3/4 appear to be the byte positions
                     // of the red/green/blue components within each 32-bit
                     // pixel — TODO confirm against RGBFormat docs.
                     2 /* red */,
                     3 /* green */,
                     4 /* blue */)
            }),
        MediaType.VIDEO);
}
}
/**
 * Gets the <tt>MediaLocator</tt> which specifies the playback device to be
 * used by this <tt>Renderer</tt>. When no locator has been set explicitly,
 * falls back to the device currently selected in the associated audio
 * system for this data flow.
 *
 * @return the <tt>MediaLocator</tt> which specifies the playback device to
 * be used by this <tt>Renderer</tt>
 */
public MediaLocator getLocator()
{
    MediaLocator result = this.locator;

    if ((result == null) && (audioSystem != null))
    {
        CaptureDeviceInfo selected
            = audioSystem.getSelectedDevice(dataFlow);

        if (selected != null)
            result = selected.getLocator();
    }
    return result;
}
Vector list = CaptureDeviceManager.getDeviceList(null); int i; CaptureDeviceInfo tempDevice; // List all the devices ... if( list!=null) { if( list.size() == 0) { System.out.println("the device list is zero : "); System.exit(1); } System.out.println("The devices are : "); for( i=0;i< list.size() ;i++ ) { tempDevice = (CaptureDeviceInfo) list.elementAt(i); System.out.println(tempDevice.getName()); } }
/** * Get suitable video format to use (the largest one by default, but this * can be easily changed). * * @param device device to get video format for * @return Suitable video format */ private VideoFormat getLargestVideoFormat() { Format[] formats = device.getFormats(); VideoFormat format = null; int area = 0; // find the largest picture format for (Format f : formats) { if (!(f instanceof VideoFormat) || !"RGB".equalsIgnoreCase(f.getEncoding())) { continue; } VideoFormat vf = (VideoFormat) f; Dimension dim = vf.getSize(); int a = dim.width * dim.height; if (a > area) { area = a; format = vf; } } return format; }
= new CaptureDeviceInfo( name + " " + display, new MediaLocator(
/**
 * Returns a human-readable representation of a specific
 * <tt>CaptureDevice</tt> in the form of a <tt>String</tt> value.
 *
 * @param captureDevice the <tt>CaptureDevice</tt> to get a human-readable
 * representation of
 * @return a <tt>String</tt> value which gives a human-readable
 * representation of the specified <tt>captureDevice</tt>
 */
private static String toString(CaptureDevice captureDevice)
{
    // StringBuilder instead of StringBuffer: the buffer is method-local,
    // so the synchronization of StringBuffer is pure overhead.
    StringBuilder str = new StringBuilder();

    str.append("CaptureDevice with hashCode ");
    str.append(captureDevice.hashCode());
    str.append(" and captureDeviceInfo ");

    CaptureDeviceInfo captureDeviceInfo
        = captureDevice.getCaptureDeviceInfo();
    MediaLocator mediaLocator
        = (captureDeviceInfo == null)
            ? null
            : captureDeviceInfo.getLocator();

    // Prefer the locator; fall back to the (possibly null) device info,
    // matching the original output exactly.
    str.append((mediaLocator == null) ? captureDeviceInfo : mediaLocator);
    return str.toString();
}
/**
 * Determines whether the <tt>MediaDevice</tt> corresponds to a partial
 * desktop streaming device.
 *
 * @param mediaDevice <tt>MediaDevice</tt>
 * @return true if <tt>MediaDevice</tt> is a partial desktop streaming
 * device, false otherwise
 */
public boolean isPartialStreaming(MediaDevice mediaDevice)
{
    // instanceof covers both null and foreign MediaDevice implementations;
    // the previous unconditional cast threw ClassCastException for any
    // MediaDevice that was not a MediaDeviceImpl.
    if (!(mediaDevice instanceof MediaDeviceImpl))
        return false;

    CaptureDeviceInfo cdi
        = ((MediaDeviceImpl) mediaDevice).getCaptureDeviceInfo();

    return (cdi != null)
        && cdi.getName().startsWith("Partial desktop streaming");
}
/**
 * Provides the default implementation of
 * <tt>AbstractBufferCaptureDevice</tt> for
 * {@link #getSupportedFormats(int)}: the formats advertised by this
 * device's <tt>CaptureDeviceInfo</tt>, or an empty array when none is set.
 *
 * @param streamIndex the zero-based index of the
 * <tt>AbstractBufferStream</tt> for which the specified
 * <tt>FormatControl</tt> is to report the list of supported
 * <tt>Format</tt>s
 * @return an array of <tt>Format</tt>s to be reported by a
 * <tt>FormatControl</tt> as the supported formats for the
 * <tt>AbstractBufferStream</tt> at the specified <tt>streamIndex</tt> in
 * the list of streams of this <tt>AbstractBufferCaptureDevice</tt>
 */
final Format[] defaultGetSupportedFormats(int streamIndex)
{
    CaptureDeviceInfo info = getCaptureDeviceInfo();

    if (info == null)
        return new Format[0];
    return info.getFormats();
}