/**
 * Configures the block-based threshold detector.
 *
 * @param requestedBlockWidth Approximately how wide and tall a block should be, in pixels.
 * @param thresholdFromLocalBlocks Flag controlling whether the threshold is computed from
 *                                 local blocks. NOTE(review): exact neighborhood semantics
 *                                 are defined elsewhere — confirm against the implementation.
 * @param imageClass Type of single-band input image.
 */
public ThresholdBlockCommon(ConfigLength requestedBlockWidth,
							boolean thresholdFromLocalBlocks,
							Class<T> imageClass) {
	this.thresholdFromLocalBlocks = thresholdFromLocalBlocks;
	this.requestedBlockWidth = requestedBlockWidth;
	this.imageType = ImageType.single(imageClass);
}
/**
 * Wraps a block-pyramid dense optical flow algorithm behind this adapter.
 *
 * @param flowAlg Block-based dense optical flow implementation being wrapped.
 * @param scale Scale factor between pyramid layers.
 * @param maxLayers Maximum number of layers in the pyramid.
 * @param imageType Type of single-band input image.
 */
public FlowBlock_to_DenseOpticalFlow(DenseOpticalFlowBlockPyramid<T> flowAlg,
									 double scale,
									 int maxLayers,
									 Class<T> imageType) {
	this.scale = scale;
	this.maxLayers = maxLayers;
	this.flowAlg = flowAlg;
	this.imageType = ImageType.single(imageType);
}
/** Returns the single-band image type this implementation accepts as input. */
@Override
public ImageType<T> getInputType() {
	ImageType<T> type = ImageType.single(imageType);
	return type;
}
}
/**
 * Derivative image type, recovered reflectively from the wrapped method's
 * second parameter. NOTE(review): assumes {@code m} is the wrapped processing
 * method whose parameter 1 is the derivative image — confirm at the call site.
 */
@Override
public ImageType<Output> getDerivativeType() {
	Class derivClass = m.getParameterTypes()[1];
	return ImageType.single(derivClass);
}
}
/**
 * Derivative image type, recovered reflectively from the wrapped method's
 * third parameter. NOTE(review): assumes {@code m} is the wrapped processing
 * method whose parameter 2 is the derivative image — confirm at the call site.
 */
@Override
public ImageType<Output> getDerivativeType() {
	Class derivClass = m.getParameterTypes()[2];
	return ImageType.single(derivClass);
}
}
/**
 * Configures background removal.
 *
 * @param learnRate Specifies how quickly the background is updated. 0 = static 1.0 = instant. Try 0.05
 * @param threshold Threshold for background. Try 10.
 * @param imageType Type of input image.
 */
public BackgroundStationaryGaussian_SB(float learnRate, float threshold, Class<T> imageType) {
	super(learnRate, threshold, ImageType.single(imageType));

	// Generic accessor used to read pixel values independent of the image's primitive type
	this.inputWrapper = FactoryGImageGray.create(imageType);
}
/**
 * Rebuilds the stationary Gaussian background model from the new configuration.
 * The reference swap happens while holding {@code lockBackground} so other code
 * synchronizing on the same lock observes a consistent model.
 */
@Override
public void modelChanged(ConfigBackgroundGaussian config, boolean stationary) {
	synchronized (lockBackground) {
		background = FactoryBackgroundModel.stationaryGaussian(config, ImageType.single(GrayU8.class));
	}
}
/**
 * Rebuilds the stationary basic background model from the new configuration.
 * The reference swap happens while holding {@code lockBackground} so other code
 * synchronizing on the same lock observes a consistent model.
 */
@Override
public void modelChanged(ConfigBackgroundBasic config, boolean stationary) {
	synchronized (lockBackground) {
		background = FactoryBackgroundModel.stationaryBasic(config, ImageType.single(GrayU8.class));
	}
}
/**
 * Sets up the demonstration application and installs a "Data" menu with a
 * save-input action.
 *
 * @param examples Paths to example inputs shown to the user.
 * @param imageType Type of single-band input image.
 */
public DetectBlackShapeAppBase(List<String> examples, Class<T> imageType) {
	super(examples, ImageType.single(imageType));
	this.imageClass = imageType;

	// Menu item which saves the current input image
	JMenuItem itemSaveInput = new JMenuItem("Save Input");
	itemSaveInput.addActionListener(e -> requestSaveInputImage());
	BoofSwingUtil.setMenuItemKeys(itemSaveInput, KeyEvent.VK_S, KeyEvent.VK_Y);

	JMenu menuData = new JMenu("Data");
	menuData.setMnemonic(KeyEvent.VK_D);
	menuData.add(itemSaveInput);
	menuBar.add(menuData);
}
/**
 * Configures the pixel transforms used to remove lens distortion.
 *
 * @param param Intrinsic camera parameters with radial distortion.
 * @param rect Rectification matrix. If null only lens distortion is removed;
 *             otherwise the image is also rectified.
 */
public void setDistorted(CameraPinholeRadial param, DenseMatrix64F rect) {
	if (rect != null) {
		// Remove distortion and rectify in a single transform
		undoRadial = RectifyImageOps.rectifyImage(
				param, rect, BorderType.ZERO, ImageType.single(GrayF32.class));
		remove_p_to_p = RectifyImageOps.transformPixelToRect_F32(param, rect);
	} else {
		// Only remove lens distortion, keeping the full view
		undoRadial = LensDistortionOps.imageRemoveDistortion(
				AdjustmentType.FULL_VIEW, BorderType.ZERO, param, null, ImageType.single(GrayF32.class));
		remove_p_to_p = LensDistortionOps.transform_F32(AdjustmentType.FULL_VIEW, param, null, false);
	}
}
/**
 * Wraps a scale/orientation aware BRIEF descriptor behind the generic interface.
 *
 * @param alg BRIEF-SO descriptor being wrapped.
 * @param imageType Type of single-band input image.
 */
public WrapDescribeBriefSo(DescribePointBriefSO<T> alg, Class<T> imageType) {
	this.imageType = ImageType.single(imageType);
	this.alg = alg;
	this.length = alg.getDefinition().getLength();
}
/**
 * Wraps a BRIEF descriptor behind the generic interface.
 *
 * @param alg BRIEF descriptor being wrapped.
 * @param imageType Type of single-band input image.
 */
public WrapDescribeBrief(DescribePointBrief<T> alg, Class<T> imageType) {
	this.imageType = ImageType.single(imageType);
	this.alg = alg;
	this.length = alg.getDefinition().getLength();
}
/**
 * Demonstration which steps through every frame in "combined.mpng" and prints
 * a line per image.
 */
public static void main(String[] args) throws FileNotFoundException {
	ImageStreamSequence sequence =
			new ImageStreamSequence("combined.mpng", true, ImageType.single(GrayU16.class));

	while (sequence.hasNext()) {
		System.out.println("Image");
		sequence.next();
	}
}
}
/**
 * Sets up the visualization with a control panel on the left and the image
 * display in the center.
 *
 * @param exampleInputs Example inputs shown to the user.
 */
public VisualizeBackgroundModelApp(List<?> exampleInputs) {
	super(exampleInputs, ImageType.single(GrayU8.class));
	// Only video inputs make sense for a background model
	super.allowImages = false;

	imagePanels.setScaleToFit(true);
	add(BorderLayout.CENTER, imagePanels);
	add(BorderLayout.WEST, controls);
}
/**
 * Declares storage for up to three detected fiducials. Each slot is given a
 * 1x1 binary image which is expected to be resized later.
 */
public FiducialProcessor() {
	super(ImageType.single(GrayU8.class));

	detected = new FiducialDetector.Detected[3];
	for (int i = 0; i < detected.length; i++) {
		FiducialDetector.Detected d = new FiducialDetector.Detected();
		d.binary = new GrayU8(1, 1);
		d.location = new Quadrilateral_F64();
		detected[i] = d;
	}
}
/**
 * Creates and installs a new undistort processor, selecting a 3-band planar
 * image type for color input and a single-band type otherwise.
 */
@Override
public void createNewProcessor() {
	Log.e("Undistort","Set Processing!");

	if( isColor ) {
		// Planar 3-band (presumably RGB — confirm against the camera pipeline)
		setProcessing(new UndistortProcessing(ImageType.pl(3,GrayU8.class)));
	} else {
		setProcessing(new UndistortProcessing(ImageType.single(GrayU8.class)));
	}
}
/**
 * Configures undistortion from the camera's intrinsic parameters, then applies
 * it to the currently stored distorted image.
 *
 * @param param Intrinsic camera parameters with radial distortion.
 */
@Override
public void setCalibration(CameraPinholeRadial param) {
	// Target model: same camera but without distortion coefficients
	CameraPinhole undistorted = new CameraPinhole(param);

	remove_p_to_p = LensDistortionOps_F32.transformChangeModel(
			AdjustmentType.FULL_VIEW, param, undistorted, false, null);
	undoRadial = LensDistortionOps.changeCameraModel(
			AdjustmentType.FULL_VIEW, BorderType.ZERO, param, undistorted, null,
			ImageType.single(GrayF32.class));

	// Re-render the stored image with the new transform
	undoRadialDistortion(distorted);
}
/**
 * Configures rectification from intrinsic parameters and a rectification matrix.
 *
 * @param param Intrinsic camera parameters with radial distortion.
 * @param rect Rectification homography, converted internally to 32-bit floats.
 */
public void setCalibration(CameraPinholeRadial param, DMatrixRMaj rect) {
	// The rectify operations below work in 32-bit floats
	FMatrixRMaj rectF32 = new FMatrixRMaj(3, 3);
	ConvertMatrixData.convert(rect, rectF32);

	undoRadial = RectifyImageOps.rectifyImage(
			param, rectF32, BorderType.ZERO, ImageType.single(GrayF32.class));
	remove_p_to_p = RectifyImageOps.transformPixelToRect(param, rectF32);
}
/**
 * Sets up the line-detection demonstration.
 *
 * @param examples Labeled example inputs shown to the user.
 * @param imageType Type of single-band input image.
 * @param derivType Type of image used to store derivatives.
 */
public DetectLineApp(List<PathLabel> examples, Class<T> imageType, Class<D> derivType) {
	super(examples, ImageType.single(imageType));
	this.derivType = derivType;
	this.imageType = imageType;

	// Workspace images; resized to the input's shape later
	blur = GeneralizedImageOps.createSingleBand(imageType, 1, 1);
	work = blur.createSameShape();

	add(gui, BorderLayout.CENTER);
	add(controls, BorderLayout.WEST);

	declareDetector();
}
/**
 * Computes the derivative by first Gaussian-blurring the input and then
 * applying a Gaussian derivative, i.e. DxG*(G*I), printing the response for
 * sigma = 1..3.
 */
public void derivByGaussThenGausDeriv() {
	System.out.println("DxG*(G*I)");

	T blurred = GeneralizedImageOps.createSingleBand(imageType, width, height);
	T response = GeneralizedImageOps.createSingleBand(imageType, width, height);

	for (int sigma = 1; sigma <= 3; sigma++) {
		ImageGradient<T, T> gradient = FactoryDerivative.gaussian(sigma, -1, imageType, imageType);
		BlurStorageFilter<T> blurFilter = FactoryBlurFilter.gaussian(ImageType.single(imageType), sigma, -1);

		blurFilter.process(input, blurred);
		gradient.process(blurred, response, derivY);

		printIntensity("Sigma "+sigma, response);
	}
}