/**
 * Creates a pixel likelihood model based on independent hue and saturation histograms.
 *
 * @param maxPixelValue Maximum value a pixel can have.
 * @param numHistogramBins Number of bins in each histogram.
 * @param imageType Type of input image. Must be a 3-band Planar image of U8 data.
 * @return The pixel likelihood function.
 */
public static <T extends ImageMultiBand> PixelLikelihood<T> likelihoodHueSatHistIndependent(
		double maxPixelValue , int numHistogramBins , ImageType<T> imageType )
{
	// validate the image structure before looking at the data type
	if( imageType.getFamily() != ImageType.Family.PLANAR)
		throw new IllegalArgumentException("Only Planar images supported currently");
	if( imageType.getNumBands() != 3 )
		throw new IllegalArgumentException("Input image type must have 3 bands.");

	if( imageType.getDataType() != ImageDataType.U8 )
		throw new RuntimeException("Band type not yet supported "+imageType.getDataType());

	return (PixelLikelihood)new LikelihoodHueSatHistInd_PL_U8((int)maxPixelValue,numHistogramBins);
}
/**
 * Creates a pixel likelihood model based on a coupled (joint) histogram across all bands.
 *
 * @param maxPixelValue Maximum value a pixel can have.
 * @param numHistogramBins Number of bins in the histogram.
 * @param imageType Type of input image. Gray U8 or Planar U8 are supported.
 * @return The pixel likelihood function.
 */
public static <T extends ImageBase> PixelLikelihood<T> likelihoodHistogramCoupled(
		double maxPixelValue , int numHistogramBins , ImageType<T> imageType )
{
	ImageType.Family family = imageType.getFamily();

	if( family == ImageType.Family.GRAY ) {
		if( imageType.getDataType() != ImageDataType.U8 )
			throw new IllegalArgumentException("Only U8 currently supported");
		return (PixelLikelihood)new LikelihoodHistCoupled_SB_U8((int)maxPixelValue,numHistogramBins);
	} else if( family == ImageType.Family.PLANAR ) {
		if( imageType.getDataType() != ImageDataType.U8 )
			throw new RuntimeException("Band type not yet supported "+imageType.getDataType());
		return (PixelLikelihood)new LikelihoodHistCoupled_PL_U8((int)maxPixelValue,numHistogramBins);
	} else {
		throw new IllegalArgumentException("Image family not yet supported. Try Planar");
	}
}
/**
 * Creates a pixel likelihood model based on a coupled hue/saturation histogram.
 *
 * @param maxPixelValue Maximum value a pixel can have.
 * @param numHistogramBins Number of bins in the histogram.
 * @param imageType Type of input image. Must be a 3-band Planar image of U8 data.
 * @return The pixel likelihood function.
 */
public static <T extends ImageMultiBand> PixelLikelihood<T> likelihoodHueSatHistCoupled(
		double maxPixelValue , int numHistogramBins , ImageType<T> imageType )
{
	// validate the image structure before looking at the data type
	if( imageType.getFamily() != ImageType.Family.PLANAR)
		throw new IllegalArgumentException("Only Planar images supported currently");
	if( imageType.getNumBands() != 3 )
		throw new IllegalArgumentException("Input image type must have 3 bands.");

	if( imageType.getDataType() != ImageDataType.U8 )
		throw new RuntimeException("Band type not yet supported "+imageType.getDataType());

	return (PixelLikelihood)new LikelihoodHueSatHistCoupled_PL_U8((int)maxPixelValue,numHistogramBins);
}
/**
 * Wraps a dense describer so inputs of a different data type can be converted before description.
 *
 * @param describer Describer whose native image type differs from the input type.
 * @param inputType Type of image which will be passed in by the caller.
 */
public DescribeImageDense_Convert(DescribeImageDense<ImageBase,Desc> describer , ImageType<T> inputType ) {
	ImageType nativeType = describer.getImageType();

	// conversion only makes sense within the same family but across data types
	if( inputType.getFamily() != nativeType.getFamily() )
		throw new IllegalArgumentException("Image types must have the same family");
	if( inputType.getDataType() == nativeType.getDataType() )
		throw new IllegalArgumentException("Data types are the same. Why do you want to use this class?");

	this.describer = describer;
	this.inputType = inputType;
	workspace = nativeType.createImage(1,1);
}
/**
 * Creates an image distortion for interleaved images.
 *
 * @param cached If true a cached distortion would be used. Not currently supported.
 * @param interp Interpolation applied when sampling the input image.
 * @param outputType Type of output image. U8 and F32 data types are supported.
 * @return The image distort.
 */
public static <Input extends ImageInterleaved<Input>, Output extends ImageInterleaved<Output>>
ImageDistort<Input, Output> distortIL(boolean cached, InterpolatePixelMB<Input> interp,
									  ImageType<Output> outputType) {
	if( cached )
		throw new IllegalArgumentException("Cached not supported yet");

	switch( outputType.getDataType() ) {
		case U8:
			return (ImageDistort<Input, Output>) new ImplImageDistort_IL_U8((InterpolatePixelMB)interp);
		case F32:
			return (ImageDistort<Input, Output>) new ImplImageDistort_IL_F32((InterpolatePixelMB)interp);
		default:
			throw new IllegalArgumentException("Not supported yet");
	}
}
/**
 * Convolve-and-down-sample operation for Planar images.
 *
 * @param down Single-band convolve-down operation used for the bands.
 * @param numBands Number of bands in the images being processed.
 */
public PlanarConvolveDown(ConvolveDown<In, Out> down, int numBands ) {
	this.down = down;

	// derive planar image types from the single-band types the convolution operates on
	this.inputType = ImageType.pl(numBands, down.getInputType().getDataType());
	this.outputType = ImageType.pl(numBands, down.getOutputType().getDataType());
}
/**
 * Converts {@link ImageInterleaved} image into Bitmap.
 *
 * @see #declareStorage(android.graphics.Bitmap, byte[])
 *
 * @param input Input interleaved image. Only U8 and F32 data types are supported.
 * @param output Output Bitmap image. Must have the same width and height as the input.
 * @param storage Byte array used for internal storage. If null it will be declared internally.
 */
public static <T extends ImageInterleaved<T>> void interleavedToBitmap(T input, Bitmap output, byte[] storage) {
	if( output.getWidth() != input.getWidth() || output.getHeight() != input.getHeight() ) {
		throw new IllegalArgumentException("Image shapes are not the same");
	}

	if( storage == null )
		storage = declareStorage(output,null);

	// convert pixels into the intermediate byte array, laid out for the Bitmap's config
	if( input.getImageType().getDataType() == ImageDataType.U8 )
		ImplConvertBitmap.interleavedToArray((InterleavedU8) input, storage, output.getConfig());
	else if( input.getImageType().getDataType() == ImageDataType.F32 )
		ImplConvertBitmap.interleavedToArray((InterleavedF32) input, storage, output.getConfig());
	else
		throw new IllegalArgumentException("Unsupported BoofCV Type");

	// push the converted pixel data into the Bitmap
	output.copyPixelsFromBuffer(ByteBuffer.wrap(storage));
}
/**
 * Converts an {@link ImageInterleaved} image into a Bitmap.
 *
 * @see #declareStorage(android.graphics.Bitmap, byte[])
 *
 * @param input Input interleaved image. Only U8 and F32 data types are supported.
 * @param output Output Bitmap image. Must match the input's width and height.
 * @param storage Byte array used for internal storage. If null it will be declared internally.
 */
public static <T extends ImageInterleaved> void interleavedToBitmap(T input, Bitmap output, byte[] storage) {
	boolean sameShape = output.getWidth() == input.getWidth() && output.getHeight() == input.getHeight();
	if( !sameShape )
		throw new IllegalArgumentException("Image shapes are not the same");

	byte[] buffer = storage != null ? storage : declareStorage(output,null);

	ImageDataType dataType = input.getImageType().getDataType();
	if( dataType == ImageDataType.U8 ) {
		ImplConvertBitmap.interleavedToArray((InterleavedU8) input, buffer, output.getConfig());
	} else if( dataType == ImageDataType.F32 ) {
		ImplConvertBitmap.interleavedToArray((InterleavedF32) input, buffer, output.getConfig());
	} else {
		throw new IllegalArgumentException("Unsupported BoofCV Type");
	}

	// push the converted pixel data into the Bitmap
	output.copyPixelsFromBuffer(ByteBuffer.wrap(buffer));
}
/**
 * Converts an interleaved YUV image into a Bitmap.
 *
 * @param input Input interleaved image. Only U8 data is supported.
 * @param output Output Bitmap. Must match the input's shape. ARGB_8888 or RGB_565 configs only.
 * @param storage Byte array used for internal storage. If null it will be declared internally.
 */
public static <T extends ImageInterleaved<T>> void interleavedYuvToBitmap(T input, Bitmap output, byte[] storage) {
	if( input.getWidth() != output.getWidth() || input.getHeight() != output.getHeight() )
		throw new IllegalArgumentException("Image shapes are not the same");

	if( storage == null )
		storage = declareStorage(output,null);

	if( input.getImageType().getDataType() == ImageDataType.U8 ) {
		Bitmap.Config config = output.getConfig();
		if( config == Bitmap.Config.ARGB_8888 ) {
			ImplConvertBitmap.interleavedYuvToArgb8888((InterleavedU8) input, storage);
			output.copyPixelsFromBuffer(ByteBuffer.wrap(storage));
			return;
		} else if( config == Bitmap.Config.RGB_565 ) {
			ImplConvertBitmap.interleavedYuvToRGB565((InterleavedU8) input, storage);
			output.copyPixelsFromBuffer(ByteBuffer.wrap(storage));
			return;
		}
	}
	// reaches here for non-U8 data or an unsupported Bitmap config
	throw new IllegalArgumentException("Unsupported BoofCV Type");
}
/**
 * Configuration constructor.
 *
 * @param distort Used to apply image distortion from different input images
 * @param equiWidth Width of output equirectangular image
 * @param equiHeight Height of output equirectangular image
 * @param imageType Type of image it processes and outputs. Must be a 32-bit floating point image.
 */
public MultiCameraToEquirectangular(ImageDistort<T,T> distort , int equiWidth , int equiHeight , ImageType<T> imageType ) {

	// rejects integer images and any float type that isn't 32-bit (e.g. F64)
	if( imageType.getDataType().isInteger() || imageType.getDataType().getNumBits() != 32 )
		throw new IllegalArgumentException("Must be a 32 bit floating point image");

	this.distort = distort;
	this.equiWidth = equiWidth;
	this.equHeight = equiHeight; // NOTE: field is spelled 'equHeight' elsewhere in the class

	tools.configure(equiWidth, equiHeight);

	weightImage = new GrayF32(equiWidth,equiHeight);
	averageImage = imageType.createImage(equiWidth, equiHeight);
	workImage = averageImage.createSameShape();
	cameraRendered = averageImage.createSameShape();
}
/**
 * Creates an updater for discrete pyramids where a Gaussian is convolved across the input
 * prior to sub-sampling.
 *
 * @param scaleFactors Scale factor of each layer in the pyramid.
 * @param sigma Gaussian sigma. If &lt; 0 then a sigma is selected using the radius. Try -1.
 * @param radius Radius of the Gaussian kernel. If &lt; 0 then the radius is selected using sigma. Try 2.
 * @param saveOriginalReference Passed on to the pyramid; presumably controls whether the first
 *                              layer references the input image instead of copying it — TODO confirm.
 * @param imageType Type of input image.
 * @return PyramidDiscrete
 */
public static <T extends ImageBase<T>> PyramidDiscrete<T> discreteGaussian( int[] scaleFactors ,
																			double sigma , int radius ,
																			boolean saveOriginalReference,
																			ImageType<T> imageType )
{
	// select the 1D kernel class which matches the image's data type
	Class<Kernel1D> kernelType = FactoryKernel.getKernelType(imageType.getDataType(),1);

	Kernel1D kernel = FactoryKernelGaussian.gaussian(kernelType,sigma,radius);

	return new PyramidDiscreteSampleBlur<>(kernel, sigma, imageType, saveOriginalReference, scaleFactors);
}
/**
 * Configuration constructor.
 *
 * @param distort Used to apply image distortion from different input images
 * @param equiWidth Width of output equirectangular image
 * @param equiHeight Height of output equirectangular image
 * @param imageType Type of image it processes and outputs. Must be 32-bit floating point.
 */
public MultiCameraToEquirectangular(ImageDistort<T,T> distort , int equiWidth , int equiHeight , ImageType<T> imageType ) {

	// only 32-bit floating point images are supported
	boolean isFloat32 = !imageType.getDataType().isInteger() && imageType.getDataType().getNumBits() == 32;
	if( !isFloat32 )
		throw new IllegalArgumentException("Must be a 32 bit floating point image");

	this.distort = distort;
	this.equiWidth = equiWidth;
	this.equHeight = equiHeight;

	tools.configure(equiWidth, equiHeight);

	weightImage = new GrayF32(equiWidth,equiHeight);
	averageImage = imageType.createImage(equiWidth, equiHeight);
	workImage = averageImage.createSameShape();
	cameraRendered = averageImage.createSameShape();
}
/**
 * Creates a SLIC superpixel segmentation algorithm.
 *
 * @param config Configuration. Must not be null since the number of segments must be specified.
 * @param imageType Type of input image. Gray or Planar, U8 or F32.
 * @return The segmentation algorithm.
 */
public static<T extends ImageBase> SegmentSlic<T> slic( ConfigSlic config , ImageType<T> imageType ) {
	if( config == null )
		throw new IllegalArgumentException("No default configuration since the number of segments must be specified.");

	switch( imageType.getFamily() ) {
		case GRAY:
			switch( imageType.getDataType() ) {
				case U8:
					return (SegmentSlic)new SegmentSlic_U8(config.numberOfRegions,
							config.spacialWeight,config.totalIterations,config.connectRule);
				case F32:
					return (SegmentSlic)new SegmentSlic_F32(config.numberOfRegions,
							config.spacialWeight,config.totalIterations,config.connectRule);
			}
			break;

		case PLANAR: {
			int numBands = imageType.getNumBands();
			switch( imageType.getDataType() ) {
				case U8:
					return (SegmentSlic)new SegmentSlic_PlU8(config.numberOfRegions,
							config.spacialWeight,config.totalIterations,config.connectRule,numBands);
				case F32:
					return (SegmentSlic)new SegmentSlic_PlF32(config.numberOfRegions,
							config.spacialWeight,config.totalIterations,config.connectRule,numBands);
			}
		} break;
	}

	// falls through for unsupported families or data types
	throw new IllegalArgumentException("Unknown imageType or connect rule");
}
/**
 * Processing wrapper around a stationary background model.
 *
 * @param model Background model used to classify pixels.
 */
public BackgroundProcessing(BackgroundModelStationary<T> model ) {
	super(model.getImageType());
	this.model = model;
	// workspace images start at 1x1; presumably reshaped once the input size is known — TODO confirm
	this.scaled = model.getImageType().createImage(1, 1);
	this.work = GeneralizedImageOps.createSingleBand(model.getImageType().getDataType(),1,1);
}
/** * Converts an image in JCodec format into one in BoofCV format. * @param input JCodec image * @param output BoofCV image */ public static void convertToBoof(Picture input, ImageBase output) { if( input.getColor() == ColorSpace.RGB ) { ImplConvertJCodecPicture.RGB_to_PLU8(input, (Planar) output); } else if( input.getColor() == ColorSpace.YUV420 ) { if( output instanceof Planar) { Planar ms = (Planar)output; if( ms.getImageType().getDataType() == ImageDataType.U8 ) { ImplConvertJCodecPicture.yuv420_to_PlRgb_U8(input, ms); } else if( ms.getImageType().getDataType() == ImageDataType.F32 ) { ImplConvertJCodecPicture.yuv420_to_PlRgb_F32(input, ms); } } else if( output instanceof GrayU8) { ImplConvertJCodecPicture.yuv420_to_U8(input, (GrayU8) output); } else if( output instanceof GrayF32) { ImplConvertJCodecPicture.yuv420_to_F32(input, (GrayF32) output); } else { throw new RuntimeException("Unexpected output image type"); } } } }
/**
 * Converts an image in JCodec format into one in BoofCV format.
 *
 * @param input JCodec image. Only RGB and YUV420 color spaces are handled.
 * @param output BoofCV image. For RGB input this must be a Planar; for YUV420 it may be
 *               Planar (U8 or F32), GrayU8, or GrayF32.
 */
public static void convertToBoof(Picture input, ImageBase output) {
	if( input.getColor() == ColorSpace.RGB ) {
		ImplConvertJCodecPicture.RGB_to_PLU8(input, (Planar) output);
	} else if( input.getColor() == ColorSpace.YUV420 ) {
		if( output instanceof Planar) {
			Planar ms = (Planar)output;

			if( ms.getImageType().getDataType() == ImageDataType.U8 ) {
				ImplConvertJCodecPicture.yuv420_to_PlRgb_U8(input, ms);
			} else if( ms.getImageType().getDataType() == ImageDataType.F32 ) {
				ImplConvertJCodecPicture.yuv420_to_PlRgb_F32(input, ms);
			}
			// NOTE(review): a Planar with any other data type silently falls through without
			// converting anything — confirm whether this should throw instead
		} else if( output instanceof GrayU8) {
			ImplConvertJCodecPicture.yuv420_to_U8(input, (GrayU8) output);
		} else if( output instanceof GrayF32) {
			ImplConvertJCodecPicture.yuv420_to_F32(input, (GrayF32) output);
		} else {
			throw new RuntimeException("Unexpected output image type");
		}
	}
	// NOTE(review): input color spaces other than RGB and YUV420 are silently ignored,
	// leaving 'output' unmodified — confirm whether this should throw instead
}
/**
 * Given different types of input images it creates the correct algorithm for computing the image
 * gradient. The actual calculation is always done using {@link DerivativeType#THREE}.
 *
 * @param imageType Type of input image. Data type must be F32; Gray or Planar family.
 * @return Gradient computation which outputs single-band GrayF32 derivatives.
 */
static <Input extends ImageBase> ImageGradient<Input,GrayF32> createGradient( ImageType<Input> imageType ) {
	ImageGradient<Input,GrayF32> gradient;
	ImageType<GrayF32> typeF32 = ImageType.single(GrayF32.class);

	if( imageType.getDataType() != ImageDataType.F32 )
		throw new IllegalArgumentException("Input image type must be F32");

	if( imageType.getFamily() == ImageType.Family.GRAY) {
		gradient = FactoryDerivative.gradient(DerivativeType.THREE,imageType, typeF32);
	} else if( imageType.getFamily() == ImageType.Family.PLANAR ) {
		// per-band gradients are reduced to a single band using the MAX_F rule
		ImageType<Planar<GrayF32>> typePF32 = ImageType.pl(imageType.getNumBands(),GrayF32.class);
		ImageGradient<Planar<GrayF32>,Planar<GrayF32>> gradientMB =
				FactoryDerivative.gradient(DerivativeType.THREE,typePF32, typePF32);
		gradient = (ImageGradient)FactoryDerivative.gradientReduce(gradientMB, DerivativeReduceType.MAX_F, GrayF32.class);
	} else {
		throw new IllegalArgumentException("Unsupported image type "+imageType);
	}

	return gradient;
}
/**
 * Creates a nearest-neighbor interpolator for multi-band images.
 *
 * @param imageType Type of image. Only interleaved images are currently supported.
 * @param borderType How the border is handled. If null no border handler is assigned.
 * @return The interpolator.
 */
public static <T extends ImageMultiBand<T>> InterpolatePixelMB<T> nearestNeighborPixelMB(ImageType<T> imageType, BorderType borderType ) {
	if( imageType.getFamily() != ImageType.Family.INTERLEAVED )
		throw new IllegalArgumentException("Only interleaved current supported here");

	InterpolatePixelMB<T> interpolator;
	switch( imageType.getDataType()) {
		case U8:  interpolator = (InterpolatePixelMB<T>)new NearestNeighborPixel_IL_U8(); break;
		case S16: interpolator = (InterpolatePixelMB<T>)new NearestNeighborPixel_IL_S16(); break;
		case S32: interpolator = (InterpolatePixelMB<T>)new NearestNeighborPixel_IL_S32(); break;
		case F32: interpolator = (InterpolatePixelMB<T>)new NearestNeighborPixel_IL_F32(); break;
		default: throw new IllegalArgumentException("Add support");
	}

	if( borderType != null )
		interpolator.setBorder(FactoryImageBorder.interleaved(imageType.getImageClass(), borderType));

	return interpolator;
}
/**
 * Creates a generalized multi-band wrapper for the specified image type.
 *
 * @param imageType Type of image being wrapped.
 * @return The generalized multi-band accessor.
 */
public static GImageMultiBand create( ImageType imageType ) {
	switch( imageType.getFamily() ) {
		case GRAY:
			// single-band images are adapted to the multi-band interface
			return new GSingleToMB(FactoryGImageGray.create(imageType.getImageClass()));

		case PLANAR:
			return new PL();

		case INTERLEAVED:
			switch( imageType.getDataType() ) {
				case U8:  return new IL_U8();
				case S8:  return new IL_S8();
				case F32: return new IL_F32();
				default: throw new IllegalArgumentException("Need to support more data types");
			}

		default:
			throw new RuntimeException("Add support for more families");
	}
}
/**
 * Returns the image type used to store the derivative of the specified input image type.
 *
 * @param imageType Type of input image.
 * @return Type of derivative image.
 */
public static <I extends ImageBase<I>, D extends ImageBase<D>> ImageType<D> getDerivativeType( ImageType<I> imageType ) {
	ImageType.Family family = imageType.getFamily();

	if( family == ImageType.Family.GRAY ) {
		return ImageType.single(getDerivativeType(imageType.getImageClass()));
	} else if( family == ImageType.Family.PLANAR ) {
		return ImageType.pl(imageType.getNumBands(), getDerivativeType(imageType.getImageClass()));
	} else if( family == ImageType.Family.INTERLEAVED ) {
		int numBands = imageType.getNumBands();
		ImageDataType dataType = imageType.getDataType();

		// floating point types keep their type; unsigned integers widen to signed
		if( dataType == ImageDataType.F32 )
			return (ImageType)ImageType.il(numBands, ImageDataType.F32);
		else if( dataType == ImageDataType.F64 )
			return (ImageType)ImageType.il(numBands, ImageDataType.F64);
		else if( dataType == ImageDataType.U8 )
			return (ImageType)ImageType.il(numBands, ImageDataType.S16);
		else if( dataType == ImageDataType.U16 )
			return (ImageType)ImageType.il(numBands, ImageDataType.S32);
	}

	// unsupported family or interleaved data type
	throw new IllegalArgumentException("Unknown image type");
}