.create( "result", worker.getRenderedImage(), returnValues.get(0).getEnvelope(), sds,
/**
 * The BandMerge operation takes indexed images and expands them, however in the context of
 * coverage view band merging we don't normally want that, e.g., raster mask bands are
 * represented as indexed but we really want to keep them in their binary, single band form. To
 * do so, the IndexColorModel is replaced by a ComponentColorModel
 *
 * @param coverage the coverage to prepare
 * @return the coverage itself, or a re-created coverage with the palette expanded away
 */
private GridCoverage2D prepareForBandMerge(GridCoverage2D coverage) {
    RenderedImage image = coverage.getRenderedImage();
    boolean singleBandIndexed =
            image.getSampleModel().getNumBands() == 1
                    && image.getColorModel() instanceof IndexColorModel;
    if (!singleBandIndexed) {
        // nothing to do, BandMerge can consume it as-is
        return coverage;
    }
    // strip the palette, keeping the raw single-band sample values
    ImageWorker iw = new ImageWorker(image);
    iw.removeIndexColorModel();
    RenderedImage expanded = iw.getRenderedImage();
    GridCoverageFactory factory = new GridCoverageFactory();
    return factory.create(
            coverage.getName(),
            expanded,
            coverage.getGridGeometry(),
            coverage.getSampleDimensions(),
            new GridCoverage[] {coverage},
            coverage.getProperties());
}
/**
 * Returns the {@linkplain #getRenderedImage rendered image} as a planar image.
 *
 * @return The planar image.
 * @see #getRenderedImage
 * @see #getRenderedOperation
 * @see #getImageAsROI
 */
public final PlanarImage getPlanarImage() {
    final RenderedImage rendered = getRenderedImage();
    // PlanarImage.wrapRenderedImage is a no-op if the image is already planar
    return PlanarImage.wrapRenderedImage(rendered);
}
/**
 * Returns a {@linkplain ROI Region Of Interest} built from the current {@linkplain
 * #getRenderedImage image}. If the image is multi-bands, then this method first computes an
 * estimation of its {@linkplain #intensity intensity}. Next, this method {@linkplain
 * #binarize() binarize} the image and constructs a {@link ROI} from the result.
 *
 * @return The image as a region of interest.
 * @see #getRenderedImage
 * @see #getPlanarImage
 * @see #getRenderedOperation
 */
public final ROI getImageAsROI() {
    // reduce the image to a binary mask first, then wrap it as a ROI
    binarize();
    final RenderedImage mask = getRenderedImage();
    return new ROI(mask);
}
/**
 * Pads the image with a NaN-valued border and translates it back so that the padded image keeps
 * the original image anchored at (-xPaddingSx, -yPaddingTop) relative to its former origin.
 *
 * <p>NOTE(review): the right ({@code xPaddingDx}) and bottom ({@code yPaddingBottom}) padding
 * amounts were previously hard-coded to {@code Math.abs(0)} even though callers supply them;
 * they are now honored — confirm callers expected right/bottom padding to be applied.
 *
 * @param xPaddingSx padding on the left side (sign is discarded via Math.abs)
 * @param xPaddingDx padding on the right side
 * @param yPaddingTop padding on the top side
 * @param yPaddingBottom padding on the bottom side
 * @param image the image to pad
 * @return the padded and translated image
 */
private RenderedImage setPadding(
        int xPaddingSx,
        int xPaddingDx,
        int yPaddingTop,
        int yPaddingBottom,
        RenderedImage image) {
    ImageWorker worker = new ImageWorker(image);
    // NaN border marks the padded pixels as no-data
    worker.border(
            Math.abs(xPaddingSx),
            Math.abs(xPaddingDx),
            Math.abs(yPaddingTop),
            Math.abs(yPaddingBottom),
            new BorderExtenderConstant(new double[] {Double.NaN}));
    // shift back so the padded area extends in the negative direction
    worker.translate((float) -xPaddingSx, (float) -yPaddingTop, null);
    return worker.getRenderedImage();
}
private RenderedImage postProcessRaster(RenderedImage image) { // alpha on the final image if (transparentColor != null) { if (LOGGER.isLoggable(Level.FINE)) LOGGER.fine("Support for alpha on final image"); final ImageWorker w = new ImageWorker(image); if (image.getSampleModel() instanceof MultiPixelPackedSampleModel) w.forceComponentColorModel(); return w.makeColorTransparent(transparentColor).getRenderedImage(); } return image; } }
@Override
RenderedImage process(
        ImageWorker inputWorker, Hints hints, Map<String, Expression> parameters) {
    // fail fast if the min/max expressions are missing
    checkParameters(parameters, KEY_MIN, KEY_MAX);
    Map<String, Object> processParams = getMinMaxParams(parameters);
    RenderedImage inputImage = inputWorker.getRenderedImage();
    final int dataType = inputImage.getSampleModel().getDataType();
    Utilities.ensureNonNull("processParams", processParams);
    double minData = (double) processParams.get(KEY_MIN);
    double maxData = (double) processParams.get(KEY_MAX);
    if (dataType == DataBuffer.TYPE_BYTE) {
        // Optimization for byte images, we use the lookup operation
        // full byte range requested: the stretch is an identity, return the input untouched
        if (maxData == MAX_BYTE && minData == MIN_BYTE) return inputImage;
        LookupTable table = createByteLookupTable(processParams);
        inputWorker.setRenderingHints(hints);
        inputWorker.lookup(table);
        return inputWorker.getRenderedImage();
    }
    //
    // STEP 2 use generic piecewise
    //
    // NOTE(review): unlike the byte path, the piecewise generator is handed the data type;
    // a sibling variant of this method omits this put — confirm which is intended
    processParams.put(KEY_DATATYPE, dataType);
    final PiecewiseTransform1D<DefaultPiecewiseTransform1DElement> transform =
            generatePiecewise(processParams);
    inputWorker.piecewise(transform, Integer.valueOf(0));
    return inputWorker.getRenderedImage();
}
@Override RenderedImage process( ImageWorker inputWorker, Hints hints, Map<String, Expression> parameters) { checkParameters(parameters, KEY_MIN, KEY_MAX); Map<String, Object> processParams = getMinMaxParams(parameters); RenderedImage inputImage = inputWorker.getRenderedImage(); final int dataType = inputImage.getSampleModel().getDataType(); Utilities.ensureNonNull("processParams", processParams); double minData = (double) processParams.get(KEY_MIN); double maxData = (double) processParams.get(KEY_MAX); if (dataType == DataBuffer.TYPE_BYTE) { if (maxData == MAX_BYTE && minData == MIN_BYTE) { return inputImage; } // Optimization for byte images, we use the lookup operation LookupTable table = createByteLookupTable(processParams); inputWorker.setRenderingHints(hints); inputWorker.lookup(table); return inputWorker.getRenderedImage(); } // // STEP 2 use generic piecewise // final PiecewiseTransform1D<DefaultPiecewiseTransform1DElement> transform = generatePiecewise(processParams); inputWorker.piecewise(transform, Integer.valueOf(0)); return inputWorker.getRenderedImage(); }
/**
 * Mosaics the sources through an {@link ImageWorker}, applying background, alpha, ROI and
 * threshold settings.
 *
 * @return the mosaicked image
 */
@Override
public RenderedImage process(
        RenderedImage[] sources,
        double[] backgroundValues,
        double[][] inputThreshold,
        PlanarImage[] sourceAlpha,
        ROI[] sourceROI,
        MosaicType mosaicType,
        RenderingHints localHints) {
    ImageWorker worker = new ImageWorker(localHints);
    worker.setBackground(backgroundValues);
    // no no-data ranges are provided in this code path
    worker.mosaic(sources, mosaicType, sourceAlpha, sourceROI, inputThreshold, null);
    return worker.getRenderedImage();
}
};
/**
 * Performs a contrast enhancement operation on the input image. Note that not all the contrast
 * enhancement operations have been implemented in a way that is generic enough to handle all
 * data types.
 *
 * @param inputWorker the {@link ImageWorker} holding the input image to work on.
 * @param hints {@link Hints} to control the contrast enhancement process.
 * @return a {@link RenderedImage} on which a contrast enhancement has been performed.
 */
private RenderedImage performContrastEnhancement(ImageWorker inputWorker, final Hints hints) {
    inputWorker.setRenderingHints(hints);
    if (contrastEnhancementMethod == null) {
        // no enhancement configured, hand back the worker's current image
        return inputWorker.getRenderedImage();
    }
    RenderedImage inputImage = inputWorker.getRenderedImage();
    // by this stage the image is expected to be single band
    assert inputImage.getSampleModel().getNumBands() == 1 : inputImage;
    ContrastEnhancementType ceType =
            ContrastEnhancementType.getType(contrastEnhancementMethod);
    return ceType.process(inputWorker, hints, contrastEnhancementMethod.getParameters());
}
/** * Post processes a blank image response, eventually making it transparent * * @param finalImage * @return */ public RenderedImage postProcessBlankResponse(RenderedImage finalImage, RenderingHints hints) { // prepare a ROI made of only zeroes ImageLayout layout = new ImageLayout( finalImage.getMinX(), finalImage.getMinY(), finalImage.getWidth(), finalImage.getHeight()); RenderedOp roi = ConstantDescriptor.create( (float) finalImage.getWidth(), (float) finalImage.getHeight(), new Byte[] {0}, new RenderingHints(JAI.KEY_IMAGE_LAYOUT, layout)); ImageWorker iw = new ImageWorker(finalImage); iw.setROI(new ROI(roi)); return iw.getRenderedImage(); } }
@Override RenderedImage process( ImageWorker inputWorker, Hints hints, Map<String, Expression> parameters) { // // IT WORKS ONLY ON BYTE DATA TYPE!!! // // convert the input image to 8 bit inputWorker.rescaleToBytes(); // compute the histogram final Histogram h = inputWorker.removeRenderingHints().getHistogram(null, null, null); // do the actual lookup Map<String, Object> params = new HashMap<String, Object>(); params.put(KEY_HISTOGRAM, h); LookupTable table = createByteLookupTable(params); inputWorker.setRenderingHints(hints); inputWorker.lookup(table); return inputWorker.getRenderedImage(); }
/** * Returns the {@linkplain #getRenderedImage rendered image} as a rendered operation. * * @return The rendered operation. * @see #getRenderedImage * @see #getPlanarImage * @see #getImageAsROI */ public final RenderedOp getRenderedOperation() { final RenderedImage image = getRenderedImage(); if (image instanceof RenderedOp) { return (RenderedOp) image; } // Creating a parameter block ParameterBlock pb = new ParameterBlock(); pb.setSource(image, 0); // Executing the operation return JAI.create("Null", pb, getRenderingHints()); }
@Override
RenderedImage process(
        ImageWorker inputWorker, Hints hints, Map<String, Expression> parameters) {
    RenderedImage inputImage = inputWorker.getRenderedImage();
    // this operation is only defined on single band data
    assert inputImage.getSampleModel().getNumBands() == 1 : inputImage;
    final int dataType = inputImage.getSampleModel().getDataType();
    if (dataType == DataBuffer.TYPE_BYTE) {
        //
        // Optimisation for byte images
        //
        // byte path needs no extrema: the lookup table covers the whole 0-255 range
        LookupTable table = createByteLookupTable(EMPTY_MAP);
        inputWorker.lookup(table);
        return inputWorker.getRenderedImage();
    }
    // General case, we use the piecewise1D transform
    //
    // STEP 1 do the extrema
    //
    // hints removed so the extrema computation runs on the plain image
    inputWorker.removeRenderingHints();
    final double[] minimum = inputWorker.getMinimums();
    final double[] maximum = inputWorker.getMaximums();
    //
    // STEP 2 use generic piecewise
    //
    // only band 0 extrema are used — consistent with the single-band assertion above
    final PiecewiseTransform1D<DefaultPiecewiseTransform1DElement> transform =
            generatePiecewise(setMinMaxParams(minimum[0], maximum[0]));
    inputWorker.piecewise(transform, Integer.valueOf(0));
    return inputWorker.getRenderedImage();
}
/**
 * Relies on the {@link ImageWorker} to mask a certain color from an image.
 *
 * @param transparentColor the {@link Color} to make transparent
 * @param image the {@link RenderedImage} to work on
 * @return a new {@link RenderedImage} where the provided {@link Color} has turned into
 *     transparent.
 * @throws IllegalStateException
 */
public static RenderedImage maskColor(final Color transparentColor, final RenderedImage image)
        throws IllegalStateException {
    Utilities.ensureNonNull("image", image);
    if (transparentColor == null) {
        // no color to mask, hand the image back untouched
        return image;
    }
    final ImageWorker worker = new ImageWorker(image);
    // packed sample models cannot carry an alpha channel, expand first
    final boolean packed = image.getSampleModel() instanceof MultiPixelPackedSampleModel;
    if (packed) {
        worker.forceComponentColorModel();
    }
    return worker.makeColorTransparent(transparentColor).getRenderedImage();
}
/**
 * Forces the image to start in the origin and have a rgb/rbga/gray/gray+alpha structure
 *
 * @param image1 the image to normalize
 * @return the normalized image
 */
private RenderedImage normalizeImage(RenderedImage image1) {
    ImageWorker worker = new ImageWorker(image1);
    worker.forceColorSpaceRGB();
    worker.forceComponentColorModel();
    image1 = worker.getRenderedImage();
    boolean shifted = image1.getMinX() != 0 || image1.getMinY() != 0;
    if (shifted) {
        // copy into a BufferedImage, which is always anchored at the origin
        image1 = PlanarImage.wrapRenderedImage(image1).getAsBufferedImage();
    }
    return image1;
}
// Builds a GridCoverage2D out of a mosaic element, making sure a ROI covering the image
// footprint is present both on the image and on the coverage properties.
private GridCoverage2D createCoverageFromElement(
        MosaicElement mosaicElement,
        GridCoverageFactory factory,
        ReferencedEnvelope submosaicBBOX) {
    RenderedImage image = mosaicElement.getSource();
    Object roiProperty = image.getProperty("ROI");
    if (!(roiProperty instanceof ROI)) {
        // need the ROI before warp, as the area of validity needs to be warped along, so
        // if missing add one now
        // the synthetic ROI spans the full image footprint in pixel space
        ROIGeometry roi =
                new ROIGeometry(
                        JTS.toGeometry(
                                new Envelope(
                                        image.getMinX(),
                                        image.getMinX() + image.getWidth(),
                                        image.getMinY(),
                                        image.getMinY() + image.getHeight())));
        ImageWorker iw = new ImageWorker(image);
        iw.setROI(roi);
        image = iw.getRenderedImage();
        roiProperty = roi;
    }
    // move the property at the coverage level too
    Map<String, Object> properties = new HashMap<>();
    CoverageUtilities.setROIProperty(properties, (ROI) roiProperty);
    return factory.create("submosaic", image, submosaicBBOX, null, null, properties);
}
/**
 * We override this one to get some extra behavior that ImageWorker has (ROI, paletted images
 * management)
 *
 * @param parameters the mosaic parameter block, holding sources and operation parameters
 * @param hints rendering hints forwarded to the worker
 * @return the mosaicked image
 */
protected RenderedImage createRenderedImage(
        final ParameterBlockJAI parameters, final RenderingHints hints) {
    // (fix) removed a dead `parameters.getSources();` call whose result was discarded
    RenderedImage[] images =
            (RenderedImage[])
                    parameters
                            .getSources()
                            .toArray(new RenderedImage[parameters.getSources().size()]);
    MosaicType type = getParameter(parameters, 0);
    PlanarImage[] alphas = getParameter(parameters, ALPHA_PARAM);
    ROI[] rois = getParameter(parameters, ROI_PARAM);
    double[][] thresholds = getParameter(parameters, THRESHOLD_PARAM);
    Range[] noData = getParameter(parameters, NODATA_RANGE_PARAM);
    double[] backgrounds = getParameter(parameters, BACKGROUND_PARAM);
    // delegate the actual mosaicking to ImageWorker for its ROI/palette handling
    ImageWorker iw = new ImageWorker();
    iw.setRenderingHints(hints);
    iw.setBackground(backgrounds);
    iw.mosaic(images, type, alphas, rois, thresholds, noData);
    return iw.getRenderedImage();
}
@Override public GridCoverage2D read(GeneralParameterValue[] parameters) throws IOException { GridCoverage2D originalCoverage = super.read(parameters); RenderedImage source = new ImageWorker(originalCoverage.getRenderedImage()) .format(DataBuffer.TYPE_USHORT) .getRenderedImage(); TiledImage shortImage = new TiledImage( source.getMinX(), source.getMinY(), source.getWidth(), source.getHeight(), source.getTileGridXOffset(), source.getTileGridYOffset(), source.getSampleModel(), null); shortImage.set(source); // force color model to be null, this also occurs in real cases GridCoverage2D coverage = CoverageFactoryFinder.getGridCoverageFactory(null) .create( originalCoverage.getName(), shortImage, originalCoverage.getEnvelope2D()); return coverage; } };
// Mosaics a list of tiles whose images may have different color models, translating each
// tile to its position first; returns the composed image.
private RenderedImage mosaicHeterogeneousImages(List<ImageInTile> sources) {
    // at the time of writing, only JAI-EXT mosaic can handle a mix of different
    // color models, we need to use it explicitly
    RenderedImage image;
    final ParameterBlockJAI pb =
            new ParameterBlockJAI(new it.geosolutions.jaiext.mosaic.MosaicDescriptor());
    for (ImageInTile it : sources) {
        if (it.posx != 0 || it.posy != 0) {
            // move the tile image to its position within the mosaic
            ImageWorker iw = new ImageWorker(it.image);
            iw.translate(
                    it.posx,
                    it.posy,
                    Interpolation.getInstance(Interpolation.INTERP_NEAREST));
            RenderedImage translated = iw.getRenderedImage();
            pb.addSource(translated);
        } else {
            pb.addSource(it.image);
        }
    }
    // overlay: the first source wins where tiles overlap; no alpha/ROI/threshold/nodata used
    pb.setParameter("mosaicType", MosaicDescriptor.MOSAIC_TYPE_OVERLAY);
    pb.setParameter("sourceAlpha", null);
    pb.setParameter("sourceROI", null);
    pb.setParameter("sourceThreshold", null);
    pb.setParameter("backgroundValues", new double[] {0});
    pb.setParameter("nodata", null);
    // merge JAI defaults with the GeoTools ones before invoking the RIF directly
    RenderingHints hints = new Hints(JAI.getDefaultInstance().getRenderingHints());
    hints.putAll(GeoTools.getDefaultHints());
    image = new MosaicRIF().create(pb, hints);
    return image;
}