import static org.junit.Assert.*;

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExternalResource;

/** Exercises the {@code ResourceFile} rule against the bundled /res.txt resource. */
public class TestSomething {

    /** Materializes the classpath resource for the duration of each test. */
    @Rule
    public ResourceFile res = new ResourceFile("/res.txt");

    @Test
    public void test() throws Exception {
        // The rule must expose non-empty content backed by an existing file.
        String content = res.getContent();
        assertTrue(content.length() > 0);
        assertTrue(res.getFile().exists());
    }
}
/**
 * Frees resource state that is no longer needed, so it does not occupy heap
 * space for the remainder of the build.
 *
 * <p>May run twice when an incremental build falls back to a full build.
 */
private void cleanup() {
    processedInputs = null;
    fileValidity.clear();
}
@Override
public void run() {
    // Pure delegation: all work happens in the wrapped action.
    workAction.run();
}
} // NOTE(review): closes an enclosing declaration that begins outside this chunk.
// NOTE(review): this chunk is a truncated fragment of an incremental resource-merge
// task action — several closing braces and intervening statements are missing
// (e.g. the loop header that defines resourceSet, the argument to getLogger().info,
// and the catch block). Tokens are preserved as-is; do not treat as compilable.
ResourceMerger merger = new ResourceMerger();
try {
    // No saved incremental state: fall back to a full run.
    if (!merger.loadFromBlob(getIncrementalFolder(), true /*incrementalState*/)) {
        doFullTaskAction();
        return;
        resourceSet.setNormalizeResources(normalizeResources);
    // The set of inputs changed since the last run: incremental update is unsafe.
    if (!merger.checkValidUpdate(resourceSets)) {
        getLogger().info("Changed Resource sets: full task run!");
        doFullTaskAction();
    // Locate which data set (if any) owns the changed file.
    merger.findDataSetContaining(changedFile, fileValidity);
    if (fileValidity.getStatus() == FileValidity.FileStatus.UNKNOWN_FILE) {
        doFullTaskAction();
        return;
    } else if (fileValidity.getStatus() == FileValidity.FileStatus.VALID_FILE) {
        // Apply the single-file change; on failure the log call below is truncated.
        if (!fileValidity.getDataSet().updateWith(
                fileValidity.getSourceFile(), changedFile, entry.getValue(), getILogger())) {
            getLogger().info(
    // Write out the merged result and persist state for the next incremental run.
    MergedResourceWriter writer = new MergedResourceWriter(
            getOutputDir(), getCruncher(), getCrunchPng(), getProcess9Patch(), getPublicFile());
    writer.setInsertSourceMarkers(getInsertSourceMarkers());
    merger.mergeData(writer, false /*doCleanUp*/);
    merger.writeBlobTo(getIncrementalFolder(), writer);
@Override protected void doFullTaskAction() { // this is full run, clean the previous output File destinationDir = getOutputDir(); emptyFolder(destinationDir); List<ResourceSet> resourceSets = getInputResourceSets(); // create a new merger and populate it with the sets. ResourceMerger merger = new ResourceMerger(); try { for (ResourceSet resourceSet : resourceSets) { resourceSet.setNormalizeResources(normalizeResources); // set needs to be loaded. resourceSet.loadFromFiles(getILogger()); merger.addDataSet(resourceSet); } // get the merged set and write it down. MergedResourceWriter writer = new MergedResourceWriter( destinationDir, getCruncher(), getCrunchPng(), getProcess9Patch(), getPublicFile()); writer.setInsertSourceMarkers(getInsertSourceMarkers()); merger.mergeData(writer, false /*doCleanUp*/); // No exception? Write the known state. merger.writeBlobTo(getIncrementalFolder(), writer); } catch (MergingException e) { System.out.println(e.getMessage()); merger.cleanBlob(getIncrementalFolder()); throw new ResourceException(e.getMessage(), e); } }
// NOTE(review): this chunk is a truncated fragment of an incremental asset-merge
// task action — closing braces and intervening statements are missing (e.g. the
// trailing arguments of updateWith and the tail of the catch block). Tokens are
// preserved as-is; do not treat as compilable.
AssetMerger merger = new AssetMerger();
try {
    // No saved incremental state: fall back to a full run.
    if (!merger.loadFromBlob(getIncrementalFolder(), true /*incrementalState*/)) {
        doFullTaskAction();
        return;
    // The set of inputs changed since the last run: incremental update is unsafe.
    if (!merger.checkValidUpdate(assetSets)) {
        getLogger().info("Changed Asset sets: full task run!");
        doFullTaskAction();
    // Locate which data set (if any) owns the changed file.
    merger.findDataSetContaining(changedFile, fileValidity);
    if (fileValidity.getStatus() == FileValidity.FileStatus.UNKNOWN_FILE) {
        doFullTaskAction();
        return;
    } else if (fileValidity.getStatus() == FileValidity.FileStatus.VALID_FILE) {
        // Apply the single-file change; the argument list is truncated here.
        if (!fileValidity.getDataSet().updateWith(
                fileValidity.getSourceFile(), changedFile, entry.getValue(),
    // Write the merged assets and persist state for the next incremental run.
    MergedAssetWriter writer = new MergedAssetWriter(getOutputDir());
    merger.mergeData(writer, false /*doCleanUp*/);
    merger.writeBlobTo(getIncrementalFolder(), writer, false);
} catch (MergingException e) {
    getLogger().error("Could not merge source set folders: ", e);
    // Invalidate saved incremental state; fragment ends before the rethrow.
    merger.cleanBlob(getIncrementalFolder());
@Nullable protected Collection<String> getViewTags( @NonNull Context context, @NonNull ResourceItem item) { // Check view tag in this file. Can I do it cheaply? Try with // an XML pull parser. Or DOM if we have multiple resources looked // up? ResourceFile source = item.getSource(); if (source != null) { File file = source.getFile(); Multimap<String,String> map = getIdToTagsIn(context, file); if (map != null) { return map.get(item.getName()); } } return null; }
private static boolean checkResourceRepository( @NonNull AbstractResourceRepository resources, @NonNull String name) { List<ResourceItem> items = resources.getResourceItem(ResourceType.DRAWABLE, name); if (items == null) { return false; } // Check if at least one drawable with this name is a vector. for (ResourceItem item : items) { ResourceFile source = item.getSource(); if (source == null) { return false; } File file = source.getFile(); if (!file.getPath().endsWith(SdkConstants.DOT_XML)) { continue; } return SdkConstants.TAG_VECTOR.equals(XmlUtils.getRootTagName(file)); } return false; } }
@Override protected void doFullTaskAction() throws IOException { // this is full run, clean the previous output File destinationDir = getOutputDir(); FileUtils.cleanOutputDir(destinationDir); List<AssetSet> assetSets = getInputDirectorySets(); // create a new merger and populate it with the sets. AssetMerger merger = new AssetMerger(); try { for (AssetSet assetSet : assetSets) { // set needs to be loaded. assetSet.loadFromFiles(getILogger()); merger.addDataSet(assetSet); } // get the merged set and write it down. MergedAssetWriter writer = new MergedAssetWriter(destinationDir); merger.mergeData(writer, false /*doCleanUp*/); // No exception? Write the known state. merger.writeBlobTo(getIncrementalFolder(), writer, false); } catch (MergingException e) { getLogger().error("Could not merge source set folders: ", e); merger.cleanBlob(getIncrementalFolder()); throw new ResourceException(e.getMessage(), e); } }
@NonNull private List<ResourceSet> getConfiguredResourceSets(ResourcePreprocessor preprocessor) { List<ResourceSet> resourceSets = Lists.newArrayList(getInputResourceSets()); List<ResourceSet> generatedSets = Lists.newArrayListWithCapacity(resourceSets.size()); for (ResourceSet resourceSet : resourceSets) { resourceSet.setPreprocessor(preprocessor); ResourceSet generatedSet = new GeneratedResourceSet(resourceSet); resourceSet.setGeneratedSet(generatedSet); generatedSets.add(generatedSet); } // Put all generated sets at the start of the list. resourceSets.addAll(0, generatedSets); return resourceSets; }
/**
 * Resolves the integer value of the dimension resource with the given name
 * from the project's resource repository.
 *
 * <p>NOTE(review): getResourceItem(...) may return null or an empty list for
 * an unknown name, and getResourceValue(false)/getValue() may return null —
 * any of those produce a runtime exception here. Confirm callers only pass
 * names known to exist.
 *
 * @param name the dimension resource name to resolve
 * @param context the lint context providing the client and project
 * @return the parsed integer value of the first matching DIMEN resource item
 */
private int resolveDimensionValue(String name, Context context) {
    LintClient client = context.getDriver().getClient();
    LintProject project = context.getDriver().getProject();
    AbstractResourceRepository resources = client.getProjectResources(project, true);
    String value = resources.getResourceItem(ResourceType.DIMEN, name)
            .get(0)
            .getResourceValue(false)
            .getValue();
    // parseInt returns the primitive directly; Integer.valueOf would box and
    // immediately unbox. Same parsing behavior and exceptions otherwise.
    return Integer.parseInt(value);
}
/**
 * Writes the merged, preprocessed resources to the output directory and then
 * persists the merger state into the incremental folder.
 *
 * @param merger the merger holding the data to write out
 * @throws MergingException if writing the merged data fails
 */
private void finalizeMerge(PreprocessResourcesMerger merger) throws MergingException {
    PreprocessResourcesWriter resourceWriter =
            new PreprocessResourcesWriter(getOutputResDirectory());
    merger.mergeData(resourceWriter, true);
    merger.writeBlobTo(getIncrementalFolder(), resourceWriter);
}
/**
 * Collects the source files of every given source set into one flat list.
 *
 * @param resourceSets the source sets to flatten
 * @return every source file, in source-set order
 */
public static List<File> flattenSourceSets(List<? extends SourceSet> resourceSets) {
    List<File> flattened = Lists.newArrayList();
    for (SourceSet set : resourceSets) {
        flattened.addAll(set.getSourceFiles());
    }
    return flattened;
}
} // NOTE(review): closes an enclosing declaration that begins outside this chunk.
// Render each DataItem's name, with its description (when present) as a tooltip.
column.setCellFactory(c -> {
    TableCell<DataItem, DataItem> cell = new TableCell<DataItem, DataItem>() {
        @Override
        protected void updateItem(DataItem item, boolean empty) {
            super.updateItem(item, empty);
            if (item == null) {
                setText(null);
                setTooltip(null);
            } else {
                setText(item.getName());
                if (item.getDescription() != null && !item.getDescription().isEmpty()) {
                    setTooltip(new Tooltip(item.getDescription()));
                } else {
                    // Cells are recycled: without this, a tooltip set for a
                    // previously displayed item would linger on this cell.
                    setTooltip(null);
                }
            }
        }
    };
    return cell;
});
/**
 * Wraps the given parameters in a {@code MergedResourceWriter.FileGenerationWorkAction},
 * which this action delegates to when executed.
 *
 * @param workItem the parameters for the file-generation work
 */
@Inject
public FileGenerationWorkAction(MergedResourceWriter.FileGenerationParameters workItem) {
    this.workAction = new MergedResourceWriter.FileGenerationWorkAction(workItem);
}
// NOTE(review): this chunk is a truncated fragment of a full resource-merge task
// action — the try { line, the loop that defines resourceSet, destinationDir's
// declaration, and the method's closing braces are missing. Tokens preserved as-is.
ResourceMerger merger = new ResourceMerger(minSdk);
    resourceSet.loadFromFiles(getILogger());
    merger.addDataSet(resourceSet);
    // No per-file compilation in this path.
    resourceCompiler = QueueableResourceCompiler.NONE;
    MergedResourceWriter writer = new MergedResourceWriter(
            destinationDir, getPublicFile(), getIncrementalFolder());
    merger.mergeData(writer, false /*doCleanUp*/);
    merger.writeBlobTo(getIncrementalFolder(), writer, false);
} catch (MergingException e) {
    // NOTE(review): printing to stdout drops the stack trace and is inconsistent
    // with sibling tasks that use getLogger().error("...", e) — consider fixing
    // once the full method is in view.
    System.out.println(e.getMessage());
    // Invalidate saved incremental state before rethrowing.
    merger.cleanBlob(getIncrementalFolder());
    throw new ResourceException(e.getMessage(), e);
@Nullable protected Collection<String> getViewTags( @NonNull Context context, @NonNull ResourceItem item) { // Check view tag in this file. Can I do it cheaply? Try with // an XML pull parser. Or DOM if we have multiple resources looked // up? ResourceFile source = item.getSource(); if (source != null) { File file = source.getFile(); Multimap<String,String> map = getIdToTagsIn(context, file); if (map != null) { return map.get(item.getName()); } } return null; }
/**
 * Returns the input resource sets, each configured with the given preprocessor
 * and interleaved with its generated companion set (generated set immediately
 * before its origin set). The result is computed once and cached in
 * {@code processedInputs}.
 *
 * @param preprocessor the preprocessor to install on every input set
 * @return the cached, interleaved list of generated and original sets
 */
private List<ResourceSet> getConfiguredResourceSets(ResourcePreprocessor preprocessor) {
    // it is possible that this get called twice in case the incremental run fails and reverts
    // back to full task run. Because the cached ResourceList is modified we don't want
    // to recompute this twice (plus, why recompute it twice anyway?)
    if (processedInputs == null) {
        processedInputs = computeResourceSetList();
        List<ResourceSet> generatedSets = Lists.newArrayListWithCapacity(processedInputs.size());

        for (ResourceSet resourceSet : processedInputs) {
            resourceSet.setPreprocessor(preprocessor);
            ResourceSet generatedSet = new GeneratedResourceSet(resourceSet);
            resourceSet.setGeneratedSet(generatedSet);
            generatedSets.add(generatedSet);
        }

        // We want to keep the order of the inputs. Given inputs:
        // (A, B, C, D)
        // We want to get:
        // (A-generated, A, B-generated, B, C-generated, C, D-generated, D).
        // Therefore, when later in {@link DataMerger} we look for sources going through the
        // list backwards, B-generated will take priority over A (but not B).
        // A real life use-case would be if an app module generated resource overrode a library
        // module generated resource (existing not in generated but bundled dir at this stage):
        // (lib, app debug, app main)
        // We will get:
        // (lib generated, lib, app debug generated, app debug, app main generated, app main)
        //
        // Index 2*i works because each earlier insertion shifts the remaining
        // originals right by one: after i insertions, original i sits at index 2*i.
        for (int i = 0; i < generatedSets.size(); ++i) {
            processedInputs.add(2 * i, generatedSets.get(i));
        }
    }

    return processedInputs;
}
/**
 * Flattens the given source sets into a single list containing every source file.
 *
 * @param resourceSets the source sets whose files should be gathered
 * @return all source files, preserving the order of the given sets
 */
public static List<File> flattenSourceSets(List<? extends SourceSet> resourceSets) {
    List<File> result = Lists.newArrayList();
    for (SourceSet current : resourceSets) {
        result.addAll(current.getSourceFiles());
    }
    return result;
}
} // NOTE(review): closes an enclosing declaration that begins outside this chunk.