private void reLoadHost()
{
    hostCacheDataFile = dataFileManager.getDataFile( ZABBIX_ID + "/" + ZABBIX_HOST );
    ObjectMapper mapper = new ObjectMapper();
    if ( hostCacheDataFile.exists() )
    {
        try
        {
            String hosts = hostCacheDataFile.readString();
            hostCache = mapper.readValue( hosts, ConcurrentHashMap.class );
        }
        catch ( IOException e )
        {
            logger.error( "Failed to read host cache from data file", e );
        }
    }
}
public void putHostGroup( String name, String id )
        throws IOException
{
    ObjectMapper mapper = new ObjectMapper();
    hostGroupCache.put( name, id );
    hostGroupCacheDataFile.writeString( mapper.writeValueAsString( hostGroupCache ), "UTF-8", null );
}
@Override
public void clear( final ChangeSummary summary )
        throws IndyDataException
{
    super.clear( summary );

    final DataFile basedir = manager.getDataFile( INDY_STORE );
    try
    {
        basedir.delete( summary );
    }
    catch ( final IOException e )
    {
        throw new IndyDataException( "Failed to delete Indy storage files: {}", e, e.getMessage() );
    }
}
/**
 * TODO: dump the repo definitions as they exist in the StoreDataManager instead.
 * Currently, those are the same thing, but when we move to a cluster-enabled Indy implementation we're
 * going to need to escape the filesystem for things like repo definition storage, and use an ISPN cache
 * or similar instead.
 */
private void zipRepositoryFiles( ZipOutputStream zip )
        throws IOException
{
    DataFile[] packageDirs = dataFileManager.getDataFile( INDY_STORE ).listFiles( ( f ) -> true );
    for ( DataFile pkgDir : packageDirs )
    {
        String pkgDirName = REPOS_DIR + "/" + pkgDir.getName();
        for ( StoreType type : StoreType.values() )
        {
            String typeDirName = pkgDirName + "/" + type.singularEndpointName();
            DataFile[] files = pkgDir.getChild( type.singularEndpointName() ).listFiles( f -> true );
            if ( files != null )
            {
                for ( DataFile f : files )
                {
                    final String json = f.readString();
                    String name = typeDirName + "/" + f.getName();
                    logger.debug( "Adding {} to repo zip", name );
                    zip.putNextEntry( new ZipEntry( name ) );
                    IOUtils.copy( toInputStream( json ), zip );
                }
            }
        }
    }
}
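A minimal caller sketch for zipRepositoryFiles, assuming a hypothetical backupFile target (not from the source); it shows the try-with-resources pattern that ensures the zip stream is flushed and closed even on error:

// Hypothetical caller: bundle all repo definitions into one zip for backup/migration.
// 'backupFile' is an assumed java.io.File; any OutputStream target would work.
try ( ZipOutputStream zip = new ZipOutputStream( new FileOutputStream( backupFile ) ) )
{
    zipRepositoryFiles( zip );
}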
@Override
public <T extends IndyLifecycleAction> Collection<T> getUserLifecycleActions( String lifecycleName, Class<T> type )
{
    // NOTE: the original snippet was truncated. The result collection, the script-file
    // filter, and the error handling below are reconstructed assumptions.
    List<T> actions = new ArrayList<>();

    DataFile lifecycleDir = dataFileManager.getDataFile( LIFECYCLE_DIR, lifecycleName );
    if ( lifecycleDir.exists() )
    {
        final DataFile[] scripts = lifecycleDir.listFiles( ( pathname ) -> pathname.getName().endsWith( ".groovy" ) );
        for ( final DataFile script : scripts )
        {
            try
            {
                String s = script.readString();
                Object obj = scriptEngine.parseScriptInstance( s, type, true );
                T action = type.cast( obj );
                actions.add( action );
            }
            catch ( final Exception e ) // precise exception type is an assumption
            {
                logger.error( "Failed to parse lifecycle action script: {}", script, e );
            }
        }
    }

    return actions;
}
private void store( final boolean skipIfExists, final ChangeSummary summary, final ArtifactStore... stores )
        throws IndyDataException
{
    for ( final ArtifactStore store : stores )
    {
        final DataFile f = manager.getDataFile( INDY_STORE, store.getPackageType(),
                                                store.getType().singularEndpointName(), store.getName() + ".json" );
        if ( skipIfExists && f.exists() )
        {
            continue;
        }

        final DataFile d = f.getParent();
        // mkdirs() returns false when the directory already exists, so check existence too.
        if ( !d.exists() && !d.mkdirs() )
        {
            throw new IndyDataException( "Cannot create storage directory: {} for definition: {}", d, store );
        }

        try
        {
            final String json = serializer.writeValueAsString( store );
            f.writeString( json, "UTF-8", summary );
            logger.debug( "Persisted {} to disk at: {}\n{}", store, f, json );
        }
        catch ( final IOException e )
        {
            throw new IndyDataException( "Cannot write definition: {} to: {}. Reason: {}", e, store, f,
                                         e.getMessage() );
        }
    }
}
// NOTE: the original snippet was truncated. The loop bodies and the
// delete-on-failure recovery below are reconstructed assumptions.
DataFile[] packageDirs = manager.getDataFile( INDY_STORE ).listFiles( ( f ) -> true );
for ( DataFile pkgDir : packageDirs )
{
    for ( StoreType type : StoreType.values() )
    {
        DataFile[] files = pkgDir.getChild( type.singularEndpointName() ).listFiles( f -> true );
        if ( files != null )
        {
            for ( DataFile f : files )
            {
                try
                {
                    final String json = f.readString();
                    final ArtifactStore store = serializer.readValue( json, type.getStoreClass() );
                    if ( store == null )
                    {
                        // A blank definition deserializes to null; drop the dead file.
                        f.delete( summary );
                    }
                }
                catch ( final IOException e )
                {
                    try
                    {
                        // Assumed recovery: remove an unreadable definition rather than failing the load.
                        f.delete( summary );
                    }
                    catch ( final IOException ignored )
                    {
                    }
                }
            }
        }
    }
}
public List<ChangeSummary> getDataChangeLog( String path, final int start, final int length )
        throws GitSubsystemException
{
    if ( !revisionsConfig.isEnabled() )
    {
        return Collections.emptyList();
    }

    final File basedir = dataFileManager.getDetachedDataBasedir();
    if ( new File( path ).isAbsolute() )
    {
        if ( !path.startsWith( basedir.getPath() ) )
        {
            throw new GitSubsystemException( "Cannot reference path outside of data basedir." );
        }

        path = Paths.get( basedir.toURI() ).relativize( Paths.get( path ) ).toString();
    }

    final File file;
    if ( isEmpty( path ) || path.equals( "/" ) )
    {
        file = basedir;
    }
    else
    {
        file = dataFileManager.getDataFile( path ).getDetachedFile();
    }

    return dataFileGit.getChangelog( file, start, length );
}
public DataFile getDataFile( final StoreKey key )
{
    return manager.getDataFile( INDY_STORE, key.getType().singularEndpointName(), key.getName() + ".json" );
}
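A hedged caller-side sketch for getDataFile( StoreKey ): it reads the JSON definition back and deserializes it with Jackson, mirroring the serializer and type.getStoreClass() usage shown in the store/load snippets above; the existence checks and local names are assumptions.

// Hedged usage sketch (caller-side names are assumptions, not from the source):
DataFile f = getDataFile( key );
if ( f != null && f.exists() )
{
    final String json = f.readString();
    final ArtifactStore store = serializer.readValue( json, key.getType().getStoreClass() );
}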
@Override
public void install()
        throws IndyDataException
{
    if ( !manager.getDataFile( INDY_STORE ).isDirectory() )
    {
        final ChangeSummary summary = new ChangeSummary( ChangeSummary.SYSTEM_USER, "Initializing defaults" );

        storeArtifactStore( new RemoteRepository( MAVEN_PKG_KEY, "central", "http://repo.maven.apache.org/maven2/" ),
                            summary, true, false, new EventMetadata() );

        storeArtifactStore( new Group( MAVEN_PKG_KEY, "public", new StoreKey( StoreType.remote, "central" ) ),
                            summary, true, false, new EventMetadata() );
    }
}
public boolean deleteStoreSettings( final ArtifactStore store )
        throws SetBackDataException
{
    if ( !config.isEnabled() )
    {
        throw new SetBackDataException( "SetBack is disabled!" );
    }

    final StoreKey key = store.getKey();
    if ( StoreType.hosted == key.getType() )
    {
        return false;
    }

    final DataFile settingsXml = getSettingsXml( key );
    if ( settingsXml.exists() )
    {
        try
        {
            settingsXml.delete( new ChangeSummary( ChangeSummary.SYSTEM_USER,
                                                   "SETBACK: Deleting generated SetBack settings.xml for: " + store ) );
        }
        catch ( final IOException e )
        {
            throw new SetBackDataException( "Failed to delete SetBack settings.xml for: %s.\n at: %s\n Reason: %s", e,
                                            store, settingsXml, e.getMessage() );
        }
        return true;
    }

    return false;
}
public DataFile getSetBackSettings( final StoreKey key )
{
    if ( !config.isEnabled() )
    {
        return null;
    }

    final DataFile settingsXml = getSettingsXml( key );
    return settingsXml == null || !settingsXml.exists() ? null : settingsXml;
}
public List<ChangeSummary> getDataChangeLog( final StoreKey key, final int start, final int count )
        throws GitSubsystemException
{
    if ( !revisionsConfig.isEnabled() )
    {
        return Collections.emptyList();
    }

    final DataFile dataFile = storeManager.getDataFile( key );
    return dataFileGit.getChangelog( dataFile.getDetachedFile(), start, count );
}
private void reLoadItem()
{
    itemCacheDataFile = dataFileManager.getDataFile( ZABBIX_ID + "/" + ZABBIX_ITEM );
    ObjectMapper mapper = new ObjectMapper();
    if ( itemCacheDataFile.exists() )
    {
        try
        {
            String items = itemCacheDataFile.readString();
            itemCache = mapper.readValue( items, ConcurrentHashMap.class );
        }
        catch ( IOException e )
        {
            logger.error( "Failed to read item cache from data file", e );
        }
    }
}
private void delete( final ArtifactStore store, final ChangeSummary summary )
        throws IndyDataException
{
    logger.trace( "Attempting to delete data file for store: {}", store.getKey() );

    final DataFile f = manager.getDataFile( INDY_STORE, store.getPackageType(),
                                            store.getType().singularEndpointName(), store.getName() + ".json" );
    try
    {
        logger.trace( "Deleting file: {}", f );
        f.delete( summary );
    }
    catch ( final IOException e )
    {
        throw new IndyDataException( "Cannot delete store definition: {} in file: {}. Reason: {}", e, store.getKey(),
                                     f, e.getMessage() );
    }
}
private DataFile getSettingsXml( final StoreKey key )
{
    if ( !config.isEnabled() )
    {
        return null;
    }

    return manager.getDataFile( DATA_DIR, key.getType().singularEndpointName(),
                                "settings-" + key.getName() + ".xml" );
}
public void putHost( String name, String id )
        throws IOException
{
    ObjectMapper mapper = new ObjectMapper();
    hostCache.put( name, id );
    hostCacheDataFile.writeString( mapper.writeValueAsString( hostCache ), "UTF-8", null );
}
private void reLoadHostGroup()
{
    hostGroupCacheDataFile = dataFileManager.getDataFile( ZABBIX_ID + "/" + ZABBIX_HOSTGROUP );
    ObjectMapper mapper = new ObjectMapper();
    if ( hostGroupCacheDataFile.exists() )
    {
        try
        {
            String hostGroups = hostGroupCacheDataFile.readString();
            hostGroupCache = mapper.readValue( hostGroups, ConcurrentHashMap.class );
        }
        catch ( IOException e )
        {
            logger.error( "Failed to read hostGroup cache from data file", e );
        }
    }
}
public void putItem( String name, String id )
        throws IOException
{
    ObjectMapper mapper = new ObjectMapper();
    itemCache.put( name, id );
    itemCacheDataFile.writeString( mapper.writeValueAsString( itemCache ), "UTF-8", null );
}
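Taken together, the put*/reLoad* pairs above form a simple write-through cache over DataFile: reLoad* hydrates the in-memory ConcurrentHashMap from the JSON data file at startup, and put* mutates the map and immediately rewrites the whole file. A hedged round-trip sketch; the host name and Zabbix ID below are invented examples, not from the source:

// Hedged round-trip sketch (example values are made up):
reLoadHost();                           // hydrate hostCache from the data file, if present
putHost( "build-node-01", "10105" );    // update the in-memory map and persist the JSON snapshot
String id = hostCache.get( "build-node-01" );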