// FRAGMENT (extraction artifact — unbalanced braces, truncated lambda/try): part of a
// lifecycle-script loader. Lists files under lifecycleDir, reads each script's source,
// parses it via scriptEngine.parseScriptInstance into the requested type, and casts it.
// NOTE(review): the lambda body and try block are cut off here; the enclosing method is
// not visible in this chunk, so the code is left byte-identical.
if ( lifecycleDir.exists() ) { final DataFile[] scripts = lifecycleDir.listFiles((pathname) -> try String s = script.readString(); Object obj = scriptEngine.parseScriptInstance(s, type, true); T action = type.cast(obj);
/**
 * Records a host-group id under the given name and persists the whole cache to disk.
 *
 * @param name the host-group name used as the cache key
 * @param id   the Zabbix host-group id to associate with the name
 * @throws IOException if serializing the cache or writing the data file fails
 */
public void putHostGroup( String name, String id )
        throws IOException
{
    hostGroupCache.put( name, id );
    // Serialize the full cache map and overwrite the backing data file.
    final String serialized = new ObjectMapper().writeValueAsString( hostGroupCache );
    hostGroupCacheDataFile.writeString( serialized, "UTF-8", null );
}
/**
 * TODO: dump the repo definitions as they exist in the StoreDataManager instead.
 * Currently, those are the same thing, but when we move to a cluster-enabled Indy implementation we're
 * going to need to escape the filesystem for things like repo definition storage, and use an ISPN cache
 * or similar instead.
 */
/**
 * Streams every repository definition file (per package dir, per store type) into the
 * given zip, under REPOS_DIR/&lt;package&gt;/&lt;type&gt;/&lt;file&gt; entry names.
 *
 * @param zip the open zip stream to append entries to; caller owns and closes it
 * @throws IOException if reading a definition or writing a zip entry fails
 */
private void zipRepositoryFiles( ZipOutputStream zip )
        throws IOException
{
    DataFile[] packageDirs = dataFileManager.getDataFile( INDY_STORE ).listFiles( ( f ) -> true );
    // FIX: listFiles() mirrors File.listFiles() and may return null when the store
    // directory is missing; the original dereferenced it unconditionally (NPE).
    if ( packageDirs == null )
    {
        return;
    }
    for ( DataFile pkgDir : packageDirs )
    {
        String pkgDirName = REPOS_DIR + "/" + pkgDir.getName();
        for ( StoreType type : StoreType.values() )
        {
            String typeDirName = pkgDirName + "/" + type.singularEndpointName();
            DataFile[] files = pkgDir.getChild( type.singularEndpointName() ).listFiles( f -> true );
            if ( files != null )
            {
                for ( DataFile f : files )
                {
                    final String json = f.readString();
                    String name = typeDirName + "/" + f.getName();
                    logger.debug( "Adding {} to repo zip", name );
                    zip.putNextEntry( new ZipEntry( name ) );
                    IOUtils.copy( toInputStream( json ), zip );
                }
            }
        }
    }
}
/**
 * (Re-)loads the host-name -&gt; host-id cache from its backing data file, if present.
 * Read failures are logged and swallowed so a corrupt or unreadable cache file leaves
 * the in-memory cache untouched (best-effort reload, matching the original behavior).
 */
private void reLoadHost()
{
    hostCacheDataFile = dataFileManager.getDataFile( ZABBIX_ID + "/" + ZABBIX_HOST );
    ObjectMapper mapper = new ObjectMapper();
    if ( hostCacheDataFile.exists() )
    {
        try
        {
            String hosts = hostCacheDataFile.readString();
            hostCache = mapper.readValue( hosts, ConcurrentHashMap.class );
        }
        catch ( IOException e )
        {
            // FIX: pass the exception as the logger's cause (stack trace included)
            // instead of printStackTrace() + concatenating the exception into the message.
            logger.error( "Failed to read hosts cache from data file: " + hostCacheDataFile, e );
        }
    }
}
// FRAGMENT (extraction artifact — dangling if/else bodies, truncated expressions): part of
// a legacy store-definition migration. It builds an audit ChangeSummary, walks per-type
// directories under basedir, ensures each target's parent directory exists (failing the
// lifecycle if it cannot be created or is not a directory), then writes the migrated
// definition JSON and flags that a change occurred.
// NOTE(review): control-flow bodies are cut off here; left byte-identical as unsafe to rewrite.
new ChangeSummary( ChangeSummary.SYSTEM_USER, "Migrating legacy store definitions." ); if ( !basedir.exists() ) final String[] dirs = basedir.list(); if ( dirs == null || dirs.length < 1 ) File[] files = basedir.getDetachedFile() .toPath() .resolve( type.singularEndpointName() ) if ( target.exists() ) DataFile targetDir = target.getParent(); if ( !targetDir.exists() && !targetDir.mkdirs() ) throw new IndyLifecycleException( "Cannot make directory: %s.", targetDir.getPath() ); else if ( !targetDir.isDirectory() ) throw new IndyLifecycleException( "Not a directory: %s.", targetDir.getPath() ); logger.info( "Migrating definition {}", src.getPath() ); final String json = src.readString(); target.writeString( migrated, summary ); changed = true;
/**
 * Persists each store definition as JSON under INDY_STORE/&lt;package&gt;/&lt;type&gt;/&lt;name&gt;.json.
 *
 * @param skipIfExists when true, definitions whose file already exists are left untouched
 * @param summary      audit record attached to each write
 * @param stores       the store definitions to persist
 * @throws IndyDataException if the target directory cannot be created or a write fails
 */
private void store( final boolean skipIfExists, final ChangeSummary summary, final ArtifactStore... stores )
        throws IndyDataException
{
    for ( final ArtifactStore store : stores )
    {
        final DataFile f = manager.getDataFile( INDY_STORE, store.getPackageType(),
                                                store.getType().singularEndpointName(),
                                                store.getName() + ".json" );
        if ( skipIfExists && f.exists() )
        {
            continue;
        }

        final DataFile d = f.getParent();
        // FIX: mkdirs() returns false when the directory ALREADY exists, so the original
        // threw on every write into a pre-existing directory. Only fail when the
        // directory is genuinely absent after the attempt.
        if ( !d.mkdirs() && !d.exists() )
        {
            throw new IndyDataException( "Cannot create storage directory: {} for definition: {}", d, store );
        }

        try
        {
            final String json = serializer.writeValueAsString( store );
            f.writeString( json, "UTF-8", summary );
            logger.debug( "Persisted {} to disk at: {}\n{}", store, f, json );
        }
        catch ( final IOException e )
        {
            throw new IndyDataException( "Cannot write definition: {} to: {}. Reason: {}", e, store, f, e.getMessage() );
        }
    }
}
// FRAGMENT (extraction artifact — dangling for/if/try bodies): part of a store-definition
// reload. It lists package directories under INDY_STORE, reads each per-type JSON file,
// deserializes it to an ArtifactStore, and deletes files that fail to produce a store.
// NOTE(review): loop and try bodies are cut off here; left byte-identical as unsafe to rewrite.
// NOTE(review): like File.listFiles(), listFiles() here may return null — presumably the
// missing null-check on packageDirs is in the truncated portion; verify against the full file.
DataFile[] packageDirs = manager.getDataFile( INDY_STORE ).listFiles( ( f ) -> true ); for ( DataFile pkgDir : packageDirs ) DataFile[] files = pkgDir.getChild( type.singularEndpointName() ).listFiles(f->true); if ( files != null ) final String json = f.readString(); final ArtifactStore store = serializer.readValue( json, type.getStoreClass() ); if ( store == null ) f.delete( summary ); try f.delete( summary );
// FRAGMENT (extraction artifact — bare statements, enclosing method not visible): ensures
// the parent directory of the generated SetBack settings.xml exists, then writes the
// rendered template content with an audit ChangeSummary naming the store key.
settingsXml.getParent() .mkdirs(); settingsXml.writeString( rendered, "UTF-8", new ChangeSummary( ChangeSummary.SYSTEM_USER, "SETBACK: Updating generated SetBack settings.xml for: " + key ) );
/**
 * Deletes the generated SetBack settings.xml for the given store, if one exists.
 * Hosted repositories never have generated settings and are skipped.
 *
 * @param store the store whose generated settings.xml should be removed
 * @return true when a settings file existed and was deleted; false otherwise
 * @throws SetBackDataException if SetBack is disabled or the delete fails
 */
public boolean deleteStoreSettings( final ArtifactStore store )
        throws SetBackDataException
{
    if ( !config.isEnabled() )
    {
        throw new SetBackDataException( "SetBack is disabled!" );
    }

    final StoreKey key = store.getKey();
    if ( StoreType.hosted == key.getType() )
    {
        // Hosted stores have no generated settings to delete.
        return false;
    }

    final DataFile settingsXml = getSettingsXml( key );
    if ( !settingsXml.exists() )
    {
        return false;
    }

    final ChangeSummary summary =
            new ChangeSummary( ChangeSummary.SYSTEM_USER, "SETBACK: Deleting generated SetBack settings.xml for: " + store );
    try
    {
        settingsXml.delete( summary );
    }
    catch ( final IOException e )
    {
        throw new SetBackDataException( "Failed to delete SetBack settings.xml for: %s.\n at: %s\n Reason: %s", e,
                                        store, settingsXml, e.getMessage() );
    }
    return true;
}
/**
 * Resolves the generated SetBack settings.xml for the given store key.
 *
 * @param key the store key to look up
 * @return the existing settings.xml DataFile, or null when SetBack is disabled,
 *         the file could not be resolved, or it does not exist on disk
 */
public DataFile getSetBackSettings( final StoreKey key )
{
    if ( !config.isEnabled() )
    {
        return null;
    }
    final DataFile settingsXml = getSettingsXml( key );
    if ( settingsXml != null && settingsXml.exists() )
    {
        return settingsXml;
    }
    return null;
}
/**
 * Retrieves a page of the git changelog for the data file backing the given store key.
 *
 * @param key   the store whose definition-file history is requested
 * @param start offset of the first change entry to return
 * @param count maximum number of entries to return
 * @return the matching change summaries, or an empty list when revisions are disabled
 * @throws GitSubsystemException if querying the git changelog fails
 */
public List<ChangeSummary> getDataChangeLog( final StoreKey key, final int start, final int count )
        throws GitSubsystemException
{
    if ( !revisionsConfig.isEnabled() )
    {
        return Collections.emptyList();
    }
    final File target = storeManager.getDataFile( key ).getDetachedFile();
    return dataFileGit.getChangelog( target, start, count );
}
/**
 * Clears all persisted store data: delegates to the superclass, then removes the
 * on-disk INDY_STORE directory.
 *
 * @param summary audit record describing why the data was cleared
 * @throws IndyDataException if deleting the storage files fails
 */
@Override
public void clear( final ChangeSummary summary )
        throws IndyDataException
{
    super.clear( summary );
    try
    {
        manager.getDataFile( INDY_STORE ).delete( summary );
    }
    catch ( final IOException e )
    {
        throw new IndyDataException( "Failed to delete Indy storage files: {}", e, e.getMessage() );
    }
}
/**
 * (Re-)loads the item-name -&gt; item-id cache from its backing data file, if present.
 * Read failures are logged and swallowed so a corrupt or unreadable cache file leaves
 * the in-memory cache untouched (best-effort reload, matching the original behavior).
 */
private void reLoadItem()
{
    itemCacheDataFile = dataFileManager.getDataFile( ZABBIX_ID + "/" + ZABBIX_ITEM );
    ObjectMapper mapper = new ObjectMapper();
    if ( itemCacheDataFile.exists() )
    {
        try
        {
            String items = itemCacheDataFile.readString();
            itemCache = mapper.readValue( items, ConcurrentHashMap.class );
        }
        catch ( IOException e )
        {
            // FIX: pass the exception as the logger's cause (stack trace included)
            // instead of printStackTrace() + concatenating the exception into the message.
            logger.error( "Failed to read items cache from data file: " + itemCacheDataFile, e );
        }
    }
}
// FRAGMENT (extraction artifact — dangling if, body not visible): guard checking that a
// resolved settings.xml DataFile is present and exists on disk before it is used.
if ( settingsXml == null || !settingsXml.exists() )
/**
 * Retrieves a page of the git changelog for an arbitrary path under the data basedir.
 * Absolute paths are validated against the basedir (to prevent escaping it) and then
 * relativized; an empty path or "/" means the basedir itself.
 *
 * @param path   data-dir-relative (or absolute) path whose history is requested
 * @param start  offset of the first change entry to return
 * @param length maximum number of entries to return
 * @return the matching change summaries, or an empty list when revisions are disabled
 * @throws GitSubsystemException if the path escapes the data basedir, or the git query fails
 */
public List<ChangeSummary> getDataChangeLog( String path, final int start, final int length )
        throws GitSubsystemException
{
    if ( !revisionsConfig.isEnabled() )
    {
        return Collections.emptyList();
    }

    final File basedir = dataFileManager.getDetachedDataBasedir();
    if ( new File( path ).isAbsolute() )
    {
        // FIX: normalize before comparing path components. The original raw string
        // startsWith() check could be bypassed with ".." segments (e.g.
        // /data/../etc/passwd) and wrongly accepted sibling dirs sharing the basedir
        // name as a prefix (e.g. /data-evil when basedir is /data).
        final Path base = basedir.toPath().toAbsolutePath().normalize();
        final Path requested = Paths.get( path ).toAbsolutePath().normalize();
        if ( !requested.startsWith( base ) )
        {
            throw new GitSubsystemException( "Cannot reference path outside of data basedir." );
        }
        path = base.relativize( requested ).toString();
    }

    final File file;
    if ( isEmpty( path ) || path.equals( "/" ) )
    {
        file = basedir;
    }
    else
    {
        file = dataFileManager.getDataFile( path ).getDetachedFile();
    }

    return dataFileGit.getChangelog( file, start, length );
}
/**
 * Removes the on-disk JSON definition file for the given store.
 *
 * @param store   the store whose definition file should be deleted
 * @param summary audit record attached to the deletion
 * @throws IndyDataException if the definition file cannot be deleted
 */
private void delete( final ArtifactStore store, final ChangeSummary summary )
        throws IndyDataException
{
    logger.trace( "Attempting to delete data file for store: {}", store.getKey() );
    final DataFile definitionFile = manager.getDataFile( INDY_STORE, store.getPackageType(),
                                                         store.getType().singularEndpointName(),
                                                         store.getName() + ".json" );
    try
    {
        logger.trace( "Deleting file: {}", definitionFile );
        definitionFile.delete( summary );
    }
    catch ( final IOException e )
    {
        throw new IndyDataException( "Cannot delete store definition: {} in file: {}. Reason: {}", e,
                                     store.getKey(), definitionFile, e.getMessage() );
    }
}
/**
 * (Re-)loads the hostGroup-name -&gt; hostGroup-id cache from its backing data file, if
 * present. Read failures are logged and swallowed so a corrupt or unreadable cache file
 * leaves the in-memory cache untouched (best-effort reload, matching the original behavior).
 */
private void reLoadHostGroup()
{
    hostGroupCacheDataFile = dataFileManager.getDataFile( ZABBIX_ID + "/" + ZABBIX_HOSTGROUP );
    ObjectMapper mapper = new ObjectMapper();
    if ( hostGroupCacheDataFile.exists() )
    {
        try
        {
            // FIX: removed leftover debug dead code (a throwaway HashMap with a
            // "test"/"123" entry serialized and discarded) that had no effect.
            String hostGroups = hostGroupCacheDataFile.readString();
            hostGroupCache = mapper.readValue( hostGroups, ConcurrentHashMap.class );
        }
        catch ( IOException e )
        {
            // FIX: pass the exception as the logger's cause (stack trace included)
            // instead of printStackTrace() + concatenating the exception into the message.
            logger.error( "Failed to read hostGroup cache from data file: " + hostGroupCacheDataFile, e );
        }
    }
}
/**
 * Records a host id under the given name and persists the whole cache to disk.
 *
 * @param name the host name used as the cache key
 * @param id   the Zabbix host id to associate with the name
 * @throws IOException if serializing the cache or writing the data file fails
 */
public void putHost( String name, String id )
        throws IOException
{
    hostCache.put( name, id );
    // Serialize the full cache map and overwrite the backing data file.
    final String serialized = new ObjectMapper().writeValueAsString( hostCache );
    hostCacheDataFile.writeString( serialized, "UTF-8", null );
}
/**
 * Records an item id under the given name and persists the whole cache to disk.
 *
 * @param name the item name used as the cache key
 * @param id   the Zabbix item id to associate with the name
 * @throws IOException if serializing the cache or writing the data file fails
 */
public void putItem( String name, String id )
        throws IOException
{
    itemCache.put( name, id );
    // Serialize the full cache map and overwrite the backing data file.
    final String serialized = new ObjectMapper().writeValueAsString( itemCache );
    itemCacheDataFile.writeString( serialized, "UTF-8", null );
}