private void reLoadHostGroup()
{
    hostGroupCacheDataFile = dataFileManager.getDataFile( ZABBIX_ID + "/" + ZABBIX_HOSTGROUP );
    ObjectMapper mapper = new ObjectMapper();
    if ( hostGroupCacheDataFile.exists() )
    {
        try
        {
            String hostGroups = hostGroupCacheDataFile.readString();
            hostGroupCache = mapper.readValue( hostGroups, ConcurrentHashMap.class );
        }
        catch ( IOException e )
        {
            logger.error( "Failed to read hostGroup cache from data file", e );
        }
    }
}
private void reLoadHost()
{
    hostCacheDataFile = dataFileManager.getDataFile( ZABBIX_ID + "/" + ZABBIX_HOST );
    ObjectMapper mapper = new ObjectMapper();
    if ( hostCacheDataFile.exists() )
    {
        try
        {
            String hosts = hostCacheDataFile.readString();
            hostCache = mapper.readValue( hosts, ConcurrentHashMap.class );
        }
        catch ( IOException e )
        {
            logger.error( "Failed to read host cache from data file", e );
        }
    }
}
private void reLoadItem()
{
    itemCacheDataFile = dataFileManager.getDataFile( ZABBIX_ID + "/" + ZABBIX_ITEM );
    ObjectMapper mapper = new ObjectMapper();
    if ( itemCacheDataFile.exists() )
    {
        try
        {
            String items = itemCacheDataFile.readString();
            itemCache = mapper.readValue( items, ConcurrentHashMap.class );
        }
        catch ( IOException e )
        {
            logger.error( "Failed to read item cache from data file", e );
        }
    }
}
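The three reload methods above all follow the same read-and-deserialize pattern. A minimal sketch of a shared helper, assuming the DataFile API used above (getDataFile, exists, readString); the helper name reloadCache and its signature are illustrative and not part of the original code:

// Illustrative only: a generic variant of the reload pattern above.
// dataFileManager, DataFile, and logger are assumed to behave as in the surrounding code.
private ConcurrentHashMap<String, Object> reloadCache( String relativePath )
{
    DataFile dataFile = dataFileManager.getDataFile( ZABBIX_ID + "/" + relativePath );
    ObjectMapper mapper = new ObjectMapper();
    if ( dataFile.exists() )
    {
        try
        {
            // Read the cached JSON and deserialize it into a map, as the methods above do.
            String json = dataFile.readString();
            return mapper.readValue( json, ConcurrentHashMap.class );
        }
        catch ( IOException e )
        {
            logger.error( "Failed to read cache from data file: " + relativePath, e );
        }
    }
    // Fall back to an empty cache when the file is missing or unreadable.
    return new ConcurrentHashMap<>();
}

With such a helper, each reLoad* method would reduce to a single call, e.g. hostCache = reloadCache( ZABBIX_HOST ).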
try
{
    String s = script.readString();
    Object obj = scriptEngine.parseScriptInstance( s, type, true );
    T action = type.cast( obj );
/**
 * TODO: dump the repo definitions as they exist in the StoreDataManager instead.
 * Currently, those are the same thing, but when we move to a cluster-enabled Indy implementation we're
 * going to need to escape the filesystem for things like repo definition storage, and use an ISPN cache
 * or similar instead.
 */
private void zipRepositoryFiles( ZipOutputStream zip )
        throws IOException
{
    DataFile[] packageDirs = dataFileManager.getDataFile( INDY_STORE ).listFiles( ( f ) -> true );
    for ( DataFile pkgDir : packageDirs )
    {
        String pkgDirName = REPOS_DIR + "/" + pkgDir.getName();
        for ( StoreType type : StoreType.values() )
        {
            String typeDirName = pkgDirName + "/" + type.singularEndpointName();
            DataFile[] files = pkgDir.getChild( type.singularEndpointName() ).listFiles( f -> true );
            if ( files != null )
            {
                for ( DataFile f : files )
                {
                    final String json = f.readString();
                    String name = typeDirName + "/" + f.getName();
                    logger.debug( "Adding {} to repo zip", name );
                    zip.putNextEntry( new ZipEntry( name ) );
                    IOUtils.copy( toInputStream( json ), zip );
                }
            }
        }
    }
}
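A minimal caller sketch for the method above, assuming it is invoked against a freshly opened stream; the output file name is hypothetical and not taken from the original code:

// Illustrative caller, not part of the original code; "indy-repos.zip" is a hypothetical path.
try ( ZipOutputStream zip = new ZipOutputStream( new FileOutputStream( "indy-repos.zip" ) ) )
{
    zipRepositoryFiles( zip );
}
catch ( IOException e )
{
    logger.error( "Failed to write repo zip", e );
}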
final String json = src.readString();
final String json = f.readString();
final ArtifactStore store = serializer.readValue( json, type.getStoreClass() );
if ( store == null )