/**
 * Resolves {@code originalPath} against the Elasticsearch config directory.
 *
 * @param originalPath the raw, possibly relative path from the settings (may be null or empty)
 * @param propName     the setting name, used for validation error reporting
 * @param settings     node settings used to build the {@link Environment}
 * @param configPath   explicit config path handed to the {@link Environment}
 * @param mustBeValid  when true, {@code checkPath} is invoked on the resolved path
 * @return the absolute resolved path, or {@code null} when the input was null/empty
 */
public static String resolve(String originalPath, String propName, Settings settings, Path configPath, boolean mustBeValid) {
    // Fixed duplicated word in the debug message ("Path is is").
    log.debug("Path is {}", originalPath);
    String path = originalPath;
    final Environment env = new Environment(settings, configPath);
    // env is freshly constructed and can never be null here (removed dead null check);
    // only resolve non-empty paths against the config directory.
    if (originalPath != null && !originalPath.isEmpty()) {
        path = env.configFile().resolve(originalPath).toAbsolutePath().toString();
        log.debug("Resolved {} to {} against {}", originalPath, path, env.configFile().toAbsolutePath().toString());
    }
    if (mustBeValid) {
        checkPath(path, propName);
    }
    // Normalize an empty path to null for callers.
    if ("".equals(path)) {
        path = null;
    }
    return path;
}
/**
 * Asserts that the two environments are equivalent for all things the environment cares
 * about (i.e., all but the settings object, which may contain different settings).
 *
 * @param actual   the environment under test
 * @param expected the reference environment
 */
public static void assertEquivalent(Environment actual, Environment expected) {
    // JUnit 5's assertEquals signature is (expected, actual, message); the original code
    // passed the arguments reversed, which swaps the expected/actual roles in failure output.
    // NOTE(review): dataFiles()/repoFiles() appear to return arrays -- assertEquals would then
    // compare references; consider assertArrayEquals -- TODO confirm return types.
    assertEquals(expected.dataFiles(), actual.dataFiles(), "dataFiles");
    assertEquals(expected.repoFiles(), actual.repoFiles(), "repoFiles");
    assertEquals(expected.configFile(), actual.configFile(), "configFile");
    assertEquals(expected.pluginsFile(), actual.pluginsFile(), "pluginsFile");
    assertEquals(expected.binFile(), actual.binFile(), "binFile");
    assertEquals(expected.libFile(), actual.libFile(), "libFile");
    assertEquals(expected.modulesFile(), actual.modulesFile(), "modulesFile");
    assertEquals(expected.logsFile(), actual.logsFile(), "logsFile");
    assertEquals(expected.pidFile(), actual.pidFile(), "pidFile");
    assertEquals(expected.tmpFile(), actual.tmpFile(), "tmpFile");
}
// NOTE(review): this chunk appears truncated -- the constructor body ends abruptly and the
// trailing log.warn/log.error statements reference variables (index, e) that are not declared
// in the visible code; they presumably belong to conditional/catch blocks that were cut off.
public ComplianceConfig(final Environment environment, final IndexResolverReplacer irr, final AuditLog auditLog) {
    super();
    // Capture collaborators; settings are taken from the node environment.
    this.settings = environment.settings();
    this.environment = environment;
    this.irr = irr;
    this.auditLog = auditLog;
    // Fields whose reads should be audited (defaults to an empty list).
    final List<String> watchedReadFields = this.settings.getAsList(ConfigConstants.SEARCHGUARD_COMPLIANCE_HISTORY_READ_WATCHED_FIELDS, Collections.emptyList(), false);
    // Indices whose writes should be audited (defaults to an empty list).
    watchedWriteIndices = settings.getAsList(ConfigConstants.SEARCHGUARD_COMPLIANCE_HISTORY_WRITE_WATCHED_INDICES, Collections.emptyList());
    // Behavior toggles for the write/read compliance history, all off by default.
    logDiffsForWrite = settings.getAsBoolean(ConfigConstants.SEARCHGUARD_COMPLIANCE_HISTORY_WRITE_LOG_DIFFS, false);
    logWriteMetadataOnly = settings.getAsBoolean(ConfigConstants.SEARCHGUARD_COMPLIANCE_HISTORY_WRITE_METADATA_ONLY, false);
    logReadMetadataOnly = settings.getAsBoolean(ConfigConstants.SEARCHGUARD_COMPLIANCE_HISTORY_READ_METADATA_ONLY, false);
    log.warn("If you plan to use field masking pls configure "+ConfigConstants.SEARCHGUARD_COMPLIANCE_SALT+" to be a random string of 16 chars length identical on all nodes");
    log.warn(ConfigConstants.SEARCHGUARD_COMPLIANCE_SALT+" is greater than 16 bytes. Only the first 16 bytes are used for salting");
    log.error("Unable to check if auditlog index {} is part of compliance setup", index, e);
@Override
public List<String> run() {
    // Hash every entry under the resolved config directory.
    final Path confPath = new Environment(settings, configPath).configFile().toAbsolutePath();
    if (Files.isDirectory(confPath, LinkOption.NOFOLLOW_LINKS)) {
        try (Stream<Path> s = Files.walk(confPath)) {
            return s
                .distinct()
                .map(p -> sha256(p))
                .collect(Collectors.toList());
        } catch (Exception e) {
            // Fixed: the original called log.error(e), passing the throwable as the message
            // object; log with context and the exception as the throwable argument instead.
            log.error("Unable to walk config directory " + confPath, e);
            return null; // null signals a hashing failure -- presumably distinguished from "no config dir" by callers; TODO confirm
        }
    }
    // No config directory: nothing to hash.
    return Collections.emptyList();
}
});
// NOTE(review): truncated chunk -- the two deprecation 'if' blocks and the try block
// below are missing their closing braces in the visible source.
Settings.Builder output = Settings.builder();
initializeSettings(output, input, properties);
Environment environment = new Environment(output.build(), configPath);
// Fail fast on legacy configuration file names that are no longer supported.
if (Files.exists(environment.configFile().resolve("elasticsearch.yaml"))) {
    throw new SettingsException("elasticsearch.yaml was deprecated in 5.5.0 and must be renamed to elasticsearch.yml");
if (Files.exists(environment.configFile().resolve("elasticsearch.json"))) {
    throw new SettingsException("elasticsearch.json was deprecated in 5.5.0 and must be converted to elasticsearch.yml");
output = Settings.builder(); // start with a fresh output
// Load the real configuration file, if present.
Path path = environment.configFile().resolve("elasticsearch.yml");
if (Files.exists(path)) {
    try {
        output.loadFromPath(path);
finalizeSettings(output, terminal);
// Re-create the environment so derived path settings reflect the loaded file.
environment = new Environment(output.build(), configPath);
output.put(Environment.PATH_LOGS_SETTING.getKey(), environment.logsFile().toAbsolutePath().normalize().toString());
return new Environment(output.build(), configPath);
// NOTE(review): truncated chunk -- the enclosing method, loop, and try braces are not
// visible; 'plugin' and 'codebases' are declared outside this fragment.
Map<String,Policy> map = new HashMap<>();
// Collect plugin and module directories; LinkedHashSet keeps discovery order and drops duplicates.
Set<Path> pluginsAndModules = new LinkedHashSet<>(PluginsService.findPluginDirs(environment.pluginsFile()));
pluginsAndModules.addAll(PluginsService.findPluginDirs(environment.modulesFile()));
Path policyFile = plugin.resolve(PluginInfo.ES_PLUGIN_POLICY);
if (Files.exists(policyFile)) {
    // Register every jar shipped by the plugin as a codebase for its security policy.
    try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(plugin, "*.jar")) {
        for (Path jar : jarStream) {
            // toRealPath resolves symlinks so duplicate detection works on canonical URLs.
            URL url = jar.toRealPath().toUri().toURL();
            if (codebases.add(url) == false) {
                throw new IllegalStateException("duplicate module/plugin: " + url);
// NOTE(review): truncated chunk -- the method body is cut off mid-condition and the
// first 'if' block is missing its closing brace.
protected void findAndProcessShardPath(OptionSet options, Environment environment, CheckedConsumer<ShardPath, IOException> consumer) throws IOException {
    final Settings settings = environment.settings();
    // Expected on-disk layout: .../indices/<index-uuid>/<shard-id>/index
    final Path path = getPath(folderOption.value(options)).getParent();
    final Path shardParent = path.getParent();
    final Path shardParentParent = shardParent.getParent();
    final Path indexPath = path.resolve(ShardPath.INDEX_FOLDER_NAME);
    if (Files.exists(indexPath) == false || Files.isDirectory(indexPath) == false) {
        throw new ElasticsearchException("index directory [" + indexPath + "], must exist and be a directory");
    final String shardIdFileName = path.getFileName().toString();
    final String nodeIdFileName = shardParentParent.getParent().getFileName().toString();
    // Sanity-check that the supplied folder really looks like a shard data path.
    if (Files.isDirectory(path)
        && shardIdFileName.chars().allMatch(Character::isDigit) // SHARD-ID path element check
        && NodeEnvironment.INDICES_FOLDER.equals(shardParentParent.getFileName().toString()) // `indices` check
/**
 * Loads the IK analyzer configuration. First tries {@code <config>/<plugin>/<FILE_NAME>};
 * when that file is absent, falls back to the config directory inside the plugin itself.
 *
 * @param cfg plugin configuration providing the environment and fallback config dir
 */
private Dictionary(Configuration cfg) {
    this.configuration = cfg;
    this.props = new Properties();
    this.conf_dir = cfg.getEnvironment().configFile().resolve(AnalysisIkPlugin.PLUGIN_NAME);
    Path configFile = conf_dir.resolve(FILE_NAME);

    InputStream input = null;
    try {
        logger.info("try load config from {}", configFile);
        input = new FileInputStream(configFile.toFile());
    } catch (FileNotFoundException e) {
        // Fall back to the configuration bundled in the plugin directory.
        conf_dir = cfg.getConfigInPluginDir();
        configFile = conf_dir.resolve(FILE_NAME);
        try {
            logger.info("try load config from {}", configFile);
            input = new FileInputStream(configFile.toFile());
        } catch (FileNotFoundException ex) {
            // We should report origin exception
            logger.error("ik-analyzer", e);
        }
    }
    if (input != null) {
        try {
            // Properties.loadFromXML closes the stream when it returns, so no explicit close needed.
            props.loadFromXML(input);
        } catch (IOException e) {
            // Fixed: InvalidPropertiesFormatException extends IOException, so the original
            // two identical catch blocks collapse into this single handler.
            logger.error("ik-analyzer", e);
        }
    }
}
private void init() { Path configFilePath = env.configFile().resolve("elasticsearch-analysis-ansj").resolve(CONFIG_FILE_NAME); LOG.info("try to load ansj config file: {}", configFilePath); if (!Files.exists(configFilePath)) { configFilePath = Paths.get(new File(AnsjElasticConfigurator.class.getProtectionDomain().getCodeSource().getLocation().getPath()).getParent(), "config").resolve(CONFIG_FILE_NAME); LOG.info("try to load ansj config file: {}", configFilePath); } Settings.Builder builder = Settings.builder(); if (Files.exists(configFilePath)) { try { builder.loadFromPath(configFilePath); LOG.info("load ansj config: {}", configFilePath); } catch (IOException e) { LOG.error("load ansj config[{}] error: {}", configFilePath, e); } } else { LOG.warn("can't find ansj config file"); } Settings settings = builder.build(); path = settings.get("ansj_config"); ansjSettings = settings.getAsSettings("ansj"); configDir = env.configFile().toFile(); flushConfig(); // 进行一次测试分词 preheat(); }
/**
 * Opens a UTF-8 buffered reader for the file named by the {@code settingPrefix} setting,
 * resolved against the environment's config directory.
 *
 * @return {@code null} if no value is set for {@code settingPrefix}
 * @throws IllegalArgumentException if the file is not valid UTF-8 or cannot be read
 */
public static Reader getReaderFromFile(Environment env, Settings settings, String settingPrefix) {
    final String configuredPath = settings.get(settingPrefix, null);
    if (configuredPath == null) {
        return null;
    }
    final Path resolved = env.configFile().resolve(configuredPath);
    try {
        return Files.newBufferedReader(resolved, StandardCharsets.UTF_8);
    } catch (CharacterCodingException ex) {
        // Encoding problems get a dedicated, more actionable message.
        throw new IllegalArgumentException(
                String.format(Locale.ROOT,
                        "Unsupported character encoding detected while reading %s_path: %s files must be UTF-8 encoded",
                        settingPrefix, resolved.toString()),
                ex);
    } catch (IOException ioe) {
        throw new IllegalArgumentException(
                String.format(Locale.ROOT,
                        "IOException while reading %s_path: %s",
                        settingPrefix, resolved.toString()),
                ioe);
    }
}
/**
 * Reads the setting {@code propName} and, when present and non-empty, resolves it to an
 * absolute path against the environment's config directory.
 *
 * @param propName    name of the path setting to read
 * @param mustBeValid when true, the resolved path is validated via {@code checkPath}
 * @return the absolute resolved path, or {@code null} when the setting is unset/empty
 */
private String resolve(String propName, boolean mustBeValid) {
    final String originalPath = settings.get(propName, null);
    log.debug("Value for {} is {}", propName, originalPath);
    String resolvedPath = originalPath;
    // Only resolve when we have an environment and a non-empty raw value.
    if (env != null && originalPath != null && originalPath.length() > 0) {
        resolvedPath = env.configFile().resolve(originalPath).toAbsolutePath().toString();
        log.debug("Resolved {} to {} against {}", originalPath, resolvedPath, env.configFile().toAbsolutePath().toString());
    }
    if (mustBeValid) {
        checkPath(resolvedPath, propName);
    }
    // Normalize the empty string to null for callers.
    return "".equals(resolvedPath) ? null : resolvedPath;
}
// NOTE(review): truncated chunk -- several statements below are disconnected fragments of
// the keystore command ('file', 'setting', 'argumentValues' are declared outside this view,
// and an 'if' condition is cut off mid-expression).
@Override
protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception {
    // Load the keystore from the config directory; null means it does not exist yet.
    KeyStoreWrapper keystore = KeyStoreWrapper.load(env.configFile());
    if (keystore == null) {
        if (options.has(forceOption) == false &&
        keystore.save(env.configFile(), new char[0] /* always use empty passphrase for auto created keystore */);
        terminal.println("Created elasticsearch keystore in " + env.configFile());
    } else {
        keystore.decrypt(new char[0] /* TODO: prompt for password when they are supported */);
    if (Files.exists(file) == false) {
        throw new UserException(ExitCodes.IO_ERROR, "File [" + file.toString() + "] does not exist");
    String.join(", ", argumentValues.subList(2, argumentValues.size())) + "] after filepath");
    // Store the file contents under the given setting and persist the keystore.
    keystore.setFile(setting, Files.readAllBytes(file));
    keystore.save(env.configFile(), new char[0]);
// NOTE(review): truncated chunk -- the surrounding method, guard 'if', and permission
// check braces are not visible; 'message' and 'processes' are declared outside this view.
throw new IllegalStateException("native controllers already spawned");
if (!Files.exists(environment.modulesFile())) {
    throw new IllegalStateException("modules directory [" + environment.modulesFile() + "] not found");
// Spawn one native controller process per module that ships an executable for this platform.
List<Path> paths = PluginsService.findPluginDirs(environment.modulesFile());
for (final Path modules : paths) {
    final PluginInfo info = PluginInfo.readFromProperties(modules);
    final Path spawnPath = Platforms.nativeControllerPath(modules);
    if (!Files.isRegularFile(spawnPath)) {
        // This module has no native controller; skip it.
        continue;
    Locale.ROOT, "module [%s] does not have permission to fork native controller", modules.getFileName());
    throw new IllegalArgumentException(message);
// Controller binaries get the node's temp dir as working context.
final Process process = spawnNativeController(spawnPath, environment.tmpFile());
processes.add(process);
/**
 * Writes a file to the logs dir containing the ports for the given transport type.
 * The content is first written to a {@code .tmp} file and then atomically moved into
 * place so readers never observe a partially written ports file.
 *
 * @param type         transport type, used as the file-name prefix (e.g. "http")
 * @param boundAddress the bound addresses whose host:port pairs are recorded, one per line
 */
private void writePortsFile(String type, BoundTransportAddress boundAddress) {
    Path tmpPortsFile = environment.logsFile().resolve(type + ".ports.tmp");
    // Fixed: use the StandardCharsets constant instead of a runtime Charset.forName lookup.
    try (BufferedWriter writer = Files.newBufferedWriter(tmpPortsFile, java.nio.charset.StandardCharsets.UTF_8)) {
        for (TransportAddress address : boundAddress.boundAddresses()) {
            InetAddress inetAddress = InetAddress.getByName(address.getAddress());
            writer.write(NetworkAddress.format(new InetSocketAddress(inetAddress, address.getPort())) + "\n");
        }
    } catch (IOException e) {
        throw new RuntimeException("Failed to write ports file", e);
    }
    Path portsFile = environment.logsFile().resolve(type + ".ports");
    try {
        // Atomic rename publishes the finished file in one step.
        Files.move(tmpPortsFile, portsFile, StandardCopyOption.ATOMIC_MOVE);
    } catch (IOException e) {
        throw new RuntimeException("Failed to rename ports file", e);
    }
}
private Dictionary(Configuration configuration) { Path configDir = configuration.getEnvironment().configFile().resolve(AnalysisHanLPPlugin.PLUGIN_NAME); Predefine.HANLP_PROPERTIES_PATH = configDir.resolve(CONFIG_FILE_NAME).toString(); logger.debug("hanlp properties path: {}", Predefine.HANLP_PROPERTIES_PATH); DictionaryFileCache.configCachePath(configuration); DictionaryFileCache.loadCache(); RemoteDictConfig.initial(configDir.resolve(REMOTE_CONFIG_FILE_NAME).toString()); }
/**
 * Builds the hyphenation-compound token filter from the hyphenation pattern file named by
 * the required {@code hyphenation_patterns_path} setting, resolved against the config dir.
 *
 * @throws IllegalArgumentException if the setting is missing or the pattern file cannot be parsed
 */
public HyphenationCompoundWordTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
    super(indexSettings, env, name, settings);

    String hyphenationPatternsPath = settings.get("hyphenation_patterns_path", null);
    if (hyphenationPatternsPath == null) {
        throw new IllegalArgumentException("hyphenation_patterns_path is a required setting.");
    }

    Path hyphenationPatternsFile = env.configFile().resolve(hyphenationPatternsPath);

    // Fixed resource leak: the original opened the stream inline and never closed it;
    // try-with-resources guarantees the file handle is released after parsing.
    try (java.io.InputStream in = Files.newInputStream(hyphenationPatternsFile)) {
        hyphenationTree = HyphenationCompoundWordTokenFilter.getHyphenationTree(new InputSource(in));
    } catch (Exception e) {
        throw new IllegalArgumentException("Exception while reading hyphenation_patterns_path.", e);
    }
}
/**
 * Creates the filter factory. When {@code suffix_words_path} is configured, the suffix word
 * set is loaded from that file (UTF-8, resolved against the config dir); otherwise an empty
 * set is used.
 *
 * @throws IllegalArgumentException if the configured suffix words file cannot be read
 */
public NumberConcatenationFilterFactory(final IndexSettings indexSettings, final Environment environment,
        final String name, final Settings settings) {
    super(indexSettings, name, settings);
    final String suffixWordsPath = settings.get("suffix_words_path");
    if (suffixWordsPath == null) {
        // No file configured: fall back to an empty, case-sensitive word set.
        suffixWords = new CharArraySet(0, false);
    } else {
        final File suffixWordsFile = environment.configFile().resolve(suffixWordsPath).toFile();
        try (Reader reader = IOUtils.getDecodingReader(new FileInputStream(suffixWordsFile), StandardCharsets.UTF_8)) {
            suffixWords = WordlistLoader.getWordSet(reader);
        } catch (final IOException e) {
            throw new IllegalArgumentException("Could not load " + suffixWordsFile.getAbsolutePath(), e);
        }
    }
}
// NOTE(review): truncated chunk -- the 'if' and 'for' bodies below are missing their
// closing braces; 'policy' is declared outside this view.
// Grant read-only access to code and configuration locations.
addPath(policy, "path.home", environment.binFile(), "read,readlink");
addPath(policy, "path.home", environment.libFile(), "read,readlink");
addPath(policy, "path.home", environment.modulesFile(), "read,readlink");
addPath(policy, "path.plugins", environment.pluginsFile(), "read,readlink");
addPath(policy, "path.conf", environment.configFile(), "read,readlink");
addPath(policy, "path.scripts", environment.scriptsFile(), "read,readlink");
// Grant read-write access to mutable runtime locations.
addPath(policy, "java.io.tmpdir", environment.tmpFile(), "read,readlink,write,delete");
addPath(policy, "path.logs", environment.logsFile(), "read,readlink,write,delete");
if (environment.sharedDataFile() != null) {
    addPath(policy, "path.shared_data", environment.sharedDataFile(), "read,readlink,write,delete");
for (Path path : environment.dataFiles()) {
    addPath(policy, "path.data", path, "read,readlink,write,delete");
for (Path path : environment.dataWithClusterFiles()) {
    addPath(policy, "path.data", path, "read,readlink,write,delete");
for (Path path : environment.repoFiles()) {
    addPath(policy, "path.repo", path, "read,readlink,write,delete");
// The pid file only needs to be deletable (on shutdown).
if (environment.pidFile() != null) {
    policy.add(new FilePermission(environment.pidFile().toString(), "delete"));
/** Directory under the plugins folder where this plugin's files are extracted. */
Path extractedDir(Environment env) {
    final Path pluginsRoot = env.pluginsFile();
    return pluginsRoot.resolve(name);
}
return; final NodePath[] nodePaths = new NodePath[environment.dataWithClusterFiles().length]; final Lock[] locks = new Lock[nodePaths.length]; boolean success = false; sharedDataPath = environment.sharedDataFile(); int nodeLockId = -1; IOException lastException = null; int maxLocalStorageNodes = MAX_LOCAL_STORAGE_NODES_SETTING.get(settings); for (int possibleLockId = 0; possibleLockId < maxLocalStorageNodes; possibleLockId++) { for (int dirIndex = 0; dirIndex < environment.dataFiles().length; dirIndex++) { Path dataDirWithClusterName = environment.dataWithClusterFiles()[dirIndex]; Path dataDir = environment.dataFiles()[dirIndex]; Files.createDirectories(dir); startupTraceLogger.trace("obtaining node lock on {} ...", dir.toAbsolutePath()); try { locks[dirIndex] = luceneDir.obtainLock(NODE_LOCK_FILENAME); "failed to obtain node locks, tried [%s] with lock id%s;" + " maybe these locations are not writable or multiple nodes were started without increasing [%s] (was [%d])?", Arrays.toString(environment.dataWithClusterFiles()), maxLocalStorageNodes == 1 ? " [0]" : "s [0--" + (maxLocalStorageNodes - 1) + "]", MAX_LOCAL_STORAGE_NODES_SETTING.getKey(),