@Override
public HdfsResource getResource() {
    return _hdfsUri == null ? null : new HdfsResource(_hdfsUri);
}
public HdfsResource createResource(final String hdfsPath) {
    return new HdfsResource(_defaultFs.resolve(hdfsPath).toString());
}
public Resource getResourceToUse(final URI path) {
    if (path == null) {
        return null;
    }
    if (_hadoopConfiguration == null) {
        // No Hadoop configuration available: fall back to resolving by URI scheme.
        if ("hdfs".equals(path.getScheme())) {
            return new HdfsResource(path.toString());
        }
        return new FileResource(path.toString());
    }
    // A Hadoop configuration is available, so let it drive the resource resolution.
    return new HadoopResource(path, _hadoopConfiguration, HadoopResource.DEFAULT_CLUSTERREFERENCE);
}
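A minimal usage sketch of the scheme-based resolution above; the converter instance name and the sample paths are assumptions for illustration, not taken from the code:

// Hypothetical caller; "resourceConverter" is assumed to be an instance of the class
// that declares getResourceToUse(URI).
final Resource hdfsInput = resourceConverter.getResourceToUse(
        URI.create("hdfs://namenode:8020/datacleaner/input/orders.csv"));
final Resource localInput = resourceConverter.getResourceToUse(
        URI.create("/tmp/orders.csv")); // no scheme -> FileResource when no Hadoop configuration is set
try (InputStream in = hdfsInput.read()) {
    // consume the stream, e.g. hand it to a CSV parser
}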
@Override
public Resource create(ResourceProperties properties) throws UnsupportedResourcePropertiesException {
    // Optional directory containing the Hadoop configuration files (e.g. core-site.xml, hdfs-site.xml).
    final Object hadoopConfDirProperty = properties.toMap().get(PROPERTY_HADOOP_CONF_DIR);
    final String hadoopConfDir = hadoopConfDirProperty == null ? null : hadoopConfDirProperty.toString();
    return new HdfsResource(properties.getUri().toString(), hadoopConfDir);
}
private void getResultFileFromCluster(TenantContext tenantContext, ExecutionLogger executionLogger,
        String hadoopResultFileName, String jobName) {
    try {
        final HdfsResource resultsResource = new HdfsResource(
                HadoopUtils.getFileSystem().getUri().resolve(hadoopResultFileName).toString());
        if (resultsResource.isExists()) {
            final RepositoryFolder repositoryResultFolder = tenantContext.getResultFolder();
            final String fileName = HadoopJobExecutionUtils.getUrlReadyJobName(jobName)
                    + FileFilters.ANALYSIS_RESULT_SER.getExtension();
            final Resource resourceFile = repositoryResultFolder.createFile(fileName, null).toResource();
            logger.info("Writing the result to " + resourceFile.getQualifiedPath());
            FileHelper.copy(resultsResource, resourceFile);
        } else {
            final String message = "An error has occurred while running the job. The result was not persisted "
                    + "on Hadoop. Please check the Hadoop and/or DataCleaner logs.";
            executionLogger.setStatusFailed(null, null, new Exception(message));
        }
    } catch (Exception e) {
        executionLogger.setStatusFailed(null, null, e);
    }
}
final String hadoopJobResultFileName = SparkRunner.DEFAULT_RESULT_PATH + "/" + jobName
        + SparkRunner.RESULT_FILE_EXTENSION;
final String uri = HadoopUtils.getFileSystem().getUri().resolve(hadoopJobFileName).toString();
final HdfsResource analysisJobResource = new HdfsResource(uri);