
How to use the getName method in org.datacleaner.repository.RepositoryFile

Best Java code snippets using org.datacleaner.repository.RepositoryFile.getName (Showing top 20 results out of 315)
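Before the individual results, here is a minimal, self-contained sketch of the typical call. The repository location, folder name and file name are hypothetical, and FileRepository is assumed to come from the org.datacleaner.repository.file package:

import java.io.File;

import org.datacleaner.repository.RepositoryFile;
import org.datacleaner.repository.RepositoryFolder;
import org.datacleaner.repository.file.FileRepository;

public class GetNameExample {
  public static void main(String[] args) {
    // Hypothetical repository layout: a "jobs" folder containing "customers.analysis.xml".
    final FileRepository repository = new FileRepository(new File("/path/to/repository"));
    final RepositoryFolder jobsFolder = repository.getFolder("jobs");
    final RepositoryFile jobFile = jobsFolder.getFile("customers.analysis.xml");

    // getName() returns the plain file name; getQualifiedPath() includes the folder path.
    System.out.println(jobFile.getName());          // customers.analysis.xml
    System.out.println(jobFile.getQualifiedPath()); // /jobs/customers.analysis.xml
  }
}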

origin: org.eobjects.datacleaner/DataCleaner-monitor-services

@Override
public String getName() {
  final int extensionLength = CustomJobEngine.EXTENSION.length();
  final String filename = _file.getName();
  return filename.substring(0, filename.length() - extensionLength);
}
origin: org.eobjects.datacleaner/DataCleaner-monitor-services

  @Override
  public Boolean eval(RepositoryFile file) {
    // check that the remaining part of the filename is ONLY a
    // timestamp - or else it might be a name conflict
    // between similarly named jobs.
    String timestampPart = file.getName();
    timestampPart = timestampPart.substring(prefix.length());
    timestampPart = timestampPart.substring(0, timestampPart.length() - extension.length());
    try {
      Long.parseLong(timestampPart);
      return true;
    } catch (NumberFormatException e) {
      return false;
    }
  }
});
origin: org.eobjects.datacleaner/DataCleaner-monitor-services

@Override
public JobContext getJob() {
  final String resultFilename = _repositoryFile.getName();
  // we assume a filename pattern like this:
  // {job}-{timestamp}.analysis.result.dat
  final int lastIndexOfDash = resultFilename.lastIndexOf('-');
  assert lastIndexOfDash != -1;
  final String jobName = resultFilename.substring(0, lastIndexOfDash);
  return _tenantContext.getJob(jobName);
}
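The snippet above relies on the {job}-{timestamp}.analysis.result.dat naming convention and takes everything before the last dash as the job name. A tiny standalone illustration of that parsing, using a made-up file name:

public class ResultFilenameExample {
  public static void main(String[] args) {
    // Hypothetical result file name following the {job}-{timestamp}.analysis.result.dat convention.
    final String resultFilename = "customers-1398766800000.analysis.result.dat";
    final int lastIndexOfDash = resultFilename.lastIndexOf('-');
    final String jobName = resultFilename.substring(0, lastIndexOfDash);
    System.out.println(jobName); // prints: customers
  }
}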
origin: org.eobjects.datacleaner/DataCleaner-monitor-services

@Override
public String getName() {
  final int extensionLength = FileFilters.ANALYSIS_XML.getExtension().length();
  final String filename = _file.getName();
  return filename.substring(0, filename.length() - extensionLength);
}
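The two getName() overrides above (and several snippets further down) follow the same pattern: RepositoryFile.getName() returns the physical file name, and the caller strips a known extension to recover the logical job or timeline name. A standalone sketch of that pattern, with a hypothetical extension constant and an endsWith guard that the originals omit:

import org.datacleaner.repository.RepositoryFile;

public class LogicalNameExample {

  // Hypothetical extension; the real code uses constants such as
  // FileFilters.ANALYSIS_XML.getExtension() or CustomJobEngine.EXTENSION.
  private static final String EXTENSION = ".analysis.xml";

  public static String toLogicalName(RepositoryFile file) {
    final String filename = file.getName();
    if (!filename.endsWith(EXTENSION)) {
      // not the expected file type - return the name unchanged
      return filename;
    }
    return filename.substring(0, filename.length() - EXTENSION.length());
  }
}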
origin: datacleaner/DataCleaner

@Override
public String getName() {
  return getRepositoryFile().getName();
}
origin: org.eobjects.datacleaner/DataCleaner-monitor-services

private void addTimelines(List<TimelineIdentifier> result, DashboardGroup group, RepositoryFolder repositoryFolder) {
  final String extension = FileFilters.ANALYSIS_TIMELINE_XML.getExtension();
  final List<RepositoryFile> files = repositoryFolder.getFiles(null, extension);
  for (RepositoryFile file : files) {
    final String timelineName = file.getName().substring(0, file.getName().length() - extension.length());
    final TimelineIdentifier timelineIdentifier = new TimelineIdentifier(timelineName, file.getQualifiedPath(),
        group);
    result.add(timelineIdentifier);
  }
}
origin: org.eobjects.datacleaner/DataCleaner-monitor-services

@Override
public ResultContext getResult(TenantIdentifier tenant, RepositoryFile resultFile) {
  if (resultFile == null) {
    return null;
  }
  TenantContext context = _tenantContextFactory.getContext(tenant);
  return context.getResult(resultFile.getName());
}
origin: org.eobjects.datacleaner/DataCleaner-monitor-services

logger.info("File: " + path + file.getName());
zipOutput.putNextEntry(new ZipEntry(path + file.getName()));
file.readFile(new Action<InputStream>() {
  @Override
origin: org.eobjects.datacleaner/DataCleaner-monitor-services

@RequestMapping(method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public List<Map<String, String>> resultsFolderJson(@PathVariable("tenant") String tenant) {
  final TenantContext context = _contextFactory.getContext(tenant);
  final List<Map<String, String>> result = new ArrayList<>();
  {
    final List<JobIdentifier> jobs = context.getJobs();
    for (JobIdentifier job : jobs) {
      final JobContext jobContext = context.getJob(job);
      final RepositoryFile file = jobContext.getJobFile();
      final Map<String, String> map = new HashMap<>();
      map.put("name", job.getName());
      map.put("filename", file.getName());
      map.put("repository_path", file.getQualifiedPath());
      result.add(map);
    }
  }
  return result;
}
origin: org.eobjects.datacleaner/DataCleaner-monitor-services

  @RolesAllowed(SecurityRoles.VIEWER)
  @RequestMapping(method = RequestMethod.GET, produces = "application/json")
  @ResponseBody
  public List<Map<String, String>> resultsFolderJson(@PathVariable("tenant") String tenant) {
    final TenantContext context = _tenantContextFactory.getContext(tenant);

    final RepositoryFolder resultsFolder = context.getResultFolder();

    final List<Map<String, String>> result = new ArrayList<Map<String, String>>();

    {
      final List<RepositoryFile> files = resultsFolder.getFiles(null,
          FileFilters.ANALYSIS_RESULT_SER.getExtension());
      for (RepositoryFile file : files) {
        Map<String, String> map = new HashMap<String, String>();
        map.put("filename", file.getName());
        map.put("repository_path", file.getQualifiedPath());
        result.add(map);
      }
    }

    return result;
  }
}
origin: org.eobjects.datacleaner/DataCleaner-monitor-services

result.put("status", "Success");
result.put("file_type", resultFile.getType().toString());
result.put("filename", resultFile.getName());
result.put("repository_path", resultFile.getQualifiedPath());
origin: org.eobjects.datacleaner/DataCleaner-monitor-services

@RolesAllowed(SecurityRoles.JOB_EDITOR)
@RequestMapping(method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE, consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
@ResponseBody
public Map<String, String> uploadAnalysisJobToFolderJson(@PathVariable("tenant") final String tenant,
    @RequestParam("file") final MultipartFile file) {
  if (file == null) {
    throw new IllegalArgumentException(
        "No file upload provided. Please provide a multipart file using the 'file' HTTP parameter.");
  }
  final Action<OutputStream> writeCallback = out -> {
    final InputStream in = file.getInputStream();
    try {
      FileHelper.copy(in, out);
    } finally {
      FileHelper.safeClose(in);
    }
  };
  final TenantContext context = _contextFactory.getContext(tenant);
  final RepositoryFile jobFile;
  final RepositoryFolder jobsFolder = context.getJobFolder();
  final String filename = file.getOriginalFilename();
  jobFile = jobsFolder.createFile(filename, writeCallback);
  logger.info("Created new job from uploaded file: {}", filename);
  final Map<String, String> result = new HashMap<>();
  result.put("status", STATUS_SUCCESS);
  result.put("file_type", jobFile.getType().toString());
  result.put("filename", jobFile.getName());
  result.put("repository_path", jobFile.getQualifiedPath());
  return result;
}
origin: org.eobjects.datacleaner/DataCleaner-monitor-services

result.put("status", "Success");
result.put("file_type", configurationFile.getType().toString());
result.put("filename", configurationFile.getName());
result.put("repository_path", configurationFile.getQualifiedPath());
origin: org.eobjects.datacleaner/DataCleaner-monitor-services

final String extension = existingFile.getName().substring(jobName.length());
response.put("source_job", sourceJob.getName());
response.put("target_job", newJob.getName());
response.put("repository_url", "/" + tenant + "/jobs/" + newJobFile.getName());
logger.debug("Response payload: {}", response);
origin: org.eobjects.datacleaner/DataCleaner-monitor-services

} else {
  jobFile = existingJob.getJobFile();
  logger.info("Overwriting job from uploaded file: {}", jobFile.getName());
  jobFile.writeFile(writeCallback);
result.put("status", "Success");
result.put("file_type", jobFile.getType().toString());
result.put("filename", jobFile.getName());
result.put("repository_path", jobFile.getQualifiedPath());
origin: org.eobjects.datacleaner/DataCleaner-monitor-services

@Override
public List<TimelineIdentifier> getTimelines(final TenantIdentifier tenant, final DashboardGroup group) {
  final RepositoryFolder timelinesFolder = _tenantContextFactory.getContext(tenant).getTimelineFolder();
  final List<RepositoryFile> files;
  final String groupName = (group == null ? null : group.getName());
  if (group == null || groupName == null || "".equals(groupName)) {
    files = timelinesFolder.getFiles();
  } else {
    RepositoryFolder groupFolder = timelinesFolder.getFolder(groupName);
    files = groupFolder.getFiles();
  }
  final List<TimelineIdentifier> result = new ArrayList<TimelineIdentifier>();
  for (RepositoryFile file : files) {
    if (file.getType() == Type.TIMELINE_SPEC) {
      String timelineName = file.getName().substring(0,
          file.getName().length() - FileFilters.ANALYSIS_TIMELINE_XML.getExtension().length());
      TimelineIdentifier timeline = new TimelineIdentifier(timelineName, file.getQualifiedPath(), group);
      result.add(timeline);
    }
  }
  return result;
}
origin: org.eobjects.datacleaner/DataCleaner-monitor-services

final ResultContext resultContext = context.getResult(resultFile.getName());
final AnalysisResult analysisResult;
try {
  logger.error("Failed to read AnalysisResult in file: " + resultFile, e);
  response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Failed to read result file: "
      + resultFile.getName() + ". See server logs for details.");
  return;
origin: org.eobjects.datacleaner/DataCleaner-monitor-services

final String oldFilename = existingFile.getName();
response.put("old_result_name", oldFilename);
final String newFilename = newResult.getResultFile().getName();
origin: org.eobjects.datacleaner/DataCleaner-monitor-services

  @Override
  public MetricValues getMetricValues(List<MetricIdentifier> metricIdentifiers, RepositoryFile resultFile,
      TenantIdentifier tenant, JobIdentifier jobIdentifier) {
    final TenantContext tenantContext = _tenantContextFactory.getContext(tenant);
    final String resultFilename = resultFile.getName();
    final ResultContext resultContext = tenantContext.getResult(resultFilename);

    final String jobName = jobIdentifier.getName();
    final MetricJobContext job = (MetricJobContext) tenantContext.getJob(jobName);
    final MetricJobEngine<? extends MetricJobContext> jobEngine;
    if (job == null) {
      logger.warn("Job {} does not exist. Cannot resolve job engine, so defaulting to DataCleanerJobEngine.",
          jobName);
      jobEngine = _jobEngineManager.getJobEngineOfType(DataCleanerJobEngine.class);
    } else {
      jobEngine = job.getJobEngine();
    }
    return jobEngine.getMetricValues(job, resultContext, metricIdentifiers);
  }
}
origin: org.eobjects.datacleaner/DataCleaner-monitor-services

@Override
public ResultContext getLatestResult(TenantIdentifier tenantIdentifier, JobIdentifier job) {
  final TenantContext context = _tenantContextFactory.getContext(tenantIdentifier.getId());
  final RepositoryFolder resultsFolder = context.getResultFolder();
  final String jobName = job.getName();
  final RepositoryFile resultFile = resultsFolder.getLatestFile(jobName,
      FileFilters.ANALYSIS_RESULT_SER.getExtension());
  if (resultFile == null) {
    return null;
  }
  return context.getResult(resultFile.getName());
}

Popular methods of RepositoryFile

  • writeFile
    Opens up an OutputStream to write to the file.
  • delete
  • getLastModified
  • getQualifiedPath
  • getSize
  • readFile
  • getType
  • toResource
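Tying a few of these together: a short sketch (not taken from the DataCleaner sources) that writes hypothetical content to a RepositoryFile, reads it back, and prints its name, size and last-modified timestamp. It reuses the org.apache.metamodel.util Action and FileHelper types that the snippets above already use:

import java.io.InputStream;
import java.io.OutputStream;

import org.apache.metamodel.util.Action;
import org.apache.metamodel.util.FileHelper;
import org.datacleaner.repository.RepositoryFile;

public class RepositoryFileSketch {

  static void roundTrip(final RepositoryFile file) {
    // writeFile opens an OutputStream to the file; store some hypothetical content.
    final Action<OutputStream> writeCallback = out -> out.write("hello".getBytes("UTF-8"));
    file.writeFile(writeCallback);

    // readFile opens an InputStream; copy the bytes straight to stdout.
    final Action<InputStream> readCallback = in -> FileHelper.copy(in, System.out);
    file.readFile(readCallback);

    System.out.println(file.getName() + " is " + file.getSize() + " bytes, last modified "
        + file.getLastModified());
  }
}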
