/**
 * Registers {@link OntologyRepositoryCollection} with the file repository source factory under
 * the ontology file extensions returned by {@code OntologyFileExtensions.getOntology()}, so
 * ontology files can be resolved to repository collections.
 *
 * <p>NOTE(review): the original comment pointed callers at
 * {@code DataService.createFileRepositorySource(File)}; other call sites in this codebase use
 * {@code FileRepositoryCollectionFactory.createFileRepositoryCollection(File)} — confirm which
 * entry point is current.
 */
@PostConstruct
public void registerOntologyRepositorySource() {
  fileRepositorySourceFactory.addFileRepositoryCollectionClass(
      OntologyRepositoryCollection.class, OntologyFileExtensions.getOntology());
}
}
fileRepositoryCollectionFactory.createFileRepositoryCollection(importWizard.getFile());
fileRepositoryCollectionFactory.createFileRepositoryCollection(file);
fileName, fileRepositoryCollectionFactory .createFileRepositoryCollection(tmpFile) .getFileNameExtensions());
/**
 * Registers {@link CsvRepositoryCollection} with the file repository source factory under the
 * CSV file extensions returned by {@code CsvFileExtensions.getCSV()}, so CSV files can be
 * resolved to repository collections.
 *
 * <p>NOTE(review): the original comment pointed callers at
 * {@code DataService.createFileRepositorySource(File)}; other call sites in this codebase use
 * {@code FileRepositoryCollectionFactory.createFileRepositoryCollection(File)} — confirm which
 * entry point is current.
 */
@PostConstruct
public void registerCsvRepositorySource() {
  fileRepositorySourceFactory.addFileRepositoryCollectionClass(
      CsvRepositoryCollection.class, CsvFileExtensions.getCSV());
}
}
Files.copy(file.toPath(), renamed.toPath(), StandardCopyOption.REPLACE_EXISTING); RepositoryCollection repositoryCollection = fileRepositoryCollectionFactory.createFileRepositoryCollection(renamed); EntityImportReport report = importService.doImport(
fileRepositoryCollectionFactory.createFileRepositoryCollection(importWizard.getFile()); ImportService importService = importServiceFactory.getImportService(importWizard.getFile(), repositoryCollection);
fileRepositoryCollectionFactory.createFileRepositoryCollection(file); ImportService importService = importServiceFactory.getImportService(file, source); EntitiesValidationReport validationReport = importService.validateImport(file, source);
/**
 * Downloads the file at {@code url} into the file store and imports its contents using the CSV
 * loader, reporting progress along the way.
 *
 * @param entityTypeId id of the entity type being ingested; also used to name the downloaded
 *     file ({@code <entityTypeId>.csv}) and to look up the imported-entity count in the report
 * @param url location of the file to download
 * @param loader name of the loader to use; only {@code "CSV"} is supported
 * @param jobExecutionID id of the job execution, passed to the file store download and used to
 *     associate the resulting {@link FileMeta}
 * @param progress progress reporter; its job execution is expected to be a
 *     {@link FileIngestJobExecution} — TODO confirm all callers guarantee this cast
 * @return metadata of the downloaded file, also attached to the job execution and persisted
 * @throws FileIngestException if {@code loader} is not {@code "CSV"}
 * @see FileIngestJobExecutionMetaData
 */
public FileMeta ingest(
    String entityTypeId, String url, String loader, String jobExecutionID, Progress progress) {
  if (!"CSV".equals(loader)) {
    throw new FileIngestException("Unknown loader '" + loader + "'");
  }

  progress.setProgressMax(2);
  progress.progress(0, "Downloading url '" + url + "'");
  File file = fileStoreDownload.downloadFile(url, jobExecutionID, entityTypeId + ".csv");

  progress.progress(1, "Importing...");
  FileRepositoryCollection repoCollection =
      fileRepositoryCollectionFactory.createFileRepositoryCollection(file);
  ImportService importService = importServiceFactory.getImportService(file, repoCollection);
  EntityImportReport report =
      importService.doImport(repoCollection, MetadataAction.UPSERT, ADD_UPDATE_EXISTING, null);
  progress.status("Ingestion of url '" + url + "' done.");

  // The report map may have no entry for entityTypeId when nothing was imported;
  // getOrDefault replaces the original boxed-Integer null check.
  int count = report.getNrImportedEntitiesMap().getOrDefault(entityTypeId, 0);
  progress.progress(2, "Successfully imported " + count + " " + entityTypeId + " entities.");

  FileMeta fileMeta = createFileMeta(jobExecutionID, file);
  FileIngestJobExecution fileIngestJobExecution =
      (FileIngestJobExecution) progress.getJobExecution();
  fileIngestJobExecution.setFile(fileMeta);
  dataService.add(FILE_META, fileMeta);
  return fileMeta;
}
fileRepositoryCollectionFactory.createFileRepositoryCollection(file); ImportService importService = importServiceFactory.getImportService(file, repositoryCollection);