/**
 * Removes the given task context from the registry of active contexts.
 *
 * @param aContext the context to unregister; its ID is used as the registry key.
 */
public void unregisterContext(TaskContext aContext)
{
    String contextId = aContext.getId();
    contexts.remove(contextId);
}
/**
 * Lifecycle hook invoked when a task context is torn down; logs a shutdown message
 * to the context.
 *
 * @param aContext the context being destroyed.
 */
@Override
public void destroy(TaskContext aContext)
{
    aContext.message("Shut down task");
}
}
/**
 * Convenience method to store data in the task context. Delegates directly to the
 * wrapped context.
 *
 * @param aKey the storage key under which the data is persisted.
 * @param aStream the writer that produces the binary data to store.
 */
public void storeBinary(String aKey, StreamWriter aStream)
{
    context.storeBinary(aKey, aStream);
}
/**
 * Lifecycle hook invoked when a task fails: removes the task's data from storage
 * (best effort) and logs the failure.
 *
 * @param aContext the context of the failed task.
 * @param aConfiguration the task that failed.
 * @param aCause the cause of the failure.
 * @throws LifeCycleException declared by the lifecycle contract; not thrown by the
 *             visible code here.
 */
@Override
public void fail(TaskContext aContext, Task aConfiguration, Throwable aCause)
    throws LifeCycleException
{
    // Best-effort cleanup: storage backend failures are logged, not rethrown, so the
    // failure message below is always emitted.
    try {
        aContext.getStorageService().delete(aContext.getId());
    }
    catch (DataAccessResourceFailureException e) {
        aContext.error("Unable to clean up context after failure. Some data may remain in "
                + "the context.", e);
    }
    aContext.error("Task failed ["+aConfiguration.getType()+"]", aCause);
}
/**
 * Stores HTML-rendered companions (key + ".html") for the analysis engine and
 * collection reader descriptors held in the context.
 */
@Override
public void execute()
{
    // One reader instance is reused for both descriptors, as in the original flow.
    XSLTStreamReader renderer = new XSLTStreamReader();
    renderToHtml(renderer, UimaTask.ANALYSIS_ENGINE_DESC_KEY);
    renderToHtml(renderer, UimaTask.COLLECTION_READER_DESC_KEY);
}

// Retrieves the binary stored under aKey into the reader and stores the reader's
// output stream under "<aKey>.html".
private void renderToHtml(XSLTStreamReader aRenderer, String aKey)
{
    context.retrieveBinary(aKey, aRenderer);
    context.storeBinary(aKey + ".html", aRenderer.openStream());
}
/**
 * Lifecycle hook invoked when a task starts: records the start timestamp and logs a
 * start message.
 *
 * @param aContext the context of the starting task.
 * @param aConfiguration the task being started.
 */
@Override
public void begin(TaskContext aContext, Task aConfiguration)
{
    // Nudge the VM to reclaim memory before the task starts.
    // NOTE(review): the effect of repeated System.gc() calls is JVM-dependent.
    for (int i = 0; i < 3; i++) {
        System.gc();
    }

    aContext.getMetadata().setStart(System.currentTimeMillis());
    aContext.message("Starting task [" + aConfiguration.getType() + "]");
}
// NOTE(review): fragment — the enclosing method signature and loop/brace structure
// are not visible in this span.
throws Exception
    StorageService store = getContext().getStorageService();
    // Resolve the storage folder for the subcontext; the last 36 characters of the
    // context ID are used as the folder discriminator.
    File contextFolder = store.getStorageFolder(getContext().getId(),
            subcontext.getId().substring(subcontext.getId().length() - 36));
    // Persist the report table as Excel and CSV next to the subcontext data.
    // NOTE(review): "report_name table.getExcelWriter()" looks like a separator
    // (comma or '+') was lost during extraction — confirm against the original source.
    getContext().storeBinary(
            contextFolder.getName() + System.getProperty("file.separator") + report_name
            table.getExcelWriter());
    getContext().storeBinary(
            contextFolder.getName() + System.getProperty("file.separator") + report_name
            table.getCsvWriter());
    // Store the discriminators map alongside the reports.
    getContext()
            .storeBinary(contextFolder.getName() + System.getProperty("file.separator")
                    + Task.DISCRIMINATORS_KEY, new PropertiesAdapter(discriminatorsMap));
    // The dummy folder is created only to discover the parent storage path for the log
    // message, then removed again.
    File dummyFolder = store.getStorageFolder(getContext().getId(), "dummy");
    getContext().getLoggingService().message(getContextLabel(),
            "Storing detailed results in:\n" + dummyFolder.getParent() + "\n");
    dummyFolder.delete();
// NOTE(review): fragment — the method signature, the loop over report classes, and
// the try/catch structure are not visible in this span.
throws LifeCycleException
    aContext.getMetadata().setEnd(System.currentTimeMillis());
    aContext.message("Completing task ["+aConfiguration.getType()+"]");
    // Run all reports configured for the task, logging progress per report.
    aContext.message("Running reports for task ["+aConfiguration.getType()+"]");
    List<Class<? extends Report>> reports = new ArrayList<Class<? extends Report>>(
            aConfiguration.getReports());
    aContext.message("Starting report [" + reportClass.getName() + "] (" + i + "/"
            + reports.size() + ")");
    Report report = reportClass.newInstance();
    report.setContext(aContext);
    report.execute();
    aContext.message("Report complete [" + reportClass.getName() + "] (" + i + "/"
            + reports.size() + ")");
    // A failing report aborts completion with a LifeCycleException.
    aContext.error("Report failed [" + reportClass.getName() + "] (" + i + "/"
            + reports.size() + ")", e);
    throw new LifeCycleException(e);
    // Persist the metadata to mark the context as complete; on a write failure the
    // metadata entry is deleted again before rethrowing.
    aContext.storeBinary(TaskContextMetadata.METADATA_KEY, aContext.getMetadata());
    aContext.getStorageService().delete(aContext.getId(), TaskContextMetadata.METADATA_KEY);
    throw new LifeCycleException("Unable to write [" + TaskContextMetadata.METADATA_KEY
            + "] to mark context as complete.", e);
    aContext.message("Completed task ["+aConfiguration.getType()+"]");
// NOTE(review): fragment — the enclosing method, surrounding conditionals, and the
// catch blocks this code belongs to are not visible in this span.
TaskContextProvider.PARAM_CONTEXT_ID, ctx.getId());
// Drive the task through its lifecycle: initialize -> begin -> ... -> complete.
ctx.getLifeCycleManager().initialize(ctx, aConfiguration);
ctx.getLifeCycleManager().begin(ctx, aConfiguration);
// Log reader progress if any is available.
if (progresses != null) { for (Progress p : progresses) {
ctx.message("Progress " + readerDesc.getImplementationName() + " "
        + p.getCompleted() + "/" + p.getTotal() + " " + p.getUnit());
ctx.getLifeCycleManager().complete(ctx, aConfiguration);
return ctx.getId();
// On failure, notify the lifecycle manager before rethrowing / wrapping.
ctx.getLifeCycleManager().fail(ctx, aConfiguration, e);
throw e;
ctx.getLifeCycleManager().fail(ctx, aConfiguration, e);
throw new ExecutionException(e);
ctx.destroy();
@Override public String run(Task aConfiguration) throws ExecutionException, LifeCycleException { if (!(aConfiguration instanceof ReportingTask)) { throw new ExecutionException("This engine can only execute [" + ReportingTask.class.getName() + "]"); } // Create persistence service for injection into analysis components TaskContext ctx = null; try { ctx = contextFactory.createContext(aConfiguration); // Now the setup is complete ctx.getLifeCycleManager().initialize(ctx, aConfiguration); // Start recording ctx.getLifeCycleManager().begin(ctx, aConfiguration); // End recording (here the reports will nbe done) ctx.getLifeCycleManager().complete(ctx, aConfiguration); return ctx.getId(); } finally { if (ctx != null) { ctx.destroy(); } } }
/**
 * Merges the ID-to-outcome properties produced by all TestTask subcontexts and stores
 * the merged result under the same key in this context.
 *
 * @throws Exception if retrieving or storing the properties fails.
 */
@Override
public void execute()
    throws Exception
{
    StorageService store = getContext().getStorageService();
    Properties merged = new Properties();

    for (TaskContextMetadata subcontext : getSubtasks()) {
        // Only subcontexts created by a TestTask carry outcome data.
        if (!subcontext.getType().startsWith(TestTask.class.getName())) {
            continue;
        }
        merged.putAll(store.retrieveBinary(subcontext.getId(),
                OutcomeIDReport.ID_OUTCOME_KEY, new PropertiesAdapter()).getMap());
    }

    getContext().storeBinary(OutcomeIDReport.ID_OUTCOME_KEY, new PropertiesAdapter(merged));
}
}
/**
 * Reads the predictions ARFF file produced by the test task and stores the generated
 * ID-to-outcome properties in this context.
 *
 * @throws Exception if the predictions cannot be read or the properties not stored.
 */
@Override
public void execute()
    throws Exception
{
    File storage = getContext().getStorageLocation(TestTask.OUTPUT_KEY, AccessMode.READONLY);
    // Use the (parent, child) constructor instead of concatenating with a hard-coded
    // "/" so the path is built with the platform's separator.
    File arff = new File(storage, TestTask.PREDICTIONS_KEY);
    Instances predictions = TaskUtils.getInstances(arff, TestTask.MULTILABEL);
    Properties props = generateProperties(predictions, TestTask.MULTILABEL);
    getContext().storeBinary(ID_OUTCOME_KEY, new PropertiesAdapter(props));
}
// NOTE(review): fragment — the method signature, the loop over subcontexts, and some
// variable declarations (discriminatorsMap, subcontext, key) are not visible here.
throws Exception
    StorageService store = getContext().getStorageService();
    Id2Outcome overallOutcome = new Id2Outcome();
    Properties prop = new Properties();
    // Discriminators of the subcontext; used to determine the learning mode below.
    Task.DISCRIMINATORS_KEY, new PropertiesAdapter()).getMap();
    String mode = getDiscriminatorValue(discriminatorsMap, DIM_LEARNING_MODE);
    // Merge each subcontext's id2outcome data into the overall outcome.
    File id2outcomeFile = getContext().getStorageService().getStorageFolder(subcontext.getId(),
            ID_OUTCOME_KEY);
    Id2Outcome id2outcome = new Id2Outcome(id2outcomeFile, mode);
    overallOutcome.add(id2outcome);
    prop.remove(key);
    getContext().storeBinary(Constants.DISCRIMINATORS_KEY_TEMP, new PropertiesAdapter(prop));
    // NOTE(review): the FileOutputStream is not closed in the visible span — verify a
    // close/try-with-resources exists in the full method.
    FileOutputStream fos = new FileOutputStream(new File(getContext().getStorageLocation(
            Constants.TEST_TASK_OUTPUT_KEY, AccessMode.READWRITE) + "/"
            + Constants.SERIALIZED_ID_OUTCOME_KEY));
// NOTE(review): fragment — the method signature, the loop over result keys, and the
// declarations of results/props/s/cMTable are not visible here.
throws Exception
    File storage = getContext().getStorageLocation(TestTask.OUTPUT_KEY, AccessMode.READONLY);
    // NOTE(review): the FileOutputStream is not closed in the visible span — verify a
    // close/try-with-resources exists in the full method.
    FileOutputStream fos = new FileOutputStream(new File(getContext().getStorageLocation(
            TestTask.OUTPUT_KEY, AccessMode.READWRITE) + "/" + PR_CURVE_KEY));
    // Log each result value and copy it into the properties to be stored.
    getContext().getLoggingService().message(getContextLabel(), s + " - " + results.get(s));
    props.setProperty(s, results.get(s).toString());
    getContext().storeBinary(TestTask.RESULTS_KEY, new PropertiesAdapter(props));
    getContext().storeBinary(ClassificationReport.CONFUSIONMATRIX_KEY, cMTable.getCsvWriter());
/**
 * Returns the discriminators of this task merged with the discriminators of all
 * prerequisite tasks resolved through the context's imports.
 *
 * @param aContext the context used to resolve imports and access storage.
 * @return the merged discriminator map.
 * @throws IllegalStateException if a prerequisite defines a conflicting value for a
 *             discriminator that is also present in the map being built.
 */
@Override
public Map<String, String> getResolvedDescriminators(TaskContext aContext)
{
    StorageService storageService = aContext.getStorageService();
    Map<String, String> descs = new HashMap<String, String>();
    descs.putAll(getDescriminators());

    // Load previous discriminators and check that they do not conflict with discriminators
    // defined in this task
    for (String rawUri : aContext.getMetadata().getImports().values()) {
        URI uri = URI.create(rawUri);
        // Static imports do not refer to another task context; nothing to merge.
        if (isStaticImport(uri)) {
            continue;
        }
        final TaskContextMetadata meta = aContext.resolve(uri);

        Map<String, String> prerequisiteDiscriminators = storageService.retrieveBinary(
                meta.getId(), DISCRIMINATORS_KEY, new PropertiesAdapter()).getMap();

        for (Entry<String, String> e : prerequisiteDiscriminators.entrySet()) {
            if (descs.containsKey(e.getKey()) && !descs.get(e.getKey()).equals(e.getValue())) {
                throw new IllegalStateException("Discriminator [" + e.getKey()
                        + "] in task [" + getType() + "] conflicts with dependency ["
                        + meta.getType() + "]");
            }
            descs.put(e.getKey(), e.getValue());
        }
    }
    return descs;
}
// NOTE(review): fragment — the surrounding try/catch and method signature are not
// visible in this span.
throw new IllegalStateException(e);
// Load the discriminators stored for the given context ID.
Map<String, String> discs = getContext().getStorageService()
        .retrieveBinary(aContextId, Task.DISCRIMINATORS_KEY, new PropertiesAdapter())
        .getMap();
// NOTE(review): fragment — the enclosing method, the fold loop, and the statements
// these trailing expressions belong to are not visible in this span.
// NOTE(review): paths are built with a hard-coded "/" separator throughout — works on
// POSIX; consider new File(parent, child) if Windows support matters.
File arffFile = new File(aContext.getStorageLocation(INPUT_KEY, AccessMode.READONLY)
        .getPath() + "/" + TRAINING_DATA_KEY);
aContext.message("Performing " + FOLDS + "-fold cross-validation (" + n + ", with "
        + Arrays.asList(classificationArguments) + ")");
// Per-fold evaluation data file ("#" in the key is replaced by the fold number).
.getStorageLocation(OUTPUT_KEY, AccessMode.READWRITE).getPath() + "/"
        + StringUtils.replace(EVALUATION_DATA_KEY, "#", String.valueOf(n)));
// Per-fold predictions file.
aContext.getStorageLocation(OUTPUT_KEY, AccessMode.READWRITE).getAbsolutePath() + "/"
        + StringUtils.replace(PREDICTIONS_KEY, "#", String.valueOf(n)), test);
// NOTE(review): fragment — the enclosing method is not visible in this span.
File file = new File(aContext.getStorageLocation(DUMMY_KEY, AccessMode.READWRITE)
        .getPath());
// NOTE(review): the mkdir() return value is ignored, so a failure to create the
// folder goes unnoticed — confirm whether getStorageLocation() already creates it.
file.mkdir();
// NOTE(review): fragment — the enclosing method and the loop/brace structure are not
// fully visible in this span.
log.info("== Running new configuration ["+aContext.getId()+"] ==");
// Snapshot the configuration keys before iterating.
List<String> keys = new ArrayList<String>(config.keySet());
for (String key : keys) {
Task task = queue.poll();
TaskExecutionService execService = aContext.getExecutionService();
/**
 * Execute the given task with the given task configuration.
 *
 * @param aContext
 *            the context of the current batch task.
 * @param aTask
 *            the task to be executed.
 * @param aConfig
 *            the current parameter configuration.
 * @param aScope
 *            the scope handed to the scoped context factory for this execution.
 * @return the context meta data.
 * @throws ExecutionException
 *             if the task execution fails.
 * @throws LifeCycleException
 *             if a lifecycle phase of the executed task fails.
 */
private TaskContextMetadata runNewExecution(TaskContext aContext, Task aTask,
        Map<String, Object> aConfig, Set<String> aScope)
    throws ExecutionException, LifeCycleException
{
    // Run the task through an engine whose context factory is scoped to the given
    // configuration and scope, then look up the resulting context by its ID.
    TaskExecutionService execService = aContext.getExecutionService();
    TaskExecutionEngine engine = execService.createEngine(aTask);
    engine.setContextFactory(new ScopedTaskContextFactory(execService
            .getContextFactory(), aConfig, aScope));
    String uuid = engine.run(aTask);
    return aContext.getStorageService().getContext(uuid);
}