
How to use Document in com.github.cafdataprocessing.corepolicy.common

Best Java code snippets using com.github.cafdataprocessing.corepolicy.common.Document (Showing top 20 results out of 315)

origin: com.github.cafdataprocessing/worker-policy

private boolean shouldSkipPolicyBecauseAlreadyRunWithDiagnostics(com.github.cafdataprocessing.corepolicy.common.Document doc,
                                 Long policyId) {
  if (doc.getMetadata().containsKey(PolicyWorkerConstants.POLICYWORKER_FAILURE_POLICY + policyId)) {
    return true;
  }
  return false;
}
origin: com.github.cafdataprocessing/corepolicy-common

@Override
public void serialize(Document value, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException {
  jgen.writeStartObject();
  if(value.getMetadata()!=null){
    for(String key:value.getMetadata().keySet()){
      if(key.equalsIgnoreCase(DocumentFields.Reference)){
        jgen.writeFieldName(DocumentFields.Reference);
        jgen.writeString(value.getReference());
        continue;
      }
      Collection<String> values = value.getMetadata().get(key); // remainder reconstructed; the index truncates this snippet
      if(values.size() == 1) {
        jgen.writeFieldName(key);
        jgen.writeString(values.iterator().next());
      } else {
        jgen.writeArrayFieldStart(key);
        for(String v : values) jgen.writeString(v);
        jgen.writeEndArray();
      }
    }
  }
  if(value.getDocuments()!=null){
    jgen.writeArrayFieldStart("document");
    for(Document childDocument:value.getDocuments()){
      serialize(childDocument, jgen, provider);
    }
    jgen.writeEndArray();
  }
  jgen.writeEndObject();
}
origin: com.github.cafdataprocessing/corepolicy-common

public static String getReference(Document document){
  return getReference(document.getMetadata(), !document.getDocuments().isEmpty());
}
origin: com.github.cafdataprocessing/worker-policy

com.github.cafdataprocessing.worker.policy.shared.Document newDocument =
    new com.github.cafdataprocessing.worker.policy.shared.Document();
newDocument.setReference(document.getReference());
if (document.getMetadata() != null) {
  for (Map.Entry<String, String> entry : document.getMetadata().entries()) {
    newDocument.getMetadata().put(entry.getKey(), entry.getValue());
  }
}
if(document.getStreams() != null) {
  for (Map.Entry<String, InputStream> streamEntry : document.getStreams().entries()){
    if(streamEntry.getValue() == null || !(streamEntry.getValue() instanceof DataStoreAwareInputStream)){
      continue;
    }
    // (remaining stream handling truncated in the index)
  }
}
if (document.getDocuments() != null) {
  newDocument.setDocuments(document.getDocuments().stream().map(this::convert).collect(Collectors.toList()));
}
origin: com.github.cafdataprocessing/corepolicy-condition-engine

public DocumentUnderEvaluationImpl(Document document, ConditionEngineMetadata conditionEngineMetadata, ApiProperties apiProperties){
  this(conditionEngineMetadata, apiProperties);
  // we hold onto a list of  MetadataValues internally, so convert at this point so we do it only once.
  for( Map.Entry<String, String> entry : document.getMetadata().entries())
  {
    this.metadata.put( entry.getKey(), new MetadataValue(apiProperties, entry.getValue()));
  }
  // we hold onto a list of  MetadataValues internally, so convert at this point so we do it only once.
  for( Map.Entry<String, InputStream> entry : document.getStreams().entries())
  {
    this.streams.put( entry.getKey(), new MetadataValue(apiProperties, entry.getValue()));
  }
  this.addMetadataString(DocumentFields.ChildDocumentCount, String.valueOf(document.getDocuments().size()));
  final int depth = getDepth();
  this.documents = document.getDocuments().stream().
      map(d -> {
        DocumentUnderEvaluation documentUnderEvaluation = new DocumentUnderEvaluationImpl(d, conditionEngineMetadata, apiProperties);
        documentUnderEvaluation.addMetadataString( DocumentFields.ChildDocumentDepth, String.valueOf(depth+1));
        return documentUnderEvaluation;
      }).collect(Collectors.toList());
  setupPreevaluatedInformation(document);
}
origin: com.github.cafdataprocessing/corepolicy-common

for (Document document : documents) {
  try {
    document.close();
  } catch (Exception e) {
    logger.warn("Could not close document", e);
  }
}
origin: com.github.cafdataprocessing/worker-policy

public static void reapplyTemporaryWorkingData(com.github.cafdataprocessing.corepolicy.common.Document document,
                        Multimap<String, String> temporaryData )
{
  Multimap<String, String> docData = document.getMetadata();
  for( String propName : temporaryData.keySet() )
  {
    docData.putAll( propName, temporaryData.get(propName));
  }
}
origin: com.github.cafdataprocessing/worker-policy-binary-hash-worker-handler

  private String getStorageReference(Document document)
  {
    Collection<String> storageReferences = document.getMetadata().get("storageReference");
    if (storageReferences.size() != 1) {
      throw new RuntimeException("No storageReference set");
    }
    return storageReferences.stream().findAny().get();
  }
}
origin: com.github.cafdataprocessing/worker-policy

public final static List<Long> getCollectionSequencesAlreadyStarted( com.github.cafdataprocessing.corepolicy.common.Document document ){
  Multimap<String, String> metadata = document.getMetadata();
  // get ids from property = "POLICYWORKER_COLLECTIONSEQUENCE" - mv field of sequenceids.
  return getIdsFromMetadataPropertyStringMap(metadata, ApiStrings.POLICYWORKER_COLLECTION_SEQUENCE);
}
origin: com.github.cafdataprocessing/worker-policy

private static Collection<Long> getPoliciesYetToBeExecuted(com.github.cafdataprocessing.corepolicy.common.Document document,
                              Long collectionSequenceId, Collection<Long> resolvedPolicies) {
  Collection<Long> policiesExecutedAlready =
      DocumentFields.getPoliciesAlreadyExecutedForStringMap(collectionSequenceId, document.getMetadata());
  Collection<Long> policiesYetToBeExecuted = new ArrayList<>();
  policiesYetToBeExecuted.addAll(resolvedPolicies);
  policiesYetToBeExecuted.removeAll(policiesExecutedAlready);
  return policiesYetToBeExecuted;
}
origin: com.github.cafdataprocessing/corepolicy-api

@Override
public Long extract(Document document) throws Exception {
  Optional<String> collectionSequenceIdFieldValue = document.getMetadata().get(DocumentFields.CollectionSequence).stream().findFirst();
  if(!collectionSequenceIdFieldValue.isPresent()){
    throw new Exception("No " + DocumentFields.CollectionSequence + " field");
  }
  return getCollectionSequenceId(collectionSequenceIdFieldValue.get());
}
origin: com.github.cafdataprocessing/worker-policy

public final static List<Long> getCollectionSequencesAlreadyCompleted( com.github.cafdataprocessing.corepolicy.common.Document document ){
  Multimap<String, String> metadata = document.getMetadata();
  // get ids from property = "POLICYWORKER_COLLECTIONSEQUENCE_COMPLETED" - mv field of sequenceids.
  return getIdsFromMetadataPropertyStringMap(metadata, ApiStrings.POLICYWORKER_COLLECTION_SEQUENCES_COMPLETED);
}
origin: com.github.cafdataprocessing/corepolicy-common

public static void setReference(Document document, String reference){
  setReference(document.getMetadata(), reference);
}
origin: com.github.cafdataprocessing/worker-policy

/**
 * Add the collection sequence id once all resolved policies for its classification have finished executing.
 * @param document the document whose metadata records the completed sequences
 * @param collectionSequenceId the collection sequence that has just completed
 */
public final static void addCollectionSequenceCompletedInfo( com.github.cafdataprocessing.corepolicy.common.Document document, Long collectionSequenceId )
{
  Multimap<String, String> metadata = document.getMetadata();
  // get all completed collection sequences - "POLICYWORKER_COLLECTION_SEQUENCES_COMPLETED" - mv field
  List<Long> csIds = getIdsFromMetadataPropertyStringMap(metadata, ApiStrings.POLICYWORKER_COLLECTION_SEQUENCES_COMPLETED);
  // if the specified sequence already exists in this list, just return, we have nothing to do.
  if ( csIds.contains( collectionSequenceId ))
    return;
  // add on the collection sequence id onto the mv list.
  metadata.put(ApiStrings.POLICYWORKER_COLLECTION_SEQUENCES_COMPLETED, collectionSequenceId.toString() );
}
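
getCollectionSequencesAlreadyStarted, getCollectionSequencesAlreadyCompleted and addCollectionSequenceCompletedInfo all treat a single multi-value metadata field as a list of sequence ids. A minimal sketch of that pattern, assuming Guava's ArrayListMultimap as the metadata backing and using the literal field name from the comments above in place of the ApiStrings constant:

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import java.util.List;
import java.util.stream.Collectors;

public class SequenceTrackingSketch {
  public static void main(String[] args) {
    Multimap<String, String> metadata = ArrayListMultimap.create();

    // record two completed sequences in the multi-value field
    metadata.put("POLICYWORKER_COLLECTION_SEQUENCES_COMPLETED", "1");
    metadata.put("POLICYWORKER_COLLECTION_SEQUENCES_COMPLETED", "2");

    // read them back as ids, roughly what getIdsFromMetadataPropertyStringMap does
    List<Long> completed = metadata.get("POLICYWORKER_COLLECTION_SEQUENCES_COMPLETED").stream()
        .map(Long::valueOf)
        .collect(Collectors.toList());
    System.out.println(completed); // [1, 2]
  }
}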
origin: com.github.cafdataprocessing/worker-policy

/**
 * Removes fields that are added to track working progress through Collection Sequence execution.
 * @param document The Document to remove temporary working data from.
 * @return The temporary data that was removed.
 */
public static Multimap<String, String> removeTemporaryWorkingData(com.github.cafdataprocessing.corepolicy.common.Document document )
{
  // using same map type as the used inside our document object.
  Multimap<String, String> temporaryData = ArrayListMultimap.create();
  Multimap<String, String> docData = document.getMetadata();
  getListOfKnownTemporaryData(docData).stream().filter(propName -> docData.containsKey(propName)).forEach(propName -> {
    temporaryData.putAll(propName, docData.get(propName));
    docData.removeAll(propName);
  });
  return temporaryData;
}
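
removeTemporaryWorkingData pairs with reapplyTemporaryWorkingData (shown earlier on this page): the tracking fields are lifted out of the metadata before a document is handed off and put back afterwards. Below is a round-trip sketch on a bare Guava multimap, using a literal field name from the comments above; the real helpers operate on document.getMetadata() and discover the field names via getListOfKnownTemporaryData.

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

public class TemporaryWorkingDataSketch {
  public static void main(String[] args) {
    Multimap<String, String> docData = ArrayListMultimap.create();
    docData.put("POLICYWORKER_COLLECTIONSEQUENCE", "42"); // temporary tracking field
    docData.put("CONTENT", "some document text");         // ordinary metadata

    // remove: copy the temporary field out, then strip it from the document metadata
    Multimap<String, String> temporaryData = ArrayListMultimap.create();
    temporaryData.putAll("POLICYWORKER_COLLECTIONSEQUENCE", docData.get("POLICYWORKER_COLLECTIONSEQUENCE"));
    docData.removeAll("POLICYWORKER_COLLECTIONSEQUENCE");

    // reapply: put every saved key back, as reapplyTemporaryWorkingData does
    for (String propName : temporaryData.keySet()) {
      docData.putAll(propName, temporaryData.get(propName));
    }
    System.out.println(docData); // tracking field restored alongside CONTENT
  }
}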
origin: com.github.cafdataprocessing/corepolicy-common

/**
 * REMOVES any existing temporary (e.g. validation hash) metadata fields and adds new ones representing
 * the supplied condition engine result.
 * @param conditionEngineResult The evaluation result to embed in the document's metadata.
 * @param document The document whose metadata should be updated.
 */
public void applyTemporaryMetadataToDocument( ConditionEngineResult conditionEngineResult, Document document ){
  try {
    //Lets remove all the field values
    document.getMetadata().removeAll(DocumentFields.EvaluationInformationBlob);
    document.getMetadata().removeAll(DocumentFields.MetadataHash);
    //now lets get new values and add them to the document
    String sourceJson = mapper.writeValueAsString(conditionEngineResult);
    String compressedBlob = ZipUtils.compressStringAndEncode(sourceJson);
    String securityHash = generateSecurityHash(conditionEngineResult);
    document.getMetadata().put(DocumentFields.EvaluationInformationBlob, compressedBlob);
    document.getMetadata().put(DocumentFields.MetadataHash, securityHash);
  } catch (IOException e) {
    CpeException cpeException = new BackEndRequestFailedCpeException(BackEndRequestFailedErrors.UnableToAddTemporaryMetadata,e );
    logger.error("Error adding temporary evaluation metadata to a document.", cpeException);
    throw cpeException;
  }
}
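
ZipUtils.compressStringAndEncode and generateSecurityHash are project-specific helpers not shown on this page. Purely as an assumption about the shape of the first one, a stand-in using only the JDK (gzip the UTF-8 bytes, then Base64-encode the result) might look like this; the real implementation may use a different scheme:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.zip.GZIPOutputStream;

final class ZipUtilsSketch {
  // Hypothetical equivalent of ZipUtils.compressStringAndEncode (assumption, not the real code).
  static String compressStringAndEncode(String source) throws IOException {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    try (GZIPOutputStream gzip = new GZIPOutputStream(bytes)) {
      gzip.write(source.getBytes(StandardCharsets.UTF_8));
    }
    return Base64.getEncoder().encodeToString(bytes.toByteArray());
  }
}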
origin: com.github.cafdataprocessing/worker-policy

  protected void applyFieldActions(Document document, Collection<FieldAction> fieldActions) {
    for (FieldAction fieldAction : fieldActions) {

      FieldAction.Action action = fieldAction.getAction();

      // N.B. Weird javac behaviour whereby if a switch is used below, two ProcessDocument class files are
      // generated, one called ProcessDocument$1.class and one called ProcessDocument.class, which stops us from
      // correctly running javah on this class.
      if ( action == FieldAction.Action.SET_FIELD_VALUE ){
        document.getMetadata().get(fieldAction.getFieldName()).clear();
        document.getMetadata().put(fieldAction.getFieldName(), fieldAction.getFieldValue());
      }
      else if ( action == FieldAction.Action.ADD_FIELD_VALUE ) {
        document.getMetadata().put(fieldAction.getFieldName(), fieldAction.getFieldValue());
      }
    }
  }
}
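
The two branches above implement different semantics on the same metadata multimap: SET_FIELD_VALUE clears the existing values for the field before writing (Guava's Multimap.get returns a live view, so clear() removes the entries), while ADD_FIELD_VALUE simply appends another value. A small sketch of the difference, assuming Guava's ArrayListMultimap:

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

public class FieldActionSemanticsSketch {
  public static void main(String[] args) {
    Multimap<String, String> metadata = ArrayListMultimap.create();
    metadata.put("TITLE", "original");

    // ADD_FIELD_VALUE: append another value to the multi-value field
    metadata.put("TITLE", "added");
    System.out.println(metadata.get("TITLE")); // [original, added]

    // SET_FIELD_VALUE: clear the live view first, then put the replacement value
    metadata.get("TITLE").clear();
    metadata.put("TITLE", "replacement");
    System.out.println(metadata.get("TITLE")); // [replacement]
  }
}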
origin: com.github.cafdataprocessing/corepolicy-condition-engine

  private void applyFieldActions(Document document, Collection<FieldAction> fieldActions) {
    for (FieldAction fieldAction : fieldActions) {

      FieldAction.Action action = fieldAction.getAction();

      // N.B. Weird javac behaviour whereby if a switch is used below, two ProcessDocument class files are
      // generated, one called ProcessDocument$1.class and one called ProcessDocument.class, which stops us from
      // correctly running javah on this class.
      if ( action == FieldAction.Action.SET_FIELD_VALUE ){
        document.getMetadata().get(fieldAction.getFieldName()).clear();
        document.getMetadata().put(fieldAction.getFieldName(), fieldAction.getFieldValue());
      }
      else if ( action == FieldAction.Action.ADD_FIELD_VALUE ) {
        document.getMetadata().put(fieldAction.getFieldName(), fieldAction.getFieldValue());
      }
    }
  }
}
origin: com.github.cafdataprocessing/corepolicy-condition-engine

private void setupPreevaluatedInformation(Document document) {
  // Check if we have any supplied metadata which has been evaluated before.
  ConditionEngineResult result = conditionEngineMetadata.createResult(document.getMetadata());
  applyEvaluationInfo(result, this);
}
origin: com.github.cafdataprocessing/worker-policy-boilerplate-worker-handler

private Multimap<String, ReferencedData> getRequestedFieldsData(Collection<String> fieldsRequested, Document document) {
  //gather source data fields for Extract task
  Multimap<String, ReferencedData> sourceData = ArrayListMultimap.create();
  //add metadata fields to source data (filter to list provided on policy definition if required)
  for (Map.Entry<String, String> metadata : document.getMetadata().entries()) {
    String fieldName = metadata.getKey();
    if (fieldsRequested.contains(fieldName)) {
      sourceData.put(fieldName, ReferencedData.getWrappedData(metadata.getValue().getBytes()));
    }
  }
  WorkerResponseHolder workerResponseHolder = applicationContext.getBean(WorkerResponseHolder.class);
  com.github.cafdataprocessing.worker.policy.shared.Document taskDataDocumentToClassify = workerResponseHolder.getTaskData().getDocument();
  //add metadata reference fields to source data (filter to list provided on policy definition if required)
  for (Map.Entry<String, ReferencedData> metadataReference : taskDataDocumentToClassify.getMetadataReferences().entries()) {
    String fieldName = metadataReference.getKey();
    if (fieldsRequested.contains(fieldName)) {
      sourceData.put(metadataReference.getKey(), metadataReference.getValue());
    }
  }
  return sourceData;
}
com.github.cafdataprocessing.corepolicy.common.Document

Javadoc

A document to be collated against a collection sequence and its contained collections and conditions.

Most used methods

  • getMetadata
    The metadata for the document; implementations should use case-insensitive keys (see the sketch after this list).
  • getDocuments
    Child documents of the document to be collated; each child document should have a "reference" field.
  • getReference
  • getStreams
    The streams available for the document; implementations should use case-insensitive keys.
  • close
  • getFullMetadata
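
A minimal sketch pulling the most used methods together: a hypothetical printTree helper (not part of the API) that walks a Document and its child documents, printing each reference and its metadata entries. It assumes only the methods listed above and Guava's Multimap entries() view; String.repeat requires Java 11+.

import com.github.cafdataprocessing.corepolicy.common.Document;
import java.util.Map;

public class DocumentTreeSketch {
  // Hypothetical helper for illustration; not part of the corepolicy API.
  static void printTree(Document document, int depth) {
    String indent = "  ".repeat(depth);
    System.out.println(indent + document.getReference());
    for (Map.Entry<String, String> entry : document.getMetadata().entries()) {
      System.out.println(indent + "  " + entry.getKey() + " = " + entry.getValue());
    }
    for (Document child : document.getDocuments()) {
      printTree(child, depth + 1);
    }
  }
}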
