SegmentStatusManager$ValidAndInvalidSegmentsInfo

How to use SegmentStatusManager$ValidAndInvalidSegmentsInfo in org.apache.carbondata.core.statusmanager

Best Java code snippets using org.apache.carbondata.core.statusmanager.SegmentStatusManager$ValidAndInvalidSegmentsInfo (Showing top 12 results out of 315)
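Before the indexed results, a minimal usage sketch of the canonical pattern: construct a SegmentStatusManager for the table and ask it to split the segments by status. The table path, database name, and table name below are placeholders, the AbsoluteTableIdentifier.from factory is assumed, and the import paths follow the 1.x DataMap-era package layout used by these snippets.

import java.io.IOException;
import java.util.List;

import org.apache.carbondata.core.datamap.Segment;
import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
import org.apache.carbondata.core.statusmanager.SegmentStatusManager;

public class SegmentInfoExample {
  public static void main(String[] args) throws IOException {
    // Hypothetical identifier: table path, database, and table name are placeholders.
    AbsoluteTableIdentifier identifier =
        AbsoluteTableIdentifier.from("/tmp/store/default/sample", "default", "sample");

    // Read the table status file and bucket the segments by their status.
    SegmentStatusManager.ValidAndInvalidSegmentsInfo info =
        new SegmentStatusManager(identifier).getValidAndInvalidSegments();

    List<Segment> validSegments = info.getValidSegments();     // readable segments
    List<Segment> invalidSegments = info.getInvalidSegments(); // deleted/compacted segments

    System.out.println("valid: " + validSegments.size()
        + ", invalid: " + invalidSegments.size());
  }
}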

origin: org.apache.carbondata/carbondata-hadoop

// Condensed from CarbonTableInputFormat.getSplits(): choose which valid, streaming,
// and in-progress segments a query should scan, and track invalid segments.
if (getValidateSegmentsToAccess(job.getConfiguration())) {
  if (!accessStreamingSegments) {
    List<Segment> validSegments = segments.getValidSegments();
    streamSegments = segments.getStreamSegments();
    streamSegments = getFilteredSegment(job, streamSegments, true, readCommittedScope);
    if (validSegments.size() == 0) {
      // No valid batch segments: answer the query from streaming segments alone.
      return getSplitsOfStreaming(job, streamSegments, carbonTable);
    }
    List<Segment> filteredSegmentToAccess =
        getFilteredSegment(job, segments.getValidSegments(), true, readCommittedScope);
    if (filteredSegmentToAccess.size() == 0) {
      return getSplitsOfStreaming(job, streamSegments, carbonTable);
    }
  } else {
    List<Segment> filteredNormalSegments =
        getFilteredNormalSegments(job, segments.getValidSegments(),
            getSegmentsToAccess(job, readCommittedScope));
    streamSegments = segments.getStreamSegments();
    if (filteredNormalSegments.size() == 0) {
      return getSplitsOfStreaming(job, streamSegments, carbonTable);
    }
  }
  // Remember invalid segments so their cached index entries can be cleared.
  invalidSegments.addAll(segments.getInvalidSegments());
  invalidTimestampsList.addAll(updateStatusManager.getInvalidTimestampRange());
  if (invalidSegments.size() > 0) {
    // ... clear cached index data for the invalid segments (truncated in the source snippet)
  }
}
// In-progress segments are added so aggregate-table loads see data still being written.
List<Segment> validAndInProgressSegments = new ArrayList<>(segments.getValidSegments());
validAndInProgressSegments.addAll(segments.getListOfInProgressSegments());
origin: org.apache.carbondata/carbondata-core

  // Tail of SegmentStatusManager.getValidAndInvalidSegments(): rethrow on failure,
  // otherwise wrap the collected segment lists in the info holder.
  throw e;
}
return new ValidAndInvalidSegmentsInfo(listOfValidSegments, listOfValidUpdatedSegments,
    listOfInvalidSegments, listOfStreamSegments, listOfInProgressSegments);
origin: org.apache.carbondata/carbondata-hadoop

Set<Segment> segmentSet = new HashSet<>(
    new SegmentStatusManager(carbonTable.getAbsoluteTableIdentifier(),
        context.getConfiguration()).getValidAndInvalidSegments().getValidSegments());
if (updateTime != null) {
  CarbonUpdateUtil.updateTableMetadataStatus(segmentSet, carbonTable, updateTime, true,
      new ArrayList<Segment>()); // trailing argument restored; the source snippet was cut off here
}
origin: org.apache.carbondata/carbondata-hadoop

List<Segment> filteredSegmentToAccess = // assignment target restored from context
    getFilteredSegment(job, allSegments.getValidSegments(), false, readCommittedScope);
origin: org.apache.carbondata/carbondata-core

SegmentStatusManager.ValidAndInvalidSegmentsInfo validAndInvalidSegmentsInfo =
    segmentStatusManager.getValidAndInvalidSegments();
List<Segment> validSegments = validAndInvalidSegmentsInfo.getValidSegments();
if (validSegments.isEmpty()) {
  // No segments yet: fall back to the configured default format version.
  return carbonProperties.getFormatVersion();
}
origin: org.apache.carbondata/carbondata-hadoop

List<Segment> validSegments =
  new SegmentStatusManager(table.getAbsoluteTableIdentifier())
    .getValidAndInvalidSegments().getValidSegments();
String uniqueId = String.valueOf(System.currentTimeMillis());
List<String> tobeUpdatedSegs = new ArrayList<>();
origin: org.apache.carbondata/carbondata-core

SegmentStatusManager.ValidAndInvalidSegmentsInfo validAndInvalidSegmentsInfo =
    getValidAndInvalidSegments(carbonTable, FileFactory.getConfiguration());
List<Segment> validSegments = validAndInvalidSegmentsInfo.getValidSegments();
List<Segment> invalidSegments = validAndInvalidSegmentsInfo.getInvalidSegments();
DataMapExprWrapper dataMapExprWrapper = null;
if (DataMapStoreManager.getInstance().getAllDataMap(carbonTable).size() > 0) {
  // ... build the datamap expression for pruning (truncated in the source snippet)
}
origin: org.apache.carbondata/carbondata-core

/**
 * Update the table status file with the dropped partitions information.
 *
 * @param carbonTable         table whose status file is updated
 * @param uniqueId            timestamp used as the update identifier
 * @param toBeUpdatedSegments segments whose metadata must be rewritten
 * @param toBeDeleteSegments  segments to mark as deleted
 * @param uuid                unique id for this operation
 * @throws IOException if the status file cannot be updated
 */
public static void commitDropPartitions(CarbonTable carbonTable, String uniqueId,
  List<String> toBeUpdatedSegments, List<String> toBeDeleteSegments,
  String uuid) throws IOException {
 if (toBeDeleteSegments.size() > 0 || toBeUpdatedSegments.size() > 0) {
  Set<Segment> segmentSet = new HashSet<>(
    new SegmentStatusManager(carbonTable.getAbsoluteTableIdentifier())
      .getValidAndInvalidSegments().getValidSegments());
  CarbonUpdateUtil.updateTableMetadataStatus(segmentSet, carbonTable, uniqueId, true,
    Segment.toSegmentList(toBeDeleteSegments, null),
    Segment.toSegmentList(toBeUpdatedSegments, null), uuid);
 }
}
origin: org.apache.carbondata/carbondata-core

/**
 * This method gets the dataMapJob and calls execute on that job; it is launched for
 * distributed CG or FG datamaps.
 * @return list of ExtendedBlocklets after pruning
 */
public static List<ExtendedBlocklet> executeDataMapJob(CarbonTable carbonTable,
  FilterResolverIntf resolver, List<Segment> validSegments,
  DataMapExprWrapper dataMapExprWrapper, DataMapJob dataMapJob,
  List<PartitionSpec> partitionsToPrune) throws IOException {
 String className = "org.apache.carbondata.core.datamap.DistributableDataMapFormat";
 SegmentStatusManager.ValidAndInvalidSegmentsInfo validAndInvalidSegmentsInfo =
   getValidAndInvalidSegments(carbonTable, validSegments.get(0).getConfiguration());
 List<Segment> invalidSegments = validAndInvalidSegmentsInfo.getInvalidSegments();
 DistributableDataMapFormat dataMapFormat =
   createDataMapJob(carbonTable, dataMapExprWrapper, validSegments, invalidSegments,
     partitionsToPrune, className, false);
 List<ExtendedBlocklet> prunedBlocklets = dataMapJob.execute(dataMapFormat, resolver);
 // Apply expression on the blocklets.
 prunedBlocklets = dataMapExprWrapper.pruneBlocklets(prunedBlocklets);
 return prunedBlocklets;
}
origin: org.apache.carbondata/carbondata-processing

/**
 * This method returns the valid segments attached to the table identifier.
 *
 * @param absoluteTableIdentifier identifier of the table to inspect
 * @return list of valid segments
 */
public static List<Segment> getValidSegmentList(AbsoluteTableIdentifier absoluteTableIdentifier)
    throws IOException {
  SegmentStatusManager.ValidAndInvalidSegmentsInfo validAndInvalidSegments = null;
  try {
    validAndInvalidSegments =
        new SegmentStatusManager(absoluteTableIdentifier).getValidAndInvalidSegments();
  } catch (IOException e) {
    LOGGER.error("Error while getting valid segment list for a table identifier");
    // Preserve the original failure instead of rethrowing an empty IOException.
    throw new IOException("Error while getting valid segment list", e);
  }
  return validAndInvalidSegments.getValidSegments();
}
origin: org.apache.carbondata/carbondata-lucene

/**
 * This method deletes the datamap folders when the datamap is dropped.
 * @throws MalformedDataMapCommandException if the datamap directory cannot be deleted
 */
private void deleteDatamap() throws MalformedDataMapCommandException {
 SegmentStatusManager ssm = new SegmentStatusManager(tableIdentifier);
 try {
  List<Segment> validSegments = ssm.getValidAndInvalidSegments().getValidSegments();
  for (Segment segment : validSegments) {
   deleteDatamapData(segment);
  }
 } catch (IOException | RuntimeException ex) {
  throw new MalformedDataMapCommandException(
    "drop datamap failed, failed to delete datamap directory");
 }
}
origin: org.apache.carbondata/carbondata-bloom

@Override
public void deleteDatamapData() {
 SegmentStatusManager ssm =
   new SegmentStatusManager(getCarbonTable().getAbsoluteTableIdentifier());
 try {
  List<Segment> validSegments = ssm.getValidAndInvalidSegments().getValidSegments();
  for (Segment segment : validSegments) {
   deleteDatamapData(segment);
  }
 } catch (IOException e) {
  LOGGER.error("drop datamap failed, failed to delete datamap directory");
 }
}

Most used methods

  • getValidSegments
  • getInvalidSegments
  • <init>
  • getListOfInProgressSegments
  • getStreamSegments
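A condensed sketch exercising the accessors listed above, including the streaming and in-progress buckets the first snippet relies on. The identifier is assumed to come from the same hypothetical AbsoluteTableIdentifier.from factory as in the opening sketch; only accessors shown in this page's snippets are used.

import java.io.IOException;
import java.util.List;

import org.apache.carbondata.core.datamap.Segment;
import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
import org.apache.carbondata.core.statusmanager.SegmentStatusManager;

public class SegmentBuckets {
  // Print how the table's segments are bucketed by status.
  static void printSegmentBuckets(AbsoluteTableIdentifier identifier) throws IOException {
    SegmentStatusManager.ValidAndInvalidSegmentsInfo info =
        new SegmentStatusManager(identifier).getValidAndInvalidSegments();

    // Streaming segments are planned separately from batch segments
    // (see getSplitsOfStreaming in the first snippet above).
    List<Segment> streamSegments = info.getStreamSegments();

    // In-progress segments can be appended to the valid list, as the
    // first snippet does for aggregate-table loads.
    List<Segment> inProgress = info.getListOfInProgressSegments();

    System.out.println("valid=" + info.getValidSegments().size()
        + " invalid=" + info.getInvalidSegments().size()
        + " stream=" + streamSegments.size()
        + " inProgress=" + inProgress.size());
  }
}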
