/**
 * Equality is based on the data center name only.  Two {@code DataCenter} objects are equal if they refer to
 * the same data center.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (!(o instanceof DataCenter)) {
        return false;
    }
    DataCenter that = (DataCenter) o;
    return _name.equals(that.getName());
}
/** Orders data centers alphabetically by name, consistent with name-based {@link #equals}. */
@Override
public int compareTo(DataCenter o) {
    String otherName = o.getName();
    return _name.compareTo(otherName);
}
/**
 * Equality is based on the data center name only.  Two {@code DataCenter} objects are equal if they refer to
 * the same data center.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    // Name comparison only; all other attributes are ignored.
    return o instanceof DataCenter
            && _name.equals(((DataCenter) o).getName());
}
/**
 * Returns the name of the replication fanout channel for the given data center and partition,
 * in the form {@code <prefix><dataCenterName>[<partition>]}.
 * <p>
 * Uses locale-independent string concatenation instead of {@code String.format}: the previous
 * {@code %d} conversion used the JVM default locale, which in digit-shaping locales renders the
 * partition number with non-ASCII digits, corrupting the channel name.
 *
 * @param dataCenter the data center the channel carries events for
 * @param partition  the fanout partition number
 */
public static String getReplicationFanoutChannel(DataCenter dataCenter, int partition) {
    return REPLICATION_FANOUT_PREFIX + dataCenter.getName() + "[" + partition + "]";
}
/** Compares data centers by name (natural String order), matching name-based equality. */
@Override
public int compareTo(DataCenter o) {
    return _name.compareTo(o.getName());
}
/**
 * Returns the name of the replication fanout channel for the given data center and partition,
 * in the form {@code <prefix><dataCenterName>[<partition>]}.
 * <p>
 * Uses locale-independent string concatenation instead of {@code String.format}: the previous
 * {@code %d} conversion used the JVM default locale, which in digit-shaping locales renders the
 * partition number with non-ASCII digits, corrupting the channel name.
 *
 * @param dataCenter the data center the channel carries events for
 * @param partition  the fanout partition number
 */
public static String getReplicationFanoutChannel(DataCenter dataCenter, int partition) {
    return REPLICATION_FANOUT_PREFIX + dataCenter.getName() + "[" + partition + "]";
}
/**
 * Logs an error if more than one data center in the collection is flagged as a system data
 * center.  Does not throw — misconfiguration is reported, not fatal.
 */
private static void verifySystemDataCenters(Collection<DataCenter> dataCenters) {
    Set<String> systemDataCenterNames = Sets.newTreeSet();
    for (DataCenter dc : dataCenters) {
        if (!dc.isSystem()) {
            continue;
        }
        systemDataCenterNames.add(dc.getName());
    }
    if (systemDataCenterNames.size() > 1) {
        _log.error("Multiple data centers are configured as system data centers: {}", systemDataCenterNames);
    }
}
/**
 * Deserializes every non-metadata entry in the JSON map into a {@code DataCenter}, keyed by
 * data center name.  Entries that fail to deserialize (null result) are skipped.
 */
private Map<String, DataCenter> deserializeAll(Map<String, Object> json) {
    ImmutableMap.Builder<String, DataCenter> result = ImmutableMap.builder();
    for (Map.Entry<String, Object> entry : json.entrySet()) {
        // Keys starting with "~" are reserved/metadata entries, not data center records.
        if (entry.getKey().startsWith("~")) {
            continue;
        }
        DataCenter dc = deserialize(entry);
        if (dc != null) {
            result.put(dc.getName(), dc);
        }
    }
    return result.build();
}
@Override
public String get() {
    // Whitespace in the local data center's name is replaced with '_' to keep the
    // resulting table name well-formed.
    String safeDataCenterName = dataCenters.getSelf().getName().replaceAll("\\s", "_");
    return format(JOBS_TABLE_NAME_FORMAT, safeDataCenterName);
}
};
/**
 * Builds an immutable name-to-{@code DataCenter} map from the JSON map, ignoring reserved
 * entries (keys beginning with "~") and entries that deserialize to null.
 */
private Map<String, DataCenter> deserializeAll(Map<String, Object> json) {
    ImmutableMap.Builder<String, DataCenter> byName = ImmutableMap.builder();
    for (Map.Entry<String, Object> entry : json.entrySet()) {
        boolean reserved = entry.getKey().startsWith("~");
        if (!reserved) {
            DataCenter dataCenter = deserialize(entry);
            if (dataCenter != null) {
                byName.put(dataCenter.getName(), dataCenter);
            }
        }
    }
    return byName.build();
}
/**
 * Collects the names of all data centers flagged as "system" and logs an error when there is
 * more than one.  A sorted set keeps the logged names in deterministic order.
 */
private static void verifySystemDataCenters(Collection<DataCenter> dataCenters) {
    Set<String> flaggedAsSystem = Sets.newTreeSet();
    for (DataCenter candidate : dataCenters) {
        if (candidate.isSystem()) {
            flaggedAsSystem.add(candidate.getName());
        }
    }
    boolean multipleSystemDataCenters = flaggedAsSystem.size() > 1;
    if (multipleSystemDataCenters) {
        _log.error("Multiple data centers are configured as system data centers: {}", flaggedAsSystem);
    }
}
@Override
public String get() {
    String dataCenterName = dataCenters.getSelf().getName();
    // Sanitize: each whitespace character becomes '_' before being formatted into the table name.
    return format(JOBS_TABLE_NAME_FORMAT, dataCenterName.replaceAll("\\s", "_"));
}
};
/**
 * Serializes a data center into a map entry keyed by its name, with an immutable attribute map
 * as the value.  Keyspaces are sorted so serialization is deterministic.
 */
private Map.Entry<String, Object> serialize(DataCenter dataCenter) {
    ImmutableMap<String, Object> attributes = ImmutableMap.<String, Object>builder()
            .put("cluster", _cluster)
            .put("serviceUri", dataCenter.getServiceUri().toString())
            .put("adminUri", dataCenter.getAdminUri().toString())
            .put("system", dataCenter.isSystem())
            .put("cassandraName", dataCenter.getCassandraName())
            .put("cassandraKeyspaces", sorted(dataCenter.getCassandraKeyspaces()))
            .build();
    return Maps.<String, Object>immutableEntry(dataCenter.getName(), attributes);
}
/**
 * Converts a {@code DataCenter} to its serialized form: an entry of (name, attribute map).
 * The attribute map mirrors the fields read back by {@code deserialize}.
 */
private Map.Entry<String, Object> serialize(DataCenter dataCenter) {
    ImmutableMap.Builder<String, Object> attrs = ImmutableMap.builder();
    attrs.put("cluster", _cluster);
    attrs.put("serviceUri", dataCenter.getServiceUri().toString());
    attrs.put("adminUri", dataCenter.getAdminUri().toString());
    attrs.put("system", dataCenter.isSystem());
    attrs.put("cassandraName", dataCenter.getCassandraName());
    attrs.put("cassandraKeyspaces", sorted(dataCenter.getCassandraKeyspaces()));
    return Maps.<String, Object>immutableEntry(dataCenter.getName(), attrs.build());
}
private void cleanupScan(String id) { // Remove this scan from the active set if (_activeScans.remove(id)) { notifyActiveScanCountChanged(); } try { // Remove the table snapshots set for this scan _dataTools.clearStashTokenRangeSnapshot(id); } catch (Exception e) { _log.error("Failed to clean up table set for scan {}", id, e); } try { // Delete the entry of the scan start time in Zookeeper. _compactionControlSource.deleteStashTime(id, _dataCenters.getSelf().getName()); } catch (Exception e) { _log.error("Failed to delete the stash time for scan {}", id, e); } }
public void cancel(String id) { _scanStatusDAO.setCanceled(id); // Notify the workflow the scan status was updated _scanWorkflow.scanStatusUpdated(id); try { // Delete the entry of the scan start time in Zookeeper. _compactionControlSource.deleteStashTime(id, _dataCenters.getSelf().getName()); } catch (Exception e) { _log.error("Failed to delete the stash time for scan {}", id, e); } } }
/**
 * Creates the inbound replication fanout for the given remote data center, reading events via
 * the supplied replication source and fanning them out from this (local) data center's channel.
 */
@Override
public Managed newInboundReplicationFanout(DataCenter dataCenter, ReplicationSource replicationSource) {
    // getSelf() is intentionally resolved inside the lambda, at event-source creation time.
    PartitionEventSourceSupplier eventSourceSupplier = partition -> {
        String channel = ChannelNames.getReplicationFanoutChannel(_dataCenters.getSelf(), partition);
        return new ReplicationEventSource(replicationSource, channel);
    };
    String fanoutName = "in-" + dataCenter.getName();
    return create(fanoutName, eventSourceSupplier, null, REMOTE_DC_SLEEP_WHEN_IDLE, _dataCenterFanoutPartitions);
}
/**
 * Builds a managed inbound fanout named {@code "in-<dataCenter>"} whose per-partition event
 * sources replicate from {@code replicationSource} onto the local data center's fanout channel.
 */
@Override
public Managed newInboundReplicationFanout(DataCenter dataCenter, ReplicationSource replicationSource) {
    PartitionEventSourceSupplier supplier = partition ->
            new ReplicationEventSource(
                    replicationSource,
                    // Local data center is looked up when each partition's source is created.
                    ChannelNames.getReplicationFanoutChannel(_dataCenters.getSelf(), partition));
    return create("in-" + dataCenter.getName(), supplier, null,
            REMOTE_DC_SLEEP_WHEN_IDLE, _dataCenterFanoutPartitions);
}
/**
 * Deletes all stash token range rows for the given stash run that belong to the local data
 * center, at LOCAL_QUORUM consistency.
 * NOTE(review): presumably invoked when a stash run completes or is cancelled — confirm with callers.
 */
public void clearTokenRanges(String stashId) {
    // Ensure the system table exists before issuing the delete.
    ensureStashTokenRangeTableExists();
    _placementCache.get(_systemTablePlacement)
            .getKeyspace()
            .getCqlSession()
            .execute(
                    QueryBuilder.delete()
                            .from(STASH_TOKEN_RANGE_TABLE)
                            // Rows are scoped by (stash id, data center); only this DC's rows are removed.
                            .where(QueryBuilder.eq(STASH_ID_COLUMN, stashId))
                            .and(QueryBuilder.eq(DATA_CENTER_COLUMN, _dataCenters.getSelf().getName()))
                            .setConsistencyLevel(ConsistencyLevel.LOCAL_QUORUM));
}
/**
 * Deletes all stash token range rows for the given stash run that belong to the local data
 * center, at LOCAL_QUORUM consistency.
 * NOTE(review): presumably invoked when a stash run completes or is cancelled — confirm with callers.
 */
public void clearTokenRanges(String stashId) {
    // Ensure the system table exists before issuing the delete.
    ensureStashTokenRangeTableExists();
    _placementCache.get(_systemTablePlacement)
            .getKeyspace()
            .getCqlSession()
            .execute(
                    QueryBuilder.delete()
                            .from(STASH_TOKEN_RANGE_TABLE)
                            // Rows are scoped by (stash id, data center); only this DC's rows are removed.
                            .where(QueryBuilder.eq(STASH_ID_COLUMN, stashId))
                            .and(QueryBuilder.eq(DATA_CENTER_COLUMN, _dataCenters.getSelf().getName()))
                            .setConsistencyLevel(ConsistencyLevel.LOCAL_QUORUM));
}