public ProvisionedThroughput unmarshall(JsonUnmarshallerContext context) throws Exception { ProvisionedThroughput provisionedThroughput = new ProvisionedThroughput();
/**
 * Creates a table "foo" with a single string hash key "Name", then verifies
 * that DescribeTable round-trips the name and primary-key attribute.
 *
 * @param client the DynamoDB client under test
 */
private void runTest(AmazonDynamoDB client) {
    CreateTableRequest request = new CreateTableRequest()
            .withAttributeDefinitions(new AttributeDefinition("Name", ScalarAttributeType.S))
            .withKeySchema(new KeySchemaElement("Name", KeyType.HASH))
            // Long literals autobox; the deprecated new Long(...) constructor is gone in
            // recent JDKs and was flagged for removal since Java 9.
            .withProvisionedThroughput(new ProvisionedThroughput(10L, 10L))
            .withTableName("foo");
    client.createTable(request);

    final TableDescription tableDescription = client.describeTable("foo").getTable();
    assertNotNull("the description is not null", tableDescription);
    assertEquals("the table has the right name", "foo", tableDescription.getTableName());
    assertEquals("the name has the right primary key", "Name",
            tableDescription.getKeySchema().get(0).getAttributeName());
}
}
/**
 * Creates the environment and environment-revision tables from their mapped record
 * classes, provisioning every table and generated GSI at 1000 RCU / 1000 WCU.
 */
public void createTables() {
    final ProvisionedThroughput capacity = new ProvisionedThroughput()
            .withReadCapacityUnits(1000L)
            .withWriteCapacityUnits(1000L);

    final CreateTableRequest environmentsRequest = dynamoDBMapper
            .generateCreateTableRequest(EnvironmentDDBRecord.class)
            .withProvisionedThroughput(capacity);
    // The mapper does not populate GSI throughput; apply the same capacity to each index.
    environmentsRequest.getGlobalSecondaryIndexes()
            .forEach(gsi -> gsi.withProvisionedThroughput(capacity));

    final CreateTableRequest revisionsRequest = dynamoDBMapper
            .generateCreateTableRequest(EnvironmentRevisionDDBRecord.class)
            .withProvisionedThroughput(capacity);

    amazonDynamoDB.createTable(environmentsRequest);
    amazonDynamoDB.createTable(revisionsRequest);
}
}
/**
 * Startup check for the user-storage table: if the table exists, its key schema and
 * attribute definitions must match the expected constants (otherwise fail fast);
 * if it does not exist, create it with 3 RCU / 3 WCU.
 *
 * @throws IllegalStateException if an existing table has an unexpected schema
 */
@PostConstruct
public void setup() {
    try {
        DescribeTableResult described = dynamoDBClient.describeTable(tableConfig.getTableName());
        boolean keySchemaMatches =
                described.getTable().getKeySchema().equals(PROJECT_KEYSCHEMA);
        boolean attributesMatch =
                ImmutableSet.copyOf(described.getTable().getAttributeDefinitions()).equals(ATTRIBUTES);
        if (!keySchemaMatches || !attributesMatch) {
            throw new IllegalStateException("Invalid schema for user storage dynamodb table. "
                    + "Please remove existing table or change dynamodb table.");
        }
    } catch (ResourceNotFoundException e) {
        // Table is missing entirely: provision it with the expected schema.
        dynamoDBClient.createTable(new CreateTableRequest()
                .withTableName(tableConfig.getTableName())
                .withKeySchema(PROJECT_KEYSCHEMA)
                .withAttributeDefinitions(ATTRIBUTES)
                .withProvisionedThroughput(new ProvisionedThroughput()
                        .withReadCapacityUnits(3L)
                        .withWriteCapacityUnits(3L)));
    }
}
/** Provisions the configured table with the project key schema at 1 RCU / 1 WCU. */
private void createTable() {
    final ProvisionedThroughput capacity = new ProvisionedThroughput()
            .withReadCapacityUnits(1L)
            .withWriteCapacityUnits(1L);
    final CreateTableRequest request = new CreateTableRequest()
            .withTableName(tableConfig.getTableName())
            .withKeySchema(PROJECT_KEYSCHEMA)
            .withAttributeDefinitions(ATTRIBUTES)
            .withProvisionedThroughput(capacity);
    dynamoDBClient.createTable(request);
}
/** Creates the configured table using the shared key schema/attributes, 1 RCU / 1 WCU. */
private void createTable() {
    CreateTableRequest request = new CreateTableRequest()
            .withTableName(tableConfig.getTableName())
            .withKeySchema(PROJECT_KEYSCHEMA)
            .withAttributeDefinitions(ATTRIBUTES)
            .withProvisionedThroughput(
                    new ProvisionedThroughput().withReadCapacityUnits(1L).withWriteCapacityUnits(1L));
    dynamoDBClient.createTable(request);
}
/**
 * Deserializes a {@code ProvisionedThroughput} from the context's JSON reader,
 * consuming ReadCapacityUnits/WriteCapacityUnits and skipping unknown members.
 *
 * @param context unmarshaller context positioned at the value to read
 * @return the populated object, or {@code null} when the value is not a JSON container
 * @throws Exception propagated from the underlying reader/unmarshallers
 */
public ProvisionedThroughput unmarshall(JsonUnmarshallerContext context) throws Exception {
    AwsJsonReader reader = context.getReader();
    if (!reader.isContainer()) {
        // Not an object/array: consume the token and report "no value".
        reader.skipValue();
        return null;
    }
    ProvisionedThroughput result = new ProvisionedThroughput();
    reader.beginObject();
    while (reader.hasNext()) {
        String memberName = reader.nextName();
        switch (memberName) {
            case "ReadCapacityUnits":
                result.setReadCapacityUnits(
                        LongJsonUnmarshaller.getInstance().unmarshall(context));
                break;
            case "WriteCapacityUnits":
                result.setWriteCapacityUnits(
                        LongJsonUnmarshaller.getInstance().unmarshall(context));
                break;
            default:
                reader.skipValue();
                break;
        }
    }
    reader.endObject();
    return result;
}
/**
 * Creates the configured table (shared key schema/attributes, 1 RCU / 1 WCU) with
 * a NEW_IMAGE stream enabled.
 */
private void createTable() {
    final StreamSpecification streamSpec = new StreamSpecification()
            .withStreamEnabled(true)
            .withStreamViewType(StreamViewType.NEW_IMAGE);
    final CreateTableRequest request = new CreateTableRequest()
            .withTableName(tableConfig.getTableName())
            .withKeySchema(PROJECT_KEYSCHEMA)
            .withAttributeDefinitions(ATTRIBUTES)
            .withStreamSpecification(streamSpec)
            .withProvisionedThroughput(new ProvisionedThroughput()
                    .withReadCapacityUnits(1L)
                    .withWriteCapacityUnits(1L));
    dynamoDBClient.createTable(request);
}
/**
 * When auto-creation is enabled, creates the backing table (single string hash key,
 * 5 RCU / 5 WCU) if it does not already exist; otherwise does nothing.
 *
 * @throws PersistenceException if the create-table call fails
 */
private void autoCreateTableIfNotExists() {
    if (!autoCreateTable) {
        return; // auto-creation disabled by configuration
    }
    final AttributeDefinition keyAttribute = new AttributeDefinition()
            .withAttributeName(partitionKeyName)
            .withAttributeType(ScalarAttributeType.S);
    final KeySchemaElement keyElement = new KeySchemaElement()
            .withAttributeName(partitionKeyName)
            .withKeyType(KeyType.HASH);
    final ProvisionedThroughput capacity = new ProvisionedThroughput()
            .withReadCapacityUnits(5L)
            .withWriteCapacityUnits(5L);
    try {
        TableUtils.createTableIfNotExists(dynamoDb, new CreateTableRequest()
                .withTableName(tableName)
                .withAttributeDefinitions(keyAttribute)
                .withKeySchema(keyElement)
                .withProvisionedThroughput(capacity));
    } catch (AmazonDynamoDBException e) {
        throw new PersistenceException("Create table request failed", e);
    }
}
/**
 * Wires the synchronizer with its DynamoDB client and mapper plus the entity2ddl
 * configuration: the DDL mode, the GSI projection type, and the provisioned
 * read/write capacity used for generated tables.
 */
@Autowired
public Entity2DynamoDBTableSynchronizer(AmazonDynamoDB amazonDynamoDB,
        DynamoDBMapper mapper,
        @Value(CONFIGURATION_KEY_entity2ddl_auto) String mode,
        @Value(CONFIGURATION_KEY_entity2ddl_gsiProjectionType) String gsiProjectionType,
        @Value(CONFIGURATION_KEY_entity2ddl_readCapacity) long readCapacity,
        @Value(CONFIGURATION_KEY_entity2ddl_writeCapacity) long writeCapacity) {
    this.amazonDynamoDB = amazonDynamoDB;
    this.mapper = mapper;
    // Parse the string configuration values into their enum counterparts up front.
    this.mode = Entity2DDL.fromValue(mode);
    this.gsiProjectionType = ProjectionType.fromValue(gsiProjectionType);
    this.pt = new ProvisionedThroughput(readCapacity, writeCapacity);
}
// Build and issue the CreateTable request for the topic table.
// Fixes applied: "thorughput" typo; KeySchemaElement has no withAttributeType
// (that builder belongs to AttributeDefinition) -- a key schema element takes a
// KeyType; the v1 SDK has no KeySchema wrapper class, so the element is passed to
// withKeySchema directly; and every key attribute must have a matching
// AttributeDefinition or CreateTable is rejected with a ValidationException.
ProvisionedThroughput throughput = new ProvisionedThroughput()
        .withReadCapacityUnits(10L)
        .withWriteCapacityUnits(5L);
KeySchemaElement schemaElement = new KeySchemaElement()
        .withAttributeName("Name")
        .withKeyType(KeyType.HASH);
CreateTableRequest createTableRequest = new CreateTableRequest()
        .withTableName(TOPIC_TABLE)
        .withProvisionedThroughput(throughput)
        .withKeySchema(schemaElement)
        .withAttributeDefinitions(new AttributeDefinition("Name", ScalarAttributeType.S));
client.createTable(createTableRequest);
/**
 * Returns the read and write capacity to provision when creating a new
 * DynamoDB table.
 *
 * @return The read and write capacity.
 */
public ProvisionedThroughput getProvisionedThroughput() {
    // Builder-style withers instead of the two-arg constructor; both set the
    // same ReadCapacityUnits/WriteCapacityUnits members.
    return new ProvisionedThroughput()
            .withReadCapacityUnits(readCapacity.get())
            .withWriteCapacityUnits(writeCapacity.get());
}
/**
 * Sets the provisioned throughput for the specified table
 *
 * @param tableName
 * @param readCapUnits
 * @param writeCapUnits
 */
// NOTE(review): ptDesc is constructed and then discarded, and tableName is never
// used -- as written this method has no effect. It presumably should submit an
// UpdateTable call with ptDesc for tableName; confirm the intended client/API
// before relying on this method.
public void setProvisionedThroughput(String tableName, long readCapUnits, long writeCapUnits) {
    ProvisionedThroughput ptDesc = new ProvisionedThroughput()
            .withReadCapacityUnits(readCapUnits).withWriteCapacityUnits(
                    writeCapUnits);
}
/**
 * Raises the example table's provisioned throughput to 6 RCU / 7 WCU via the
 * Document API and blocks until the table returns to ACTIVE. Failures are
 * reported on stderr rather than propagated.
 */
static void updateExampleTable() {
    Table table = dynamoDB.getTable(tableName);
    System.out.println("Modifying provisioned throughput for " + tableName);

    ProvisionedThroughput newCapacity = new ProvisionedThroughput()
            .withReadCapacityUnits(6L)
            .withWriteCapacityUnits(7L);
    try {
        table.updateTable(newCapacity);
        table.waitForActive();
    } catch (Exception e) {
        System.err.println("UpdateTable request failed for " + tableName);
        System.err.println(e.getMessage());
    }
}
/**
 * Ensures the transactions table exists with the expected attributes and key schema,
 * creating it with the given throughput when absent, and waits up to
 * {@code waitTimeSeconds} for it to become usable.
 *
 * @throws InterruptedException if interrupted while waiting for the table
 */
public static void verifyOrCreateTransactionTable(AmazonDynamoDB client,
        String tableName,
        long readCapacityUnits,
        long writeCapacityUnits,
        Long waitTimeSeconds) throws InterruptedException {
    ProvisionedThroughput throughput = new ProvisionedThroughput()
            .withReadCapacityUnits(readCapacityUnits)
            .withWriteCapacityUnits(writeCapacityUnits);
    TableHelper helper = new TableHelper(client);
    helper.verifyOrCreateTable(
            tableName,
            TRANSACTIONS_TABLE_ATTRIBUTES,
            TRANSACTIONS_TABLE_KEY_SCHEMA,
            null /* localIndexes */,
            throughput,
            waitTimeSeconds);
}
/**
 * Ensures the transaction-images table exists with the expected attributes and key
 * schema, creating it with the given throughput when absent, and waits up to
 * {@code waitTimeSeconds} for it to become usable.
 *
 * @throws InterruptedException if interrupted while waiting for the table
 */
public static void verifyOrCreateTransactionImagesTable(AmazonDynamoDB client,
        String tableName,
        long readCapacityUnits,
        long writeCapacityUnits,
        Long waitTimeSeconds) throws InterruptedException {
    ProvisionedThroughput throughput = new ProvisionedThroughput()
            .withReadCapacityUnits(readCapacityUnits)
            .withWriteCapacityUnits(writeCapacityUnits);
    new TableHelper(client).verifyOrCreateTable(
            tableName,
            TRANSACTION_IMAGES_TABLE_ATTRIBUTES,
            TRANSACTION_IMAGES_TABLE_KEY_SCHEMA,
            null /* localIndexes */,
            throughput,
            waitTimeSeconds);
}
/**
 * Creates the schemata for the DynamoDB table or tables each store requires.
 * Implementations should override and reuse this logic
 * @return a create table request appropriate for the schema of the selected implementation.
 */
public CreateTableRequest getTableSchema() {
    // Capacity values come from the client's per-table configuration.
    ProvisionedThroughput throughput = new ProvisionedThroughput(
            client.readCapacity(tableName), client.writeCapacity(tableName));
    return new CreateTableRequest()
            .withTableName(tableName)
            .withProvisionedThroughput(throughput);
}
/**
 * Updates the example table's provisioned throughput to 6 RCU / 7 WCU through the
 * low-level client and blocks until the table is available again.
 */
static void updateExampleTable() {
    UpdateTableRequest request = new UpdateTableRequest()
            .withTableName(tableName)
            .withProvisionedThroughput(new ProvisionedThroughput()
                    .withReadCapacityUnits(6L)
                    .withWriteCapacityUnits(7L));
    client.updateTable(request);
    waitForTableToBecomeAvailable(tableName);
}
/**
 * Creates a streams-enabled table with a numeric hash key "Id" (10 RCU / 10 WCU,
 * NEW_AND_OLD_IMAGES stream view) and blocks until the table is ACTIVE.
 *
 * @return the description of the active table
 * @throws InterruptedException if interrupted while waiting for ACTIVE
 */
public static TableDescription createTable(AmazonDynamoDB client, String tableName)
        throws InterruptedException {
    StreamSpecification streamSpec = new StreamSpecification()
            .withStreamEnabled(true)
            .withStreamViewType(StreamViewType.NEW_AND_OLD_IMAGES);
    CreateTableRequest request = new CreateTableRequest()
            .withTableName(tableName)
            .withKeySchema(new KeySchemaElement("Id", KeyType.HASH))
            .withAttributeDefinitions(new AttributeDefinition("Id", ScalarAttributeType.N))
            .withProvisionedThroughput(new ProvisionedThroughput(10L, 10L))
            .withStreamSpecification(streamSpec);
    return new DynamoDB(client).createTable(request).waitForActive();
}
/**
 * Creates a table keyed on the numeric hash attribute "Id" with a
 * NEW_AND_OLD_IMAGES stream and 10 RCU / 10 WCU, then waits for ACTIVE.
 *
 * @return the description of the active table
 * @throws InterruptedException if interrupted while waiting for ACTIVE
 */
public static TableDescription createTable(AmazonDynamoDB client, String tableName)
        throws InterruptedException {
    CreateTableRequest request = new CreateTableRequest()
            .withTableName(tableName)
            .withKeySchema(new KeySchemaElement("Id", KeyType.HASH))
            .withAttributeDefinitions(new AttributeDefinition("Id", ScalarAttributeType.N))
            .withProvisionedThroughput(new ProvisionedThroughput(10L, 10L))
            .withStreamSpecification(new StreamSpecification()
                    .withStreamEnabled(true)
                    .withStreamViewType(StreamViewType.NEW_AND_OLD_IMAGES));
    DynamoDB documentClient = new DynamoDB(client);
    Table created = documentClient.createTable(request);
    return created.waitForActive();
}