private Map<String, Object> getMap(FieldType mapValueType, String valueAsString) throws IOException { Map<String, Object> map = new HashMap<>(); JsonParser parser = jsonFactory.createParser(valueAsString); JsonToken t = parser.getCurrentToken(); if (t != JsonToken.START_OBJECT) { return null; } else { t = parser.nextToken(); } for (; t == JsonToken.FIELD_NAME; t = parser.nextToken()) { String key = parser.getCurrentName(); if (!parser.nextToken().isScalarValue()) { throw new JsonMappingException(String.format("Nested properties are not supported. ('%s' field)", mapValueType.name())); } map.put(key, getValue(mapValueType, parser)); } return map; } }
/**
 * Parses a JSON array string into an Avro {@link GenericData.Array} of scalars.
 *
 * @param arrayElementType the expected type of every element
 * @param valueAsString    JSON text expected to be an array of scalar values
 * @return the parsed Avro array, or {@code null} if the text is not a JSON array
 * @throws IOException          if the JSON is malformed
 * @throws JsonMappingException if any element is a nested object or array
 */
private GenericData.Array getArray(FieldType arrayElementType, String valueAsString)
        throws IOException {
    List<Object> objects = new ArrayList<>();
    // try-with-resources: the parser was previously never closed (resource leak).
    try (JsonParser parser = jsonFactory.createParser(valueAsString)) {
        // BUG FIX: getCurrentToken() on a fresh parser is always null; the parser
        // must be advanced with nextToken() first, otherwise this method always
        // returned null, even for valid JSON arrays.
        JsonToken t = parser.nextToken();
        if (t != JsonToken.START_ARRAY) {
            return null;
        }
        for (t = parser.nextToken(); t != JsonToken.END_ARRAY; t = parser.nextToken()) {
            if (!t.isScalarValue()) {
                throw new JsonMappingException(String.format("Nested properties are not supported. ('%s' field)", arrayElementType.name()));
            }
            objects.add(getValue(arrayElementType, parser));
        }
    }
    return new GenericData.Array(generateAvroSchema(arrayElementType), objects);
}
// Fragment of a FieldType -> SQL type switch; the enclosing method starts
// outside this view, so only the cases themselves are documented here.
// DATE and TIME map to their enum names verbatim; TIMESTAMP is mapped to the
// explicit "timestamp with time zone" type.
// NOTE(review): "timestamp with time zone" is PostgreSQL-style syntax —
// presumably this branch targets PostgreSQL; confirm against the full method.
case DATE: case TIME: return type.name(); case TIMESTAMP: return "timestamp with time zone";
/**
 * Increments a numeric user property by {@code value}, creating the column
 * and/or the user row on demand.
 *
 * @param conn     open JDBC connection (owned by the caller, not closed here)
 * @param project  project the user belongs to
 * @param userId   identifier of the user whose property is incremented
 * @param property name of the property (column) to increment
 * @param value    amount to add to the current value (missing treated as 0)
 * @throws SQLException   on database failure
 * @throws RakamException with BAD_REQUEST if the existing column is not numeric
 */
public void incrementProperty(Connection conn, String project, Object userId, String property, double value)
        throws SQLException {
    Map<String, FieldType> columns = createMissingColumns(project, userId,
            ImmutableList.of(new SimpleImmutableEntry<>(property, new DoubleNode(value))),
            new CommitConnection(conn));
    FieldType fieldType = columns.get(property);
    if (fieldType == null) {
        // Column does not exist yet; create it as a numeric column.
        // BUG FIX: previously execution fell through to fieldType.isNumeric()
        // and threw a NullPointerException right after creating the column.
        createColumn(project, userId, property, JsonHelper.numberNode(0));
    } else if (!fieldType.isNumeric()) {
        // (also fixed the garbled message "The property the is %s ...")
        throw new RakamException(String.format("The property type is %s and it can't be incremented.", fieldType.name()), BAD_REQUEST);
    }

    String tableRef = checkTableColumn(stripName(property, "table column"));
    ProjectCollection userTable = getUserTable(project, false);
    String table = checkProject(userTable.project, '"') + "." + checkCollection(userTable.collection);
    // try-with-resources: the Statement was previously never closed.
    try (Statement statement = conn.createStatement()) {
        // BUG FIX: the UPDATE had no WHERE clause, so it incremented the property
        // for EVERY user row in the table instead of just the target user.
        // NOTE(review): assumes the user table's primary-key column is "id" and
        // that userId renders as a valid SQL literal — confirm both; ideally this
        // should use a PreparedStatement to avoid injection via userId.
        int execute = statement.executeUpdate("update " + table + " set " + tableRef + " = " + value
                + " + coalesce(" + tableRef + ", 0) where id = " + userId);
        if (execute == 0) {
            // No row for this user yet: create one with the property set to value.
            create(project, userId, JsonHelper.jsonObject().put(property, value));
        }
    }
}
/**
 * Registers one field per {@link FieldType} and verifies that repeated reads
 * of the collection keep returning every registered field.
 */
@Test
public void testCollectionFieldsOrdering()
        throws Exception {
    getMetastore().createProject(PROJECT_NAME);

    // One SchemaField per FieldType, named after the type itself.
    ImmutableSet.Builder<SchemaField> fields = ImmutableSet.builder();
    for (FieldType type : FieldType.values()) {
        fields.add(new SchemaField(type.name(), type));
    }
    ImmutableSet<SchemaField> expected = fields.build();

    getMetastore().getOrCreateCollectionFields(PROJECT_NAME, "testcollection", expected);

    // Read the collection many times; every read must contain all fields.
    int attempt = 0;
    while (attempt < 100) {
        assertTrue(getMetastore().getCollection(PROJECT_NAME, "testcollection").containsAll(expected));
        attempt++;
    }
}
/**
 * Registers one field for every {@link FieldType} and verifies the metastore
 * reports all of them back for the collection.
 */
@Test
public void testAllSchemaTypes()
        throws Exception {
    getMetastore().createProject(PROJECT_NAME);

    // Build the full set of fields, one per supported type.
    ImmutableSet.Builder<SchemaField> fields = ImmutableSet.builder();
    for (FieldType type : FieldType.values()) {
        fields.add(new SchemaField(type.name(), type));
    }
    ImmutableSet<SchemaField> expected = fields.build();

    getMetastore().getOrCreateCollectionFields(PROJECT_NAME, "testcollection", expected);

    assertTrue(getMetastore().getCollection(PROJECT_NAME, "testcollection").containsAll(expected));
}
/**
 * Maps a {@link FieldType} to its SQL type name.
 * Scalar types map to fixed names; arrays and maps recurse on their element
 * and value types respectively.
 *
 * @param type the field type to convert
 * @return the SQL type name
 * @throws IllegalStateException if the type has no SQL equivalent
 */
public static String toSql(FieldType type) {
    switch (type) {
        case INTEGER:
            return "INT";
        case DECIMAL:
            return "DECIMAL";
        case LONG:
            return "BIGINT";
        case STRING:
            return "VARCHAR";
        case BINARY:
            return "VARBINARY";
        case BOOLEAN:
        case DATE:
        case TIME:
        case TIMESTAMP:
            // These enum names are valid SQL type names as-is.
            return type.name();
        case DOUBLE:
            return "DOUBLE";
        default:
            if (type.isArray()) {
                return "ARRAY<" + toSql(type.getArrayElementType()) + ">";
            }
            if (type.isMap()) {
                return "MAP<VARCHAR, " + toSql(type.getMapValueType()) + ">";
            }
            // BUG FIX: the old message ("sql type couldn't converted to fieldtype")
            // was ungrammatical and stated the conversion direction backwards.
            throw new IllegalStateException(String.format("Field type %s couldn't be converted to a SQL type", type));
    }
}
/**
 * Maps a {@link FieldType} to its SQL type name using Presto
 * {@code StandardTypes} constants where available.
 * Arrays and maps recurse on their element and value types respectively.
 *
 * @param type the field type to convert
 * @return the SQL type name
 * @throws IllegalStateException if the type has no SQL equivalent
 */
public static String toSql(FieldType type) {
    switch (type) {
        case LONG:
            return StandardTypes.BIGINT;
        case STRING:
            return StandardTypes.VARCHAR;
        case BINARY:
            return StandardTypes.VARBINARY;
        case DECIMAL:
        case INTEGER:
        case BOOLEAN:
        case DATE:
        case TIME:
        case DOUBLE:
        case TIMESTAMP:
            // These enum names are valid SQL type names as-is.
            return type.name();
        default:
            if (type.isArray()) {
                return "ARRAY<" + toSql(type.getArrayElementType()) + ">";
            }
            if (type.isMap()) {
                return "MAP<VARCHAR, " + toSql(type.getMapValueType()) + ">";
            }
            // BUG FIX: the old message ("sql type couldn't converted to fieldtype")
            // was ungrammatical and stated the conversion direction backwards.
            throw new IllegalStateException(String.format("Field type %s couldn't be converted to a SQL type", type));
    }
}
// Fragment of a DynamoDB item write; the enclosing method, builder chain, and
// try block all start outside this view, so the code is left untouched.
// Each item stores the field's id (range key), its collection, its name, and
// its type's enum name as string attributes.
// NOTE(review): the same .put chain appears twice verbatim with fields.add()
// between them — presumably a first attempt plus a retry guarded by the
// ConditionalCheckFailedException catch that opens at the end; confirm against
// the complete method before changing anything here.
.put("id", new AttributeValue(rangeKey)) .put("collection", new AttributeValue(collection)).put("name", new AttributeValue(newField.getName())) .put("type", new AttributeValue(newField.getType().name())).build()) ); fields.add(newField); .put("id", new AttributeValue(rangeKey)) .put("collection", new AttributeValue(collection)).put("name", new AttributeValue(newField.getName())) .put("type", new AttributeValue(newField.getType().name())).build()) ); } catch (ConditionalCheckFailedException e1) {
/**
 * Verifies that registering the same field twice is idempotent: the second
 * getOrCreateCollectionFields call is a no-op and the collection still
 * contains the "test" field.
 */
@Test
public void testDuplicateFields()
        throws Exception {
    getMetastore().createProject(PROJECT_NAME);

    // FIX: removed an ImmutableSet.Builder loop over FieldType.values() that was
    // built but never passed to anything in this test — dead code apparently
    // copied from a sibling test; it had no effect on what is asserted below.
    getMetastore().getOrCreateCollectionFields(PROJECT_NAME, "testcollection",
            ImmutableSet.of(new SchemaField("test", LONG)));
    getMetastore().getOrCreateCollectionFields(PROJECT_NAME, "testcollection",
            ImmutableSet.of(new SchemaField("test", LONG)));

    // ImmutableSet.of deduplicates the two equal SchemaFields, so this asserts
    // the collection contains the single "test"/LONG field.
    assertTrue(ImmutableSet.copyOf(getMetastore().getCollection(PROJECT_NAME, "testcollection")).containsAll(
            ImmutableSet.of(new SchemaField("test", LONG), new SchemaField("test", LONG))));
}