/**
 * Creates an exception signaling that the record type with the given id (and
 * optionally the given version) could not be found.
 *
 * @param id schema id of the missing record type, may be null
 * @param version requested version of the record type, may be null
 */
public RecordTypeNotFoundException(SchemaId id, Long version) {
    this.id = (id == null) ? null : id.toString();
    this.name = null;
    this.version = version;
}
/**
 * Creates an exception signaling that the field type with the given id could
 * not be found.
 *
 * @param id schema id of the missing field type, may be null
 */
public FieldTypeNotFoundException(SchemaId id) {
    this.id = (id == null) ? null : id.toString();
}
/**
 * Builds an HBase filter matching records whose non-versioned record type id
 * column equals the given schema id.
 */
private Filter createRecordTypeFilter(SchemaId schemaId) {
    byte[] recordTypeIdBytes = schemaId.getBytes();
    return new SingleColumnValueFilter(RecordCf.DATA.bytes, RecordColumn.NON_VERSIONED_RT_ID.bytes,
            CompareFilter.CompareOp.EQUAL, recordTypeIdBytes);
}
}
/**
 * Returns a human-readable message identifying the missing field, preferring
 * the field name over the field id when a name is available.
 */
@Override
public String getMessage() {
    String fieldRef = (fieldName == null) ? fieldId.toString() : fieldName.toString();
    return "Field '" + fieldRef + "' could not be found.";
}
}
/**
 * Returns the HBase column qualifier for this entry: the DATA_PREFIX byte
 * followed by the id's byte representation. Computed lazily and cached after
 * the first call.
 */
public byte[] getQualifier() {
    if (idQualifier == null) {
        byte[] prefix = new byte[]{RecordColumn.DATA_PREFIX};
        this.idQualifier = Bytes.add(prefix, id.getBytes());
    }
    return idQualifier;
}
/**
 * Recursively collects subtypes of the given record type into {@code result}.
 *
 * NOTE(review): the local {@code subtypes} is populated from
 * {@code getSupertypes()} — presumably the supertype links are being walked to
 * derive subtype relations, or an inverted mapping is at play; confirm against
 * the {@code getRecordTypeById} contract.
 *
 * @param recordTypeId the type whose subtypes are collected
 * @param parents stack of types currently being expanded, used for cycle detection
 * @param recursive if true, descend transitively; otherwise only one level deep
 */
private void collectSubTypes(SchemaId recordTypeId, Set<SchemaId> result, Deque<SchemaId> parents, boolean recursive) throws InterruptedException, RepositoryException { // the parent-stack is to protect against endless loops in the type hierarchy. If a type is a subtype // of itself, it will not be included in the result. Thus if record type A extends (directly or indirectly) // from A, and we search the subtypes of A, then the resulting set will not include A. // NOTE(review): the comment above claims silent exclusion on self-reference, but the else-branch below actually throws — confirm which behavior is intended. parents.push(recordTypeId); Set<SchemaId> subtypes = getRecordTypeById(recordTypeId, null).getSupertypes().keySet(); for (SchemaId subtype : subtypes) { if (!parents.contains(subtype)) { result.add(subtype); if (recursive) { collectSubTypes(subtype, result, parents, recursive); } } else { // Loop detected in type hierarchy throw new RepositoryException( "Error while refreshing subtypes of record type " + recordTypeId.toString()); } } parents.pop(); }
/**
 * Serializes a collection of field ids into a single byte array. All schema ids
 * share the same fixed byte length, so the serialized form is simply the
 * concatenation of every id's raw bytes, in iteration order.
 *
 * @param fields field ids to serialize
 * @return byte array containing the concatenated byte representations of the ids
 */
byte[] serializeFields(Collection<SchemaId> fields) {
    final byte[] result = new byte[SCHEMA_ID_BYTE_LENGTH * fields.size()];
    int offset = 0;
    for (SchemaId field : fields) {
        final byte[] idBytes = field.getBytes();
        assert idBytes.length == SCHEMA_ID_BYTE_LENGTH;
        System.arraycopy(idBytes, 0, result, offset, SCHEMA_ID_BYTE_LENGTH);
        offset += SCHEMA_ID_BYTE_LENGTH;
    }
    return result;
}
/**
 * Tries to look up the name of the record type, falling back to the id's string
 * form if the lookup fails for any reason.
 */
private String getNameSafe(SchemaId schemaId) {
    try {
        return getRecordTypeById(schemaId, null).getName().toString();
    } catch (Exception ignored) {
        // Best-effort: any lookup failure degrades gracefully to the raw id.
        return schemaId.toString();
    }
}
private Long putSupertypeOnRecordType(Long recordTypeVersion, Put put, SchemaId supertypeId, Long supertypeVersion) throws TypeException { // when specifying a version, this returns only a single result RecordType recordType = getRecordTypeByIdWithoutCache(supertypeId, supertypeVersion).get(0); Long newSupertypeVersion = recordType.getVersion(); put.add(TypeCf.SUPERTYPE.bytes, supertypeId.getBytes(), recordTypeVersion, Bytes.toBytes(newSupertypeVersion)); return newSupertypeVersion; }
indent += 2; for (SchemaId id : failedFieldTypes) { println(out, indent, id.toString());
/**
 * Serializes this object: the superclass state first, then the queried field
 * ids (a count of -1 marks an absent list; otherwise the count followed by each
 * id as length-prefixed bytes), then the dependency record variant properties
 * as UTF key/value pairs.
 */
@Override
public void write(DataOutput out) throws IOException {
    super.write(out);
    if (queriedFields != null) {
        out.writeInt(queriedFields.size());
        for (SchemaId fieldId : queriedFields) {
            byte[] idBytes = fieldId.getBytes();
            out.writeInt(idBytes.length);
            out.write(idBytes);
        }
    } else {
        // -1 distinguishes "no list" from an empty list on deserialization.
        out.writeInt(-1);
    }
    out.writeInt(dependencyRecordVariantProperties.size());
    for (Map.Entry<String, String> property : dependencyRecordVariantProperties.entrySet()) {
        out.writeUTF(property.getKey());
        out.writeUTF(property.getValue());
    }
}
/**
 * Converts a record type to its JSON representation: id, optional name, field
 * entries (id + mandatory flag), version, and supertypes (id + version).
 *
 * @param includeName whether to emit the "name" property
 * @return JSON object describing the record type
 */
public static ObjectNode toJson(RecordType recordType, WriteOptions options, Namespaces namespaces,
        boolean includeName) {
    ObjectNode rtNode = JsonNodeFactory.instance.objectNode();
    rtNode.put("id", recordType.getId().toString());
    if (includeName) {
        rtNode.put("name", QNameConverter.toJson(recordType.getName(), namespaces));
    }
    ArrayNode fieldsNode = rtNode.putArray("fields");
    for (FieldTypeEntry fieldTypeEntry : recordType.getFieldTypeEntries()) {
        ObjectNode fieldEntryNode = fieldsNode.addObject();
        fieldEntryNode.put("id", fieldTypeEntry.getFieldTypeId().toString());
        fieldEntryNode.put("mandatory", fieldTypeEntry.isMandatory());
    }
    rtNode.put("version", recordType.getVersion());
    ArrayNode supertypesNode = rtNode.putArray("supertypes");
    for (Map.Entry<SchemaId, Long> supertype : recordType.getSupertypes().entrySet()) {
        ObjectNode supertypeNode = supertypesNode.addObject();
        supertypeNode.put("id", supertype.getKey().toString());
        supertypeNode.put("version", supertype.getValue());
    }
    return rtNode;
}
/**
 * Gets a record type by id and (optional) version from the cache buckets.
 * The bucket is keyed by the hex encoding of the schema id's bytes.
 *
 * @return the cached record type, or null when no bucket exists for this id
 *         or the version lookup yields nothing
 */
public RecordType getRecordType(SchemaId id, Long version) {
    String bucketKey = AbstractSchemaCache.encodeHex(id.getBytes());
    Map<SchemaId, Map<Long, RecordType>> bucket = buckets.get(bucketKey);
    if (bucket != null) {
        return getRecordTypeWithVersion(bucket.get(id), version);
    }
    return null;
}
/**
 * Converts a field type to its JSON representation: id, optional name, scope,
 * and value type.
 *
 * @param fieldType field type to serialize
 * @param namespaces namespace context used to compact qualified names
 * @param includeName whether to emit the "name" property
 * @return JSON object describing the field type
 */
public static ObjectNode toJson(FieldType fieldType, Namespaces namespaces, boolean includeName) {
    ObjectNode fieldNode = JsonNodeFactory.instance.objectNode();
    fieldNode.put("id", fieldType.getId().toString());
    if (includeName) {
        fieldNode.put("name", QNameConverter.toJson(fieldType.getName(), namespaces));
    }
    // Use a fixed locale so the serialized scope string is stable regardless of the
    // JVM's default locale (e.g. the Turkish locale maps 'I' to dotless 'ı').
    fieldNode.put("scope", fieldType.getScope().toString().toLowerCase(java.util.Locale.ROOT));
    fieldNode.put("valueType", ValueTypeNSConverter.toJson(fieldType.getValueType().getName(), namespaces));
    return fieldNode;
}
}
/**
 * Diffs the requested supertypes against the latest stored ones and writes the
 * resulting SUPERTYPE column mutations (new/changed versions, plus delete
 * markers for dropped supertypes) into the put.
 *
 * NOTE(review): this removes entries from the map returned by
 * {@code latestRecordType.getSupertypes()} while diffing — this assumes that map
 * is a mutable defensive copy; confirm, otherwise latestRecordType's internal
 * state is being mutated here.
 *
 * @return true if any supertype mutation was added to the put
 */
private boolean updateSupertypes(Put put, Long newRecordTypeVersion, RecordType recordType, RecordType latestRecordType) { boolean changed = false; Map<SchemaId, Long> latestSupertypes = latestRecordType.getSupertypes(); // Update supertypes: write an entry for every supertype whose version differs from the stored one for (Entry<SchemaId, Long> entry : recordType.getSupertypes().entrySet()) { SchemaId supertypeId = entry.getKey(); Long supertypeVersion = entry.getValue(); if (!supertypeVersion.equals(latestSupertypes.get(supertypeId))) { put.add(TypeCf.SUPERTYPE.bytes, supertypeId.getBytes(), newRecordTypeVersion, Bytes.toBytes(supertypeVersion)); changed = true; } latestSupertypes.remove(supertypeId); } // Remove remaining supertypes: anything left in latestSupertypes was dropped, so write a delete marker for (Entry<SchemaId, Long> entry : latestSupertypes.entrySet()) { put.add(TypeCf.SUPERTYPE.bytes, entry.getKey().getBytes(), newRecordTypeVersion, DELETE_MARKER); changed = true; } return changed; }
/**
 * Assembles the Solr input document from the collected record metadata:
 * record id, table, key, vtag (both id and resolved name), and version.
 */
public SolrInputDocument build() throws InterruptedException, RepositoryException {
    solrDoc.setField("lily.id", recordId.toString());
    solrDoc.setField("lily.table", table);
    solrDoc.setField("lily.key", key);
    solrDoc.setField("lily.vtagId", vtag.toString());
    // Resolve the vtag's schema id to its human-readable field name.
    String vtagName = typeManager.getFieldTypeById(vtag).getName().getName();
    solrDoc.setField("lily.vtag", vtagName);
    solrDoc.setField("lily.version", version);
    return solrDoc;
}
/**
 * Atomically deletes the blob incubator row for the given blob key, but only if
 * the row's RECORD column still holds the given record id (HBase
 * check-and-delete). Increments the reference-delete metric on success.
 *
 * @return true if the row was deleted, false if the reference check failed
 */
private boolean deleteReference(byte[] blobKey, SchemaId recordId) throws IOException {
    Delete delete = new Delete(blobKey);
    boolean deleted = blobIncubatorTable.checkAndDelete(blobKey, BlobIncubatorCf.REF.bytes,
            BlobIncubatorColumn.RECORD.bytes, recordId.getBytes(), delete);
    if (deleted) {
        metrics.refDeleteCount.inc();
    }
    return deleted;
}
case CREATED: importListener.created(EntityType.FIELD_TYPE, newFieldType.getName().toString(), newFieldType.getId() .toString()); break; case UP_TO_DATE: