/** * Returns the {@link MappedAppSchema} derived from configuration / tables. * * @return mapped application schema, never <code>null</code> */ public MappedAppSchema getMappedSchema() { FeatureType[] fts = ftNameToFt.values().toArray( new FeatureType[ftNameToFt.size()] ); FeatureTypeMapping[] ftMappings = ftNameToMapping.values().toArray( new FeatureTypeMapping[ftNameToMapping.size()] ); Map<FeatureType, FeatureType> ftToSuperFt = null; Map<String, String> prefixToNs = null; GMLSchemaInfoSet xsModel = null; // TODO GeometryStorageParams geometryParams = new GeometryStorageParams( CRSManager.getCRSRef( "EPSG:4326" ), dialect.getUndefinedSrid(), CoordinateDimension.DIM_2 ); return new MappedAppSchema( fts, ftToSuperFt, prefixToNs, xsModel, ftMappings, null, null, geometryParams, deleteCascadingByDB, null, null, null ); }
@Override protected List<String> getBLOBCreates() { List<String> ddl = new ArrayList<String>(); // create feature_type table TableName ftTable = schema.getBBoxMapping().getTable(); ddl.add( "CREATE TABLE " + ftTable + " (id integer PRIMARY KEY, qname text NOT NULL, bbox GEOMETRY)" ); // populate feature_type table for ( short ftId = 0; ftId < schema.getFts(); ftId++ ) { QName ftName = schema.getFtName( ftId ); ddl.add( "INSERT INTO " + ftTable + " (id,qname) VALUES (" + ftId + ",'" + ftName + "')" ); } // create gml_objects table TableName blobTable = schema.getBlobMapping().getTable(); ddl.add( "CREATE TABLE " + blobTable + " (id integer IDENTITY(1,1) PRIMARY KEY, " + "gml_id varchar(2000) NOT NULL, ft_type integer REFERENCES " + ftTable + " , binary_object varbinary(max), gml_bounded_by GEOMETRY)" ); ddl.add( "ALTER TABLE " + blobTable + " ADD CONSTRAINT gml_objects_geochk CHECK (gml_bounded_by.STIsValid() = 1)" ); double[] dom = schema.getBlobMapping().getCRS().getValidDomain(); ddl.add( "CREATE SPATIAL INDEX gml_objects_sidx ON " + blobTable + "(gml_bounded_by) WITH ( BOUNDING_BOX = ( " + ArrayUtils.join( ",", dom ) + " ) )" ); // ddl.add( "CREATE INDEX gml_objects_sidx ON " + blobTable + " USING GIST (gml_bounded_by GIST_GEOMETRY_OPS)" // ); // ddl.add( "CREATE TABLE gml_names (gml_object_id integer REFERENCES gml_objects," // + "name text NOT NULL,codespace text,prop_idx smallint NOT NULL)" ); return ddl; }
writer.writeAttribute( XSINS, "schemaLocation", SCHEMA_LOCATION ); int i = 1; for ( String ns : schema.getGMLSchema().getAppNamespaces() ) { String prefix = schema.getGMLSchema().getNamespacePrefixes().get( ns ); if ( prefix != null && !prefix.equals( XMLConstants.DEFAULT_NS_PREFIX ) ) { writer.writeNamespace( prefix, ns ); GMLVersion version = schema.getGMLSchema().getVersion(); writer.writeNamespace( "gml", version.getNamespace() ); writer.writeAttribute( "srid", schema.getGeometryParams().getSrid() ); writer.writeCharacters( schema.getGeometryParams().getCrs().getAlias() ); writer.writeEndElement(); if ( schema.getBlobMapping() != null ) { writeBlobMapping( writer, schema.getBlobMapping() ); List<FeatureType> fts = schema.getFeatureTypes( null, false, false ); SortedSet<String> ftNames = new TreeSet<String>(); for ( FeatureType ft : fts ) { FeatureType ft = schema.getFeatureType( ftName ); if ( schema.getFtMapping( ft.getName() ) != null ) { writeFeatureTypeMapping( writer, ft );
/**
 * Reports whether the given feature type is mapped by this store.
 *
 * @param ftName
 *            name of the feature type
 * @return <code>true</code> if a relational mapping exists for the type, or if a (BLOB-mode) bbox mapping is present
 */
@Override
public boolean isMapped( QName ftName ) {
    return schema.getFtMapping( ftName ) != null || schema.getBBoxMapping() != null;
}
/**
 * Generates the DDL statements for all relationally-mapped feature types.
 *
 * @return list of SQL statement buffers, never <code>null</code>
 */
private List<StringBuffer> getRelationalCreates() {
    List<StringBuffer> ddl = new ArrayList<StringBuffer>();
    for ( short ftId = 0; ftId < schema.getFts(); ftId++ ) {
        // only feature types with a relational mapping contribute statements
        FeatureTypeMapping mapping = schema.getFtMapping( schema.getFtName( ftId ) );
        if ( mapping != null ) {
            ddl.addAll( process( mapping ) );
        }
    }
    return ddl;
}
/**
 * Initializes the particle converters for every relationally-mapped feature type of the schema.
 */
private void initConverters() {
    for ( FeatureType ft : schema.getFeatureTypes() ) {
        FeatureTypeMapping ftMapping = schema.getFtMapping( ft.getName() );
        if ( ftMapping == null ) {
            // feature type has no relational mapping -> nothing to convert
            continue;
        }
        for ( Mapping particleMapping : ftMapping.getMappings() ) {
            initConverter( particleMapping );
        }
    }
}
QName ftName = getSchema().getFtMappings().keySet().iterator().next(); if ( mapping.getValueFtName() != null ) { ftName = mapping.getValueFtName(); if ( getSchema().getFeatureType( ftName ).isAbstract() ) { FeatureType[] concreteSubtypes = getSchema().getConcreteSubtypes( getSchema().getFeatureType( ftName ) ); if ( concreteSubtypes.length == 0 ) { String msg = "Error in mapping. Feature-particle mapping " + mapping throw new FeatureStoreException( msg ); ftName = getSchema().getConcreteSubtypes( getSchema().getFeatureType( ftName ) )[0].getName(); FeatureTypeMapping ftMapping = getSchema().getFtMapping( ftName ); ftTable = ftMapping.getFtTable(); } else if ( !join.getToTable().getName().equals( "?" ) ) {
try { for ( String fid : filter.getMatchingIds() ) { IdAnalysis analysis = getSchema().analyzeId( fid ); FeatureType ft = analysis.getFeatureType(); List<IdAnalysis> idKernels = ftNameToIdAnalysis.get( ft.getName() ); FeatureType ft = getSchema().getFeatureType( ftName ); checkIfFeatureTypIsRequested( typeNames, ft ); FeatureTypeMapping ftMapping = getSchema().getFtMapping( ftName ); FIDMapping fidMapping = ftMapping.getFidMapping(); List<IdAnalysis> idKernels = ftNameToIdAnalysis.get( ftName );
/**
 * Looks up the relational mapping defined for the specified feature type.
 *
 * @param ftName
 *            name of the feature type, must not be <code>null</code>
 * @return relational mapping for the feature type, may be <code>null</code> (no relational mapping)
 */
public FeatureTypeMapping getMapping( QName ftName ) {
    FeatureTypeMapping ftMapping = schema.getFtMapping( ftName );
    return ftMapping;
}
conn = getConnection(); BlobMapping blobMapping = getSchema().getBlobMapping(); stmt.setShort( i++, getSchema().getFtId( ftName ) ); if ( wb != null ) { for ( SQLArgument o : wb.getWhere().getArguments() ) {
if ( getSchema().getBlobMapping() != null ) { return queryHitsByOperatorFilterBlob( query, ftName, filter ); FeatureType ft = getSchema().getFeatureType( ftName ); FeatureTypeMapping ftMapping = getMapping( ftName ); if ( ftMapping == null ) {
/**
 * Creates a new {@link DDLCreator} instance for the given {@link MappedAppSchema}.
 *
 * @param schema
 *            mapped application schema, must not be <code>null</code>
 * @param dialect
 *            SQL dialect, must not be <code>null</code>
 * @throws NullPointerException
 *             if <code>schema</code> or <code>dialect</code> is <code>null</code>
 */
protected DDLCreator( MappedAppSchema schema, SQLDialect dialect ) {
    // fail fast on the documented preconditions (a null dialect would otherwise only
    // surface much later, far away from the faulty caller)
    if ( schema == null ) {
        throw new NullPointerException( "schema must not be null" );
    }
    if ( dialect == null ) {
        throw new NullPointerException( "dialect must not be null" );
    }
    this.schema = schema;
    this.dialect = dialect;
    hasBlobTable = schema.getBlobMapping() != null;
}
public FeatureParticleConverter( SQLIdentifier fkColumn, SQLIdentifier hrefColumn, GMLReferenceResolver resolver, FeatureType valueFt, MappedAppSchema schema ) { this.fkColumn = fkColumn; this.hrefColumn = hrefColumn; this.resolver = resolver; this.valueFt = valueFt; this.schema = schema; if ( valueFt != null && schema.getSubtypes( valueFt ).length == 0 && schema.getFtMapping( valueFt.getName() ) != null ) { fidPrefix = schema.getFtMapping( valueFt.getName() ).getFidMapping().getPrefix(); } else { fidPrefix = null; } }
Envelope calcEnvelope( QName ftName, Connection conn ) throws FeatureStoreException { Envelope env = null; FeatureType ft = schema.getFeatureType( ftName ); if ( ft != null ) { // TODO what should be favored for hybrid mappings? if ( blobMapping != null ) { env = calcEnvelope( ftName, blobMapping, conn ); } else if ( schema.getFtMapping( ft.getName() ) != null ) { FeatureTypeMapping ftMapping = schema.getFtMapping( ft.getName() ); env = calcEnvelope( ftMapping, conn ); } } bboxCache.set( ftName, env ); return env; }
/**
 * Deletes the features matched by the given id filter (relational mode).
 *
 * @param filter
 *            id filter that selects the features to be deleted, must not be <code>null</code>
 * @param lock
 *            lock to check against, may be <code>null</code>
 * @return number of deleted feature rows
 * @throws FeatureStoreException
 *             if a feature is locked, an id cannot be analyzed or the deletion fails
 */
private int performDeleteRelational( IdFilter filter, Lock lock )
                        throws FeatureStoreException {
    checkIfFeaturesAreNotLocked( filter, lock );
    int deleted = 0;
    for ( ResourceId id : filter.getSelectedIds() ) {
        LOG.debug( "Analyzing id: " + id.getRid() );
        IdAnalysis analysis = null;
        try {
            analysis = schema.analyzeId( id.getRid() );
            LOG.debug( "Analysis: " + analysis );
            if ( !schema.getKeyDependencies().getDeleteCascadingByDB() ) {
                // DB does not cascade deletes -> remove rows from joined tables explicitly
                LOG.debug( "Deleting joined rows manually." );
                deleteJoinedRows( analysis );
            } else {
                LOG.debug( "Depending on database to delete joined rows automatically." );
            }
            deleted += deleteFeatureRow( analysis );
        } catch ( IllegalArgumentException e ) {
            // chain the original exception instead of discarding it, so the root cause
            // of the failed id analysis is not lost
            throw new FeatureStoreException( "Unable to determine feature type for id '" + id + "'.", e );
        }
    }
    return deleted;
}
/**
 * Applies a property replacement to a relationally mapped multi property for each of the
 * given feature ids.
 *
 * @param replacement
 *            parsed property replacement carrying the new value and update action
 * @param mapping
 *            particle mapping of the affected property
 * @param ftMapping
 *            relational mapping of the feature type
 * @param list
 *            ids of the features to update
 */
private void addRelationallyMappedMultiProperty( ParsedPropertyReplacement replacement, Mapping mapping,
                                                 FeatureTypeMapping ftMapping, List<ResourceId> list )
                        throws FilterEvaluationException, FeatureStoreException, SQLException {
    UpdateAction action = replacement.getUpdateAction();
    // a missing update action defaults to 'insertAfter'
    if ( action == null ) {
        action = UpdateAction.INSERT_AFTER;
    }
    switch ( action ) {
    case INSERT_BEFORE:
    case REMOVE:
    case REPLACE:
        // NOTE(review): the log claims these actions are omitted, but execution falls
        // through and the update below runs anyway — confirm whether a return/skip is
        // intended here.
        LOG.warn( "Updating of multi properties is currently only supported for 'insertAfter' update action. Omitting." );
        break;
    case INSERT_AFTER:
        break;
    default:
        break;
    }
    InsertRowManager mgr = new InsertRowManager( fs, conn, null );
    List<Property> props = Collections.singletonList( replacement.getNewValue() );
    for ( ResourceId id : list ) {
        // re-derive the table row keys for each feature id, then build a one-property
        // feature carrying the new value and hand it to the row manager
        IdAnalysis analysis = schema.analyzeId( id.getRid() );
        FeatureType featureType = schema.getFeatureType( ftMapping.getFeatureType() );
        Feature f = featureType.newFeature( id.getRid(), props, null );
        mgr.updateFeature( f, ftMapping, analysis.getIdKernels(), mapping, replacement );
    }
}
throws SQLException, FeatureStoreException { LOG.debug( "Inserting feature with id '" + feature.getId() + "' (BLOB)" ); if ( fs.getSchema().getFeatureType( feature.getName() ) == null ) { throw new FeatureStoreException( "Cannot insert feature '" + feature.getName() + "': feature type is not served by this feature store." );
/**
 * Retrieves the GML object with the given id (relational mode).
 *
 * @param id
 *            id of the object to look up
 * @return matching object, or <code>null</code> if no feature with that id exists
 * @throws FeatureStoreException
 *             if the query fails
 */
private GMLObject getObjectByIdRelational( String id )
                        throws FeatureStoreException {
    IdAnalysis idAnalysis = getSchema().analyzeId( id );
    if ( !idAnalysis.isFid() ) {
        // only feature ids are supported so far
        throw new UnsupportedOperationException( "Fetching of geometries by id (relational mode) is not implemented yet." );
    }
    GMLObject object = null;
    FeatureInputStream features = queryByIdFilterRelational( null, new IdFilter( id ), null );
    try {
        Iterator<Feature> featureIter = features.iterator();
        if ( featureIter.hasNext() ) {
            object = featureIter.next();
        }
    } finally {
        // always release the underlying result set / connection
        features.close();
    }
    return object;
}
@Override public int performDelete( IdFilter filter, Lock lock ) throws FeatureStoreException { int deleted = 0; if ( blobMapping != null ) { deleted = performDeleteBlob( filter, lock ); } else { deleted = performDeleteRelational( filter, lock ); } // TODO improve this for ( FeatureType ft : schema.getFeatureTypes( null, false, false ) ) { bboxTracker.delete( ft.getName() ); } return deleted; }
/**
 * Looks up the numeric id of the given feature type via the mapped schema.
 *
 * @param ftName
 *            name of the feature type
 * @return numeric id of the feature type
 */
short getFtId( QName ftName ) {
    MappedAppSchema mappedSchema = getSchema();
    return mappedSchema.getFtId( ftName );
}