/**
 * Run the configured action on the given class.
 * Exactly one action handler is invoked based on {@code _action};
 * an unrecognized action is silently ignored.
 */
public void run(Class<?> cls) {
    if (ACTION_ADD.equals(_action)) {
        // ADD has a metadata-only variant selected by the _meta flag
        if (_meta)
            addMeta(cls);
        else
            add(cls);
        return;
    }
    if (ACTION_REFRESH.equals(_action)) {
        refresh(cls);
        return;
    }
    if (ACTION_BUILD_SCHEMA.equals(_action)) {
        buildSchema(cls);
        return;
    }
    if (ACTION_DROP.equals(_action)) {
        drop(cls);
        return;
    }
    if (ACTION_VALIDATE.equals(_action))
        validate(cls);
}
/**
 * Add the tables used by the sequences of each of the given class
 * mappings to the schema group in use.
 */
private void addSequenceComponents(ClassMapping[] mappings) {
    SchemaGroup schemaGroup = getSchemaGroup();
    for (ClassMapping mapping : mappings)
        addSequenceComponents(mapping, schemaGroup);
}
/**
 * Validate the mappings for the given class and its fields.
 * If a mapping resolves successfully, schedule a schema flush unless the
 * configured schema action performs no destructive work (NONE or ADD only).
 */
private void validate(Class<?> cls) {
    if (cls == null)
        return;

    MappingRepository repository = getRepository();
    repository.setStrategyInstaller(new RuntimeStrategyInstaller(repository));
    if (getMapping(repository, cls, true) == null)
        return;

    // flush only when the schema action could actually change the database
    boolean nonDestructive = contains(_schemaActions, SCHEMA_ACTION_NONE)
        || contains(_schemaActions, SchemaTool.ACTION_ADD);
    _flushSchema = !nonDestructive;
}
// NOTE(review): this span is a garbled fragment -- it interleaves pieces of
// several methods (an exportMappings loop, an importMappings loop, and the
// command-line harness that constructs and configures a MappingTool and runs
// each requested action) with the enclosing declarations and try/catch frames
// cut off outside this view. Left byte-identical; it cannot be safely
// restructured or verified from here.
ImportExport[] instances = newImportExports(); for (int i = 0; i < instances.length; i++) { if (instances[i].exportMappings(conf, act, flags.meta, log, ImportExport[] instances = newImportExports(); for (int i = 0; i < instances.length; i++) { if (instances[i].importMappings(conf, act, args, flags.meta, tool = new MappingTool(conf, flags.action, flags.meta, loader); } catch (IllegalArgumentException iae) { return false; tool.setIgnoreErrors(flags.ignoreErrors); tool.setMetaDataFile(flags.metaDataFile); tool.setMappingWriter(flags.mappingWriter); tool.setSchemaAction(flags.schemaAction); tool.setSchemaWriter(flags.schemaWriter); tool.setReadSchema(flags.readSchema && !ACTION_VALIDATE.equals(flags.action)); tool.setPrimaryKeys(flags.primaryKeys); tool.setForeignKeys(flags.foreignKeys); tool.setIndexes(flags.indexes); tool.setSequences(flags.sequences || flags.dropSequences); if (i == 0 && flags.readSchema) log.info(_loc.get("tool-time")); tool.run(act[i]); tool.record(flags);
// NOTE(review): garbled fragment of a record(Flags) method -- the local
// `mappings` is declared but never assigned in the visible text, a dangling
// `&&` condition and an orphaned `} finally {` show that intervening
// statements were lost in extraction. Left byte-identical; restoring the
// missing logic from here would be guesswork.
private void record(MappingTool.Flags flags) { MappingRepository repos = getRepository(); MetaDataFactory io = repos.getMetaDataFactory(); ClassMapping[] mappings; dropUnusedSchemaComponents(mappings); addSequenceComponents(mappings); && (_schemaWriter == null || (_schemaTool != null && _schemaTool.getWriter() != null))) { SchemaTool tool = newSchemaTool(schemaActions[i]); tool.setSchemaGroup(getSchemaGroup()); tool.run(); tool.record(); ser.addAll(getSchemaGroup()); ser.serialize(_schemaWriter, ser.PRETTY); _schemaWriter.flush(); throw new GeneralException(e); } finally { clear();
/**
 * Create a SchemaTool for the given schema action, propagating this
 * tool's settings. SCHEMA_ACTION_NONE is translated to a null action.
 */
private SchemaTool newSchemaTool(String action) {
    String schemaAction = SCHEMA_ACTION_NONE.equals(action) ? null : action;
    SchemaTool schemaTool = new SchemaTool(_conf, schemaAction);
    schemaTool.setIgnoreErrors(getIgnoreErrors());
    schemaTool.setPrimaryKeys(getPrimaryKeys());
    schemaTool.setForeignKeys(getForeignKeys());
    schemaTool.setIndexes(getIndexes());
    schemaTool.setSequences(getSequences());
    return schemaTool;
}
/** * Return the schema group to use in mapping. If none has been set, the * schema will be generated from the database. */ public SchemaGroup getSchemaGroup() { if (_schema == null) { if (_action.indexOf(ACTION_BUILD_SCHEMA) != -1) { DynamicSchemaFactory factory = new DynamicSchemaFactory(); factory.setConfiguration(_conf); _schema = factory; } else if (_readSchema || contains(_schemaActions,SchemaTool.ACTION_RETAIN) || contains(_schemaActions,SchemaTool.ACTION_REFRESH)) { _schema = (SchemaGroup) newSchemaTool(null).getDBSchemaGroup(). clone(); } else { // with this we'll just read tables as different mappings // look for them LazySchemaFactory factory = new LazySchemaFactory(); factory.setConfiguration(_conf); factory.setPrimaryKeys(getPrimaryKeys()); factory.setForeignKeys(getForeignKeys()); factory.setIndexes(getIndexes()); _schema = factory; } if (_schema.getSchemas().length == 0) _schema.addSchema(); } return _schema; }
/** * Synchronize the mappings of the classes listed in the configuration. */ protected void synchronizeMappings(ClassLoader loader, JDBCConfiguration conf) { String action = conf.getSynchronizeMappings(); if (StringUtils.isEmpty(action)) return; MappingRepository repo = conf.getMappingRepositoryInstance(); Collection<Class<?>> classes = repo.loadPersistentTypes(false, loader); if (classes.isEmpty()) return; String props = Configurations.getProperties(action); action = Configurations.getClassName(action); MappingTool tool = new MappingTool(conf, action, false); Configurations.configureInstance(tool, conf, props, "SynchronizeMappings"); // initialize the schema for (Class<?> cls : classes) { try { tool.run(cls); } catch (IllegalArgumentException iae) { throw new UserException(_loc.get("bad-synch-mappings", action, Arrays.asList(MappingTool.ACTIONS))); } } tool.record(); }
/**
 * Drop the mapping for the given class. The class is always recorded for
 * metadata removal; its schema components are only collected for dropping
 * when the schema action includes DROP and a mapping can be resolved.
 */
private void drop(Class<?> cls) {
    if (cls == null)
        return;

    if (_dropCls == null)
        _dropCls = new HashSet<Class<?>>();
    _dropCls.add(cls);

    if (!contains(_schemaActions, SchemaTool.ACTION_DROP))
        return;

    MappingRepository repository = getRepository();
    repository.setStrategyInstaller(new RuntimeStrategyInstaller(repository));
    ClassMapping mapping = null;
    try {
        mapping = repository.getMapping(cls, null, false);
    } catch (Exception e) {
        // best-effort lookup: an unresolvable mapping is treated as
        // missing and reported via the warning below
    }

    if (mapping == null) {
        _log.warn(_loc.get("no-drop-meta", cls));
        return;
    }
    _flushSchema = true;
    if (_dropMap == null)
        _dropMap = new HashSet<ClassMapping>();
    _dropMap.add(mapping);
}
/**
 * Add the tables used by sequences of the given mapping to the schema
 * group: first the identity sequence (explicit metadata, or the configured
 * sequence for NATIVE / datastore-identity strategies), then any value
 * sequences on its fields, recursing into embedded mappings.
 */
private void addSequenceComponents(ClassMapping mapping, SchemaGroup group) {
    // identity sequence for the class itself
    SequenceMetaData idMeta = mapping.getIdentitySequenceMetaData();
    Seq idSeq = null;
    if (idMeta != null)
        idSeq = idMeta.getInstance(null);
    else if (mapping.getIdentityStrategy() == ValueStrategies.NATIVE
        || (mapping.getIdentityStrategy() == ValueStrategies.NONE
        && mapping.getIdentityType() == ClassMapping.ID_DATASTORE))
        idSeq = _conf.getSequenceInstance();
    if (idSeq instanceof JDBCSeq)
        ((JDBCSeq) idSeq).addSchema(mapping, group);

    // embedded mappings expose all fields; others only their defined ones
    FieldMapping[] fields = (mapping.getEmbeddingMetaData() == null)
        ? mapping.getDefinedFieldMappings()
        : mapping.getFieldMappings();
    for (FieldMapping field : fields) {
        SequenceMetaData valueMeta = field.getValueSequenceMetaData();
        if (valueMeta != null) {
            // NOTE(review): passes _loader here while the identity lookup
            // above passes null -- confirm the asymmetry is intentional
            Seq valueSeq = valueMeta.getInstance(_loader);
            if (valueSeq instanceof JDBCSeq)
                ((JDBCSeq) valueSeq).addSchema(mapping, group);
        } else if (field.getEmbeddedMapping() != null) {
            addSequenceComponents(field.getEmbeddedMapping(), group);
        }
    }
}
// NOTE(review): garbled fragment (duplicate of an earlier span) -- pieces of
// an exportMappings loop, an importMappings loop, and the MappingTool
// command-line setup/run sequence interleaved with their enclosing
// declarations and try/catch frames cut off outside this view. Left
// byte-identical; it cannot be safely restructured from here.
ImportExport[] instances = newImportExports(); for (int i = 0; i < instances.length; i++) { if (instances[i].exportMappings(conf, act, flags.meta, log, ImportExport[] instances = newImportExports(); for (int i = 0; i < instances.length; i++) { if (instances[i].importMappings(conf, act, args, flags.meta, tool = new MappingTool(conf, flags.action, flags.meta, loader); } catch (IllegalArgumentException iae) { return false; tool.setIgnoreErrors(flags.ignoreErrors); tool.setMetaDataFile(flags.metaDataFile); tool.setMappingWriter(flags.mappingWriter); tool.setSchemaAction(flags.schemaAction); tool.setSchemaWriter(flags.schemaWriter); tool.setReadSchema(flags.readSchema && !ACTION_VALIDATE.equals(flags.action)); tool.setPrimaryKeys(flags.primaryKeys); tool.setForeignKeys(flags.foreignKeys); tool.setIndexes(flags.indexes); tool.setSequences(flags.sequences || flags.dropSequences); if (i == 0 && flags.readSchema) log.info(_loc.get("tool-time")); tool.run(act[i]); tool.record(flags);
// NOTE(review): garbled fragment of a public record(Flags) method -- the
// local `mappings` is never assigned in the visible text, and a dangling
// `&&` condition plus an orphaned `} finally {` show that intervening
// statements were lost in extraction. Left byte-identical; the missing
// logic cannot be reconstructed safely from this view.
public void record(MappingTool.Flags flags) { MappingRepository repos = getRepository(); MetaDataFactory io = repos.getMetaDataFactory(); ClassMapping[] mappings; dropUnusedSchemaComponents(mappings); addSequenceComponents(mappings); && (_schemaWriter == null || (_schemaTool != null && _schemaTool.getWriter() != null))) { SchemaTool tool = newSchemaTool(schemaActions[i]); tool.setSchemaGroup(getSchemaGroup()); tool.run(); tool.record(); ser.addAll(getSchemaGroup()); ser.serialize(_schemaWriter, MetaDataSerializer.PRETTY); _schemaWriter.flush(); throw new GeneralException(e); } finally { clear();
/**
 * Return a SchemaTool configured for the given schema action. The
 * pseudo-action SCHEMA_ACTION_NONE is represented by a null action, and
 * this tool's error, key, index, and sequence settings are carried over.
 */
private SchemaTool newSchemaTool(String action) {
    if (SCHEMA_ACTION_NONE.equals(action))
        action = null;
    SchemaTool result = new SchemaTool(_conf, action);
    result.setIgnoreErrors(getIgnoreErrors());
    result.setPrimaryKeys(getPrimaryKeys());
    result.setForeignKeys(getForeignKeys());
    result.setIndexes(getIndexes());
    result.setSequences(getSequences());
    return result;
}
/** * Return the schema group to use in mapping. If none has been set, the * schema will be generated from the database. */ public SchemaGroup getSchemaGroup() { if (_schema == null) { if (_action.indexOf(ACTION_BUILD_SCHEMA) != -1) { DynamicSchemaFactory factory = new DynamicSchemaFactory(); factory.setConfiguration(_conf); _schema = factory; } else if (_readSchema || contains(_schemaActions,SchemaTool.ACTION_RETAIN) || contains(_schemaActions,SchemaTool.ACTION_REFRESH)) { _schema = (SchemaGroup) newSchemaTool(null).getDBSchemaGroup(). clone(); } else { // with this we'll just read tables as different mappings // look for them LazySchemaFactory factory = new LazySchemaFactory(); factory.setConfiguration(_conf); factory.setPrimaryKeys(getPrimaryKeys()); factory.setForeignKeys(getForeignKeys()); factory.setIndexes(getIndexes()); _schema = factory; } if (_schema.getSchemas().length == 0) _schema.addSchema(); } return _schema; }
/** * Synchronize the mappings of the classes listed in the configuration. */ protected void synchronizeMappings(ClassLoader loader, JDBCConfiguration conf) { String action = conf.getSynchronizeMappings(); if (StringUtils.isEmpty(action)) return; MappingRepository repo = conf.getMappingRepositoryInstance(); Collection<Class<?>> classes = repo.loadPersistentTypes(false, loader); if (classes.isEmpty()) return; String props = Configurations.getProperties(action); action = Configurations.getClassName(action); MappingTool tool = new MappingTool(conf, action, false); Configurations.configureInstance(tool, conf, props, "SynchronizeMappings"); // initialize the schema for (Class<?> cls : classes) { try { tool.run(cls); } catch (IllegalArgumentException iae) { throw new UserException(_loc.get("bad-synch-mappings", action, Arrays.asList(MappingTool.ACTIONS))); } } tool.record(); }
/**
 * Drop mapping for the given class. Always registers the class for
 * metadata removal; additionally schedules its schema components for
 * dropping when the schema action includes DROP and the mapping resolves.
 */
private void drop(Class<?> cls) {
    if (cls == null)
        return;

    if (_dropCls == null)
        _dropCls = new HashSet<Class<?>>();
    _dropCls.add(cls);

    // nothing further to do unless schema DROP was requested
    if (!contains(_schemaActions, SchemaTool.ACTION_DROP))
        return;

    MappingRepository repos = getRepository();
    repos.setStrategyInstaller(new RuntimeStrategyInstaller(repos));
    ClassMapping mapping = null;
    try {
        mapping = repos.getMapping(cls, null, false);
    } catch (Exception e) {
        // best-effort: a failed lookup falls through to the warning below
    }

    if (mapping != null) {
        _flushSchema = true;
        if (_dropMap == null)
            _dropMap = new HashSet<ClassMapping>();
        _dropMap.add(mapping);
    } else {
        _log.warn(_loc.get("no-drop-meta", cls));
    }
}
/**
 * Add tables used by sequences of the given mapping to the given schema
 * group. Handles the class's identity sequence (explicit metadata or the
 * configured sequence for NATIVE / datastore identity), then each field's
 * value sequence, recursing into embedded mappings.
 */
private void addSequenceComponents(ClassMapping mapping, SchemaGroup group) {
    SequenceMetaData smd = mapping.getIdentitySequenceMetaData();
    Seq seq = null;
    boolean nativeIdentity =
        mapping.getIdentityStrategy() == ValueStrategies.NATIVE
        || (mapping.getIdentityStrategy() == ValueStrategies.NONE
        && mapping.getIdentityType() == ClassMapping.ID_DATASTORE);
    if (smd != null)
        seq = smd.getInstance(null);
    else if (nativeIdentity)
        seq = _conf.getSequenceInstance();
    if (seq instanceof JDBCSeq)
        ((JDBCSeq) seq).addSchema(mapping, group);

    // embedded mappings report all fields; others just their defined ones
    FieldMapping[] fmds;
    if (mapping.getEmbeddingMetaData() == null)
        fmds = mapping.getDefinedFieldMappings();
    else
        fmds = mapping.getFieldMappings();

    for (int i = 0; i < fmds.length; i++) {
        smd = fmds[i].getValueSequenceMetaData();
        if (smd == null) {
            if (fmds[i].getEmbeddedMapping() != null)
                addSequenceComponents(fmds[i].getEmbeddedMapping(), group);
            continue;
        }
        seq = smd.getInstance(null);
        if (seq instanceof JDBCSeq)
            ((JDBCSeq) seq).addSchema(mapping, group);
    }
}
// NOTE(review): garbled fragment -- interleaved pieces of export/import
// mapping loops and the MappingTool command-line setup/run sequence, with
// enclosing declarations and try/catch frames cut off outside this view.
// Unlike the sibling fragments, this copy constructs the MappingTool
// without a loader argument -- presumably a different overload; confirm
// against the full source. Left byte-identical.
ImportExport[] instances = newImportExports(); for (int i = 0; i < instances.length; i++) { if (instances[i].exportMappings(conf, act, flags.meta, log, ImportExport[] instances = newImportExports(); for (int i = 0; i < instances.length; i++) { if (instances[i].importMappings(conf, act, args, flags.meta, tool = new MappingTool(conf, flags.action, flags.meta); } catch (IllegalArgumentException iae) { return false; tool.setIgnoreErrors(flags.ignoreErrors); tool.setMetaDataFile(flags.metaDataFile); tool.setMappingWriter(flags.mappingWriter); tool.setSchemaAction(flags.schemaAction); tool.setSchemaWriter(flags.schemaWriter); tool.setReadSchema(flags.readSchema && !ACTION_VALIDATE.equals(flags.action)); tool.setPrimaryKeys(flags.primaryKeys); tool.setForeignKeys(flags.foreignKeys); tool.setIndexes(flags.indexes); tool.setSequences(flags.sequences || flags.dropSequences); if (i == 0 && flags.readSchema) log.info(_loc.get("tool-time")); tool.run(act[i]); tool.record(flags);
/**
 * Run the configured action on the given class. The action constants are
 * distinct, so at most one branch matches; branch order is irrelevant.
 */
public void run(Class<?> cls) {
    if (ACTION_VALIDATE.equals(_action)) {
        validate(cls);
    } else if (ACTION_DROP.equals(_action)) {
        drop(cls);
    } else if (ACTION_BUILD_SCHEMA.equals(_action)) {
        buildSchema(cls);
    } else if (ACTION_REFRESH.equals(_action)) {
        refresh(cls);
    } else if (ACTION_ADD.equals(_action)) {
        // ADD is split into metadata-only and full-mapping variants
        if (_meta)
            addMeta(cls);
        else
            add(cls);
    }
}
// NOTE(review): garbled fragment of a public record(Flags) method
// (duplicate of an earlier span) -- `mappings` is declared but never
// assigned here, and a dangling `&&` condition plus an orphaned
// `} finally {` indicate lost intervening statements. Left byte-identical;
// it cannot be reconstructed safely from this view.
public void record(MappingTool.Flags flags) { MappingRepository repos = getRepository(); MetaDataFactory io = repos.getMetaDataFactory(); ClassMapping[] mappings; dropUnusedSchemaComponents(mappings); addSequenceComponents(mappings); && (_schemaWriter == null || (_schemaTool != null && _schemaTool.getWriter() != null))) { SchemaTool tool = newSchemaTool(schemaActions[i]); tool.setSchemaGroup(getSchemaGroup()); tool.run(); tool.record(); ser.addAll(getSchemaGroup()); ser.serialize(_schemaWriter, MetaDataSerializer.PRETTY); _schemaWriter.flush(); throw new GeneralException(e); } finally { clear();