public AnnotationFields(DeployBeanInfo<?> info) { super(info); if (GlobalProperties.getBoolean("ebean.lobEagerFetch", false)) { defaultLobFetchType = FetchType.EAGER; } }
/**
 * Construct with the deployment information for the bean.
 * <p>
 * Checks the "ebean.lobEagerFetch" property to optionally make EAGER the
 * default fetch type for Lob properties (the normal default is LAZY).
 * </p>
 */
public AnnotationFields(DeployBeanInfo<?> info) {
    super(info);
    if (GlobalProperties.getBoolean("ebean.lobEagerFetch", false)) {
        // global switch to eagerly fetch Lob properties by default
        defaultLobFetchType = FetchType.EAGER;
    }
}
/**
 * Report an Embedded Id type that does not implement hashCode() and equals().
 * <p>
 * In strict mode (the "ebean.strict" property, default true) this throws a
 * PersistenceException; otherwise the problem is only logged as SEVERE.
 * </p>
 *
 * @param source   the originating exception (kept as the cause)
 * @param idType   the Embedded bean type missing hashCode()/equals()
 * @param beanType the entity bean that uses idType as its Id
 */
private void checkMissingHashCodeOrEquals(Exception source, Class<?> idType, Class<?> beanType) {
    StringBuilder sb = new StringBuilder();
    sb.append("SERIOUS ERROR: The hashCode() and equals() methods *MUST* be implemented ");
    sb.append("on Embedded bean ").append(idType).append(" as it is used as an Id for ").append(beanType);
    String msg = sb.toString();
    if (GlobalProperties.getBoolean("ebean.strict", true)) {
        throw new PersistenceException(msg, source);
    }
    // lenient mode: log and carry on
    logger.log(Level.SEVERE, msg, source);
}
/**
 * Flag an Embedded Id class that is missing hashCode() and equals().
 * <p>
 * When "ebean.strict" is true (the default) a PersistenceException is
 * thrown; when false the error is logged at SEVERE level instead.
 * </p>
 *
 * @param source   the exception that triggered this check (preserved as cause)
 * @param idType   the Embedded Id type
 * @param beanType the owning entity bean type
 */
private void checkMissingHashCodeOrEquals(Exception source, Class<?> idType, Class<?> beanType) {
    String message =
        "SERIOUS ERROR: The hashCode() and equals() methods *MUST* be implemented "
            + "on Embedded bean " + idType + " as it is used as an Id for " + beanType;
    boolean strictMode = GlobalProperties.getBoolean("ebean.strict", true);
    if (strictMode) {
        throw new PersistenceException(message, source);
    } else {
        logger.log(Level.SEVERE, message, source);
    }
}
private void initClassPaths() { try { String cn = GlobalProperties.get("ebean.classpathreader", null); if (cn != null){ // use a user defined classPathReader logger.info("Using ["+cn+"] to read the searchable class path"); classPathReader = (ClassPathReader)ClassUtil.newInstance(cn, this.getClass()); } classPaths = classPathReader.readPath(classLoader); if (classPaths == null || classPaths.length == 0){ String msg = "ClassPath is EMPTY using ClassPathReader ["+classPathReader+"]"; logger.warning(msg); } boolean debug = GlobalProperties.getBoolean("ebean.debug.classpath", false); if (debug || logger.isLoggable(Level.FINER)) { String msg = "Classpath " + Arrays.toString(classPaths); logger.info(msg); } } catch (Exception e) { String msg = "Error trying to read the classpath entries"; throw new RuntimeException(msg, e); } }
private AutoFetchManager createAutoFetchManager(String serverName, ResourceManager resourceManager) { File autoFetchFile = getAutoFetchFile(serverName, resourceManager); AutoFetchManager autoFetchManager = null; boolean readFile = GlobalProperties.getBoolean("autofetch.readfromfile", true); if (readFile) { autoFetchManager = deserializeAutoFetch(autoFetchFile); } if (autoFetchManager == null) { // not deserialized from file so create as empty // It will be populated automatically by querying the // database meta data autoFetchManager = new DefaultAutoFetchManager(autoFetchFile.getAbsolutePath()); } return autoFetchManager; }
private AutoFetchManager createAutoFetchManager(String serverName, ResourceManager resourceManager) { File autoFetchFile = getAutoFetchFile(serverName, resourceManager); AutoFetchManager autoFetchManager = null; boolean readFile = GlobalProperties.getBoolean("autofetch.readfromfile", true); if (readFile) { autoFetchManager = deserializeAutoFetch(autoFetchFile); } if (autoFetchManager == null) { // not deserialized from file so create as empty // It will be populated automatically by querying the // database meta data autoFetchManager = new DefaultAutoFetchManager(autoFetchFile.getAbsolutePath()); } return autoFetchManager; }
/**
 * Construct this DmlPersistExecute.
 *
 * @param validate   whether bean validation runs before persisting
 * @param binder     binds statement parameters
 * @param pstmtBatch optional PreparedStatement batch support
 */
public DefaultPersistExecute(boolean validate, Binder binder, PstmtBatch pstmtBatch) {
    this.validate = validate;
    // executors for orm updates, raw update sql and callable sql
    this.exeOrmUpdate = new ExeOrmUpdate(binder, pstmtBatch);
    this.exeUpdateSql = new ExeUpdateSql(binder, pstmtBatch);
    this.exeCallableSql = new ExeCallableSql(binder, pstmtBatch);
    // global JDBC batching defaults
    this.defaultBatchSize = GlobalProperties.getInt("batch.size", 20);
    this.defaultBatchGenKeys = GlobalProperties.getBoolean("batch.getgeneratedkeys", true);
}
/**
 * Construct this DmlPersistExecute.
 * <p>
 * Batch defaults come from the "batch.getgeneratedkeys" and "batch.size"
 * global properties.
 * </p>
 *
 * @param validate   true to run validation before persisting
 * @param binder     used to bind values to statements
 * @param pstmtBatch PreparedStatement batching support (may be null)
 */
public DefaultPersistExecute(boolean validate, Binder binder, PstmtBatch pstmtBatch) {
    this.validate = validate;
    this.exeCallableSql = new ExeCallableSql(binder, pstmtBatch);
    this.exeUpdateSql = new ExeUpdateSql(binder, pstmtBatch);
    this.exeOrmUpdate = new ExeOrmUpdate(binder, pstmtBatch);
    this.defaultBatchGenKeys = GlobalProperties.getBoolean("batch.getgeneratedkeys", true);
    this.defaultBatchSize = GlobalProperties.getInt("batch.size", 20);
}
/**
 * Construct the DataSourceManager.
 * <p>
 * Heart beat checking defaults to every 30 seconds while the database is
 * up ("datasource.heartbeatfreq") and every 10 seconds while it is down
 * ("datasource.deadbeatfreq").
 * </p>
 */
public DataSourceManager() {
    this.alertlistener = createAlertListener();
    // heart beat frequencies (seconds) for up/down database states
    this.dbUpFreqInSecs = GlobalProperties.getInt("datasource.heartbeatfreq", 30);
    this.dbDownFreqInSecs = GlobalProperties.getInt("datasource.deadbeatfreq", 10);
    this.deregisterDriver = GlobalProperties.getBoolean("datasource.deregisterDriver", true);
    this.dbChecker = new BackgroundRunnable(new Checker(), dbUpFreqInSecs);
    try {
        BackgroundThread.add(dbChecker);
    } catch (Exception e) {
        // registration failure is logged rather than propagated
        logger.log(Level.SEVERE, null, e);
    }
}
/**
 * Construct the DataSourceManager and schedule its heart beat checker.
 * <p>
 * The checker runs every "datasource.heartbeatfreq" seconds (default 30)
 * while up; "datasource.deadbeatfreq" (default 10) applies while down.
 * </p>
 */
public DataSourceManager() {
    this.alertlistener = createAlertListener();
    this.dbUpFreqInSecs = GlobalProperties.getInt("datasource.heartbeatfreq", 30);
    this.dbDownFreqInSecs = GlobalProperties.getInt("datasource.deadbeatfreq", 10);
    this.dbChecker = new BackgroundRunnable(new Checker(), dbUpFreqInSecs);
    this.deregisterDriver = GlobalProperties.getBoolean("datasource.deregisterDriver", true);
    try {
        // register the periodic checker with the background thread
        BackgroundThread.add(dbChecker);
    } catch (Exception e) {
        logger.log(Level.SEVERE, null, e);
    }
}
/**
 * Construct the load context for a query's object graph.
 *
 * @param ebeanServer         the owning server
 * @param rootDescriptor      descriptor of the root bean type
 * @param readOnly            tri-state read only flag for loaded beans
 * @param excludeBeanCache    true to bypass the bean cache
 * @param parentNode          origin graph node, or null for a root query
 * @param useAutofetchManager true when autofetch profiling applies
 */
public DLoadContext(SpiEbeanServer ebeanServer, BeanDescriptor<?> rootDescriptor, Boolean readOnly,
        boolean excludeBeanCache, ObjectGraphNode parentNode, boolean useAutofetchManager) {
    this.ebeanServer = ebeanServer;
    // "ebean.hardrefs" switches lazy-load references from weak to hard
    this.hardRefs = GlobalProperties.getBoolean("ebean.hardrefs", false);
    this.defaultBatchSize = ebeanServer.getLazyLoadBatchSize();
    this.rootDescriptor = rootDescriptor;
    // defaultBatchSize must be assigned before building the root context
    this.rootBeanContext =
        new DLoadBeanContext(this, rootDescriptor, null, defaultBatchSize, null, createBeanLoadList());
    this.readOnly = readOnly;
    this.excludeBeanCache = excludeBeanCache;
    this.useAutofetchManager = useAutofetchManager;
    this.origin = (parentNode == null) ? null : parentNode.getOriginQueryPoint();
    this.relativePath = (parentNode == null) ? null : parentNode.getPath();
}
/**
 * Create the context that coordinates lazy loading for an object graph.
 *
 * @param ebeanServer         server this context belongs to
 * @param rootDescriptor      root bean type descriptor
 * @param readOnly            whether loaded beans should be read only
 * @param excludeBeanCache    true to skip the bean cache
 * @param parentNode          parent graph node (null for the root query)
 * @param useAutofetchManager true to collect autofetch profiling
 */
public DLoadContext(SpiEbeanServer ebeanServer, BeanDescriptor<?> rootDescriptor, Boolean readOnly,
        boolean excludeBeanCache, ObjectGraphNode parentNode, boolean useAutofetchManager) {
    this.ebeanServer = ebeanServer;
    this.hardRefs = GlobalProperties.getBoolean("ebean.hardrefs", false);
    this.defaultBatchSize = ebeanServer.getLazyLoadBatchSize();
    this.rootDescriptor = rootDescriptor;
    // the root context requires defaultBatchSize, so order matters here
    this.rootBeanContext =
        new DLoadBeanContext(this, rootDescriptor, null, defaultBatchSize, null, createBeanLoadList());
    this.readOnly = readOnly;
    this.excludeBeanCache = excludeBeanCache;
    this.useAutofetchManager = useAutofetchManager;
    if (parentNode != null) {
        // remember where in the parent graph this context hangs off
        this.origin = parentNode.getOriginQueryPoint();
        this.relativePath = parentNode.getPath();
    } else {
        this.origin = null;
        this.relativePath = null;
    }
}
/**
 * Construct the logging support for autofetch profiling.
 * <p>
 * When file logging is enabled a dedicated csv log is used so the main log
 * is not flooded with profiling messages. A background task periodically
 * updates the profiling information.
 * </p>
 *
 * @param serverConfig    supplies the autofetch and logging configuration
 * @param profileListener the manager whose profiling is logged
 */
public DefaultAutoFetchManagerLogging(ServerConfig serverConfig, DefaultAutoFetchManager profileListener) {
    this.manager = profileListener;
    AutofetchConfig config = serverConfig.getAutofetchConfig();
    traceUsageCollection = GlobalProperties.getBoolean("ebean.autofetch.traceUsageCollection", false);
    useFileLogger = config.isUseFileLogging();
    if (useFileLogger) {
        // separate csv log file (like transaction logging) keeps the
        // profiling output out of the main log
        String baseDir = serverConfig.getLoggingDirectoryWithEval();
        fileLogger = new SimpleLogger(baseDir, "autofetch", true, "csv");
    } else {
        fileLogger = null;
    }
    BackgroundThread.add(config.getProfileUpdateFrequency(), new UpdateProfile());
}
/**
 * Create the autofetch profiling logger.
 * <p>
 * Profiling output optionally goes to its own csv file (mirroring the
 * transaction logging approach) rather than the main log. The profile is
 * refreshed by a scheduled background task.
 * </p>
 *
 * @param serverConfig    provides autofetch configuration and log directory
 * @param profileListener the autofetch manager being logged
 */
public DefaultAutoFetchManagerLogging(ServerConfig serverConfig, DefaultAutoFetchManager profileListener) {
    this.manager = profileListener;
    AutofetchConfig autofetchConfig = serverConfig.getAutofetchConfig();
    traceUsageCollection = GlobalProperties.getBoolean("ebean.autofetch.traceUsageCollection", false);
    useFileLogger = autofetchConfig.isUseFileLogging();
    if (!useFileLogger) {
        fileLogger = null;
    } else {
        // dedicated csv log so profiling does not pollute the main log
        String loggingDir = serverConfig.getLoggingDirectoryWithEval();
        fileLogger = new SimpleLogger(loggingDir, "autofetch", true, "csv");
    }
    int updateFreqInSecs = autofetchConfig.getProfileUpdateFrequency();
    BackgroundThread.add(updateFreqInSecs, new UpdateProfile());
}
public H2Platform() { super(); this.name = "h2"; this.dbEncrypt = new H2DbEncrypt(); // only support getGeneratedKeys with non-batch JDBC // so generally use SEQUENCE instead of IDENTITY for H2 boolean useIdentity = GlobalProperties.getBoolean("ebean.h2platform.useIdentity", false); IdType idType = useIdentity ? IdType.IDENTITY : IdType.SEQUENCE; this.dbIdentity.setIdType(idType); this.dbIdentity.setSupportsGetGeneratedKeys(true); this.dbIdentity.setSupportsSequence(true); this.dbIdentity.setSupportsIdentity(true); this.openQuote = "\""; this.closeQuote = "\""; // H2 data types match default JDBC types // so no changes to dbTypeMap required this.dbDdlSyntax.setDropIfExists("if exists"); this.dbDdlSyntax.setDisableReferentialIntegrity("SET REFERENTIAL_INTEGRITY FALSE"); this.dbDdlSyntax.setEnableReferentialIntegrity("SET REFERENTIAL_INTEGRITY TRUE"); this.dbDdlSyntax.setForeignKeySuffix("on delete restrict on update restrict"); }
public H2Platform() { super(); this.name = "h2"; this.dbEncrypt = new H2DbEncrypt(); // only support getGeneratedKeys with non-batch JDBC // so generally use SEQUENCE instead of IDENTITY for H2 boolean useIdentity = GlobalProperties.getBoolean("ebean.h2platform.useIdentity", false); IdType idType = useIdentity ? IdType.IDENTITY : IdType.SEQUENCE; this.dbIdentity.setIdType(idType); this.dbIdentity.setSupportsGetGeneratedKeys(true); this.dbIdentity.setSupportsSequence(true); this.dbIdentity.setSupportsIdentity(true); this.openQuote = "\""; this.closeQuote = "\""; // H2 data types match default JDBC types // so no changes to dbTypeMap required this.dbDdlSyntax.setDropIfExists("if exists"); this.dbDdlSyntax.setDisableReferentialIntegrity("SET REFERENTIAL_INTEGRITY FALSE"); this.dbDdlSyntax.setEnableReferentialIntegrity("SET REFERENTIAL_INTEGRITY TRUE"); this.dbDdlSyntax.setForeignKeySuffix("on delete restrict on update restrict"); }
public HsqldbPlatform(){ super(); this.name = "hsqldb"; this.dbEncrypt = new H2DbEncrypt(); // only support getGeneratedKeys with non-batch JDBC // so generally use SEQUENCE instead of IDENTITY for H2 boolean useIdentity = GlobalProperties.getBoolean("ebean.hsqldb.useIdentity", true); IdType idType = useIdentity ? IdType.IDENTITY : IdType.SEQUENCE; this.dbIdentity.setIdType(idType); this.dbIdentity.setSupportsGetGeneratedKeys(true); this.dbIdentity.setSupportsSequence(true); this.dbIdentity.setSupportsIdentity(true); this.openQuote = "\""; this.closeQuote = "\""; // H2 data types match default JDBC types // so no changes to dbTypeMap required dbTypeMap.put(Types.INTEGER, new DbType("integer",false)); this.dbDdlSyntax.setDropIfExists("if exists"); this.dbDdlSyntax.setDisableReferentialIntegrity("SET DATABASE REFERENTIAL INTEGRITY FALSE"); this.dbDdlSyntax.setEnableReferentialIntegrity("SET DATABASE REFERENTIAL INTEGRITY TRUE"); this.dbDdlSyntax.setForeignKeySuffix("on delete restrict on update restrict"); this.dbDdlSyntax.setIdentity("GENERATED BY DEFAULT AS IDENTITY (START WITH 1) "); }
public HsqldbPlatform() { super(); this.name = "hsqldb"; this.dbEncrypt = new H2DbEncrypt(); // only support getGeneratedKeys with non-batch JDBC // so generally use SEQUENCE instead of IDENTITY for H2 boolean useIdentity = GlobalProperties.getBoolean("ebean.hsqldb.useIdentity", true); IdType idType = useIdentity ? IdType.IDENTITY : IdType.SEQUENCE; this.dbIdentity.setIdType(idType); this.dbIdentity.setSupportsGetGeneratedKeys(true); this.dbIdentity.setSupportsSequence(true); this.dbIdentity.setSupportsIdentity(true); this.openQuote = "\""; this.closeQuote = "\""; // H2 data types match default JDBC types // so no changes to dbTypeMap required dbTypeMap.put(Types.INTEGER, new DbType("integer", false)); this.dbDdlSyntax.setDropIfExists("if exists"); this.dbDdlSyntax.setDisableReferentialIntegrity("SET DATABASE REFERENTIAL INTEGRITY FALSE"); this.dbDdlSyntax.setEnableReferentialIntegrity("SET DATABASE REFERENTIAL INTEGRITY TRUE"); this.dbDdlSyntax.setForeignKeySuffix("on delete restrict on update restrict"); this.dbDdlSyntax.setIdentity("GENERATED BY DEFAULT AS IDENTITY (START WITH 1) "); }
private SpiBackgroundExecutor createBackgroundExecutor(ServerConfig serverConfig, int uniqueServerId) { String namePrefix = "Ebean-" + serverConfig.getName(); // the size of the pool for executing periodic tasks (such as cache // flushing) int schedulePoolSize = GlobalProperties.getInt("backgroundExecutor.schedulePoolsize", 1); // the side of the main pool for immediate background task execution int minPoolSize = GlobalProperties.getInt("backgroundExecutor.minPoolSize", 1); int poolSize = GlobalProperties.getInt("backgroundExecutor.poolsize", 20); int maxPoolSize = GlobalProperties.getInt("backgroundExecutor.maxPoolSize", poolSize); int idleSecs = GlobalProperties.getInt("backgroundExecutor.idlesecs", 60); int shutdownSecs = GlobalProperties.getInt("backgroundExecutor.shutdownSecs", 30); boolean useTrad = GlobalProperties.getBoolean("backgroundExecutor.traditional", true); if (useTrad) { // this pool will use Idle seconds between min and max so I think it is // better // as it will let the thread count float between the min and max ThreadPool pool = ThreadPoolManager.getThreadPool(namePrefix); pool.setMinSize(minPoolSize); pool.setMaxSize(maxPoolSize); pool.setMaxIdleTime(idleSecs * 1000); return new TraditionalBackgroundExecutor(pool, schedulePoolSize, shutdownSecs, namePrefix); } else { return new DefaultBackgroundExecutor(poolSize, schedulePoolSize, idleSecs, shutdownSecs, namePrefix); } }