/**
 * Batch-inserts user rank rows; created_at and updated_at are set by the
 * database via current_timestamp(). Executed in chunks of 5000 rows.
 *
 * @param userRanks beans supplying :stargazersCount and :rank for each row
 */
@SqlBatch("insert into user_ranks (stargazers_count, rank, created_at, updated_at) " + "values (:stargazersCount, :rank, current_timestamp(), current_timestamp())")
@BatchChunkSize(5000)
void bulkInsert(@BindBean List<UserRank> userRanks);
} // closes the enclosing sql-object interface (header not visible in this chunk)
private ChunkSizeFunction determineBatchChunkSize(Class<?> sqlObjectType, Method raw_method) { // this next big if chain determines the batch chunk size. It looks from most specific // scope to least, that is: as an argument, then on the method, then on the class, // then default to Integer.MAX_VALUE int index_of_batch_chunk_size_annotation_on_parameter; if ((index_of_batch_chunk_size_annotation_on_parameter = findBatchChunkSizeFromParam(raw_method)) >= 0) { return new ParamBasedChunkSizeFunction(index_of_batch_chunk_size_annotation_on_parameter); } else if (raw_method.isAnnotationPresent(BatchChunkSize.class)) { final int size = raw_method.getAnnotation(BatchChunkSize.class).value(); if (size <= 0) { throw new IllegalArgumentException("Batch chunk size must be >= 0"); } return new ConstantChunkSizeFunction(size); } else if (sqlObjectType.isAnnotationPresent(BatchChunkSize.class)) { final int size = BatchChunkSize.class.cast(sqlObjectType.getAnnotation(BatchChunkSize.class)).value(); return new ConstantChunkSizeFunction(size); } else { return new ConstantChunkSizeFunction(Integer.MAX_VALUE); } }
/**
 * Batch-upserts repository rows (MySQL "on duplicate key update" refreshes all
 * mutable columns when the primary key already exists); created_at, updated_at
 * and fetched_at come from the database's current_timestamp(). Executed in
 * chunks of 100 rows.
 *
 * @param repos beans supplying the named :id, :ownerId, ... properties per row
 */
@SqlBatch("insert into repositories " + "(id, owner_id, name, full_name, description, fork, homepage, stargazers_count, language, created_at, updated_at, fetched_at) " + "values (:id, :ownerId, :name, :fullName, :description, :fork, :homepage, :stargazersCount, :language, current_timestamp(), current_timestamp(), current_timestamp()) " + "on duplicate key update " + "owner_id=values(owner_id), name=values(name), full_name=values(full_name), description=values(description), homepage=values(homepage), stargazers_count=values(stargazers_count), language=values(language), updated_at=values(updated_at), fetched_at=values(fetched_at)")
@BatchChunkSize(100)
void bulkInsert(@BindBean List<Repository> repos);
private ChunkSizeFunction determineBatchChunkSize(Class<?> sqlObjectType, Method raw_method) { // this next big if chain determines the batch chunk size. It looks from most specific // scope to least, that is: as an argument, then on the method, then on the class, // then default to Integer.MAX_VALUE int index_of_batch_chunk_size_annotation_on_parameter; if ((index_of_batch_chunk_size_annotation_on_parameter = findBatchChunkSizeFromParam(raw_method)) >= 0) { return new ParamBasedChunkSizeFunction(index_of_batch_chunk_size_annotation_on_parameter); } else if (raw_method.isAnnotationPresent(BatchChunkSize.class)) { final int size = raw_method.getAnnotation(BatchChunkSize.class).value(); if (size <= 0) { throw new IllegalArgumentException("Batch chunk size must be >= 0"); } return new ConstantChunkSizeFunction(size); } else if (sqlObjectType.isAnnotationPresent(BatchChunkSize.class)) { final int size = BatchChunkSize.class.cast(sqlObjectType.getAnnotation(BatchChunkSize.class)).value(); return new ConstantChunkSizeFunction(size); } else { return new ConstantChunkSizeFunction(Integer.MAX_VALUE); } }
/**
 * Batch-upserts user rows (MySQL "on duplicate key update" refreshes login,
 * avatar_url and updated_at for an existing id); timestamps come from the
 * database's current_timestamp(). Executed in chunks of 100 rows.
 *
 * @param users beans supplying :id, :type, :login and :avatarUrl per row
 */
@SqlBatch("insert into users (id, type, login, avatar_url, created_at, updated_at) " + "values (:id, :type, :login, :avatarUrl, current_timestamp(), current_timestamp()) " + "on duplicate key update login=values(login), avatar_url=values(avatar_url), updated_at=values(updated_at)")
@BatchChunkSize(100)
void bulkInsert(@BindBean List<User> users);
/**
 * Batch-inserts organization rank rows; created_at and updated_at are set by
 * the database via current_timestamp(). Executed in chunks of 5000 rows.
 *
 * @param orgRanks beans supplying :stargazersCount and :rank for each row
 */
@SqlBatch("insert into organization_ranks (stargazers_count, rank, created_at, updated_at) " + "values (:stargazersCount, :rank, current_timestamp(), current_timestamp())")
@BatchChunkSize(5000)
void bulkInsert(@BindBean List<OrganizationRank> orgRanks);
} // closes the enclosing sql-object interface (header not visible in this chunk)
/**
 * Batch-inserts repository rank rows; created_at and updated_at are set by the
 * database via current_timestamp(). Executed in chunks of 5000 rows.
 *
 * @param repoRanks beans supplying :stargazersCount and :rank for each row
 */
@SqlBatch("insert into repository_ranks (stargazers_count, rank, created_at, updated_at) " + "values (:stargazersCount, :rank, current_timestamp(), current_timestamp())")
@BatchChunkSize(5000)
void bulkInsert(@BindBean List<RepositoryRank> repoRanks);
} // closes the enclosing sql-object interface (header not visible in this chunk)
/**
 * Batch-inserts timeline chunks drawn from the iterator, 1000 per chunk. The
 * bare @SqlBatch carries no inline SQL, so the statement is located externally
 * (e.g. by a statement locator) — TODO confirm which locator applies.
 * NOTE(review): binding semantics of @TimelineChunkBinder and
 * @InternalTenantContextBinder are defined elsewhere; presumably the context
 * scopes rows to the tenant — verify against the binder implementations.
 *
 * @param chunkIterator source of TimelineChunk rows to insert
 * @param context       internal call context bound into each statement
 */
@SqlBatch
@BatchChunkSize(1000)
void bulkInsertTimelineChunks(@TimelineChunkBinder Iterator<TimelineChunk> chunkIterator, @InternalTenantContextBinder final InternalCallContext context);
/**
 * Sql-object interface exercising batch chunking. The class-level
 * {@code @BatchChunkSize(4)} supplies the default chunk size; individual
 * methods may override it with a method-level annotation or a
 * {@code @BatchChunkSize}-annotated parameter.
 *
 * <p>NOTE(review): removed the redundant {@code static} modifier (nested
 * interfaces are implicitly static) and the redundant {@code public} on
 * interface members (implicitly public) — semantics are unchanged.
 */
@BatchChunkSize(4)
@UseStringTemplate3StatementLocator
public interface UsesBatching {

    /** Inserts each bean as one batch row; returns per-statement update counts. */
    @SqlBatch("insert into something (id, name) values (:id, :name)")
    int[] insertBeans(@BindBean Iterable<Something> elements);

    /** Same insert, declared with transactional = false. */
    @SqlBatch(value = "insert into something (id, name) values (:id, :name)", transactional = false)
    int[] insertBeansNoTx(@BindBean Iterator<Something> elements);

    /** Iterates ids while binding the single name value to every row. */
    @SqlBatch("insert into something (id, name) values (:id, :name)")
    int[] withConstantValue(@Bind("id") Iterable<Integer> ids, @Bind("name") String name);

    /** Zips the two iterables together, one row per pair. */
    @SqlBatch("insert into something (id, name) values (:id, :name)")
    int[] zipArgumentsTogether(@Bind("id") Iterable<Integer> ids, @Bind("name") List<String> name);

    /** Method-level chunk size (2) overrides the class-level default of 4. */
    @SqlBatch("insert into something (id, name) values (:it.id, :it.name)")
    @BatchChunkSize(2)
    int[] insertChunked(@BindBean("it") Iterable<Something> its);

    /** Chunk size supplied at call time via the annotated parameter; SQL located externally. */
    @SqlBatch
    int[] insertChunked(@BatchChunkSize int size, @BindBean("it") Iterable<Something> its);

    /** Row count of the target table. */
    @SqlQuery("select count(*) from something")
    int size();
}
} // closes the enclosing class (header not visible in this chunk)
// Interface header only — the body is not visible in this fragment. The
// class-level @BatchChunkSize(4) supplies the default chunk size for the
// batch methods declared inside.
@BatchChunkSize(4) @UseStringTemplate3StatementLocator public static interface UsesBatching
/**
 * Inserts beans bound under the "it" prefix (:it.id, :it.name); the
 * method-level @BatchChunkSize(2) overrides any class-level default.
 *
 * @param its beans to insert, one row each
 * @return per-statement update counts
 */
@SqlBatch("insert into something (id, name) values (:it.id, :it.name)")
@BatchChunkSize(2)
public int[] insertChunked(@BindBean("it") Iterable<Something> its);
/**
 * Inserts beans bound under the "it" prefix (:it.id, :it.name); the
 * method-level @BatchChunkSize(2) overrides any class-level default.
 *
 * @param its beans to insert, one row each
 * @return per-statement update counts
 */
@SqlBatch("insert into something (id, name) values (:it.id, :it.name)")
@BatchChunkSize(2)
public int[] insertChunked(@BindBean("it") Iterable<Something> its);
/**
 * Inserts beans with the chunk size supplied at call time through the
 * @BatchChunkSize-annotated parameter. The bare @SqlBatch carries no inline
 * SQL, so the statement is located externally — TODO confirm the locator.
 *
 * @param size chunk size for this invocation
 * @param its  beans to insert, bound under the "it" prefix
 * @return per-statement update counts
 */
@SqlBatch
public int[] insertChunked(@BatchChunkSize int size, @BindBean("it") Iterable<Something> its);
/**
 * Inserts beans with the chunk size supplied at call time through the
 * @BatchChunkSize-annotated parameter. The bare @SqlBatch carries no inline
 * SQL, so the statement is located externally — TODO confirm the locator.
 *
 * @param size chunk size for this invocation
 * @param its  beans to insert, bound under the "it" prefix
 * @return per-statement update counts
 */
@SqlBatch
public int[] insertChunked(@BatchChunkSize int size, @BindBean("it") Iterable<Something> its);
/**
 * Inserts one row per id, pairing ids with firstNames while the single
 * lastName value is bound to every row; the name column is built in SQL as
 * :first || ' ' || :last. Chunked 2 rows per batch statement.
 *
 * @param ids        iterated ids, one row each
 * @param firstNames iterated first names, paired with ids
 * @param lastName   constant last name shared by all rows
 */
@SqlBatch("insert into something (id, name) values (:id, :first || ' ' || :last)")
@BatchChunkSize(2)
void insertFamily(@Bind("id") List<Integer> ids, @Bind("first") Iterator<String> firstNames, @Bind("last") String lastName);
/**
 * Inserts one row per id, pairing ids with firstNames while the single
 * lastName value is bound to every row; the name column is built in SQL as
 * :first || ' ' || :last. Chunked 2 rows per batch statement.
 *
 * @param ids        iterated ids, one row each
 * @param firstNames iterated first names, paired with ids
 * @param lastName   constant last name shared by all rows
 */
@SqlBatch("insert into something (id, name) values (:id, :first || ' ' || :last)")
@BatchChunkSize(2)
void insertFamily(@Bind("id") List<Integer> ids, @Bind("first") Iterator<String> firstNames, @Bind("last") String lastName);