/**
 * Exports date and time columns and verifies the boundary rows
 * round-trip correctly.
 */
@Test
public void testDatesAndTimes() throws IOException, SQLException {
  final int TOTAL_RECORDS = 10;

  // One generator per temporal column type under test.
  ColumnGenerator dateGen = getDateColumnGenerator();
  ColumnGenerator timeGen = getTimeColumnGenerator();

  createTextFile(0, TOTAL_RECORDS, false, dateGen, timeGen);
  createTable(dateGen, timeGen);
  runExport(getArgv(true, 10, 10));
  verifyExport(TOTAL_RECORDS);

  // Spot-check the extreme rows for each generated column.
  assertColMinAndMax(forIdx(0), dateGen);
  assertColMinAndMax(forIdx(1), timeGen);
}
/**
 * Exercises the testMultiTransaction test with staging table specified.
 * @throws IOException
 * @throws SQLException
 */
@Test
public void testMultiTransactionWithStaging() throws IOException, SQLException {
  final int TOTAL_RECORDS = 20;
  createTextFile(0, TOTAL_RECORDS, true);
  createTable();
  createStagingTable();
  // 5 rows per statement and 2 statements per transaction force
  // multiple commits within a single mapper.
  runExport(getArgv(true, 5, 2, "--staging-table", getStagingTableName()));
  verifyExport(TOTAL_RECORDS);
}
// NOTE(review): fragment — this is the tail of an anonymous ColumnGenerator
// whose enclosing definition starts outside this view; left byte-identical.
// Declares the generated column's SQL type as the platform's BIGINT type.
public String getType() { return getBigIntType(); } };
/**
 * Exports a fixed number of records into a table with the given name
 * and verifies the row count.
 *
 * @param tableName target table to export into
 */
protected void testExportToTableWithName(String tableName)
    throws IOException, SQLException {
  final int TOTAL_RECORDS = 10;
  setCurTableName(tableName);
  createTextFile(0, TOTAL_RECORDS, false);
  createTable();
  runExport(getArgv(true, 10, 10));
  verifyExport(TOTAL_RECORDS);
}
createTable();

// Generate the record class up front so the export can reuse its jar.
LOG.info("Generating code...");
CodeGenTool codeGen = new CodeGenTool();
String [] codeGenArgs = getCodeGenArgv();
SqoopOptions options = codeGen.parseArguments(
    codeGenArgs, null, null, true);
// NOTE(review): createTable() is invoked a second time here — presumably to
// reset the table after code generation; confirm against the full method,
// which starts outside this view.
createTable();
removeTablePath();
createSequenceFile(0, TOTAL_RECORDS, className);
// Export using the pre-generated class and jar rather than regenerating.
runExport(getArgv(true, 10, 10, "--class-name", className,
    "--jar-file", jarFileName));
verifyExport(TOTAL_RECORDS);
} finally {
// Restore the previous class loader if one was swapped out earlier
// (the swap happens before this fragment begins).
if (null != prevClassLoader) {
/**
 * Verify that for the max and min values of the 'id' column, the values
 * for a given column meet the expected values.
 *
 * @param colName database column to check
 * @param generator produces the expected text for a given row id
 */
protected void assertColMinAndMax(String colName, ColumnGenerator generator)
    throws SQLException {
  Connection conn = getConnection();
  int lowestId = getMinRowId(conn);
  int highestId = getMaxRowId(conn);

  LOG.info("Checking min/max for column " + colName + " with type "
      + generator.getType());

  // Compare the stored value against the generator's expectation at
  // both extremes of the id range.
  assertColValForRowId(lowestId, colName, generator.getVerifyText(lowestId));
  assertColValForRowId(highestId, colName, generator.getVerifyText(highestId));
}
Connection conn = getConnection();
// Drop any pre-existing table before (re)creating it below.
PreparedStatement statement = conn.prepareStatement(
    getDropTableStatement(getTableName()),
    ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
try {
// NOTE(review): fragment — 'sb' is declared outside this view; it appears
// to accumulate a CREATE TABLE statement. The loop below is cut off at the
// end of this fragment.
sb.append(getTableName());
// Base schema: integer primary key plus a short message column.
sb.append(" (\"ID\" INT NOT NULL PRIMARY KEY, \"MSG\" VARCHAR(64)");
int colNum = 0;
// Append one quoted, generated column per extra ColumnGenerator.
for (ColumnGenerator gen : extraColumns) {
sb.append(", \"" + forIdx(colNum++) + "\" " + gen.getType());
String stageTableName = getStagingTableName();
Connection conn = getConnection();
// Drop any pre-existing staging table before (re)creating it below.
PreparedStatement statement = conn.prepareStatement(
    getDropTableStatement(stageTableName),
    ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
try {
// NOTE(review): fragment — 'sb' is declared outside this view; it appears
// to accumulate a CREATE TABLE statement for the staging table. The loop
// below is cut off at the end of this fragment.
int colNum = 0;
// Append one quoted, generated column per extra ColumnGenerator.
for (ColumnGenerator gen : extraColumns) {
sb.append(", \"" + forIdx(colNum++) + "\" " + gen.getType());
/**
 * Adds the Oracle connection credentials to the export arguments
 * before delegating to the parent implementation.
 */
@Override
protected String [] getArgv(boolean includeHadoopFlags,
    int rowsPerStatement, int statementsPerTx, String... additionalArgv) {
  String [] argsWithAuth = newStrArray(additionalArgv,
      "--username", OracleUtils.ORACLE_USER_NAME,
      "--password", OracleUtils.ORACLE_USER_PASS);
  return super.getArgv(includeHadoopFlags, rowsPerStatement,
      statementsPerTx, argsWithAuth);
}
@Override protected String [] getCodeGenArgv(String... extraArgs) { String [] moreArgs = new String[extraArgs.length + 4]; int i = 0; for (i = 0; i < extraArgs.length; i++) { moreArgs[i] = extraArgs[i]; } // Add username and password args. moreArgs[i++] = "--username"; moreArgs[i++] = OracleUtils.ORACLE_USER_NAME; moreArgs[i++] = "--password"; moreArgs[i++] = OracleUtils.ORACLE_USER_PASS; return super.getCodeGenArgv(moreArgs); }
/**
 * Exports only a subset of the table's columns: id, msg, and the
 * generated columns at indices 0 and 2.
 */
@Test
public void testColumnsExport() throws IOException, SQLException {
  String columnList = "ID,MSG," + forIdx(0) + "," + forIdx(2);
  testColumnsExport(columnList);
}
/**
 * Creates the table via the parent implementation, then records the
 * generated column names and types and builds the matching procedure.
 */
@Override
public void createTable(ColumnGenerator... extraColumns) throws SQLException {
  super.createTable(extraColumns);

  int numCols = extraColumns.length;
  names = new String[numCols];
  types = new String[numCols];
  for (int col = 0; col < numCols; ++col) {
    names[col] = forIdx(col);
    types[col] = extraColumns[col].getType();
  }
  createProcedure(names, types);
}
/** Run 2 mappers, make sure all records load in correctly. */
@Test
public void testMultiMapTextExport() throws IOException, SQLException {
  final int RECORDS_PER_MAP = 10;
  final int NUM_FILES = 2;

  // One input file per mapper.
  for (int fileNum = 0; fileNum < NUM_FILES; fileNum++) {
    createTextFile(fileNum, RECORDS_PER_MAP, false);
  }

  createTable();
  runExport(getArgv(true, 10, 10));
  verifyExport(RECORDS_PER_MAP * NUM_FILES);
}
/**
 * Adds credentials and the direct-mode flag to the export arguments
 * before delegating to the parent implementation.
 */
@Override
protected String[] getArgv(boolean includeHadoopFlags,
    int rowsPerStatement, int statementsPerTx, String... additionalArgv) {
  ArrayList<String> argList =
      new ArrayList<String>(Arrays.asList(additionalArgv));
  argList.addAll(Arrays.asList(
      "--username", getUserName(),
      "--password", PASSWORD,
      "--direct"));
  return super.getArgv(includeHadoopFlags, rowsPerStatement,
      statementsPerTx, argList.toArray(new String[0]));
}
/**
 * Appends the username flag to the code-generation arguments before
 * delegating to the parent implementation.
 */
@Override
protected String [] getCodeGenArgv(String... extraArgs) {
  ArrayList<String> argList = new ArrayList<String>(Arrays.asList(extraArgs));
  argList.add("--username");
  argList.add(getUserName());
  return super.getCodeGenArgv(argList.toArray(new String[0]));
}
@Test public void testBigIntCol() throws IOException, SQLException { final int TOTAL_RECORDS = 10; // generate a column that won't fit in a normal int. ColumnGenerator gen = new ColumnGenerator() { public String getExportText(int rowNum) { long val = (long) rowNum * 1000000000; return "" + val; } public String getVerifyText(int rowNum) { long val = (long) rowNum * 1000000000; return "" + val; } public String getType() { return getBigIntType(); } }; createTextFile(0, TOTAL_RECORDS, false, gen); createTable(gen); runExport(getArgv(true, 10, 10)); verifyExport(TOTAL_RECORDS); assertColMinAndMax(forIdx(0), gen); }
/**
 * Ensure that we use multiple transactions in a single mapper.
 */
@Test
public void testMultiTransaction() throws IOException, SQLException {
  final int TOTAL_RECORDS = 20;
  createTextFile(0, TOTAL_RECORDS, true);
  createTable();
  // 5 rows per statement and 2 statements per transaction force
  // multiple commits for 20 records.
  runExport(getArgv(true, 5, 2));
  verifyExport(TOTAL_RECORDS);
}
/**
 * Run 2 mappers with staging enabled,
 * make sure all records load in correctly.
 */
@Test
public void testMultiMapTextExportWithStaging()
    throws IOException, SQLException {
  final int RECORDS_PER_MAP = 10;
  final int NUM_FILES = 2;

  // One input file per mapper.
  for (int fileNum = 0; fileNum < NUM_FILES; fileNum++) {
    createTextFile(fileNum, RECORDS_PER_MAP, false);
  }

  createTable();
  createStagingTable();
  runExport(getArgv(true, 10, 10, "--staging-table", getStagingTableName()));
  verifyExport(RECORDS_PER_MAP * NUM_FILES);
}
/**
 * Adds the pg_bulkload binary location, credentials, connection manager,
 * and staging options to the export arguments before delegating to the
 * parent implementation.
 */
@Override
protected String[] getArgv(boolean includeHadoopFlags,
    int rowsPerStatement, int statementsPerTx, String... additionalArgv) {
  ArrayList<String> argList =
      new ArrayList<String>(Arrays.asList(additionalArgv));
  argList.addAll(Arrays.asList(
      "-D", "pgbulkload.bin=" + PG_BULKLOAD,
      "--username", getUserName(),
      "--connection-manager", "org.apache.sqoop.manager.PGBulkloadManager",
      "--staging-table", "dummy",
      "--clear-staging-table"));
  return super.getArgv(includeHadoopFlags, rowsPerStatement,
      statementsPerTx, argList.toArray(new String[0]));
}
/**
 * Injects the MySQL username and password into the code-generation
 * arguments before delegating to the parent implementation.
 */
@Override
protected String [] getCodeGenArgv(String... extraArgs) {
  String [] argsWithAuth =
      mySqlTestUtils.addUserNameAndPasswordToArgs(extraArgs);
  return super.getCodeGenArgv(argsWithAuth);
}