@Override public ErrorAndSolution getErrorAndSolution() { ErrorAndSolution es = null; if(getQueryMatches()) { Map<String, List<String>> rll = getRegexToLogLines(); if (rll.get(EXCEPTION_REGEX).size() > 0 && rll.get(SPLIT_REGEX).size() > 0) { // There should only be a single split line... String splitLogLine = rll.get(SPLIT_REGEX).get(0); // Extract only 'split: hdfs://...' Pattern p = Pattern.compile(SPLIT_REGEX, Pattern.CASE_INSENSITIVE); Matcher m = p.matcher(splitLogLine); m.find(); String splitStr = m.group(1); es = new ErrorAndSolution( "Data file " + splitStr + " is corrupted.", "Replace file. i.e. by re-running the query that produced the " + "source table / partition."); } } reset(); return es; } }
// Extract the non-zero exit code reported by the failed transform script.
String errorCode = m2.group();
es = new ErrorAndSolution(
    // Fixed user-facing message typo: "transfrom" -> "transform".
    "A user-supplied transform script has exited with error code "
        + errorCode + " instead of 0.",
@Override public ErrorAndSolution getErrorAndSolution() { ErrorAndSolution es = null; if(getQueryMatches()) { Map<String, List<String>> rll = getRegexToLogLines(); if (rll.get(EXCEPTION_REGEX).size() > 0 && rll.get(SPLIT_REGEX).size() > 0) { // There should only be a single split line... String splitLogLine = rll.get(SPLIT_REGEX).get(0); // Extract only 'split: hdfs://...' Pattern p = Pattern.compile(SPLIT_REGEX, Pattern.CASE_INSENSITIVE); Matcher m = p.matcher(splitLogLine); m.find(); String splitStr = m.group(1); es = new ErrorAndSolution( "Data file " + splitStr + " is corrupted.", "Replace file. i.e. by re-running the query that produced the " + "source table / partition."); } } reset(); return es; } }
// Extract the non-zero exit code reported by the failed transform script.
String errorCode = m2.group();
es = new ErrorAndSolution(
    // Fixed user-facing message typo: "transfrom" -> "transform".
    "A user-supplied transform script has exited with error code "
        + errorCode + " instead of 0.",
/**
 * Produces an error/solution pair when the task logs indicate that
 * map-side aggregation hash maps ran out of memory.
 *
 * <p>The suggested fix is to halve the current value of
 * {@code hive.map.aggr.hash.percentmemory}.
 *
 * @return the error and suggested solution, or {@code null} if the
 *     heuristic does not apply
 */
@Override
public ErrorAndSolution getErrorAndSolution() {
  ErrorAndSolution result = null;
  if (getQueryMatches() && configMatches) {
    List<String> oomLines = getRegexToLogLines().get(OUT_OF_MEMORY_REGEX);
    if (!oomLines.isEmpty()) {
      String param = HiveConf.ConfVars.HIVEMAPAGGRHASHMEMORY.toString();
      float current =
          HiveConf.getFloatVar(getConf(), HiveConf.ConfVars.HIVEMAPAGGRHASHMEMORY);
      result = new ErrorAndSolution(
          "Out of memory due to hash maps used in map-side aggregation.",
          "Currently " + param + " is set to " + current + ". "
              + "Try setting it to a lower value. i.e "
              + "'set " + param + " = " + current / 2 + ";'");
    }
  }
  reset();
  return result;
}
}
/**
 * Produces an error/solution pair when the task logs indicate that
 * map-side aggregation hash maps ran out of memory.
 *
 * <p>The suggested fix is to halve the current value of
 * {@code hive.map.aggr.hash.percentmemory}.
 *
 * @return the error and suggested solution, or {@code null} if the
 *     heuristic does not apply
 */
@Override
public ErrorAndSolution getErrorAndSolution() {
  ErrorAndSolution result = null;
  if (getQueryMatches() && configMatches) {
    List<String> oomLines = getRegexToLogLines().get(OUT_OF_MEMORY_REGEX);
    if (!oomLines.isEmpty()) {
      String param = HiveConf.ConfVars.HIVEMAPAGGRHASHMEMORY.toString();
      float current =
          HiveConf.getFloatVar(getConf(), HiveConf.ConfVars.HIVEMAPAGGRHASHMEMORY);
      result = new ErrorAndSolution(
          "Out of memory due to hash maps used in map-side aggregation.",
          "Currently " + param + " is set to " + current + ". "
              + "Try setting it to a lower value. i.e "
              + "'set " + param + " = " + current / 2 + ";'");
    }
  }
  reset();
  return result;
}
}
@Override public ErrorAndSolution getErrorAndSolution() { ErrorAndSolution es = null; if(getQueryMatches()) { Map<String, List<String>> rll = getRegexToLogLines(); if (rll.get(EXCEPTION_REGEX).size() > 0 && rll.get(SPLIT_REGEX).size() > 0) { // There should only be a single split line... assert(rll.get(SPLIT_REGEX).size()==1); String splitLogLine = rll.get(SPLIT_REGEX).get(0); // Extract only 'split: hdfs://...' Pattern p = Pattern.compile(SPLIT_REGEX, Pattern.CASE_INSENSITIVE); Matcher m = p.matcher(splitLogLine); m.find(); String splitStr = m.group(); es = new ErrorAndSolution( "Data file " + splitStr + " is corrupted.", "Replace file. i.e. by re-running the query that produced the " + "source table / partition."); } } reset(); return es; } }
@Override public ErrorAndSolution getErrorAndSolution() { ErrorAndSolution es = null; if(getQueryMatches()) { Map<String, List<String>> rll = getRegexToLogLines(); if (rll.get(EXCEPTION_REGEX).size() > 0 && rll.get(SPLIT_REGEX).size() > 0) { // There should only be a single split line... String splitLogLine = rll.get(SPLIT_REGEX).get(0); // Extract only 'split: hdfs://...' Pattern p = Pattern.compile(SPLIT_REGEX, Pattern.CASE_INSENSITIVE); Matcher m = p.matcher(splitLogLine); m.find(); String splitStr = m.group(1); es = new ErrorAndSolution( "Data file " + splitStr + " is corrupted.", "Replace file. i.e. by re-running the query that produced the " + "source table / partition."); } } reset(); return es; } }
// Extract the non-zero exit code reported by the failed transform script.
String errorCode = m2.group();
es = new ErrorAndSolution(
    // Fixed user-facing message typo: "transfrom" -> "transform".
    "A user-supplied transform script has exited with error code "
        + errorCode + " instead of 0.",
// Extract the non-zero exit code reported by the failed transform script.
String errorCode = m2.group();
es = new ErrorAndSolution(
    // Fixed user-facing message typo: "transfrom" -> "transform".
    "A user-supplied transform script has exited with error code "
        + errorCode + " instead of 0.",
/**
 * Produces an error/solution pair when the task logs indicate that
 * map-side aggregation hash maps ran out of memory.
 *
 * <p>The suggested fix is to halve the current value of
 * {@code hive.map.aggr.hash.percentmemory}.
 *
 * @return the error and suggested solution, or {@code null} if the
 *     heuristic does not apply
 */
@Override
public ErrorAndSolution getErrorAndSolution() {
  ErrorAndSolution result = null;
  if (getQueryMatches() && configMatches) {
    List<String> oomLines = getRegexToLogLines().get(OUT_OF_MEMORY_REGEX);
    if (!oomLines.isEmpty()) {
      String param = HiveConf.ConfVars.HIVEMAPAGGRHASHMEMORY.toString();
      float current =
          HiveConf.getFloatVar(getConf(), HiveConf.ConfVars.HIVEMAPAGGRHASHMEMORY);
      result = new ErrorAndSolution(
          "Out of memory due to hash maps used in map-side aggregation.",
          "Currently " + param + " is set to " + current + ". "
              + "Try setting it to a lower value. i.e "
              + "'set " + param + " = " + current / 2 + ";'");
    }
  }
  reset();
  return result;
}
}
/**
 * Produces an error/solution pair when the task logs indicate that
 * map-side aggregation hash maps ran out of memory.
 *
 * <p>The suggested fix is to halve the current value of
 * {@code hive.map.aggr.hash.percentmemory}.
 *
 * @return the error and suggested solution, or {@code null} if the
 *     heuristic does not apply
 */
@Override
public ErrorAndSolution getErrorAndSolution() {
  ErrorAndSolution result = null;
  if (getQueryMatches() && configMatches) {
    List<String> oomLines = getRegexToLogLines().get(OUT_OF_MEMORY_REGEX);
    if (!oomLines.isEmpty()) {
      String param = HiveConf.ConfVars.HIVEMAPAGGRHASHMEMORY.toString();
      float current =
          HiveConf.getFloatVar(getConf(), HiveConf.ConfVars.HIVEMAPAGGRHASHMEMORY);
      result = new ErrorAndSolution(
          "Out of memory due to hash maps used in map-side aggregation.",
          "Currently " + param + " is set to " + current + ". "
              + "Try setting it to a lower value. i.e "
              + "'set " + param + " = " + current / 2 + ";'");
    }
  }
  reset();
  return result;
}
}