errorMessage("Problem reading instance weight: } expected");
errorMessage("Problem reading instance weight: } expected");
/**
 * Reads the next token and verifies that it terminates the current line.
 *
 * @param endOfFileOk whether end-of-file may stand in for end-of-line
 * @throws IOException if it doesn't find an end of line
 */
protected void getLastToken(boolean endOfFileOk) throws IOException {
  boolean atEndOfLine = (m_Tokenizer.nextToken() == StreamTokenizer.TT_EOL);
  boolean eofAccepted =
    endOfFileOk && (m_Tokenizer.ttype == StreamTokenizer.TT_EOF);
  if (!atEndOfLine && !eofAccepted) {
    errorMessage("end of line expected");
  }
}
Integer.valueOf(m_Tokenizer.sval).intValue(); } catch (NumberFormatException e) { errorMessage("index number expected"); errorMessage("indices have to be ordered"); errorMessage("index out of bounds"); errorMessage("not a valid value"); m_Tokenizer.sval); if (valIndex == -1) { errorMessage("nominal value not declared in header"); Double.valueOf(m_Tokenizer.sval).doubleValue(); } catch (NumberFormatException e) { errorMessage("number expected"); m_Tokenizer.sval); } catch (ParseException e) { errorMessage("unparseable date: " + m_Tokenizer.sval); errorMessage("unknown attribute type in column " + m_IndicesBuffer[numValues]);
Integer.valueOf(m_Tokenizer.sval).intValue(); } catch (NumberFormatException e) { errorMessage("index number expected"); errorMessage("indices have to be ordered"); errorMessage("index out of bounds"); errorMessage("not a valid value"); m_Tokenizer.sval); if (valIndex == -1) { errorMessage("nominal value not declared in header"); Double.valueOf(m_Tokenizer.sval).doubleValue(); } catch (NumberFormatException e) { errorMessage("number expected"); m_Tokenizer.sval); } catch (ParseException e) { errorMessage("unparseable date: " + m_Tokenizer.sval); errorMessage("unknown attribute type in column " + m_IndicesBuffer[numValues]);
if ((m_Tokenizer.ttype != StreamTokenizer.TT_WORD) && (m_Tokenizer.ttype != '\'') && (m_Tokenizer.ttype != '\"')) { errorMessage("not a valid date format"); errorMessage("premature end of file"); getNextToken(); if (!attributeName.equalsIgnoreCase(m_Tokenizer.sval)) { errorMessage("declaration of subrelation " + attributeName + " must be terminated by " + "@end " + attributeName); errorMessage("declaration of subrelation " + attributeName + " must be terminated by " + "@end " + attributeName); errorMessage("no valid attribute type or invalid " + "enumeration"); errorMessage("{ expected at beginning of enumeration"); errorMessage("} expected at end of enumeration"); } else { attributeValues.add(m_Tokenizer.sval); getFirstToken(); if (m_Tokenizer.ttype == StreamTokenizer.TT_EOF) { errorMessage("premature end of file");
if ((m_Tokenizer.ttype != StreamTokenizer.TT_WORD) && (m_Tokenizer.ttype != '\'') && (m_Tokenizer.ttype != '\"')) { errorMessage("not a valid date format"); errorMessage("premature end of file"); getNextToken(); if (!attributeName.equalsIgnoreCase(m_Tokenizer.sval)) { errorMessage("declaration of subrelation " + attributeName + " must be terminated by " + "@end " + attributeName); errorMessage("declaration of subrelation " + attributeName + " must be terminated by " + "@end " + attributeName); errorMessage("no valid attribute type or invalid " + "enumeration"); errorMessage("{ expected at beginning of enumeration"); errorMessage("} expected at end of enumeration"); } else { attributeValues.add(m_Tokenizer.sval); getFirstToken(); if (m_Tokenizer.ttype == StreamTokenizer.TT_EOF) { errorMessage("premature end of file");
errorMessage("not a valid value"); errorMessage("nominal value not declared in header"); instance[i] = Double.valueOf(m_Tokenizer.sval).doubleValue(); } catch (NumberFormatException e) { errorMessage("number expected"); instance[i] = m_Data.attribute(i).parseDate(m_Tokenizer.sval); } catch (ParseException e) { errorMessage("unparseable date: " + m_Tokenizer.sval); errorMessage("unknown attribute type in column " + i);
errorMessage("not a valid value"); errorMessage("nominal value not declared in header"); instance[i] = Double.valueOf(m_Tokenizer.sval).doubleValue(); } catch (NumberFormatException e) { errorMessage("number expected"); instance[i] = m_Data.attribute(i).parseDate(m_Tokenizer.sval); } catch (ParseException e) { errorMessage("unparseable date: " + m_Tokenizer.sval); errorMessage("unknown attribute type in column " + i);
errorMessage("premature end of file"); getLastToken(false); } else { errorMessage("keyword " + Instances.ARFF_RELATION + " expected"); errorMessage("premature end of file"); errorMessage("keyword " + Instances.ARFF_DATA + " expected"); errorMessage("no attributes declared");
errorMessage("premature end of file"); getLastToken(false); } else { errorMessage("keyword " + Instances.ARFF_RELATION + " expected"); errorMessage("premature end of file"); errorMessage("keyword " + Instances.ARFF_DATA + " expected"); errorMessage("no attributes declared");
/** * Gets the value of an attribute's weight (if one exists). * * @return the value of the attribute's weight, or 1.0 if no weight has been * supplied in the file */ protected double getAttributeWeight() throws IOException { double weight = 1.0; m_Tokenizer.nextToken(); if (m_Tokenizer.ttype == StreamTokenizer.TT_EOL || m_Tokenizer.ttype == StreamTokenizer.TT_EOF) { m_Tokenizer.pushBack(); return weight; } // see if we can read an attribute weight if (m_Tokenizer.ttype == '{') { m_Tokenizer.nextToken(); try { weight = Double.parseDouble(m_Tokenizer.sval); } catch (NumberFormatException ex) { errorMessage("Problem reading attribute weight " + ex.getMessage()); } m_Tokenizer.nextToken(); if (m_Tokenizer.ttype != '}') { errorMessage("Problem reading attribute weight: } expected"); } } return weight; }
/** * Gets the value of an attribute's weight (if one exists). * * @return the value of the attribute's weight, or 1.0 if no weight has been * supplied in the file */ protected double getAttributeWeight() throws IOException { double weight = 1.0; m_Tokenizer.nextToken(); if (m_Tokenizer.ttype == StreamTokenizer.TT_EOL || m_Tokenizer.ttype == StreamTokenizer.TT_EOF) { m_Tokenizer.pushBack(); return weight; } // see if we can read an attribute weight if (m_Tokenizer.ttype == '{') { m_Tokenizer.nextToken(); try { weight = Double.parseDouble(m_Tokenizer.sval); } catch (NumberFormatException ex) { errorMessage("Problem reading attribute weight " + ex.getMessage()); } m_Tokenizer.nextToken(); if (m_Tokenizer.ttype != '}') { errorMessage("Problem reading attribute weight: } expected"); } } return weight; }
/** * Reads a single instance using the tokenizer and returns it. * * @param structure the dataset header information, will get updated in case * of string or relational attributes * @param flag if method should test for carriage return after each instance * @return null if end of file has been reached * @throws IOException if the information is not read successfully */ protected Instance getInstance(Instances structure, boolean flag) throws IOException { m_Data = structure; // Check if any attributes have been declared. if (m_Data.numAttributes() == 0) { errorMessage("no header information available"); } // Check if end of file reached. getFirstToken(); if (m_Tokenizer.ttype == StreamTokenizer.TT_EOF) { return null; } // Parse instance if (m_Tokenizer.ttype == '{') { return getInstanceSparse(flag); } else { return getInstanceFull(flag); } }
/** * Reads a single instance using the tokenizer and returns it. * * @param structure the dataset header information, will get updated in case * of string or relational attributes * @param flag if method should test for carriage return after each instance * @return null if end of file has been reached * @throws IOException if the information is not read successfully */ protected Instance getInstance(Instances structure, boolean flag) throws IOException { m_Data = structure; // Check if any attributes have been declared. if (m_Data.numAttributes() == 0) { errorMessage("no header information available"); } // Check if end of file reached. getFirstToken(); if (m_Tokenizer.ttype == StreamTokenizer.TT_EOF) { return null; } // Parse instance if (m_Tokenizer.ttype == '{') { return getInstanceSparse(flag); } else { return getInstanceFull(flag); } }
/**
 * Reads the next token, flagging a premature end of line or file, and
 * normalizes quoted tokens to words and a lone "?" to the missing-value
 * token type.
 *
 * @throws IOException if it finds a premature end of line or file
 */
protected void getNextToken() throws IOException {
  if (m_Tokenizer.nextToken() == StreamTokenizer.TT_EOL) {
    errorMessage("premature end of line");
  }
  switch (m_Tokenizer.ttype) {
  case StreamTokenizer.TT_EOF:
    errorMessage("premature end of file");
    break;
  case '\'':
  case '"':
    // treat quoted tokens like ordinary words
    m_Tokenizer.ttype = StreamTokenizer.TT_WORD;
    break;
  case StreamTokenizer.TT_WORD:
    if (m_Tokenizer.sval.equals("?")) {
      // "?" marks a missing value
      m_Tokenizer.ttype = '?';
    }
    break;
  default:
    break;
  }
}
/**
 * Reads the next token, checking for a premature end of line or file.
 * Quoted tokens are converted to plain words and a bare "?" is marked as
 * the missing-value token type.
 *
 * @throws IOException if it finds a premature end of line or file
 */
protected void getNextToken() throws IOException {
  int tokenType = m_Tokenizer.nextToken();
  if (tokenType == StreamTokenizer.TT_EOL) {
    errorMessage("premature end of line");
  }
  boolean quoted = (m_Tokenizer.ttype == '\'') || (m_Tokenizer.ttype == '"');
  if (m_Tokenizer.ttype == StreamTokenizer.TT_EOF) {
    errorMessage("premature end of file");
  } else if (quoted) {
    m_Tokenizer.ttype = StreamTokenizer.TT_WORD;
  } else if ((m_Tokenizer.ttype == StreamTokenizer.TT_WORD)
    && (m_Tokenizer.sval.equals("?"))) {
    m_Tokenizer.ttype = '?';
  }
}
/**
 * Reads the token expected to hold an index, checking for a premature end
 * of line or file.
 *
 * @throws IOException if it finds a premature end of line or file
 */
protected void getIndex() throws IOException {
  int tokenType = m_Tokenizer.nextToken();
  if (tokenType == StreamTokenizer.TT_EOL) {
    errorMessage("premature end of line");
  }
  if (m_Tokenizer.ttype == StreamTokenizer.TT_EOF) {
    errorMessage("premature end of file");
  }
}
/**
 * Consumes one token and checks that it ends the current line.
 *
 * @param endOfFileOk whether EOF is OK
 * @throws IOException if it doesn't find an end of line
 */
protected void getLastToken(boolean endOfFileOk) throws IOException {
  if (m_Tokenizer.nextToken() == StreamTokenizer.TT_EOL) {
    return; // line properly terminated
  }
  if (endOfFileOk && (m_Tokenizer.ttype == StreamTokenizer.TT_EOF)) {
    return; // EOF accepted in place of an end of line
  }
  errorMessage("end of line expected");
}
/**
 * Advances the tokenizer to the index token, reporting a premature end of
 * line or file via errorMessage.
 *
 * @throws IOException if it finds a premature end of line or file
 */
protected void getIndex() throws IOException {
  boolean hitEol = (m_Tokenizer.nextToken() == StreamTokenizer.TT_EOL);
  if (hitEol) {
    errorMessage("premature end of line");
  }
  boolean hitEof = (m_Tokenizer.ttype == StreamTokenizer.TT_EOF);
  if (hitEof) {
    errorMessage("premature end of file");
  }
}