/**
 * Trains an LCLR model with squared hinge loss on a binary-labeled dataset.
 * <p>
 * ALWAYS use
 * {@link WeightVector#predictLCLRBinaryScore(IInstance, AbstractStructureFinder)}
 * to obtain the prediction score for binary examples.
 *
 * @param init_wv
 *            The initial weight vector. Because this learning problem is
 *            non-convex, a good initialization point matters.
 * @param struct_finder
 *            The inference solver (dynamic programming, ILP, ...): given an
 *            input ({@code IInstance}) and a weight vector
 *            ({@code WeightVector}), it returns the best structure.
 * @param bp
 *            The binary-labeled dataset.
 * @param para
 *            Parameters for JLIS.
 * @return the learned weight vector
 * @throws Exception
 *             if the underlying joint solver fails
 */
@Override
public WeightVector trainLCLR(final WeightVector init_wv,
        final AbstractStructureFinder struct_finder, final BinaryProblem bp,
        final JLISParameters para) throws Exception {
    // Binary-only training: delegate to the joint solver with an empty
    // structured problem (empty_s).
    return getJointWeightVectorFast(init_wv, struct_finder, empty_s, bp, para);
}
/** * The function for the users to call for the structured SVM * * @param struct_finder * The inference solver (dynamic programming, ILP,...). Given an * input (IInstance) and a Weight vector (WeightVector), return * the best structure (AbstractStructures) * @param sp * Structured Labeled Dataset * @param para * parameters for JLIS * @return * @throws Exception */ @Override public WeightVector trainStructuredSVM( final AbstractLossSensitiveStructureFinder struct_finder, final StructuredProblem sp, JLISParameters para) throws Exception { WeightVector wv = new WeightVector(para.total_number_features + 1); // +1 // because // we // skip // wv.u[0] //wv.setExtendable(false); return getJointWeightVectorFast(wv, struct_finder, sp, empty_b, para); }
// Joint training over both the structured (sp) and binary (bp) problems;
// NOTE(review): this statement is mid-method — the enclosing definition is
// outside this view, so the surrounding context should be confirmed.
WeightVector res_wv = getJointWeightVectorFast(init_wv, struct_finder, sp, bp, para);