// Create a Conjugate Gradient algorithm to estimate the regression.
// NOTE: in the original text the comment and the statement shared one
// physical line, which made the statement part of the comment (dead code);
// they are split here so the code actually compiles and runs.
var mcg = new MultinomialLogisticLearning<ConjugateGradient>();

// Now, we can estimate our model using Conjugate Gradient
MultinomialLogisticRegression mlr = mcg.Learn(inputs, outputs);

// We can compute the model answers (predicted class labels)
int[] answers = mlr.Decide(inputs);

// And also the probability of each of the answers
double[][] probabilities = mlr.Probabilities(inputs);

// Now we can check how good our model is at predicting
// (fraction of misclassified samples via the zero-one loss)
double error = new ZeroOneLoss(outputs).Loss(answers);

// Create a Gradient Descent algorithm to estimate the regression.
// (The original comment wrongly said "Conjugate Gradient"; the learner
// here is parameterized with GradientDescent. The comment and statement
// also shared one physical line, commenting out the code — split here.)
// Variables carry a "Gd" suffix so this snippet does not redeclare the
// identifiers used by the Conjugate Gradient snippet above.
var mgd = new MultinomialLogisticLearning<GradientDescent>();

// Now, we can estimate our model using Gradient Descent
MultinomialLogisticRegression mlrGd = mgd.Learn(inputs, outputs);

// We can compute the model answers (predicted class labels)
int[] answersGd = mlrGd.Decide(inputs);

// And also the probability of each of the answers
double[][] probabilitiesGd = mlrGd.Probabilities(inputs);

// Now we can check how good our model is at predicting
// (fraction of misclassified samples via the zero-one loss)
double errorGd = new ZeroOneLoss(outputs).Loss(answersGd);

// Create a BFGS (Broyden–Fletcher–Goldfarb–Shanno) algorithm to estimate
// the regression. (The original comment wrongly said "Conjugate Gradient";
// the learner here is parameterized with BroydenFletcherGoldfarbShanno.
// The comment and statement also shared one physical line, commenting out
// the code — split here.) Variables carry an "Lbfgs" suffix so this
// snippet does not redeclare identifiers used by the snippets above.
var mlbfgs = new MultinomialLogisticLearning<BroydenFletcherGoldfarbShanno>();

// Now, we can estimate our model using BFGS
MultinomialLogisticRegression mlrLbfgs = mlbfgs.Learn(inputs, outputs);

// We can compute the model answers (predicted class labels)
int[] answersLbfgs = mlrLbfgs.Decide(inputs);

// And also the probability of each of the answers
double[][] probabilitiesLbfgs = mlrLbfgs.Probabilities(inputs);

// Now we can check how good our model is at predicting
// (fraction of misclassified samples via the zero-one loss)
double errorLbfgs = new ZeroOneLoss(outputs).Loss(answersLbfgs);