http://git-wip-us.apache.org/repos/asf/incubator-hivemall-site/blob/6c6a1b42/userguide/supervised_learning/prediction.html
----------------------------------------------------------------------
diff --git a/userguide/supervised_learning/prediction.html b/userguide/supervised_learning/prediction.html
new file mode 100644
index 0000000..a4719e5
--- /dev/null
+++ b/userguide/supervised_learning/prediction.html
@@ -0,0 +1,2604 @@
+
+<!DOCTYPE HTML>
+<html lang="" >
+  <head>
+    <meta charset="UTF-8">
+    <title>How Prediction Works · Hivemall User Manual</title>
+  </head>
+  <body>
+
+<div class="book">
+  <div class="book-body">
+    <div class="body-inner">
+
+<div class="book-header" role="navigation">
+  <h1>
+    <a href=".." >How Prediction Works</a>
+  </h1>
+</div>
+
+  <div class="page-wrapper" tabindex="-1" role="main">
+    <div class="page-inner">
+
+      <section class="normal markdown-section">
+
+      <!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+<!-- toc --><div id="toc" class="toc">
+
+<ul>
+<li><a href="#what-is-prediction-problem">What is a "prediction problem"?</a></li>
+<li><a href="#regression">Regression</a></li>
+<li><a href="#classification">Classification</a></li>
+<li><a href="#mathematical-formulation-of-generic-prediction-model">Mathematical formulation of generic prediction model</a></li>
+</ul>
+
+</div><!-- tocstop -->
+<h1 id="what-is-prediction-problem">What is a "prediction problem"?</h1>
+<p>In the context of machine learning, numerous tasks can be seen as <strong>prediction problems</strong>. For example, this user guide provides solutions for:</p>
+<ul>
+<li><a href="../binaryclass/webspam.html">spam detection</a></li>
+<li><a href="../multiclass/news20.html">news article classification</a></li>
+<li><a href="../regression/kddcup12tr2.html">click-through-rate estimation</a></li>
+</ul>
+<p>For any kind of prediction problem, we generally provide a set of input-output pairs:</p>
+<ul>
+<li><strong>Input:</strong> Set of features<ul>
+<li>e.g., <code>["1:0.001","4:0.23","35:0.0035",...]</code></li>
+</ul>
+</li>
+<li><strong>Output:</strong> Target value<ul>
+<li>e.g., 1, 0, 0.54, 42.195, ...</li>
+</ul>
+</li>
+</ul>
+<p>Once a prediction model has been constructed from such samples, the model can make predictions for unforeseen inputs.</p>
+<p>In order to train prediction models, an algorithm called <strong><em>stochastic gradient descent</em></strong> (SGD) is normally applied. You can learn more about it from the following external resources:</p>
+<ul>
+<li><a href="http://scikit-learn.org/stable/modules/sgd.html" target="_blank">scikit-learn documentation</a></li>
+<li><a href="http://spark.apache.org/docs/latest/mllib-optimization.html" target="_blank">Spark MLlib documentation</a></li>
+</ul>
+<p>Importantly, depending on the type of output value, prediction problems can be categorized into <strong>regression</strong> and <strong>classification</strong> problems.</p>
+<h1 id="regression">Regression</h1>
+<p>The goal of regression is to predict <strong>real values</strong> as shown below:</p>
+<table>
+<thead>
+<tr>
+<th style="text-align:left">features (input)</th>
+<th style="text-align:center">target real value (output)</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td style="text-align:left">["1:0.001","4:0.23","35:0.0035",...]</td>
+<td style="text-align:center">21.3</td>
+</tr>
+<tr>
+<td style="text-align:left">["1:0.2","3:0.1","13:0.005",...]</td>
+<td style="text-align:center">6.2</td>
+</tr>
+<tr>
+<td style="text-align:left">["5:1.3","22:0.089","77:0.0001",...]</td>
+<td style="text-align:center">17.1</td>
+</tr>
+<tr>
+<td style="text-align:left">...</td>
+<td style="text-align:center">...</td>
+</tr>
+</tbody>
+</table>
+<p>In practice, target values can be any real values: small or large, negative or positive, integer or floating point. <a href="../regression/kddcup12tr2.html">Our CTR prediction tutorial</a>, for example, solves a regression problem whose target is a small floating-point value in the 0-1 range.</p>
+<p>While there are several ways to perform regression with Hivemall, <code>train_regressor()</code> is one of the most flexible functions. This function is explained in <a href="../regression/general.html">this page</a>, and a minimal usage sketch follows below.</p>
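+<p>The following is only a rough sketch of the typical usage pattern: the table <code>regression_train</code>, its columns <code>features</code> and <code>label</code>, and the chosen option values are assumptions for illustration, not part of a concrete tutorial. The UDTF emits one weight per feature, and the weights are averaged into a model table:</p>
+<pre><code class="lang-sql">-- minimal sketch: train a regression model (one weight per feature)
+-- assumes a hypothetical table regression_train(features, label)
+CREATE TABLE regression_model AS
+SELECT
+  feature,
+  avg(weight) AS weight
+FROM (
+  SELECT
+    train_regressor(features, label, '-loss squaredloss -reg l2') AS (feature, weight)
+  FROM
+    regression_train
+) t
+GROUP BY feature;
+</code></pre>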
+<h1 id="classification">Classification</h1>
+<p>In contrast to regression, the output of a classification problem is an (integer) <strong>label</strong>:</p>
+<table>
+<thead>
+<tr>
+<th style="text-align:left">features (input)</th>
+<th style="text-align:center">label (output)</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td style="text-align:left">["1:0.001","4:0.23","35:0.0035",...]</td>
+<td style="text-align:center">0</td>
+</tr>
+<tr>
+<td style="text-align:left">["1:0.2","3:0.1","13:0.005",...]</td>
+<td style="text-align:center">1</td>
+</tr>
+<tr>
+<td style="text-align:left">["5:1.3","22:0.089","77:0.0001",...]</td>
+<td style="text-align:center">1</td>
+</tr>
+<tr>
+<td style="text-align:left">...</td>
+<td style="text-align:center">...</td>
+</tr>
+</tbody>
+</table>
+<p>If the number of possible labels is two (0/1 or -1/1), the problem is <strong>binary classification</strong>, and Hivemall's <code>train_classifier()</code> function enables you to build binary classifiers. <a href="../binaryclass/general.html">Binary Classification</a> demonstrates how to use the function; a brief sketch follows below.</p>
+<p>Another type of classification problem is <strong>multi-class classification</strong>, which assumes that the number of possible labels is more than two. Different functions are needed for multi-class problems; our <a href="../multiclass/news20.html">news20</a> and <a href="../multiclass/iris.html">iris</a> tutorials would be helpful.</p>
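+<p>Analogously, here is a minimal sketch of binary classifier training with <code>train_classifier()</code>; the table <code>classification_train</code> (with 0/1 labels) and the option values are illustrative assumptions only:</p>
+<pre><code class="lang-sql">-- minimal sketch: train a binary classifier from 0/1 labels
+-- assumes a hypothetical table classification_train(features, label)
+SELECT
+  feature,
+  avg(weight) AS weight
+FROM (
+  SELECT
+    train_classifier(features, label, '-loss logistic -reg l1') AS (feature, weight)
+  FROM
+    classification_train
+) t
+GROUP BY feature;
+</code></pre>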
+<h1 id="mathematical-formulation-of-generic-prediction-model">Mathematical formulation of generic prediction model</h1>
+<p>Here, we briefly explain how a prediction model is constructed.</p>
+<p>First and foremost, we represent the <strong>input</strong> and <strong>output</strong> of a prediction model as follows:</p>
+<ul>
+<li><strong>Input:</strong> a vector $\mathbf{x}$</li>
+<li><strong>Output:</strong> a value $y$</li>
+</ul>
+<p>For a set of samples $(\mathbf{x}_1, y_1), (\mathbf{x}_2, y_2), \cdots, (\mathbf{x}_n, y_n)$, the goal of a prediction algorithm is to find a weight vector (i.e., parameters) $\mathbf{w}$ by minimizing the following error:</p>
+<p>$$
+E(\mathbf{w}) := \frac{1}{n} \sum_{i=1}^{n} L(\mathbf{w}; \mathbf{x}_i, y_i) + \lambda R(\mathbf{w})
+$$</p>
+<p>In the above formulation, there are two auxiliary functions we have to know:</p>
+<ul>
+<li>$L(\mathbf{w}; \mathbf{x}_i, y_i)$<ul>
+<li><strong>Loss function</strong> for a single sample $(\mathbf{x}_i, y_i)$ and a given $\mathbf{w}$.</li>
+<li>If this function produces small values, the parameter $\mathbf{w}$ has been learnt successfully.</li>
+</ul>
+</li>
+<li>$R(\mathbf{w})$<ul>
+<li><strong>Regularization function</strong> for the current parameter $\mathbf{w}$.</li>
+<li>It prevents the model from falling into an undesirable condition known as <strong>over-fitting</strong>.</li>
+</ul>
+</li>
+</ul>
+<p>($\lambda$ is a small value which controls the effect of the regularization function.)</p>
+<p>Eventually, minimizing the function $E(\mathbf{w})$ can be implemented by the SGD technique as described before, and $\mathbf{w}$ itself is used as the "model" for future prediction.</p>
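+<p>To make the formulation concrete, one common instantiation, shown here purely as an illustration (the actual choice is controlled by the options described below), is logistic loss combined with L2 regularization for labels $y_i \in \{-1, +1\}$:</p>
+<p>$$
+E(\mathbf{w}) = \frac{1}{n} \sum_{i=1}^{n} \log\left(1 + \exp(-y_i \, \mathbf{w}^\top \mathbf{x}_i)\right) + \frac{\lambda}{2} \|\mathbf{w}\|_2^2
+$$</p>
+<p>Swapping the loss or the regularizer in this template yields a different objective and hence a differently behaving model.</p>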
+<p>Interestingly, depending on the choice of loss and regularization function, the resulting prediction model behaves differently; even if one combination works well as a classifier, another choice might be more appropriate for regression.</p>
+<p>Below we list the possible options for <code>train_regressor</code> and <code>train_classifier</code>; this is the reason why these two functions are the most flexible in Hivemall. A combined example is shown after the note below:</p>
+<ul>
+<li><p>Loss function: <code>-loss</code>, <code>-loss_function</code></p>
+<ul>
+<li>For <code>train_regressor</code><ul>
+<li>SquaredLoss (synonym: squared)</li>
+<li>QuantileLoss (synonym: quantile)</li>
+<li>EpsilonInsensitiveLoss (synonym: epsilon_insensitive)</li>
+<li>SquaredEpsilonInsensitiveLoss (synonym: squared_epsilon_insensitive)</li>
+<li>HuberLoss (synonym: huber)</li>
+</ul>
+</li>
+<li>For <code>train_classifier</code><ul>
+<li>HingeLoss (synonym: hinge)</li>
+<li>LogLoss (synonym: log, logistic)</li>
+<li>SquaredHingeLoss (synonym: squared_hinge)</li>
+<li>ModifiedHuberLoss (synonym: modified_huber)</li>
+<li>The following losses are mainly designed for regression but can sometimes be useful in classification as well:<ul>
+<li>SquaredLoss (synonym: squared)</li>
+<li>QuantileLoss (synonym: quantile)</li>
+<li>EpsilonInsensitiveLoss (synonym: epsilon_insensitive)</li>
+<li>SquaredEpsilonInsensitiveLoss (synonym: squared_epsilon_insensitive)</li>
+<li>HuberLoss (synonym: huber)</li>
+</ul>
+</li>
+</ul>
+</li>
+</ul>
+</li>
+<li><p>Regularization function: <code>-reg</code>, <code>-regularization</code></p>
+<ul>
+<li>L1</li>
+<li>L2</li>
+<li>ElasticNet</li>
+<li>RDA</li>
+</ul>
+</li>
+</ul>
+<p>Additionally, there are several variants of the SGD technique, and the variant to use is also configurable:</p>
+<ul>
+<li>Optimizer: <code>-opt</code>, <code>-optimizer</code><ul>
+<li>SGD</li>
+<li>AdaGrad</li>
+<li>AdaDelta</li>
+<li>Adam</li>
+</ul>
+</li>
+</ul>
+<div class="panel panel-primary"><div class="panel-heading"><h3 class="panel-title" id="note"><i class="fa fa-edit"></i> Note</h3></div><div class="panel-body"><p>Option values are case insensitive; you can write <code>sgd</code>, <code>rda</code>, or <code>huberloss</code> in lower-case letters.</p></div></div>
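+<p>For example, the options above can be combined in a single training call. The following sketch, with a hypothetical <code>training_samples</code> table, picks Huber loss, ElasticNet regularization, and the AdaGrad optimizer purely to show the syntax, not as a recommended combination:</p>
+<pre><code class="lang-sql">-- sketch: combining -loss, -reg, and -opt in one call
+SELECT
+  train_regressor(
+    features, label,
+    '-loss huberloss -reg elasticnet -opt adagrad'
+  ) AS (feature, weight)
+FROM
+  training_samples;
+</code></pre>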
+<p>Furthermore, the optimizer accepts several auxiliary options such as:</p>
+<ul>
+<li>Number of iterations: <code>-iter</code>, <code>-iterations</code> [default: 10]<ul>
+<li>Repeat the optimizer's learning procedure more than once to diligently find a better result.</li>
+</ul>
+</li>
+<li>Convergence rate: <code>-cv_rate</code>, <code>-convergence_rate</code> [default: 0.005]<ul>
+<li>Defines a stopping criterion for the iterative training.</li>
+<li>If the criterion is too small or too large, you may encounter over-fitting or under-fitting, depending on the value of the <code>-iter</code> option.</li>
+</ul>
+</li>
+<li>Mini-batch size: <code>-mini_batch</code>, <code>-mini_batch_size</code> [default: 1]<ul>
+<li>Instead of learning from samples one-by-one, this option enables the optimizer to use multiple samples at once to minimize the error function.</li>
+<li>An appropriate mini-batch size leads to efficient training and an effective prediction model.</li>
+</ul>
+</li>
+</ul>
+<p>For details of the available options, the following queries list all of them:</p>
+<pre><code class="lang-sql"><span class="hljs-keyword">select</span> train_regressor(<span class="hljs-built_in">array</span>(), <span class="hljs-number">0</span>, <span class="hljs-string">'-help'</span>);
+<span class="hljs-keyword">select</span> train_classifier(<span class="hljs-built_in">array</span>(), <span class="hljs-number">0</span>, <span class="hljs-string">'-help'</span>);
+</code></pre>
+<p>In practice, you can try different combinations of these options in order to achieve higher prediction accuracy.</p>
+<p><div id="page-footer" class="localized-footer"><hr>
+<p><sub><font color="gray">
+Apache Hivemall is an effort undergoing incubation at The Apache Software Foundation (ASF), sponsored by the Apache Incubator.
+</font></sub></p>
+</div></p>
+
+      </section>
+
+    </div>
+  </div>
<TRUNCATED>