\documentclass[10pt,a4paper]{article}

% Packages
\usepackage{fancyhdr}           % For header and footer
\usepackage{multicol}           % Allows multicols in tables
\usepackage{tabularx}           % Intelligent column widths
\usepackage{tabulary}           % Used in header and footer
\usepackage{hhline}             % Border under tables
\usepackage{graphicx}           % For images
\usepackage{xcolor}             % For hex colours
%\usepackage[utf8x]{inputenc}   % For unicode character support
\usepackage[T1]{fontenc}        % Without this we get weird character replacements
\usepackage{colortbl}           % For coloured tables
\usepackage{setspace}           % For line height
\usepackage{lastpage}           % Needed for total page number
\usepackage{seqsplit}           % Splits long words.
%\usepackage{opensans}          % Can't make this work so far. Shame. Would be lovely.
\usepackage[normalem]{ulem}     % For underlining links
% Most of the following are not required for the majority
% of cheat sheets but are needed for some symbol support.
\usepackage{amsmath}            % Symbols
\usepackage{MnSymbol}           % Symbols
\usepackage{wasysym}            % Symbols
%\usepackage[english,german,french,spanish,italian]{babel}  % Languages

% Document Info
\author{spriiprad}
\pdfinfo{
  /Title (machine-learning-model-basics-intermediate.pdf)
  /Creator (Cheatography)
  /Author (spriiprad)
  /Subject (Machine Learning Model - Basics/Intermediate Cheat Sheet)
}

% Lengths and widths
\addtolength{\textwidth}{6cm}
\addtolength{\textheight}{-1cm}
\addtolength{\hoffset}{-3cm}
\addtolength{\voffset}{-2cm}
\setlength{\tabcolsep}{0.2cm}       % Space between columns
\setlength{\headsep}{-12pt}         % Reduce space between header and content
\setlength{\headheight}{85pt}       % If less, LaTeX automatically increases it
\renewcommand{\footrulewidth}{0pt}  % Remove footer line
\renewcommand{\headrulewidth}{0pt}  % Remove header line
\renewcommand{\seqinsert}{\ifmmode\allowbreak\else\-\fi}  % Hyphens in seqsplit

% These two commands together give roughly
% the right line height in the tables
\renewcommand{\arraystretch}{1.3}
\onehalfspacing

% Commands
\newcommand{\SetRowColor}[1]{\noalign{\gdef\RowColorName{#1}}\rowcolor{\RowColorName}} % Shortcut for row colour
\newcommand{\mymulticolumn}[3]{\multicolumn{#1}{>{\columncolor{\RowColorName}}#2}{#3}} % For coloured multi-cols
\newcolumntype{x}[1]{>{\raggedright}p{#1}} % New column types for ragged-right paragraph columns
\newcommand{\tn}{\tabularnewline} % Required as custom column type in use

% Font and Colours
\definecolor{HeadBackground}{HTML}{333333}
\definecolor{FootBackground}{HTML}{666666}
\definecolor{TextColor}{HTML}{333333}
\definecolor{DarkBackground}{HTML}{013659}
\definecolor{LightBackground}{HTML}{F7F8F9}
\renewcommand{\familydefault}{\sfdefault}
\color{TextColor}

% Header and Footer
\pagestyle{fancy}
\fancyhead{} % Set header to blank
\fancyfoot{} % Set footer to blank
\fancyhead[L]{
\noindent
\begin{multicols}{3}
\begin{tabulary}{5.8cm}{C}
    \SetRowColor{DarkBackground}
    \vspace{-7pt}
    {\parbox{\dimexpr\textwidth-2\fboxsep\relax}{\noindent
        \hspace*{-6pt}\includegraphics[width=5.8cm]{/web/www.cheatography.com/public/images/cheatography_logo.pdf}}
    }
\end{tabulary}
\columnbreak
\begin{tabulary}{11cm}{L}
    \vspace{-2pt}\large{\bf{\textcolor{DarkBackground}{\textrm{Machine Learning Model - Basics/Intermediate Cheat Sheet}}}} \\
    \normalsize{by \textcolor{DarkBackground}{spriiprad} via \textcolor{DarkBackground}{\uline{cheatography.com/122548/cs/22783/}}}
\end{tabulary}
\end{multicols}}

\fancyfoot[L]{
\footnotesize
\noindent
\begin{multicols}{3}
\begin{tabulary}{5.8cm}{LL}
  \SetRowColor{FootBackground}
  \mymulticolumn{2}{p{5.377cm}}{\bf\textcolor{white}{Cheatographer}} \\
  \vspace{-2pt}spriiprad \\
  \uline{cheatography.com/spriiprad} \\
\end{tabulary}
\vfill
\columnbreak
\begin{tabulary}{5.8cm}{L}
  \SetRowColor{FootBackground}
  \mymulticolumn{1}{p{5.377cm}}{\bf\textcolor{white}{Cheat Sheet}} \\
  \vspace{-2pt}Not Yet Published.\\
  Updated 15th May, 2020.\\
  Page {\thepage} of \pageref{LastPage}.
\end{tabulary}
\vfill
\columnbreak
\begin{tabulary}{5.8cm}{L}
  \SetRowColor{FootBackground}
  \mymulticolumn{1}{p{5.377cm}}{\bf\textcolor{white}{Sponsor}} \\
  \SetRowColor{white}
  \vspace{-5pt}
  %\includegraphics[width=48px,height=48px]{dave.jpeg}
  Measure your website readability!\\
  www.readability-score.com
\end{tabulary}
\end{multicols}}

\begin{document}
\raggedright
\raggedcolumns

% Set font size to small. Switch to any value
% from this page to resize cheat sheet text:
% www.emerson.emory.edu/services/latex/latex_169.html
\footnotesize % Small font.

\begin{multicols*}{3}

\begin{tabularx}{5.377cm}{x{2.4885 cm} x{2.4885 cm} }
\SetRowColor{DarkBackground}
\mymulticolumn{2}{x{5.377cm}}{\bf\textcolor{white}{Supervised vs Unsupervised Learning}} \tn
% Row 0
\SetRowColor{LightBackground}
{\bf{Supervised}} & {\bf{Unsupervised}} \tn
% Row Count 1 (+ 1)
% Row 1
\SetRowColor{white}
Used in Classification and Prediction & Used in Dimension Reduction and Clustering \tn
% Row Count 3 (+ 2)
% Row 2
\SetRowColor{LightBackground}
Value of outcome must be known & No outcome variable to predict or classify \tn
% Row Count 6 (+ 3)
% Row 3
\SetRowColor{white}
Learns from training data and is applied to validation data & No learning from labelled outcomes \tn
% Row Count 9 (+ 3)
\hhline{>{\arrayrulecolor{DarkBackground}}--}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{5.377cm}{X}
\SetRowColor{DarkBackground}
\mymulticolumn{1}{x{5.377cm}}{\bf\textcolor{white}{How Supervised Learning Looks}} \tn
\SetRowColor{LightBackground}
\mymulticolumn{1}{p{5.377cm}}{\vspace{1px}\centerline{\includegraphics[width=5.1cm]{/web/www.cheatography.com/public/uploads/spriiprad_1589401009_1_ASYpFfDh7XnreU-ygqXonw.png}}} \tn
\hhline{>{\arrayrulecolor{DarkBackground}}-}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{5.377cm}{X}
\SetRowColor{DarkBackground}
\mymulticolumn{1}{x{5.377cm}}{\bf\textcolor{white}{How Unsupervised Learning Looks}} \tn
\SetRowColor{LightBackground}
\mymulticolumn{1}{p{5.377cm}}{\vspace{1px}\centerline{\includegraphics[width=5.1cm]{/web/www.cheatography.com/public/uploads/spriiprad_1589401041_1_lhkCOodCMZ0-SSziEDpwpA.png}}} \tn
\hhline{>{\arrayrulecolor{DarkBackground}}-}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{5.377cm}{X}
\SetRowColor{DarkBackground}
\mymulticolumn{1}{x{5.377cm}}{\bf\textcolor{white}{Supervised vs Unsupervised TLDR}} \tn
\SetRowColor{LightBackground}
\mymulticolumn{1}{p{5.377cm}}{\vspace{1px}\centerline{\includegraphics[width=5.1cm]{/web/www.cheatography.com/public/uploads/spriiprad_1589401193_1_zWBYt9DQQEf_XxXWLA2tzQ.jpeg}}} \tn
\hhline{>{\arrayrulecolor{DarkBackground}}-}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{5.377cm}{x{2.28942 cm} x{2.68758 cm} }
\SetRowColor{DarkBackground}
\mymulticolumn{2}{x{5.377cm}}{\bf\textcolor{white}{1. Linear Regression}} \tn
% Row 0
\SetRowColor{LightBackground}
Type of Response & Continuous \tn
% Row Count 1 (+ 1)
% Row 1
\SetRowColor{white}
{\bf{Simple Regression}} & {\bf{Multiple Regression}} \tn
% Row Count 3 (+ 2)
% Row 2
\SetRowColor{LightBackground}
One Independent Variable Used & Multiple Independent Variables Used \tn
% Row Count 5 (+ 2)
% Row 3
\SetRowColor{white}
Only One Dependent Variable & Only One Dependent Variable \tn
% Row Count 7 (+ 2)
\hhline{>{\arrayrulecolor{DarkBackground}}--}
\SetRowColor{LightBackground}
\mymulticolumn{2}{x{5.377cm}}{Relationships that are significant when using simple linear regression may no longer be significant when using multiple linear regression, and vice versa. \newline \newline Insignificant relationships in simple linear regression may become significant in multiple linear regression.} \tn
\hhline{>{\arrayrulecolor{DarkBackground}}--}
\end{tabularx}
\par\addvspace{1.3em}
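{\bf{Example: Simple vs Multiple Linear Regression (sketch)}} \newline
A minimal sketch of the simple vs multiple distinction above, assuming NumPy and scikit-learn are installed; the numbers are made up purely for illustration.
\begin{verbatim}
# Toy data: two predictors, one
# continuous response (made up).
import numpy as np
from sklearn.linear_model import (
    LinearRegression)

X = np.array([[1, 4], [2, 3], [3, 8],
              [4, 6], [5, 9]])
y = np.array([3.1, 4.2, 7.8, 8.1, 10.9])

# Simple: one independent variable
simple = LinearRegression()
simple.fit(X[:, [0]], y)

# Multiple: several independent variables
multiple = LinearRegression()
multiple.fit(X, y)

print(simple.coef_, simple.intercept_)
print(multiple.coef_, multiple.intercept_)
\end{verbatim}
\par\addvspace{1.3em}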
\begin{tabularx}{5.377cm}{X}
\SetRowColor{DarkBackground}
\mymulticolumn{1}{x{5.377cm}}{\bf\textcolor{white}{2. How Logistic Regression Works}} \tn
\SetRowColor{LightBackground}
\mymulticolumn{1}{p{5.377cm}}{\vspace{1px}\centerline{\includegraphics[width=5.1cm]{/web/www.cheatography.com/public/uploads/spriiprad_1589399423_1_UgYbimgPXf6XXxMy2yqRLw.png}}} \tn
\hhline{>{\arrayrulecolor{DarkBackground}}-}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{5.377cm}{x{1.69349 cm} x{1.23579 cm} x{1.64772 cm} }
\SetRowColor{DarkBackground}
\mymulticolumn{3}{x{5.377cm}}{\bf\textcolor{white}{2. Logistic Regression}} \tn
% Row 0
\SetRowColor{LightBackground}
Type of Response & \seqsplit{Categorical} &  \tn
% Row Count 2 (+ 2)
% Row 1
\SetRowColor{white}
\mymulticolumn{3}{x{5.377cm}}{It can be used for explanatory tasks (= profiling) or predictive tasks (= classification).} \tn
% Row Count 4 (+ 2)
% Row 2
\SetRowColor{LightBackground}
\mymulticolumn{3}{x{5.377cm}}{The predictors are related to the response Y via a nonlinear function called the logit.} \tn
% Row Count 6 (+ 2)
% Row 3
\SetRowColor{white}
\mymulticolumn{3}{x{5.377cm}}{Reducing the number of predictors can be done via variable selection.} \tn
% Row Count 8 (+ 2)
% Row 4
\SetRowColor{LightBackground}
\mymulticolumn{3}{x{5.377cm}}{{\bf{Types}}} \tn
% Row Count 9 (+ 1)
% Row 5
\SetRowColor{white}
1. Binary Logistic Regression & Two \seqsplit{categories} & Example: Spam or Not Spam \tn
% Row Count 11 (+ 2)
% Row 6
\SetRowColor{LightBackground}
2. Multinomial Logistic Regression & Three or more \seqsplit{categories} (unordered) & Example: Veg, Non-Veg, Vegan \tn
% Row Count 14 (+ 3)
% Row 7
\SetRowColor{white}
3. Ordinal Logistic Regression & Three or more \seqsplit{categories} (ordered) & Example: Movie rating from 1 to 5 \tn
% Row Count 17 (+ 3)
\hhline{>{\arrayrulecolor{DarkBackground}}---}
\end{tabularx}
\par\addvspace{1.3em}
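{\bf{Example: Logistic Regression (sketch)}} \newline
A minimal scikit-learn sketch of the binary and multinomial cases above, on made-up toy data; plain scikit-learn does not cover the ordinal case.
\begin{verbatim}
# Toy data: one predictor (made up).
import numpy as np
from sklearn.linear_model import (
    LogisticRegression)

X = np.array([[0.2], [0.5], [1.1],
              [1.9], [2.5], [3.1]])
y_bin = np.array([0, 0, 0, 1, 1, 1])
y_mult = np.array([0, 0, 1, 1, 2, 2])

# Binary: two categories
binary = LogisticRegression().fit(X, y_bin)

# Multinomial: a multi-class model is
# fitted when y has three or more classes
multi = LogisticRegression().fit(X, y_mult)

print(binary.predict_proba([[1.5]]))
print(multi.predict([[1.5]]))
\end{verbatim}
\par\addvspace{1.3em}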
\begin{tabularx}{5.377cm}{X}
\SetRowColor{DarkBackground}
\mymulticolumn{1}{x{5.377cm}}{\bf\textcolor{white}{3. How Naive Bayes Works}} \tn
\SetRowColor{LightBackground}
\mymulticolumn{1}{p{5.377cm}}{\vspace{1px}\centerline{\includegraphics[width=5.1cm]{/web/www.cheatography.com/public/uploads/spriiprad_1589400525_Picturedd1.png}}} \tn
\hhline{>{\arrayrulecolor{DarkBackground}}-}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{5.377cm}{x{2.4885 cm} x{2.4885 cm} }
\SetRowColor{DarkBackground}
\mymulticolumn{2}{x{5.377cm}}{\bf\textcolor{white}{3. Naive Bayes Classifier}} \tn
% Row 0
\SetRowColor{LightBackground}
Type of Response & Categorical \tn
% Row Count 1 (+ 1)
% Row 1
\SetRowColor{white}
\mymulticolumn{2}{x{5.377cm}}{A probabilistic machine learning model used for classification tasks.} \tn
% Row Count 3 (+ 2)
% Row 2
\SetRowColor{LightBackground}
The heart of the classifier is based on Bayes' theorem. & Bayes' theorem provides a way of relating the likelihood of an outcome to informative prior information. \tn
% Row Count 9 (+ 6)
% Row 3
\SetRowColor{white}
We can find the probability of A happening, given that B has occurred. & B is the evidence and A is the hypothesis. The model is ``naive'' because it assumes the presence of one particular feature does not affect the others. \tn
% Row Count 15 (+ 6)
% Row 4
\SetRowColor{LightBackground}
Bayes' Theorem Probability Formula & P(A|B) = (P(B|A) * P(A)) / P(B) \tn
% Row Count 17 (+ 2)
% Row 5
\SetRowColor{white}
Naive Bayes works well when there is a large number of predictor variables & It also works when there are missing values. \tn
% Row Count 21 (+ 4)
% Row 6
\SetRowColor{LightBackground}
The probability estimates are not very accurate & The classifications or predictions are generally accurate. \tn
% Row Count 24 (+ 3)
% Row 7
\SetRowColor{white}
\mymulticolumn{2}{x{5.377cm}}{{\bf{Assumptions}}} \tn
% Row Count 25 (+ 1)
% Row 8
\SetRowColor{LightBackground}
1. Predictors/features are independent of one another, given the target variable. & 2. All the predictors have an equal effect on the outcome. \tn
% Row Count 29 (+ 4)
\hhline{>{\arrayrulecolor{DarkBackground}}--}
\end{tabularx}
\par\addvspace{1.3em}
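{\bf{Example: Bayes' Theorem in Practice (sketch)}} \newline
A tiny sketch that first evaluates the formula above by hand and then fits scikit-learn's Gaussian Naive Bayes; all numbers are made up for illustration.
\begin{verbatim}
# Bayes' theorem by hand (toy numbers):
# P(A|B) = P(B|A) * P(A) / P(B)
p_a, p_b_given_a, p_b = 0.3, 0.8, 0.5
print(p_b_given_a * p_a / p_b)  # 0.48

# Gaussian Naive Bayes on toy data.
import numpy as np
from sklearn.naive_bayes import GaussianNB

X = np.array([[1.0], [1.2], [0.9],
              [3.1], [2.9], [3.3]])
y = np.array([0, 0, 0, 1, 1, 1])

model = GaussianNB().fit(X, y)
print(model.predict([[2.5]]))
print(model.predict_proba([[2.5]]))
\end{verbatim}
\par\addvspace{1.3em}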
\begin{tabularx}{5.377cm}{X}
\SetRowColor{DarkBackground}
\mymulticolumn{1}{x{5.377cm}}{\bf\textcolor{white}{4. How a Neural Net Works}} \tn
\SetRowColor{LightBackground}
\mymulticolumn{1}{p{5.377cm}}{\vspace{1px}\centerline{\includegraphics[width=5.1cm]{/web/www.cheatography.com/public/uploads/spriiprad_1589400139_1_yGMk1GSKKbyKr_cMarlWnA.jpeg}}} \tn
\hhline{>{\arrayrulecolor{DarkBackground}}-}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{5.377cm}{x{1.44333 cm} x{3.53367 cm} }
\SetRowColor{DarkBackground}
\mymulticolumn{2}{x{5.377cm}}{\bf\textcolor{white}{4. Neural Networks}} \tn
% Row 0
\SetRowColor{LightBackground}
Type of Response & Both Categorical and Continuous (a particularly flexible model) \tn
% Row Count 2 (+ 2)
% Row 1
\SetRowColor{white}
\mymulticolumn{2}{x{5.377cm}}{Learns complex patterns using layers of neurons that mathematically transform the data.} \tn
% Row Count 4 (+ 2)
% Row 2
\SetRowColor{LightBackground}
\mymulticolumn{2}{x{5.377cm}}{The layers between the input and output are referred to as ``hidden layers''.} \tn
% Row Count 6 (+ 2)
% Row 3
\SetRowColor{white}
\mymulticolumn{2}{x{5.377cm}}{Learns relationships between the features that other algorithms cannot easily discover.} \tn
% Row Count 8 (+ 2)
% Row 4
\SetRowColor{LightBackground}
\mymulticolumn{2}{x{5.377cm}}{{\bf{Architecture of a Neural Net}}} \tn
% Row Count 9 (+ 1)
% Row 5
\SetRowColor{white}
Input Layer & Nodes (variables) that receive information from the external environment \tn
% Row Count 12 (+ 3)
% Row 6
\SetRowColor{LightBackground}
Output Layer & Nodes (variables) that send information to the external environment or to another element in the network \tn
% Row Count 16 (+ 4)
% Row 7
\SetRowColor{white}
Hidden Layer & Nodes that only communicate with other layers of the network and are not visible to the external environment \tn
% Row Count 20 (+ 4)
\hhline{>{\arrayrulecolor{DarkBackground}}--}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{5.377cm}{X}
\SetRowColor{DarkBackground}
\mymulticolumn{1}{x{5.377cm}}{\bf\textcolor{white}{5. How Decision Trees Work}} \tn
\SetRowColor{LightBackground}
\mymulticolumn{1}{p{5.377cm}}{\vspace{1px}\centerline{\includegraphics[width=5.1cm]{/web/www.cheatography.com/public/uploads/spriiprad_1589399548_Picture1.png}}} \tn
\hhline{>{\arrayrulecolor{DarkBackground}}-}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{5.377cm}{X}
\SetRowColor{DarkBackground}
\mymulticolumn{1}{x{5.377cm}}{\bf\textcolor{white}{5. Different Types of Trees}} \tn
\SetRowColor{LightBackground}
\mymulticolumn{1}{p{5.377cm}}{\vspace{1px}\centerline{\includegraphics[width=5.1cm]{/web/www.cheatography.com/public/uploads/spriiprad_1589399970_LearningTrees.png}}} \tn
\hhline{>{\arrayrulecolor{DarkBackground}}-}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{5.377cm}{X}
\SetRowColor{DarkBackground}
\mymulticolumn{1}{x{5.377cm}}{\bf\textcolor{white}{5. How Ensemble Models Work}} \tn
\SetRowColor{LightBackground}
\mymulticolumn{1}{p{5.377cm}}{\vspace{1px}\centerline{\includegraphics[width=5.1cm]{/web/www.cheatography.com/public/uploads/spriiprad_1589400072_Ensemble-example.png}}} \tn
\hhline{>{\arrayrulecolor{DarkBackground}}-}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{5.377cm}{x{2.4885 cm} x{2.4885 cm} }
\SetRowColor{DarkBackground}
\mymulticolumn{2}{x{5.377cm}}{\bf\textcolor{white}{5. Decision Trees}} \tn
% Row 0
\SetRowColor{LightBackground}
\mymulticolumn{2}{x{5.377cm}}{The decision tree is produced by successively cutting the data set into smaller and smaller chunks, which are increasingly ``pure'' in terms of the value of the target variable.} \tn
% Row Count 4 (+ 4)
% Row 1
\SetRowColor{white}
{\bf{Random Forest - Ensemble Method}} & {\bf{Boosted Trees - Ensemble Method}} \tn
% Row Count 6 (+ 2)
% Row 2
\SetRowColor{LightBackground}
Consists of a large number of individual decision trees that operate as an ensemble & Boosting is a method of converting weak learners into strong learners. \tn
% Row Count 11 (+ 5)
% Row 3
\SetRowColor{white}
Each individual tree in the random forest spits out a class prediction, and the class with the most votes becomes the model's prediction & Boosted trees build a large, additive tree by fitting a sequence of smaller trees \tn
% Row Count 18 (+ 7)
% Row 4
\SetRowColor{LightBackground}
The predictions (and therefore the errors) made by the individual trees need to have low correlations with each other. & In boosting, each new tree is fit on a modified version of the original data set. \tn
% Row Count 24 (+ 6)
% Row 5
\SetRowColor{white}
Random Forests train each tree independently, using a random sample of the data. & Gradient-boosted trees (GBTs) train one tree at a time, where each new tree helps to correct errors made by previously trained trees. \tn
% Row Count 30 (+ 6)
\hhline{>{\arrayrulecolor{DarkBackground}}--}
\end{tabularx}
\par\addvspace{1.3em}
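{\bf{Example: Random Forest vs Boosted Trees (sketch)}} \newline
A minimal scikit-learn sketch contrasting the two ensemble styles above: independent trees combined by majority vote versus trees fitted sequentially to correct earlier errors. The data is synthetic and purely illustrative.
\begin{verbatim}
# Synthetic data, illustrative only.
from sklearn.datasets import (
    make_classification)
from sklearn.ensemble import (
    RandomForestClassifier,
    GradientBoostingClassifier)

X, y = make_classification(
    n_samples=200, n_features=8,
    random_state=0)

# Independent trees on random samples;
# prediction = majority vote
rf = RandomForestClassifier(
    n_estimators=100, random_state=0)
rf.fit(X, y)

# Trees added one at a time, each one
# correcting the current ensemble's errors
gbt = GradientBoostingClassifier(
    n_estimators=100, random_state=0)
gbt.fit(X, y)

print(rf.score(X, y), gbt.score(X, y))
\end{verbatim}
\par\addvspace{1.3em}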
\begin{tabularx}{5.377cm}{X}
\SetRowColor{DarkBackground}
\mymulticolumn{1}{x{5.377cm}}{\bf\textcolor{white}{6. How KNN Works}} \tn
\SetRowColor{LightBackground}
\mymulticolumn{1}{p{5.377cm}}{\vspace{1px}\centerline{\includegraphics[width=5.1cm]{/web/www.cheatography.com/public/uploads/spriiprad_1589400339_Pictures1.png}}} \tn
\hhline{>{\arrayrulecolor{DarkBackground}}-}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{5.377cm}{x{1.69218 cm} x{3.28482 cm} }
\SetRowColor{DarkBackground}
\mymulticolumn{2}{x{5.377cm}}{\bf\textcolor{white}{6. K-Nearest Neighbors}} \tn
% Row 0
\SetRowColor{LightBackground}
Type of Response & Both Categorical and Continuous \tn
% Row Count 2 (+ 2)
% Row 1
\SetRowColor{white}
\mymulticolumn{2}{x{5.377cm}}{KNN is a method for classifying objects based on their similarity to data with known classifications.} \tn
% Row Count 5 (+ 3)
% Row 2
\SetRowColor{LightBackground}
\mymulticolumn{2}{x{5.377cm}}{K-Nearest Neighbors (KNN) makes a prediction for a new observation by searching for the most similar training observations and pooling their values (usually by taking the mean).} \tn
% Row Count 9 (+ 4)
% Row 3
\SetRowColor{white}
\mymulticolumn{2}{x{5.377cm}}{The training set has to be very large for this to work effectively.} \tn
% Row Count 11 (+ 2)
% Row 4
\SetRowColor{LightBackground}
\mymulticolumn{2}{x{5.377cm}}{Redundant and/or irrelevant variables can distort the classification results; the method is sensitive to noise in the data.} \tn
% Row Count 14 (+ 3)
% Row 5
\SetRowColor{white}
\mymulticolumn{2}{x{5.377cm}}{Nominal variables pose problems for measuring distance.} \tn
% Row Count 16 (+ 2)
% Row 6
\SetRowColor{LightBackground}
\mymulticolumn{2}{x{5.377cm}}{It is a non-parametric model: it does not require distribution assumptions regarding the variables and does not make statistical inferences about a population.} \tn
% Row Count 20 (+ 4)
% Row 7
\SetRowColor{white}
\mymulticolumn{2}{x{5.377cm}}{KNN is an example of a family of algorithms known as instance-based or memory-based learning, which classify new objects by their similarity to previously known objects.} \tn
% Row Count 24 (+ 4)
\hhline{>{\arrayrulecolor{DarkBackground}}--}
\end{tabularx}
\par\addvspace{1.3em}

% That's all folks
\end{multicols*}

\end{document}