\documentclass[10pt,a4paper]{article}

% Packages
\usepackage{fancyhdr}         % For header and footer
\usepackage{multicol}         % Allows multicols in tables
\usepackage{tabularx}         % Intelligent column widths
\usepackage{tabulary}         % Used in header and footer
\usepackage{hhline}           % Border under tables
\usepackage{graphicx}         % For images
\usepackage{xcolor}           % For hex colours
%\usepackage[utf8x]{inputenc} % For unicode character support
\usepackage[T1]{fontenc}      % Without this we get weird character replacements
\usepackage{colortbl}         % For coloured tables
\usepackage{setspace}         % For line height
\usepackage{lastpage}         % Needed for total page number
\usepackage{seqsplit}         % Splits long words.
%\usepackage{opensans}        % Can't make this work so far. Shame. Would be lovely.
\usepackage[normalem]{ulem}   % For underlining links
% Most of the following are not required for the majority
% of cheat sheets but are needed for some symbol support.
\usepackage{amsmath}          % Symbols
\usepackage{MnSymbol}         % Symbols
\usepackage{wasysym}          % Symbols
%\usepackage[english,german,french,spanish,italian]{babel} % Languages

% Document Info
\author{Jingyi Feng (jenniferfjy)}
\pdfinfo{
  /Title (machine-learning-in-r-and-python.pdf)
  /Creator (Cheatography)
  /Author (Jingyi Feng (jenniferfjy))
  /Subject (Machine Learning in R and Python Cheat Sheet)
}

% Lengths and widths
\addtolength{\textwidth}{6cm}
\addtolength{\textheight}{-1cm}
\addtolength{\hoffset}{-3cm}
\addtolength{\voffset}{-2cm}
\setlength{\tabcolsep}{0.2cm} % Space between columns
\setlength{\headsep}{-12pt}   % Reduce space between header and content
\setlength{\headheight}{85pt} % If less, LaTeX automatically increases it
\renewcommand{\footrulewidth}{0pt} % Remove footer line
\renewcommand{\headrulewidth}{0pt} % Remove header line
\renewcommand{\seqinsert}{\ifmmode\allowbreak\else\-\fi} % Hyphens in seqsplit

% These two commands together give roughly
% the right line height in the tables
\renewcommand{\arraystretch}{1.3}
\onehalfspacing

% Commands
\newcommand{\SetRowColor}[1]{\noalign{\gdef\RowColorName{#1}}\rowcolor{\RowColorName}} % Shortcut for row colour
\newcommand{\mymulticolumn}[3]{\multicolumn{#1}{>{\columncolor{\RowColorName}}#2}{#3}} % For coloured multi-cols
\newcolumntype{x}[1]{>{\raggedright}p{#1}} % New column types for ragged-right paragraph columns
\newcommand{\tn}{\tabularnewline} % Required as custom column type in use

% Font and Colours
\definecolor{HeadBackground}{HTML}{333333}
\definecolor{FootBackground}{HTML}{666666}
\definecolor{TextColor}{HTML}{333333}
\definecolor{DarkBackground}{HTML}{5D66A3}
\definecolor{LightBackground}{HTML}{F4F5F9}
\renewcommand{\familydefault}{\sfdefault}
\color{TextColor}

% Header and Footer
\pagestyle{fancy}
\fancyhead{} % Set header to blank
\fancyfoot{} % Set footer to blank
\fancyhead[L]{
\noindent
\begin{multicols}{3}
\begin{tabulary}{5.8cm}{C}
    \SetRowColor{DarkBackground}
    \vspace{-7pt}
    {\parbox{\dimexpr\textwidth-2\fboxsep\relax}{\noindent
        \hspace*{-6pt}\includegraphics[width=5.8cm]{/web/www.cheatography.com/public/images/cheatography_logo.pdf}}
    }
\end{tabulary}
\columnbreak
\begin{tabulary}{11cm}{L}
    \vspace{-2pt}\large{\bf{\textcolor{DarkBackground}{\textrm{Machine Learning in R and Python Cheat Sheet}}}} \\
    \normalsize{by \textcolor{DarkBackground}{Jingyi Feng (jenniferfjy)} via \textcolor{DarkBackground}{\uline{cheatography.com/169260/cs/35433/}}}
\end{tabulary}
\end{multicols}}

\fancyfoot[L]{
\footnotesize
\noindent
\begin{multicols}{3}
\begin{tabulary}{5.8cm}{LL}
  \SetRowColor{FootBackground}
  \mymulticolumn{2}{p{5.377cm}}{\bf\textcolor{white}{Cheatographer}}  \\
  \vspace{-2pt}Jingyi Feng (jenniferfjy) \\
  \uline{cheatography.com/jenniferfjy} \\
\end{tabulary}
\vfill
\columnbreak
\begin{tabulary}{5.8cm}{L}
  \SetRowColor{FootBackground}
  \mymulticolumn{1}{p{5.377cm}}{\bf\textcolor{white}{Cheat Sheet}}  \\
  \vspace{-2pt}Not Yet Published.\\
  Updated 13th November, 2022.\\
  Page {\thepage} of \pageref{LastPage}.
\end{tabulary}
\vfill
\columnbreak
\begin{tabulary}{5.8cm}{L}
  \SetRowColor{FootBackground}
  \mymulticolumn{1}{p{5.377cm}}{\bf\textcolor{white}{Sponsor}}  \\
  \SetRowColor{white}
  \vspace{-5pt}
  %\includegraphics[width=48px,height=48px]{dave.jpeg}
  Measure your website readability!\\
  www.readability-score.com
\end{tabulary}
\end{multicols}}

\begin{document}
\raggedright
\raggedcolumns

% Set font size to small. Switch to any value
% from this page to resize cheat sheet text:
% www.emerson.emory.edu/services/latex/latex_169.html
\footnotesize % Small font.

\begin{multicols*}{2}

\begin{tabularx}{8.4cm}{X}
\SetRowColor{DarkBackground}
\mymulticolumn{1}{x{8.4cm}}{\bf\textcolor{white}{Introduction}}  \tn
% Row 0
\SetRowColor{LightBackground}
\mymulticolumn{1}{x{8.4cm}}{This cheat sheet provides a comparison between basic data processing techniques as well as machine learning models in both R and Python.}  \tn
% Row Count 3 (+ 3)
\hhline{>{\arrayrulecolor{DarkBackground}}-}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{8.4cm}{X}
\SetRowColor{DarkBackground}
\mymulticolumn{1}{x{8.4cm}}{\bf\textcolor{white}{Documentations}}  \tn
% Row 0
\SetRowColor{LightBackground}
\mymulticolumn{1}{x{8.4cm}}{\seqsplit{https://scikit-learn.org/stable/auto\_examples/index.html}}  \tn
% Row Count 2 (+ 2)
% Row 1
\SetRowColor{white}
\mymulticolumn{1}{x{8.4cm}}{\seqsplit{https://seaborn.pydata.org/}}  \tn
% Row Count 3 (+ 1)
% Row 2
\SetRowColor{LightBackground}
\mymulticolumn{1}{x{8.4cm}}{\seqsplit{https://cran.r-project.org/web/packages/rpart/index.html}}  \tn
% Row Count 5 (+ 2)
% Row 3
\SetRowColor{white}
\mymulticolumn{1}{x{8.4cm}}{\seqsplit{https://cran.r-project.org/web/packages/caret/index.html}}  \tn
% Row Count 7 (+ 2)
% Row 4
\SetRowColor{LightBackground}
\mymulticolumn{1}{x{8.4cm}}{\seqsplit{https://cran.r-project.org/web/packages/randomForest/index.html}}  \tn
% Row Count 9 (+ 2)
% Row 5
\SetRowColor{white}
\mymulticolumn{1}{x{8.4cm}}{\seqsplit{https://www.rdocumentation.org/packages/stats/versions/3.6.2}}  \tn
% Row Count 11 (+ 2)
\hhline{>{\arrayrulecolor{DarkBackground}}-}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{8.4cm}{x{2.4 cm} x{5.6 cm} }
\SetRowColor{DarkBackground}
\mymulticolumn{2}{x{8.4cm}}{\bf\textcolor{white}{Load dataset in R}}  \tn
% Row 0
\SetRowColor{LightBackground}
\seqsplit{library(datasets)} & Import packages \tn
% Row Count 2 (+ 2)
% Row 1
\SetRowColor{white}
data(iris) & Load dataset \tn
% Row Count 3 (+ 1)
% Row 2
\SetRowColor{LightBackground}
head(iris) & Look up the first 6 rows of the dataset \tn
% Row Count 5 (+ 2)
% Row 3
\SetRowColor{white}
\seqsplit{summary(iris)} & Get summary statistics of each column \tn
% Row Count 7 (+ 2)
% Row 4
\SetRowColor{LightBackground}
names(iris) & Get the column names \tn
% Row Count 8 (+ 1)
\hhline{>{\arrayrulecolor{DarkBackground}}--}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{8.4cm}{x{4 cm} x{4 cm} }
\SetRowColor{DarkBackground}
\mymulticolumn{2}{x{8.4cm}}{\bf\textcolor{white}{Data preprocessing in R}}  \tn
% Row 0
\SetRowColor{LightBackground}
scaling = preProcess(data, method = c('center', 'scale')) & Create scaling based on data \tn
% Row Count 3 (+ 3)
% Row 1
\SetRowColor{white}
data\_scaled = predict(scaling, data) & Apply scaling to data \tn
% Row Count 5 (+ 2)
% Row 2
\SetRowColor{LightBackground}
train\_partition = \seqsplit{createDataPartition(y}, p = 0.8, list = FALSE) & Balanced splitting based on the outcome (80/20 split) \tn
% Row Count 9 (+ 4)
% Row 3
\SetRowColor{white}
data\_train = data{[}train\_partition,{]} & Split data into train and test sets \tn
% Row Count 11 (+ 2)
% Row 4
\SetRowColor{LightBackground}
data\_test = data{[}-train\_partition,{]} & Split data into train and test sets \tn
% Row Count 13 (+ 2)
\hhline{>{\arrayrulecolor{DarkBackground}}--}
\end{tabularx}
\par\addvspace{1.3em}

% NOTE(review): R modelling functions take the formula as their first
% argument (lm(formula, data), glm(formula, family, data), etc.); the
% original rows passed data first, which does not run in R.
\begin{tabularx}{8.4cm}{x{4 cm} x{4 cm} }
\SetRowColor{DarkBackground}
\mymulticolumn{2}{x{8.4cm}}{\bf\textcolor{white}{Supervised learning models in R}}  \tn
% Row 0
\SetRowColor{LightBackground}
model = lm(y \textasciitilde{} x, data) & Simple linear regression \tn
% Row Count 2 (+ 2)
% Row 1
\SetRowColor{white}
model = lm(y \textasciitilde{} x1 + x2 + x3, data) & Multiple linear regression \tn
% Row Count 4 (+ 2)
% Row 2
\SetRowColor{LightBackground}
summary(model) & Print summary statistics from linear model \tn
% Row Count 7 (+ 3)
% Row 3
\SetRowColor{white}
predictions = predict(object, newdata) & Make prediction based on the model object \tn
% Row Count 10 (+ 3)
% Row 4
\SetRowColor{LightBackground}
model = glm(y \textasciitilde{} x1 + x2 + x3, data = data, family = 'binomial') & Logistic regression \tn
% Row Count 13 (+ 3)
% Row 5
\SetRowColor{white}
model = svm(y \textasciitilde{} x1 + x2 + x3, data, params) & Support vector machines (SVM) \tn
% Row Count 16 (+ 3)
% Row 6
\SetRowColor{LightBackground}
model = rpart(y \textasciitilde{} x1 + x2 + x3, data, params) & Decision trees \tn
% Row Count 19 (+ 3)
% Row 7
\SetRowColor{white}
model = randomForest(y \textasciitilde{} x1 + x2 + x3, data, params) & Random forest \tn
% Row Count 22 (+ 3)
% Row 8
\SetRowColor{LightBackground}
data\_xgb = xgb.DMatrix(data, label) & Transform the data into DMatrix format \tn
% Row Count 24 (+ 2)
% Row 9
\SetRowColor{white}
model = xgb.train(params, data\_xgb, nrounds) & Gradient boosting models \tn
% Row Count 27 (+ 3)
% Row 10
\SetRowColor{LightBackground}
predictions = knn(train, test, cl, params) & k-NN with labels cl and parameters (e.g., number of neighbors) \tn
% Row Count 31 (+ 4)
\hhline{>{\arrayrulecolor{DarkBackground}}--}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{8.4cm}{x{3.36 cm} x{4.64 cm} }
\SetRowColor{DarkBackground}
\mymulticolumn{2}{x{8.4cm}}{\bf\textcolor{white}{Unsupervised learning models}}  \tn
% Row 0
\SetRowColor{LightBackground}
model = kmeans(x, params) & K-Means clustering \tn
% Row Count 2 (+ 2)
% Row 1
\SetRowColor{white}
model = prcomp(x, params) & Principal components analysis (PCA) \tn
% Row Count 4 (+ 2)
\hhline{>{\arrayrulecolor{DarkBackground}}--}
\end{tabularx}
\par\addvspace{1.3em}

% NOTE(review): caret's confusionMatrix takes predictions first and the
% reference (observed) values second: confusionMatrix(data, reference).
\begin{tabularx}{8.4cm}{x{3.92 cm} x{4.08 cm} }
\SetRowColor{DarkBackground}
\mymulticolumn{2}{x{8.4cm}}{\bf\textcolor{white}{Model performance in R}}  \tn
% Row 0
\SetRowColor{LightBackground}
RMSE(pred, actual) & Root mean square error \tn
% Row Count 2 (+ 2)
% Row 1
\SetRowColor{white}
R2(pred, actual, form = 'traditional' ) & Proportion of the variance explained by the model \tn
% Row Count 5 (+ 3)
% Row 2
\SetRowColor{LightBackground}
mean(actual == pred) & Accuracy (proportion of predictions that are correct) \tn
% Row Count 8 (+ 3)
% Row 3
\SetRowColor{white}
\seqsplit{confusionMatrix(pred}, actual) & Confusion matrix \tn
% Row Count 10 (+ 2)
% Row 4
\SetRowColor{LightBackground}
auc(actual, pred) & Area under the ROC curve \tn
% Row Count 12 (+ 2)
% Row 5
\SetRowColor{white}
f1Score(actual, pred) & Harmonic mean of precision and recall \tn
% Row Count 14 (+ 2)
\hhline{>{\arrayrulecolor{DarkBackground}}--}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{8.4cm}{x{6.16 cm} x{1.84 cm} }
\SetRowColor{DarkBackground}
\mymulticolumn{2}{x{8.4cm}}{\bf\textcolor{white}{Data visualization in R}}  \tn
% Row 0
\SetRowColor{LightBackground}
geom\_point(x, y, color, size, fill, alpha) & Scatter plot \tn
% Row Count 2 (+ 2)
% Row 1
\SetRowColor{white}
geom\_line(x, y, color, size, fill, alpha, linetype) & Line plot \tn
% Row Count 4 (+ 2)
% Row 2
\SetRowColor{LightBackground}
geom\_bar(x, y, color, size, fill, alpha) & Bar chart \tn
% Row Count 6 (+ 2)
% Row 3
\SetRowColor{white}
geom\_boxplot(x, y, color) & Box plot \tn
% Row Count 7 (+ 1)
% Row 4
\SetRowColor{LightBackground}
geom\_tile(x, y, color, fill) & Heatmap \tn
% Row Count 8 (+ 1)
\hhline{>{\arrayrulecolor{DarkBackground}}--}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{8.4cm}{X}
\SetRowColor{DarkBackground}
\mymulticolumn{1}{x{8.4cm}}{\bf\textcolor{white}{Python}}  \tn
% Row 0
\SetRowColor{LightBackground}
\mymulticolumn{1}{x{8.4cm}}{}  \tn
% Row Count 0 (+ 0)
\hhline{>{\arrayrulecolor{DarkBackground}}-}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{8.4cm}{x{2.64 cm} x{5.36 cm} }
\SetRowColor{DarkBackground}
\mymulticolumn{2}{x{8.4cm}}{\bf\textcolor{white}{Import file in Python}}  \tn
% Row 0
\SetRowColor{LightBackground}
import pandas as pd & Import package \tn
% Row Count 2 (+ 2)
% Row 1
\SetRowColor{white}
df = \seqsplit{pd.read\_csv()} & Read csv files \tn
% Row Count 4 (+ 2)
% Row 2
\SetRowColor{LightBackground}
df.head(n) & Look up the first n rows of the dataset \tn
% Row Count 6 (+ 2)
% Row 3
\SetRowColor{white}
\seqsplit{df.describe()} & Get summary statistics of each column \tn
% Row Count 8 (+ 2)
% Row 4
\SetRowColor{LightBackground}
df.columns & Get column names \tn
% Row Count 9 (+ 1)
\hhline{>{\arrayrulecolor{DarkBackground}}--}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{8.4cm}{x{4 cm} x{4 cm} }
\SetRowColor{DarkBackground}
\mymulticolumn{2}{x{8.4cm}}{\bf\textcolor{white}{Data Processing in Python}}  \tn
% Row 0
\SetRowColor{LightBackground}
X\_train, X\_test, y\_train, y\_test = \seqsplit{train\_test\_split(X}, y, test\_size=0.2, random\_state=0) & Split the dataset into training (80\%) and test (20\%) sets \tn
% Row Count 5 (+ 5)
% Row 1
\SetRowColor{white}
scaler = StandardScaler() & Standardize features by removing the mean and scaling to unit variance \tn
% Row Count 9 (+ 4)
% Row 2
\SetRowColor{LightBackground}
X\_train = \seqsplit{scaler.fit\_transform(X\_train)} & Fit and transform scaler on X\_train \tn
% Row Count 11 (+ 2)
% Row 3
\SetRowColor{white}
X\_test = \seqsplit{scaler.transform(X\_test)} & Transform X\_test \tn
% Row Count 13 (+ 2)
\hhline{>{\arrayrulecolor{DarkBackground}}--}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{8.4cm}{x{4.24 cm} x{3.76 cm} }
\SetRowColor{DarkBackground}
\mymulticolumn{2}{x{8.4cm}}{\bf\textcolor{white}{Supervised learning models in Python}}  \tn
% Row 0
\SetRowColor{LightBackground}
model = LinearRegression() & Linear regression \tn
% Row Count 2 (+ 2)
% Row 1
\SetRowColor{white}
model.fit(X\_train, y\_train) & Fit linear model \tn
% Row Count 4 (+ 2)
% Row 2
\SetRowColor{LightBackground}
\seqsplit{model.predict(X\_test)} & Predict using the linear model \tn
% Row Count 6 (+ 2)
% Row 3
\SetRowColor{white}
\seqsplit{LogisticRegression().fit(X\_train}, y\_train) & Logistic regression \tn
% Row Count 8 (+ 2)
% Row 4
\SetRowColor{LightBackground}
\seqsplit{LinearSVC().fit(X\_train}, y\_train) & Train primal SVM \tn
% Row Count 10 (+ 2)
% Row 5
\SetRowColor{white}
SVC().fit(X\_train, y\_train) & Train dual SVM \tn
% Row Count 12 (+ 2)
% Row 6
\SetRowColor{LightBackground}
\seqsplit{DecisionTreeClassifier()}.fit(X\_train, y\_train) & Decision tree classifier \tn
% Row Count 15 (+ 3)
% Row 7
\SetRowColor{white}
\seqsplit{RandomForestClassifier()}.fit(X\_train, y\_train) & Random forest classifier \tn
% Row Count 18 (+ 3)
% Row 8
\SetRowColor{LightBackground}
\seqsplit{GradientBoostingClassifier()}.fit(X\_train, y\_train) & Gradient boosting for classification \tn
% Row Count 21 (+ 3)
% Row 9
\SetRowColor{white}
\seqsplit{XGBClassifier().fit(X\_train}, y\_train) & XGboost classifier \tn
% Row Count 23 (+ 2)
% Row 10
\SetRowColor{LightBackground}
\seqsplit{KNeighborsClassifier()}.fit(X\_train, y\_train) & k-NN \tn
% Row Count 26 (+ 3)
\hhline{>{\arrayrulecolor{DarkBackground}}--}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{8.4cm}{x{2.48 cm} x{5.52 cm} }
\SetRowColor{DarkBackground}
\mymulticolumn{2}{x{8.4cm}}{\bf\textcolor{white}{Unsupervised learning models}}  \tn
% Row 0
\SetRowColor{LightBackground}
\seqsplit{KMeans().fit(X)} & K-Means clustering \tn
% Row Count 2 (+ 2)
% Row 1
\SetRowColor{white}
\seqsplit{PCA().fit(X)} & Principal component analysis (PCA) \tn
% Row Count 4 (+ 2)
\hhline{>{\arrayrulecolor{DarkBackground}}--}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{8.4cm}{x{4 cm} x{4 cm} }
\SetRowColor{DarkBackground}
\mymulticolumn{2}{x{8.4cm}}{\bf\textcolor{white}{Model performance in Python}}  \tn
% Row 0
\SetRowColor{LightBackground}
\seqsplit{metrics.mean\_squared\_error(y\_true}, y\_pred, squared=False) & Root mean squared error \tn
% Row Count 3 (+ 3)
% Row 1
\SetRowColor{white}
\seqsplit{metrics.r2\_score(y\_true}, y\_pred) & Proportion of the variance explained by the model \tn
% Row Count 6 (+ 3)
% Row 2
\SetRowColor{LightBackground}
\seqsplit{metrics.confusion\_matrix(y\_true}, y\_pred) & Confusion matrix \tn
% Row Count 8 (+ 2)
% Row 3
\SetRowColor{white}
\seqsplit{metrics.accuracy\_score(y\_true}, y\_pred) & Accuracy classification score \tn
% Row Count 10 (+ 2)
% Row 4
\SetRowColor{LightBackground}
\seqsplit{metrics.roc\_auc\_score(y\_true}, y\_score) & Compute ROC-AUC from prediction scores \tn
% Row Count 12 (+ 2)
% Row 5
\SetRowColor{white}
metrics.f1\_score(y\_true, y\_pred, average='macro') & Harmonic mean of the precision and recall \tn
% Row Count 15 (+ 3)
\hhline{>{\arrayrulecolor{DarkBackground}}--}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{8.4cm}{x{6.08 cm} x{1.92 cm} }
\SetRowColor{DarkBackground}
\mymulticolumn{2}{x{8.4cm}}{\bf\textcolor{white}{Data visualization in Python}}  \tn
% Row 0
\SetRowColor{LightBackground}
sns.scatterplot(x, y, hue, size) & Scatter plot \tn
% Row Count 2 (+ 2)
% Row 1
\SetRowColor{white}
sns.lineplot(x, y, hue, size) & Line plot \tn
% Row Count 3 (+ 1)
% Row 2
\SetRowColor{LightBackground}
sns.barplot(x, y, hue) & Bar chart \tn
% Row Count 4 (+ 1)
% Row 3
\SetRowColor{white}
sns.boxplot(x, y, hue) & Box plot \tn
% Row Count 5 (+ 1)
% Row 4
\SetRowColor{LightBackground}
sns.heatmap(data, linecolor, linewidth) & Heatmap \tn
% Row Count 7 (+ 2)
\hhline{>{\arrayrulecolor{DarkBackground}}--}
\end{tabularx}
\par\addvspace{1.3em}

% That's all folks
\end{multicols*}
\end{document}