\documentclass[10pt,a4paper]{article}

% Packages
\usepackage{fancyhdr}           % For header and footer
\usepackage{multicol}           % Allows multicols in tables
\usepackage{tabularx}           % Intelligent column widths
\usepackage{tabulary}           % Used in header and footer
\usepackage{hhline}             % Border under tables
\usepackage{graphicx}           % For images
\usepackage{xcolor}             % For hex colours
%\usepackage[utf8x]{inputenc}    % For unicode character support
\usepackage[T1]{fontenc}        % Without this we get weird character replacements
\usepackage{colortbl}           % For coloured tables
\usepackage{setspace}           % For line height
\usepackage{lastpage}           % Needed for total page number
\usepackage{seqsplit}           % Splits long words.
%\usepackage{opensans}          % Can't make this work so far. Shame. Would be lovely.
\usepackage[normalem]{ulem}     % For underlining links
% Most of the following are not required for the majority
% of cheat sheets but are needed for some symbol support.
\usepackage{amsmath}            % Symbols
\usepackage{MnSymbol}           % Symbols
\usepackage{wasysym}            % Symbols
%\usepackage[english,german,french,spanish,italian]{babel}              % Languages

% Document Info
\author{DarioPittera (aggialavura)}
\pdfinfo{
  /Title (python-supported-vector-machine-svm.pdf)
  /Creator (Cheatography)
  /Author (DarioPittera (aggialavura))
  /Subject (Python - Support Vector Machine (SVM) Cheat Sheet)
}

% Lengths and widths
\addtolength{\textwidth}{6cm}
\addtolength{\textheight}{-1cm}
\addtolength{\hoffset}{-3cm}
\addtolength{\voffset}{-2cm}
\setlength{\tabcolsep}{0.2cm} % Space between columns
\setlength{\headsep}{-12pt} % Reduce space between header and content
\setlength{\headheight}{85pt} % If less, LaTeX automatically increases it
\renewcommand{\footrulewidth}{0pt} % Remove footer line
\renewcommand{\headrulewidth}{0pt} % Remove header line
\renewcommand{\seqinsert}{\ifmmode\allowbreak\else\-\fi} % Hyphens in seqsplit
% These two commands together give roughly
% the right line height in the tables
\renewcommand{\arraystretch}{1.3}
\onehalfspacing

% Commands
\newcommand{\SetRowColor}[1]{\noalign{\gdef\RowColorName{#1}}\rowcolor{\RowColorName}} % Shortcut for row colour
\newcommand{\mymulticolumn}[3]{\multicolumn{#1}{>{\columncolor{\RowColorName}}#2}{#3}} % For coloured multi-cols
\newcolumntype{x}[1]{>{\raggedright}p{#1}} % New column types for ragged-right paragraph columns
\newcommand{\tn}{\tabularnewline} % Required as custom column type in use

% Font and Colours
\definecolor{HeadBackground}{HTML}{333333}
\definecolor{FootBackground}{HTML}{666666}
\definecolor{TextColor}{HTML}{333333}
\definecolor{DarkBackground}{HTML}{FFA938}
\definecolor{LightBackground}{HTML}{FFF4E6}
\renewcommand{\familydefault}{\sfdefault}
\color{TextColor}

% Header and Footer
\pagestyle{fancy}
\fancyhead{} % Set header to blank
\fancyfoot{} % Set footer to blank
\fancyhead[L]{
\noindent
\begin{multicols}{3}
\begin{tabulary}{5.8cm}{C}
    \SetRowColor{DarkBackground}
    \vspace{-7pt}
    {\parbox{\dimexpr\textwidth-2\fboxsep\relax}{\noindent
        \hspace*{-6pt}\includegraphics[width=5.8cm]{/web/www.cheatography.com/public/images/cheatography_logo.pdf}}
    }
\end{tabulary}
\columnbreak
\begin{tabulary}{11cm}{L}
    \vspace{-2pt}\large{\bf{\textcolor{DarkBackground}{\textrm{Python - Support Vector Machine (SVM) Cheat Sheet}}}} \\
    \normalsize{by \textcolor{DarkBackground}{DarioPittera (aggialavura)} via \textcolor{DarkBackground}{\uline{cheatography.com/83764/cs/20045/}}}
\end{tabulary}
\end{multicols}}

\fancyfoot[L]{ \footnotesize
\noindent
\begin{multicols}{3}
\begin{tabulary}{5.8cm}{LL}
  \SetRowColor{FootBackground}
  \mymulticolumn{2}{p{5.377cm}}{\bf\textcolor{white}{Cheatographer}}  \\
  \vspace{-2pt}DarioPittera (aggialavura) \\
  \uline{cheatography.com/aggialavura} \\
        \uline{\seqsplit{www}.dariopittera.com}
  \end{tabulary}
\vfill
\columnbreak
\begin{tabulary}{5.8cm}{L}
  \SetRowColor{FootBackground}
  \mymulticolumn{1}{p{5.377cm}}{\bf\textcolor{white}{Cheat Sheet}}  \\
   \vspace{-2pt}Not Yet Published.\\
   Updated 17th July, 2019.\\
   Page {\thepage} of \pageref{LastPage}.
\end{tabulary}
\vfill
\columnbreak
\begin{tabulary}{5.8cm}{L}
  \SetRowColor{FootBackground}
  \mymulticolumn{1}{p{5.377cm}}{\bf\textcolor{white}{Sponsor}}  \\
  \SetRowColor{white}
  \vspace{-5pt}
  %\includegraphics[width=48px,height=48px]{dave.jpeg}
  Measure your website readability!\\
  www.readability-score.com
\end{tabulary}
\end{multicols}}




\begin{document}
\raggedright
\raggedcolumns

% Set font size to small. Switch to any value
% from this page to resize cheat sheet text:
% www.emerson.emory.edu/services/latex/latex_169.html
\footnotesize % Small font.

\begin{multicols*}{2}

\begin{tabularx}{8.4cm}{X}
\SetRowColor{DarkBackground}
\mymulticolumn{1}{x{8.4cm}}{\bf\textcolor{white}{TO START}}  \tn
\SetRowColor{LightBackground}
\mymulticolumn{1}{x{8.4cm}}{\# IMPORT DATA LIBRARIES \newline import pandas as pd \newline import numpy as np \newline  \newline \# IMPORT VIS LIBRARIES \newline import matplotlib.pyplot as plt \newline import seaborn as sns \newline \%matplotlib inline \newline  \newline \# IMPORT MODELLING LIBRARIES \newline from sklearn.model\_selection import train\_test\_split \newline from sklearn.svm import SVC \newline from sklearn.metrics import classification\_report,confusion\_matrix} \tn 
\hhline{>{\arrayrulecolor{DarkBackground}}-}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{8.4cm}{x{4.64 cm} x{3.36 cm} }
\SetRowColor{DarkBackground}
\mymulticolumn{2}{x{8.4cm}}{\bf\textcolor{white}{TRAIN MODEL}}  \tn
% Row 0
\SetRowColor{LightBackground}
\mymulticolumn{2}{x{8.4cm}}{{\bf{\{\{fa-columns\}\} SPLIT DATASET}}} \tn 
% Row Count 1 (+ 1)
% Row 1
\SetRowColor{white}
X = df{[}{[}'col1','col2',etc.{]}{]} & create df features \tn 
% Row Count 3 (+ 2)
% Row 2
\SetRowColor{LightBackground}
y = df{[}'col'{]} & create df var to predict \tn 
% Row Count 5 (+ 2)
% Row 3
\SetRowColor{white}
X\_train, X\_test, y\_train, y\_test = \{\{nl\}\} train\_test\_split(\{\{nl\}\}~~X,\{\{nl\}\}~~y, \{\{nl\}\}~~test\_size=0.3) & split df into train and test sets \tn 
% Row Count 11 (+ 6)
% Row 4
\SetRowColor{LightBackground}
\mymulticolumn{2}{x{8.4cm}}{{\bf{\{\{fa-signal\}\} FIT THE MODEL}}} \tn 
% Row Count 12 (+ 1)
% Row 5
\SetRowColor{white}
svc = SVC() & instantiate the model \tn 
% Row Count 13 (+ 1)
% Row 6
\SetRowColor{LightBackground}
svc.fit(X\_train,y\_train) & train/fit the model \tn 
% Row Count 15 (+ 2)
% Row 7
\SetRowColor{white}
\mymulticolumn{2}{x{8.4cm}}{{\bf{\{\{fa-bullseye\}\} MAKE PREDICTIONS}}} \tn 
% Row Count 16 (+ 1)
% Row 8
\SetRowColor{LightBackground}
\mymulticolumn{2}{x{8.4cm}}{pred = svc.predict(X\_test)} \tn 
% Row Count 17 (+ 1)
% Row 9
\SetRowColor{white}
\mymulticolumn{2}{x{8.4cm}}{{\bf{\{\{fa-check\}\} EVALUATE MODEL}}} \tn 
% Row Count 18 (+ 1)
% Row 10
\SetRowColor{LightBackground}
\mymulticolumn{2}{x{8.4cm}}{\seqsplit{print(confusion\_matrix(y\_test},pred))} \tn 
% Row Count 19 (+ 1)
% Row 11
\SetRowColor{white}
\mymulticolumn{2}{x{8.4cm}}{\seqsplit{print(classification\_report(y\_test},pred))} \tn 
% Row Count 20 (+ 1)
\hhline{>{\arrayrulecolor{DarkBackground}}--}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{8.4cm}{X}
\SetRowColor{DarkBackground}
\mymulticolumn{1}{x{8.4cm}}{\bf\textcolor{white}{GRID SEARCH EXPLANATION}}  \tn
\SetRowColor{white}
\mymulticolumn{1}{x{8.4cm}}{Finding the right parameters (such as which C or gamma values to use) is a tricky task! Luckily, we can simply try a bunch of combinations and see which one works best. This idea of creating a 'grid' of parameters and trying out all the possible combinations is called a grid search; the method is common enough that scikit-learn has it built in as GridSearchCV, where the CV stands for cross-validation. GridSearchCV takes a model to train and a dictionary describing the parameters that should be tried: the keys are the parameter names and the values are the settings to be tested. \newline % Row Count 15 (+ 15)
\seqsplit{============================================} \newline % Row Count 17 (+ 2)
{\bf{C}} is the parameter of the soft-margin cost function, which controls the influence of each individual support vector; it trades error penalty for stability. C sets the {\bf{cost of misclassifying training examples}} against the simplicity of the decision surface. A {\bf{large C}} gives low bias and high variance: low bias because misclassifications are penalized heavily. A {\bf{small C}} gives higher bias and lower variance. \newline % Row Count 27 (+ 10)
{\bf{Gamma}} is the parameter of a {\bf{Gaussian Kernel}} (used to handle non-linear classification). Gamma {\bf{controls the width of the ``peaks''}} where you raise the points: a large gamma gives a narrow, pointed bump in the higher dimensions, while a small gamma gives a softer, broader bump. So a {\bf{large gamma}} gives low bias and high variance, while a {\bf{small gamma}} gives higher bias and lower variance. You usually find the best C and gamma hyper-parameters with a grid search (see the sketch below). \newline % Row Count 37 (+ 10)
} \tn 
\end{tabularx}
\par\addvspace{1.3em}
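A minimal sketch of how the C and gamma values discussed above are passed to scikit-learn's SVC; the variable names are illustrative, not from the original sheet.
\begin{tabularx}{8.4cm}{X}
\SetRowColor{DarkBackground}
\mymulticolumn{1}{x{8.4cm}}{\bf\textcolor{white}{C AND GAMMA: SKETCH}}  \tn
\SetRowColor{LightBackground}
\mymulticolumn{1}{x{8.4cm}}{\# illustrative sketch: two RBF SVCs at opposite ends of the trade-off \newline from sklearn.svm import SVC \newline  \newline \# large C, large gamma: flexible boundary (low bias, high variance) \newline svc\_flexible = SVC(C=1000, gamma=1, kernel='rbf') \newline svc\_flexible.fit(X\_train, y\_train) \newline  \newline \# small C, small gamma: smoother boundary (higher bias, lower variance) \newline svc\_smooth = SVC(C=0.1, gamma=0.001, kernel='rbf') \newline svc\_smooth.fit(X\_train, y\_train)} \tn 
\hhline{>{\arrayrulecolor{DarkBackground}}-}
\end{tabularx}
\par\addvspace{1.3em}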

\vfill
\columnbreak
\begin{tabularx}{8.4cm}{X}
\SetRowColor{DarkBackground}
\mymulticolumn{1}{x{8.4cm}}{\bf\textcolor{white}{GRID SEARCH EXPLANATION (cont)}}  \tn
\SetRowColor{white}
\mymulticolumn{1}{x{8.4cm}}{{\bf{Kernel}} decides the type of decision boundary used to separate the points (e.g. linear, polynomial or RBF); see the sketch below. \newline % Row Count 2 (+ 2)
\seqsplit{============================================} \newline % Row Count 4 (+ 2)
{\bf{Refit}} an estimator using the best-found parameters on the whole dataset. \newline % Row Count 6 (+ 2)
{\bf{Verbose}} controls the verbosity: the higher, the more messages.% Row Count 8 (+ 2)
} \tn 
\hhline{>{\arrayrulecolor{DarkBackground}}-}
\end{tabularx}
\par\addvspace{1.3em}
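A minimal sketch of the kernel choices mentioned above, using scikit-learn's built-in kernel names; degree applies only to the polynomial kernel, and gamma only to the non-linear kernels.
\begin{tabularx}{8.4cm}{X}
\SetRowColor{DarkBackground}
\mymulticolumn{1}{x{8.4cm}}{\bf\textcolor{white}{KERNEL CHOICES: SKETCH}}  \tn
\SetRowColor{LightBackground}
\mymulticolumn{1}{x{8.4cm}}{\# illustrative sketch: each kernel gives a different decision boundary \newline from sklearn.svm import SVC \newline  \newline SVC(kernel='linear') \# straight (linear) boundary \newline SVC(kernel='poly', degree=3) \# polynomial boundary \newline SVC(kernel='rbf') \# Gaussian boundary (the default kernel)} \tn 
\hhline{>{\arrayrulecolor{DarkBackground}}-}
\end{tabularx}
\par\addvspace{1.3em}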

\begin{tabularx}{8.4cm}{X}
\SetRowColor{DarkBackground}
\mymulticolumn{1}{x{8.4cm}}{\bf\textcolor{white}{SVM parameters}}  \tn
\SetRowColor{LightBackground}
\mymulticolumn{1}{p{8.4cm}}{\vspace{1px}\centerline{\includegraphics[width=5.1cm]{/web/www.cheatography.com/public/uploads/aggialavura_1563379102_param.PNG}}} \tn 
\hhline{>{\arrayrulecolor{DarkBackground}}-}
\end{tabularx}
\par\addvspace{1.3em}

\begin{tabularx}{8.4cm}{x{5.36 cm} x{2.64 cm} }
\SetRowColor{DarkBackground}
\mymulticolumn{2}{x{8.4cm}}{\bf\textcolor{white}{GRID SEARCH}}  \tn
% Row 0
\SetRowColor{LightBackground}
{\bf{from sklearn.model\_selection import GridSearchCV}} & import GridSearchCV \tn 
% Row Count 2 (+ 2)
% Row 1
\SetRowColor{white}
param\_grid = \{\{\{nl\}\}'C': {[}0.1,1, 10, 100, 1000{]}, \{\{nl\}\}'gamma': {[}1,0.1,0.01,0.001,0.0001{]}, \{\{nl\}\}'kernel': {[}'rbf'{]}\} & parameters, see info \tn 
% Row Count 7 (+ 5)
% Row 2
\SetRowColor{LightBackground}
grid = GridSearchCV(\{\{nl\}\}SVC(),\{\{nl\}\}param\_grid,\{\{nl\}\}refit=True,\{\{nl\}\}verbose=3) & create the grid search object, see info \tn 
% Row Count 11 (+ 4)
% Row 3
\SetRowColor{white}
\mymulticolumn{2}{x{8.4cm}}{grid.fit(X\_train,y\_train)} \tn 
% Row Count 12 (+ 1)
% Row 4
\SetRowColor{LightBackground}
\mymulticolumn{2}{x{8.4cm}}{grid.best\_params\_} \tn 
% Row Count 13 (+ 1)
% Row 5
\SetRowColor{white}
\mymulticolumn{2}{x{8.4cm}}{grid.best\_estimator\_} \tn 
% Row Count 14 (+ 1)
% Row 6
\SetRowColor{LightBackground}
\mymulticolumn{2}{x{8.4cm}}{grid\_predictions = grid.predict(X\_test)} \tn 
% Row Count 15 (+ 1)
% Row 7
\SetRowColor{white}
\mymulticolumn{2}{x{8.4cm}}{\seqsplit{print(confusion\_matrix(y\_test},grid\_predictions))} \tn 
% Row Count 16 (+ 1)
% Row 8
\SetRowColor{LightBackground}
\mymulticolumn{2}{x{8.4cm}}{\seqsplit{print(classification\_report(y\_test},grid\_predictions))} \tn 
% Row Count 18 (+ 2)
\hhline{>{\arrayrulecolor{DarkBackground}}--}
\end{tabularx}
\par\addvspace{1.3em}


% That's all folks
\end{multicols*}

\end{document}