import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.metrics import (roc_curve, auc, confusion_matrix,
                             precision_recall_curve, average_precision_score,
                             roc_auc_score)
# Ground-truth labels and the classifier's predicted scores.
y_true = [0, 1, 0, 1, 1, 0]
y_pred = [0.2, 0.6, 0.3, 0.8, 0.2, 0.1]
# Compute ROC points and the area under the curve.
fpr, tpr, thresholds = roc_curve(y_true, y_pred)
roc_auc = auc(fpr, tpr)

plt.title('Receiver Operating Characteristic')
plt.plot(fpr, tpr, 'b', label='AUC = %0.2f' % roc_auc)
plt.legend(loc='lower right')
plt.plot([0, 1], [0, 1], 'r--')  # diagonal = random-guess baseline
plt.xlim([-0.1, 1.1])
plt.ylim([-0.1, 1.1])
plt.ylabel('True Positive Rate')
plt.xlabel('False Positive Rate')
plt.show()
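
# A minimal cross-check (sketch, not part of the original example):
# roc_auc_score computes the same AUC directly from labels and scores,
# skipping the explicit roc_curve step.
print('AUC via roc_auc_score: %0.2f' % roc_auc_score(y_true, y_pred))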
# Binarize scores at a 0.5 threshold, then unpack the confusion-matrix counts.
tn, fp, fn, tp = confusion_matrix(y_true, [1 if i > 0.5 else 0 for i in y_pred]).ravel()
labels = ['True Negative', 'False Positive', 'False Negative', 'True Positive']
categories = ['Negative', 'Positive']
# Annotate each heatmap cell with its label and count (puts the labels
# list above to use; fmt='' is required for string annotations).
annot = np.array(['{0}\n{1}'.format(l, v)
                  for l, v in zip(labels, [tn, fp, fn, tp])]).reshape(2, 2)
sns.heatmap([[tn, fp], [fn, tp]], annot=annot, fmt='',
            xticklabels=categories, yticklabels=categories, cmap='YlGnBu')
plt.xlabel('Predicted Label')
plt.ylabel('True Label')
plt.title('Confusion Matrix')
plt.show()
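
# A minimal sketch of the scalar metrics implied by these counts (an
# addition, assuming tp + fp and tp + fn are nonzero, which holds for this
# data at threshold 0.5); sklearn's accuracy_score / precision_score /
# recall_score would give the same values.
accuracy = (tp + tn) / (tp + tn + fp + fn)
precision_at_05 = tp / (tp + fp)
recall_at_05 = tp / (tp + fn)
print('Accuracy: %.2f  Precision: %.2f  Recall: %.2f'
      % (accuracy, precision_at_05, recall_at_05))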
# Compute precision-recall pairs and the average precision (area under the PR curve).
precision, recall, thresholds = precision_recall_curve(y_true, y_pred)
average_precision = average_precision_score(y_true, y_pred)

plt.step(recall, precision, color='b', alpha=0.2, where='post')
plt.fill_between(recall, precision, step='post', alpha=0.2, color='b')
plt.xlabel('Recall')
plt.ylabel('Precision')
plt.ylim([0.0, 1.05])
plt.xlim([0.0, 1.0])
plt.title('Precision-Recall curve: AP={0:0.2f}'.format(average_precision))
plt.show()
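
# A minimal sketch of threshold selection (an assumption, not part of the
# original example): pick the threshold that maximizes F1 over the points
# returned by precision_recall_curve. precision/recall have one more entry
# than thresholds, so the final (precision=1, recall=0) point is dropped;
# this also assumes precision + recall > 0 at the remaining points, which
# holds for this data.
f1 = 2 * precision[:-1] * recall[:-1] / (precision[:-1] + recall[:-1])
best = np.argmax(f1)
print('Best threshold: %.2f (F1 = %.2f)' % (thresholds[best], f1[best]))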