adding all files done so far
This commit is contained in:
BIN
bayes-learning/__pycache__/FeatureScaling.cpython-36.pyc
Normal file
BIN
bayes-learning/__pycache__/FeatureScaling.cpython-36.pyc
Normal file
Binary file not shown.
BIN
bayes-learning/__pycache__/Naive_Bayes.cpython-36.pyc
Normal file
BIN
bayes-learning/__pycache__/Naive_Bayes.cpython-36.pyc
Normal file
Binary file not shown.
BIN
bayes-learning/__pycache__/seaborn.cpython-37.pyc
Normal file
BIN
bayes-learning/__pycache__/seaborn.cpython-37.pyc
Normal file
Binary file not shown.
42
bayes-learning/class.py
Normal file
42
bayes-learning/class.py
Normal file
@@ -0,0 +1,42 @@
|
||||
import pandas as pd
|
||||
import os
|
||||
|
||||
# path = os.getcwd()
|
||||
|
||||
# data = pd.read_csv(path + str('/data/Social_Network_Ads.csv'),
|
||||
# engine='python')
|
||||
# df = pd.DataFrame(data)
|
||||
|
||||
|
||||
# train_size = int(0.75 * df.shape[0])
|
||||
# test_size = int(0.25 * df.shape[0])
|
||||
|
||||
# print('Training set size {}, Testing set size {}'.format(train_size,
|
||||
# test_size))
|
||||
|
||||
|
||||
class bayesClassifer(object):
|
||||
"""initial implmentation of a bayes classifer"""
|
||||
|
||||
path = os.getcwd()
|
||||
|
||||
def __init__(self, data_file):
|
||||
super(bayesClassifer, self).__init__()
|
||||
self.data_file = data_file
|
||||
|
||||
def createDataFrame(self):
|
||||
return pd.read_csv(self.path + self.data_file, engine='python')
|
||||
|
||||
def print_debug(self):
|
||||
print('{0} rows, {1} columns'.format(self.df.shape[0],
|
||||
self.df.shape[1]))
|
||||
print(self.df[1:7])
|
||||
|
||||
def trainData(dataframe):
|
||||
train_size = int(0.75 * dataframe.shape[0])
|
||||
test_size = int(0.25 * dataframe.shape[0])
|
||||
return(train_size, test_size)
|
||||
|
||||
|
||||
bayesClassifer('/data/Social_Network_Ads.csv')
|
||||
bayesClassifer.createDataFrame()
|
||||
401
bayes-learning/data/Social_Network_Ads.csv
Normal file
401
bayes-learning/data/Social_Network_Ads.csv
Normal file
@@ -0,0 +1,401 @@
|
||||
User ID,Gender,Age,EstimatedSalary,Purchased
|
||||
15624510,Male,19,19000,0
|
||||
15810944,Male,35,20000,0
|
||||
15668575,Female,26,43000,0
|
||||
15603246,Female,27,57000,0
|
||||
15804002,Male,19,76000,0
|
||||
15728773,Male,27,58000,0
|
||||
15598044,Female,27,84000,0
|
||||
15694829,Female,32,150000,1
|
||||
15600575,Male,25,33000,0
|
||||
15727311,Female,35,65000,0
|
||||
15570769,Female,26,80000,0
|
||||
15606274,Female,26,52000,0
|
||||
15746139,Male,20,86000,0
|
||||
15704987,Male,32,18000,0
|
||||
15628972,Male,18,82000,0
|
||||
15697686,Male,29,80000,0
|
||||
15733883,Male,47,25000,1
|
||||
15617482,Male,45,26000,1
|
||||
15704583,Male,46,28000,1
|
||||
15621083,Female,48,29000,1
|
||||
15649487,Male,45,22000,1
|
||||
15736760,Female,47,49000,1
|
||||
15714658,Male,48,41000,1
|
||||
15599081,Female,45,22000,1
|
||||
15705113,Male,46,23000,1
|
||||
15631159,Male,47,20000,1
|
||||
15792818,Male,49,28000,1
|
||||
15633531,Female,47,30000,1
|
||||
15744529,Male,29,43000,0
|
||||
15669656,Male,31,18000,0
|
||||
15581198,Male,31,74000,0
|
||||
15729054,Female,27,137000,1
|
||||
15573452,Female,21,16000,0
|
||||
15776733,Female,28,44000,0
|
||||
15724858,Male,27,90000,0
|
||||
15713144,Male,35,27000,0
|
||||
15690188,Female,33,28000,0
|
||||
15689425,Male,30,49000,0
|
||||
15671766,Female,26,72000,0
|
||||
15782806,Female,27,31000,0
|
||||
15764419,Female,27,17000,0
|
||||
15591915,Female,33,51000,0
|
||||
15772798,Male,35,108000,0
|
||||
15792008,Male,30,15000,0
|
||||
15715541,Female,28,84000,0
|
||||
15639277,Male,23,20000,0
|
||||
15798850,Male,25,79000,0
|
||||
15776348,Female,27,54000,0
|
||||
15727696,Male,30,135000,1
|
||||
15793813,Female,31,89000,0
|
||||
15694395,Female,24,32000,0
|
||||
15764195,Female,18,44000,0
|
||||
15744919,Female,29,83000,0
|
||||
15671655,Female,35,23000,0
|
||||
15654901,Female,27,58000,0
|
||||
15649136,Female,24,55000,0
|
||||
15775562,Female,23,48000,0
|
||||
15807481,Male,28,79000,0
|
||||
15642885,Male,22,18000,0
|
||||
15789109,Female,32,117000,0
|
||||
15814004,Male,27,20000,0
|
||||
15673619,Male,25,87000,0
|
||||
15595135,Female,23,66000,0
|
||||
15583681,Male,32,120000,1
|
||||
15605000,Female,59,83000,0
|
||||
15718071,Male,24,58000,0
|
||||
15679760,Male,24,19000,0
|
||||
15654574,Female,23,82000,0
|
||||
15577178,Female,22,63000,0
|
||||
15595324,Female,31,68000,0
|
||||
15756932,Male,25,80000,0
|
||||
15726358,Female,24,27000,0
|
||||
15595228,Female,20,23000,0
|
||||
15782530,Female,33,113000,0
|
||||
15592877,Male,32,18000,0
|
||||
15651983,Male,34,112000,1
|
||||
15746737,Male,18,52000,0
|
||||
15774179,Female,22,27000,0
|
||||
15667265,Female,28,87000,0
|
||||
15655123,Female,26,17000,0
|
||||
15595917,Male,30,80000,0
|
||||
15668385,Male,39,42000,0
|
||||
15709476,Male,20,49000,0
|
||||
15711218,Male,35,88000,0
|
||||
15798659,Female,30,62000,0
|
||||
15663939,Female,31,118000,1
|
||||
15694946,Male,24,55000,0
|
||||
15631912,Female,28,85000,0
|
||||
15768816,Male,26,81000,0
|
||||
15682268,Male,35,50000,0
|
||||
15684801,Male,22,81000,0
|
||||
15636428,Female,30,116000,0
|
||||
15809823,Male,26,15000,0
|
||||
15699284,Female,29,28000,0
|
||||
15786993,Female,29,83000,0
|
||||
15709441,Female,35,44000,0
|
||||
15710257,Female,35,25000,0
|
||||
15582492,Male,28,123000,1
|
||||
15575694,Male,35,73000,0
|
||||
15756820,Female,28,37000,0
|
||||
15766289,Male,27,88000,0
|
||||
15593014,Male,28,59000,0
|
||||
15584545,Female,32,86000,0
|
||||
15675949,Female,33,149000,1
|
||||
15672091,Female,19,21000,0
|
||||
15801658,Male,21,72000,0
|
||||
15706185,Female,26,35000,0
|
||||
15789863,Male,27,89000,0
|
||||
15720943,Male,26,86000,0
|
||||
15697997,Female,38,80000,0
|
||||
15665416,Female,39,71000,0
|
||||
15660200,Female,37,71000,0
|
||||
15619653,Male,38,61000,0
|
||||
15773447,Male,37,55000,0
|
||||
15739160,Male,42,80000,0
|
||||
15689237,Male,40,57000,0
|
||||
15679297,Male,35,75000,0
|
||||
15591433,Male,36,52000,0
|
||||
15642725,Male,40,59000,0
|
||||
15701962,Male,41,59000,0
|
||||
15811613,Female,36,75000,0
|
||||
15741049,Male,37,72000,0
|
||||
15724423,Female,40,75000,0
|
||||
15574305,Male,35,53000,0
|
||||
15678168,Female,41,51000,0
|
||||
15697020,Female,39,61000,0
|
||||
15610801,Male,42,65000,0
|
||||
15745232,Male,26,32000,0
|
||||
15722758,Male,30,17000,0
|
||||
15792102,Female,26,84000,0
|
||||
15675185,Male,31,58000,0
|
||||
15801247,Male,33,31000,0
|
||||
15725660,Male,30,87000,0
|
||||
15638963,Female,21,68000,0
|
||||
15800061,Female,28,55000,0
|
||||
15578006,Male,23,63000,0
|
||||
15668504,Female,20,82000,0
|
||||
15687491,Male,30,107000,1
|
||||
15610403,Female,28,59000,0
|
||||
15741094,Male,19,25000,0
|
||||
15807909,Male,19,85000,0
|
||||
15666141,Female,18,68000,0
|
||||
15617134,Male,35,59000,0
|
||||
15783029,Male,30,89000,0
|
||||
15622833,Female,34,25000,0
|
||||
15746422,Female,24,89000,0
|
||||
15750839,Female,27,96000,1
|
||||
15749130,Female,41,30000,0
|
||||
15779862,Male,29,61000,0
|
||||
15767871,Male,20,74000,0
|
||||
15679651,Female,26,15000,0
|
||||
15576219,Male,41,45000,0
|
||||
15699247,Male,31,76000,0
|
||||
15619087,Female,36,50000,0
|
||||
15605327,Male,40,47000,0
|
||||
15610140,Female,31,15000,0
|
||||
15791174,Male,46,59000,0
|
||||
15602373,Male,29,75000,0
|
||||
15762605,Male,26,30000,0
|
||||
15598840,Female,32,135000,1
|
||||
15744279,Male,32,100000,1
|
||||
15670619,Male,25,90000,0
|
||||
15599533,Female,37,33000,0
|
||||
15757837,Male,35,38000,0
|
||||
15697574,Female,33,69000,0
|
||||
15578738,Female,18,86000,0
|
||||
15762228,Female,22,55000,0
|
||||
15614827,Female,35,71000,0
|
||||
15789815,Male,29,148000,1
|
||||
15579781,Female,29,47000,0
|
||||
15587013,Male,21,88000,0
|
||||
15570932,Male,34,115000,0
|
||||
15794661,Female,26,118000,0
|
||||
15581654,Female,34,43000,0
|
||||
15644296,Female,34,72000,0
|
||||
15614420,Female,23,28000,0
|
||||
15609653,Female,35,47000,0
|
||||
15594577,Male,25,22000,0
|
||||
15584114,Male,24,23000,0
|
||||
15673367,Female,31,34000,0
|
||||
15685576,Male,26,16000,0
|
||||
15774727,Female,31,71000,0
|
||||
15694288,Female,32,117000,1
|
||||
15603319,Male,33,43000,0
|
||||
15759066,Female,33,60000,0
|
||||
15814816,Male,31,66000,0
|
||||
15724402,Female,20,82000,0
|
||||
15571059,Female,33,41000,0
|
||||
15674206,Male,35,72000,0
|
||||
15715160,Male,28,32000,0
|
||||
15730448,Male,24,84000,0
|
||||
15662067,Female,19,26000,0
|
||||
15779581,Male,29,43000,0
|
||||
15662901,Male,19,70000,0
|
||||
15689751,Male,28,89000,0
|
||||
15667742,Male,34,43000,0
|
||||
15738448,Female,30,79000,0
|
||||
15680243,Female,20,36000,0
|
||||
15745083,Male,26,80000,0
|
||||
15708228,Male,35,22000,0
|
||||
15628523,Male,35,39000,0
|
||||
15708196,Male,49,74000,0
|
||||
15735549,Female,39,134000,1
|
||||
15809347,Female,41,71000,0
|
||||
15660866,Female,58,101000,1
|
||||
15766609,Female,47,47000,0
|
||||
15654230,Female,55,130000,1
|
||||
15794566,Female,52,114000,0
|
||||
15800890,Female,40,142000,1
|
||||
15697424,Female,46,22000,0
|
||||
15724536,Female,48,96000,1
|
||||
15735878,Male,52,150000,1
|
||||
15707596,Female,59,42000,0
|
||||
15657163,Male,35,58000,0
|
||||
15622478,Male,47,43000,0
|
||||
15779529,Female,60,108000,1
|
||||
15636023,Male,49,65000,0
|
||||
15582066,Male,40,78000,0
|
||||
15666675,Female,46,96000,0
|
||||
15732987,Male,59,143000,1
|
||||
15789432,Female,41,80000,0
|
||||
15663161,Male,35,91000,1
|
||||
15694879,Male,37,144000,1
|
||||
15593715,Male,60,102000,1
|
||||
15575002,Female,35,60000,0
|
||||
15622171,Male,37,53000,0
|
||||
15795224,Female,36,126000,1
|
||||
15685346,Male,56,133000,1
|
||||
15691808,Female,40,72000,0
|
||||
15721007,Female,42,80000,1
|
||||
15794253,Female,35,147000,1
|
||||
15694453,Male,39,42000,0
|
||||
15813113,Male,40,107000,1
|
||||
15614187,Male,49,86000,1
|
||||
15619407,Female,38,112000,0
|
||||
15646227,Male,46,79000,1
|
||||
15660541,Male,40,57000,0
|
||||
15753874,Female,37,80000,0
|
||||
15617877,Female,46,82000,0
|
||||
15772073,Female,53,143000,1
|
||||
15701537,Male,42,149000,1
|
||||
15736228,Male,38,59000,0
|
||||
15780572,Female,50,88000,1
|
||||
15769596,Female,56,104000,1
|
||||
15586996,Female,41,72000,0
|
||||
15722061,Female,51,146000,1
|
||||
15638003,Female,35,50000,0
|
||||
15775590,Female,57,122000,1
|
||||
15730688,Male,41,52000,0
|
||||
15753102,Female,35,97000,1
|
||||
15810075,Female,44,39000,0
|
||||
15723373,Male,37,52000,0
|
||||
15795298,Female,48,134000,1
|
||||
15584320,Female,37,146000,1
|
||||
15724161,Female,50,44000,0
|
||||
15750056,Female,52,90000,1
|
||||
15609637,Female,41,72000,0
|
||||
15794493,Male,40,57000,0
|
||||
15569641,Female,58,95000,1
|
||||
15815236,Female,45,131000,1
|
||||
15811177,Female,35,77000,0
|
||||
15680587,Male,36,144000,1
|
||||
15672821,Female,55,125000,1
|
||||
15767681,Female,35,72000,0
|
||||
15600379,Male,48,90000,1
|
||||
15801336,Female,42,108000,1
|
||||
15721592,Male,40,75000,0
|
||||
15581282,Male,37,74000,0
|
||||
15746203,Female,47,144000,1
|
||||
15583137,Male,40,61000,0
|
||||
15680752,Female,43,133000,0
|
||||
15688172,Female,59,76000,1
|
||||
15791373,Male,60,42000,1
|
||||
15589449,Male,39,106000,1
|
||||
15692819,Female,57,26000,1
|
||||
15727467,Male,57,74000,1
|
||||
15734312,Male,38,71000,0
|
||||
15764604,Male,49,88000,1
|
||||
15613014,Female,52,38000,1
|
||||
15759684,Female,50,36000,1
|
||||
15609669,Female,59,88000,1
|
||||
15685536,Male,35,61000,0
|
||||
15750447,Male,37,70000,1
|
||||
15663249,Female,52,21000,1
|
||||
15638646,Male,48,141000,0
|
||||
15734161,Female,37,93000,1
|
||||
15631070,Female,37,62000,0
|
||||
15761950,Female,48,138000,1
|
||||
15649668,Male,41,79000,0
|
||||
15713912,Female,37,78000,1
|
||||
15586757,Male,39,134000,1
|
||||
15596522,Male,49,89000,1
|
||||
15625395,Male,55,39000,1
|
||||
15760570,Male,37,77000,0
|
||||
15566689,Female,35,57000,0
|
||||
15725794,Female,36,63000,0
|
||||
15673539,Male,42,73000,1
|
||||
15705298,Female,43,112000,1
|
||||
15675791,Male,45,79000,0
|
||||
15747043,Male,46,117000,1
|
||||
15736397,Female,58,38000,1
|
||||
15678201,Male,48,74000,1
|
||||
15720745,Female,37,137000,1
|
||||
15637593,Male,37,79000,1
|
||||
15598070,Female,40,60000,0
|
||||
15787550,Male,42,54000,0
|
||||
15603942,Female,51,134000,0
|
||||
15733973,Female,47,113000,1
|
||||
15596761,Male,36,125000,1
|
||||
15652400,Female,38,50000,0
|
||||
15717893,Female,42,70000,0
|
||||
15622585,Male,39,96000,1
|
||||
15733964,Female,38,50000,0
|
||||
15753861,Female,49,141000,1
|
||||
15747097,Female,39,79000,0
|
||||
15594762,Female,39,75000,1
|
||||
15667417,Female,54,104000,1
|
||||
15684861,Male,35,55000,0
|
||||
15742204,Male,45,32000,1
|
||||
15623502,Male,36,60000,0
|
||||
15774872,Female,52,138000,1
|
||||
15611191,Female,53,82000,1
|
||||
15674331,Male,41,52000,0
|
||||
15619465,Female,48,30000,1
|
||||
15575247,Female,48,131000,1
|
||||
15695679,Female,41,60000,0
|
||||
15713463,Male,41,72000,0
|
||||
15785170,Female,42,75000,0
|
||||
15796351,Male,36,118000,1
|
||||
15639576,Female,47,107000,1
|
||||
15693264,Male,38,51000,0
|
||||
15589715,Female,48,119000,1
|
||||
15769902,Male,42,65000,0
|
||||
15587177,Male,40,65000,0
|
||||
15814553,Male,57,60000,1
|
||||
15601550,Female,36,54000,0
|
||||
15664907,Male,58,144000,1
|
||||
15612465,Male,35,79000,0
|
||||
15810800,Female,38,55000,0
|
||||
15665760,Male,39,122000,1
|
||||
15588080,Female,53,104000,1
|
||||
15776844,Male,35,75000,0
|
||||
15717560,Female,38,65000,0
|
||||
15629739,Female,47,51000,1
|
||||
15729908,Male,47,105000,1
|
||||
15716781,Female,41,63000,0
|
||||
15646936,Male,53,72000,1
|
||||
15768151,Female,54,108000,1
|
||||
15579212,Male,39,77000,0
|
||||
15721835,Male,38,61000,0
|
||||
15800515,Female,38,113000,1
|
||||
15591279,Male,37,75000,0
|
||||
15587419,Female,42,90000,1
|
||||
15750335,Female,37,57000,0
|
||||
15699619,Male,36,99000,1
|
||||
15606472,Male,60,34000,1
|
||||
15778368,Male,54,70000,1
|
||||
15671387,Female,41,72000,0
|
||||
15573926,Male,40,71000,1
|
||||
15709183,Male,42,54000,0
|
||||
15577514,Male,43,129000,1
|
||||
15778830,Female,53,34000,1
|
||||
15768072,Female,47,50000,1
|
||||
15768293,Female,42,79000,0
|
||||
15654456,Male,42,104000,1
|
||||
15807525,Female,59,29000,1
|
||||
15574372,Female,58,47000,1
|
||||
15671249,Male,46,88000,1
|
||||
15779744,Male,38,71000,0
|
||||
15624755,Female,54,26000,1
|
||||
15611430,Female,60,46000,1
|
||||
15774744,Male,60,83000,1
|
||||
15629885,Female,39,73000,0
|
||||
15708791,Male,59,130000,1
|
||||
15793890,Female,37,80000,0
|
||||
15646091,Female,46,32000,1
|
||||
15596984,Female,46,74000,0
|
||||
15800215,Female,42,53000,0
|
||||
15577806,Male,41,87000,1
|
||||
15749381,Female,58,23000,1
|
||||
15683758,Male,42,64000,0
|
||||
15670615,Male,48,33000,1
|
||||
15715622,Female,44,139000,1
|
||||
15707634,Male,49,28000,1
|
||||
15806901,Female,57,33000,1
|
||||
15775335,Male,56,60000,1
|
||||
15724150,Female,49,39000,1
|
||||
15627220,Male,39,71000,0
|
||||
15672330,Male,47,34000,1
|
||||
15668521,Female,48,35000,1
|
||||
15807837,Male,48,33000,1
|
||||
15592570,Male,47,23000,1
|
||||
15748589,Female,45,45000,1
|
||||
15635893,Male,60,42000,1
|
||||
15757632,Female,39,59000,0
|
||||
15691863,Female,46,41000,1
|
||||
15706071,Male,51,23000,1
|
||||
15654296,Female,50,20000,1
|
||||
15755018,Male,36,33000,0
|
||||
15594041,Female,49,36000,1
|
||||
|
33
bayes-learning/lesson1-rewrite.py
Normal file
33
bayes-learning/lesson1-rewrite.py
Normal file
@@ -0,0 +1,33 @@
|
||||
import pandas as pd
|
||||
import os
|
||||
from sklearn.preprocessing import StandardScaler
|
||||
from matplotlib.colors import ListedColormap
|
||||
import numpy as np
|
||||
import matplotlib
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
matplotlib.rcParams['backend'] = 'TkAgg'
|
||||
|
||||
|
||||
class getData(object):
|
||||
"""docstring for getData"""
|
||||
def __init__(self, file):
|
||||
super(getData, self).__init__()
|
||||
self.file = file
|
||||
|
||||
|
||||
class Bayes(getData):
|
||||
"""docstring for Bayes"""
|
||||
|
||||
def __init__(self, file):
|
||||
super(Bayes, self).__init__()
|
||||
self.file = file
|
||||
|
||||
def get_path(self):
|
||||
path = os.getcwd()
|
||||
file_path = path + self.file
|
||||
return file_path
|
||||
|
||||
|
||||
bayes = Bayes(file='data/Social_Network_Ads.csv')
|
||||
print(bayes.get_path())
|
||||
152
bayes-learning/lesson1.py
Normal file
152
bayes-learning/lesson1.py
Normal file
@@ -0,0 +1,152 @@
|
||||
import pandas as pd
|
||||
import os
|
||||
from sklearn.preprocessing import Normalizer, StandardScaler
|
||||
from matplotlib.colors import ListedColormap
|
||||
import numpy as np
|
||||
import matplotlib
|
||||
import matplotlib.pyplot as plt
|
||||
from sklearn.naive_bayes import GaussianNB
|
||||
from sklearn.metrics import confusion_matrix, classification_report
|
||||
|
||||
matplotlib.rcParams['backend'] = 'TkAgg'
|
||||
|
||||
plt.style.use('seaborn-dark-palette')
|
||||
|
||||
# path = os.getcwd()
|
||||
path = '/home/dtomlinson/projects/bayes-learning'
|
||||
|
||||
data = pd.read_csv(path + str('/data/Social_Network_Ads.csv'), engine='python')
|
||||
df = pd.DataFrame(data)
|
||||
|
||||
print('{0} rows, {1} columns'.format(df.shape[0], df.shape[1]))
|
||||
# print(df[1:7])
|
||||
|
||||
train_size = int(0.75 * df.shape[0])
|
||||
test_size = int(0.25 * df.shape[0])
|
||||
|
||||
print('Training set size {}, Testing set size {}'.format(train_size,
|
||||
test_size))
|
||||
|
||||
df = df.sample(frac=1).reset_index(drop=True)
|
||||
|
||||
print(df[0:5])
|
||||
|
||||
X = df.iloc[:, [2, 3]].values
|
||||
y = df.iloc[:, 4].values
|
||||
|
||||
normalizer = StandardScaler(copy=False).fit(X)
|
||||
X = normalizer.fit_transform(X)
|
||||
|
||||
X_train = X[0:train_size, :]
|
||||
y_train = y[0:train_size]
|
||||
|
||||
X_test = X[train_size:, :]
|
||||
y_test = y[train_size:]
|
||||
|
||||
X_set, y_set = X_train, y_train
|
||||
|
||||
|
||||
# ind = np.argsort(X_set[:, 0])
|
||||
# X_set = X_set[ind]
|
||||
|
||||
|
||||
for i, j in enumerate(np.unique(y_set)):
|
||||
plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
|
||||
c=ListedColormap(('red', 'blue'))(i),
|
||||
label=j, marker='.')
|
||||
|
||||
|
||||
plt.title('Training Set')
|
||||
plt.xlabel('Age')
|
||||
plt.ylabel('Estimated Salary')
|
||||
plt.legend()
|
||||
|
||||
|
||||
def generate_data(class_data_dic, X_train, y_train):
|
||||
|
||||
first_one = True
|
||||
first_zero = True
|
||||
|
||||
for i in range(y_train.shape[0]):
|
||||
X_temp = X_train[i, :].reshape(X_train[i, :].shape[0], 1)
|
||||
|
||||
if y_train[i] == 1:
|
||||
if first_one is True:
|
||||
class_data_dic[1] = X_temp
|
||||
first_one = False
|
||||
else:
|
||||
class_data_dic[1] = np.append(class_data_dic[1], X_temp,
|
||||
axis=1)
|
||||
elif y_train[i] == 0:
|
||||
if first_zero is True:
|
||||
class_data_dic[0] = X_temp
|
||||
first_zero = False
|
||||
else:
|
||||
class_data_dic[0] = np.append(class_data_dic[0], X_temp,
|
||||
axis=1)
|
||||
|
||||
return class_data_dic
|
||||
|
||||
|
||||
class_data_dic = generate_data(class_data_dic={}, X_train=X_train,
|
||||
y_train=y_train)
|
||||
|
||||
"""find the mean (2x1) for each column. 0 and 1 are the values for having 0
|
||||
and 1 seperately"""
|
||||
|
||||
mean_0 = np.mean(class_data_dic[0], axis=1)
|
||||
mean_1 = np.mean(class_data_dic[1], axis=1)
|
||||
std_0 = np.std(class_data_dic[0], axis=1)
|
||||
std_1 = np.std(class_data_dic[1], axis=1)
|
||||
|
||||
print('mean_0={}, std_0={}, mean_1={}, std_1={}'.format(
|
||||
mean_0, mean_1, std_0, std_1))
|
||||
# plt.show()
|
||||
|
||||
"""define the likelyhood function (the pdf of the norm dist) """
|
||||
|
||||
|
||||
def likelyhood(x, mean, sigma):
|
||||
return np.exp(-(x - mean)**2 / (2 * sigma ** 2)) * (1 / (np.sqrt(2 * np.pi) * sigma ** 2))
|
||||
|
||||
|
||||
""" the posterior function times together all the likelihoods for each row
|
||||
of X_test here we are working out the likelihood func for each row of X_test
|
||||
with their corresponding mean and stdev """
|
||||
"""we then times this by the prior-prob-func to find the posterior func"""
|
||||
|
||||
|
||||
def posterior(X, X_train_class, mean_, std_):
|
||||
product = np.prod(likelyhood(X, mean_, std_), axis=1)
|
||||
product = product * (X_train_class.shape[0] / X.shape[0])
|
||||
return product
|
||||
|
||||
|
||||
""" we test the posterior fun with the test data to find the probs"""
|
||||
p_1 = posterior(X_test, class_data_dic[1], mean_1, std_1)
|
||||
p_0 = posterior(X_test, class_data_dic[0], mean_0, std_0)
|
||||
y_pred = 1 * (p_1 > p_0)
|
||||
|
||||
print(X_test.shape)
|
||||
print(class_data_dic[0].shape)
|
||||
print(p_1.shape)
|
||||
tp = len([i for i in range(0, y_test.shape[0])
|
||||
if y_test[i] == 0 and y_pred[i] == 0])
|
||||
tn = len([i for i in range(0, y_test.shape[0])
|
||||
if y_test[i] == 0 and y_pred[i] == 1])
|
||||
fp = len([i for i in range(0, y_test.shape[0])
|
||||
if y_test[i] == 1 and y_pred[i] == 0])
|
||||
fn = len([i for i in range(0, y_test.shape[0])
|
||||
if y_test[i] == 1 and y_pred[i] == 1])
|
||||
confusion_matrix_alg = np.array([[tp, tn], [fp, fn]])
|
||||
print(confusion_matrix_alg)
|
||||
|
||||
|
||||
classifer = GaussianNB()
|
||||
classifer.fit(X_train, y_train)
|
||||
|
||||
y_pred = classifer.predict(X_test)
|
||||
cm = confusion_matrix(y_test, y_pred)
|
||||
report = classification_report(y_test, y_pred)
|
||||
print(cm)
|
||||
print(report)
|
||||
3
bayes-learning/output.csv
Normal file
3
bayes-learning/output.csv
Normal file
@@ -0,0 +1,3 @@
|
||||
,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190
|
||||
0,-0.06256109973954335,-1.1132055228463764,-0.25358735848624026,-0.06256109973954335,0.22397828838050202,-0.4446136172329372,0.3194914177538505,0.12846515900715358,-0.15807422911289182,-0.34910048785958875,1.083596452740638,-0.25358735848624026,-0.7311530053529826,-0.4446136172329372,-0.25358735848624026,-0.15807422911289182,-1.3042317815930733,-1.7817974284598157,-0.7311530053529826,-0.7311530053529826,0.7970570646205928,-1.2087186522197249,-1.3997449109664217,-0.7311530053529826,-1.7817974284598157,-0.25358735848624026,-0.34910048785958875,0.415004547127199,-0.06256109973954335,-1.1132055228463764,-1.6862842990864673,-0.25358735848624026,-1.3997449109664217,-0.06256109973954335,-0.9221792640996795,-0.6356398759796341,-0.826666134726331,0.12846515900715358,0.22397828838050202,-0.06256109973954335,-0.6356398759796341,-0.6356398759796341,-0.826666134726331,0.3194914177538505,0.5105176765005474,-1.3997449109664217,-1.3042317815930733,-1.3042317815930733,-0.25358735848624026,-0.9221792640996795,-0.15807422911289182,-1.017692393473028,-0.826666134726331,-0.4446136172329372,-0.5401267466062857,-1.1132055228463764,-0.25358735848624026,-1.1132055228463764,-0.9221792640996795,-1.017692393473028,0.03295202963380511,-0.4446136172329372,-1.017692393473028,-1.2087186522197249,0.12846515900715358,0.415004547127199,-0.25358735848624026,-1.017692393473028,-0.25358735848624026,0.3194914177538505,-1.4952580403397702,-1.017692393473028,0.3194914177538505,1.1791095821139865,-0.5401267466062857,0.9880833233672897,-0.826666134726331,-1.1132055228463764,-1.8773105578331641,0.03295202963380511,-0.9221792640996795,-1.5907711697131186,-1.5907711697131186,2.0387277464741227,0.3194914177538505,-1.1132055228463764,-1.4952580403397702,1.083596452740638,-0.826666134726331,-1.5907711697131186,-0.6356398759796341,-1.8773105578331641,-0.7311530053529826,0.03295202963380511,-0.25358735848624026,-1.7817974284598157,-0.7311530053529826,-1.017692393473028,-0.06256109973954335,-0.25358735848624026,-0.54
01267466062857,1.3701358408606836,-0.6356398759796341,0.22397828838050202,-0.06256109973954335,-1.2087186522197249,0.03295202963380511,-0.4446136172329372,0.7970570646205928,-1.017692393473028,-0.6356398759796341,-0.4446136172329372,-0.6356398759796341,0.03295202963380511,0.22397828838050202,0.22397828838050202,-1.6862842990864673,-1.1132055228463764,0.8925701939939412,-1.017692393473028,0.12846515900715358,-0.15807422911289182,0.22397828838050202,0.12846515900715358,-0.6356398759796341,-0.9221792640996795,-0.06256109973954335,-0.25358735848624026,-0.826666134726331,0.03295202963380511,-0.25358735848624026,-1.2087186522197249,-0.25358735848624026,0.3194914177538505,-1.1132055228463764,-0.25358735848624026,-1.017692393473028,-1.1132055228463764,-0.25358735848624026,-0.25358735848624026,-0.06256109973954335,-1.7817974284598157,-0.4446136172329372,-1.6862842990864673,-1.8773105578331641,-0.25358735848624026,-1.7817974284598157,-1.3997449109664217,-1.1132055228463764,0.415004547127199,0.12846515900715358,-0.25358735848624026,-1.3042317815930733,-1.3997449109664217,-0.06256109973954335,-0.34910048785958875,0.415004547127199,-0.25358735848624026,-1.2087186522197249,-0.25358735848624026,-0.15807422911289182,0.3194914177538505,-1.1132055228463764,-1.7817974284598157,-1.3042317815930733,-0.9221792640996795,-1.5907711697131186,0.03295202963380511,0.03295202963380511,-1.3042317815930733,-0.25358735848624026,-0.15807422911289182,-0.9221792640996795,0.415004547127199,-1.6862842990864673,-1.1132055228463764,-0.7311530053529826,-0.25358735848624026,0.03295202963380511,-0.9221792640996795,-0.25358735848624026,0.3194914177538505,-0.25358735848624026,-0.06256109973954335,0.3194914177538505,-1.017692393473028,-1.3042317815930733,0.22397828838050202,-0.7311530053529826,-1.3042317815930733,0.415004547127199
|
||||
1,-0.5210059679415002,-1.0202085298693855,-0.25672225868556103,-0.49164111135750704,0.1543857334903445,-0.28608711526955427,0.06629116373836477,-0.25672225868556103,-0.5797356811094868,1.3289799968500746,0.12502087690635127,-0.28608711526955427,-0.60910053769348,-0.8440193903654261,-0.3154519718535475,-0.1979925455175745,-1.2551273825413316,0.44803429933027705,-0.22735740210156777,1.3583448534340679,0.3599397295782973,0.5067640124982635,-0.6384653942774733,0.5067640124982635,-1.4900462352132775,0.21311544665833101,-0.7852896771974396,-0.139262832349588,0.12502087690635127,0.3012100164103108,0.12502087690635127,-0.4329113981895205,-1.2257625259573384,-0.22735740210156777,-0.961478816701399,-1.0495733864533787,-0.7852896771974396,-0.3154519718535475,-0.37418168502153404,0.1543857334903445,0.12502087690635127,-0.10989797576559475,0.1543857334903445,0.06629116373836477,1.8575474153619531,-0.1979925455175745,0.56549372566625,-1.3725868088773046,-1.3725868088773046,0.56549372566625,-0.46227625477351375,-1.1376679562053587,-0.7852896771974396,1.2702502836820881,0.4773991559142703,0.4773991559142703,0.036926307154371514,-1.1083030996213654,-0.7559248206134463,-0.34481682843754075,1.240885427098095,-1.1376679562053587,-0.34481682843754075,0.3012100164103108,0.036926307154371514,0.3012100164103108,0.09565602032235802,-1.4606813786292843,0.06629116373836477,-0.5210059679415002,0.33057487299430405,0.56549372566625,-0.5210059679415002,-0.7559248206134463,1.387709710018061,2.092466268033899,-0.25672225868556103,0.33057487299430405,0.4773991559142703,-0.25672225868556103,0.2718451598263175,0.5361288690822568,-0.05116826259760824,0.3893045861622905,0.2718451598263175,-1.167032812789352,-0.1979925455175745,-0.139262832349588,0.3893045861622905,-1.5781408049652574,-1.6075056615492507,-0.7559248206134463,0.3012100164103108,-0.5797356811094868,-1.2551273825413316,0.18375059007433778,0.2718451598263175,0.4186694427462838,0.3012100164103108,-0.9027491035334125,-1.519411091797271,1.299615
1402660814,-0.05116826259760824,0.24248030324232428,0.036926307154371514,-1.4019516654612978,-0.5797356811094868,-1.2257625259573384,0.7710477217542028,-0.37418168502153404,-0.34481682843754075,-0.02180340601361499,0.18375059007433778,0.036926307154371514,0.1543857334903445,-0.139262832349588,-0.60910053769348,0.3012100164103108,-0.6678302508614665,0.5948585822502434,0.2718451598263175,-0.28608711526955427,0.06629116373836477,0.09565602032235802,0.56549372566625,0.44803429933027705,0.21311544665833101,-0.7559248206134463,0.3012100164103108,0.036926307154371514,1.123426000762122,0.5948585822502434,-0.37418168502153404,0.3012100164103108,1.4170745666020543,-0.9321139601174058,-1.5487759483812642,-1.5781408049652574,-1.4019516654612978,-1.313857095709318,0.06629116373836477,-1.2844922391253248,-0.7852896771974396,0.4773991559142703,-0.05116826259760824,-0.34481682843754075,-1.431316522045291,-1.4606813786292843,-0.5210059679415002,-0.16862768893358124,-0.8146545337814328,-0.6678302508614665,-0.4329113981895205,0.3599397295782973,-0.4329113981895205,0.06629116373836477,-0.46227625477351375,-0.5797356811094868,-1.0789382430373722,-1.4606813786292843,-0.5210059679415002,-1.167032812789352,0.4186694427462838,0.007561450570378263,-1.4900462352132775,-0.3154519718535475,0.06629116373836477,-0.139262832349588,-0.3154519718535475,-1.1083030996213654,-0.139262832349588,0.1543857334903445,-0.4329113981895205,-0.49164111135750704,-1.3725868088773046,-0.7852896771974396,0.56549372566625,0.06629116373836477,0.3012100164103108,-1.1083030996213654,0.5361288690822568,-0.28608711526955427,0.1543857334903445,0.3012100164103108,-0.726559964029453,-0.46227625477351375,-0.4329113981895205,-0.25672225868556103,-1.6075056615492507,-0.34481682843754075,0.007561450570378263
|
||||
|
1
bayes-learning/packages/matplotlib
Submodule
1
bayes-learning/packages/matplotlib
Submodule
Submodule bayes-learning/packages/matplotlib added at fdc7a8c384
84
bayes-learning/seaborn-descr.py
Normal file
84
bayes-learning/seaborn-descr.py
Normal file
@@ -0,0 +1,84 @@
|
||||
import pandas as pd
|
||||
import os
|
||||
import matplotlib
|
||||
import matplotlib.pyplot as plt
|
||||
# from sklearn.preprocessing import StandardScaler
|
||||
from sklearn.model_selection import train_test_split
|
||||
import numpy as np
|
||||
from scipy.stats import trim_mean, kurtosis
|
||||
from scipy.stats.mstats import mode, gmean, hmean
|
||||
|
||||
|
||||
def linebreak():
|
||||
"""prints a line break to split up functions"""
|
||||
print('\n ============================================== \n')
|
||||
|
||||
|
||||
matplotlib.rcParams['backend'] = 'TkAgg'
|
||||
plt.style.use('seaborn-dark-palette')
|
||||
|
||||
path = os.getcwd()
|
||||
data_file = str('/data/Social_Network_Ads.csv')
|
||||
|
||||
df = pd.read_csv(path + data_file)
|
||||
# df = pd.DataFrame(df)
|
||||
|
||||
df = df.sample(frac=1).reset_index(drop=True)
|
||||
|
||||
print('{} rows. {} cols.'.format(df.shape[0], df.shape[1]))
|
||||
|
||||
linebreak()
|
||||
print(df.iloc[0:10, :])
|
||||
|
||||
linebreak()
|
||||
X = df[['Age', 'EstimatedSalary']]
|
||||
y = df['Purchased'].to_frame()
|
||||
|
||||
print('X equals:')
|
||||
print(X.iloc[0:5])
|
||||
linebreak()
|
||||
print('y equals:')
|
||||
print(y[0:5])
|
||||
linebreak()
|
||||
|
||||
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33)
|
||||
|
||||
description = df.describe().drop(columns=['User ID'])
|
||||
|
||||
description_grouped = df.groupby(['Purchased'])
|
||||
|
||||
description_grouped_split = description_grouped['Age', 'EstimatedSalary']\
|
||||
.describe().unstack()
|
||||
|
||||
description_grouped_mode = description_grouped['Age'].apply(mode, axis=None)
|
||||
|
||||
df_quartile_slary = df.groupby('Purchased')['EstimatedSalary']\
|
||||
.quantile([.1, .5, .9])
|
||||
df_quartile_age = df.groupby('Purchased')['Age'].quantile([.1, .5, .9])
|
||||
|
||||
df_trimmed_mean = description_grouped['Age', 'EstimatedSalary'].\
|
||||
aggregate(trim_mean, .1)
|
||||
|
||||
df_summary = description_grouped['Age', 'EstimatedSalary']\
|
||||
.aggregate([np.median, np.std, np.mean, gmean, hmean])
|
||||
|
||||
df_var = description_grouped['Age', 'EstimatedSalary'].var()
|
||||
|
||||
df_null = df.isna().sum()
|
||||
|
||||
print(description)
|
||||
linebreak()
|
||||
print(description_grouped_split)
|
||||
linebreak()
|
||||
print(description_grouped_mode)
|
||||
linebreak()
|
||||
print(df_quartile_slary)
|
||||
print(df_quartile_age)
|
||||
linebreak()
|
||||
print(df_trimmed_mean)
|
||||
linebreak()
|
||||
print(df_summary)
|
||||
linebreak()
|
||||
print(df_var)
|
||||
linebreak()
|
||||
print(df_null)
|
||||
58
bayes-learning/seaborn-graphing.py
Normal file
58
bayes-learning/seaborn-graphing.py
Normal file
@@ -0,0 +1,58 @@
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
import os
|
||||
import matplotlib
|
||||
import matplotlib.pyplot as plt
|
||||
from scipy.stats import trim_mean
|
||||
from scipy.stats.mstats import mode, gmean, hmean
|
||||
from sklearn.model_selection import train_test_split
|
||||
import seaborn as sns
|
||||
|
||||
|
||||
def linebreak():
    """Print a blank-padded separator line between output sections."""
    separator = '\n ============================================== \n'
    print(separator)
|
||||
|
||||
|
||||
# Use the interactive TkAgg backend so plt.show() opens a window.
matplotlib.rcParams['backend'] = 'TkAgg'
# plt.style.use('seaborn-dark-palette')

# Locate the ads dataset relative to the current working directory.
csv_name = '/data/Social_Network_Ads.csv'
ads = pd.read_csv(os.getcwd() + csv_name)

# Shuffle the rows, then rebuild a clean 0..n-1 index.
ads = ads.sample(frac=1).reset_index(drop=True)

print(ads[0:5])

# Predictors and response for the purchase classifier.
features = ads[['Age', 'EstimatedSalary']]
labels = ads['Purchased']

# Hold out a third of the rows for testing.
features_train, features_test, labels_train, labels_test = \
    train_test_split(features, labels, test_size=0.33)


# ax1 = ads.plot.scatter(x='Age', y='EstimatedSalary', c='DarkBlue')
# ax2 = ads.query('Age < 30').plot.scatter(x='Age', y='EstimatedSalary',
#                                          c='DarkBlue')

# figure_1 = ads.query('Age < 35').plot(kind='scatter', x='Age',
#                                       y='EstimatedSalary')

# How many customers purchased (1) vs did not (0).
purchase_counts = ads['Purchased'].value_counts()

# figure_2 = plt.plot(purchase_counts)

# cp = sns.countplot(data=ads, y='Purchased')

# pal = dict(1="seagreen", 0="gray")

# Two panels side by side: purchase counts broken down by age, and the
# overall purchased/not-purchased totals.
figure, (age_axis, total_axis) = plt.subplots(ncols=2)

sns.countplot(data=ads, x='Age', hue='Purchased', ax=age_axis)
sns.countplot(data=ads, x='Purchased', ax=total_axis)

plt.show()

# print(purchase_counts)
|
||||
1661
bayes-learning/seaborn.sublime-workspace
Normal file
1661
bayes-learning/seaborn.sublime-workspace
Normal file
File diff suppressed because it is too large
Load Diff
60
bayes-learning/sites.txt
Normal file
60
bayes-learning/sites.txt
Normal file
@@ -0,0 +1,60 @@
|
||||
https://medium.com/@urvashilluniya/why-data-normalization-is-necessary-for-machine-learning-models-681b65a05029
|
||||
|
||||
visualise data
|
||||
https://towardsdatascience.com/the-art-of-effective-visualization-of-multi-dimensional-data-6c7202990c57
|
||||
|
||||
https://www.marsja.se/pandas-python-descriptive-statistics/
|
||||
|
||||
https://www.marsja.se/explorative-data-analysis-with-pandas-scipy-and-seaborn/
|
||||
|
||||
|
||||
|
||||
deep learning https://towardsdatascience.com/detecting-malaria-with-deep-learning-9e45c1e34b60
|
||||
|
||||
likelihood functions https://stats.stackexchange.com/questions/2641/what-is-the-difference-between-likelihood-and-probability
|
||||
|
||||
|
||||
plotting with df and matplotlib
|
||||
https://nbviewer.jupyter.org/urls/gist.github.com/fonnesbeck/5850463/raw/a29d9ffb863bfab09ff6c1fc853e1d5bf69fe3e4/3.+Plotting+and+Visualization.ipynb
|
||||
https://towardsdatascience.com/a-guide-to-pandas-and-matplotlib-for-data-exploration-56fad95f951c
|
||||
|
||||
seaborn
|
||||
|
||||
vis the dist
|
||||
https://seaborn.pydata.org/tutorial/distributions.html
|
||||
|
||||
|
||||
pdfs
|
||||
https://stats.stackexchange.com/questions/14483/intuitive-explanation-for-density-of-transformed-variable
|
||||
|
||||
kernel
|
||||
https://mathisonian.github.io/kde/
|
||||
https://chemicalstatistician.wordpress.com/2013/06/09/exploratory-data-analysis-kernel-density-estimation-in-r-on-ozone-pollution-data-in-new-york-and-ozonopolis/
|
||||
https://www.quora.com/What-is-kernel-density-estimation
|
||||
choose the bandwidth so that it minimises the mean integrated squared error, i.e. bandwidth = argmin(MISE)
|
||||
|
||||
|
||||
pandas plotting
|
||||
https://pandas.pydata.org/pandas-docs/stable/user_guide/visualization.html
|
||||
|
||||
|
||||
class
|
||||
https://jeffknupp.com/blog/2017/03/27/improve-your-python-python-classes-and-object-oriented-programming/
|
||||
https://realpython.com/python3-object-oriented-programming/
|
||||
|
||||
errors
|
||||
https://doughellmann.com/blog/2009/06/19/python-exception-handling-techniques/
|
||||
|
||||
public + private members
|
||||
https://www.tutorialsteacher.com/python/private-and-protected-access-modifiers-in-python
|
||||
|
||||
properties
|
||||
https://www.programiz.com/python-programming/property
|
||||
|
||||
|
||||
generators
|
||||
https://pythontips.com/2013/09/29/the-python-yield-keyword-explained/
|
||||
https://www.programiz.com/python-programming/generator
|
||||
|
||||
class methods with inheritance
|
||||
https://stackoverflow.com/questions/5738470/whats-an-example-use-case-for-a-python-classmethod
|
||||
6
bayes-learning/snippets.txt
Normal file
6
bayes-learning/snippets.txt
Normal file
@@ -0,0 +1,6 @@
|
||||
from sklearn.model_selection import train_test_split  # sklearn.cross_validation was removed in scikit-learn 0.20
|
||||
x_train, x_test, y_train, y_test = train_test_split(x, y , train_size = 0.7, random_state = 90)
|
||||
|
||||
normalizing methods : https://docs.microsoft.com/en-us/azure/machine-learning/studio-module-reference/normalize-data
|
||||
https://en.wikipedia.org/wiki/Feature_scaling
|
||||
we apply the normalizing methods to the training data first, then separately to the test data to compare!
|
||||
BIN
bayes-learning/test.png
Normal file
BIN
bayes-learning/test.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 20 KiB |
21
bayes-learning/test.py
Normal file
21
bayes-learning/test.py
Normal file
@@ -0,0 +1,21 @@
|
||||
import matplotlib.pyplot as plt
import numpy as np
import matplotlib.colors


# Evaluate a 2-D Gaussian bump on a square grid over [-3, 3] x [-3, 3].
grid = np.linspace(-3, 3)
xx, yy = np.meshgrid(grid, grid)
bump = np.exp(-(xx ** 2 + yy ** 2))

figure, (left, right) = plt.subplots(ncols=2)

palette = ["red", "orange", "gold", "limegreen", "k",
           "#550011", "purple", "seagreen"]

# Left panel: hand contourf the colour list directly.
left.set_title("contour with color list")
left.contourf(xx, yy, bump, colors=palette)

# Right panel: wrap the same colours in a ListedColormap instead.
right.set_title("contour with colormap")
filled = right.contourf(xx, yy, bump,
                        cmap=matplotlib.colors.ListedColormap(palette))
figure.colorbar(filled)

plt.show()
|
||||
Reference in New Issue
Block a user