
Machine Learning in Action: A First Look at the Naive Bayes Algorithm and Its Python Code (Part 1)

Here is the Bayes project source code, modified by me so that it runs perfectly under Python 3.0; you can download it directly for study:
http://download.csdn.net/download/qq_36396104/10142849
Section 1: Code for loading the data:

def loadDataSet():
    postingList = [['my', 'dog', 'has', 'flea', 'problems', 'help', 'please'],
                   ['maybe', 'not', 'take', 'him', 'to', 'dog', 'park', 'stupid'],
                   ['my', 'dalmation', 'is', 'so', 'cute', 'I', 'love', 'him'],
                   ['stop', 'posting', 'stupid', 'worthless', 'garbage'],
                   ['mr', 'licks', 'ate', 'my', 'steak', 'how', 'to', 'stop', 'him'],
                   ['quit', 'buying', 'worthless', 'dog', 'food', 'stupid']]
    classVec = [0, 1, 0, 1, 0, 1]    # 1 is abusive, 0 not
    return postingList, classVec

'''Collect every word that appears in the documents into a set, giving the reference vocabulary'''
def createVocabList(dataSet):
    vocabSet = set([])
    for document in dataSet:
        vocabSet = vocabSet | set(document)  # union: every word used in any post (unique, in no particular order)
    return list(vocabSet)

'''Convert one post into a numeric word vector indexed by vocabList; from the vector plus vocabList the words of the post can be recovered'''
def setOfWords2Vec(vocabList, inputSet):
    returnVec = [0] * len(vocabList)
    for word in inputSet:
        if word in vocabList:  # check whether each word of inputSet appears in vocabList
            returnVec[vocabList.index(word)] = 1
        else:
            print("the word:%s is not in my Vocabulary!" % word)
    return returnVec
'''Bag-of-words model: a word may appear in a document more than once (>=1); the count carries information that a simple present/absent flag cannot express'''
def bagOfWords2VecMN(vocabList, inputSet):
    returnVec = [0] * len(vocabList)
    for word in inputSet:
        if word in vocabList:  # check whether each word of inputSet appears in vocabList
            returnVec[vocabList.index(word)] += 1
    return returnVec
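
To make the difference between the two encodings concrete, here is a small illustrative check (the vocabulary and post below are made up, and it assumes the functions above are saved as LoadData.py, as the later sections do): setOfWords2Vec only records whether a word occurs, while bagOfWords2VecMN counts how many times it occurs.

# Illustrative comparison of the set-of-words and bag-of-words encodings.
# The toy vocabulary and post are made up for this example.
import LoadData

vocab = ['dog', 'stupid', 'garbage']             # hypothetical toy vocabulary
post = ['stupid', 'garbage', 'stupid']           # hypothetical post with a repeated word
print(LoadData.setOfWords2Vec(vocab, post))      # [0, 1, 1]  -> presence/absence only
print(LoadData.bagOfWords2VecMN(vocab, post))    # [0, 2, 1]  -> the repeated word is counted twice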


Section 2: The naive Bayes algorithm:

from numpy import *
import LoadData

'''From the word vectors, compute the probability of each word (attribute) within each class'''
def trainNB0(trainMatrix, trainCategory):  # input: the word vectors and their corresponding class labels
    numTrainDocs = len(trainMatrix)
    numWords = len(trainMatrix[0])
    pAbusive = sum(trainCategory) / float(numTrainDocs)  # probability that a document is abusive
    # p0Num = zeros(numWords); p1Num = zeros(numWords)  # per-word counts in normal / abusive documents
    # p0Denom = 0.0; p1Denom = 0.0                      # total word counts in normal / abusive documents
    '''To avoid probabilities that are exactly 0:'''
    p0Num = ones(numWords)   # per-word counts in normal documents
    p1Num = ones(numWords)   # per-word counts in abusive documents
    p0Denom = 1.0            # total word count in normal documents
    p1Denom = 1.0            # total word count in abusive documents
    for i in range(numTrainDocs):  # tally each word vector
        if trainCategory[i] == 1:
            p1Num += trainMatrix[i]
            p1Denom += sum(trainMatrix[i])
        else:
            p0Num += trainMatrix[i]
            p0Denom += sum(trainMatrix[i])
    # p1Vect = p1Num / p1Denom  # probability of each vocabulary word in abusive documents, i.e. P(w|c1)
    # p0Vect = p0Num / p0Denom  # probability of each vocabulary word in normal documents, i.e. P(w|c0)
    '''To prevent underflow (results so small they get rounded to 0):'''
    p1Vect = log(p1Num / p1Denom)  # log-probability of each vocabulary word in abusive documents, log P(w|c1)
    p0Vect = log(p0Num / p0Denom)  # log-probability of each vocabulary word in normal documents, log P(w|c0)
    return p0Vect, p1Vect, pAbusive

def classifyNB(vec2Classify, p0Vec, p1Vec, pClass1):  # combine the learned probabilities to score each class
    p1 = sum(vec2Classify * p1Vec) + log(pClass1)
    p0 = sum(vec2Classify * p0Vec) + log(1.0 - pClass1)
    if p1 > p0:
        return 1
    else:
        return 0

def testingNB():
    # compute the conditional probabilities from the training posts
    listOPosts, listClasses = LoadData.loadDataSet()
    myVocabList = LoadData.createVocabList(listOPosts)
    trainMat = []
    for postinDoc in listOPosts:
        trainMat.append(LoadData.setOfWords2Vec(myVocabList, postinDoc))
    p0V, p1V, pAb = trainNB0(array(trainMat), array(listClasses))
    '''classify new posts'''
    testEntry = ['love', 'my', 'dalmation']
    thisDoc = array(LoadData.setOfWords2Vec(myVocabList, testEntry))
    print(testEntry, 'classified as:', classifyNB(thisDoc, p0V, p1V, pAb))
    testEntry = ['stupid', 'garbage']
    thisDoc = array(LoadData.setOfWords2Vec(myVocabList, testEntry))
    print(testEntry, 'classified as:', classifyNB(thisDoc, p0V, p1V, pAb))
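
The commented-out lines in trainNB0 show the two numerical fixes: initializing the counts with ones() avoids probabilities of exactly 0, and taking log() avoids underflow, because multiplying many small per-word probabilities quickly rounds to 0.0 in floating point, whereas summing their logs (which is what classifyNB does) stays in a usable range. A minimal sketch of the underflow problem, with made-up probability values:

# Why trainNB0 stores log-probabilities: a made-up illustration of underflow.
from numpy import full, prod, log, sum

probs = full(1000, 1e-4)     # hypothetical per-word conditional probabilities
print(prod(probs))           # 0.0 -- the raw product underflows in float64
print(sum(log(probs)))       # about -9210.3 -- the log-sum is still usable for comparison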


Section 3: Test code:

import LoadData
import bayes
'''Prepare the data'''
listOPosts,listClasses = LoadData.loadDataSet()
myVocabList = LoadData.createVocabList(listOPosts)
print(myVocabList)
print(LoadData.setOfWords2Vec(myVocabList,listOPosts[0]))
print(LoadData.setOfWords2Vec(myVocabList,listOPosts[3]))
'''Compute the probabilities from the word vectors:'''
from numpy import *

listOPosts, listClasses = LoadData.loadDataSet()
myVocabList = LoadData.createVocabList(listOPosts)
trainMat = []
for postinDoc in listOPosts:
    trainMat.append(LoadData.setOfWords2Vec(myVocabList, postinDoc))
print(trainMat)
p0V,p1V,pAb = bayes.trainNB0(trainMat,listClasses)
print(pAb)
print(p0V)
print(p1V)
'''Test the algorithm:'''
bayes.testingNB()
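
Note that testingNB only uses the set-of-words model. As a final illustration (not part of the original project code), the bag-of-words encoding defined in LoadData could be plugged into the same pipeline simply by swapping the vectorizing function:

# Sketch: training and classifying with the bag-of-words model instead of the set-of-words model.
# The test post below is made up for illustration.
import LoadData
import bayes
from numpy import array

listOPosts, listClasses = LoadData.loadDataSet()
myVocabList = LoadData.createVocabList(listOPosts)
trainMat = [LoadData.bagOfWords2VecMN(myVocabList, post) for post in listOPosts]
p0V, p1V, pAb = bayes.trainNB0(array(trainMat), array(listClasses))
testEntry = ['stupid', 'stupid', 'garbage']   # the repeated word now contributes twice to the score
thisDoc = array(LoadData.bagOfWords2VecMN(myVocabList, testEntry))
print(testEntry, 'classified as:', bayes.classifyNB(thisDoc, p0V, p1V, pAb))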