```python
from nltk import word_tokenize
from nltk import bleu_score

# Reference sentence
references = 'I have a pen and apple'
ref = [word_tokenize(references)]
# Hypothesis sentence
hypothesis = 'I have a pineapple'
hyp = word_tokenize(hypothesis)
# Compute the score
bleuscore = bleu_score.sentence_bleu(ref, hyp)
# Output
print(bleuscore)
```
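With the default weights (0.25, 0.25, 0.25, 0.25), sentence_bleu takes a geometric mean of the modified 1- to 4-gram precisions, so a single zero precision drives the whole score to (practically) zero. Here is a minimal sketch that recomputes those precisions for the example above by hand; the ngram_precision helper is my own illustration, not an NLTK API:

```python
from collections import Counter
from math import exp

def ngram_precision(ref, hyp, n):
    """Clipped (modified) n-gram precision, as used by BLEU."""
    ref_counts = Counter(tuple(ref[i:i + n]) for i in range(len(ref) - n + 1))
    hyp_ngrams = Counter(tuple(hyp[i:i + n]) for i in range(len(hyp) - n + 1))
    # Each hypothesis n-gram counts only up to its frequency in the reference.
    overlap = sum(min(count, ref_counts[g]) for g, count in hyp_ngrams.items())
    return overlap, sum(hyp_ngrams.values())

ref = 'I have a pen and apple'.split()
hyp = 'I have a pineapple'.split()

for n in range(1, 5):
    matched, total = ngram_precision(ref, hyp, n)
    print(f'{n}-gram precision: {matched}/{total}')
# 1-gram precision: 3/4
# 2-gram precision: 2/3
# 3-gram precision: 1/2
# 4-gram precision: 0/1  <- this zero is what the warning below complains about

# Brevity penalty for the too-short hypothesis: exp(1 - 6/4) ~= 0.607
bp = exp(1 - len(ref) / len(hyp))
```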
```python
from nltk import word_tokenize
from nltk import bleu_score

# Original code
references = 'I have a pen and apple'
ref = [word_tokenize(references)]
hypothesis = 'I have a pineapple'
hyp = word_tokenize(hypothesis)
bleuscore = bleu_score.sentence_bleu(ref, hyp)
print(bleuscore)
# /bleu_score.py:516: UserWarning:
# The hypothesis contains 0 counts of 4-gram overlaps.
# Therefore the BLEU score evaluates to 0, independently of
# how many N-gram overlaps of lower order it contains.
# Consider using lower n-gram order or use SmoothingFunction()
#   warnings.warn(_msg)
# 5.238101011110965e-78

# Fix 1: when four consecutive words match, a proper score is produced
references = 'I have a pen and apple'
ref = [word_tokenize(references)]
hypothesis = 'I have a pen and pineapple'
hyp = word_tokenize(hypothesis)
bleuscore = bleu_score.sentence_bleu(ref, hyp)
print(bleuscore)
# 0.7598356856515925

# Fix 2: use a lower n-gram order (up to trigrams, equally weighted)
references = 'I have a pen and apple'
ref = [word_tokenize(references)]
hypothesis = 'I have a pineapple'
hyp = word_tokenize(hypothesis)
bleuscore = bleu_score.sentence_bleu(ref, hyp, (1./3., 1./3., 1./3.))
print(bleuscore)
# 0.3820903727892856

# Fix 3: use SmoothingFunction()
references = 'I have a pen and apple'
ref = [word_tokenize(references)]
hypothesis = 'I have a pineapple'
hyp = word_tokenize(hypothesis)
bleuscore = bleu_score.sentence_bleu(ref, hyp, smoothing_function=bleu_score.SmoothingFunction().method1)
print(bleuscore)
# 0.24117803988461298
```
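As a sanity check, fix 2's output can be reproduced from the by-hand precisions above: exp(1 - 6/4) * (3/4 * 2/3 * 1/2)^(1/3) ≈ 0.38209. Fix 3 shows only method1; NLTK's SmoothingFunction also exposes method0 through method7 (the smoothing schemes compared by Chen and Cherry, 2014), and the resulting score depends on the method chosen. A small comparison sketch, importing the same module via its explicit nltk.translate path:

```python
from nltk import word_tokenize
from nltk.translate.bleu_score import sentence_bleu, SmoothingFunction

ref = [word_tokenize('I have a pen and apple')]
hyp = word_tokenize('I have a pineapple')

smoother = SmoothingFunction()
# method0 is no smoothing (it reproduces the warning above);
# the exact values differ by method and NLTK version.
for name in ('method1', 'method2', 'method3', 'method4'):
    score = sentence_bleu(ref, hyp, smoothing_function=getattr(smoother, name))
    print(name, score)
```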