Mirror of https://github.com/gsi-upm/senpy
commit 65b8873092
parent 9ea177a780
militarpancho committed 2017-02-28 14:27:18 +01:00

8 changed files with 35 additions and 89 deletions

View File

@@ -19,7 +19,7 @@ class unifiedPlugin(SentimentPlugin):
     def deactivate(self, *args, **kwargs):
-        self.close()
+        self.save()

     def analyse(self, **kwargs):
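The only change here swaps `close()` for `save()` on deactivation, so whatever the plugin cached in its shelf is persisted instead of dropped. Below is a minimal sketch (not code from this commit) of that shelf pattern, using the `self.sh` / `self.save()` interface visible in the emotion-wnaffect diff further down; the class name and the `_train_model()` helper are illustrative.

```python
# Hedged sketch of the ShelfMixin pattern this change relies on: cache state
# in self.sh on activation, persist it with save() on deactivation.
from senpy.plugins import SentimentPlugin, ShelfMixin


class CachingSentimentPlugin(SentimentPlugin, ShelfMixin):

    def _train_model(self):
        # placeholder for an expensive initialisation step
        return {'weights': [0.1, 0.2, 0.3]}

    def activate(self, *args, **kwargs):
        # recompute only when the shelf does not already hold the value
        if 'model' not in self.sh:
            self.sh['model'] = self._train_model()
        self._model = self.sh['model']

    def deactivate(self, *args, **kwargs):
        # persist the shelf on deactivation (the point of the close() -> save() change)
        self.save()
```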

View File

@@ -15,7 +15,6 @@ from pattern.en import parse
 from senpy.plugins import EmotionPlugin, SenpyPlugin, ShelfMixin
 from senpy.models import Results, EmotionSet, Entry, Emotion
-logger = logging.getLogger(__name__)

 class EmotionTextPlugin(EmotionPlugin, ShelfMixin):
@@ -67,27 +66,25 @@ class EmotionTextPlugin(EmotionPlugin, ShelfMixin):
                           'sadness': 'sadness'}
-        self._load_emotions(self._info['hierarchy_path'])
+        self._load_emotions(self.hierarchy_path)
         if 'total_synsets' not in self.sh:
-            total_synsets = self._load_synsets(self._info['synsets_path'])
+            total_synsets = self._load_synsets(self.synsets_path)
             self.sh['total_synsets'] = total_synsets
         self._total_synsets = self.sh['total_synsets']
         if 'wn16' not in self.sh:
-            self._wn16_path = self._info['wn16_path']
+            self._wn16_path = self.wn16_path
             wn16 = WordNetCorpusReader(os.path.abspath("{0}".format(self._wn16_path)), nltk.data.find(self._wn16_path))
             self.sh['wn16'] = wn16
         self._wn16 = self.sh['wn16']
-        logger.info("EmoText plugin is ready to go!")

     def deactivate(self, *args, **kwargs):
         self.save()
-        logger.info("EmoText plugin is being deactivated...")

     def _my_preprocessor(self, text):
@@ -166,11 +163,9 @@ class EmotionTextPlugin(EmotionPlugin, ShelfMixin):
         return feature_set

-    def analyse(self, **params):
-        logger.debug("Analysing with params {}".format(params))
-        text_input = params.get("input", None)
+    def analyse_entry(self, entry, params):
+        text_input = entry.get("text", None)

         text=self._my_preprocessor(text_input)
@@ -178,8 +173,6 @@ class EmotionTextPlugin(EmotionPlugin, ShelfMixin):
         response = Results()
-        entry = Entry(id="Entry",
-                      text=text_input)

         emotionSet = EmotionSet(id="Emotions0")
         emotions = emotionSet.onyx__hasEmotion
@@ -188,5 +181,5 @@ class EmotionTextPlugin(EmotionPlugin, ShelfMixin):
                                     onyx__hasEmotionIntensity=feature_text[i]))

         entry.emotions = [emotionSet]
-        response.entries.append(entry)
-        return response
+        yield entry
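This file shows the core of the commit: `analyse(**params)` becomes `analyse_entry(entry, params)`, the plugin reads its text from the entry it receives, attaches an `EmotionSet`, and yields the entry instead of assembling a `Results` object itself. A condensed sketch of that shape, using only classes this plugin already imports; the emotion intensity and the class name are illustrative, not the real WordNet-Affect output.

```python
# Hedged sketch of the analyse_entry generator shape introduced in this commit.
from senpy.plugins import EmotionPlugin
from senpy.models import Emotion, EmotionSet


class MinimalEmotionPlugin(EmotionPlugin):

    def analyse_entry(self, entry, params):
        text_input = entry.get("text", None)
        # ... compute emotion features for text_input ...
        emotionSet = EmotionSet(id="Emotions0")
        emotionSet.onyx__hasEmotion.append(
            Emotion(onyx__hasEmotionIntensity=0.75))  # made-up value
        entry.emotions = [emotionSet]
        yield entry
```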

View File

@@ -8,23 +8,16 @@
     "language": {
       "aliases": ["language", "l"],
       "required": true,
-      "options": ["en"],
       "default": "en"
     }
   },
-  "requirements": {},
-  "synsets_path": "/senpy-plugins-data/emotion-wnaffect/a-synsets.xml",
-  "hierarchy_path": "/senpy-plugins-data/emotion-wnaffect/a-hierarchy.xml",
-  "wn16_path": "/senpy-plugins-data/emotion-wnaffect/wordnet1.6/dict",
+  "synsets_path": "/data/emotion-wnaffect/a-synsets.xml",
+  "hierarchy_path": "/data/emotion-wnaffect/a-hierarchy.xml",
+  "wn16_path": "/data/emotion-wnaffect/wordnet1.6/dict",
+  "onyx:usesEmotionModel": "emoml:big6",
   "requirements": [
     "nltk>=3.0.5",
-    "numpy>=1.8.2",
-    "scipy>=0.14.0",
-    "scikit-learn>=0.14.1",
     "lxml>=3.4.2",
-    "pandas",
-    "senpy",
     "pattern",
-    "textblob"
   ]
 }
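Besides moving the data files under /data, trimming the requirements list, and declaring `onyx:usesEmotionModel`, this definition pairs with the plugin change above: its keys are now read as plain attributes (`self.synsets_path`) rather than through `self._info`. A hedged sketch of that mapping, which the commit implies but does not show explicitly:

```python
# Hedged sketch: top-level keys of a .senpy definition exposed as plugin
# attributes. DefinitionBackedPlugin is illustrative; senpy's own loading
# code is not part of this diff.
class DefinitionBackedPlugin(object):

    def __init__(self, definition):
        # expose every top-level key of the JSON definition as an attribute
        for key, value in definition.items():
            setattr(self, key, value)


plugin = DefinitionBackedPlugin({
    "synsets_path": "/data/emotion-wnaffect/a-synsets.xml",
    "hierarchy_path": "/data/emotion-wnaffect/a-hierarchy.xml",
    "wn16_path": "/data/emotion-wnaffect/wordnet1.6/dict",
})
print(plugin.synsets_path)  # /data/emotion-wnaffect/a-synsets.xml
```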

View File

@@ -7,11 +7,6 @@ logger = logging.getLogger(__name__)

 class ExamplePlugin(SentimentPlugin):

-    def __init__(self, *args, **kwargs):
-        super(ExamplePlugin, self).__init__(*args, **kwargs)
-        logger.warn('%s: the answer to life, the universe and everything'
-                    % self._info.get('custom_attribute'))
-
     def analyse(self, *args, **kwargs):
         logger.warn('Analysing with the example.')
         logger.warn('The answer to this response is: %s.' % kwargs['parameter'])
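The deleted constructor only logged a value fetched through `self._info`. In line with the attribute-style access used elsewhere in this commit, the same log could presumably live in `activate()`; the commit itself just removes the constructor, so the sketch below is an assumption about intent, and the default value for `custom_attribute` is made up.

```python
# Illustrative only: assumes the custom attribute remains reachable directly
# on the plugin instance, as the other files in this commit do for their keys.
import logging

logger = logging.getLogger(__name__)


class ExamplePluginSketch(object):
    custom_attribute = 42  # stand-in for the value from the plugin definition

    def activate(self, *args, **kwargs):
        logger.warn('%s: the answer to life, the universe and everything',
                    self.custom_attribute)
```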

View File

@@ -6,17 +6,19 @@ from senpy.models import Results, Sentiment, Entry

 class Sentiment140Plugin(SentimentPlugin):
-    def analyse(self, **params):
+    def analyse(self,entry,params):
         lang = params.get("language", "auto")
         res = requests.post("http://www.sentiment140.com/api/bulkClassifyJson",
                             json.dumps({"language": lang,
-                                        "data": [{"text": params["input"]}]
+                                        "data": [{"text": entry.get("text",None)}]
                                         }
                                        )
                            )
         p = params.get("prefix", None)
-        response = Results(prefix=p)
         polarity_value = self.maxPolarityValue*int(res.json()["data"][0]
                                                    ["polarity"]) * 0.25
         polarity = "marl:Neutral"
@@ -25,16 +27,10 @@ class Sentiment140Plugin(SentimentPlugin):
             polarity = "marl:Positive"
         elif polarity_value < neutral_value:
             polarity = "marl:Negative"
-        entry = Entry(id="Entry0",
-                      nif__isString=params["input"])
         sentiment = Sentiment(id="Sentiment0",
                               prefix=p,
                               marl__hasPolarity=polarity,
                               marl__polarityValue=polarity_value)
-        sentiment.prov__wasGeneratedBy = self.id
-        entry.sentiments = []
         entry.sentiments.append(sentiment)
-        entry.language = lang
-        response.entries.append(entry)
-        return response
+        yield entry
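After this change the plugin receives an `Entry`, attaches a `Sentiment`, and yields the entry back; it no longer builds `Entry` or `Results` objects itself. A hedged sketch of driving the new generator interface from outside the framework; the offline stand-in plugin replaces the Sentiment140 HTTP call so the snippet has no network dependency, and the caller handles the wrapping that senpy would normally do.

```python
# Hedged sketch: the caller owns the Entry and the Results wrapper.
from senpy.models import Entry, Results, Sentiment


class OfflineSentimentPlugin(object):

    def analyse_entry(self, entry, params):
        text = entry.get("text", None) or ""
        polarity = "marl:Positive" if "good" in text else "marl:Neutral"
        entry.sentiments.append(Sentiment(id="Sentiment0",
                                          marl__hasPolarity=polarity))
        yield entry


plugin = OfflineSentimentPlugin()
entry = Entry(text="senpy is good")
entry.sentiments = []  # explicit, mirroring the initialisation this commit removes
response = Results()
for result in plugin.analyse_entry(entry, {"language": "en"}):
    response.entries.append(result)
```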

View File

@@ -13,24 +13,16 @@ from os import path
 from senpy.plugins import SentimentPlugin, SenpyPlugin
 from senpy.models import Results, Entry, Sentiment
-logger = logging.getLogger(__name__)

 class SentiTextPlugin(SentimentPlugin):

-    def __init__(self, info, *args, **kwargs):
-        super(SentiTextPlugin, self).__init__(info, *args, **kwargs)
-        self.id = info['module']
-        base = path.abspath(path.dirname(__file__))
-        self.swn_path = path.join(base, info['sentiword_path'])
-        self.pos_path = path.join(base, info['pos_path'])
-        self._swn = None
-        self._pos_tagger = None
-
     def _load_swn(self):
+        self.swn_path = path.join(path.abspath(path.dirname(__file__)), self.sentiword_path)
         swn = SentiWordNet(self.swn_path)
         return swn

     def _load_pos_tagger(self):
+        self.pos_path = path.join(path.abspath(path.dirname(__file__)), self.pos_path)
         with open(self.pos_path, 'r') as f:
             tagger = pickle.load(f)
         return tagger
@@ -38,12 +30,6 @@ class SentiTextPlugin(SentimentPlugin):
     def activate(self, *args, **kwargs):
         self._swn = self._load_swn()
         self._pos_tagger = self._load_pos_tagger()
-        logger.info("SentiText plugin is ready to go!")
-
-    def deactivate(self, *args, **kwargs):
-        logger.info("SentiText plugin is being deactivated...")

     def _remove_punctuation(self, tokens):
         return [t for t in tokens if t not in string.punctuation]
@@ -80,10 +66,9 @@ class SentiTextPlugin(SentimentPlugin):
         return None

-    def analyse(self, **params):
-        logger.debug("Analysing with params {}".format(params))
-        text = params.get("input", None)
+    def analyse_entry(self, entry, params):
+        text = entry.get("text", None)
         tokens = self._tokenize(text)
         tokens = self._pos(tokens)
@@ -95,7 +80,6 @@ class SentiTextPlugin(SentimentPlugin):
             if len(lemmas) == 0:
                 continue
             tokens[i]['lemmas'][w[0]] = lemmas
-        logger.debug("Tokens: {}".format(tokens))
         trans = TextBlob(unicode(text)).translate(from_lang='es',to='en')

         useful_synsets = {}
@@ -107,7 +91,6 @@ class SentiTextPlugin(SentimentPlugin):
                     continue
                 eq_synset = self._compare_synsets(synsets, tokens, s_i)
                 useful_synsets[s_i][t_w] = eq_synset
-        logger.debug("Synsets used for analysis: {}".format(useful_synsets))

         scores = {}
         for i in tokens:
@@ -128,12 +111,10 @@ class SentiTextPlugin(SentimentPlugin):
                         score['score'] = f_score
                     scores[i][word] = score
                     break
-        logger.debug("All scores (some not used): {}".format(scores))

         lang = params.get("language", "auto")
         p = params.get("prefix", None)
-        response = Results()

         for i in scores:
             n_pos = 0.0
@@ -158,17 +139,11 @@ class SentiTextPlugin(SentimentPlugin):
             elif g_score < 0.5:
                 polarity = 'marl:Negative'

-            entry = Entry(id="Entry"+str(i),
-                          nif_isString=tokens[i]['sentence'])
             opinion = Sentiment(id="Opinion0"+'_'+str(i),
                                 marl__hasPolarity=polarity,
                                 marL__polarityValue=float("{0:.2f}".format(g_score)))
-            opinion["prov:wasGeneratedBy"] = self.id
-            entry.sentiments = []
             entry.sentiments.append(opinion)
-            entry.language = lang
-            response.entries.append(entry)
-        return response
+        yield entry
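Alongside the `analyse_entry` migration, this file drops the `__init__` override and resolves resource paths lazily inside the two loaders instead. A hedged sketch of that relocation; `resolve_resource()` is illustrative, and the file name comes from the paired .senpy definition shown below.

```python
# Hedged sketch of resolving a resource relative to the plugin's own
# directory at load time, as the two loaders now do.
from os import path


def resolve_resource(plugin_file, resource_name):
    """Return an absolute path for a resource shipped next to the plugin."""
    base = path.abspath(path.dirname(plugin_file))
    return path.join(base, resource_name)


swn_path = resolve_resource(__file__, "SentiWordNet_3.0.txt")
```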

View File

@@ -12,7 +12,11 @@
       "default": "es"
     }
   },
-  "requirements": {},
+  "requirements": [
+    "nltk>=3.0.5",
+    "scipy>=0.14.0",
+    "textblob"
+  ],
  "sentiword_path": "SentiWordNet_3.0.txt",
  "pos_path": "unigram_spanish.pickle"
 }
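The `requirements` key changes from an empty object to a concrete list of pinned dependencies. How senpy consumes this field is not shown in the commit; the sketch below is only one hedged way a host could act on it, installing each entry with pip.

```python
# Hedged sketch: installing the list-valued "requirements" of a plugin
# definition with pip. Illustrative, not senpy's own mechanism.
import json
import subprocess
import sys

definition = json.loads("""
{
  "requirements": ["nltk>=3.0.5", "scipy>=0.14.0", "textblob"]
}
""")

for requirement in definition["requirements"]:
    subprocess.check_call([sys.executable, "-m", "pip", "install", requirement])
```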

View File

@@ -10,23 +10,15 @@ from senpy.models import Results, Entry, Sentiment,Error

 class DaedalusPlugin(SentimentPlugin):

-    def activate(self, *args, **kwargs):
-        pass
-
-    def deactivate(self, *args, **kwargs):
-        self.close()
-
-    def analyse(self, **params):
-        txt = params["input"]
+    def analyse_entry(self, entry, params):
+        txt = entry.get("text",None)
         model = params["model"] # general_es / general_es / general_fr
         api = 'http://api.meaningcloud.com/sentiment-2.1'
         lang = params.get("language")
         key = params["apiKey"]
         parameters = {'key': key,'model': model,'lang': lang,'of': 'json','txt': txt,'src': 'its-not-a-real-python-sdk'}
         r = requests.post(api, params=parameters)
-        print(r.text)

         value = r.json().get('score_tag', None)
         if not value:
@@ -42,10 +34,8 @@ class DaedalusPlugin(SentimentPlugin):
         elif 'P' in value:
             polarity = 'marl:Positive'
             polarityValue = 1
-        entry = Entry(id="Entry0",nif_isString=txt)
         opinion = Sentiment(id="Opinion0",marl__hasPolarity=polarity,marl__polarityValue = polarityValue)
-        opinion["prov:wasGeneratedBy"] = self.id
-        entry.sentiments = []
         entry.sentiments.append(opinion)
-        response.entries.append(entry)
-        return response
+        yield entry
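The visible hunk only shows the `'P'` branch of the MeaningCloud `score_tag` handling; the sketch below rounds out that mapping for illustration. The negative and neutral handling here is an assumption, not the plugin's exact code.

```python
# Hedged sketch of a score_tag -> marl polarity mapping around the hunk above.
def map_score_tag(value):
    """Map a MeaningCloud score_tag to a (marl polarity, polarity value) pair."""
    polarity, polarityValue = 'marl:Neutral', 0
    if value in ('N', 'N+'):            # assumed negative handling
        polarity, polarityValue = 'marl:Negative', -1
    elif 'P' in value:                  # matches 'P' and 'P+', as in the diff
        polarity, polarityValue = 'marl:Positive', 1
    return polarity, polarityValue


print(map_score_tag('P+'))  # ('marl:Positive', 1)
```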