Compare commits
1 commit

Author | SHA1 | Date
---|---|---
 | dabf444607 |

.gitignore (vendored), 5 lines
@@ -1,5 +0,0 @@
*.pyc
.*
*egg-info
dist
README.html
@@ -1,6 +0,0 @@
language: python
python:
- "2.7"
install: "pip install -r requirements.txt"
# run nosetests - Tests
script: nosetests

@@ -1,3 +0,0 @@
from python:2.7-onbuild

ENTRYPOINT ["python", "-m", "senpy"]

@@ -1,5 +1,3 @@
include requirements.txt
include test-requirements.txt
include README.md
include senpy/context.jsonld
graft senpy/plugins
README.md (new file), 19 lines
@@ -0,0 +1,19 @@

[Senpy](http://senpy.herokuapp.com)
=========================================
Example endpoint that yields results compatible with the EUROSENTIMENT format and exposes the NIF API.
It can be used as a template to adapt existing services to EUROSENTIMENT or to create new services.

[DEMO on Heroku](http://eurosentiment-endpoint.herokuapp.com)

This endpoint serves as a bootcamp for any developer wishing to build applications that use the EUROSENTIMENT services.

Acknowledgement
---------------
EUROSENTIMENT PROJECT
Grant Agreement no: 296277
Starting date: 01/09/2012
Project duration: 24 months
README.rst, 91 lines
@@ -1,91 +0,0 @@
.. image:: img/header.png
   :height: 6em
   :target: http://demos.gsi.dit.upm.es/senpy

.. image:: https://travis-ci.org/gsi-upm/senpy.svg?branch=master
   :target: https://travis-ci.org/gsi-upm/senpy

Senpy lets you create sentiment analysis web services easily, fast and using a well known API.
As a bonus, senpy services use semantic vocabularies (e.g. `NIF <http://persistence.uni-leipzig.org/nlp2rdf/>`_, `Marl <http://www.gsi.dit.upm.es/ontologies/marl>`_, `Onyx <http://www.gsi.dit.upm.es/ontologies/onyx>`_) and formats (turtle, JSON-LD, xml-rdf).

Have you ever wanted to turn your sentiment analysis algorithms into a service?
With senpy, now you can.
It provides all the tools so you just have to worry about improving your algorithms:

`See it in action. <http://demos.gsi.dit.upm.es/senpy>`_

Installation
------------
The stable version can be installed in three ways.

Through PIP
***********

.. code:: bash

   pip install --user senpy


Alternatively, you can use the development version:

.. code:: bash

   git clone git@github.com:gsi-upm/senpy
   cd senpy
   pip install --user .

If you want to install senpy globally, use sudo instead of the ``--user`` flag.

Docker Image
************
Build the image or use the pre-built one: ``docker run -ti -p 5000:5000 balkian/senpy --host 0.0.0.0 --default-plugins``.

To add custom plugins, add a volume and tell senpy where to find the plugins: ``docker run -ti -p 5000:5000 -v <PATH OF PLUGINS>:/plugins balkian/senpy --host 0.0.0.0 --default-plugins -f /plugins``

Usage
-----

The easiest and recommended way is to just use the command-line tool to load your plugins and launch the server.

.. code:: bash

   senpy

or, alternatively:

.. code:: bash

   python -m senpy


This will create a server with any modules found in the current path.
For more options, see the `--help` page.

Alternatively, you can use the modules included in senpy to build your own application.

Deploying on Heroku
-------------------
Use a free heroku instance to share your service with the world.
Just use the example Procfile in this repository, or build your own.


`DEMO on heroku <http://senpy.herokuapp.com>`_


For more information, check out the `documentation <http://senpy.readthedocs.org>`_.
------------------------------------------------------------------------------------


Acknowledgement
---------------
This development has been partially funded by the European Union through the MixedEmotions Project (project number H2020 655632), as part of the `RIA ICT 15 Big data and Open Data Innovation and take-up` programme.


.. image:: img/me.png
   :target: http://mixedemotions-project.eu
   :height: 100px
   :alt: MixedEmotions Logo

.. image:: img/eu-flag.jpg
   :height: 100px
   :target: http://ec.europa.eu/research/participants/portal/desktop/en/opportunities/index.html
app.py, 20 lines
@@ -15,29 +15,21 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This is a helper for development. If you want to run Senpy use:
Simple Sentiment Analysis server for EUROSENTIMENT

    python -m senpy
This class shows how to use the nif_server module to create custom services.
"""
from gevent.monkey import patch_all; patch_all()
import gevent
import config
from flask import Flask
from senpy.extensions import Senpy
import logging
import os
from gevent.wsgi import WSGIServer

logging.basicConfig(level=logging.DEBUG)

app = Flask(__name__)
mypath = os.path.dirname(os.path.realpath(__file__))
sp = Senpy(app, os.path.join(mypath, "plugins"), default_plugins=True)
sp.activate_all()

sp = Senpy()
sp.init_app(app)

if __name__ == '__main__':
    import logging
    logging.basicConfig(level=config.DEBUG)
    app.debug = config.DEBUG
    http_server = WSGIServer(('', config.SERVER_PORT), app)
    http_server.serve_forever()
    app.run(host="0.0.0.0", use_reloader=False)
docs/.gitignore (vendored), 1 line
@@ -1 +0,0 @@
_build
177
docs/Makefile
@@ -1,177 +0,0 @@
|
||||
# Makefile for Sphinx documentation
|
||||
#
|
||||
|
||||
# You can set these variables from the command line.
|
||||
SPHINXOPTS =
|
||||
SPHINXBUILD = sphinx-build
|
||||
PAPER =
|
||||
BUILDDIR = _build
|
||||
|
||||
# User-friendly check for sphinx-build
|
||||
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
|
||||
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
|
||||
endif
|
||||
|
||||
# Internal variables.
|
||||
PAPEROPT_a4 = -D latex_paper_size=a4
|
||||
PAPEROPT_letter = -D latex_paper_size=letter
|
||||
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
# the i18n builder cannot share the environment and doctrees with the others
|
||||
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
|
||||
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
|
||||
|
||||
help:
|
||||
@echo "Please use \`make <target>' where <target> is one of"
|
||||
@echo " html to make standalone HTML files"
|
||||
@echo " dirhtml to make HTML files named index.html in directories"
|
||||
@echo " singlehtml to make a single large HTML file"
|
||||
@echo " pickle to make pickle files"
|
||||
@echo " json to make JSON files"
|
||||
@echo " htmlhelp to make HTML files and a HTML help project"
|
||||
@echo " qthelp to make HTML files and a qthelp project"
|
||||
@echo " devhelp to make HTML files and a Devhelp project"
|
||||
@echo " epub to make an epub"
|
||||
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
|
||||
@echo " latexpdf to make LaTeX files and run them through pdflatex"
|
||||
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
|
||||
@echo " text to make text files"
|
||||
@echo " man to make manual pages"
|
||||
@echo " texinfo to make Texinfo files"
|
||||
@echo " info to make Texinfo files and run them through makeinfo"
|
||||
@echo " gettext to make PO message catalogs"
|
||||
@echo " changes to make an overview of all changed/added/deprecated items"
|
||||
@echo " xml to make Docutils-native XML files"
|
||||
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
|
||||
@echo " linkcheck to check all external links for integrity"
|
||||
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
|
||||
|
||||
clean:
|
||||
rm -rf $(BUILDDIR)/*
|
||||
|
||||
html:
|
||||
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
||||
|
||||
dirhtml:
|
||||
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
|
||||
|
||||
singlehtml:
|
||||
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
|
||||
|
||||
pickle:
|
||||
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
|
||||
@echo
|
||||
@echo "Build finished; now you can process the pickle files."
|
||||
|
||||
json:
|
||||
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
|
||||
@echo
|
||||
@echo "Build finished; now you can process the JSON files."
|
||||
|
||||
htmlhelp:
|
||||
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run HTML Help Workshop with the" \
|
||||
".hhp project file in $(BUILDDIR)/htmlhelp."
|
||||
|
||||
qthelp:
|
||||
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
|
||||
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
|
||||
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Senpy.qhcp"
|
||||
@echo "To view the help file:"
|
||||
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Senpy.qhc"
|
||||
|
||||
devhelp:
|
||||
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
|
||||
@echo
|
||||
@echo "Build finished."
|
||||
@echo "To view the help file:"
|
||||
@echo "# mkdir -p $$HOME/.local/share/devhelp/Senpy"
|
||||
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Senpy"
|
||||
@echo "# devhelp"
|
||||
|
||||
epub:
|
||||
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
|
||||
@echo
|
||||
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
|
||||
|
||||
latex:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo
|
||||
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
|
||||
@echo "Run \`make' in that directory to run these through (pdf)latex" \
|
||||
"(use \`make latexpdf' here to do that automatically)."
|
||||
|
||||
latexpdf:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo "Running LaTeX files through pdflatex..."
|
||||
$(MAKE) -C $(BUILDDIR)/latex all-pdf
|
||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
latexpdfja:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo "Running LaTeX files through platex and dvipdfmx..."
|
||||
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
|
||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
text:
|
||||
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
|
||||
@echo
|
||||
@echo "Build finished. The text files are in $(BUILDDIR)/text."
|
||||
|
||||
man:
|
||||
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
|
||||
@echo
|
||||
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
|
||||
|
||||
texinfo:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo
|
||||
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
|
||||
@echo "Run \`make' in that directory to run these through makeinfo" \
|
||||
"(use \`make info' here to do that automatically)."
|
||||
|
||||
info:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo "Running Texinfo files through makeinfo..."
|
||||
make -C $(BUILDDIR)/texinfo info
|
||||
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
|
||||
|
||||
gettext:
|
||||
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
|
||||
@echo
|
||||
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
|
||||
|
||||
changes:
|
||||
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
|
||||
@echo
|
||||
@echo "The overview file is in $(BUILDDIR)/changes."
|
||||
|
||||
linkcheck:
|
||||
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
|
||||
@echo
|
||||
@echo "Link check complete; look for any errors in the above output " \
|
||||
"or in $(BUILDDIR)/linkcheck/output.txt."
|
||||
|
||||
doctest:
|
||||
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
|
||||
@echo "Testing of doctests in the sources finished, look at the " \
|
||||
"results in $(BUILDDIR)/doctest/output.txt."
|
||||
|
||||
xml:
|
||||
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
|
||||
@echo
|
||||
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
|
||||
|
||||
pseudoxml:
|
||||
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
|
||||
@echo
|
||||
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
|
208
docs/api.rst
@@ -1,208 +0,0 @@
|
||||
NIF API
|
||||
=======
|
||||
.. http:get:: /api
|
||||
|
||||
Basic endpoint for sentiment/emotion analysis.
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
GET /api?input=I%20love%20GSI HTTP/1.1
|
||||
Host: localhost
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Vary: Accept
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"@context": [
|
||||
"http://127.0.0.1/static/context.jsonld",
|
||||
],
|
||||
"analysis": [
|
||||
{
|
||||
"@id": "SentimentAnalysisExample",
|
||||
"@type": "marl:SentimentAnalysis",
|
||||
"dc:language": "en",
|
||||
"marl:maxPolarityValue": 10.0,
|
||||
"marl:minPolarityValue": 0.0
|
||||
}
|
||||
],
|
||||
"domain": "wndomains:electronics",
|
||||
"entries": [
|
||||
{
|
||||
"opinions": [
|
||||
{
|
||||
"prov:generatedBy": "SentimentAnalysisExample",
|
||||
"marl:polarityValue": 7.8,
|
||||
"marl:hasPolarity": "marl:Positive",
|
||||
"marl:describesObject": "http://www.gsi.dit.upm.es",
|
||||
}
|
||||
],
|
||||
"nif:isString": "I love GSI",
|
||||
"strings": [
|
||||
{
|
||||
"nif:anchorOf": "GSI",
|
||||
"nif:taIdentRef": "http://www.gsi.dit.upm.es"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
:query i input: No default. Depends on informat and intype
|
||||
:query f informat: one of `turtle` (default), `text`, `json-ld`
|
||||
:query t intype: one of `direct` (default), `url`
|
||||
:query o outformat: one of `turtle` (default), `text`, `json-ld`
|
||||
:query p prefix: prefix for the URIs
|
||||
:query algo algorithm: algorithm/plugin to use for the analysis. For a list of options, see :http:get:`/api/plugins`. If not provided, the default plugin will be used (:http:get:`/api/plugins/default`).
|
||||
|
||||
:reqheader Accept: the response content type depends on
|
||||
:mailheader:`Accept` header
|
||||
:resheader Content-Type: this depends on :mailheader:`Accept`
|
||||
header of request
|
||||
:statuscode 200: no error
|
||||
:statuscode 404: service not found
|
||||
|
||||
.. http:post:: /api
|
||||
|
||||
The same as :http:get:`/api`.
|
||||
|
||||
.. http:get:: /api/plugins
|
||||
|
||||
Returns a list of installed plugins.
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
GET /api/plugins HTTP/1.1
|
||||
Host: localhost
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
{
|
||||
"@context": {
|
||||
...
|
||||
},
|
||||
"sentiment140": {
|
||||
"name": "sentiment140",
|
||||
"is_activated": true,
|
||||
"version": "0.1",
|
||||
"extra_params": {
|
||||
"@id": "extra_params_sentiment140_0.1",
|
||||
"language": {
|
||||
"required": false,
|
||||
"@id": "lang_sentiment140",
|
||||
"options": [
|
||||
"es",
|
||||
"en",
|
||||
"auto"
|
||||
],
|
||||
"aliases": [
|
||||
"language",
|
||||
"l"
|
||||
]
|
||||
}
|
||||
},
|
||||
"@id": "sentiment140_0.1"
|
||||
},
|
||||
"rand": {
|
||||
"name": "rand",
|
||||
"is_activated": true,
|
||||
"version": "0.1",
|
||||
"extra_params": {
|
||||
"@id": "extra_params_rand_0.1",
|
||||
"language": {
|
||||
"required": false,
|
||||
"@id": "lang_rand",
|
||||
"options": [
|
||||
"es",
|
||||
"en",
|
||||
"auto"
|
||||
],
|
||||
"aliases": [
|
||||
"language",
|
||||
"l"
|
||||
]
|
||||
}
|
||||
},
|
||||
"@id": "rand_0.1"
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
.. http:get:: /api/plugins/<pluginname>
|
||||
|
||||
Returns the information of a specific plugin.
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
GET /api/plugins/rand HTTP/1.1
|
||||
Host: localhost
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
{
|
||||
"@id": "rand_0.1",
|
||||
"extra_params": {
|
||||
"@id": "extra_params_rand_0.1",
|
||||
"language": {
|
||||
"@id": "lang_rand",
|
||||
"aliases": [
|
||||
"language",
|
||||
"l"
|
||||
],
|
||||
"options": [
|
||||
"es",
|
||||
"en",
|
||||
"auto"
|
||||
],
|
||||
"required": false
|
||||
}
|
||||
},
|
||||
"is_activated": true,
|
||||
"name": "rand",
|
||||
"version": "0.1"
|
||||
}
|
||||
|
||||
|
||||
.. http:get:: /api/plugins/default
|
||||
|
||||
Return the information about the default plugin.
|
||||
|
||||
.. http:get:: /api/plugins/<pluginname>/{de}activate
|
||||
|
||||
{De}activate a plugin.
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
GET /api/plugins/rand/deactivate HTTP/1.1
|
||||
Host: localhost
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
{
|
||||
"@context": {},
|
||||
"message": "Ok"
|
||||
}
|
272
docs/conf.py
@@ -1,272 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Senpy documentation build configuration file, created by
|
||||
# sphinx-quickstart on Tue Feb 24 08:57:32 2015.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its
|
||||
# containing dir.
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
#sys.path.insert(0, os.path.abspath('.'))
|
||||
|
||||
# -- General configuration ------------------------------------------------
|
||||
|
||||
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
#needs_sphinx = '1.0'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = [
|
||||
'sphinx.ext.autodoc',
|
||||
'sphinx.ext.doctest',
|
||||
'sphinx.ext.todo',
|
||||
'sphinxcontrib.httpdomain',
|
||||
'sphinx.ext.coverage',
|
||||
]
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = '.rst'
|
||||
|
||||
# The encoding of source files.
|
||||
#source_encoding = 'utf-8-sig'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'Senpy'
|
||||
copyright = u'2015, J. Fernando Sánchez'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
version = '0.4'
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = '0.4'
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#language = None
|
||||
|
||||
# There are two options for replacing |today|: either, you set today to some
|
||||
# non-false value, then it is used:
|
||||
#today = ''
|
||||
# Else, today_fmt is used as the format for a strftime call.
|
||||
#today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
exclude_patterns = ['_build']
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all
|
||||
# documents.
|
||||
#default_role = None
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
#add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
#add_module_names = True
|
||||
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
#show_authors = False
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
#modindex_common_prefix = []
|
||||
|
||||
# If true, keep warnings as "system message" paragraphs in the built documents.
|
||||
#keep_warnings = False
|
||||
|
||||
|
||||
# -- Options for HTML output ----------------------------------------------
|
||||
if not on_rtd: # only import and set the theme if we're building docs locally
|
||||
import sphinx_rtd_theme
|
||||
html_theme = 'sphinx_rtd_theme'
|
||||
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
|
||||
|
||||
else:
|
||||
html_theme = 'default'
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
#html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
#html_theme_path = []
|
||||
|
||||
# The name for this set of Sphinx documents. If None, it defaults to
|
||||
# "<project> v<release> documentation".
|
||||
#html_title = None
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
#html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
#html_logo = None
|
||||
|
||||
# The name of an image file (within the static path) to use as favicon of the
|
||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
#html_favicon = None
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
|
||||
# Add any extra paths that contain custom files (such as robots.txt or
|
||||
# .htaccess) here, relative to this directory. These files are copied
|
||||
# directly to the root of the documentation.
|
||||
#html_extra_path = []
|
||||
|
||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||
# using the given strftime format.
|
||||
#html_last_updated_fmt = '%b %d, %Y'
|
||||
|
||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||
# typographically correct entities.
|
||||
#html_use_smartypants = True
|
||||
|
||||
# Custom sidebar templates, maps document names to template names.
|
||||
#html_sidebars = {}
|
||||
|
||||
# Additional templates that should be rendered to pages, maps page names to
|
||||
# template names.
|
||||
#html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
#html_domain_indices = True
|
||||
|
||||
# If false, no index is generated.
|
||||
#html_use_index = True
|
||||
|
||||
# If true, the index is split into individual pages for each letter.
|
||||
#html_split_index = False
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
#html_show_sourcelink = True
|
||||
|
||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||
#html_show_sphinx = True
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
#html_show_copyright = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
#html_use_opensearch = ''
|
||||
|
||||
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
#html_file_suffix = None
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'Senpydoc'
|
||||
|
||||
|
||||
# -- Options for LaTeX output ---------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
#'papersize': 'letterpaper',
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#'preamble': '',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
('index', 'Senpy.tex', u'Senpy Documentation',
|
||||
u'J. Fernando Sánchez', 'manual'),
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
# the title page.
|
||||
#latex_logo = None
|
||||
|
||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||
# not chapters.
|
||||
#latex_use_parts = False
|
||||
|
||||
# If true, show page references after internal links.
|
||||
#latex_show_pagerefs = False
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#latex_show_urls = False
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#latex_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#latex_domain_indices = True
|
||||
|
||||
|
||||
# -- Options for manual page output ---------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
('index', 'senpy', u'Senpy Documentation',
|
||||
[u'J. Fernando Sánchez'], 1)
|
||||
]
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#man_show_urls = False
|
||||
|
||||
|
||||
# -- Options for Texinfo output -------------------------------------------
|
||||
|
||||
# Grouping the document tree into Texinfo files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
('index', 'Senpy', u'Senpy Documentation',
|
||||
u'J. Fernando Sánchez', 'Senpy', 'One line description of project.',
|
||||
'Miscellaneous'),
|
||||
]
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#texinfo_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#texinfo_domain_indices = True
|
||||
|
||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||
#texinfo_show_urls = 'footnote'
|
||||
|
||||
# If true, do not generate a @detailmenu in the "Top" node's menu.
|
||||
#texinfo_no_detailmenu = False
|
@@ -1,16 +0,0 @@
.. Senpy documentation master file, created by
   sphinx-quickstart on Tue Feb 24 08:57:32 2015.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

Welcome to Senpy's documentation!
=================================

Contents:

.. toctree::
   installation
   usage
   api
   plugins
   :maxdepth: 2
@@ -1,27 +0,0 @@
Installation
------------
The stable version can be installed in three ways.

Through PIP
***********

.. code:: bash

   pip install --user senpy


Alternatively, you can use the development version:

.. code:: bash

   git clone git@github.com:gsi-upm/senpy
   cd senpy
   pip install --user .

If you want to install senpy globally, use sudo instead of the ``--user`` flag.

Docker Image
************
Build the image or use the pre-built one: ``docker run -ti -p 5000:5000 balkian/senpy --host 0.0.0.0 --default-plugins``.

To add custom plugins, add a volume and tell senpy where to find the plugins: ``docker run -ti -p 5000:5000 -v <PATH OF PLUGINS>:/plugins balkian/senpy --host 0.0.0.0 --default-plugins -f /plugins``
@@ -1,48 +0,0 @@
Developing new plugins
----------------------

Plugins Interface
=================

The basic methods in a plugin are:

* __init__
* activate: used to load memory-hungry resources
* deactivate: used to free up resources

Plugins are loaded asynchronously, so don't worry if the activate method takes too long. The plugin will be marked as activated once it has finished executing the method.

F.A.Q.
======
If I'm using a classifier, where should I train it?
???????????????????????????????????????????????????

Training a classifier can be time consuming. To avoid running the training unnecessarily, you can use ShelfMixin to store the classifier. For instance:

.. code:: python

   from senpy.plugins import ShelfMixin, SenpyPlugin

   class MyPlugin(ShelfMixin, SenpyPlugin):
       def train(self):
           ''' Code to train the classifier
           '''
           # Here goes the code
           # ...
           return classifier

       def activate(self):
           if 'classifier' not in self.sh:
               classifier = self.train()
               self.sh['classifier'] = classifier
           self.classifier = self.sh['classifier']

       def deactivate(self):
           self.close()

You can specify a 'shelf_file' in your .senpy file. By default the ShelfMixin creates a file based on the plugin name and stores it in that plugin's folder.

Where can I find more code examples?
????????????????????????????????????

See: `<http://github.com/gsi-upm/senpy-plugins-community>`_.
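To make the `shelf_file` note above concrete, below is a minimal sketch of what a plugin's `.senpy` info file might contain, assuming the JSON-based format that `Senpy._load_plugin` parses later in this diff (a `name` and a `module` key). The `shelf_file` key and its value are illustrative assumptions, not a documented format.

```python
import json

# Hypothetical contents of plugins/myplugin/myplugin.senpy (illustration only).
# "name" and "module" are the keys read by Senpy._load_plugin in this changeset;
# "shelf_file" stands in for the optional shelf location mentioned in the F.A.Q.
SENPY_INFO = """
{
    "name": "myplugin",
    "module": "myplugin",
    "shelf_file": "myplugin.shelf"
}
"""

info = json.loads(SENPY_INFO)
print(info["name"], info["module"], info.get("shelf_file"))
```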
@@ -1 +0,0 @@
sphinxcontrib-httpdomain>=1.4
@@ -1,20 +0,0 @@
Usage
-----

The easiest and recommended way is to just use the command-line tool to load your plugins and launch the server.

.. code:: bash

   senpy

Or, alternatively:

.. code:: bash

   python -m senpy


This will create a server with any modules found in the current path.
For more options, see the `--help` page.

Alternatively, you can use the modules included in senpy to build your own application.
Binary image changes:
img/eu-flag.jpg: deleted (5.6 KiB)
img/gsi.png: deleted (5.8 KiB)
img/header.png: deleted (208 KiB)
img/logo.svg: deleted (2728 lines, 180 KiB)
img/me.png: deleted (25 KiB)
Two further deletions (81 KiB and 42 KiB) and one modified image (8.0 KiB before and after) are listed without file names.
plugins/sentiment140/__init__.py (new file), 45 lines
@@ -0,0 +1,45 @@
import requests
import json

from senpy.plugins import SentimentPlugin
from senpy.models import Response, Opinion, Entry


class Sentiment140Plugin(SentimentPlugin):
    EXTRA_PARAMS = {
        "language": {"aliases": ["language", "l"],
                     "required": False,
                     "options": ["es", "en", "auto"],
                     }
    }

    def __init__(self, **kwargs):
        super(Sentiment140Plugin, self).__init__(name="sentiment140",
                                                 version="2.0",
                                                 extraparams=self.EXTRA_PARAMS,
                                                 **kwargs)

    def analyse(self, **params):
        lang = params.get("language", "auto")
        res = requests.post("http://www.sentiment140.com/api/bulkClassifyJson",
                            json.dumps({"language": lang,
                                        "data": [{"text": params["input"]}]
                                        }
                                       )
                            )

        response = Response()
        polarity_value = int(res.json()["data"][0]["polarity"]) * 25
        polarity = "marl:Neutral"
        if polarity_value > 50:
            polarity = "marl:Positive"
        elif polarity_value < 50:
            polarity = "marl:Negative"
        entry = Entry(text=params["input"])
        opinion = Opinion(polarity=polarity, polarity_value=polarity_value)
        entry.opinions.append(opinion)
        entry.language = lang
        response.entries.append(entry)
        return response


plugin = Sentiment140Plugin()
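As a usage note (not part of the commit), the module-level `plugin` instance created above could be driven directly. The sketch below assumes network access to sentiment140.com and relies only on the `analyse()` signature shown, which takes keyword parameters with `input` required and `language` optional.

```python
# Illustrative driver for the Sentiment140Plugin defined above; appending this
# to the module (or importing `plugin` from it) would run one analysis.
# It calls the external sentiment140.com API, so treat it as a sketch, not a test.
if __name__ == '__main__':
    result = plugin.analyse(input="I love GSI", language="en")
    # analyse() returns a senpy Response carrying marl-style opinions per entry
    print(result)
```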
plugins/yapsy_plugin/prueba.py (new file), 45 lines
@@ -0,0 +1,45 @@
import requests
import json

from senpy.plugins import SentimentPlugin
from senpy.models import Response, Opinion, Entry


class Sentiment140Plugin(SentimentPlugin):
    EXTRA_PARAMS = {
        "language": {"aliases": ["language", "l"],
                     "required": False,
                     "options": ["es", "en", "auto"],
                     }
    }

    def __init__(self, **kwargs):
        super(Sentiment140Plugin, self).__init__(name="sentiment140",
                                                 version="2.0",
                                                 extraparams=self.EXTRA_PARAMS,
                                                 **kwargs)

    def analyse(self, **params):
        lang = params.get("language", "auto")
        res = requests.post("http://www.sentiment140.com/api/bulkClassifyJson",
                            json.dumps({"language": lang,
                                        "data": [{"text": params["input"]}]
                                        }
                                       )
                            )

        response = Response()
        polarity_value = int(res.json()["data"][0]["polarity"]) * 25
        polarity = "marl:Neutral"
        if polarity_value > 50:
            polarity = "marl:Positive"
        elif polarity_value < 50:
            polarity = "marl:Negative"
        entry = Entry(text=params["input"])
        opinion = Opinion(polarity=polarity, polarity_value=polarity_value)
        entry.opinions.append(opinion)
        entry.language = lang
        response.entries.append(entry)
        return response


plugin = Sentiment140Plugin()
plugins/yapsy_plugin/prueba.senpy (new file), 8 lines
@@ -0,0 +1,8 @@
[Core]
Name = Test plugin of Yapsy
Module = prueba
[Documentation]
Description = What my plugin broadly does
Author = My very own name
Version = 0.1
Website = My very own website
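The file above is a Yapsy plugin-info file. As a rough sketch of how such a file gets picked up, the snippet below mirrors the `PluginManager` setup that appears later in this diff in `senpy/extensions.py` (`plugin_info_ext="senpy"`, `setPluginPlaces`, locate and load). The `plugins` directory is an example path, not something this changeset mandates.

```python
# Rough sketch of Yapsy discovering a plugin described by a .senpy info file,
# mirroring the manager configured in senpy/extensions.py in this changeset.
from yapsy.PluginManager import PluginManager

manager = PluginManager(plugin_info_ext="senpy")
manager.setPluginPlaces(["plugins"])   # example search path
manager.locatePlugins()
manager.loadPlugins()

for info in manager.getAllPlugins():
    # info.plugin_object is the object exported by the module named in [Core] Module
    print(info.name, info.plugin_object)
```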
@@ -1,7 +1,5 @@
Flask>=0.10.1
gunicorn>=19.0.0
requests>=2.4.1
GitPython>=0.3.2.RC1
gevent>=1.0.1
PyLD>=0.6.5
Flask-Testing>=0.4.2
Flask==0.10.1
gunicorn==19.0.0
requests==2.4.1
GitPython==0.3.2.RC1
Yapsy>=1.10.423
@@ -17,3 +17,9 @@
"""
Sentiment analysis server in Python
"""

import extensions
import blueprints
import plugins

__version__ = "0.2.8"
@@ -1,81 +1,7 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2014 J. Fernando Sánchez Rada - Grupo de Sistemas Inteligentes
|
||||
# DIT, UPM
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""
|
||||
Senpy is a modular sentiment analysis server. This script runs an instance of
|
||||
the server.
|
||||
|
||||
"""
|
||||
|
||||
from flask import Flask
|
||||
from senpy.extensions import Senpy
|
||||
from gevent.wsgi import WSGIServer
|
||||
from gevent.monkey import patch_all
|
||||
import gevent
|
||||
import logging
|
||||
import os
|
||||
import argparse
|
||||
|
||||
patch_all(thread=False)
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(description='Run a Senpy server')
|
||||
parser.add_argument('--level',
|
||||
"-l",
|
||||
metavar="logging_level",
|
||||
type=str,
|
||||
default="INFO",
|
||||
help='Logging level')
|
||||
parser.add_argument('--debug',
|
||||
"-d",
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='Run the application in debug mode')
|
||||
parser.add_argument('--default-plugins',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='Load the default plugins')
|
||||
parser.add_argument('--host',
|
||||
type=str,
|
||||
default="127.0.0.1",
|
||||
help='Use 0.0.0.0 to accept requests from any host.')
|
||||
parser.add_argument('--port',
|
||||
'-p',
|
||||
type=int,
|
||||
default=5000,
|
||||
help='Port to listen on.')
|
||||
parser.add_argument('--plugins-folder',
|
||||
'-f',
|
||||
type=str,
|
||||
default="plugins",
|
||||
help='Where to look for plugins.')
|
||||
args = parser.parse_args()
|
||||
logging.basicConfig(level=getattr(logging, args.level))
|
||||
app = Flask(__name__)
|
||||
app.debug = args.debug
|
||||
sp = Senpy(app, args.plugins_folder, default_plugins=args.default_plugins)
|
||||
sp.activate_all()
|
||||
http_server = WSGIServer((args.host, args.port), app)
|
||||
try:
|
||||
print("Server running on port %s:%d. Ctrl+C to quit" % (args.host,
|
||||
args.port))
|
||||
http_server.serve_forever()
|
||||
except KeyboardInterrupt:
|
||||
http_server.stop()
|
||||
print("Bye!")
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
from extensions import Senpy
|
||||
app = Flask(__name__)
|
||||
sp = Senpy()
|
||||
sp.init_app(app)
|
||||
app.debug = True
|
||||
app.run()
|
||||
|
@@ -17,35 +17,19 @@
|
||||
"""
|
||||
Blueprints for Senpy
|
||||
"""
|
||||
from flask import Blueprint, request, current_app
|
||||
from .models import Error, Response, Leaf
|
||||
|
||||
import json
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
from flask import Blueprint, request, jsonify, current_app
|
||||
|
||||
nif_blueprint = Blueprint("NIF Sentiment Analysis Server", __name__)
|
||||
|
||||
BASIC_PARAMS = {
|
||||
"algorithm": {
|
||||
"aliases": ["algorithm", "a", "algo"],
|
||||
"required": False,
|
||||
},
|
||||
"inHeaders": {
|
||||
"aliases": ["inHeaders", "headers"],
|
||||
"required": True,
|
||||
"default": "0"
|
||||
}
|
||||
}
|
||||
|
||||
LIST_PARAMS = {
|
||||
"params": {
|
||||
"aliases": ["params", "with_params"],
|
||||
"required": False,
|
||||
"default": "0"
|
||||
},
|
||||
"algorithm": {"aliases": ["algorithm", "a", "algo"],
|
||||
"required": False,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@@ -60,40 +44,34 @@ def get_params(req, params=BASIC_PARAMS):
|
||||
outdict = {}
|
||||
wrong_params = {}
|
||||
for param, options in params.iteritems():
|
||||
if param[0] != "@": # Exclude json-ld properties
|
||||
logger.debug("Param: %s - Options: %s", param, options)
|
||||
for alias in options["aliases"]:
|
||||
if alias in indict:
|
||||
outdict[param] = indict[alias]
|
||||
if param not in outdict:
|
||||
if options.get("required", False) and "default" not in options:
|
||||
wrong_params[param] = params[param]
|
||||
else:
|
||||
if "default" in options:
|
||||
outdict[param] = options["default"]
|
||||
for alias in options["aliases"]:
|
||||
if alias in indict:
|
||||
outdict[param] = indict[alias]
|
||||
if param not in outdict:
|
||||
if options.get("required", False):
|
||||
wrong_params[param] = params[param]
|
||||
else:
|
||||
if "options" in params[param] and \
|
||||
outdict[param] not in params[param]["options"]:
|
||||
wrong_params[param] = params[param]
|
||||
if "default" in options:
|
||||
outdict[param] = options["default"]
|
||||
else:
|
||||
if "options" in params[param] and outdict[param] not in params[param]["options"]:
|
||||
wrong_params[param] = params[param]
|
||||
if wrong_params:
|
||||
message = Error({"status": 404,
|
||||
"message": "Missing or invalid parameters",
|
||||
"parameters": outdict,
|
||||
"errors": {param: error for param, error in
|
||||
wrong_params.iteritems()}
|
||||
})
|
||||
raise ValueError(message)
|
||||
message = {"status": "failed",
|
||||
"message": "Missing or invalid parameters",
|
||||
"parameters": outdict,
|
||||
"errors": {param: error for param, error in wrong_params.iteritems()}
|
||||
}
|
||||
raise ValueError(json.dumps(message))
|
||||
return outdict
|
||||
|
||||
|
||||
def basic_analysis(params):
|
||||
response = {"@context":
|
||||
[("http://demos.gsi.dit.upm.es/"
|
||||
"eurosentiment/static/context.jsonld"),
|
||||
{
|
||||
"@base": "{}#".format(request.url.encode('utf-8'))
|
||||
}
|
||||
],
|
||||
response = {"@context": ["http://demos.gsi.dit.upm.es/eurosentiment/static/context.jsonld",
|
||||
{
|
||||
"@base": "{}#".format(request.url.encode('utf-8'))
|
||||
}
|
||||
],
|
||||
"analysis": [{"@type": "marl:SentimentAnalysis"}],
|
||||
"entries": []
|
||||
}
|
||||
@@ -110,28 +88,21 @@ def basic_analysis(params):
|
||||
@nif_blueprint.route('/', methods=['POST', 'GET'])
|
||||
def home():
|
||||
try:
|
||||
params = get_params(request)
|
||||
algo = params.get("algorithm", None)
|
||||
algo = get_params(request).get("algorithm", None)
|
||||
specific_params = current_app.senpy.parameters(algo)
|
||||
logger.debug(
|
||||
"Specific params: %s", json.dumps(specific_params, indent=4))
|
||||
params.update(get_params(request, specific_params))
|
||||
params = get_params(request, specific_params)
|
||||
response = current_app.senpy.analyse(**params)
|
||||
in_headers = params["inHeaders"] != "0"
|
||||
return response.flask(in_headers=in_headers)
|
||||
return jsonify(response)
|
||||
except ValueError as ex:
|
||||
return ex.message.flask()
|
||||
return ex.message
|
||||
except Exception as ex:
|
||||
return jsonify(status="400", message=ex.message)
|
||||
|
||||
|
||||
@nif_blueprint.route("/default")
|
||||
def default():
|
||||
# return current_app.senpy.default_plugin
|
||||
plug = current_app.senpy.default_plugin
|
||||
if plug:
|
||||
return plugins(action="list", plugin=plug.name)
|
||||
else:
|
||||
error = Error(status=404, message="No plugins found")
|
||||
return error.flask()
|
||||
return current_app.senpy.default_plugin
|
||||
#return plugins(action="list", plugin=current_app.senpy.default_algorithm)
|
||||
|
||||
|
||||
@nif_blueprint.route('/plugins/', methods=['POST', 'GET'])
|
||||
@@ -139,28 +110,26 @@ def default():
|
||||
@nif_blueprint.route('/plugins/<plugin>/<action>', methods=['POST', 'GET'])
|
||||
def plugins(plugin=None, action="list"):
|
||||
filt = {}
|
||||
sp = current_app.senpy
|
||||
if plugin:
|
||||
filt["name"] = plugin
|
||||
plugs = sp.filter_plugins(**filt)
|
||||
plugs = current_app.senpy.filter_plugins(**filt)
|
||||
if plugin and not plugs:
|
||||
return "Plugin not found", 400
|
||||
if action == "list":
|
||||
with_params = get_params(request, LIST_PARAMS)["params"] == "1"
|
||||
in_headers = get_params(request, BASIC_PARAMS)["inHeaders"] != "0"
|
||||
if plugin:
|
||||
dic = plugs[plugin]
|
||||
else:
|
||||
dic = Response(
|
||||
{plug: plugs[plug].jsonld(with_params) for plug in plugs},
|
||||
frame={})
|
||||
return dic.flask(in_headers=in_headers)
|
||||
method = "{}_plugin".format(action)
|
||||
if(hasattr(sp, method)):
|
||||
getattr(sp, method)(plugin)
|
||||
return Leaf(message="Ok").flask()
|
||||
with_params = request.args.get("params", "") == "1"
|
||||
dic = {plug: plugs[plug].jsonable(with_params) for plug in plugs}
|
||||
return jsonify(dic)
|
||||
if action == "disable":
|
||||
current_app.senpy.deactivate_plugin(plugin)
|
||||
return "Ok"
|
||||
elif action == "enable":
|
||||
current_app.senpy.activate_plugin(plugin)
|
||||
return "Ok"
|
||||
elif action == "reload":
|
||||
current_app.senpy.reload_plugin(plugin)
|
||||
return "Ok"
|
||||
else:
|
||||
return Error("action '{}' not allowed".format(action)).flask()
|
||||
return "action '{}' not allowed".format(action), 400
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
@@ -8,35 +8,31 @@
|
||||
"nif": "http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#",
|
||||
"onyx": "http://www.gsi.dit.upm.es/ontologies/onyx/ns#",
|
||||
"emotions": {
|
||||
"@container": "@set",
|
||||
"@id": "onyx:hasEmotionSet"
|
||||
"@id": "onyx:hasEmotionSet",
|
||||
"@type": "onyx:EmotionSet"
|
||||
},
|
||||
"opinions": {
|
||||
"@container": "@set",
|
||||
"@id": "marl:hasOpinion"
|
||||
"@container": "@list",
|
||||
"@id": "marl:hasOpinion",
|
||||
"@type": "marl:Opinion"
|
||||
},
|
||||
"prov": "http://www.w3.org/ns/prov#",
|
||||
"rdfs": "http://www.w3.org/2000/01/rdf-schema#",
|
||||
"analysis": {
|
||||
"@container": "@set",
|
||||
"@id": "prov:wasInformedBy"
|
||||
},
|
||||
"entries": {
|
||||
"@container": "@set",
|
||||
"@id": "prov:generated"
|
||||
},
|
||||
"strings": {
|
||||
"@container": "@set",
|
||||
"@reverse": "nif:hasContext"
|
||||
"@reverse": "nif:hasContext",
|
||||
"@type": "nif:String"
|
||||
},
|
||||
"date":
|
||||
{
|
||||
"@id": "dc:date",
|
||||
"@type": "xsd:dateTime"
|
||||
},
|
||||
"text": { "@id": "nif:isString" },
|
||||
"wnaffect": "http://www.gsi.dit.upm.es/ontologies/wnaffect#",
|
||||
"xsd": "http://www.w3.org/2001/XMLSchema#",
|
||||
"senpy": "http://www.gsi.dit.upm.es/ontologies/senpy/ns#",
|
||||
"@vocab": "http://www.gsi.dit.upm.es/ontologies/senpy/ns#"
|
||||
"xsd": "http://www.w3.org/2001/XMLSchema#"
|
||||
}
|
||||
|
@@ -1,43 +1,37 @@
|
||||
"""
|
||||
"""
|
||||
import gevent
|
||||
from gevent import monkey
|
||||
monkey.patch_all()
|
||||
|
||||
from .plugins import SenpyPlugin, SentimentPlugin, EmotionPlugin
|
||||
from .models import Error
|
||||
from .blueprints import nif_blueprint
|
||||
|
||||
from git import Repo, InvalidGitRepositoryError
|
||||
from functools import partial
|
||||
|
||||
import os
|
||||
import fnmatch
|
||||
import inspect
|
||||
import sys
|
||||
import imp
|
||||
import logging
|
||||
import traceback
|
||||
import gevent
|
||||
import json
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
from .plugins import SentimentPlugin, EmotionPlugin
|
||||
from yapsy.PluginFileLocator import PluginFileLocator, PluginFileAnalyzerWithInfoFile
|
||||
from yapsy.PluginManager import PluginManager
|
||||
|
||||
try:
|
||||
from flask import _app_ctx_stack as stack
|
||||
except ImportError:
|
||||
from flask import _request_ctx_stack as stack
|
||||
|
||||
from .blueprints import nif_blueprint
|
||||
from git import Repo, InvalidGitRepositoryError
|
||||
|
||||
|
||||
class Senpy(object):
|
||||
|
||||
""" Default Senpy extension for Flask """
|
||||
|
||||
def __init__(self, app=None, plugin_folder="plugins", default_plugins=False):
|
||||
def __init__(self, app=None, plugin_folder="plugins"):
|
||||
self.app = app
|
||||
base_folder = os.path.join(os.path.dirname(__file__), "plugins")
|
||||
|
||||
self._search_folders = set()
|
||||
self._outdated = True
|
||||
|
||||
self.add_folder(plugin_folder)
|
||||
if default_plugins:
|
||||
base_folder = os.path.join(os.path.dirname(__file__), "plugins")
|
||||
self.add_folder(base_folder)
|
||||
for folder in (base_folder, plugin_folder):
|
||||
self.add_folder(folder)
|
||||
|
||||
if app is not None:
|
||||
self.init_app(app)
|
||||
@@ -73,85 +67,40 @@ class Senpy(object):
|
||||
if "algorithm" in params:
|
||||
algo = params["algorithm"]
|
||||
elif self.plugins:
|
||||
algo = self.default_plugin and self.default_plugin.name
|
||||
if not algo:
|
||||
return Error(status=404,
|
||||
message=("No plugins found."
|
||||
" Please install one.").format(algo))
|
||||
algo = self.default_plugin
|
||||
if algo in self.plugins:
|
||||
if self.plugins[algo].is_activated:
|
||||
plug = self.plugins[algo]
|
||||
resp = plug.analyse(**params)
|
||||
resp.analysis.append(plug)
|
||||
logger.debug("Returning analysis result: {}".format(resp))
|
||||
resp.analysis.append(plug.jsonable())
|
||||
return resp
|
||||
else:
|
||||
logger.debug("Plugin not activated: {}".format(algo))
|
||||
return Error(status=400,
|
||||
message=("The algorithm '{}'"
|
||||
" is not activated yet").format(algo))
|
||||
logger.debug("Plugin not activated: {}".format(algo))
|
||||
else:
|
||||
logger.debug(("The algorithm '{}' is not valid\n"
|
||||
"Valid algorithms: {}").format(algo,
|
||||
self.plugins.keys()))
|
||||
return Error(status=404,
|
||||
message="The algorithm '{}' is not valid"
|
||||
.format(algo))
|
||||
logger.debug("The algorithm '{}' is not valid\nValid algorithms: {}".format(algo, self.plugins.keys()))
|
||||
return {"status": 400, "message": "The algorithm '{}' is not valid".format(algo)}
|
||||
|
||||
def activate_all(self):
|
||||
for plug in self.plugins.values():
|
||||
plug.activate()
|
||||
|
||||
@property
|
||||
def default_plugin(self):
|
||||
candidates = self.filter_plugins(is_activated=True)
|
||||
if len(candidates) > 0:
|
||||
candidate = candidates.values()[0]
|
||||
candidate = candidates.keys()[0]
|
||||
logger.debug("Default: {}".format(candidate))
|
||||
return candidate
|
||||
else:
|
||||
return None
|
||||
|
||||
def parameters(self, algo):
|
||||
return getattr(self.plugins.get(algo) or self.default_plugin,
|
||||
"params",
|
||||
{})
|
||||
return getattr(self.plugins.get(algo or self.default_plugin), "params", {})
|
||||
|
||||
def activate_all(self, sync=False):
|
||||
ps = []
|
||||
for plug in self.plugins.keys():
|
||||
ps.append(self.activate_plugin(plug, sync=sync))
|
||||
return ps
|
||||
def activate_plugin(self, plugin):
|
||||
self.plugins[plugin].activate()
|
||||
|
||||
def deactivate_all(self, sync=False):
|
||||
ps = []
|
||||
for plug in self.plugins.keys():
|
||||
ps.append(self.deactivate_plugin(plug, sync=sync))
|
||||
return ps
|
||||
|
||||
def _set_active_plugin(self, plugin_name, active=True, *args, **kwargs):
|
||||
self.plugins[plugin_name].is_activated = active
|
||||
|
||||
def activate_plugin(self, plugin_name, sync=False):
|
||||
plugin = self.plugins[plugin_name]
|
||||
def act():
|
||||
try:
|
||||
plugin.activate()
|
||||
except Exception as ex:
|
||||
logger.error("Error activating plugin {}: {}".format(plugin.name,
|
||||
ex))
|
||||
logger.error("Trace: {}".format(traceback.format_exc()))
|
||||
th = gevent.spawn(act)
|
||||
th.link_value(partial(self._set_active_plugin, plugin_name, True))
|
||||
if sync:
|
||||
th.join()
|
||||
else:
|
||||
return th
|
||||
|
||||
def deactivate_plugin(self, plugin_name, sync=False):
|
||||
plugin = self.plugins[plugin_name]
|
||||
th = gevent.spawn(plugin.deactivate)
|
||||
th.link_value(partial(self._set_active_plugin, plugin_name, False))
|
||||
if sync:
|
||||
th.join()
|
||||
else:
|
||||
return th
|
||||
def deactivate_plugin(self, plugin):
|
||||
self.plugins[plugin].deactivate()
|
||||
|
||||
def reload_plugin(self, plugin):
|
||||
logger.debug("Reloading {}".format(plugin))
|
||||
@@ -161,50 +110,38 @@ class Senpy(object):
|
||||
self.plugins[nplug.name] = nplug
|
||||
|
||||
@staticmethod
|
||||
def _load_plugin(root, filename):
|
||||
logger.debug("Loading plugin: {}".format(filename))
|
||||
fpath = os.path.join(root, filename)
|
||||
with open(fpath, 'r') as f:
|
||||
info = json.load(f)
|
||||
logger.debug("Info: {}".format(info))
|
||||
sys.path.append(root)
|
||||
module = info["module"]
|
||||
name = info["name"]
|
||||
(fp, pathname, desc) = imp.find_module(module, [root, ])
|
||||
def _load_plugin(plugin, search_folder, is_activated=True):
|
||||
logger.debug("Loading plugins")
|
||||
sys.path.append(search_folder)
|
||||
(fp, pathname, desc) = imp.find_module(plugin)
|
||||
try:
|
||||
tmp = imp.load_module(module, fp, pathname, desc)
|
||||
sys.path.remove(root)
|
||||
candidate = None
|
||||
for _, obj in inspect.getmembers(tmp):
|
||||
if inspect.isclass(obj) and inspect.getmodule(obj) == tmp:
|
||||
logger.debug(("Found plugin class:"
|
||||
" {}@{}").format(obj, inspect.getmodule(obj))
|
||||
)
|
||||
candidate = obj
|
||||
break
|
||||
if not candidate:
|
||||
logger.debug("No valid plugin for: {}".format(filename))
|
||||
return
|
||||
module = candidate(info=info)
|
||||
tmp = imp.load_module(plugin, fp, pathname, desc).plugin
|
||||
sys.path.remove(search_folder)
|
||||
tmp.path = search_folder
|
||||
try:
|
||||
repo_path = root
|
||||
module._repo = Repo(repo_path)
|
||||
repo_path = os.path.join(search_folder, plugin)
|
||||
tmp.repo = Repo(repo_path)
|
||||
except InvalidGitRepositoryError:
|
||||
module._repo = None
|
||||
tmp.repo = None
|
||||
if not hasattr(tmp, "is_activated"):
|
||||
tmp.is_activated = is_activated
|
||||
tmp.module = plugin
|
||||
except Exception as ex:
|
||||
logger.error("Exception importing {}: {}".format(filename, ex))
|
||||
logger.error("Trace: {}".format(traceback.format_exc()))
|
||||
return None, None
|
||||
return name, module
|
||||
tmp = None
|
||||
logger.debug("Exception importing {}: {}".format(plugin, ex))
|
||||
return tmp
|
||||
|
||||
def _load_plugins(self):
|
||||
plugins = {}
|
||||
for search_folder in self._search_folders:
|
||||
for root, dirnames, filenames in os.walk(search_folder):
|
||||
for filename in fnmatch.filter(filenames, '*.senpy'):
|
||||
name, plugin = self._load_plugin(root, filename)
|
||||
for item in os.listdir(search_folder):
|
||||
if os.path.isdir(os.path.join(search_folder, item)) \
|
||||
and os.path.exists(os.path.join(search_folder,
|
||||
item,
|
||||
"__init__.py")):
|
||||
plugin = self._load_plugin(item, search_folder)
|
||||
if plugin:
|
||||
plugins[name] = plugin
|
||||
plugins[plugin.name] = plugin
|
||||
|
||||
self._outdated = False
|
||||
return plugins
|
||||
@@ -212,12 +149,31 @@ class Senpy(object):
|
||||
def teardown(self, exception):
|
||||
pass
|
||||
|
||||
def enable_all(self):
|
||||
for plugin in self.plugins:
|
||||
self.activate_plugin(plugin)
|
||||
|
||||
@property
|
||||
def manager(self):
|
||||
ctx = stack.top
|
||||
if ctx is not None:
|
||||
if not hasattr(ctx, 'senpy_manager'):
|
||||
logger.debug("Loading manager: %s", self._search_folders)
|
||||
ctx.senpy_manager = PluginManager(plugin_info_ext="senpy")
|
||||
ctx.senpy_manager.getPluginLocator().setPluginPlaces(self._search_folders)
|
||||
ctx.senpy_manager.locatePlugins()
|
||||
ctx.senpy_manager.loadPlugins()
|
||||
self.activate_all()
|
||||
return ctx.senpy_manager
|
||||
|
||||
@property
|
||||
def plugins(self):
|
||||
""" Return the plugins registered for a given application. """
|
||||
if not hasattr(self, 'senpy_plugins') or self._outdated:
|
||||
self.senpy_plugins = self._load_plugins()
|
||||
return self.senpy_plugins
|
||||
ctx = stack.top
|
||||
if ctx is not None:
|
||||
if not hasattr(ctx, 'senpy_plugins') or self._outdated:
|
||||
ctx.senpy_plugins = {p.name:p.plugin_object for p in self.manager.getAllPlugins()}
|
||||
return ctx.senpy_plugins
|
||||
|
||||
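The yapsy-backed property above keeps the PluginManager on the Flask application context; once it is built, listing the discovered plugins is a one-liner (sketch, assuming a Senpy instance named s inside an app context):

for info in s.manager.getAllPlugins():
    print("{}: activated={}".format(info.name, info.plugin_object.is_activated))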
def filter_plugins(self, **kwargs):
|
||||
""" Filter plugins by different criteria """
|
||||
@@ -237,4 +193,4 @@ class Senpy(object):
|
||||
def sentiment_plugins(self):
|
||||
""" Return only the sentiment plugins """
|
||||
return {p: plugin for p, plugin in self.plugins.items() if
|
||||
isinstance(plugin, SentimentPlugin)}
|
||||
isinstance(plugin, SentimentPlugin)}
senpy/models.py (259 changed lines)
@@ -1,252 +1,63 @@
|
||||
import json
|
||||
import os
|
||||
from collections import defaultdict
|
||||
from pyld import jsonld
|
||||
import logging
|
||||
from flask import Response as FlaskResponse
|
||||
|
||||
|
||||
class Leaf(dict):
|
||||
_prefix = None
|
||||
_frame = {}
|
||||
_context = {}
|
||||
class Leaf(defaultdict):
|
||||
def __init__(self, ofclass=list):
|
||||
super(Leaf, self).__init__(ofclass)
|
||||
|
||||
def __init__(self,
|
||||
*args,
|
||||
**kwargs):
|
||||
def __getattr__(self, name):
|
||||
return super(Leaf, self).__getitem__(name)
|
||||
|
||||
id = kwargs.pop("id", None)
|
||||
context = kwargs.pop("context", self._context)
|
||||
vocab = kwargs.pop("vocab", None)
|
||||
prefix = kwargs.pop("prefix", None)
|
||||
frame = kwargs.pop("frame", None)
|
||||
super(Leaf, self).__init__(*args, **kwargs)
|
||||
if context is not None:
|
||||
self.context = context
|
||||
if frame is not None:
|
||||
self._frame = frame
|
||||
self._prefix = prefix
|
||||
self.id = id
|
||||
def __setattr__(self, name, value):
|
||||
self[name] = value
|
||||
|
||||
def __getattr__(self, key):
|
||||
try:
|
||||
return object.__getattr__(self, key)
|
||||
except AttributeError:
|
||||
try:
|
||||
return super(Leaf, self).__getitem__(self._get_key(key))
|
||||
except KeyError:
|
||||
raise AttributeError()
|
||||
|
||||
def __setattr__(self, key, value):
|
||||
try:
|
||||
object.__getattr__(self, key)
|
||||
object.__setattr__(self, key, value)
|
||||
except AttributeError:
|
||||
key = self._get_key(key)
|
||||
if key == "@context":
|
||||
value = self.get_context(value)
|
||||
elif key == "@id":
|
||||
value = self.get_id(value)
|
||||
if key[0] == "_":
|
||||
object.__setattr__(self, key, value)
|
||||
else:
|
||||
if value is None:
|
||||
try:
|
||||
super(Leaf, self).__delitem__(key)
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
super(Leaf, self).__setitem__(key, value)
|
||||
|
||||
def get_id(self, id):
|
||||
"""
|
||||
Get id, dealing with prefixes
|
||||
"""
|
||||
# This is not the most elegant solution to change the @id attribute,
|
||||
# but it is the quickest way to have it included in the dictionary
|
||||
# without extra boilerplate.
|
||||
if id and self._prefix and ":" not in id:
|
||||
return "{}{}".format(self._prefix, id)
|
||||
else:
|
||||
return id
|
||||
|
||||
def __delattr__(self, key):
|
||||
if key in self.__dict__:
|
||||
del self.__dict__[key]
|
||||
else:
|
||||
super(Leaf, self).__delitem__(self._get_key(key))
|
||||
|
||||
def _get_key(self, key):
|
||||
if key[0] == "_":
|
||||
return key
|
||||
elif key in ["context", "id"]:
|
||||
return "@{}".format(key)
|
||||
else:
|
||||
return key
|
||||
|
||||
@staticmethod
|
||||
def get_context(context):
|
||||
if isinstance(context, list):
|
||||
contexts = []
|
||||
for c in context:
|
||||
contexts.append(Response.get_context(c))
|
||||
return contexts
|
||||
elif isinstance(context, dict):
|
||||
return context
|
||||
elif isinstance(context, basestring):
|
||||
try:
|
||||
with open(context) as f:
|
||||
return json.loads(f.read())
|
||||
except IOError:
|
||||
return context
|
||||
|
||||
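In practice the removed Leaf behaves like a dict with attribute sugar; a quick illustration of the key mapping implemented by _get_key and __setattr__ above (a sketch against the removed, prefix-aware class):

from senpy.models import Leaf

entry = Leaf(prefix="http://example.com#")   # hypothetical prefix
entry.id = "Entry0"        # stored as entry["@id"] == "http://example.com#Entry0"
entry.text = "hello"       # ordinary keys pass through unchanged: entry["text"]
entry._private = 42        # leading underscore -> real attribute, not a dict key
entry.text = None          # assigning None deletes the key again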
def compact(self):
|
||||
return jsonld.compact(self, self.get_context(self.context))
|
||||
|
||||
def frame(self, frame=None, options=None):
|
||||
if frame is None:
|
||||
frame = self._frame
|
||||
if options is None:
|
||||
options = {}
|
||||
return jsonld.frame(self, frame, options)
|
||||
|
||||
def jsonld(self, frame=None, options=None,
|
||||
context=None, removeContext=None):
|
||||
if removeContext is None:
|
||||
removeContext = Response._context # Loop?
|
||||
if frame is None:
|
||||
frame = self._frame
|
||||
if context is None:
|
||||
context = self.context
|
||||
else:
|
||||
context = self.get_context(context)
|
||||
# For some reason, this causes errors with pyld
|
||||
# if options is None:
|
||||
# options = {"expandContext": context.copy() }
|
||||
js = self
|
||||
if frame:
|
||||
logging.debug("Framing: %s", json.dumps(self, indent=4))
|
||||
logging.debug("Framing with %s", json.dumps(frame, indent=4))
|
||||
js = jsonld.frame(js, frame, options)
|
||||
logging.debug("Result: %s", json.dumps(js, indent=4))
|
||||
logging.debug("Compacting with %s", json.dumps(context, indent=4))
|
||||
js = jsonld.compact(js, context, options)
|
||||
logging.debug("Result: %s", json.dumps(js, indent=4))
|
||||
if removeContext == context:
|
||||
del js["@context"]
|
||||
return js
|
||||
|
||||
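The jsonld() helper above is essentially pyld's frame/compact pipeline driven by the class-level _frame and _context; the same calls work standalone (toy context, not senpy's real context.jsonld):

from pyld import jsonld

ctx = {"marl": "http://www.gsi.dit.upm.es/ontologies/marl/ns#"}
doc = {"@context": ctx, "marl:hasOpinion": [{"marl:polarityValue": 0.5}]}
expanded = jsonld.expand(doc)              # full IRIs, no context
compacted = jsonld.compact(expanded, ctx)  # short "marl:" names again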
def to_JSON(self, removeContext=None):
|
||||
return json.dumps(self.jsonld(removeContext=removeContext),
|
||||
default=lambda o: o.__dict__,
|
||||
sort_keys=True, indent=4)
|
||||
|
||||
def flask(self,
|
||||
in_headers=False,
|
||||
url="http://demos.gsi.dit.upm.es/senpy/senpy.jsonld"):
|
||||
"""
|
||||
Return the values and error to be used in flask
|
||||
"""
|
||||
js = self.jsonld()
|
||||
headers = None
|
||||
if in_headers:
|
||||
ctx = js["@context"]
|
||||
headers = {
|
||||
"Link": ('<%s>;'
|
||||
'rel="http://www.w3.org/ns/json-ld#context";'
|
||||
' type="application/ld+json"' % url)
|
||||
}
|
||||
del js["@context"]
|
||||
return FlaskResponse(json.dumps(js, indent=4),
|
||||
status=self.get("status", 200),
|
||||
headers=headers,
|
||||
mimetype="application/json")
|
||||
def __delattr__(self, name):
|
||||
return super(Leaf, self).__delitem__(name)
|
||||
|
||||
|
||||
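When in_headers is set, the context is advertised through the standard JSON-LD Link header instead of being embedded, which is what the blueprint tests further down exercise via the inHeaders/headers query parameters. A hypothetical client-side check (host, port and parameter names assumed):

import requests

r = requests.get("http://localhost:5000/", params={"i": "some text", "inHeaders": "1"})
print(r.headers.get("Link"))    # <...senpy.jsonld>; rel="http://www.w3.org/ns/json-ld#context"; ...
print("@context" in r.json())   # False: the context now travels in the header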
class Response(Leaf):
|
||||
_context = Leaf.get_context("{}/context.jsonld".format(
|
||||
os.path.dirname(os.path.realpath(__file__))))
|
||||
_frame = {
|
||||
"@context": _context,
|
||||
"analysis": {
|
||||
"@explicit": True,
|
||||
"maxPolarityValue": {},
|
||||
"minPolarityValue": {},
|
||||
"name": {},
|
||||
"version": {},
|
||||
},
|
||||
"entries": {}
|
||||
}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
context = kwargs.pop("context", None)
|
||||
frame = kwargs.pop("frame", None)
|
||||
def __init__(self, context=None):
|
||||
super(Response, self).__init__()
|
||||
self["analysis"] = []
|
||||
self["entries"] = []
|
||||
if context is None:
|
||||
context = self._context
|
||||
self.context = context
|
||||
super(Response, self).__init__(
|
||||
*args, context=context, frame=frame, **kwargs)
|
||||
if self._frame is not None and "entries" in self._frame:
|
||||
self.analysis = []
|
||||
self.entries = []
|
||||
|
||||
def jsonld(self, frame=None, options=None, context=None, removeContext={}):
|
||||
return super(Response, self).jsonld(frame,
|
||||
options,
|
||||
context,
|
||||
removeContext)
|
||||
context = "{}/context.jsonld".format(os.path.dirname(
|
||||
os.path.realpath(__file__)))
|
||||
if isinstance(context, dict):
|
||||
self["@context"] = context
|
||||
if isinstance(context, str) or isinstance(context, unicode):
|
||||
try:
|
||||
with open(context) as f:
|
||||
self["@context"] = json.loads(f.read())
|
||||
except IOError:
|
||||
self["@context"] = context
|
||||
|
||||
|
||||
class Entry(Leaf):
|
||||
_context = {
|
||||
"@vocab": ("http://persistence.uni-leipzig.org/"
|
||||
"nlp2rdf/ontologies/nif-core#")
|
||||
|
||||
}
|
||||
|
||||
def __init__(self, text=None, emotion_sets=None, opinions=None, **kwargs):
|
||||
super(Entry, self).__init__(**kwargs)
|
||||
if text:
|
||||
self.text = text
|
||||
self.emotionSets = emotion_sets if emotion_sets else []
|
||||
self.opinions = opinions if opinions else []
|
||||
if emotion_sets:
|
||||
self.emotionSets = emotion_sets
|
||||
if opinions:
|
||||
self.opinions = opinions
|
||||
|
||||
|
||||
class Opinion(Leaf):
|
||||
_context = {
|
||||
"@vocab": "http://www.gsi.dit.upm.es/ontologies/marl/ns#"
|
||||
}
|
||||
|
||||
def __init__(self, polarityValue=None, hasPolarity=None, *args, **kwargs):
|
||||
super(Opinion, self).__init__(*args,
|
||||
**kwargs)
|
||||
if polarityValue is not None:
|
||||
self.hasPolarityValue = polarityValue
|
||||
if hasPolarity is not None:
|
||||
self.hasPolarity = hasPolarity
|
||||
def __init__(self, polarity_value=None, polarity=None, **kwargs):
|
||||
super(Opinion, self).__init__(**kwargs)
|
||||
if polarity_value is not None:
|
||||
self.polarity_value = polarity_value
|
||||
if polarity is not None:
|
||||
self.polarity = polarity
|
||||
|
||||
|
||||
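Put together, an analysis result is assembled from these classes the same way the bundled plugins below do it (sketch against the removed, prefix-aware API):

from senpy.models import Response, Opinion, Entry

p = "http://example.com/analysis#"                      # hypothetical prefix
entry = Entry(id="Entry0", text="senpy is awesome", prefix=p)
opinion = Opinion(id="Opinion0", prefix=p,
                  hasPolarity="marl:Positive", polarityValue=0.8)
opinion["prov:wasGeneratedBy"] = "ExamplePlugin_0.1"    # hypothetical plugin id
entry.opinions.append(opinion)
response = Response(prefix=p)
response.entries.append(entry)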
class EmotionSet(Leaf):
|
||||
_context = {}
|
||||
|
||||
def __init__(self, emotions=None, *args, **kwargs):
|
||||
def __init__(self, emotions=None, **kwargs):
|
||||
if not emotions:
|
||||
emotions = []
|
||||
super(EmotionSet, self).__init__(context=EmotionSet._context,
|
||||
*args,
|
||||
**kwargs)
|
||||
super(EmotionSet, self).__init__(**kwargs)
|
||||
self.emotions = emotions or []
|
||||
|
||||
|
||||
class Emotion(Leaf):
|
||||
_context = {}
|
||||
|
||||
|
||||
class Error(Leaf):
|
||||
# A better pattern would be this:
|
||||
# http://flask.pocoo.org/docs/0.10/patterns/apierrors/
|
||||
_frame = {}
|
||||
_context = {}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(Error, self).__init__(*args, **kwargs)
|
||||
|
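The comment above points at Flask's "API errors" pattern; a condensed version of what that would look like (sketch only, not part of this diff):

import json
from flask import Flask

app = Flask(__name__)

class ApiError(Exception):
    status_code = 400

    def __init__(self, message, status_code=None):
        super(ApiError, self).__init__(message)
        if status_code is not None:
            self.status_code = status_code

@app.errorhandler(ApiError)
def handle_api_error(error):
    body = json.dumps({"status": error.status_code, "message": str(error)})
    return body, error.status_code, {"Content-Type": "application/json"}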
senpy/plugins.py (219 changed lines)
@@ -1,159 +1,100 @@
|
||||
|
||||
import inspect
|
||||
import os.path
|
||||
import shelve
|
||||
import logging
|
||||
import ConfigParser
|
||||
from .models import Response, Leaf
|
||||
from yapsy.IPlugin import IPlugin
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
PARAMS = {
|
||||
"input": {
|
||||
"@id": "input",
|
||||
"aliases": ["i", "input"],
|
||||
"required": True,
|
||||
"help": "Input text"
|
||||
},
|
||||
"informat": {
|
||||
"@id": "informat",
|
||||
"aliases": ["f", "informat"],
|
||||
"required": False,
|
||||
"default": "text",
|
||||
"options": ["turtle", "text"],
|
||||
},
|
||||
"intype": {
|
||||
"@id": "intype",
|
||||
"aliases": ["intype", "t"],
|
||||
"required": False,
|
||||
"default": "direct",
|
||||
"options": ["direct", "url", "file"],
|
||||
},
|
||||
"outformat": {
|
||||
"@id": "outformat",
|
||||
"aliases": ["outformat", "o"],
|
||||
"default": "json-ld",
|
||||
"required": False,
|
||||
"options": ["json-ld"],
|
||||
},
|
||||
"language": {
|
||||
"@id": "language",
|
||||
"aliases": ["language", "l"],
|
||||
"required": False,
|
||||
},
|
||||
"prefix": {
|
||||
"@id": "prefix",
|
||||
"aliases": ["prefix", "p"],
|
||||
"required": True,
|
||||
"default": "",
|
||||
},
|
||||
"urischeme": {
|
||||
"@id": "urischeme",
|
||||
"aliases": ["urischeme", "u"],
|
||||
"required": False,
|
||||
"default": "RFC5147String",
|
||||
"options": "RFC5147String"
|
||||
},
|
||||
}
|
||||
PARAMS = {"input": {"aliases": ["i", "input"],
|
||||
"required": True,
|
||||
"help": "Input text"
|
||||
},
|
||||
"informat": {"aliases": ["f", "informat"],
|
||||
"required": False,
|
||||
"default": "text",
|
||||
"options": ["turtle", "text"],
|
||||
},
|
||||
"intype": {"aliases": ["intype", "t"],
|
||||
"required": False,
|
||||
"default": "direct",
|
||||
"options": ["direct", "url", "file"],
|
||||
},
|
||||
"outformat": {"aliases": ["outformat", "o"],
|
||||
"default": "json-ld",
|
||||
"required": False,
|
||||
"options": ["json-ld"],
|
||||
},
|
||||
"language": {"aliases": ["language", "l"],
|
||||
"required": False,
|
||||
"options": ["es", "en"],
|
||||
},
|
||||
"urischeme": {"aliases": ["urischeme", "u"],
|
||||
"required": False,
|
||||
"default": "RFC5147String",
|
||||
"options": "RFC5147String"
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
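Both variants of the parameter specification above are plain dicts keyed by parameter name, with aliases, an optional default and a required flag; resolving a raw query against such a spec is straightforward. An illustrative helper (not part of this diff):

def parse_params(query, spec):
    parsed = {}
    for name, options in spec.items():
        if name == "@id":
            continue
        for alias in options.get("aliases", [name]):
            if alias in query:
                parsed[name] = query[alias]
                break
        else:
            if "default" in options:
                parsed[name] = options["default"]
            elif options.get("required"):
                raise ValueError("Missing required parameter: %s" % name)
    return parsed

# parse_params({"i": "some text", "l": "en"}, PARAMS)
# -> {'input': 'some text', 'language': 'en', 'informat': 'text', ...}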
class SenpyPlugin(Leaf):
|
||||
_context = Leaf.get_context(Response._context)
|
||||
_frame = {"@context": _context,
|
||||
"name": {},
|
||||
"extra_params": {"@container": "@index"},
|
||||
"@explicit": True,
|
||||
"version": {},
|
||||
"repo": None,
|
||||
"is_activated": {},
|
||||
"params": None,
|
||||
}
|
||||
|
||||
def __init__(self, info=None):
|
||||
if not info:
|
||||
raise ValueError(("You need to provide configuration"
|
||||
"information for the plugin."))
|
||||
logger.debug("Initialising {}".format(info))
|
||||
super(SenpyPlugin, self).__init__()
|
||||
self.name = info["name"]
|
||||
self.version = info["version"]
|
||||
self.id = "{}_{}".format(self.name, self.version)
|
||||
self.params = info.get("params", PARAMS.copy())
|
||||
if "@id" not in self.params:
|
||||
self.params["@id"] = "params_%s" % self.id
|
||||
self.extra_params = info.get("extra_params", {})
|
||||
self.params.update(self.extra_params.copy())
|
||||
if "@id" not in self.extra_params:
|
||||
self.extra_params["@id"] = "extra_params_%s" % self.id
|
||||
self.is_activated = False
|
||||
self._info = info
|
||||
|
||||
def get_folder(self):
|
||||
return os.path.dirname(inspect.getfile(self.__class__))
|
||||
class SenpyPlugin(IPlugin):
|
||||
def __init__(self, name=None, version=None, extraparams=None, params=None):
|
||||
logger.debug("Initialising {}".format(name))
|
||||
self.name = name
|
||||
self.version = version
|
||||
if params:
|
||||
self.params = params
|
||||
else:
|
||||
self.params = PARAMS.copy()
|
||||
if extraparams:
|
||||
self.params.update(extraparams)
|
||||
self.extraparams = extraparams or {}
|
||||
self.is_activated = True
|
||||
|
||||
def analyse(self, *args, **kwargs):
|
||||
logger.debug("Analysing with: {} {}".format(self.name, self.version))
|
||||
pass
|
||||
|
||||
def activate(self):
|
||||
pass
|
||||
|
||||
def deactivate(self):
|
||||
pass
|
||||
|
||||
def jsonld(self, parameters=False, *args, **kwargs):
|
||||
nframe = kwargs.pop("frame", self._frame)
|
||||
def jsonable(self, parameters=False):
|
||||
resp = {
|
||||
"@id": "{}_{}".format(self.name, self.version),
|
||||
"is_activated": self.is_activated,
|
||||
}
|
||||
if hasattr(self, "repo") and self.repo:
|
||||
resp["repo"] = self.repo.remotes[0].url
|
||||
if parameters:
|
||||
nframe = nframe.copy()
|
||||
nframe["params"] = {}
|
||||
return super(SenpyPlugin, self).jsonld(frame=nframe, *args, **kwargs)
|
||||
resp["parameters"] = self.params
|
||||
elif self.extraparams:
|
||||
resp["extra_parameters"] = self.extraparams
|
||||
return resp
|
||||
|
||||
@property
|
||||
def id(self):
|
||||
return "{}_{}".format(self.name, self.version)
|
||||
|
||||
def __del__(self):
|
||||
''' Destructor, to make sure all the resources are freed '''
|
||||
self.deactivate()
|
||||
|
||||
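A minimal plugin against the yapsy-style base class added here (a sketch: with the removed JSON-descriptor base class the constructor takes an info dict instead, and a matching .senpy descriptor, like the dummy one further down, is what makes the plugin discoverable):

from senpy.plugins import SenpyPlugin
from senpy.models import Response

class EchoPlugin(SenpyPlugin):          # hypothetical example plugin
    def __init__(self):
        super(EchoPlugin, self).__init__(name="echo", version="0.1")

    def analyse(self, **params):
        response = Response()
        response.entries.append({"text": params["input"]})
        return response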
class SentimentPlugin(SenpyPlugin):
|
||||
def __init__(self,
|
||||
min_polarity_value=0,
|
||||
max_polarity_value=1,
|
||||
**kwargs):
|
||||
super(SentimentPlugin, self).__init__(**kwargs)
|
||||
self.minPolarityValue = min_polarity_value
|
||||
self.maxPolarityValue = max_polarity_value
|
||||
|
||||
def __init__(self, info, *args, **kwargs):
|
||||
super(SentimentPlugin, self).__init__(info, *args, **kwargs)
|
||||
self.minPolarityValue = float(info.get("minPolarityValue", 0))
|
||||
self.maxPolarityValue = float(info.get("maxPolarityValue", 1))
|
||||
def jsonable(self, *args, **kwargs):
|
||||
resp = super(SentimentPlugin, self).jsonable(*args, **kwargs)
|
||||
resp["marl:maxPolarityValue"] = self.maxPolarityValue
|
||||
resp["marl:minPolarityValue"] = self.minPolarityValue
|
||||
return resp
|
||||
|
||||
|
||||
class EmotionPlugin(SenpyPlugin):
|
||||
def __init__(self,
|
||||
min_emotion_value=0,
|
||||
max_emotion_value=1,
|
||||
emotion_category=None,
|
||||
**kwargs):
|
||||
super(EmotionPlugin, self).__init__(**kwargs)
|
||||
self.minEmotionValue = min_emotion_value
|
||||
self.maxEmotionValue = max_emotion_value
|
||||
self.emotionCategory = emotion_category
|
||||
|
||||
def __init__(self, info, *args, **kwargs):
|
||||
resp = super(EmotionPlugin, self).__init__(info, *args, **kwargs)
|
||||
self.minEmotionValue = float(info.get("minEmotionValue", 0))
|
||||
self.maxEmotionValue = float(info.get("maxEmotionValue", 0))
|
||||
|
||||
|
||||
class ShelfMixin(object):
|
||||
|
||||
@property
|
||||
def sh(self):
|
||||
if not hasattr(self, '_sh') or not self._sh:
|
||||
self._sh = shelve.open(self.shelf_file, writeback=True)
|
||||
return self._sh
|
||||
|
||||
@sh.deleter
|
||||
def sh(self):
|
||||
if os.path.isfile(self.shelf_file):
|
||||
os.remove(self.shelf_file)
|
||||
|
||||
@property
|
||||
def shelf_file(self):
|
||||
if not hasattr(self, '_shelf_file') or not self._shelf_file:
|
||||
if hasattr(self, '_info') and 'shelf_file' in self._info:
|
||||
self._shelf_file = self._info['shelf_file']
|
||||
else:
|
||||
self._shelf_file = os.path.join(self.get_folder(), self.name + '.db')
|
||||
return self._shelf_file
|
||||
|
||||
def close(self):
|
||||
self.sh.close()
|
||||
del(self._sh)
|
||||
def jsonable(self, *args, **kwargs):
|
||||
resp = super(EmotionPlugin, self).jsonable(*args, **kwargs)
|
||||
resp["onyx:minEmotionValue"] = self.minEmotionValue
|
||||
resp["onyx:maxEmotionValue"] = self.maxEmotionValue
|
||||
return resp
|
||||
|
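The ShelfMixin above gives any plugin a small persistent key-value store backed by shelve; usage mirrors the shelf tests further down (sketch against the removed info-dict API):

from senpy.plugins import SenpyPlugin, ShelfMixin

class CountingPlugin(ShelfMixin, SenpyPlugin):       # hypothetical example
    def analyse(self, **params):
        self.sh['calls'] = self.sh.get('calls', 0) + 1   # persisted across runs
        return self.sh['calls']

p = CountingPlugin(info={'name': 'counter', 'version': '0.1',
                         'shelf_file': '/tmp/counter.db'})
p.analyse(input="some text")
p.close()                                            # flush and close the shelf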
@@ -1,31 +0,0 @@
|
||||
import json
|
||||
import random
|
||||
|
||||
from senpy.plugins import SentimentPlugin
|
||||
from senpy.models import Response, Opinion, Entry
|
||||
|
||||
|
||||
class Sentiment140Plugin(SentimentPlugin):
|
||||
def analyse(self, **params):
|
||||
lang = params.get("language", "auto")
|
||||
|
||||
p = params.get("prefix", None)
|
||||
response = Response(prefix=p)
|
||||
polarity_value = max(-1, min(1, random.gauss(0.2, 0.2)))
|
||||
polarity = "marl:Neutral"
|
||||
if polarity_value > 0:
|
||||
polarity = "marl:Positive"
|
||||
elif polarity_value < 0:
|
||||
polarity = "marl:Negative"
|
||||
entry = Entry(id="Entry0",
|
||||
text=params["input"],
|
||||
prefix=p)
|
||||
opinion = Opinion(id="Opinion0",
|
||||
prefix=p,
|
||||
hasPolarity=polarity,
|
||||
polarityValue=polarity_value)
|
||||
opinion["prov:wasGeneratedBy"] = self.id
|
||||
entry.opinions.append(opinion)
|
||||
entry.language = lang
|
||||
response.entries.append(entry)
|
||||
return response
|
@@ -1,18 +0,0 @@
{
    "name": "rand",
    "module": "rand",
    "description": "What my plugin broadly does",
    "author": "@balkian",
    "version": "0.1",
    "extra_params": {
        "language": {
            "@id": "lang_rand",
            "aliases": ["language", "l"],
            "required": false,
            "options": ["es", "en", "auto"]
        }
    },
    "requirements": {},
    "marl:maxPolarityValue": "1",
    "marl:minPolarityValue": "-1"
}
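Descriptors like this one drive the JSON loader shown in senpy/extensions.py above: "module" names the Python file to import next to the descriptor, and "extra_params" is merged on top of the default PARAMS spec from senpy/plugins.py, so this plugin additionally accepts a language parameter. Roughly:

params = dict(PARAMS)                     # defaults: input, informat, intype, ...
params.update(info["extra_params"])       # adds "language" for this plugin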
@@ -1,39 +0,0 @@
import requests
import json

from senpy.plugins import SentimentPlugin
from senpy.models import Response, Opinion, Entry


class Sentiment140Plugin(SentimentPlugin):
    def analyse(self, **params):
        lang = params.get("language", "auto")
        res = requests.post("http://www.sentiment140.com/api/bulkClassifyJson",
                            json.dumps({"language": lang,
                                        "data": [{"text": params["input"]}]
                                        }
                                       )
                            )

        p = params.get("prefix", None)
        response = Response(prefix=p)
        polarity_value = self.maxPolarityValue*int(res.json()["data"][0]
                                                   ["polarity"]) * 0.25
        polarity = "marl:Neutral"
        neutral_value = self.maxPolarityValue / 2.0
        if polarity_value > neutral_value:
            polarity = "marl:Positive"
        elif polarity_value < neutral_value:
            polarity = "marl:Negative"
        entry = Entry(id="Entry0",
                      text=params["input"],
                      prefix=p)
        opinion = Opinion(id="Opinion0",
                          prefix=p,
                          hasPolarity=polarity,
                          polarityValue=polarity_value)
        opinion["prov:wasGeneratedBy"] = self.id
        entry.opinions.append(opinion)
        entry.language = lang
        response.entries.append(entry)
        return response
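Per the Sentiment140 API conventions, bulkClassifyJson reports polarity as 0 (negative), 2 (neutral) or 4 (positive); the * 0.25 factor rescales that onto the plugin's 0..maxPolarityValue range before comparing against the midpoint. With the default maxPolarityValue of 1:

# polarity 4 -> 4 * 0.25 = 1.0  > 0.5  -> "marl:Positive"
# polarity 2 -> 2 * 0.25 = 0.5 == 0.5  -> "marl:Neutral"
# polarity 0 -> 0 * 0.25 = 0.0  < 0.5  -> "marl:Negative"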
@@ -1,18 +0,0 @@
{
    "name": "sentiment140",
    "module": "sentiment140",
    "description": "What my plugin broadly does",
    "author": "@balkian",
    "version": "0.1",
    "extra_params": {
        "language": {
            "@id": "lang_sentiment140",
            "aliases": ["language", "l"],
            "required": false,
            "options": ["es", "en", "auto"]
        }
    },
    "requirements": {},
    "maxPolarityValue": "1",
    "minPolarityValue": "0"
}
@@ -1,2 +1,2 @@
[metadata]
description-file = README.rst
description-file = README.md
setup.py (35 changed lines)
@@ -1,21 +1,16 @@
|
||||
import pip
|
||||
from setuptools import setup
|
||||
from pip.req import parse_requirements
|
||||
# parse_requirements() returns generator of pip.req.InstallRequirement objects
|
||||
|
||||
try:
|
||||
install_reqs = parse_requirements("requirements.txt", session=pip.download.PipSession())
|
||||
test_reqs = parse_requirements("test-requirements.txt", session=pip.download.PipSession())
|
||||
except AttributeError:
|
||||
install_reqs = parse_requirements("requirements.txt")
|
||||
test_reqs = parse_requirements("test-requirements.txt")
|
||||
# parse_requirements() returns generator of pip.req.InstallRequirement objects
|
||||
install_reqs = parse_requirements("requirements.txt")
|
||||
|
||||
# reqs is a list of requirement
|
||||
# e.g. ['django==1.5.1', 'mezzanine==1.4.6']
|
||||
install_reqs = [str(ir.req) for ir in install_reqs]
|
||||
test_reqs = [str(ir.req) for ir in test_reqs]
|
||||
reqs = [str(ir.req) for ir in install_reqs]
|
||||
|
||||
VERSION = "0.4.11"
|
||||
VERSION = "0.2.8"
|
||||
|
||||
print(reqs)
|
||||
|
||||
setup(
|
||||
name='senpy',
|
||||
@@ -27,18 +22,10 @@ extendable, so new algorithms and sources can be used.
|
||||
''',
|
||||
author='J. Fernando Sanchez',
|
||||
author_email='balkian@gmail.com',
|
||||
url='https://github.com/gsi-upm/senpy', # use the URL to the github repo
|
||||
download_url='https://github.com/gsi-upm/senpy/archive/{}.tar.gz'
|
||||
.format(VERSION),
|
||||
keywords=['eurosentiment', 'sentiment', 'emotions', 'nif'],
|
||||
url='https://github.com/balkian/senpy', # use the URL to the github repo
|
||||
download_url='https://github.com/balkian/senpy/archive/{}.tar.gz'.format(VERSION),
|
||||
keywords=['eurosentiment', 'sentiment', 'emotions', 'nif'], # arbitrary keywords
|
||||
classifiers=[],
|
||||
install_requires=install_reqs,
|
||||
tests_require=test_reqs,
|
||||
test_suite="nose.collector",
|
||||
include_package_data=True,
|
||||
entry_points={
|
||||
'console_scripts': [
|
||||
'senpy = senpy.__main__:main'
|
||||
]
|
||||
}
|
||||
install_requires=reqs,
|
||||
include_package_data = True,
|
||||
)
|
||||
|
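pip.req.parse_requirements is not a stable pip API (the session argument and module paths have changed across pip releases), which is what the removed try/except worked around. A dependency-free alternative, for reference:

with open("requirements.txt") as f:
    reqs = [line.strip() for line in f
            if line.strip() and not line.startswith("#")]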
@@ -1,3 +0,0 @@
|
||||
nose
|
||||
mock
|
||||
pbr
|
@@ -0,0 +1 @@
|
||||
|
||||
|
@@ -1,3 +1,4 @@
|
||||
|
||||
import os
|
||||
import logging
|
||||
|
||||
@@ -8,8 +9,6 @@ except ImportError:
|
||||
from senpy.extensions import Senpy
|
||||
from flask import Flask
|
||||
from flask.ext.testing import TestCase
|
||||
from gevent import sleep
|
||||
from itertools import product
|
||||
|
||||
|
||||
def check_dict(indic, template):
|
||||
@@ -17,98 +16,23 @@ def check_dict(indic, template):
|
||||
|
||||
|
||||
class BlueprintsTest(TestCase):
|
||||
|
||||
def create_app(self):
|
||||
self.app = Flask("test_extensions")
|
||||
self.senpy = Senpy()
|
||||
self.senpy.init_app(self.app)
|
||||
self.dir = os.path.join(os.path.dirname(__file__), "..")
|
||||
self.senpy.add_folder(self.dir)
|
||||
self.senpy.activate_plugin("Dummy", sync=True)
|
||||
return self.app
|
||||
|
||||
def test_home(self):
|
||||
"""
|
||||
Calling with no arguments should ask the user for more arguments
|
||||
"""
|
||||
""" Calling with no arguments should ask the user for more arguments """
|
||||
resp = self.client.get("/")
|
||||
self.assert404(resp)
|
||||
self.assert200(resp)
|
||||
logging.debug(resp.json)
|
||||
assert resp.json["status"] == 404
|
||||
assert resp.json["status"] == "failed"
|
||||
atleast = {
|
||||
"status": 404,
|
||||
"status": "failed",
|
||||
"message": "Missing or invalid parameters",
|
||||
}
|
||||
assert check_dict(resp.json, atleast)
|
||||
|
||||
def test_analysis(self):
|
||||
"""
|
||||
The dummy plugin returns an empty response,\
|
||||
it should contain the context
|
||||
"""
|
||||
resp = self.client.get("/?i=My aloha mohame")
|
||||
self.assert200(resp)
|
||||
logging.debug("Got response: %s", resp.json)
|
||||
assert "@context" in resp.json
|
||||
assert check_dict(
|
||||
resp.json["@context"],
|
||||
{"marl": "http://www.gsi.dit.upm.es/ontologies/marl/ns#"})
|
||||
assert "entries" in resp.json
|
||||
|
||||
def test_list(self):
|
||||
""" List the plugins """
|
||||
resp = self.client.get("/plugins/")
|
||||
self.assert200(resp)
|
||||
logging.debug(resp.json)
|
||||
assert "Dummy" in resp.json
|
||||
assert "@context" in resp.json
|
||||
|
||||
def test_headers(self):
|
||||
for i, j in product(["/plugins/?nothing=", "/?i=test&"],
|
||||
["headers", "inHeaders"]):
|
||||
resp = self.client.get("%s" % (i))
|
||||
assert "@context" in resp.json
|
||||
resp = self.client.get("%s&%s=0" % (i, j))
|
||||
assert "@context" in resp.json
|
||||
resp = self.client.get("%s&%s=1" % (i, j))
|
||||
assert "@context" not in resp.json
|
||||
resp = self.client.get("%s&%s=true" % (i, j))
|
||||
assert "@context" not in resp.json
|
||||
|
||||
def test_detail(self):
|
||||
""" Show only one plugin"""
|
||||
resp = self.client.get("/plugins/Dummy")
|
||||
self.assert200(resp)
|
||||
logging.debug(resp.json)
|
||||
assert "@id" in resp.json
|
||||
assert resp.json["@id"] == "Dummy_0.1"
|
||||
|
||||
def test_activate(self):
|
||||
""" Activate and deactivate one plugin """
|
||||
resp = self.client.get("/plugins/Dummy/deactivate")
|
||||
self.assert200(resp)
|
||||
sleep(0.5)
|
||||
resp = self.client.get("/plugins/Dummy")
|
||||
self.assert200(resp)
|
||||
assert "is_activated" in resp.json
|
||||
assert resp.json["is_activated"] == False
|
||||
resp = self.client.get("/plugins/Dummy/activate")
|
||||
self.assert200(resp)
|
||||
sleep(0.5)
|
||||
resp = self.client.get("/plugins/Dummy")
|
||||
self.assert200(resp)
|
||||
assert "is_activated" in resp.json
|
||||
assert resp.json["is_activated"] == True
|
||||
|
||||
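The activate/deactivate endpoints return before the plugin has finished switching state, which is why the test above sleeps for half a second; a client can poll the detail view instead. Sketch (host and port assumed):

import time
import requests

base = "http://localhost:5000"
requests.get(base + "/plugins/Dummy/activate")
while not requests.get(base + "/plugins/Dummy").json().get("is_activated"):
    time.sleep(0.1)     # poll until the switch completes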
def test_default(self):
|
||||
""" Show only one plugin"""
|
||||
resp = self.client.get("/default")
|
||||
self.assert200(resp)
|
||||
logging.debug(resp.json)
|
||||
assert "@id" in resp.json
|
||||
assert resp.json["@id"] == "Dummy_0.1"
|
||||
resp = self.client.get("/plugins/Dummy/deactivate")
|
||||
self.assert200(resp)
|
||||
sleep(0.5)
|
||||
resp = self.client.get("/default")
|
||||
self.assert404(resp)
|
||||
|
@@ -1,40 +0,0 @@
|
||||
{
|
||||
"dc": "http://purl.org/dc/terms/",
|
||||
"dc:subject": {
|
||||
"@type": "@id"
|
||||
},
|
||||
"xsd": "http://www.w3.org/2001/XMLSchema#",
|
||||
"marl": "http://www.gsi.dit.upm.es/ontologies/marl/ns#",
|
||||
"nif": "http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#",
|
||||
"onyx": "http://www.gsi.dit.upm.es/ontologies/onyx/ns#",
|
||||
"emotions": {
|
||||
"@container": "@set",
|
||||
"@id": "onyx:hasEmotionSet"
|
||||
},
|
||||
"opinions": {
|
||||
"@container": "@set",
|
||||
"@id": "marl:hasOpinion"
|
||||
},
|
||||
"prov": "http://www.w3.org/ns/prov#",
|
||||
"rdfs": "http://www.w3.org/2000/01/rdf-schema#",
|
||||
"analysis": {
|
||||
"@container": "@set",
|
||||
"@id": "prov:wasInformedBy"
|
||||
},
|
||||
"entries": {
|
||||
"@container": "@set",
|
||||
"@id": "prov:generated"
|
||||
},
|
||||
"strings": {
|
||||
"@container": "@set",
|
||||
"@reverse": "nif:hasContext"
|
||||
},
|
||||
"date":
|
||||
{
|
||||
"@id": "dc:date",
|
||||
"@type": "xsd:dateTime"
|
||||
},
|
||||
"text": { "@id": "nif:isString" },
|
||||
"wnaffect": "http://www.gsi.dit.upm.es/ontologies/wnaffect#",
|
||||
"xsd": "http://www.w3.org/2001/XMLSchema#"
|
||||
}
|
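This is the JSON-LD context the models rely on: short attribute names map to full vocabulary IRIs ("text" -> nif:isString, "opinions" -> marl:hasOpinion, "entries" -> prov:generated, and so on). A toy round-trip with pyld (file path assumed):

import json
from pyld import jsonld

with open("context.jsonld") as f:
    ctx = json.load(f)
doc = {"@context": ctx, "entries": [{"text": "good"}]}
print(jsonld.expand(doc))   # "entries" expands to prov:generated, "text" to nif:isString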
@@ -1,8 +1,6 @@
|
||||
from senpy.plugins import SentimentPlugin
|
||||
from senpy.models import Response
|
||||
from senpy.plugins import SenpyPlugin
|
||||
|
||||
class DummyPlugin(SenpyPlugin):
|
||||
def __init__(self):
|
||||
super(DummyPlugin, self).__init__("dummy")
|
||||
|
||||
class DummyPlugin(SentimentPlugin):
|
||||
|
||||
def analyse(self, *args, **kwargs):
|
||||
return Response()
|
||||
|
@@ -1,7 +1,8 @@
|
||||
{
|
||||
"name": "Dummy",
|
||||
"module": "dummy",
|
||||
"description": "I am dummy",
|
||||
"author": "@balkian",
|
||||
"version": "0.1"
|
||||
}
|
||||
[Core]
|
||||
Name = dummy
|
||||
Module = dummy
|
||||
[Documentation]
|
||||
Description = What my plugin broadly does
|
||||
Author = My very own name
|
||||
Version = 0.1
|
||||
Website = My very own website
|
||||
|
@@ -11,13 +11,11 @@ from flask.ext.testing import TestCase
|
||||
|
||||
|
||||
class ExtensionsTest(TestCase):
|
||||
|
||||
def create_app(self):
|
||||
self.app = Flask("test_extensions")
|
||||
self.dir = os.path.join(os.path.dirname(__file__), "..")
|
||||
self.senpy = Senpy(plugin_folder=self.dir, default_plugins=False)
|
||||
self.senpy = Senpy(plugin_folder=self.dir)
|
||||
self.senpy.init_app(self.app)
|
||||
self.senpy.activate_plugin("Dummy", sync=True)
|
||||
return self.app
|
||||
|
||||
def test_init(self):
|
||||
@@ -32,56 +30,41 @@ class ExtensionsTest(TestCase):
|
||||
# noinspection PyProtectedMember
|
||||
assert self.dir in self.senpy._search_folders
|
||||
print self.senpy.plugins
|
||||
assert "Dummy" in self.senpy.plugins
|
||||
assert "dummy" in self.senpy.plugins
|
||||
|
||||
def test_enabling(self):
|
||||
""" Enabling a plugin """
|
||||
self.senpy.activate_all(sync=True)
|
||||
assert len(self.senpy.plugins) == 2
|
||||
assert self.senpy.plugins["Sleep"].is_activated
|
||||
self.senpy.activate_plugin("dummy")
|
||||
assert self.senpy.plugins["dummy"].is_activated
|
||||
|
||||
def test_disabling(self):
|
||||
""" Disabling a plugin """
|
||||
self.senpy.deactivate_all(sync=True)
|
||||
assert not self.senpy.plugins["Dummy"].is_activated
|
||||
assert not self.senpy.plugins["Sleep"].is_activated
|
||||
self.senpy.activate_plugin("dummy")
|
||||
self.senpy.deactivate_plugin("dummy")
|
||||
assert not self.senpy.plugins["dummy"].is_activated
|
||||
|
||||
def test_default(self):
|
||||
""" Default plugin should be set """
|
||||
assert self.senpy.default_plugin
|
||||
assert self.senpy.default_plugin.name == "Dummy"
|
||||
self.senpy.deactivate_all(sync=True)
|
||||
logging.debug("Default: {}".format(self.senpy.default_plugin))
|
||||
assert self.senpy.default_plugin is None
|
||||
|
||||
def test_noplugin(self):
|
||||
""" Don't analyse if there isn't any plugin installed """
|
||||
self.senpy.deactivate_all(sync=True)
|
||||
resp = self.senpy.analyse(input="tupni")
|
||||
logging.debug("Response: {}".format(resp))
|
||||
assert resp["status"] == 404
|
||||
assert self.senpy.default_plugin == "dummy"
|
||||
|
||||
def test_analyse(self):
|
||||
""" Using a plugin """
|
||||
# I was using mock until plugin started inheriting
|
||||
# Leaf (defaultdict with __setattr__ and __getattr__.
|
||||
r1 = self.senpy.analyse(
|
||||
algorithm="Dummy", input="tupni", output="tuptuo")
|
||||
r2 = self.senpy.analyse(input="tupni", output="tuptuo")
|
||||
assert r1.analysis[0].id[:5] == "Dummy"
|
||||
assert r2.analysis[0].id[:5] == "Dummy"
|
||||
with mock.patch.object(self.senpy.plugins["dummy"], "analyse") as mocked:
|
||||
self.senpy.analyse(algorithm="dummy", input="tupni", output="tuptuo")
|
||||
self.senpy.analyse(input="tupni", output="tuptuo")
|
||||
mocked.assert_any_call(input="tupni", output="tuptuo", algorithm="dummy")
|
||||
mocked.assert_any_call(input="tupni", output="tuptuo")
|
||||
for plug in self.senpy.plugins:
|
||||
self.senpy.deactivate_plugin(plug, sync=True)
|
||||
self.senpy.deactivate_plugin(plug)
|
||||
resp = self.senpy.analyse(input="tupni")
|
||||
logging.debug("Response: {}".format(resp))
|
||||
assert resp["status"] == 404
|
||||
|
||||
assert resp["status"] == 400
|
||||
|
||||
def test_filtering(self):
|
||||
""" Filtering plugins """
|
||||
assert len(self.senpy.filter_plugins(name="Dummy")) > 0
|
||||
assert len(self.senpy.filter_plugins(name="dummy")) > 0
|
||||
assert not len(self.senpy.filter_plugins(name="notdummy"))
|
||||
assert self.senpy.filter_plugins(name="Dummy", is_activated=True)
|
||||
self.senpy.deactivate_plugin("Dummy", sync=True)
|
||||
assert not len(
|
||||
self.senpy.filter_plugins(name="Dummy", is_activated=True))
|
||||
assert self.senpy.filter_plugins(name="dummy", is_activated=True)
|
||||
self.senpy.deactivate_plugin("dummy")
|
||||
assert not len(self.senpy.filter_plugins(name="dummy", is_activated=True))
|
||||
|
@@ -1,81 +0,0 @@
|
||||
import os
|
||||
import logging
|
||||
|
||||
try:
|
||||
import unittest.mock as mock
|
||||
except ImportError:
|
||||
import mock
|
||||
import json
|
||||
import os
|
||||
from unittest import TestCase
|
||||
from senpy.models import Response, Entry
|
||||
from senpy.plugins import SenpyPlugin
|
||||
|
||||
|
||||
class ModelsTest(TestCase):
|
||||
|
||||
def test_response(self):
|
||||
r = Response(context=os.path.normpath(
|
||||
os.path.join(__file__, "..", "..", "context.jsonld")))
|
||||
assert("@context" in r)
|
||||
assert(r._frame)
|
||||
logging.debug("Default frame: %s", r._frame)
|
||||
assert("marl" in r.context)
|
||||
assert("entries" in r.context)
|
||||
|
||||
r2 = Response(context=json.loads('{"test": "roger"}'))
|
||||
assert("test" in r2.context)
|
||||
|
||||
r3 = Response(context=None)
|
||||
del r3.context
|
||||
assert("@context" not in r3)
|
||||
assert("entries" in r3)
|
||||
assert("analysis" in r3)
|
||||
|
||||
r4 = Response()
|
||||
assert("@context" in r4)
|
||||
assert("entries" in r4)
|
||||
assert("analysis" in r4)
|
||||
|
||||
dummy = SenpyPlugin({"name": "dummy", "version": 0})
|
||||
r5 = Response({"dummy": dummy}, context=None, frame=None)
|
||||
logging.debug("Response 5: %s", r5)
|
||||
assert("dummy" in r5)
|
||||
assert(r5["dummy"].name == "dummy")
|
||||
js = r5.jsonld(context={}, frame={})
|
||||
logging.debug("jsonld 5: %s", js)
|
||||
assert("dummy" in js)
|
||||
assert(js["dummy"].name == "dummy")
|
||||
|
||||
r6 = Response()
|
||||
r6.entries.append(Entry(text="Just testing"))
|
||||
logging.debug("Reponse 6: %s", r6)
|
||||
assert("@context" in r6)
|
||||
assert("marl" in r6.context)
|
||||
assert("entries" in r6.context)
|
||||
js = r6.jsonld()
|
||||
logging.debug("jsonld: %s", js)
|
||||
assert("entries" in js)
|
||||
assert("entries" in js)
|
||||
assert("analysis" in js)
|
||||
resp = r6.flask()
|
||||
received = json.loads(resp.data)
|
||||
logging.debug("Response: %s", js)
|
||||
assert(received["entries"])
|
||||
assert(received["entries"][0]["text"] == "Just testing")
|
||||
assert(received["entries"][0]["text"] != "Not testing")
|
||||
|
||||
def test_opinions(self):
|
||||
pass
|
||||
|
||||
def test_plugins(self):
|
||||
p = SenpyPlugin({"name": "dummy", "version": 0})
|
||||
c = p.jsonld()
|
||||
assert "info" not in c
|
||||
assert "repo" not in c
|
||||
assert "params" not in c
|
||||
logging.debug("Framed: %s", c)
|
||||
assert "extra_params" in c
|
||||
|
||||
def test_frame_response(self):
|
||||
pass
|
@@ -1,70 +0,0 @@
|
||||
#!/bin/env python2
|
||||
# -*- py-which-shell: "python2"; -*-
|
||||
import os
|
||||
import logging
|
||||
import shelve
|
||||
|
||||
try:
|
||||
import unittest.mock as mock
|
||||
except ImportError:
|
||||
import mock
|
||||
import json
|
||||
import os
|
||||
from unittest import TestCase
|
||||
from senpy.models import Response, Entry
|
||||
from senpy.plugins import SenpyPlugin, ShelfMixin
|
||||
|
||||
|
||||
class ShelfTest(ShelfMixin, SenpyPlugin):
|
||||
|
||||
def test(self, key=None, value=None):
|
||||
assert isinstance(self.sh, shelve.Shelf)
|
||||
assert key in self.sh
|
||||
print('Checking: sh[{}] == {}'.format(key, value))
|
||||
print('SH[{}]: {}'.format(key, self.sh[key]))
|
||||
assert self.sh[key] == value
|
||||
|
||||
|
||||
|
||||
class ModelsTest(TestCase):
|
||||
shelf_file = 'shelf_test.db'
|
||||
|
||||
|
||||
def tearDown(self):
|
||||
if os.path.isfile(self.shelf_file):
|
||||
os.remove(self.shelf_file)
|
||||
|
||||
setUp = tearDown
|
||||
|
||||
def test_shelf(self):
|
||||
''' A shelf is created and the value is stored '''
|
||||
a = ShelfTest(info={'name': 'shelve',
|
||||
'version': 'test',
|
||||
'shelf_file': self.shelf_file})
|
||||
assert a.sh == {}
|
||||
assert a.shelf_file == self.shelf_file
|
||||
|
||||
a.sh['a'] = 'fromA'
|
||||
|
||||
a.test(key='a', value='fromA')
|
||||
del(a)
|
||||
assert os.path.isfile(self.shelf_file)
|
||||
sh = shelve.open(self.shelf_file)
|
||||
assert sh['a'] == 'fromA'
|
||||
|
||||
|
||||
def test_two(self):
|
||||
''' Reusing the values of a previous shelf '''
|
||||
a = ShelfTest(info={'name': 'shelve',
|
||||
'version': 'test',
|
||||
'shelf_file': self.shelf_file})
|
||||
print('Shelf file: %s' % a.shelf_file)
|
||||
a.sh['a'] = 'fromA'
|
||||
a.close()
|
||||
|
||||
b = ShelfTest(info={'name': 'shelve',
|
||||
'version': 'test',
|
||||
'shelf_file': self.shelf_file})
|
||||
b.test(key='a', value='fromA')
|
||||
b.sh['a'] = 'fromB'
|
||||
assert b.sh['a'] == 'fromB'
|
@@ -1,17 +0,0 @@
from senpy.plugins import SenpyPlugin
from senpy.models import Response
from time import sleep


class SleepPlugin(SenpyPlugin):

    def __init__(self, info, *args, **kwargs):
        super(SleepPlugin, self).__init__(info, *args, **kwargs)
        self.timeout = int(info["timeout"])

    def activate(self, *args, **kwargs):
        sleep(self.timeout)

    def analyse(self, *args, **kwargs):
        sleep(float(kwargs.get("timeout", self.timeout)))
        return Response()
@@ -1,16 +0,0 @@
{
    "name": "Sleep",
    "module": "sleep",
    "description": "I am dummy",
    "author": "@balkian",
    "version": "0.1",
    "timeout": "2",
    "extra_params": {
        "timeout": {
            "@id": "timeout_sleep",
            "aliases": ["timeout", "to"],
            "required": false,
            "default": 0
        }
    }
}