Mirror of https://github.com/gsi-upm/senpy (synced 2025-10-19 09:48:26 +00:00)

Compare commits: 24 commits
SHA1
----
908090f634
fbf0384985
b072121e20
ceed9b97d0
2dbdb58b06
db30257373
7fd69cc690
b543a4614e
bc1f9e4cf5
d72a995fa9
40b67503ce
8624562f02
4dee623ef9
2e7530d9bc
07b5dd3823
0d511ad3c3
7205a0e7b2
fff38bf825
5d5de0bc50
0454fb1afe
5e36c71fa7
c8e742f96e
1e7ae13700
bf30c04a52
@@ -1,6 +1,8 @@
language: python
python:
- "2.7"
- "3.4"
- "3.5"
install: "pip install -r requirements.txt"
# run nosetests - Tests
script: nosetests
Dockerfile (deleted, 33 lines)
@@ -1,33 +0,0 @@
from python:2.7

RUN apt-get update
RUN apt-get -y install git
RUN mkdir -p /senpy-plugins

RUN apt-get -y install python-numpy
RUN apt-get -y install python-scipy
RUN apt-get -y install python-sklearn
RUN apt-get -y install python-gevent
RUN apt-get -y install libopenblas-dev
RUN apt-get -y install gfortran
RUN apt-get -y install libxml2-dev libxslt1-dev python-dev

#RUN pip install --upgrade pip

ADD id_rsa /root/.ssh/id_rsa
RUN chmod 700 /root/.ssh/id_rsa
RUN echo "Host github.com\n\tStrictHostKeyChecking no\n" >> /root/.ssh/config

RUN git clone https://github.com/gsi-upm/senpy /usr/src/app/
RUN git clone git@github.com:gsi-upm/senpy-plugins-enterprise /senpy-plugins/enterprise
RUN git clone https://github.com/gsi-upm/senpy-plugins-community /senpy-plugins/community

RUN pip install /usr/src/app
RUN pip install --no-use-wheel -r /senpy-plugins/enterprise/requirements.txt
RUN python -m nltk.downloader stopwords
RUN python -m nltk.downloader punkt
RUN python -m nltk.downloader maxent_treebank_pos_tagger
RUN python -m nltk.downloader wordnet

WORKDIR /senpy-plugins
ENTRYPOINT ["python", "-m", "senpy", "-f", ".", "--host", "0.0.0.0"]
Dockerfile (new symbolic link, 1 line)
@@ -0,0 +1 @@
Dockerfile-3.5
Dockerfile-2.7 (new file, 9 lines)
@@ -0,0 +1,9 @@
from python:2.7

WORKDIR /usr/src/app
ADD requirements.txt /usr/src/app/
RUN pip install -r requirements.txt
ADD . /usr/src/app/
RUN pip install .

ENTRYPOINT ["python", "-m", "senpy", "-f", ".", "--host", "0.0.0.0"]
Dockerfile-3.4 (new file, 9 lines)
@@ -0,0 +1,9 @@
from python:3.4

WORKDIR /usr/src/app
ADD requirements.txt /usr/src/app/
RUN pip install -r requirements.txt
ADD . /usr/src/app/
RUN pip install .

ENTRYPOINT ["python", "-m", "senpy", "-f", ".", "--host", "0.0.0.0"]
Dockerfile-3.5 (new file, 9 lines)
@@ -0,0 +1,9 @@
from python:3.5

WORKDIR /usr/src/app
ADD requirements.txt /usr/src/app/
RUN pip install -r requirements.txt
ADD . /usr/src/app/
RUN pip install .

ENTRYPOINT ["python", "-m", "senpy", "-f", ".", "--host", "0.0.0.0"]
Dockerfile.deps (new file, 33 lines)
@@ -0,0 +1,33 @@
from python:2.7

RUN apt-get update
RUN apt-get -y install git
RUN mkdir -p /senpy-plugins

RUN apt-get -y install python-numpy
RUN apt-get -y install python-scipy
RUN apt-get -y install python-sklearn
RUN apt-get -y install python-gevent
RUN apt-get -y install libopenblas-dev
RUN apt-get -y install gfortran
RUN apt-get -y install libxml2-dev libxslt1-dev python-dev

#RUN pip install --upgrade pip

ADD id_rsa /root/.ssh/id_rsa
RUN chmod 700 /root/.ssh/id_rsa
RUN echo "Host github.com\n\tStrictHostKeyChecking no\n" >> /root/.ssh/config

RUN git clone https://github.com/gsi-upm/senpy /usr/src/app/
RUN git clone git@github.com:gsi-upm/senpy-plugins-enterprise /senpy-plugins/enterprise
RUN git clone https://github.com/gsi-upm/senpy-plugins-community /senpy-plugins/community

RUN pip install /usr/src/app
RUN pip install --no-use-wheel -r /senpy-plugins/enterprise/requirements.txt
RUN python -m nltk.downloader stopwords
RUN python -m nltk.downloader punkt
RUN python -m nltk.downloader maxent_treebank_pos_tagger
RUN python -m nltk.downloader wordnet

WORKDIR /senpy-plugins
ENTRYPOINT ["python", "-m", "senpy", "-f", ".", "--host", "0.0.0.0"]
Dockerfile.template (new file, 9 lines)
@@ -0,0 +1,9 @@
from python:{{PYVERSION}}

WORKDIR /usr/src/app
ADD requirements.txt /usr/src/app/
RUN pip install -r requirements.txt
ADD . /usr/src/app/
RUN pip install .

ENTRYPOINT ["python", "-m", "senpy", "-f", ".", "--host", "0.0.0.0"]
@@ -2,6 +2,7 @@ include requirements.txt
include test-requirements.txt
include README.md
include senpy/context.jsonld
include senpy/VERSION
graft senpy/plugins
graft senpy/schemas
graft senpy/templates
Makefile (new file, 82 lines)
@@ -0,0 +1,82 @@
PYVERSIONS=3.5 3.4 2.7
PYMAIN=$(firstword $(PYVERSIONS))
NAME=senpy
REPO=gsiupm
VERSION=$(shell cat $(NAME)/VERSION)
TARNAME=$(NAME)-$(subst -,.,$(VERSION)).tar.gz
IMAGENAME=$(REPO)/$(NAME):$(VERSION)

all: build run

yapf:
	yapf -i -r senpy
	yapf -i -r tests

dev:
	pip install --user pre-commit
	pre-commit install

dockerfiles: $(addprefix Dockerfile-,$(PYVERSIONS))
	@unlink Dockerfile >/dev/null
	ln -s Dockerfile-$(PYMAIN) Dockerfile

Dockerfile-%: Dockerfile.template
	sed "s/{{PYVERSION}}/$*/" Dockerfile.template > Dockerfile-$*

quick_build: $(addprefix build-, $(PYMAIN))

build: $(addprefix build-, $(PYVERSIONS))

build-%: Dockerfile-%
	docker build -t '$(IMAGENAME)-python$*' -f Dockerfile-$* .;

quick_test: $(addprefix test-,$(PYMAIN))

test: $(addprefix test-,$(PYVERSIONS))

debug-%:
	(docker start $(NAME)-debug && docker attach $(NAME)-debug) || docker run -w /usr/src/app/ -v $$PWD:/usr/src/app --entrypoint=/bin/bash -ti --name $(NAME)-debug '$(IMAGENAME)-python$*'

debug: debug-$(PYMAIN)

test-%: build-%
	docker run --rm -w /usr/src/app/ --entrypoint=/usr/local/bin/python -ti '$(IMAGENAME)-python$*' setup.py test --addopts "-vvv -s" ;

dist/$(TARNAME):
	docker run --rm -ti -v $$PWD:/usr/src/app/ -w /usr/src/app/ python:$(PYMAIN) python setup.py sdist;

sdist: dist/$(TARNAME)

pip_test-%: sdist
	docker run --rm -v $$PWD/dist:/dist/ -ti python:$* pip install /dist/$(TARNAME);

pip_test: $(addprefix pip_test-,$(PYVERSIONS))

upload-%: test-%
	docker push '$(IMAGENAME)-python$*'

upload: test $(addprefix upload-,$(PYVERSIONS))
	docker tag '$(IMAGENAME)-python$(PYMAIN)' '$(IMAGENAME)'
	docker tag '$(IMAGENAME)-python$(PYMAIN)' '$(REPO)/$(NAME)'
	docker push '$(IMAGENAME)'
	docker push '$(REPO)/$(NAME)'

clean:
	@docker ps -a | awk '/$(REPO)\/$(NAME)/{ split($$2, vers, "-"); if(vers[1] != "${VERSION}"){ print $$1;}}' | xargs docker rm 2>/dev/null|| true
	@docker images | awk '/$(REPO)\/$(NAME)/{ split($$2, vers, "-"); if(vers[1] != "${VERSION}"){ print $$1":"$$2;}}' | xargs docker rmi 2>/dev/null|| true
	@docker rmi $(NAME)-debug 2>/dev/null || true

upload_git:
	git commit -a
	git tag ${VERSION}
	git push --tags origin master

pip_upload:
	python setup.py sdist upload ;

pip_test: $(addprefix pip_test-,$(PYVERSIONS))

run: build
	docker run --rm -p 5000:5000 -ti '$(IMAGENAME)-python$(PYMAIN)'

.PHONY: test test-% build-% build test pip_test run yapf dev
@@ -12,7 +12,7 @@ Have you ever wanted to turn your sentiment analysis algorithms into a service?
With senpy, now you can.
It provides all the tools so you just have to worry about improving your algorithms:

`See it in action. <http://demos.gsi.dit.upm.es/senpy>`_
`See it in action. <http://senpy.cluster.gsi.dit.upm.es/>`_

Installation
------------
@@ -30,7 +30,7 @@ Alternatively, you can use the development version:

.. code:: bash

   git clone git@github.com:gsi-upm/senpy
   git clone http://github.com/gsi-upm/senpy
   cd senpy
   pip install --user .

@@ -38,9 +38,9 @@ If you want to install senpy globally, use sudo instead of the ``--user`` flag.

Docker Image
************
Build the image or use the pre-built one: ``docker run -ti -p 5000:5000 balkian/senpy --host 0.0.0.0 --default-plugins``.
Build the image or use the pre-built one: ``docker run -ti -p 5000:5000 gsiupm/senpy --host 0.0.0.0 --default-plugins``.

To add custom plugins, add a volume and tell senpy where to find the plugins: ``docker run -ti -p 5000:5000 -v <PATH OF PLUGINS>:/plugins balkian/senpy --host 0.0.0.0 --default-plugins -f /plugins``
To add custom plugins, add a volume and tell senpy where to find the plugins: ``docker run -ti -p 5000:5000 -v <PATH OF PLUGINS>:/plugins gsiupm/senpy --host 0.0.0.0 --default-plugins -f /plugins``

Usage
-----
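Once the container (or a local ``python -m senpy``) is running, the service can be queried over plain HTTP. The sketch below is an illustration only: the localhost URL, the ``/api/`` path, the ``input`` and ``algo`` parameter names, and the ``sentiment140`` algorithm are assumptions based on the defaults that appear elsewhere in this changeset, not guarantees made by this README.

.. code:: python

   import requests

   # Query a senpy instance started with --default-plugins on port 5000.
   resp = requests.get('http://localhost:5000/api/',
                       params={'input': 'Senpy is a wonderful service',
                               'algo': 'sentiment140'})
   print(resp.json())  # JSON-LD results with the detected sentiments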
app.py (deleted, 43 lines)
@@ -1,43 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2014 J. Fernando Sánchez Rada - Grupo de Sistemas Inteligentes
# DIT, UPM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This is a helper for development. If you want to run Senpy use:

python -m senpy
"""
from gevent.monkey import patch_all; patch_all()
import gevent
import config
from flask import Flask
from senpy.extensions import Senpy
import logging
import os
from gevent.wsgi import WSGIServer

logging.basicConfig(level=logging.DEBUG)

app = Flask(__name__)
mypath = os.path.dirname(os.path.realpath(__file__))
sp = Senpy(app, os.path.join(mypath, "plugins"), default_plugins=True)
sp.activate_all()

if __name__ == '__main__':
    import logging
    logging.basicConfig(level=config.DEBUG)
    app.debug = config.DEBUG
    http_server = WSGIServer(('', config.SERVER_PORT), app)
    http_server.serve_forever()
docs/_static/schemas (new symbolic link, vendored, 1 line)
@@ -0,0 +1 @@
../../senpy/schemas/
docs/bad-examples/plugins/noplugins.json (new file, 4 lines)
@@ -0,0 +1,4 @@
{
  "plugins": [
  ]
}
docs/bad-examples/results/example-basic-FAIL.json (new file, 18 lines)
@@ -0,0 +1,18 @@
{
  "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
  "@id": "http://example.com#NIFExample",
  "@type": "results",
  "analysis": [
  ],
  "entries": [
    {
      "@type": [
        "nif:RFC5147String",
        "nif:Context"
      ],
      "nif:beginIndex": 0,
      "nif:endIndex": 40,
      "nif:isString": "My favourite actress is Natalie Portman"
    }
  ]
}
@@ -1,4 +1,5 @@
# -*- coding: utf-8 -*-
# flake8: noqa
#
# Senpy documentation build configuration file, created by
# sphinx-quickstart on Tue Feb 24 08:57:32 2015.
@@ -52,16 +53,17 @@ master_doc = 'index'

# General information about the project.
project = u'Senpy'
copyright = u'2015, J. Fernando Sánchez'
copyright = u'2016, J. Fernando Sánchez'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.4'
with open('../senpy/VERSION') as f:
    version = f.read().strip()
# The full version, including alpha/beta/rc tags.
release = '0.4'
release = version

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
docs/examples/plugins/noplugins.json (new file, 5 lines)
@@ -0,0 +1,5 @@
{
  "@type": "plugins",
  "plugins": [
  ]
}
@@ -1,6 +1,7 @@
{
  "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
  "@id": "http://example.com#NIFExample",
  "@type": "results",
  "analysis": [
  ],
  "entries": [
@@ -1,6 +1,7 @@
{
  "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
  "@id": "me:Result1",
  "@type": "results",
  "analysis": [
    {
      "@id": "me:SAnalysis1",
@@ -1,6 +1,7 @@
{
  "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
  "@id": "me:Result1",
  "@type": "results",
  "analysis": [
    {
      "@id": "me:EmotionAnalysis1",
@@ -1,6 +1,7 @@
{
  "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
  "@id": "me:Result1",
  "@type": "results",
  "analysis": [
    {
      "@id": "me:NER1",
docs/examples/results/example-pad.json (new file, 46 lines)
@@ -0,0 +1,46 @@
{
  "@context": [
    "http://mixedemotions-project.eu/ns/context.jsonld",
    {
      "emovoc": "http://www.gsi.dit.upm.es/ontologies/onyx/vocabularies/emotionml/ns#"
    }
  ],
  "@id": "me:Result1",
  "@type": "results",
  "analysis": [
    {
      "@id": "me:HesamsAnalysis",
      "@type": "onyx:EmotionAnalysis",
      "onyx:usesEmotionModel": "emovoc:pad-dimensions"
    }
  ],
  "entries": [
    {
      "@id": "Entry1",
      "@type": [
        "nif:RFC5147String",
        "nif:Context"
      ],
      "nif:isString": "This is a test string",
      "entities": [
      ],
      "suggestions": [
      ],
      "sentiments": [
      ],
      "emotions": [
        {
          "@id": "Entry1#char=0,21",
          "nif:anchorOf": "This is a test string",
          "prov:wasGeneratedBy": "me:HesamAnalysis",
          "onyx:hasEmotion": [
            {
              "emovoc:pleasure": 0.5,
              "emovoc:arousal": 0.7
            }
          ]
        }
      ]
    }
  ]
}
@@ -1,6 +1,7 @@
{
  "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
  "@id": "me:Result1",
  "@type": "results",
  "analysis": [
    {
      "@id": "me:SAnalysis1",
@@ -1,6 +1,7 @@
{
  "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
  "@id": "me:Result1",
  "@type": "results",
  "analysis": [
    {
      "@id": "me:SgAnalysis1",
@@ -22,6 +22,6 @@ If you want to install senpy globally, use sudo instead of the ``--user`` flag.

Docker Image
************
Build the image or use the pre-built one: ``docker run -ti -p 5000:5000 balkian/senpy --host 0.0.0.0 --default-plugins``.
Build the image or use the pre-built one: ``docker run -ti -p 5000:5000 gsiupm/senpy --host 0.0.0.0 --default-plugins``.

To add custom plugins, add a volume and tell senpy where to find the plugins: ``docker run -ti -p 5000:5000 -v <PATH OF PLUGINS>:/plugins balkian/senpy --host 0.0.0.0 --default-plugins -f /plugins``
To add custom plugins, add a volume and tell senpy where to find the plugins: ``docker run -ti -p 5000:5000 -v <PATH OF PLUGINS>:/plugins gsiupm/senpy --host 0.0.0.0 --default-plugins -f /plugins``
@@ -2,6 +2,8 @@ Developing new plugins
----------------------
Each plugin represents a different analysis process. There are two types of files that senpy needs in order to load a plugin:

Plugins Interface
=================
- Definition file, which has the ".senpy" extension.
- Code file, which is a python file.

@@ -34,7 +36,7 @@ The basic methods in a plugin are:
* __init__
* activate: used to load memory-hungry resources
* deactivate: used to free up resources
* analyse: called for every user request. It takes in the parameters supplied by a user and should return a senpy Results.
* analyse: called for every user request. It takes in the parameters supplied by a user and should return a senpy Response (see the sketch below).

Plugins are loaded asynchronously, so don't worry if the activate method takes too long. The plugin will be marked as activated once it has finished executing the method.
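The following sketch shows what such a plugin pair could look like, based on the interface described above and on the bundled sentiment140 plugin. The plugin name, the YAML keys shown in the comment, and the hard-coded positive polarity are illustrative assumptions, not code taken from the repository.

.. code:: python

   # Hypothetical definition file "example.senpy" (YAML), read by the plugin loader:
   #   name: example
   #   module: example
   #   version: '0.1'
   #
   # Hypothetical code file "example.py":
   from senpy.plugins import SentimentPlugin
   from senpy.models import Results, Entry, Sentiment


   class ExamplePlugin(SentimentPlugin):
       """Toy analysis that labels every input as positive."""

       def activate(self, *args, **kwargs):
           pass  # load memory-hungry resources here

       def deactivate(self, *args, **kwargs):
           pass  # free those resources here

       def analyse(self, **params):
           entry = Entry(nif__isString=params["input"])
           sentiment = Sentiment(marl__hasPolarity="marl:Positive",
                                 marl__polarityValue=1)
           sentiment.prov__wasGeneratedBy = self.id
           entry.sentiments = [sentiment]
           response = Results()
           response.entries.append(entry)
           return response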
@@ -1,6 +1,6 @@
Schema Examples
===============
All the examples in this page use the schema defined in :ref:`schema`.
All the examples in this page use :download:`the main schema <_static/schemas/definitions.json>`.

Simple NIF annotation
---------------------
@@ -4,8 +4,9 @@ requests>=2.4.1
GitPython>=0.3.2.RC1
gevent>=1.1rc4
PyLD>=0.6.5
Flask-Testing>=0.4.2
six
future
jsonschema
jsonref
PyYAML
semver
senpy/VERSION (new file, 1 line)
@@ -0,0 +1 @@
0.7.0
@@ -17,5 +17,24 @@
"""
Sentiment analysis server in Python
"""
from __future__ import print_function
from .version import __version__

__version__ = "0.5.5"
try:
    import semver
    __version_info__ = semver.parse_version_info(__version__)

    if __version_info__.prerelease:
        import logging
        logger = logging.getLogger(__name__)
        msg = 'WARNING: You are using a pre-release version of {} ({})'.format(
            __name__, __version__)
        if len(logging.root.handlers) > 0:
            logger.info(msg)
        else:
            import sys
            print(msg, file=sys.stderr)
except ImportError:
    print('semver not installed. Not doing version checking')

__all__ = ['api', 'blueprints', 'cli', 'extensions', 'models', 'plugins']
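The pre-release warning above hinges on semver.parse_version_info. A tiny, hypothetical illustration of the values involved (both version strings are made up for the example):

.. code:: python

   import semver

   info = semver.parse_version_info("0.7.0")
   print(info.prerelease)   # None, so no warning would be printed

   info = semver.parse_version_info("0.8.0-rc1")
   print(info.prerelease)   # 'rc1', which would trigger the warning above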
@@ -24,7 +24,6 @@ from flask import Flask
|
||||
from senpy.extensions import Senpy
|
||||
from gevent.wsgi import WSGIServer
|
||||
from gevent.monkey import patch_all
|
||||
import gevent
|
||||
import logging
|
||||
import os
|
||||
import argparse
|
||||
@@ -34,37 +33,51 @@ patch_all(thread=False)
|
||||
|
||||
SERVER_PORT = os.environ.get("PORT", 5000)
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(description='Run a Senpy server')
|
||||
parser.add_argument('--level',
|
||||
'-l',
|
||||
metavar='logging_level',
|
||||
type=str,
|
||||
default="INFO",
|
||||
help='Logging level')
|
||||
parser.add_argument('--debug',
|
||||
'-d',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='Run the application in debug mode')
|
||||
parser.add_argument('--default-plugins',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='Load the default plugins')
|
||||
parser.add_argument('--host',
|
||||
type=str,
|
||||
default="127.0.0.1",
|
||||
help='Use 0.0.0.0 to accept requests from any host.')
|
||||
parser.add_argument('--port',
|
||||
'-p',
|
||||
type=int,
|
||||
default=SERVER_PORT,
|
||||
help='Port to listen on.')
|
||||
parser.add_argument('--plugins-folder',
|
||||
'-f',
|
||||
type=str,
|
||||
default='plugins',
|
||||
help='Where to look for plugins.')
|
||||
parser.add_argument(
|
||||
'--level',
|
||||
'-l',
|
||||
metavar='logging_level',
|
||||
type=str,
|
||||
default="INFO",
|
||||
help='Logging level')
|
||||
parser.add_argument(
|
||||
'--debug',
|
||||
'-d',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='Run the application in debug mode')
|
||||
parser.add_argument(
|
||||
'--default-plugins',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='Load the default plugins')
|
||||
parser.add_argument(
|
||||
'--host',
|
||||
type=str,
|
||||
default="127.0.0.1",
|
||||
help='Use 0.0.0.0 to accept requests from any host.')
|
||||
parser.add_argument(
|
||||
'--port',
|
||||
'-p',
|
||||
type=int,
|
||||
default=SERVER_PORT,
|
||||
help='Port to listen on.')
|
||||
parser.add_argument(
|
||||
'--plugins-folder',
|
||||
'-f',
|
||||
type=str,
|
||||
default='plugins',
|
||||
help='Where to look for plugins.')
|
||||
parser.add_argument(
|
||||
'--only-install',
|
||||
'-i',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='Do not run a server, only install plugin dependencies'
|
||||
)
|
||||
args = parser.parse_args()
|
||||
logging.basicConfig()
|
||||
rl = logging.getLogger()
|
||||
@@ -72,6 +85,9 @@ def main():
|
||||
app = Flask(__name__)
|
||||
app.debug = args.debug
|
||||
sp = Senpy(app, args.plugins_folder, default_plugins=args.default_plugins)
|
||||
if args.only_install:
|
||||
sp.install_deps()
|
||||
return
|
||||
sp.activate_all()
|
||||
http_server = WSGIServer((args.host, args.port), app)
|
||||
try:
|
||||
@@ -80,8 +96,10 @@ def main():
|
||||
args.port))
|
||||
http_server.serve_forever()
|
||||
except KeyboardInterrupt:
|
||||
http_server.stop()
|
||||
print('Bye!')
|
||||
http_server.stop()
|
||||
sp.deactivate_all()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
16
senpy/api.py
16
senpy/api.py
@@ -1,9 +1,8 @@
|
||||
from future.utils import iteritems
|
||||
from .models import Error
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
from .models import Error
|
||||
|
||||
API_PARAMS = {
|
||||
"algorithm": {
|
||||
"aliases": ["algorithm", "a", "algo"],
|
||||
@@ -25,7 +24,7 @@ CLI_PARAMS = {
|
||||
"required": True,
|
||||
"default": "."
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
NIF_PARAMS = {
|
||||
"input": {
|
||||
@@ -96,10 +95,11 @@ def parse_params(indict, spec=NIF_PARAMS):
|
||||
outdict[param] not in spec[param]["options"]:
|
||||
wrong_params[param] = spec[param]
|
||||
if wrong_params:
|
||||
message = Error(status=404,
|
||||
message="Missing or invalid parameters",
|
||||
parameters=outdict,
|
||||
errors={param: error for param, error in
|
||||
iteritems(wrong_params)})
|
||||
message = Error(
|
||||
status=404,
|
||||
message="Missing or invalid parameters",
|
||||
parameters=outdict,
|
||||
errors={param: error
|
||||
for param, error in iteritems(wrong_params)})
|
||||
raise message
|
||||
return outdict
|
||||
|
@@ -17,12 +17,12 @@
|
||||
"""
|
||||
Blueprints for Senpy
|
||||
"""
|
||||
from flask import Blueprint, request, current_app, render_template, url_for, jsonify
|
||||
from flask import (Blueprint, request, current_app,
|
||||
render_template, url_for, jsonify)
|
||||
from .models import Error, Response, Plugins, read_schema
|
||||
from .api import NIF_PARAMS, WEB_PARAMS, parse_params
|
||||
from .api import WEB_PARAMS, parse_params
|
||||
from functools import wraps
|
||||
|
||||
import json
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -30,6 +30,7 @@ logger = logging.getLogger(__name__)
|
||||
api_blueprint = Blueprint("api", __name__)
|
||||
demo_blueprint = Blueprint("demo", __name__)
|
||||
|
||||
|
||||
def get_params(req):
|
||||
if req.method == 'POST':
|
||||
indict = req.form.to_dict(flat=True)
|
||||
@@ -44,17 +45,20 @@ def get_params(req):
|
||||
def index():
|
||||
return render_template("index.html")
|
||||
|
||||
|
||||
@api_blueprint.route('/contexts/<entity>.jsonld')
|
||||
def context(entity="context"):
|
||||
return jsonify({"@context": Response.context})
|
||||
|
||||
|
||||
@api_blueprint.route('/schemas/<schema>')
|
||||
def schema(schema="definitions"):
|
||||
try:
|
||||
return jsonify(read_schema(schema))
|
||||
except Exception: # Should be FileNotFoundError, but it's missing from py2
|
||||
except Exception: # Should be FileNotFoundError, but it's missing from py2
|
||||
return Error(message="Schema not found", status=404).flask()
|
||||
|
||||
|
||||
def basic_api(f):
|
||||
@wraps(f)
|
||||
def decorated_function(*args, **kwargs):
|
||||
@@ -73,12 +77,15 @@ def basic_api(f):
|
||||
response = ex
|
||||
in_headers = web_params["inHeaders"] != "0"
|
||||
headers = {'X-ORIGINAL-PARAMS': raw_params}
|
||||
return response.flask(in_headers=in_headers,
|
||||
headers=headers,
|
||||
context_uri=url_for('api.context', entity=type(response).__name__,
|
||||
_external=True))
|
||||
return response.flask(
|
||||
in_headers=in_headers,
|
||||
headers=headers,
|
||||
context_uri=url_for(
|
||||
'api.context', entity=type(response).__name__, _external=True))
|
||||
|
||||
return decorated_function
|
||||
|
||||
|
||||
@api_blueprint.route('/', methods=['POST', 'GET'])
|
||||
@basic_api
|
||||
def api():
|
||||
@@ -93,13 +100,12 @@ def plugins():
|
||||
dic = Plugins(plugins=list(sp.plugins.values()))
|
||||
return dic
|
||||
|
||||
|
||||
@api_blueprint.route('/plugins/<plugin>/', methods=['POST', 'GET'])
|
||||
@api_blueprint.route('/plugins/<plugin>/<action>', methods=['POST', 'GET'])
|
||||
@basic_api
|
||||
def plugin(plugin=None, action="list"):
|
||||
filt = {}
|
||||
sp = current_app.senpy
|
||||
plugs = sp.filter_plugins(name=plugin)
|
||||
if plugin == 'default' and sp.default_plugin:
|
||||
response = sp.default_plugin
|
||||
plugin = response.name
|
||||
@@ -110,15 +116,8 @@ def plugin(plugin=None, action="list"):
|
||||
if action == "list":
|
||||
return response
|
||||
method = "{}_plugin".format(action)
|
||||
if(hasattr(sp, method)):
|
||||
if (hasattr(sp, method)):
|
||||
getattr(sp, method)(plugin)
|
||||
return Response(message="Ok")
|
||||
else:
|
||||
return Error(message="action '{}' not allowed".format(action))
|
||||
|
||||
if __name__ == '__main__':
|
||||
import config
|
||||
|
||||
app.register_blueprint(api_blueprint)
|
||||
app.debug = config.DEBUG
|
||||
app.run(host='0.0.0.0', port=5000)
|
||||
|
@@ -3,6 +3,7 @@ from .models import Error
from .api import parse_params, CLI_PARAMS
from .extensions import Senpy


def argv_to_dict(argv):
    '''Turns parameters in the form of '--key value' into a dict {'key': 'value'}
    '''
@@ -11,13 +12,14 @@ def argv_to_dict(argv):
    for i in range(len(argv)):
        if argv[i][0] == '-':
            key = argv[i].strip('-')
            value = argv[i+1] if len(argv)>i+1 else None
            value = argv[i + 1] if len(argv) > i + 1 else None
            if value and value[0] == '-':
                cli_dict[key] = ""
            else:
                cli_dict[key] = value
    return cli_dict


def parse_cli(argv):
    cli_dict = argv_to_dict(argv)
    cli_params = parse_params(cli_dict, spec=CLI_PARAMS)
@@ -34,6 +36,7 @@ def main_function(argv):
    res = sp.analyse(**cli_dict)
    return res


def main():
    '''This method is the entrypoint for the CLI (as configured in setup.py)
    '''
@@ -47,4 +50,3 @@ def main():

if __name__ == '__main__':
    main()
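For reference, here is a small, hypothetical illustration of what argv_to_dict returns for a typical command line; the flag names and values are arbitrary examples and the import path assumes the file above is senpy/cli.py:

.. code:: python

   from senpy.cli import argv_to_dict

   # '--input' is followed by a value; '--default-plugins' is followed by
   # another flag, so it maps to an empty string.
   print(argv_to_dict(['--input', 'hello', '--default-plugins', '-f', 'plugins']))
   # {'input': 'hello', 'default-plugins': '', 'f': 'plugins'}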
senpy/client.py (new file, 45 lines)
@@ -0,0 +1,45 @@
import requests
import logging
from . import models

logger = logging.getLogger(__name__)


class Client(object):

    def __init__(self, endpoint):
        self.endpoint = endpoint

    def analyse(self, input, method='GET', **kwargs):
        return self.request('/', method=method, input=input, **kwargs)

    def request(self, path=None, method='GET', **params):
        url = '{}{}'.format(self.endpoint, path)
        response = requests.request(method=method,
                                    url=url,
                                    params=params)
        try:
            resp = models.from_dict(response.json())
            resp.validate(resp)
            return resp
        except Exception as ex:
            logger.error(('There seems to be a problem with the response:\n'
                          '\tURL: {url}\n'
                          '\tError: {error}\n'
                          '\t\n'
                          '#### Response:\n'
                          '\tCode: {code}'
                          '\tContent: {content}'
                          '\n').format(error=ex,
                                       url=url,
                                       code=response.status_code,
                                       content=response.content))
            raise ex


if __name__ == '__main__':
    c = Client('http://senpy.cluster.gsi.dit.upm.es/api/')
    resp = c.analyse('hello')
    # print(resp)
    print(resp.entries)
    resp.validate()
@@ -2,17 +2,15 @@
|
||||
"""
|
||||
from future import standard_library
|
||||
standard_library.install_aliases()
|
||||
import gevent
|
||||
from gevent import monkey
|
||||
monkey.patch_all()
|
||||
|
||||
from .plugins import SenpyPlugin, SentimentPlugin, EmotionPlugin
|
||||
from .plugins import SentimentPlugin
|
||||
from .models import Error
|
||||
from .blueprints import api_blueprint, demo_blueprint
|
||||
from .api import API_PARAMS, NIF_PARAMS, parse_params
|
||||
|
||||
from git import Repo, InvalidGitRepositoryError
|
||||
from functools import partial
|
||||
|
||||
from threading import Thread
|
||||
|
||||
import os
|
||||
import fnmatch
|
||||
@@ -21,17 +19,19 @@ import sys
|
||||
import imp
|
||||
import logging
|
||||
import traceback
|
||||
import gevent
|
||||
import json
|
||||
import yaml
|
||||
import pip
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Senpy(object):
|
||||
|
||||
""" Default Senpy extension for Flask """
|
||||
|
||||
def __init__(self, app=None, plugin_folder="plugins", default_plugins=False):
|
||||
def __init__(self,
|
||||
app=None,
|
||||
plugin_folder="plugins",
|
||||
default_plugins=False):
|
||||
self.app = app
|
||||
|
||||
self._search_folders = set()
|
||||
@@ -79,22 +79,24 @@ class Senpy(object):
|
||||
elif self.plugins:
|
||||
algo = self.default_plugin and self.default_plugin.name
|
||||
if not algo:
|
||||
raise Error(status=404,
|
||||
message=("No plugins found."
|
||||
" Please install one.").format(algo))
|
||||
raise Error(
|
||||
status=404,
|
||||
message=("No plugins found."
|
||||
" Please install one.").format(algo))
|
||||
if algo not in self.plugins:
|
||||
logger.debug(("The algorithm '{}' is not valid\n"
|
||||
"Valid algorithms: {}").format(algo,
|
||||
self.plugins.keys()))
|
||||
raise Error(status=404,
|
||||
message="The algorithm '{}' is not valid"
|
||||
.format(algo))
|
||||
raise Error(
|
||||
status=404,
|
||||
message="The algorithm '{}' is not valid".format(algo))
|
||||
|
||||
if not self.plugins[algo].is_activated:
|
||||
logger.debug("Plugin not activated: {}".format(algo))
|
||||
raise Error(status=400,
|
||||
message=("The algorithm '{}'"
|
||||
" is not activated yet").format(algo))
|
||||
raise Error(
|
||||
status=400,
|
||||
message=("The algorithm '{}'"
|
||||
" is not activated yet").format(algo))
|
||||
plug = self.plugins[algo]
|
||||
nif_params = parse_params(params, spec=NIF_PARAMS)
|
||||
extra_params = plug.get('extra_params', {})
|
||||
@@ -119,9 +121,8 @@ class Senpy(object):
|
||||
return None
|
||||
|
||||
def parameters(self, algo):
|
||||
return getattr(self.plugins.get(algo) or self.default_plugin,
|
||||
"extra_params",
|
||||
{})
|
||||
return getattr(
|
||||
self.plugins.get(algo) or self.default_plugin, "extra_params", {})
|
||||
|
||||
def activate_all(self, sync=False):
|
||||
ps = []
|
||||
@@ -145,47 +146,53 @@ class Senpy(object):
|
||||
try:
|
||||
plugin = self.plugins[plugin_name]
|
||||
except KeyError:
|
||||
raise Error(message="Plugin not found: {}".format(plugin_name),
|
||||
status=404)
|
||||
raise Error(
|
||||
message="Plugin not found: {}".format(plugin_name), status=404)
|
||||
|
||||
logger.info("Activating plugin: {}".format(plugin.name))
|
||||
|
||||
def act():
|
||||
success = False
|
||||
try:
|
||||
plugin.activate()
|
||||
logger.info("Plugin activated: {}".format(plugin.name))
|
||||
msg = "Plugin activated: {}".format(plugin.name)
|
||||
logger.info(msg)
|
||||
success = True
|
||||
self._set_active_plugin(plugin_name, success)
|
||||
except Exception as ex:
|
||||
logger.error("Error activating plugin {}: {}".format(plugin.name,
|
||||
ex))
|
||||
logger.error("Trace: {}".format(traceback.format_exc()))
|
||||
th = gevent.spawn(act)
|
||||
th.link_value(partial(self._set_active_plugin, plugin_name, True))
|
||||
msg = "Error activating plugin {} - {} : \n\t{}".format(
|
||||
plugin.name, ex, traceback.format_exc())
|
||||
logger.error(msg)
|
||||
raise Error(msg)
|
||||
if sync:
|
||||
th.join()
|
||||
act()
|
||||
else:
|
||||
return th
|
||||
th = Thread(target=act)
|
||||
th.start()
|
||||
|
||||
def deactivate_plugin(self, plugin_name, sync=False):
|
||||
try:
|
||||
plugin = self.plugins[plugin_name]
|
||||
except KeyError:
|
||||
raise Error(message="Plugin not found: {}".format(plugin_name),
|
||||
status=404)
|
||||
raise Error(
|
||||
message="Plugin not found: {}".format(plugin_name), status=404)
|
||||
|
||||
self._set_active_plugin(plugin_name, False)
|
||||
|
||||
def deact():
|
||||
try:
|
||||
plugin.deactivate()
|
||||
logger.info("Plugin deactivated: {}".format(plugin.name))
|
||||
except Exception as ex:
|
||||
logger.error("Error deactivating plugin {}: {}".format(plugin.name,
|
||||
ex))
|
||||
logger.error("Error deactivating plugin {}: {}".format(
|
||||
plugin.name, ex))
|
||||
logger.error("Trace: {}".format(traceback.format_exc()))
|
||||
|
||||
th = gevent.spawn(deact)
|
||||
th.link_value(partial(self._set_active_plugin, plugin_name, False))
|
||||
if sync:
|
||||
th.join()
|
||||
deact()
|
||||
else:
|
||||
return th
|
||||
th = Thread(target=deact)
|
||||
th.start()
|
||||
|
||||
def reload_plugin(self, name):
|
||||
logger.debug("Reloading {}".format(name))
|
||||
@@ -198,43 +205,71 @@ class Senpy(object):
|
||||
logger.error('Error reloading {}: {}'.format(name, ex))
|
||||
self.plugins[name] = plugin
|
||||
|
||||
@staticmethod
|
||||
def _load_plugin(root, filename):
|
||||
logger.debug("Loading plugin: {}".format(filename))
|
||||
fpath = os.path.join(root, filename)
|
||||
with open(fpath, 'r') as f:
|
||||
info = json.load(f)
|
||||
logger.debug("Info: {}".format(info))
|
||||
sys.path.append(root)
|
||||
@classmethod
|
||||
def validate_info(cls, info):
|
||||
return all(x in info for x in ('name', 'module', 'version'))
|
||||
|
||||
def install_deps(self):
|
||||
for i in self.plugins.values():
|
||||
self._install_deps(i._info)
|
||||
|
||||
@classmethod
|
||||
def _install_deps(cls, info=None):
|
||||
requirements = info.get('requirements', [])
|
||||
if requirements:
|
||||
pip_args = []
|
||||
pip_args.append('install')
|
||||
for req in requirements:
|
||||
pip_args.append(req)
|
||||
logger.info('Installing requirements: ' + str(requirements))
|
||||
pip.main(pip_args)
|
||||
|
||||
@classmethod
|
||||
def _load_plugin_from_info(cls, info, root):
|
||||
if not cls.validate_info(info):
|
||||
logger.warn('The module info is not valid.\n\t{}'.format(info))
|
||||
return None, None
|
||||
module = info["module"]
|
||||
name = info["name"]
|
||||
sys.path.append(root)
|
||||
(fp, pathname, desc) = imp.find_module(module, [root, ])
|
||||
try:
|
||||
cls._install_deps(info)
|
||||
tmp = imp.load_module(module, fp, pathname, desc)
|
||||
sys.path.remove(root)
|
||||
candidate = None
|
||||
for _, obj in inspect.getmembers(tmp):
|
||||
if inspect.isclass(obj) and inspect.getmodule(obj) == tmp:
|
||||
logger.debug(("Found plugin class:"
|
||||
" {}@{}").format(obj, inspect.getmodule(obj))
|
||||
)
|
||||
" {}@{}").format(obj, inspect.getmodule(
|
||||
obj)))
|
||||
candidate = obj
|
||||
break
|
||||
if not candidate:
|
||||
logger.debug("No valid plugin for: {}".format(filename))
|
||||
logger.debug("No valid plugin for: {}".format(module))
|
||||
return
|
||||
module = candidate(info=info)
|
||||
try:
|
||||
repo_path = root
|
||||
module._repo = Repo(repo_path)
|
||||
except InvalidGitRepositoryError:
|
||||
module._repo = None
|
||||
repo_path = root
|
||||
module._repo = Repo(repo_path)
|
||||
except InvalidGitRepositoryError:
|
||||
logger.debug("The plugin {} is not in a Git repository".format(
|
||||
module))
|
||||
module._repo = None
|
||||
except Exception as ex:
|
||||
logger.error("Exception importing {}: {}".format(filename, ex))
|
||||
logger.error("Exception importing {}: {}".format(module, ex))
|
||||
logger.error("Trace: {}".format(traceback.format_exc()))
|
||||
return None, None
|
||||
return name, module
|
||||
|
||||
@classmethod
|
||||
def _load_plugin(cls, root, filename):
|
||||
fpath = os.path.join(root, filename)
|
||||
logger.debug("Loading plugin: {}".format(fpath))
|
||||
with open(fpath, 'r') as f:
|
||||
info = yaml.load(f)
|
||||
logger.debug("Info: {}".format(info))
|
||||
return cls._load_plugin_from_info(info, root)
|
||||
|
||||
def _load_plugins(self):
|
||||
plugins = {}
|
||||
for search_folder in self._search_folders:
|
||||
@@ -262,8 +297,7 @@ class Senpy(object):
|
||||
|
||||
def matches(plug):
|
||||
res = all(getattr(plug, k, None) == v for (k, v) in kwargs.items())
|
||||
logger.debug("matching {} with {}: {}".format(plug.name,
|
||||
kwargs,
|
||||
logger.debug("matching {} with {}: {}".format(plug.name, kwargs,
|
||||
res))
|
||||
return res
|
||||
|
||||
@@ -274,5 +308,8 @@ class Senpy(object):
|
||||
|
||||
def sentiment_plugins(self):
|
||||
""" Return only the sentiment plugins """
|
||||
return {p: plugin for p, plugin in self.plugins.items() if
|
||||
isinstance(plugin, SentimentPlugin)}
|
||||
return {
|
||||
p: plugin
|
||||
for p, plugin in self.plugins.items()
|
||||
if isinstance(plugin, SentimentPlugin)
|
||||
}
|
||||
|
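Tying the loader above together: _load_plugin parses the ".senpy" definition file with yaml.load and hands the resulting dict to _load_plugin_from_info, which rejects it unless validate_info finds the name, module and version keys. A hypothetical definition dict that passes that check (all values are made up for the example):

.. code:: python

   from senpy.extensions import Senpy

   # Parsed contents of a hypothetical "example.senpy" definition file
   # (yaml.load would produce an equivalent dict).
   info = {
       'name': 'example',             # plugin name, also used for the plugin id
       'module': 'example',           # python module that contains the plugin class
       'version': '0.1',
       'requirements': ['requests'],  # optional; installed by Senpy.install_deps()
       'extra_params': {},            # optional extra parameters for analyse()
   }

   assert Senpy.validate_info(info)   # True: 'name', 'module' and 'version' are present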
211
senpy/models.py
211
senpy/models.py
@@ -2,8 +2,8 @@
|
||||
Senpy Models.
|
||||
|
||||
This implementation should mirror the JSON schema definition.
|
||||
For compatibility with Py3 and for easier debugging, this new version drops introspection
|
||||
and adds all arguments to the models.
|
||||
For compatibility with Py3 and for easier debugging, this new version drops
|
||||
introspection and adds all arguments to the models.
|
||||
'''
|
||||
from __future__ import print_function
|
||||
from six import string_types
|
||||
@@ -12,34 +12,40 @@ import time
|
||||
import copy
|
||||
import json
|
||||
import os
|
||||
import logging
|
||||
import jsonref
|
||||
import jsonschema
|
||||
|
||||
from flask import Response as FlaskResponse
|
||||
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
DEFINITIONS_FILE = 'definitions.json'
|
||||
CONTEXT_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'schemas', 'context.jsonld')
|
||||
CONTEXT_PATH = os.path.join(
|
||||
os.path.dirname(os.path.realpath(__file__)), 'schemas', 'context.jsonld')
|
||||
|
||||
|
||||
def get_schema_path(schema_file, absolute=False):
|
||||
if absolute:
|
||||
return os.path.realpath(schema_file)
|
||||
else:
|
||||
return os.path.join(os.path.dirname(os.path.realpath(__file__)), 'schemas', schema_file)
|
||||
return os.path.join(
|
||||
os.path.dirname(os.path.realpath(__file__)), 'schemas',
|
||||
schema_file)
|
||||
|
||||
|
||||
def read_schema(schema_file, absolute=False):
|
||||
schema_path = get_schema_path(schema_file, absolute)
|
||||
schema_uri = 'file://{}'.format(schema_path)
|
||||
return jsonref.load(open(schema_path), base_uri=schema_uri)
|
||||
with open(schema_path) as f:
|
||||
return jsonref.load(f, base_uri=schema_uri)
|
||||
|
||||
|
||||
base_schema = read_schema(DEFINITIONS_FILE)
|
||||
logging.debug(base_schema)
|
||||
|
||||
|
||||
class Context(dict):
|
||||
|
||||
@staticmethod
|
||||
def load(context):
|
||||
logging.debug('Loading context: {}'.format(context))
|
||||
@@ -61,15 +67,14 @@ class Context(dict):
|
||||
else:
|
||||
raise AttributeError('Please, provide a valid context')
|
||||
|
||||
|
||||
base_context = Context.load(CONTEXT_PATH)
|
||||
|
||||
|
||||
class SenpyMixin(object):
|
||||
context = base_context["@context"]
|
||||
|
||||
def flask(self,
|
||||
in_headers=False,
|
||||
headers=None,
|
||||
**kwargs):
|
||||
def flask(self, in_headers=True, headers=None, **kwargs):
|
||||
"""
|
||||
Return the values and error to be used in flask.
|
||||
So far, it returns a fixed context. We should store/generate different
|
||||
@@ -86,33 +91,34 @@ class SenpyMixin(object):
|
||||
'rel="http://www.w3.org/ns/json-ld#context";'
|
||||
' type="application/ld+json"' % url)
|
||||
})
|
||||
return FlaskResponse(json.dumps(js, indent=2, sort_keys=True),
|
||||
status=getattr(self, "status", 200),
|
||||
headers=headers,
|
||||
mimetype="application/json")
|
||||
|
||||
return FlaskResponse(
|
||||
json.dumps(
|
||||
js, indent=2, sort_keys=True),
|
||||
status=getattr(self, "status", 200),
|
||||
headers=headers,
|
||||
mimetype="application/json")
|
||||
|
||||
def serializable(self):
|
||||
def ser_or_down(item):
|
||||
if hasattr(item, 'serializable'):
|
||||
return item.serializable()
|
||||
elif isinstance(item, dict):
|
||||
temp = dict()
|
||||
for kp in item:
|
||||
vp = item[kp]
|
||||
temp[kp] = ser_or_down(vp)
|
||||
return temp
|
||||
elif isinstance(item, list):
|
||||
return list(ser_or_down(i) for i in item)
|
||||
else:
|
||||
return item
|
||||
return ser_or_down(self._plain_dict())
|
||||
if hasattr(item, 'serializable'):
|
||||
return item.serializable()
|
||||
elif isinstance(item, dict):
|
||||
temp = dict()
|
||||
for kp in item:
|
||||
vp = item[kp]
|
||||
temp[kp] = ser_or_down(vp)
|
||||
return temp
|
||||
elif isinstance(item, list):
|
||||
return list(ser_or_down(i) for i in item)
|
||||
else:
|
||||
return item
|
||||
|
||||
return ser_or_down(self._plain_dict())
|
||||
|
||||
def jsonld(self, with_context=True, context_uri=None):
|
||||
ser = self.serializable()
|
||||
|
||||
if with_context:
|
||||
if with_context:
|
||||
context = []
|
||||
if context_uri:
|
||||
context = context_uri
|
||||
@@ -120,9 +126,9 @@ class SenpyMixin(object):
|
||||
context = self.context.copy()
|
||||
if hasattr(self, 'prefix'):
|
||||
# This sets @base for the document, which will be used in
|
||||
# all relative URIs will. For example, if a uri is "Example" and
|
||||
# prefix =s "http://example.com", the absolute URI after expanding
|
||||
# with JSON-LD will be "http://example.com/Example"
|
||||
# all relative URIs. For example, if a uri is "Example" and
|
||||
# prefix =s "http://example.com", the absolute URI after
|
||||
# expanding with JSON-LD will be "http://example.com/Example"
|
||||
|
||||
prefix_context = {"@base": self.prefix}
|
||||
if isinstance(context, list):
|
||||
@@ -132,10 +138,8 @@ class SenpyMixin(object):
|
||||
ser["@context"] = context
|
||||
return ser
|
||||
|
||||
|
||||
def to_JSON(self, *args, **kwargs):
|
||||
js = json.dumps(self.jsonld(*args, **kwargs), indent=4,
|
||||
sort_keys=True)
|
||||
js = json.dumps(self.jsonld(*args, **kwargs), indent=4, sort_keys=True)
|
||||
return js
|
||||
|
||||
def validate(self, obj=None):
|
||||
@@ -145,34 +149,41 @@ class SenpyMixin(object):
|
||||
obj = obj.jsonld()
|
||||
jsonschema.validate(obj, self.schema)
|
||||
|
||||
class SenpyModel(SenpyMixin, dict):
|
||||
def __str__(self):
|
||||
return str(self.to_JSON())
|
||||
|
||||
|
||||
class BaseModel(SenpyMixin, dict):
|
||||
|
||||
schema = base_schema
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.id = kwargs.pop('id', '{}_{}'.format(type(self).__name__,
|
||||
time.time()))
|
||||
|
||||
if 'id' in kwargs:
|
||||
self.id = kwargs.pop('id')
|
||||
elif kwargs.pop('_auto_id', True):
|
||||
self.id = '_:{}_{}'.format(
|
||||
type(self).__name__, time.time())
|
||||
temp = dict(*args, **kwargs)
|
||||
|
||||
for obj in [self.schema, ] + self.schema.get('allOf', []):
|
||||
for k, v in obj.get('properties', {}).items():
|
||||
if 'default' in v:
|
||||
temp[k] = copy.deepcopy(v['default'])
|
||||
|
||||
for i in temp:
|
||||
nk = self._get_key(i)
|
||||
if nk != i:
|
||||
temp[nk] = temp[i]
|
||||
del temp[i]
|
||||
|
||||
reqs = self.schema.get('required', [])
|
||||
for i in reqs:
|
||||
if i not in temp:
|
||||
prop = self.schema['properties'][i]
|
||||
if 'default' in prop:
|
||||
temp[i] = copy.deepcopy(prop['default'])
|
||||
if 'context' in temp:
|
||||
context = temp['context']
|
||||
del temp['context']
|
||||
self.__dict__['context'] = Context.load(context)
|
||||
super(SenpyModel, self).__init__(temp)
|
||||
|
||||
try:
|
||||
temp['@type'] = getattr(self, '@type')
|
||||
except AttributeError:
|
||||
logger.warn('Creating an instance of an unknown model')
|
||||
super(BaseModel, self).__init__(temp)
|
||||
|
||||
def _get_key(self, key):
|
||||
key = key.replace("__", ":", 1)
|
||||
@@ -181,7 +192,6 @@ class SenpyModel(SenpyMixin, dict):
|
||||
def __setitem__(self, key, value):
|
||||
dict.__setitem__(self, key, value)
|
||||
|
||||
|
||||
def __delitem__(self, key):
|
||||
dict.__delitem__(self, key)
|
||||
|
||||
@@ -197,52 +207,87 @@ class SenpyModel(SenpyMixin, dict):
|
||||
def __delattr__(self, key):
|
||||
self.__delitem__(self._get_key(key))
|
||||
|
||||
|
||||
def _plain_dict(self):
|
||||
d = { k: v for (k,v) in self.items() if k[0] != "_"}
|
||||
d["@id"] = d.pop('id')
|
||||
d = {k: v for (k, v) in self.items() if k[0] != "_"}
|
||||
if 'id' in d:
|
||||
d["@id"] = d.pop('id')
|
||||
return d
|
||||
|
||||
class Response(SenpyModel):
|
||||
schema = read_schema('response.json')
|
||||
|
||||
class Results(SenpyModel):
|
||||
schema = read_schema('results.json')
|
||||
_subtypes = {}
|
||||
|
||||
class Entry(SenpyModel):
|
||||
schema = read_schema('entry.json')
|
||||
|
||||
class Sentiment(SenpyModel):
|
||||
schema = read_schema('sentiment.json')
|
||||
def register(rsubclass, rtype=None):
|
||||
_subtypes[rtype or rsubclass.__name__] = rsubclass
|
||||
|
||||
class Analysis(SenpyModel):
|
||||
schema = read_schema('analysis.json')
|
||||
|
||||
class EmotionSet(SenpyModel):
|
||||
schema = read_schema('emotionSet.json')
|
||||
def from_dict(indict):
|
||||
target = indict.get('@type', None)
|
||||
if target and target in _subtypes:
|
||||
cls = _subtypes[target]
|
||||
else:
|
||||
cls = BaseModel
|
||||
return cls(**indict)
|
||||
|
||||
class Emotion(SenpyModel):
|
||||
schema = read_schema('emotion.json')
|
||||
|
||||
class Suggestion(SenpyModel):
|
||||
schema = read_schema('suggestion.json')
|
||||
def from_schema(name, schema_file=None, base_classes=None):
|
||||
base_classes = base_classes or []
|
||||
base_classes.append(BaseModel)
|
||||
schema_file = schema_file or '{}.json'.format(name)
|
||||
class_name = '{}{}'.format(i[0].upper(), i[1:])
|
||||
newclass = type(class_name, tuple(base_classes), {})
|
||||
setattr(newclass, '@type', name)
|
||||
setattr(newclass, 'schema', read_schema(schema_file))
|
||||
register(newclass, name)
|
||||
return newclass
|
||||
|
||||
class PluginModel(SenpyModel):
|
||||
schema = read_schema('plugin.json')
|
||||
|
||||
class Plugins(SenpyModel):
|
||||
schema = read_schema('plugins.json')
|
||||
def _add_from_schema(*args, **kwargs):
|
||||
generatedClass = from_schema(*args, **kwargs)
|
||||
globals()[generatedClass.__name__] = generatedClass
|
||||
del generatedClass
|
||||
|
||||
class Error(SenpyMixin, BaseException ):
|
||||
|
||||
def __init__(self, message, status=500, params=None, errors=None, *args, **kwargs):
|
||||
for i in ['response',
|
||||
'results',
|
||||
'entry',
|
||||
'sentiment',
|
||||
'analysis',
|
||||
'emotionSet',
|
||||
'emotion',
|
||||
'emotionModel',
|
||||
'suggestion',
|
||||
'plugin',
|
||||
'emotionPlugin',
|
||||
'sentimentPlugin',
|
||||
'plugins']:
|
||||
_add_from_schema(i)
|
||||
|
||||
_ErrorModel = from_schema('error')
|
||||
|
||||
|
||||
class Error(SenpyMixin, BaseException):
|
||||
def __init__(self,
|
||||
message,
|
||||
*args,
|
||||
**kwargs):
|
||||
super(Error, self).__init__(self, message, message)
|
||||
self._error = _ErrorModel(message=message, *args, **kwargs)
|
||||
self.message = message
|
||||
self.status = status
|
||||
self.params = params or {}
|
||||
self.errors = errors or ""
|
||||
|
||||
def _plain_dict(self):
|
||||
return self.__dict__
|
||||
def __getattr__(self, key):
|
||||
if key != '_error' and hasattr(self._error, key):
|
||||
return getattr(self._error, key)
|
||||
raise AttributeError(key)
|
||||
|
||||
def __str__(self):
|
||||
return str(self.jsonld())
|
||||
def __setattr__(self, key, value):
|
||||
if key != '_error':
|
||||
return setattr(self._error, key, value)
|
||||
else:
|
||||
super(Error, self).__setattr__(key, value)
|
||||
|
||||
def __delattr__(self, key):
|
||||
delattr(self._error, key)
|
||||
|
||||
|
||||
register(Error, 'error')
|
||||
|
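The from_schema/register machinery above generates model classes such as Results, Entry and Sentiment from their JSON schema files, and from_dict rebuilds the matching class from a plain dictionary by looking at its '@type'. A small sketch of that round trip (the literal strings are illustrative):

.. code:: python

   from senpy import models

   entry = models.Entry(nif__isString='My favourite actress is Natalie Portman')
   results = models.Results()
   results.entries.append(entry)
   print(results.to_JSON())        # JSON-LD document with @context, @id and @type

   # from_dict picks the registered class that matches the '@type' field.
   rebuilt = models.from_dict({'@type': 'results'})
   print(type(rebuilt).__name__)   # 'Results'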
@@ -5,16 +5,17 @@ import inspect
|
||||
import os.path
|
||||
import pickle
|
||||
import logging
|
||||
from .models import Response, PluginModel, Error
|
||||
import tempfile
|
||||
from . import models
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class SenpyPlugin(PluginModel):
|
||||
|
||||
class SenpyPlugin(models.Plugin):
|
||||
def __init__(self, info=None):
|
||||
if not info:
|
||||
raise Error(message=("You need to provide configuration"
|
||||
"information for the plugin."))
|
||||
raise models.Error(message=("You need to provide configuration"
|
||||
"information for the plugin."))
|
||||
logger.debug("Initialising {}".format(info))
|
||||
super(SenpyPlugin, self).__init__(info)
|
||||
self.id = '{}_{}'.format(self.name, self.version)
|
||||
@@ -38,8 +39,8 @@ class SenpyPlugin(PluginModel):
|
||||
''' Destructor, to make sure all the resources are freed '''
|
||||
self.deactivate()
|
||||
|
||||
class SentimentPlugin(SenpyPlugin):
|
||||
|
||||
class SentimentPlugin(SenpyPlugin, models.SentimentPlugin):
|
||||
def __init__(self, info, *args, **kwargs):
|
||||
super(SentimentPlugin, self).__init__(info, *args, **kwargs)
|
||||
self.minPolarityValue = float(info.get("minPolarityValue", 0))
|
||||
@@ -47,17 +48,14 @@ class SentimentPlugin(SenpyPlugin):
|
||||
self["@type"] = "marl:SentimentAnalysis"
|
||||
|
||||
|
||||
class EmotionPlugin(SenpyPlugin):
|
||||
|
||||
class EmotionPlugin(SentimentPlugin, models.EmotionPlugin):
|
||||
def __init__(self, info, *args, **kwargs):
|
||||
resp = super(EmotionPlugin, self).__init__(info, *args, **kwargs)
|
||||
self.minEmotionValue = float(info.get("minEmotionValue", 0))
|
||||
self.maxEmotionValue = float(info.get("maxEmotionValue", 0))
|
||||
self["@type"] = "onyx:EmotionAnalysis"
|
||||
|
||||
|
||||
class ShelfMixin(object):
|
||||
|
||||
@property
|
||||
def sh(self):
|
||||
if not hasattr(self, '_sh') or self._sh is None:
|
||||
@@ -73,22 +71,18 @@ class ShelfMixin(object):
|
||||
del self.__dict__['_sh']
|
||||
self.save()
|
||||
|
||||
def __del__(self):
|
||||
self.save()
|
||||
super(ShelfMixin, self).__del__()
|
||||
|
||||
@property
|
||||
def shelf_file(self):
|
||||
if not hasattr(self, '_shelf_file') or not self._shelf_file:
|
||||
if hasattr(self, '_info') and 'shelf_file' in self._info:
|
||||
self.__dict__['_shelf_file'] = self._info['shelf_file']
|
||||
else:
|
||||
self._shelf_file = os.path.join(self.get_folder(), self.name + '.p')
|
||||
self._shelf_file = os.path.join(tempfile.gettempdir(),
|
||||
self.name + '.p')
|
||||
return self._shelf_file
|
||||
|
||||
def save(self):
|
||||
logger.debug('closing pickle')
|
||||
logger.debug('saving pickle')
|
||||
if hasattr(self, '_sh') and self._sh is not None:
|
||||
with open(self.shelf_file, 'wb') as f:
|
||||
pickle.dump(self._sh, f)
|
||||
del(self.__dict__['_sh'])
|
||||
|
@@ -1,4 +1,3 @@
|
||||
import json
|
||||
import random
|
||||
|
||||
from senpy.plugins import SentimentPlugin
|
||||
@@ -16,26 +15,15 @@ class Sentiment140Plugin(SentimentPlugin):
|
||||
polarity = "marl:Positive"
|
||||
elif polarity_value < 0:
|
||||
polarity = "marl:Negative"
|
||||
entry = Entry({"id":":Entry0",
|
||||
"nif:isString": params["input"]})
|
||||
sentiment = Sentiment({"id": ":Sentiment0",
|
||||
"marl:hasPolarity": polarity,
|
||||
"marl:polarityValue": polarity_value})
|
||||
entry = Entry({"id": ":Entry0", "nif:isString": params["input"]})
|
||||
sentiment = Sentiment({
|
||||
"id": ":Sentiment0",
|
||||
"marl:hasPolarity": polarity,
|
||||
"marl:polarityValue": polarity_value
|
||||
})
|
||||
sentiment["prov:wasGeneratedBy"] = self.id
|
||||
entry.sentiments = []
|
||||
entry.sentiments.append(sentiment)
|
||||
entry.language = lang
|
||||
response.entries.append(entry)
|
||||
return response
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
@@ -9,16 +9,17 @@ class Sentiment140Plugin(SentimentPlugin):
|
||||
def analyse(self, **params):
|
||||
lang = params.get("language", "auto")
|
||||
res = requests.post("http://www.sentiment140.com/api/bulkClassifyJson",
|
||||
json.dumps({"language": lang,
|
||||
"data": [{"text": params["input"]}]
|
||||
}
|
||||
)
|
||||
)
|
||||
json.dumps({
|
||||
"language": lang,
|
||||
"data": [{
|
||||
"text": params["input"]
|
||||
}]
|
||||
}))
|
||||
|
||||
p = params.get("prefix", None)
|
||||
response = Results(prefix=p)
|
||||
polarity_value = self.maxPolarityValue*int(res.json()["data"][0]
|
||||
["polarity"]) * 0.25
|
||||
polarity_value = self.maxPolarityValue * int(res.json()["data"][0][
|
||||
"polarity"]) * 0.25
|
||||
polarity = "marl:Neutral"
|
||||
neutral_value = self.maxPolarityValue / 2.0
|
||||
if polarity_value > neutral_value:
|
||||
@@ -26,12 +27,12 @@ class Sentiment140Plugin(SentimentPlugin):
|
||||
elif polarity_value < neutral_value:
|
||||
polarity = "marl:Negative"
|
||||
|
||||
entry = Entry(id="Entry0",
|
||||
nif__isString=params["input"])
|
||||
sentiment = Sentiment(id="Sentiment0",
|
||||
prefix=p,
|
||||
marl__hasPolarity=polarity,
|
||||
marl__polarityValue=polarity_value)
|
||||
entry = Entry(id="Entry0", nif__isString=params["input"])
|
||||
sentiment = Sentiment(
|
||||
id="Sentiment0",
|
||||
prefix=p,
|
||||
marl__hasPolarity=polarity,
|
||||
marl__polarityValue=polarity_value)
|
||||
sentiment.prov__wasGeneratedBy = self.id
|
||||
entry.sentiments = []
|
||||
entry.sentiments.append(sentiment)
|
||||
|
7
senpy/schemas/\
Normal file
7
senpy/schemas/\
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"description": "Senpy analysis",
|
||||
"allOf": [{
|
||||
"$ref": "atom.json"
|
||||
}]
|
||||
}
|
@@ -1,4 +1,15 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"$ref": "definitions.json#/Analysis"
|
||||
"description": "Senpy analysis",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"@id": {
|
||||
"type": "string"
|
||||
},
|
||||
"@type": {
|
||||
"type": "string",
|
||||
"description": "Type of the analysis. e.g. marl:SentimentAnalysis"
|
||||
}
|
||||
},
|
||||
"required": ["@id", "@type"]
|
||||
}
|
||||
|
15
senpy/schemas/atom.json
Normal file
15
senpy/schemas/atom.json
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"description": "Base schema for all Senpy objects",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"@id": {
|
||||
"type": "string"
|
||||
},
|
||||
"@type": {
|
||||
"type": "string",
|
||||
"description": "Type of the atom. e.g., 'onyx:EmotionAnalysis', 'nif:Entry'"
|
||||
}
|
||||
},
|
||||
"required": ["@id", "@type"]
|
||||
}
|
5
senpy/schemas/context.json
Normal file
5
senpy/schemas/context.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"description": "JSON-LD Context",
|
||||
"type": ["array", "string", "object"]
|
||||
}
|

@@ -16,16 +16,20 @@
      "@id": "me:hasEntities"
    },
    "suggestions": {
      "@id": "me:hasSuggestions"
      "@id": "me:hasSuggestions",
      "@container": "@set"
    },
    "emotions": {
      "@id": "onyx:hasEmotionSet"
      "@id": "onyx:hasEmotionSet",
      "@container": "@set"
    },
    "sentiments": {
      "@id": "marl:hasOpinion"
      "@id": "marl:hasOpinion",
      "@container": "@set"
    },
    "entries": {
      "@id": "prov:used"
      "@id": "prov:used",
      "@container": "@set"
    },
    "analysis": {
      "@id": "prov:wasGeneratedBy"
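Aside: "@container": "@set" tells a JSON-LD processor to treat these properties as unordered sets, so a single value keeps its array form after compaction. A rough illustration (not taken from the repository):

    # With the @set container, an entry with one sentiment still compacts to a list:
    entry_with_one_sentiment = {
        "@id": "Entry0",
        "sentiments": [
            {"@id": "Sentiment0", "marl:hasPolarity": "marl:Positive"}
        ]
    }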

@@ -1,169 +1,45 @@
{
  "$schema": "http://json-schema.org/draft-04/schema#",
  "Results": {
    "title": "Results",
    "description": "The results of an analysis",
    "type": "object",
    "properties": {
      "@context": {
        "$ref": "#/Context"
      },
      "@id": {
        "description": "ID of the analysis",
        "type": "string"
      },
      "analysis": {
        "type": "array",
        "default": [],
        "items": {
          "$ref": "#/Analysis"
        }
      },
      "entries": {
        "type": "array",
        "default": [],
        "items": {
          "$ref": "#/Entry"
        }
      }

    },
    "required": ["@id", "analysis", "entries"]
    "$ref": "results.json"
  },
  "Context": {
    "description": "JSON-LD Context",
    "type": ["array", "string", "object"]
    "$ref": "context.json"
  },
  "Analysis": {
    "description": "Senpy analysis",
    "type": "object",
    "properties": {
      "@id": {
        "type": "string"
      },
      "@type": {
        "type": "string",
        "description": "Type of the analysis. e.g. marl:SentimentAnalysis"
      }
    },
    "required": ["@id", "@type"]
    "$ref": "analysis.json"
  },
  "Entry": {
    "properties": {
      "@id": {
        "type": "string"
      },
      "@type": {
        "enum": [["nif:RFC5147String", "nif:Context"]]
      },
      "nif:isString": {
        "description": "String contained in this Context",
        "type": "string"
      },
      "sentiments": {
        "type": "array",
        "items": {"$ref": "#/Sentiment" }
      },
      "emotions": {
        "type": "array",
        "items": {"$ref": "#/EmotionSet" }
      },
      "entities": {
        "type": "array",
        "items": {"$ref": "#/Entity" }
      },
      "topics": {
        "type": "array",
        "items": {"$ref": "#/Topic" }
      },
      "suggestions": {
        "type": "array",
        "items": {"$ref": "#/Suggestion" }
      }
    },
    "required": ["@id", "nif:isString"]
    "$ref": "entry.json"
  },
  "Sentiment": {
    "properties": {
      "@id": {"type": "string"},
      "nif:beginIndex": {"type": "integer"},
      "nif:endIndex": {"type": "integer"},
      "nif:anchorOf": {
        "description": "Piece of context that contains the Sentiment",
        "type": "string"
      },
      "marl:hasPolarity": {
        "enum": ["marl:Positive", "marl:Negative", "marl:Neutral"]
      },
      "marl:polarityValue": {
        "type": "number"
      },
      "prov:wasGeneratedBy": {
        "type": "string",
        "description": "The ID of the analysis that generated this Sentiment. The full object should be included in the \"analysis\" property of the root object"
      }
    },
    "required": ["@id", "prov:wasGeneratedBy"]
    "$ref": "sentiment.json"
  },
  "EmotionSet": {
    "properties": {
      "@id": {"type": "string"},
      "nif:beginIndex": {"type": "integer"},
      "nif:endIndex": {"type": "integer"},
      "nif:anchorOf": {
        "description": "Piece of context that contains the Sentiment",
        "type": "string"
      },
      "onyx:hasEmotion": {
        "type": "array",
        "items": {
          "$ref": "#/Emotion"
        },
        "default": []
      },
      "prov:wasGeneratedBy": {
        "type": "string",
        "description": "The ID of the analysis that generated this Emotion. The full object should be included in the \"analysis\" property of the root object"
      }
    },
    "required": ["@id", "prov:wasGeneratedBy", "onyx:hasEmotion"]
    "$ref": "emotionSet.json"
  },
  "Emotion": {
    "type": "object"
    "$ref": "emotion.json"
  },
  "EmotionModel": {
    "$ref": "emotionModel.json"
  },
  "Entity": {
    "type": "object"
    "$ref": "entity.json"
  },
  "Topic": {
    "type": "object"
    "$ref": "topic.json"
  },
  "Suggestion": {
    "type": "object"
    "$ref": "suggestion.json"
  },
  "Plugins": {
    "properties": {
      "plugins": {
        "type": "array",
        "items": {
          "$ref": "#/Plugin"
        }
      }
    }
    "$ref": "plugin.json"
  },
  "Plugin": {
    "type": "object",
    "required": ["@id", "extra_params"],
    "properties": {
      "@id": {
        "type": "string"
      },
      "extra_params": {
        "type": "object",
        "default": {}
      }
    }
    "$ref": "plugin.json"
  },
  "Response": {
    "type": "object"
    "$ref": "response.json"
  }
}
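Note: with definitions.json reduced to per-model $refs, validation now resolves the individual schema files. A minimal sketch of that flow (the same approach the new tests/test_schemas.py further down uses; the working directory is assumed to be the repository root):

    import json
    import os
    from jsonschema import RefResolver, Draft4Validator

    schema_folder = os.path.abspath('senpy/schemas')   # assumes the repo root as cwd
    with open(os.path.join(schema_folder, 'entry.json')) as f:
        schema = json.load(f)
    resolver = RefResolver('file://' + schema_folder + '/', schema)
    validator = Draft4Validator(schema, resolver=resolver)
    validator.validate({"@id": "Entry0", "nif:isString": "Just testing"})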

9  senpy/schemas/dimensions.json  Normal file
@@ -0,0 +1,9 @@
{
  "$schema": "http://json-schema.org/draft-04/schema#",
  "properties": {
    "name": {"type": "string"},
    "maxValue": {"type": "number"},
    "minValue": {"type": "number"}
  },
  "required": ["name", "maxValue", "minValue"]
}

@@ -1,4 +1,4 @@
{
  "$schema": "http://json-schema.org/draft-04/schema#",
  "$ref": "definitions.json#/Emotion"
  "type": "object"
}

18  senpy/schemas/emotionAnalysis.json  Normal file
@@ -0,0 +1,18 @@
{
  "$schema": "http://json-schema.org/draft-04/schema#",
  "description": "Senpy Emotion analysis",
  "type": "object",
  "allOf": [
    {"$ref": "analysis.json"},
    {"properties":
      {
        "onyx:hasEmotionModel": {
          "anyOf": [
            {"type": "string"},
            {"$ref": "emotionModel.json"}
          ]
        }
      },
      "required": ["onyx:hasEmotionModel"]
    }]
}

27  senpy/schemas/emotionModel.json  Normal file
@@ -0,0 +1,27 @@
{
  "$schema": "http://json-schema.org/draft-04/schema#",
  "properties": {
    "@id": {"type": "string"},
    "nif:beginIndex": {"type": "integer"},
    "nif:endIndex": {"type": "integer"},
    "nif:anchorOf": {
      "description": "Piece of context that contains the Sentiment",
      "type": "string"
    },
    "onyx:hasDimension": {
      "type": "array",
      "items": {
        "$ref": "dimensions.json"
      },
      "uniqueItems": true
    },
    "onyx:hasEmotionCategory": {
      "type": "array",
      "items": {
        "$ref": "emotion.json"
      },
      "default": []
    }
  },
  "required": ["@id", "onyx:hasEmotion"]
}

19  senpy/schemas/emotionPlugin.json  Normal file
@@ -0,0 +1,19 @@
{
  "$schema": "http://json-schema.org/draft-04/schema#",
  "type": "object",
  "$allOf": [
    {
      "$ref": "plugin.json"
    },
    {
      "properties": {
        "onyx:usesEmotionModel": {
          "type": "array",
          "items": {
            "$ref": "emotionModel.json"
          }
        }
      }
    }
  ]
}

@@ -1,4 +1,24 @@
{
  "$schema": "http://json-schema.org/draft-04/schema#",
  "$ref": "definitions.json#/EmotionSet"
  "properties": {
    "@id": {"type": "string"},
    "nif:beginIndex": {"type": "integer"},
    "nif:endIndex": {"type": "integer"},
    "nif:anchorOf": {
      "description": "Piece of context that contains the Sentiment",
      "type": "string"
    },
    "onyx:hasEmotion": {
      "type": "array",
      "items": {
        "$ref": "emotion.json"
      },
      "default": []
    },
    "prov:wasGeneratedBy": {
      "type": "string",
      "description": "The ID of the analysis that generated this Emotion. The full object should be included in the \"analysis\" property of the root object"
    }
  },
  "required": ["@id", "prov:wasGeneratedBy", "onyx:hasEmotion"]
}

4  senpy/schemas/entity.json  Normal file
@@ -0,0 +1,4 @@
{
  "$schema": "http://json-schema.org/draft-04/schema#",
  "type": "object"
}

@@ -1,4 +1,34 @@
{
  "$schema": "http://json-schema.org/draft-04/schema#",
  "$ref": "definitions.json#/Entry"
  "name": "Entry",
  "properties": {
    "@id": {
      "type": "string"
    },
    "nif:isString": {
      "description": "String contained in this Context",
      "type": "string"
    },
    "sentiments": {
      "type": "array",
      "items": {"$ref": "sentiment.json" }
    },
    "emotions": {
      "type": "array",
      "items": {"$ref": "emotionSet.json" }
    },
    "entities": {
      "type": "array",
      "items": {"$ref": "entity.json" }
    },
    "topics": {
      "type": "array",
      "items": {"$ref": "topic.json" }
    },
    "suggestions": {
      "type": "array",
      "items": {"$ref": "suggestion.json" }
    }
  },
  "required": ["@id", "nif:isString"]
}

23  senpy/schemas/error.json  Normal file
@@ -0,0 +1,23 @@
{
  "$schema": "http://json-schema.org/draft-04/schema#",
  "description": "Base schema for all Senpy objects",
  "type": "object",
  "$allOf": [
    {"$ref": "atom.json"},
    {
      "properties": {
        "message": {
          "type": "string"
        },
        "errors": {
          "type": "list",
          "items": {"type": "object"}
        },
        "code": {
          "type": "int"
        },
        "required": ["message"]
      }
    }
  ]
}

@@ -1,3 +1,19 @@
{
  "$ref": "definitions.json#/Plugin"
  "$schema": "http://json-schema.org/draft-04/schema#",
  "type": "object",
  "required": ["@id", "extra_params"],
  "properties": {
    "@id": {
      "type": "string",
      "description": "Unique identifier for the plugin, usually comprised of the name of the plugin and the version."
    },
    "name": {
      "type": "string",
      "description": "The name of the plugin, which will be used in the algorithm detection phase"
    },
    "extra_params": {
      "type": "object",
      "default": {}
    }
  }
}
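For reference, a minimal object that satisfies the expanded plugin.json above; the name_version convention for "@id" matches what the tests further down expect:

    # "@id" and "extra_params" are required by plugin.json; "name" is optional.
    dummy_plugin_info = {
        "@id": "Dummy_0.1",    # plugin name plus version, as asserted in the tests below
        "name": "Dummy",
        "extra_params": {}
    }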

@@ -1,3 +1,18 @@
{
  "$ref": "definitions.json#/Plugins"
  "$schema": "http://json-schema.org/draft-04/schema#",
  "allOf": [
    {"$ref": "response.json"},
    {
      "properties": {
        "plugins": {
          "type": "array",
          "items": {
            "$ref": "plugin.json"
          }
        },
        "@type": {
        }
      }
    }
  ]
}

@@ -1,4 +1,9 @@
{
  "$schema": "http://json-schema.org/draft-04/schema#",
  "$ref": "definitions.json#/Response"
  "type": "object",
  "properties": {
    "@type": {"type": "string"}
  },
  "required": ["@type"]

}

@@ -1,4 +1,39 @@
{
  "$schema": "http://json-schema.org/draft-04/schema#",
  "$ref": "definitions.json#/Results"
  "allOf": [
    {"$ref": "response.json"},
    {
      "title": "Results",
      "description": "The results of an analysis",
      "type": "object",
      "properties": {
        "@context": {
          "$ref": "context.json"
        },
        "@type": {
          "default": "results"
        },
        "@id": {
          "description": "ID of the analysis",
          "type": "string"
        },
        "analysis": {
          "type": "array",
          "default": [],
          "items": {
            "$ref": "analysis.json"
          }
        },
        "entries": {
          "type": "array",
          "default": [],
          "items": {
            "$ref": "entry.json"
          }
        }

      },
      "required": ["@id", "analysis", "entries"]
    }
  ]
}

@@ -1,4 +1,23 @@
{
  "$schema": "http://json-schema.org/draft-04/schema#",
  "$ref": "definitions.json#/Sentiment"
  "properties": {
    "@id": {"type": "string"},
    "nif:beginIndex": {"type": "integer"},
    "nif:endIndex": {"type": "integer"},
    "nif:anchorOf": {
      "description": "Piece of context that contains the Sentiment",
      "type": "string"
    },
    "marl:hasPolarity": {
      "enum": ["marl:Positive", "marl:Negative", "marl:Neutral"]
    },
    "marl:polarityValue": {
      "type": "number"
    },
    "prov:wasGeneratedBy": {
      "type": "string",
      "description": "The ID of the analysis that generated this Sentiment. The full object should be included in the \"analysis\" property of the root object"
    }
  },
  "required": ["@id", "prov:wasGeneratedBy"]
}

19  senpy/schemas/sentimentPlugin.json  Normal file
@@ -0,0 +1,19 @@
{
  "$schema": "http://json-schema.org/draft-04/schema#",
  "type": "object",
  "$allOf": [
    {
      "$ref": "plugin.json"
    },
    {
      "properties": {
        "marl:minPolarityValue": {
          "type": "number"
        },
        "marl:maxPolarityValue": {
          "type": "number"
        }
      }
    }
  ]
}

@@ -1,4 +1,4 @@
{
  "$schema": "http://json-schema.org/draft-04/schema#",
  "$ref": "definitions.json#/Suggestion"
  "type": "object"
}

4  senpy/schemas/topic.json  Normal file
@@ -0,0 +1,4 @@
{
  "$schema": "http://json-schema.org/draft-04/schema#",
  "type": "object"
}

4  senpy/version.py  Normal file
@@ -0,0 +1,4 @@
import os

with open(os.path.join(os.path.dirname(__file__), 'VERSION')) as f:
    __version__ = f.read().strip()

@@ -2,3 +2,8 @@
description-file = README.rst
[aliases]
test=pytest
[flake8]
# because of the way that future works, we need to call install_aliases before
# finishing the imports. flake8 thinks that we're doing the imports too late,
# but it's actually ok
ignore = E402

29  setup.py
@@ -4,8 +4,10 @@ from pip.req import parse_requirements
# parse_requirements() returns generator of pip.req.InstallRequirement objects

try:
    install_reqs = parse_requirements("requirements.txt", session=pip.download.PipSession())
    test_reqs = parse_requirements("test-requirements.txt", session=pip.download.PipSession())
    install_reqs = parse_requirements(
        "requirements.txt", session=pip.download.PipSession())
    test_reqs = parse_requirements(
        "test-requirements.txt", session=pip.download.PipSession())
except AttributeError:
    install_reqs = parse_requirements("requirements.txt")
    test_reqs = parse_requirements("test-requirements.txt")
@@ -15,30 +17,27 @@ except AttributeError:
install_reqs = [str(ir.req) for ir in install_reqs]
test_reqs = [str(ir.req) for ir in test_reqs]

exec(open('senpy/__init__.py').read())
from senpy import __version__

setup(
    name='senpy',
    packages=['senpy'],  # this must be the same as the name above
    version=__version__,
    description='''
A sentiment analysis server implementation. Designed to be \
extendable, so new algorithms and sources can be used.
''',
    description=('A sentiment analysis server implementation. '
                 'Designed to be extensible, so new algorithms '
                 'and sources can be used.'),
    author='J. Fernando Sanchez',
    author_email='balkian@gmail.com',
    url='https://github.com/gsi-upm/senpy',  # use the URL to the github repo
    download_url='https://github.com/gsi-upm/senpy/archive/{}.tar.gz' .format(__version__),
    download_url='https://github.com/gsi-upm/senpy/archive/{}.tar.gz'.format(
        __version__),
    keywords=['eurosentiment', 'sentiment', 'emotions', 'nif'],
    classifiers=[],
    install_requires=install_reqs,
    tests_require=test_reqs,
    setup_requires=['pytest-runner',],
    setup_requires=['pytest-runner', ],
    include_package_data=True,
    entry_points={
        'console_scripts': [
            'senpy = senpy.__main__:main',
            'senpy-cli = senpy.cli:main'
        ]
    }
)
        'console_scripts':
        ['senpy = senpy.__main__:main', 'senpy-cli = senpy.cli:main']
    })
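Note: setup.py now imports __version__ instead of exec-ing senpy/__init__.py, so the package must expose it; the new senpy/version.py reads it from a VERSION file. A rough sketch of the expected wiring (the __init__.py re-export is an assumption, not shown in this compare):

    # senpy/VERSION      -> plain-text version string, e.g. "0.4.0" (assumed)
    # senpy/version.py   -> reads VERSION and defines __version__ (added above)
    # senpy/__init__.py  -> from .version import __version__   (assumed)
    from senpy import __version__
    print(__version__)   # used for both version= and download_url in setup()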

@@ -3,6 +3,5 @@ from senpy.models import Results

class DummyPlugin(SentimentPlugin):

    def analyse(self, *args, **kwargs):
        return Results()

@@ -4,7 +4,6 @@ from time import sleep

class SleepPlugin(SenpyPlugin):

    def activate(self, *args, **kwargs):
        sleep(self.timeout)

@@ -1,9 +1,10 @@
import os
import logging
import json

from senpy.extensions import Senpy
from flask import Flask
from flask.ext.testing import TestCase
from unittest import TestCase
from gevent import sleep
from itertools import product

@@ -12,30 +13,37 @@ def check_dict(indic, template):
    return all(item in indic.items() for item in template.items())


class BlueprintsTest(TestCase):
def parse_resp(resp):
    return json.loads(resp.data.decode('utf-8'))

    def create_app(self):

class BlueprintsTest(TestCase):
    def setUp(self):
        self.app = Flask("test_extensions")
        self.client = self.app.test_client()
        self.senpy = Senpy()
        self.senpy.init_app(self.app)
        self.dir = os.path.join(os.path.dirname(__file__), "..")
        self.senpy.add_folder(self.dir)
        self.senpy.activate_plugin("Dummy", sync=True)
        return self.app

    def assertCode(self, resp, code):
        self.assertEqual(resp.status_code, code)

    def test_home(self):
        """
        Calling with no arguments should ask the user for more arguments
        """
        resp = self.client.get("/api/")
        self.assert404(resp)
        logging.debug(resp.json)
        assert resp.json["status"] == 404
        self.assertCode(resp, 404)
        js = parse_resp(resp)
        logging.debug(js)
        assert js["status"] == 404
        atleast = {
            "status": 404,
            "message": "Missing or invalid parameters",
        }
        assert check_dict(resp.json, atleast)
        assert check_dict(js, atleast)

    def test_analysis(self):
        """
@@ -43,81 +51,93 @@ class BlueprintsTest(TestCase):
        it should contain the context
        """
        resp = self.client.get("/api/?i=My aloha mohame")
        self.assert200(resp)
        logging.debug("Got response: %s", resp.json)
        assert "@context" in resp.json
        assert "entries" in resp.json
        self.assertCode(resp, 200)
        js = parse_resp(resp)
        logging.debug("Got response: %s", js)
        assert "@context" in js
        assert "entries" in js

    def test_list(self):
        """ List the plugins """
        resp = self.client.get("/api/plugins/")
        self.assert200(resp)
        logging.debug(resp.json)
        assert 'plugins' in resp.json
        plugins = resp.json['plugins']
        self.assertCode(resp, 200)
        js = parse_resp(resp)
        logging.debug(js)
        assert 'plugins' in js
        plugins = js['plugins']
        assert len(plugins) > 1
        assert list(p for p in plugins if p['name'] == "Dummy")
        assert "@context" in resp.json
        assert "@context" in js

    def test_headers(self):
        for i, j in product(["/api/plugins/?nothing=", "/api/?i=test&"],
                            ["inHeaders"]):
            resp = self.client.get("%s" % (i))
            assert "@context" in resp.json
            js = parse_resp(resp)
            assert "@context" in js
            resp = self.client.get("%s&%s=0" % (i, j))
            assert "@context" in resp.json
            js = parse_resp(resp)
            assert "@context" in js
            resp = self.client.get("%s&%s=1" % (i, j))
            assert "@context" not in resp.json
            js = parse_resp(resp)
            assert "@context" not in js
            resp = self.client.get("%s&%s=true" % (i, j))
            assert "@context" not in resp.json
            js = parse_resp(resp)
            assert "@context" not in js

    def test_detail(self):
        """ Show only one plugin"""
        resp = self.client.get("/api/plugins/Dummy/")
        self.assert200(resp)
        logging.debug(resp.json)
        assert "@id" in resp.json
        assert resp.json["@id"] == "Dummy_0.1"
        self.assertCode(resp, 200)
        js = parse_resp(resp)
        logging.debug(js)
        assert "@id" in js
        assert js["@id"] == "Dummy_0.1"

    def test_activate(self):
        """ Activate and deactivate one plugin """
        resp = self.client.get("/api/plugins/Dummy/deactivate")
        self.assert200(resp)
        self.assertCode(resp, 200)
        sleep(0.5)
        resp = self.client.get("/api/plugins/Dummy/")
        self.assert200(resp)
        assert "is_activated" in resp.json
        assert resp.json["is_activated"] == False
        self.assertCode(resp, 200)
        js = parse_resp(resp)
        assert "is_activated" in js
        assert not js["is_activated"]
        resp = self.client.get("/api/plugins/Dummy/activate")
        self.assert200(resp)
        self.assertCode(resp, 200)
        sleep(0.5)
        resp = self.client.get("/api/plugins/Dummy/")
        self.assert200(resp)
        assert "is_activated" in resp.json
        assert resp.json["is_activated"] == True
        self.assertCode(resp, 200)
        js = parse_resp(resp)
        assert "is_activated" in js
        assert js["is_activated"]

    def test_default(self):
        """ Show only one plugin"""
        resp = self.client.get("/api/plugins/default/")
        self.assert200(resp)
        logging.debug(resp.json)
        assert "@id" in resp.json
        assert resp.json["@id"] == "Dummy_0.1"
        self.assertCode(resp, 200)
        js = parse_resp(resp)
        logging.debug(js)
        assert "@id" in js
        assert js["@id"] == "Dummy_0.1"
        resp = self.client.get("/api/plugins/Dummy/deactivate")
        self.assert200(resp)
        self.assertCode(resp, 200)
        sleep(0.5)
        resp = self.client.get("/api/plugins/default/")
        self.assert404(resp)
        self.assertCode(resp, 404)

    def test_context(self):
        resp = self.client.get("/api/contexts/context.jsonld")
        self.assert200(resp)
        assert "@context" in resp.json
        self.assertCode(resp, 200)
        js = parse_resp(resp)
        assert "@context" in js
        assert check_dict(
            resp.json["@context"],
            js["@context"],
            {"marl": "http://www.gsi.dit.upm.es/ontologies/marl/ns#"})

    def test_schema(self):
        resp = self.client.get("/api/schemas/definitions.json")
        self.assert200(resp)
        assert "$schema" in resp.json
        self.assertCode(resp, 200)
        js = parse_resp(resp)
        assert "$schema" in js

@@ -1,7 +1,11 @@
import os
import logging
from functools import partial

try:
    from unittest.mock import patch
except ImportError:
    from mock import patch

logger = logging.getLogger(__name__)

from unittest import TestCase
@@ -10,12 +14,14 @@ from senpy.models import Error


class CLITest(TestCase):

    def test_basic(self):
        self.assertRaises(Error, partial(main_function, []))
        res = main_function(['--input', 'test'])
        assert 'entries' in res
        res = main_function(['--input', 'test', '--algo', 'rand'])
        assert 'entries' in res
        assert 'analysis' in res
        assert res['analysis'][0]['name'] == 'rand'

        with patch('senpy.extensions.Senpy.analyse') as patched:
            main_function(['--input', 'test'])

        patched.assert_called_with(input='test')
        with patch('senpy.extensions.Senpy.analyse') as patched:
            main_function(['--input', 'test', '--algo', 'rand'])

        patched.assert_called_with(input='test', algo='rand')

42  tests/test_client.py  Normal file
@@ -0,0 +1,42 @@
from unittest import TestCase
try:
    from unittest.mock import patch
except ImportError:
    from mock import patch

from senpy.client import Client
from senpy.models import Results, Error


class Call(dict):

    def __init__(self, obj):
        self.obj = obj.jsonld()

    def json(self):
        return self.obj


class ModelsTest(TestCase):
    def setUp(self):
        self.host = '0.0.0.0'
        self.port = 5000

    def test_client(self):
        endpoint = 'http://dummy/'
        client = Client(endpoint)
        success = Call(Results())
        with patch('requests.request', return_value=success) as patched:
            resp = client.analyse('hello')
        assert isinstance(resp, Results)
        patched.assert_called_with(url=endpoint + '/',
                                   method='GET',
                                   params={'input': 'hello'})
        error = Call(Error('Nothing'))
        with patch('requests.request', return_value=error) as patched:
            resp = client.analyse(input='hello', algorithm='NONEXISTENT')
        assert isinstance(resp, Error)
        patched.assert_called_with(url=endpoint + '/',
                                   method='GET',
                                   params={'input': 'hello',
                                           'algorithm': 'NONEXISTENT'})
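Note: the mocked calls above suggest the intended usage against a live server; a hedged sketch, assuming a senpy instance is reachable at the given endpoint (the URL here is an assumption):

    from senpy.client import Client
    from senpy.models import Results, Error

    client = Client('http://localhost:5000/api/')   # hypothetical endpoint
    resp = client.analyse('hello')
    assert isinstance(resp, (Results, Error))       # errors come back as Error models
    print(resp.jsonld())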

@@ -6,18 +6,16 @@
from senpy.extensions import Senpy
from senpy.models import Error
from flask import Flask
from flask.ext.testing import TestCase
from unittest import TestCase


class ExtensionsTest(TestCase):

    def create_app(self):
    def setUp(self):
        self.app = Flask("test_extensions")
        self.dir = os.path.join(os.path.dirname(__file__))
        self.senpy = Senpy(plugin_folder=self.dir, default_plugins=False)
        self.senpy.init_app(self.app)
        self.senpy.activate_plugin("Dummy", sync=True)
        return self.app

    def test_init(self):
        """ Initialising the app with the extension. """
@@ -34,6 +32,21 @@ class ExtensionsTest(TestCase):
        assert "Dummy" in self.senpy.plugins

    def test_enabling(self):
        """ Enabling a plugin """
        info = {
            'name': 'TestPip',
            'module': 'dummy',
            'requirements': ['noop'],
            'version': 0
        }
        root = os.path.join(self.dir, 'dummy_plugin')
        name, module = self.senpy._load_plugin_from_info(info, root=root)
        assert name == 'TestPip'
        assert module
        import noop
        dir(noop)

    def test_installing(self):
        """ Enabling a plugin """
        self.senpy.activate_all(sync=True)
        assert len(self.senpy.plugins) == 2
@@ -75,4 +88,5 @@ class ExtensionsTest(TestCase):
        assert self.senpy.filter_plugins(name="Dummy", is_activated=True)
        self.senpy.deactivate_plugin("Dummy", sync=True)
        assert not len(
            self.senpy.filter_plugins(name="Dummy", is_activated=True))
            self.senpy.filter_plugins(
                name="Dummy", is_activated=True))

@@ -1,20 +1,16 @@
import os
import logging

import jsonschema

import json
import os
from unittest import TestCase
from senpy.models import Response, Entry, Results, Sentiment, EmotionSet, Emotion, Error
from senpy.models import Entry, Results, Sentiment, EmotionSet, Error
from senpy.plugins import SenpyPlugin
from pprint import pprint


class ModelsTest(TestCase):

    def test_jsonld(self):
        ctx = os.path.normpath(os.path.join(__file__, "..", "..", "..", "senpy", "schemas", "context.jsonld"))
        prueba = {"id": "test",
                  "analysis": [],
                  "entries": []}
@@ -27,28 +23,32 @@ class ModelsTest(TestCase):
        j = r.jsonld(with_context=True)
        print("As JSON:")
        pprint(j)
        assert("@context" in j)
        assert("marl" in j["@context"])
        assert("entries" in j["@context"])
        assert(j["@id"] == "test")
        assert ("@context" in j)
        assert ("marl" in j["@context"])
        assert ("entries" in j["@context"])
        assert (j["@id"] == "test")
        assert "id" not in j

        r6 = Results(**prueba)
        r6.entries.append(Entry({"@id":"ohno", "nif:isString":"Just testing"}))
        e = Entry({
            "@id": "ohno",
            "nif:isString": "Just testing"
        })
        r6.entries.append(e)
        logging.debug("Reponse 6: %s", r6)
        assert("marl" in r6.context)
        assert("entries" in r6.context)
        assert ("marl" in r6.context)
        assert ("entries" in r6.context)
        j6 = r6.jsonld(with_context=True)
        logging.debug("jsonld: %s", j6)
        assert("@context" in j6)
        assert("entries" in j6)
        assert("analysis" in j6)
        assert ("@context" in j6)
        assert ("entries" in j6)
        assert ("analysis" in j6)
        resp = r6.flask()
        received = json.loads(resp.data.decode())
        logging.debug("Response: %s", j6)
        assert(received["entries"])
        assert(received["entries"][0]["nif:isString"] == "Just testing")
        assert(received["entries"][0]["nif:isString"] != "Not testing")
        assert (received["entries"])
        assert (received["entries"][0]["nif:isString"] == "Just testing")
        assert (received["entries"][0]["nif:isString"] != "Not testing")

    def test_id(self):
        ''' Adding the id after creation should overwrite the automatic ID
@@ -61,7 +61,6 @@ class ModelsTest(TestCase):
        assert j2['@id'] == 'test'
        assert 'id' not in j2


    def test_entries(self):
        e = Entry()
        self.assertRaises(jsonschema.ValidationError, e.validate)
@@ -103,5 +102,16 @@ class ModelsTest(TestCase):
        logging.debug(c)
        p.validate()

    def test_str(self):
        """The string representation shouldn't include private variables"""
        r = Results()
        p = SenpyPlugin({"name": "STR test", "version": 0})
        p._testing = 0
        s = str(p)
        assert "_testing" not in s
        r.analysis.append(p)
        s = str(r)
        assert "_testing" not in s

    def test_frame_response(self):
        pass

@@ -1,31 +1,34 @@
#!/bin/env python

import os
import logging
import pickle
import shutil
import tempfile

import json
import os
from unittest import TestCase
from senpy.models import Results, Entry
from senpy.plugins import SenpyPlugin, ShelfMixin
from senpy.plugins import SentimentPlugin, ShelfMixin


class ShelfTest(ShelfMixin, SenpyPlugin):
class ShelfDummyPlugin(SentimentPlugin, ShelfMixin):
    def activate(self, *args, **kwargs):
        if 'counter' not in self.sh:
            self.sh['counter'] = 0
        self.save()

    def test(self, key=None, value=None):
        assert key in self.sh
        print('Checking: sh[{}] == {}'.format(key, value))
        print('SH[{}]: {}'.format(key, self.sh[key]))
        assert self.sh[key] == value



class ModelsTest(TestCase):
    def deactivate(self, *args, **kwargs):
        self.save()

    def analyse(self, *args, **kwargs):
        self.sh['counter'] = self.sh['counter'] + 1
        e = Entry()
        e.nif__isString = self.sh['counter']
        r = Results()
        r.entries.append(e)
        return r


class PluginsTest(TestCase):
    def tearDown(self):
        if os.path.exists(self.shelf_dir):
            shutil.rmtree(self.shelf_dir)
@@ -37,16 +40,28 @@ class ModelsTest(TestCase):
        self.shelf_dir = tempfile.mkdtemp()
        self.shelf_file = os.path.join(self.shelf_dir, "shelf")

    def test_shelf_file(self):
        a = ShelfDummyPlugin(
            info={'name': 'default_shelve_file',
                  'version': 'test'})
        a.activate()
        assert os.path.isfile(a.shelf_file)
        os.remove(a.shelf_file)

    def test_shelf(self):
        ''' A shelf is created and the value is stored '''
        a = ShelfTest(info={'name': 'shelve',
                            'version': 'test',
                            'shelf_file': self.shelf_file})
        a = ShelfDummyPlugin(info={
            'name': 'shelve',
            'version': 'test',
            'shelf_file': self.shelf_file
        })
        assert a.sh == {}
        a.activate()
        assert a.sh == {'counter': 0}
        assert a.shelf_file == self.shelf_file

        a.sh['a'] = 'fromA'
        a.test(key='a', value='fromA')
        assert a.sh['a'] == 'fromA'

        a.save()

@@ -54,19 +69,37 @@ class ModelsTest(TestCase):

        assert sh['a'] == 'fromA'

    def test_dummy_shelf(self):
        a = ShelfDummyPlugin(info={
            'name': 'DummyShelf',
            'shelf_file': self.shelf_file,
            'version': 'test'
        })
        a.activate()

        res1 = a.analyse(input=1)
        assert res1.entries[0].nif__isString == 1
        res2 = a.analyse(input=1)
        assert res2.entries[0].nif__isString == 2

    def test_two(self):
        ''' Reusing the values of a previous shelf '''
        a = ShelfTest(info={'name': 'shelve',
                            'version': 'test',
                            'shelf_file': self.shelf_file})
        a = ShelfDummyPlugin(info={
            'name': 'shelve',
            'version': 'test',
            'shelf_file': self.shelf_file
        })
        a.activate()
        print('Shelf file: %s' % a.shelf_file)
        a.sh['a'] = 'fromA'
        a.save()

        b = ShelfTest(info={'name': 'shelve',
                            'version': 'test',
                            'shelf_file': self.shelf_file})
        b.test(key='a', value='fromA')
        b = ShelfDummyPlugin(info={
            'name': 'shelve',
            'version': 'test',
            'shelf_file': self.shelf_file
        })
        b.activate()
        assert b.sh['a'] == 'fromA'
        b.sh['a'] = 'fromB'
        assert b.sh['a'] == 'fromB'
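Note: the ShelfDummyPlugin tests above exercise the ShelfMixin contract: self.sh is a persistent dict backed by shelf_file, and save() flushes it. A condensed sketch of that lifecycle, reusing the plugin class defined in the test (the path is hypothetical):

    plugin = ShelfDummyPlugin(info={
        'name': 'shelve',
        'version': 'test',
        'shelf_file': '/tmp/senpy-shelf'   # hypothetical location
    })
    plugin.activate()           # initialises sh['counter'] = 0 and saves it
    plugin.sh['a'] = 'fromA'    # values live in the shelf dict
    plugin.save()               # persisted; another plugin with the same shelf_file sees 'fromA'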

60  tests/test_schemas.py  Normal file
@@ -0,0 +1,60 @@
from __future__ import print_function

import json
import unittest
import os
from os import path
from fnmatch import fnmatch

from jsonschema import RefResolver, Draft4Validator, ValidationError

root_path = path.join(path.dirname(path.realpath(__file__)), '..')
schema_folder = path.join(root_path, 'senpy', 'schemas')
examples_path = path.join(root_path, 'docs', 'examples')
bad_examples_path = path.join(root_path, 'docs', 'bad-examples')


class JSONSchemaTests(unittest.TestCase):
    pass


def do_create_(jsfile, success):
    def do_expected(self):
        with open(jsfile) as f:
            js = json.load(f)
        try:
            assert '@type' in js
            schema_name = js['@type']
            with open(os.path.join(schema_folder, schema_name +
                                   ".json")) as file_object:
                schema = json.load(file_object)
            resolver = RefResolver('file://' + schema_folder + '/', schema)
            validator = Draft4Validator(schema, resolver=resolver)
            validator.validate(js)
        except (AssertionError, ValidationError, KeyError) as ex:
            if success:
                raise

    return do_expected


def add_examples(dirname, success):
    for dirpath, dirnames, filenames in os.walk(dirname):
        for i in filenames:
            if fnmatch(i, '*.json'):
                filename = path.join(dirpath, i)
                test_method = do_create_(filename, success)
                test_method.__name__ = 'test_file_%s_success_%s' % (filename,
                                                                    success)
                test_method.__doc__ = '%s should %svalidate' % (filename, ''
                                                                if success else
                                                                'not')
                setattr(JSONSchemaTests, test_method.__name__, test_method)
                del test_method


add_examples(examples_path, True)
add_examples(bad_examples_path, False)

if __name__ == '__main__':
    unittest.main()
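Note: the docs/examples and docs/bad-examples files referenced here are outside this excerpt. Each JSON file in those folders becomes its own generated test; based on the lookup logic above (@type names the schema file) and the results.json schema earlier in this compare, a minimal valid example would look like:

    # Hypothetical docs/examples file content; "@type": "results" is resolved to senpy/schemas/results.json.
    minimal_example = {
        "@type": "results",
        "@id": "test",
        "analysis": [],
        "entries": []
    }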