mirror of
https://github.com/gsi-upm/senpy
synced 2025-10-19 01:38:28 +00:00
Compare commits
89 Commits
0.9.0a4
...
51-calcula
Author | SHA1 | Date | |
---|---|---|---|
|
d145a852e7 | ||
|
c090501534 | ||
|
6a1069780b | ||
|
41aa142ce0 | ||
|
b48730137d | ||
|
f1ec057b16 | ||
|
f6ca82cac8 | ||
|
318acd5a71 | ||
|
c8f6f5613d | ||
|
748d1a00bd | ||
|
a82e4ed440 | ||
|
c939b095de | ||
|
ca69bddc17 | ||
|
aa35e62a27 | ||
|
6dd4a44924 | ||
|
4291c5eabf | ||
|
7c7a815d1a | ||
|
a3eb8f196c | ||
|
00ffbb3804 | ||
|
13cf0c71c5 | ||
|
e5662d482e | ||
|
61181db199 | ||
|
a1663a3f31 | ||
|
83b23dbdf4 | ||
|
4675d9acf1 | ||
|
6832a2816d | ||
|
7a8abf1823 | ||
|
a21ce0d90e | ||
|
a964e586d7 | ||
|
bce42b5bb4 | ||
|
1313853788 | ||
|
697e779767 | ||
|
48f5ffafa1 | ||
|
73f7cbbe8a | ||
|
07a41236f8 | ||
|
55db97cf62 | ||
|
d8dead1908 | ||
|
87dcdb9fbc | ||
|
67ef4b60bd | ||
|
da4b11e5b5 | ||
|
c0aa7ddc3c | ||
|
5e2ada1654 | ||
|
7a188586c5 | ||
|
b768b215c5 | ||
|
d1f1b9a15a | ||
|
52a0f3f4c8 | ||
|
55c32dcd7c | ||
|
0093bc34d5 | ||
|
67bae9a20d | ||
|
551a5cb176 | ||
|
d6f4cc2dd2 | ||
|
4af692091a | ||
|
ec68ff0b90 | ||
|
738da490db | ||
|
d29c42fd2e | ||
|
23c88d0acc | ||
|
dcaaa591b7 | ||
|
15ab5f4c25 | ||
|
92189822d8 | ||
|
fbb418c365 | ||
|
081078ddd6 | ||
|
7c8dbf3262 | ||
|
41dc89b23b | ||
|
a951696317 | ||
|
1087692de2 | ||
|
3e2b8baeb2 | ||
|
21a5a3f201 | ||
|
abd401f863 | ||
|
bfc588a915 | ||
|
f93eed2cf5 | ||
|
0204e0b8e9 | ||
|
701f46b9f1 | ||
|
d1eca04eeb | ||
|
89f3a0eca9 | ||
|
df7efbc57d | ||
|
aa54d1c9c8 | ||
|
869c00f709 | ||
|
e329e84eef | ||
|
55be0e57da | ||
|
778746c5e8 | ||
|
19278d0acd | ||
|
694201d8d3 | ||
|
e8413fb645 | ||
|
390225df45 | ||
|
b03e03fd0a | ||
|
79e107bdcd | ||
|
c6e79fa50d | ||
|
f6bf7459a8 | ||
|
300f4c374a |
@@ -18,6 +18,8 @@ before_script:
|
|||||||
stage: test
|
stage: test
|
||||||
script:
|
script:
|
||||||
- make -e test-$PYTHON_VERSION
|
- make -e test-$PYTHON_VERSION
|
||||||
|
except:
|
||||||
|
- tags # Avoid unnecessary double testing
|
||||||
|
|
||||||
test-3.5:
|
test-3.5:
|
||||||
<<: *test_definition
|
<<: *test_definition
|
||||||
@@ -29,32 +31,23 @@ test-2.7:
|
|||||||
variables:
|
variables:
|
||||||
PYTHON_VERSION: "2.7"
|
PYTHON_VERSION: "2.7"
|
||||||
|
|
||||||
.image: &image_definition
|
push:
|
||||||
stage: push
|
stage: push
|
||||||
script:
|
script:
|
||||||
- make -e push-$PYTHON_VERSION
|
- make -e push
|
||||||
only:
|
only:
|
||||||
- tags
|
- tags
|
||||||
- triggers
|
- triggers
|
||||||
- fix-makefiles
|
- fix-makefiles
|
||||||
|
|
||||||
push-3.5:
|
|
||||||
<<: *image_definition
|
|
||||||
variables:
|
|
||||||
PYTHON_VERSION: "3.5"
|
|
||||||
|
|
||||||
push-2.7:
|
|
||||||
<<: *image_definition
|
|
||||||
variables:
|
|
||||||
PYTHON_VERSION: "2.7"
|
|
||||||
|
|
||||||
push-latest:
|
push-latest:
|
||||||
<<: *image_definition
|
stage: push
|
||||||
variables:
|
script:
|
||||||
PYTHON_VERSION: latest
|
- make -e push-latest
|
||||||
only:
|
only:
|
||||||
- master
|
- master
|
||||||
- triggers
|
- triggers
|
||||||
|
- fix-makefiles
|
||||||
|
|
||||||
push-github:
|
push-github:
|
||||||
stage: deploy
|
stage: deploy
|
||||||
@@ -63,11 +56,13 @@ push-github:
|
|||||||
only:
|
only:
|
||||||
- master
|
- master
|
||||||
- triggers
|
- triggers
|
||||||
|
- fix-makefiles
|
||||||
|
|
||||||
deploy_pypi:
|
deploy_pypi:
|
||||||
stage: deploy
|
stage: deploy
|
||||||
script: # Configure the PyPI credentials, then push the package, and cleanup the creds.
|
script: # Configure the PyPI credentials, then push the package, and cleanup the creds.
|
||||||
- echo "[server-login]" >> ~/.pypirc
|
- echo "[server-login]" >> ~/.pypirc
|
||||||
|
- echo "repository=https://upload.pypi.org/legacy/" >> ~/.pypirc
|
||||||
- echo "username=" ${PYPI_USER} >> ~/.pypirc
|
- echo "username=" ${PYPI_USER} >> ~/.pypirc
|
||||||
- echo "password=" ${PYPI_PASSWORD} >> ~/.pypirc
|
- echo "password=" ${PYPI_PASSWORD} >> ~/.pypirc
|
||||||
- make pip_upload
|
- make pip_upload
|
||||||
|
@@ -2,18 +2,16 @@ export
|
|||||||
NAME ?= $(shell basename $(CURDIR))
|
NAME ?= $(shell basename $(CURDIR))
|
||||||
VERSION ?= $(shell git describe --tags --dirty 2>/dev/null)
|
VERSION ?= $(shell git describe --tags --dirty 2>/dev/null)
|
||||||
|
|
||||||
|
ifeq ($(VERSION),)
|
||||||
|
VERSION:=unknown
|
||||||
|
endif
|
||||||
|
|
||||||
# Get the location of this makefile.
|
# Get the location of this makefile.
|
||||||
MK_DIR := $(dir $(abspath $(lastword $(MAKEFILE_LIST))))
|
MK_DIR := $(dir $(abspath $(lastword $(MAKEFILE_LIST))))
|
||||||
|
|
||||||
-include .env
|
-include .env
|
||||||
-include ../.env
|
-include ../.env
|
||||||
|
|
||||||
.FORCE:
|
|
||||||
|
|
||||||
version: .FORCE
|
|
||||||
@echo $(VERSION) > $(NAME)/VERSION
|
|
||||||
@echo $(VERSION)
|
|
||||||
|
|
||||||
help: ## Show this help.
|
help: ## Show this help.
|
||||||
@fgrep -h "##" $(MAKEFILE_LIST) | fgrep -v fgrep | sed -e 's/\\$$//' | sed -e 's/\(.*:\)[^#]*##\s*\(.*\)/\1\t\2/' | column -t -s " "
|
@fgrep -h "##" $(MAKEFILE_LIST) | fgrep -v fgrep | sed -e 's/\\$$//' | sed -e 's/\(.*:\)[^#]*##\s*\(.*\)/\1\t\2/' | column -t -s " "
|
||||||
|
|
||||||
@@ -35,4 +33,4 @@ include $(MK_DIR)/git.mk
|
|||||||
info:: ## List all variables
|
info:: ## List all variables
|
||||||
env
|
env
|
||||||
|
|
||||||
.PHONY:: config help ci version .FORCE
|
.PHONY:: config help ci
|
||||||
|
@@ -1,4 +1,14 @@
|
|||||||
IMAGEWTAG ?= $(IMAGENAME):$(VERSION)
|
ifndef IMAGENAME
|
||||||
|
ifdef CI_REGISTRY_IMAGE
|
||||||
|
IMAGENAME=$(CI_REGISTRY_IMAGE)
|
||||||
|
else
|
||||||
|
IMAGENAME=$(NAME)
|
||||||
|
endif
|
||||||
|
endif
|
||||||
|
|
||||||
|
IMAGEWTAG?=$(IMAGENAME):$(VERSION)
|
||||||
|
DOCKER_FLAGS?=$(-ti)
|
||||||
|
DOCKER_CMD?=
|
||||||
|
|
||||||
docker-login: ## Log in to the registry. It will only be used in the server, or when running a CI task locally (if CI_BUILD_TOKEN is set).
|
docker-login: ## Log in to the registry. It will only be used in the server, or when running a CI task locally (if CI_BUILD_TOKEN is set).
|
||||||
ifeq ($(CI_BUILD_TOKEN),)
|
ifeq ($(CI_BUILD_TOKEN),)
|
||||||
@@ -18,8 +28,24 @@ else
|
|||||||
@docker logout
|
@docker logout
|
||||||
endif
|
endif
|
||||||
|
|
||||||
|
docker-run: ## Build a generic docker image
|
||||||
|
docker run $(DOCKER_FLAGS) $(IMAGEWTAG) $(DOCKER_CMD)
|
||||||
|
|
||||||
|
docker-build: ## Build a generic docker image
|
||||||
|
docker build . -t $(IMAGEWTAG)
|
||||||
|
|
||||||
|
docker-push: docker-login ## Push a generic docker image
|
||||||
|
docker push $(IMAGEWTAG)
|
||||||
|
|
||||||
|
docker-latest-push: docker-login ## Push the latest image
|
||||||
|
docker tag $(IMAGEWTAG) $(IMAGENAME)
|
||||||
|
docker push $(IMAGENAME)
|
||||||
|
|
||||||
login:: docker-login
|
login:: docker-login
|
||||||
|
|
||||||
clean:: docker-clean
|
clean:: docker-clean
|
||||||
|
|
||||||
|
docker-info:
|
||||||
|
@echo IMAGEWTAG=${IMAGEWTAG}
|
||||||
|
|
||||||
.PHONY:: docker-login docker-clean login clean
|
.PHONY:: docker-login docker-clean login clean
|
||||||
|
@@ -13,16 +13,13 @@ git-pull:
|
|||||||
push-github: ## Push the code to github. You need to set up GITHUB_DEPLOY_KEY
|
push-github: ## Push the code to github. You need to set up GITHUB_DEPLOY_KEY
|
||||||
ifeq ($(GITHUB_DEPLOY_KEY),)
|
ifeq ($(GITHUB_DEPLOY_KEY),)
|
||||||
else
|
else
|
||||||
$(eval KEY_FILE := $(shell mktemp))
|
$(eval KEY_FILE := "$(shell mktemp)")
|
||||||
@echo "$(GITHUB_DEPLOY_KEY)" > $(KEY_FILE)
|
@printf '%b' '$(GITHUB_DEPLOY_KEY)' > $(KEY_FILE)
|
||||||
@git remote rm github-deploy || true
|
@git remote rm github-deploy || true
|
||||||
git remote add github-deploy $(GITHUB_REPO)
|
git remote add github-deploy $(GITHUB_REPO)
|
||||||
@GIT_SSH_COMMAND="ssh -i $(KEY_FILE)" git fetch github-deploy $(CI_COMMIT_REF_NAME) || true
|
-@GIT_SSH_COMMAND="ssh -i $(KEY_FILE)" git fetch github-deploy $(CI_COMMIT_REF_NAME)
|
||||||
@GIT_SSH_COMMAND="ssh -i $(KEY_FILE)" git push github-deploy $(CI_COMMIT_REF_NAME)
|
@GIT_SSH_COMMAND="ssh -i $(KEY_FILE)" git push github-deploy HEAD:$(CI_COMMIT_REF_NAME)
|
||||||
rm $(KEY_FILE)
|
rm $(KEY_FILE)
|
||||||
endif
|
endif
|
||||||
|
|
||||||
push:: git-push
|
.PHONY:: commit tag git-push git-pull push-github
|
||||||
pull:: git-pull
|
|
||||||
|
|
||||||
.PHONY:: commit tag push git-push git-pull push-github
|
|
||||||
|
@@ -13,7 +13,7 @@
|
|||||||
KUBE_CA_TEMP=false
|
KUBE_CA_TEMP=false
|
||||||
ifndef KUBE_CA_PEM_FILE
|
ifndef KUBE_CA_PEM_FILE
|
||||||
KUBE_CA_PEM_FILE:=$$PWD/.ca.crt
|
KUBE_CA_PEM_FILE:=$$PWD/.ca.crt
|
||||||
CREATED:=$(shell echo -e "$(KUBE_CA_BUNDLE)" > $(KUBE_CA_PEM_FILE))
|
CREATED:=$(shell printf '%b\n' '$(KUBE_CA_BUNDLE)' > $(KUBE_CA_PEM_FILE))
|
||||||
endif
|
endif
|
||||||
KUBE_TOKEN?=""
|
KUBE_TOKEN?=""
|
||||||
KUBE_NAMESPACE?=$(NAME)
|
KUBE_NAMESPACE?=$(NAME)
|
||||||
|
@@ -1,17 +1,15 @@
|
|||||||
makefiles-remote:
|
makefiles-remote:
|
||||||
@git remote add makefiles ssh://git@lab.cluster.gsi.dit.upm.es:2200/docs/templates/makefiles.git 2>/dev/null || true
|
git ls-remote --exit-code makefiles 2> /dev/null || git remote add makefiles ssh://git@lab.cluster.gsi.dit.upm.es:2200/docs/templates/makefiles.git
|
||||||
|
|
||||||
makefiles-commit: makefiles-remote
|
makefiles-commit: makefiles-remote
|
||||||
git add -f .makefiles
|
git add -f .makefiles
|
||||||
git commit -em "Updated makefiles from ${NAME}"
|
git commit -em "Updated makefiles from ${NAME}"
|
||||||
|
|
||||||
makefiles-push:
|
makefiles-push:
|
||||||
|
git fetch makefiles $(NAME)
|
||||||
git subtree push --prefix=.makefiles/ makefiles $(NAME)
|
git subtree push --prefix=.makefiles/ makefiles $(NAME)
|
||||||
|
|
||||||
makefiles-pull: makefiles-remote
|
makefiles-pull: makefiles-remote
|
||||||
git subtree pull --prefix=.makefiles/ makefiles master --squash
|
git subtree pull --prefix=.makefiles/ makefiles master --squash
|
||||||
|
|
||||||
pull:: makefiles-pull
|
.PHONY:: makefiles-remote makefiles-commit makefiles-push makefiles-pull
|
||||||
push:: makefiles-push
|
|
||||||
|
|
||||||
.PHONY:: makefiles-remote makefiles-commit makefiles-push makefiles-pull pull push
|
|
||||||
|
@@ -1,9 +1,17 @@
|
|||||||
PYVERSIONS ?= 2.7
|
PYVERSIONS ?= 3.5
|
||||||
PYMAIN ?= $(firstword $(PYVERSIONS))
|
PYMAIN ?= $(firstword $(PYVERSIONS))
|
||||||
TARNAME ?= $(NAME)-$(VERSION).tar.gz
|
TARNAME ?= $(NAME)-$(VERSION).tar.gz
|
||||||
|
VERSIONFILE ?= $(NAME)/VERSION
|
||||||
|
|
||||||
DEVPORT ?= 6000
|
DEVPORT ?= 6000
|
||||||
|
|
||||||
|
|
||||||
|
.FORCE:
|
||||||
|
|
||||||
|
version: .FORCE
|
||||||
|
@echo $(VERSION) > $(VERSIONFILE)
|
||||||
|
@echo $(VERSION)
|
||||||
|
|
||||||
yapf: ## Format python code
|
yapf: ## Format python code
|
||||||
yapf -i -r $(NAME)
|
yapf -i -r $(NAME)
|
||||||
yapf -i -r tests
|
yapf -i -r tests
|
||||||
@@ -18,9 +26,10 @@ Dockerfile-%: Dockerfile.template ## Generate a specific dockerfile (e.g. Docke
|
|||||||
quick_build: $(addprefix build-, $(PYMAIN))
|
quick_build: $(addprefix build-, $(PYMAIN))
|
||||||
|
|
||||||
build: $(addprefix build-, $(PYVERSIONS)) ## Build all images / python versions
|
build: $(addprefix build-, $(PYVERSIONS)) ## Build all images / python versions
|
||||||
|
docker tag $(IMAGEWTAG)-python$(PYMAIN) $(IMAGEWTAG)
|
||||||
|
|
||||||
build-%: version Dockerfile-% ## Build a specific version (e.g. build-2.7)
|
build-%: version Dockerfile-% ## Build a specific version (e.g. build-2.7)
|
||||||
docker build -t '$(IMAGEWTAG)-python$*' --cache-from $(IMAGENAME):python$* -f Dockerfile-$* .;
|
docker build -t '$(IMAGEWTAG)-python$*' -f Dockerfile-$* .;
|
||||||
|
|
||||||
dev-%: ## Launch a specific development environment using docker (e.g. dev-2.7)
|
dev-%: ## Launch a specific development environment using docker (e.g. dev-2.7)
|
||||||
@docker start $(NAME)-dev$* || (\
|
@docker start $(NAME)-dev$* || (\
|
||||||
@@ -34,10 +43,10 @@ dev: dev-$(PYMAIN) ## Launch a development environment using docker, using the d
|
|||||||
|
|
||||||
quick_test: test-$(PYMAIN)
|
quick_test: test-$(PYMAIN)
|
||||||
|
|
||||||
test-%: ## Run setup.py from in an isolated container, built from the base image. (e.g. test-2.7)
|
test-%: build-% ## Run setup.py from in an isolated container, built from the base image. (e.g. test-2.7)
|
||||||
# This speeds tests up because the image has most (if not all) of the dependencies already.
|
# This speeds tests up because the image has most (if not all) of the dependencies already.
|
||||||
docker rm $(NAME)-test-$* || true
|
docker rm $(NAME)-test-$* || true
|
||||||
docker create -ti --name $(NAME)-test-$* --entrypoint="" -w /usr/src/app/ $(IMAGENAME):python$* python setup.py test
|
docker create -ti --name $(NAME)-test-$* --entrypoint="" -w /usr/src/app/ $(IMAGEWTAG)-python$* python setup.py test
|
||||||
docker cp . $(NAME)-test-$*:/usr/src/app
|
docker cp . $(NAME)-test-$*:/usr/src/app
|
||||||
docker start -a $(NAME)-test-$*
|
docker start -a $(NAME)-test-$*
|
||||||
|
|
||||||
@@ -67,7 +76,7 @@ pip_upload: pip_test ## Upload package to pip
|
|||||||
|
|
||||||
push-latest: $(addprefix push-latest-,$(PYVERSIONS)) ## Push the "latest" tag to dockerhub
|
push-latest: $(addprefix push-latest-,$(PYVERSIONS)) ## Push the "latest" tag to dockerhub
|
||||||
docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGEWTAG)'
|
docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGEWTAG)'
|
||||||
docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGENAME)'
|
docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGENAME):latest'
|
||||||
docker push '$(IMAGENAME):latest'
|
docker push '$(IMAGENAME):latest'
|
||||||
docker push '$(IMAGEWTAG)'
|
docker push '$(IMAGEWTAG)'
|
||||||
|
|
||||||
@@ -89,4 +98,4 @@ clean:: ## Clean older docker images and containers related to this project and
|
|||||||
@docker ps -a | grep $(IMAGENAME) | awk '{ split($$2, vers, "-"); if(vers[0] != "${VERSION}"){ print $$1;}}' | xargs docker rm -v 2>/dev/null|| true
|
@docker ps -a | grep $(IMAGENAME) | awk '{ split($$2, vers, "-"); if(vers[0] != "${VERSION}"){ print $$1;}}' | xargs docker rm -v 2>/dev/null|| true
|
||||||
@docker images | grep $(IMAGENAME) | awk '{ split($$2, vers, "-"); if(vers[0] != "${VERSION}"){ print $$1":"$$2;}}' | xargs docker rmi 2>/dev/null|| true
|
@docker images | grep $(IMAGENAME) | awk '{ split($$2, vers, "-"); if(vers[0] != "${VERSION}"){ print $$1":"$$2;}}' | xargs docker rmi 2>/dev/null|| true
|
||||||
|
|
||||||
.PHONY:: yapf dockerfiles Dockerfile-% quick_build build build-% dev-% quick-dev test quick_test push-latest push-latest-% push-% push
|
.PHONY:: yapf dockerfiles Dockerfile-% quick_build build build-% dev-% quick-dev test quick_test push-latest push-latest-% push-% push version .FORCE
|
||||||
|
@@ -2,6 +2,10 @@ from python:{{PYVERSION}}
|
|||||||
|
|
||||||
MAINTAINER J. Fernando Sánchez <jf.sanchez@upm.es>
|
MAINTAINER J. Fernando Sánchez <jf.sanchez@upm.es>
|
||||||
|
|
||||||
|
RUN apt-get update && apt-get install -y \
|
||||||
|
libblas-dev liblapack-dev liblapacke-dev gfortran \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
RUN mkdir /cache/ /senpy-plugins /data/
|
RUN mkdir /cache/ /senpy-plugins /data/
|
||||||
|
|
||||||
VOLUME /data/
|
VOLUME /data/
|
||||||
@@ -14,9 +18,9 @@ ONBUILD WORKDIR /senpy-plugins/
|
|||||||
|
|
||||||
|
|
||||||
WORKDIR /usr/src/app
|
WORKDIR /usr/src/app
|
||||||
COPY test-requirements.txt requirements.txt /usr/src/app/
|
COPY test-requirements.txt requirements.txt extra-requirements.txt /usr/src/app/
|
||||||
RUN pip install --use-wheel -r test-requirements.txt -r requirements.txt
|
RUN pip install --no-cache-dir -r test-requirements.txt -r requirements.txt -r extra-requirements.txt
|
||||||
COPY . /usr/src/app/
|
COPY . /usr/src/app/
|
||||||
RUN pip install --no-index --no-deps --editable .
|
RUN pip install --no-cache-dir --no-index --no-deps --editable .
|
||||||
|
|
||||||
ENTRYPOINT ["python", "-m", "senpy", "-f", "/senpy-plugins/", "--host", "0.0.0.0"]
|
ENTRYPOINT ["python", "-m", "senpy", "-f", "/senpy-plugins/", "--host", "0.0.0.0"]
|
||||||
|
@@ -1,5 +1,6 @@
|
|||||||
include requirements.txt
|
include requirements.txt
|
||||||
include test-requirements.txt
|
include test-requirements.txt
|
||||||
|
include extra-requirements.txt
|
||||||
include README.rst
|
include README.rst
|
||||||
include senpy/VERSION
|
include senpy/VERSION
|
||||||
graft senpy/plugins
|
graft senpy/plugins
|
||||||
|
@@ -1,5 +1,5 @@
|
|||||||
.. image:: img/header.png
|
.. image:: img/header.png
|
||||||
:height: 6em
|
:width: 100%
|
||||||
:target: http://demos.gsi.dit.upm.es/senpy
|
:target: http://demos.gsi.dit.upm.es/senpy
|
||||||
|
|
||||||
.. image:: https://travis-ci.org/gsi-upm/senpy.svg?branch=master
|
.. image:: https://travis-ci.org/gsi-upm/senpy.svg?branch=master
|
||||||
|
10
docker-compose.dev.yml
Normal file
10
docker-compose.dev.yml
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
version: '3'
|
||||||
|
services:
|
||||||
|
senpy:
|
||||||
|
image: "${IMAGENAME-gsiupm/senpy}:${VERSION-latest}"
|
||||||
|
entrypoint: ["/bin/bash"]
|
||||||
|
working_dir: "/senpy-plugins"
|
||||||
|
ports:
|
||||||
|
- 5000:5000
|
||||||
|
volumes:
|
||||||
|
- ".:/usr/src/app/"
|
9
docker-compose.test.yml
Normal file
9
docker-compose.test.yml
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
version: '3'
|
||||||
|
services:
|
||||||
|
test:
|
||||||
|
image: "${IMAGENAME-gsiupm/senpy}:${VERSION-dev}"
|
||||||
|
entrypoint: ["py.test"]
|
||||||
|
volumes:
|
||||||
|
- ".:/usr/src/app/"
|
||||||
|
command:
|
||||||
|
[]
|
11
docker-compose.yml
Normal file
11
docker-compose.yml
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
version: '3'
|
||||||
|
services:
|
||||||
|
senpy:
|
||||||
|
image: "${IMAGENAME-gsiupm/senpy}:${VERSION-dev}"
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: Dockerfile${PYVERSION--2.7}
|
||||||
|
ports:
|
||||||
|
- 5001:5000
|
||||||
|
volumes:
|
||||||
|
- "./data:/data"
|
106
docs/SenpyClientUse.rst
Normal file
106
docs/SenpyClientUse.rst
Normal file
@@ -0,0 +1,106 @@
|
|||||||
|
|
||||||
|
Client
|
||||||
|
======
|
||||||
|
|
||||||
|
Demo Endpoint
|
||||||
|
-------------
|
||||||
|
|
||||||
|
Import Client and send a request
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
from senpy.client import Client
|
||||||
|
|
||||||
|
c = Client('http://latest.senpy.cluster.gsi.dit.upm.es/api')
|
||||||
|
r = c.analyse('I like Pizza', algorithm='sentiment140')
|
||||||
|
|
||||||
|
Print response
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
for entry in r.entries:
|
||||||
|
print('{} -> {}'.format(entry['text'], entry['sentiments'][0]['marl:hasPolarity']))
|
||||||
|
|
||||||
|
|
||||||
|
.. parsed-literal::
|
||||||
|
|
||||||
|
I like Pizza -> marl:Positive
|
||||||
|
|
||||||
|
|
||||||
|
Obtain a list of available plugins
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
for plugin in c.request('/plugins')['plugins']:
|
||||||
|
print(plugin['name'])
|
||||||
|
|
||||||
|
|
||||||
|
.. parsed-literal::
|
||||||
|
|
||||||
|
emoRand
|
||||||
|
rand
|
||||||
|
sentiment140
|
||||||
|
|
||||||
|
|
||||||
|
Local Endpoint
|
||||||
|
--------------
|
||||||
|
|
||||||
|
Run a docker container with Senpy image and default plugins
|
||||||
|
|
||||||
|
.. code::
|
||||||
|
|
||||||
|
docker run -ti --name 'SenpyEndpoint' -d -p 5000:5000 gsiupm/senpy:0.8.6 --host 0.0.0.0 --default-plugins
|
||||||
|
|
||||||
|
|
||||||
|
.. parsed-literal::
|
||||||
|
|
||||||
|
a0157cd98057072388bfebeed78a830da7cf0a796f4f1a3fd9188f9f2e5fe562
|
||||||
|
|
||||||
|
|
||||||
|
Import client and send a request to localhost
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
c_local = Client('http://127.0.0.1:5000/api')
|
||||||
|
r = c_local.analyse('Hello world', algorithm='sentiment140')
|
||||||
|
|
||||||
|
Print response
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
for entry in r.entries:
|
||||||
|
print('{} -> {}'.format(entry['text'], entry['sentiments'][0]['marl:hasPolarity']))
|
||||||
|
|
||||||
|
|
||||||
|
.. parsed-literal::
|
||||||
|
|
||||||
|
Hello world -> marl:Neutral
|
||||||
|
|
||||||
|
|
||||||
|
Obtain a list of available plugins deployed locally
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
c_local.plugins().keys()
|
||||||
|
|
||||||
|
|
||||||
|
.. parsed-literal::
|
||||||
|
|
||||||
|
rand
|
||||||
|
sentiment140
|
||||||
|
emoRand
|
||||||
|
|
||||||
|
|
||||||
|
Stop the docker container
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
!docker stop SenpyEndpoint
|
||||||
|
!docker rm SenpyEndpoint
|
||||||
|
|
||||||
|
|
||||||
|
.. parsed-literal::
|
||||||
|
|
||||||
|
SenpyEndpoint
|
||||||
|
SenpyEndpoint
|
||||||
|
|
@@ -6,13 +6,9 @@
|
|||||||
],
|
],
|
||||||
"entries": [
|
"entries": [
|
||||||
{
|
{
|
||||||
"@type": [
|
|
||||||
"nif:RFC5147String",
|
|
||||||
"nif:Context"
|
|
||||||
],
|
|
||||||
"nif:beginIndex": 0,
|
"nif:beginIndex": 0,
|
||||||
"nif:endIndex": 40,
|
"nif:endIndex": 40,
|
||||||
"nif:isString": "My favourite actress is Natalie Portman"
|
"text": "An entry should have a nif:isString key"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
@@ -3,10 +3,21 @@
|
|||||||
"@id": "me:Result1",
|
"@id": "me:Result1",
|
||||||
"@type": "results",
|
"@type": "results",
|
||||||
"analysis": [
|
"analysis": [
|
||||||
"me:SAnalysis1",
|
{
|
||||||
"me:SgAnalysis1",
|
"@id": "_:SAnalysis1_Activity",
|
||||||
"me:EmotionAnalysis1",
|
"@type": "marl:SentimentAnalysis",
|
||||||
"me:NER1"
|
"prov:wasAssociatedWith": "me:SAnalysis1"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"@id": "_:EmotionAnalysis1_Activity",
|
||||||
|
"@type": "onyx:EmotionAnalysis",
|
||||||
|
"prov:wasAssociatedWith": "me:EmotionAnalysis1"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"@id": "_:NER1_Activity",
|
||||||
|
"@type": "me:NER",
|
||||||
|
"prov:wasAssociatedWith": "me:NER1"
|
||||||
|
}
|
||||||
],
|
],
|
||||||
"entries": [
|
"entries": [
|
||||||
{
|
{
|
||||||
@@ -23,7 +34,7 @@
|
|||||||
"nif:endIndex": 13,
|
"nif:endIndex": 13,
|
||||||
"nif:anchorOf": "Microsoft",
|
"nif:anchorOf": "Microsoft",
|
||||||
"me:references": "http://dbpedia.org/page/Microsoft",
|
"me:references": "http://dbpedia.org/page/Microsoft",
|
||||||
"prov:wasGeneratedBy": "me:NER1"
|
"prov:wasGeneratedBy": "_:NER1_Activity"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"@id": "http://micro.blog/status1#char=25,37",
|
"@id": "http://micro.blog/status1#char=25,37",
|
||||||
@@ -31,7 +42,7 @@
|
|||||||
"nif:endIndex": 37,
|
"nif:endIndex": 37,
|
||||||
"nif:anchorOf": "Windows Phone",
|
"nif:anchorOf": "Windows Phone",
|
||||||
"me:references": "http://dbpedia.org/page/Windows_Phone",
|
"me:references": "http://dbpedia.org/page/Windows_Phone",
|
||||||
"prov:wasGeneratedBy": "me:NER1"
|
"prov:wasGeneratedBy": "_:NER1_Activity"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"suggestions": [
|
"suggestions": [
|
||||||
@@ -40,7 +51,7 @@
|
|||||||
"nif:beginIndex": 16,
|
"nif:beginIndex": 16,
|
||||||
"nif:endIndex": 77,
|
"nif:endIndex": 77,
|
||||||
"nif:anchorOf": "put your Windows Phone on your newest #open technology program",
|
"nif:anchorOf": "put your Windows Phone on your newest #open technology program",
|
||||||
"prov:wasGeneratedBy": "me:SgAnalysis1"
|
"prov:wasGeneratedBy": "_:SgAnalysis1_Activity"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"sentiments": [
|
"sentiments": [
|
||||||
@@ -51,14 +62,14 @@
|
|||||||
"nif:anchorOf": "You'll be awesome.",
|
"nif:anchorOf": "You'll be awesome.",
|
||||||
"marl:hasPolarity": "marl:Positive",
|
"marl:hasPolarity": "marl:Positive",
|
||||||
"marl:polarityValue": 0.9,
|
"marl:polarityValue": 0.9,
|
||||||
"prov:wasGeneratedBy": "me:SAnalysis1"
|
"prov:wasGeneratedBy": "_:SgAnalysis1_Activity"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"emotions": [
|
"emotions": [
|
||||||
{
|
{
|
||||||
"@id": "http://micro.blog/status1#char=0,109",
|
"@id": "http://micro.blog/status1#char=0,109",
|
||||||
"nif:anchorOf": "Dear Microsoft, put your Windows Phone on your newest #open technology program. You'll be awesome. #opensource",
|
"nif:anchorOf": "Dear Microsoft, put your Windows Phone on your newest #open technology program. You'll be awesome. #opensource",
|
||||||
"prov:wasGeneratedBy": "me:EAnalysis1",
|
"prov:wasGeneratedBy": "_:EmotionAnalysis1_Activity",
|
||||||
"onyx:hasEmotion": [
|
"onyx:hasEmotion": [
|
||||||
{
|
{
|
||||||
"onyx:hasEmotionCategory": "wna:liking"
|
"onyx:hasEmotionCategory": "wna:liking"
|
||||||
|
@@ -1,78 +0,0 @@
|
|||||||
{
|
|
||||||
"@context": "http://mixedemotions-project.eu/ns/context.jsonld",
|
|
||||||
"@id": "me:Result1",
|
|
||||||
"@type": "results",
|
|
||||||
"analysis": [
|
|
||||||
"me:SAnalysis1",
|
|
||||||
"me:SgAnalysis1",
|
|
||||||
"me:EmotionAnalysis1",
|
|
||||||
"me:NER1",
|
|
||||||
{
|
|
||||||
"@type": "analysis",
|
|
||||||
"@id": "anonymous"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"entries": [
|
|
||||||
{
|
|
||||||
"@id": "http://micro.blog/status1",
|
|
||||||
"@type": [
|
|
||||||
"nif:RFC5147String",
|
|
||||||
"nif:Context"
|
|
||||||
],
|
|
||||||
"nif:isString": "Dear Microsoft, put your Windows Phone on your newest #open technology program. You'll be awesome. #opensource",
|
|
||||||
"entities": [
|
|
||||||
{
|
|
||||||
"@id": "http://micro.blog/status1#char=5,13",
|
|
||||||
"nif:beginIndex": 5,
|
|
||||||
"nif:endIndex": 13,
|
|
||||||
"nif:anchorOf": "Microsoft",
|
|
||||||
"me:references": "http://dbpedia.org/page/Microsoft",
|
|
||||||
"prov:wasGeneratedBy": "me:NER1"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"@id": "http://micro.blog/status1#char=25,37",
|
|
||||||
"nif:beginIndex": 25,
|
|
||||||
"nif:endIndex": 37,
|
|
||||||
"nif:anchorOf": "Windows Phone",
|
|
||||||
"me:references": "http://dbpedia.org/page/Windows_Phone",
|
|
||||||
"prov:wasGeneratedBy": "me:NER1"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"suggestions": [
|
|
||||||
{
|
|
||||||
"@id": "http://micro.blog/status1#char=16,77",
|
|
||||||
"nif:beginIndex": 16,
|
|
||||||
"nif:endIndex": 77,
|
|
||||||
"nif:anchorOf": "put your Windows Phone on your newest #open technology program",
|
|
||||||
"prov:wasGeneratedBy": "me:SgAnalysis1"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"sentiments": [
|
|
||||||
{
|
|
||||||
"@id": "http://micro.blog/status1#char=80,97",
|
|
||||||
"nif:beginIndex": 80,
|
|
||||||
"nif:endIndex": 97,
|
|
||||||
"nif:anchorOf": "You'll be awesome.",
|
|
||||||
"marl:hasPolarity": "marl:Positive",
|
|
||||||
"marl:polarityValue": 0.9,
|
|
||||||
"prov:wasGeneratedBy": "me:SAnalysis1"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"emotions": [
|
|
||||||
{
|
|
||||||
"@id": "http://micro.blog/status1#char=0,109",
|
|
||||||
"nif:anchorOf": "Dear Microsoft, put your Windows Phone on your newest #open technology program. You'll be awesome. #opensource",
|
|
||||||
"prov:wasGeneratedBy": "me:EAnalysis1",
|
|
||||||
"onyx:hasEmotion": [
|
|
||||||
{
|
|
||||||
"onyx:hasEmotionCategory": "wna:liking"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"onyx:hasEmotionCategory": "wna:excitement"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
@@ -1,19 +1,18 @@
|
|||||||
{
|
{
|
||||||
"@context": "http://mixedemotions-project.eu/ns/context.jsonld",
|
"@context": "http://mixedemotions-project.eu/ns/context.jsonld",
|
||||||
"@id": "http://example.com#NIFExample",
|
"@id": "me:Result1",
|
||||||
"@type": "results",
|
"@type": "results",
|
||||||
"analysis": [
|
"analysis": [ ],
|
||||||
],
|
"entries": [
|
||||||
"entries": [
|
{
|
||||||
{
|
"@id": "http://example.org#char=0,40",
|
||||||
"@id": "http://example.org#char=0,40",
|
"@type": [
|
||||||
"@type": [
|
"nif:RFC5147String",
|
||||||
"nif:RFC5147String",
|
"nif:Context"
|
||||||
"nif:Context"
|
],
|
||||||
],
|
"nif:beginIndex": 0,
|
||||||
"nif:beginIndex": 0,
|
"nif:endIndex": 40,
|
||||||
"nif:endIndex": 40,
|
"nif:isString": "My favourite actress is Natalie Portman"
|
||||||
"nif:isString": "My favourite actress is Natalie Portman"
|
}
|
||||||
}
|
]
|
||||||
]
|
|
||||||
}
|
}
|
||||||
|
@@ -1,88 +1,100 @@
|
|||||||
{
|
{
|
||||||
"@context": "http://mixedemotions-project.eu/ns/context.jsonld",
|
"@context": "http://mixedemotions-project.eu/ns/context.jsonld",
|
||||||
"@id": "me:Result1",
|
"@id": "me:Result1",
|
||||||
"@type": "results",
|
"@type": "results",
|
||||||
"analysis": [
|
"analysis": [
|
||||||
{
|
|
||||||
"@id": "me:SAnalysis1",
|
|
||||||
"@type": "marl:SentimentAnalysis",
|
|
||||||
"marl:maxPolarityValue": 1,
|
|
||||||
"marl:minPolarityValue": 0
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"@id": "me:SgAnalysis1",
|
|
||||||
"@type": "me:SuggestionAnalysis"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"@id": "me:EmotionAnalysis1",
|
|
||||||
"@type": "me:EmotionAnalysis"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"@id": "me:NER1",
|
|
||||||
"@type": "me:NER"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"entries": [
|
|
||||||
{
|
|
||||||
"@id": "http://micro.blog/status1",
|
|
||||||
"@type": [
|
|
||||||
"nif:RFC5147String",
|
|
||||||
"nif:Context"
|
|
||||||
],
|
|
||||||
"nif:isString": "Dear Microsoft, put your Windows Phone on your newest #open technology program. You'll be awesome. #opensource",
|
|
||||||
"entities": [
|
|
||||||
{
|
{
|
||||||
"@id": "http://micro.blog/status1#char=5,13",
|
"@id": "_:SAnalysis1_Activity",
|
||||||
"nif:beginIndex": 5,
|
"@type": "marl:SentimentAnalysis",
|
||||||
"nif:endIndex": 13,
|
"prov:wasAssociatedWith": "me:SentimentAnalysis",
|
||||||
"nif:anchorOf": "Microsoft",
|
"prov:used": [
|
||||||
"me:references": "http://dbpedia.org/page/Microsoft",
|
{
|
||||||
"prov:wasGeneratedBy": "me:NER1"
|
"name": "marl:maxPolarityValue",
|
||||||
|
"prov:value": "1"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "marl:minPolarityValue",
|
||||||
|
"prov:value": "0"
|
||||||
|
}
|
||||||
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"@id": "http://micro.blog/status1#char=25,37",
|
"@id": "_:SgAnalysis1_Activity",
|
||||||
"nif:beginIndex": 25,
|
"prov:wasAssociatedWith": "me:SgAnalysis1",
|
||||||
"nif:endIndex": 37,
|
"@type": "me:SuggestionAnalysis"
|
||||||
"nif:anchorOf": "Windows Phone",
|
},
|
||||||
"me:references": "http://dbpedia.org/page/Windows_Phone",
|
|
||||||
"prov:wasGeneratedBy": "me:NER1"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"suggestions": [
|
|
||||||
{
|
{
|
||||||
"@id": "http://micro.blog/status1#char=16,77",
|
"@id": "_:EmotionAnalysis1_Activity",
|
||||||
"nif:beginIndex": 16,
|
"@type": "me:EmotionAnalysis",
|
||||||
"nif:endIndex": 77,
|
"prov:wasAssociatedWith": "me:EmotionAnalysis1"
|
||||||
"nif:anchorOf": "put your Windows Phone on your newest #open technology program",
|
},
|
||||||
"prov:wasGeneratedBy": "me:SgAnalysis1"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"sentiments": [
|
|
||||||
{
|
{
|
||||||
"@id": "http://micro.blog/status1#char=80,97",
|
"@id": "_:NER1_Activity",
|
||||||
"nif:beginIndex": 80,
|
"@type": "me:NER",
|
||||||
"nif:endIndex": 97,
|
"prov:wasAssociatedWith": "me:EmotionNER1"
|
||||||
"nif:anchorOf": "You'll be awesome.",
|
|
||||||
"marl:hasPolarity": "marl:Positive",
|
|
||||||
"marl:polarityValue": 0.9,
|
|
||||||
"prov:wasGeneratedBy": "me:SAnalysis1"
|
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"emotions": [
|
"entries": [
|
||||||
{
|
{
|
||||||
"@id": "http://micro.blog/status1#char=0,109",
|
"@id": "http://micro.blog/status1",
|
||||||
"nif:anchorOf": "Dear Microsoft, put your Windows Phone on your newest #open technology program. You'll be awesome. #opensource",
|
"@type": [
|
||||||
"prov:wasGeneratedBy": "me:EAnalysis1",
|
"nif:RFC5147String",
|
||||||
"onyx:hasEmotion": [
|
"nif:Context"
|
||||||
{
|
],
|
||||||
"onyx:hasEmotionCategory": "wna:liking"
|
"nif:isString": "Dear Microsoft, put your Windows Phone on your newest #open technology program. You'll be awesome. #opensource",
|
||||||
},
|
"entities": [
|
||||||
{
|
{
|
||||||
"onyx:hasEmotionCategory": "wna:excitement"
|
"@id": "http://micro.blog/status1#char=5,13",
|
||||||
}
|
"nif:beginIndex": 5,
|
||||||
]
|
"nif:endIndex": 13,
|
||||||
|
"nif:anchorOf": "Microsoft",
|
||||||
|
"me:references": "http://dbpedia.org/page/Microsoft",
|
||||||
|
"prov:wasGeneratedBy": "me:NER1"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"@id": "http://micro.blog/status1#char=25,37",
|
||||||
|
"nif:beginIndex": 25,
|
||||||
|
"nif:endIndex": 37,
|
||||||
|
"nif:anchorOf": "Windows Phone",
|
||||||
|
"me:references": "http://dbpedia.org/page/Windows_Phone",
|
||||||
|
"prov:wasGeneratedBy": "me:NER1"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"suggestions": [
|
||||||
|
{
|
||||||
|
"@id": "http://micro.blog/status1#char=16,77",
|
||||||
|
"nif:beginIndex": 16,
|
||||||
|
"nif:endIndex": 77,
|
||||||
|
"nif:anchorOf": "put your Windows Phone on your newest #open technology program",
|
||||||
|
"prov:wasGeneratedBy": "me:SgAnalysis1"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"sentiments": [
|
||||||
|
{
|
||||||
|
"@id": "http://micro.blog/status1#char=80,97",
|
||||||
|
"nif:beginIndex": 80,
|
||||||
|
"nif:endIndex": 97,
|
||||||
|
"nif:anchorOf": "You'll be awesome.",
|
||||||
|
"marl:hasPolarity": "marl:Positive",
|
||||||
|
"marl:polarityValue": 0.9,
|
||||||
|
"prov:wasGeneratedBy": "me:SAnalysis1"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"emotions": [
|
||||||
|
{
|
||||||
|
"@id": "http://micro.blog/status1#char=0,109",
|
||||||
|
"nif:anchorOf": "Dear Microsoft, put your Windows Phone on your newest #open technology program. You'll be awesome. #opensource",
|
||||||
|
"prov:wasGeneratedBy": "me:EAnalysis1",
|
||||||
|
"onyx:hasEmotion": [
|
||||||
|
{
|
||||||
|
"onyx:hasEmotionCategory": "wna:liking"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"onyx:hasEmotionCategory": "wna:excitement"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
}
|
||||||
|
@@ -4,8 +4,9 @@
|
|||||||
"@type": "results",
|
"@type": "results",
|
||||||
"analysis": [
|
"analysis": [
|
||||||
{
|
{
|
||||||
"@id": "me:EmotionAnalysis1",
|
"@id": "me:EmotionAnalysis1_Activity",
|
||||||
"@type": "onyx:EmotionAnalysis"
|
"@type": "me:EmotionAnalysis1",
|
||||||
|
"prov:wasAssociatedWith": "me:EmotionAnalysis1"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"entries": [
|
"entries": [
|
||||||
@@ -26,7 +27,7 @@
|
|||||||
{
|
{
|
||||||
"@id": "http://micro.blog/status1#char=0,109",
|
"@id": "http://micro.blog/status1#char=0,109",
|
||||||
"nif:anchorOf": "Dear Microsoft, put your Windows Phone on your newest #open technology program. You'll be awesome. #opensource",
|
"nif:anchorOf": "Dear Microsoft, put your Windows Phone on your newest #open technology program. You'll be awesome. #opensource",
|
||||||
"prov:wasGeneratedBy": "me:EmotionAnalysis1",
|
"prov:wasGeneratedBy": "_:EmotionAnalysis1_Activity",
|
||||||
"onyx:hasEmotion": [
|
"onyx:hasEmotion": [
|
||||||
{
|
{
|
||||||
"onyx:hasEmotionCategory": "wna:liking"
|
"onyx:hasEmotionCategory": "wna:liking"
|
||||||
|
@@ -4,8 +4,9 @@
|
|||||||
"@type": "results",
|
"@type": "results",
|
||||||
"analysis": [
|
"analysis": [
|
||||||
{
|
{
|
||||||
"@id": "me:NER1",
|
"@id": "_:NER1_Activity",
|
||||||
"@type": "me:NERAnalysis"
|
"@type": "me:NERAnalysis",
|
||||||
|
"prov:wasAssociatedWith": "me:NER1"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"entries": [
|
"entries": [
|
||||||
|
@@ -9,9 +9,15 @@
|
|||||||
"@type": "results",
|
"@type": "results",
|
||||||
"analysis": [
|
"analysis": [
|
||||||
{
|
{
|
||||||
"@id": "me:HesamsAnalysis",
|
"@id": "me:HesamsAnalysis_Activity",
|
||||||
"@type": "onyx:EmotionAnalysis",
|
"@type": "onyx:EmotionAnalysis",
|
||||||
"onyx:usesEmotionModel": "emovoc:pad-dimensions"
|
"prov:wasAssociatedWith": "me:HesamsAnalysis",
|
||||||
|
"prov:used": [
|
||||||
|
{
|
||||||
|
"name": "emotion-model",
|
||||||
|
"prov:value": "emovoc:pad-dimensions"
|
||||||
|
}
|
||||||
|
]
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"entries": [
|
"entries": [
|
||||||
@@ -32,7 +38,7 @@
|
|||||||
{
|
{
|
||||||
"@id": "Entry1#char=0,21",
|
"@id": "Entry1#char=0,21",
|
||||||
"nif:anchorOf": "This is a test string",
|
"nif:anchorOf": "This is a test string",
|
||||||
"prov:wasGeneratedBy": "me:HesamAnalysis",
|
"prov:wasGeneratedBy": "_:HesamAnalysis_Activity",
|
||||||
"onyx:hasEmotion": [
|
"onyx:hasEmotion": [
|
||||||
{
|
{
|
||||||
"emovoc:pleasure": 0.5,
|
"emovoc:pleasure": 0.5,
|
||||||
|
@@ -4,10 +4,9 @@
|
|||||||
"@type": "results",
|
"@type": "results",
|
||||||
"analysis": [
|
"analysis": [
|
||||||
{
|
{
|
||||||
"@id": "me:SAnalysis1",
|
"@id": "_:SAnalysis1_Activity",
|
||||||
"@type": "marl:SentimentAnalysis",
|
"@type": "marl:SentimentAnalysis",
|
||||||
"marl:maxPolarityValue": 1,
|
"prov:wasAssociatedWith": "me:SAnalysis1"
|
||||||
"marl:minPolarityValue": 0
|
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"entries": [
|
"entries": [
|
||||||
@@ -30,7 +29,7 @@
|
|||||||
"nif:anchorOf": "You'll be awesome.",
|
"nif:anchorOf": "You'll be awesome.",
|
||||||
"marl:hasPolarity": "marl:Positive",
|
"marl:hasPolarity": "marl:Positive",
|
||||||
"marl:polarityValue": 0.9,
|
"marl:polarityValue": 0.9,
|
||||||
"prov:wasGeneratedBy": "me:SAnalysis1"
|
"prov:wasGeneratedBy": "_:SAnalysis1_Activity"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"emotionSets": [
|
"emotionSets": [
|
||||||
|
@@ -3,7 +3,11 @@
|
|||||||
"@id": "me:Result1",
|
"@id": "me:Result1",
|
||||||
"@type": "results",
|
"@type": "results",
|
||||||
"analysis": [
|
"analysis": [
|
||||||
"me:SgAnalysis1"
|
{
|
||||||
|
"@id": "_:SgAnalysis1_Activity",
|
||||||
|
"@type": "me:SuggestionAnalysis",
|
||||||
|
"prov:wasAssociatedWith": "me:SgAnalysis1"
|
||||||
|
}
|
||||||
],
|
],
|
||||||
"entries": [
|
"entries": [
|
||||||
{
|
{
|
||||||
@@ -12,7 +16,6 @@
|
|||||||
"nif:RFC5147String",
|
"nif:RFC5147String",
|
||||||
"nif:Context"
|
"nif:Context"
|
||||||
],
|
],
|
||||||
"prov:wasGeneratedBy": "me:SAnalysis1",
|
|
||||||
"nif:isString": "Dear Microsoft, put your Windows Phone on your newest #open technology program. You'll be awesome. #opensource",
|
"nif:isString": "Dear Microsoft, put your Windows Phone on your newest #open technology program. You'll be awesome. #opensource",
|
||||||
"entities": [
|
"entities": [
|
||||||
],
|
],
|
||||||
@@ -22,7 +25,7 @@
|
|||||||
"nif:beginIndex": 16,
|
"nif:beginIndex": 16,
|
||||||
"nif:endIndex": 77,
|
"nif:endIndex": 77,
|
||||||
"nif:anchorOf": "put your Windows Phone on your newest #open technology program",
|
"nif:anchorOf": "put your Windows Phone on your newest #open technology program",
|
||||||
"prov:wasGeneratedBy": "me:SgAnalysis1"
|
"prov:wasGeneratedBy": "_:SgAnalysis1_Activity"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"sentiments": [
|
"sentiments": [
|
||||||
|
113
docs/plugins-definition.rst
Normal file
113
docs/plugins-definition.rst
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
Advanced plugin definition
|
||||||
|
--------------------------
|
||||||
|
In addition to finding plugins defined in source code files, senpy can also load a special type of definition file (`.senpy` files).
|
||||||
|
This used to be the only mechanism for loading in earlier versions of senpy.
|
||||||
|
|
||||||
|
The definition file contains basic information
|
||||||
|
|
||||||
|
Lastly, it is also possible to add new plugins programmatically.
|
||||||
|
|
||||||
|
.. contents:: :local:
|
||||||
|
|
||||||
|
What is a plugin?
|
||||||
|
=================
|
||||||
|
|
||||||
|
A plugin is a program that, given a text, will add annotations to it.
|
||||||
|
In practice, a plugin consists of at least two files:
|
||||||
|
|
||||||
|
- Definition file: a `.senpy` file that describes the plugin (e.g. what input parameters it accepts, what emotion model it uses).
|
||||||
|
- Python module: the actual code that will add annotations to each input.
|
||||||
|
|
||||||
|
This separation allows us to deploy plugins that use the same code but employ different parameters.
|
||||||
|
For instance, one could use the same classifier and processing in several plugins, but train with different datasets.
|
||||||
|
This scenario is particularly useful for evaluation purposes.
|
||||||
|
|
||||||
|
The only limitation is that the name of each plugin needs to be unique.
|
||||||
|
|
||||||
|
Definition files
|
||||||
|
================
|
||||||
|
|
||||||
|
The definition file complements and overrides the attributes provided by the plugin.
|
||||||
|
It can be written in YAML or JSON.
|
||||||
|
The most important attributes are:
|
||||||
|
|
||||||
|
* **name**: unique name that senpy will use internally to identify the plugin.
|
||||||
|
* **module**: indicates the module that contains the plugin code, which will be automatically loaded by senpy.
|
||||||
|
* **version**
|
||||||
|
* extra_params: to add parameters to the senpy API when this plugin is requested. Those parameters may be required, and have aliased names. For instance:
|
||||||
|
|
||||||
|
.. code:: yaml
|
||||||
|
|
||||||
|
extra_params:
|
||||||
|
hello_param:
|
||||||
|
aliases: # required
|
||||||
|
- hello_param
|
||||||
|
- hello
|
||||||
|
required: true
|
||||||
|
default: Hi you
|
||||||
|
values:
|
||||||
|
- Hi you
|
||||||
|
- Hello y'all
|
||||||
|
- Howdy
|
||||||
|
|
||||||
|
A complete example:
|
||||||
|
|
||||||
|
.. code:: yaml
|
||||||
|
|
||||||
|
name: <Name of the plugin>
|
||||||
|
module: <Python file>
|
||||||
|
version: 0.1
|
||||||
|
|
||||||
|
And the json equivalent:
|
||||||
|
|
||||||
|
.. code:: json
|
||||||
|
|
||||||
|
{
|
||||||
|
"name": "<Name of the plugin>",
|
||||||
|
"module": "<Python file>",
|
||||||
|
"version": "0.1"
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
Example plugin with a definition file
|
||||||
|
=====================================
|
||||||
|
|
||||||
|
In this section, we will implement a basic sentiment analysis plugin.
|
||||||
|
To determine the polarity of each entry, the plugin will compare the length of the string to a threshold.
|
||||||
|
This threshold will be included in the definition file.
|
||||||
|
|
||||||
|
The definition file would look like this:
|
||||||
|
|
||||||
|
.. code:: yaml
|
||||||
|
|
||||||
|
name: helloworld
|
||||||
|
module: helloworld
|
||||||
|
version: 0.0
|
||||||
|
threshold: 10
|
||||||
|
description: Hello World
|
||||||
|
|
||||||
|
Now, in a file named ``helloworld.py``:
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
#!/bin/env python
|
||||||
|
#helloworld.py
|
||||||
|
|
||||||
|
from senpy import AnalysisPlugin
|
||||||
|
from senpy import Sentiment
|
||||||
|
|
||||||
|
|
||||||
|
class HelloWorld(AnalysisPlugin):
|
||||||
|
|
||||||
|
def analyse_entry(entry, params):
|
||||||
|
'''Basically do nothing with each entry'''
|
||||||
|
|
||||||
|
sentiment = Sentiment()
|
||||||
|
if len(entry.text) < self.threshold:
|
||||||
|
sentiment['marl:hasPolarity'] = 'marl:Positive'
|
||||||
|
else:
|
||||||
|
sentiment['marl:hasPolarity'] = 'marl:Negative'
|
||||||
|
entry.sentiments.append(sentiment)
|
||||||
|
yield entry
|
||||||
|
|
||||||
|
The complete code of the example plugin is available `here <https://lab.cluster.gsi.dit.upm.es/senpy/plugin-prueba>`__.
|
317
docs/plugins.rst
317
docs/plugins.rst
@@ -1,6 +1,8 @@
|
|||||||
Developing new plugins
|
Developing new plugins
|
||||||
----------------------
|
----------------------
|
||||||
This document describes how to develop a new analysis plugin. For an example of conversion plugins, see :doc:`conversion`.
|
This document contains the minimum to get you started with developing new analysis plugin.
|
||||||
|
For an example of conversion plugins, see :doc:`conversion`.
|
||||||
|
For a description of definition files, see :doc:`plugins-definition`.
|
||||||
|
|
||||||
A more step-by-step tutorial with slides is available `here <https://lab.cluster.gsi.dit.upm.es/senpy/senpy-tutorial>`__
|
A more step-by-step tutorial with slides is available `here <https://lab.cluster.gsi.dit.upm.es/senpy/senpy-tutorial>`__
|
||||||
|
|
||||||
@@ -9,83 +11,29 @@ A more step-by-step tutorial with slides is available `here <https://lab.cluster
|
|||||||
What is a plugin?
|
What is a plugin?
|
||||||
=================
|
=================
|
||||||
|
|
||||||
A plugin is a program that, given a text, will add annotations to it.
|
A plugin is a python object that can process entries. Given an entry, it will modify it, add annotations to it, or generate new entries.
|
||||||
In practice, a plugin consists of at least two files:
|
|
||||||
|
|
||||||
- Definition file: a `.senpy` file that describes the plugin (e.g. what input parameters it accepts, what emotion model it uses).
|
|
||||||
- Python module: the actual code that will add annotations to each input.
|
|
||||||
|
|
||||||
This separation allows us to deploy plugins that use the same code but employ different parameters.
|
|
||||||
For instance, one could use the same classifier and processing in several plugins, but train with different datasets.
|
|
||||||
This scenario is particularly useful for evaluation purposes.
|
|
||||||
|
|
||||||
The only limitation is that the name of each plugin needs to be unique.
|
|
||||||
|
|
||||||
Plugin Definition files
|
|
||||||
=======================
|
|
||||||
|
|
||||||
The definition file contains all the attributes of the plugin, and can be written in YAML or JSON.
|
|
||||||
When the server is launched, it will recursively search for definition files in the plugin folder (the current folder, by default).
|
|
||||||
The most important attributes are:
|
|
||||||
|
|
||||||
* **name**: unique name that senpy will use internally to identify the plugin.
|
|
||||||
* **module**: indicates the module that contains the plugin code, which will be automatically loaded by senpy.
|
|
||||||
* **version**
|
|
||||||
* extra_params: to add parameters to the senpy API when this plugin is requested. Those parameters may be required, and have aliased names. For instance:
|
|
||||||
|
|
||||||
.. code:: yaml
|
|
||||||
|
|
||||||
extra_params:
|
|
||||||
hello_param:
|
|
||||||
aliases: # required
|
|
||||||
- hello_param
|
|
||||||
- hello
|
|
||||||
required: true
|
|
||||||
default: Hi you
|
|
||||||
values:
|
|
||||||
- Hi you
|
|
||||||
- Hello y'all
|
|
||||||
- Howdy
|
|
||||||
|
|
||||||
Parameter validation will fail if a required parameter without a default has not been provided, or if the definition includes a set of values and the provided one does not match one of them.
|
|
||||||
|
|
||||||
|
|
||||||
A complete example:
|
What is an entry?
|
||||||
|
=================
|
||||||
.. code:: yaml
|
|
||||||
|
|
||||||
name: <Name of the plugin>
|
|
||||||
module: <Python file>
|
|
||||||
version: 0.1
|
|
||||||
|
|
||||||
And the json equivalent:
|
|
||||||
|
|
||||||
.. code:: json
|
|
||||||
|
|
||||||
{
|
|
||||||
"name": "<Name of the plugin>",
|
|
||||||
"module": "<Python file>",
|
|
||||||
"version": "0.1"
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
Plugins Code
|
|
||||||
============
|
|
||||||
|
|
||||||
The basic methods in a plugin are:
|
|
||||||
|
|
||||||
* __init__
|
|
||||||
* activate: used to load memory-hungry resources
|
|
||||||
* deactivate: used to free up resources
|
|
||||||
* analyse_entry: called in every user requests. It takes two parameters: ``Entry``, the entry object, and ``params``, the parameters supplied by the user. It should yield one or more ``Entry`` objects.
|
|
||||||
|
|
||||||
Plugins are loaded asynchronously, so don't worry if the activate method takes too long. The plugin will be marked as activated once it is finished executing the method.
|
|
||||||
|
|
||||||
Entries
|
|
||||||
=======
|
|
||||||
|
|
||||||
Entries are objects that can be annotated.
|
Entries are objects that can be annotated.
|
||||||
|
In general, they will be a piece of text.
|
||||||
By default, entries are `NIF contexts <http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core/nif-core.html>`_ represented in JSON-LD format.
|
By default, entries are `NIF contexts <http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core/nif-core.html>`_ represented in JSON-LD format.
|
||||||
|
It is a dictionary/JSON object that looks like this:
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
{
|
||||||
|
"@id": "<unique identifier or blank node name>",
|
||||||
|
"nif:isString": "input text",
|
||||||
|
"sentiments": [ {
|
||||||
|
...
|
||||||
|
}
|
||||||
|
],
|
||||||
|
...
|
||||||
|
}
|
||||||
|
|
||||||
Annotations are added to the object like this:
|
Annotations are added to the object like this:
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
@@ -100,96 +48,111 @@ The value may be any valid JSON-LD dictionary.
|
|||||||
For simplicity, senpy includes a series of models by default in the ``senpy.models`` module.
|
For simplicity, senpy includes a series of models by default in the ``senpy.models`` module.
|
||||||
|
|
||||||
|
|
||||||
Example plugin
|
What are annotations?
|
||||||
==============
|
=====================
|
||||||
|
They are objects just like entries.
|
||||||
|
Senpy ships with several default annotations, including: ``Sentiment``, ``Emotion``, ``EmotionSet``...
|
||||||
|
|
||||||
In this section, we will implement a basic sentiment analysis plugin.
|
|
||||||
To determine the polarity of each entry, the plugin will compare the length of the string to a threshold.
|
|
||||||
This threshold will be included in the definition file.
|
|
||||||
|
|
||||||
The definition file would look like this:
|
What's a plugin made of?
|
||||||
|
========================
|
||||||
|
|
||||||
.. code:: yaml
|
When receiving a query, senpy selects what plugin or plugins should process each entry, and in what order.
|
||||||
|
It also makes sure that every entry and the parameters provided by the user meet the plugin requirements.
|
||||||
|
|
||||||
name: helloworld
|
Hence, two parts are necessary: 1) the code that will process the entry, and 2) some attributes and metadata that will tell senpy how to interact with the plugin.
|
||||||
module: helloworld
|
|
||||||
version: 0.0
|
|
||||||
threshold: 10
|
|
||||||
description: Hello World
|
|
||||||
|
|
||||||
Now, in a file named ``helloworld.py``:
|
In practice, this is what a plugin looks like, tests included:
|
||||||
|
|
||||||
|
|
||||||
|
.. literalinclude:: ../senpy/plugins/example/rand_plugin.py
|
||||||
|
:emphasize-lines: 5-11
|
||||||
|
:language: python
|
||||||
|
|
||||||
|
|
||||||
|
The lines highlighted contain some information about the plugin.
|
||||||
|
In particular, the following information is mandatory:
|
||||||
|
|
||||||
|
* A unique name for the class. In our example, Rand.
|
||||||
|
* The subclass/type of plugin. This is typically either `SentimentPlugin` or `EmotionPlugin`. However, new types of plugin can be created for different annotations. The only requirement is that these new types inherit from `senpy.Analysis`
|
||||||
|
* A description of the plugin. This can be done simply by adding a doc to the class.
|
||||||
|
* A version, which should get updated.
|
||||||
|
* An author name.
|
||||||
|
|
||||||
|
|
||||||
|
Plugins Code
|
||||||
|
============
|
||||||
|
|
||||||
|
The basic methods in a plugin are:
|
||||||
|
|
||||||
|
* analyse_entry: called in every user requests. It takes two parameters: ``Entry``, the entry object, and ``params``, the parameters supplied by the user. It should yield one or more ``Entry`` objects.
|
||||||
|
* activate: used to load memory-hungry resources. For instance, to train a classifier.
|
||||||
|
* deactivate: used to free up resources when the plugin is no longer needed.
|
||||||
|
|
||||||
|
Plugins are loaded asynchronously, so don't worry if the activate method takes too long. The plugin will be marked as activated once it is finished executing the method.
|
||||||
|
|
||||||
|
|
||||||
|
How does senpy find modules?
|
||||||
|
============================
|
||||||
|
|
||||||
|
Senpy looks for files of two types:
|
||||||
|
|
||||||
|
* Python files of the form `senpy_<NAME>.py` or `<NAME>_plugin.py`. In these files, it will look for: 1) Instances that inherit from `senpy.Plugin`, or subclasses of `senpy.Plugin` that can be initialized without a configuration file. i.e. classes that contain all the required attributes for a plugin.
|
||||||
|
* Plugin definition files (see :doc:`advanced-plugins`)
|
||||||
|
|
||||||
|
Defining additional parameters
|
||||||
|
==============================
|
||||||
|
|
||||||
|
Your plugin may ask for additional parameters from the users of the service by using the attribute ``extra_params`` in your plugin definition.
|
||||||
|
It takes a dictionary, where the keys are the name of the argument/parameter, and the value has the following fields:
|
||||||
|
|
||||||
|
* aliases: the different names which can be used in the request to use the parameter.
|
||||||
|
* required: if set to true, users need to provide this parameter unless a default is set.
|
||||||
|
* options: the different acceptable values of the parameter (i.e. an enum). If set, the value provided must match one of the options.
|
||||||
|
* default: the default value of the parameter, if none is provided in the request.
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
|
||||||
#!/bin/env python
|
"extra_params":{
|
||||||
#helloworld.py
|
"language": {
|
||||||
|
"aliases": ["language", "lang", "l"],
|
||||||
from senpy.plugins import AnalysisPlugin
|
"required": True,
|
||||||
from senpy.models import Sentiment
|
"options": ["es", "en"],
|
||||||
|
"default": "es"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
class HelloWorld(AnalysisPlugin):
|
|
||||||
|
|
||||||
def analyse_entry(entry, params):
|
|
||||||
'''Basically do nothing with each entry'''
|
|
||||||
|
|
||||||
sentiment = Sentiment()
|
|
||||||
if len(entry.text) < self.threshold:
|
|
||||||
sentiment['marl:hasPolarity'] = 'marl:Positive'
|
|
||||||
else:
|
|
||||||
sentiment['marl:hasPolarity'] = 'marl:Negative'
|
|
||||||
entry.sentiments.append(sentiment)
|
|
||||||
yield entry
|
|
||||||
|
|
||||||
The complete code of the example plugin is available `here <https://lab.cluster.gsi.dit.upm.es/senpy/plugin-prueba>`__.
|
|
||||||
|
|
||||||
Loading data and files
|
Loading data and files
|
||||||
======================
|
======================
|
||||||
|
|
||||||
Most plugins will need access to files (dictionaries, lexicons, etc.).
|
Most plugins will need access to files (dictionaries, lexicons, etc.).
|
||||||
It is good practice to specify the paths of these files in the plugin configuration, so the same code can be reused with different resources.
|
These files are usually heavy or under a license that does not allow redistribution.
|
||||||
|
For this reason, senpy has a `data_folder` that is separated from the source files.
|
||||||
|
The location of this folder is controlled programmatically or by setting the `SENPY_DATA` environment variable.
|
||||||
|
|
||||||
|
Plugins have a convenience function `self.open` which will automatically prepend the data folder to relative paths:
|
||||||
|
|
||||||
|
|
||||||
.. code:: yaml
|
.. code:: python
|
||||||
|
|
||||||
|
import os
|
||||||
|
|
||||||
|
|
||||||
|
class PluginWithResources(AnalysisPlugin):
|
||||||
|
file_in_data = <FILE PATH>
|
||||||
|
file_in_sources = <FILE PATH>
|
||||||
|
|
||||||
|
def activate(self):
|
||||||
|
with self.open(self.file_in_data) as f:
|
||||||
|
self._classifier = train_from_file(f)
|
||||||
|
file_in_source = os.path.join(self.get_folder(), self.file_in_sources)
|
||||||
|
with self.open(file_in_source) as f:
|
||||||
|
pass
|
||||||
|
|
||||||
name: dictworld
|
|
||||||
module: dictworld
|
|
||||||
dictionary_path: <PATH OF THE FILE>
|
|
||||||
|
|
||||||
The path can be either absolute, or relative.
|
It is good practice to specify the paths of these files in the plugin configuration, so the same code can be reused with different resources.
|
||||||
|
|
||||||
From absolute paths
|
|
||||||
???????????????????
|
|
||||||
|
|
||||||
Absolute paths (such as ``/data/dictionary.csv``) are straightforward:
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
with open(os.path.join(self.dictionary_path) as f:
|
|
||||||
...
|
|
||||||
|
|
||||||
From relative paths
|
|
||||||
???????????????????
|
|
||||||
Since plugins are loaded dynamically, relative paths will refer to the current working directory.
|
|
||||||
Instead, what you usually want is to load files *relative to the plugin source folder*, like so:
|
|
||||||
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
.
|
|
||||||
..
|
|
||||||
plugin.senpy
|
|
||||||
plugin.py
|
|
||||||
dictionary.csv
|
|
||||||
|
|
||||||
For this, we need to first get the path of your source folder first, like so:
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
import os
|
|
||||||
root = os.path.realpath(__file__)
|
|
||||||
with open(os.path.join(root, self.dictionary_path) as f:
|
|
||||||
...
|
|
||||||
|
|
||||||
|
|
||||||
Docker image
|
Docker image
|
||||||
@@ -199,8 +162,17 @@ Add the following dockerfile to your project to generate a docker image with you
|
|||||||
|
|
||||||
.. code:: dockerfile
|
.. code:: dockerfile
|
||||||
|
|
||||||
FROM gsiupm/senpy:0.8.8
|
FROM gsiupm/senpy
|
||||||
|
|
||||||
|
Once you make sure your plugin works with a specific version of senpy, modify that file to make sure your build will work even if senpy gets updated.
|
||||||
|
e.g.:
|
||||||
|
|
||||||
|
|
||||||
|
.. code:: dockerfile
|
||||||
|
|
||||||
|
FROM gsiupm/senpy:1.0.1
|
||||||
|
|
||||||
|
|
||||||
This will copy your source folder to the image, and install all dependencies.
|
This will copy your source folder to the image, and install all dependencies.
|
||||||
Now, to build an image:
|
Now, to build an image:
|
||||||
|
|
||||||
@@ -215,7 +187,7 @@ And you can run it with:
|
|||||||
docker run -p 5000:5000 gsiupm/exampleplugin
|
docker run -p 5000:5000 gsiupm/exampleplugin
|
||||||
|
|
||||||
|
|
||||||
If the plugin non-source files (:ref:`loading data and files`), the recommended way is to use absolute paths.
|
If the plugin uses non-source files (:ref:`loading data and files`), the recommended way is to use `SENPY_DATA` folder.
|
||||||
Data can then be mounted in the container or added to the image.
|
Data can then be mounted in the container or added to the image.
|
||||||
The former is recommended for open source plugins with licensed resources, whereas the latter is the most convenient and can be used for private images.
|
The former is recommended for open source plugins with licensed resources, whereas the latter is the most convenient and can be used for private images.
|
||||||
|
|
||||||
@@ -229,7 +201,7 @@ Adding data to the image:
|
|||||||
|
|
||||||
.. code:: dockerfile
|
.. code:: dockerfile
|
||||||
|
|
||||||
FROM gsiupm/senpy:0.8.8
|
FROM gsiupm/senpy:1.0.1
|
||||||
COPY data /
|
COPY data /
|
||||||
|
|
||||||
F.A.Q.
|
F.A.Q.
|
||||||
@@ -245,7 +217,7 @@ Why does the analyse function yield instead of return?
|
|||||||
??????????????????????????????????????????????????????
|
??????????????????????????????????????????????????????
|
||||||
|
|
||||||
This is so that plugins may add new entries to the response or filter some of them.
|
This is so that plugins may add new entries to the response or filter some of them.
|
||||||
For instance, a `context detection` plugin may add a new entry for each context in the original entry.
|
For instance, a chunker may split one entry into several.
|
||||||
On the other hand, a conversion plugin may leave out those entries that do not contain relevant information.
|
On the other hand, a conversion plugin may leave out those entries that do not contain relevant information.
|
||||||
|
|
||||||
|
|
||||||
@@ -275,11 +247,13 @@ Training a classifier can be time time consuming. To avoid running the training
|
|||||||
def deactivate(self):
|
def deactivate(self):
|
||||||
self.close()
|
self.close()
|
||||||
|
|
||||||
You can specify a 'shelf_file' in your .senpy file. By default the ShelfMixin creates a file based on the plugin name and stores it in that plugin's folder.
|
|
||||||
|
By default the ShelfMixin creates a file based on the plugin name and stores it in that plugin's folder.
|
||||||
|
However, you can manually specify a 'shelf_file' in your .senpy file.
|
||||||
|
|
||||||
Shelves may get corrupted if the plugin exits unexpectedly.
|
Shelves may get corrupted if the plugin exits unexpectedly.
|
||||||
A corrupt shelf prevents the plugin from loading.
|
A corrupt shelf prevents the plugin from loading.
|
||||||
If you do not care about the pickle, you can force your plugin to remove the corrupted file and load anyway, set the 'force_shelf' to True in your .senpy file.
|
If you do not care about the data in the shelf, you can force your plugin to remove the corrupted file and load anyway, set the 'force_shelf' to True in your plugin and start it again.
|
||||||
|
|
||||||
How can I turn an external service into a plugin?
|
How can I turn an external service into a plugin?
|
||||||
?????????????????????????????????????????????????
|
?????????????????????????????????????????????????
|
||||||
@@ -313,50 +287,11 @@ This example ilustrate how to implement a plugin that accesses the Sentiment140
|
|||||||
prefix=p,
|
prefix=p,
|
||||||
marl__hasPolarity=polarity,
|
marl__hasPolarity=polarity,
|
||||||
marl__polarityValue=polarity_value)
|
marl__polarityValue=polarity_value)
|
||||||
sentiment.prov__wasGeneratedBy = self.id
|
sentiment.prov(self)
|
||||||
entry.sentiments.append(sentiment)
|
entry.sentiments.append(sentiment)
|
||||||
yield entry
|
yield entry
|
||||||
|
|
||||||
|
|
||||||
Can my plugin require additional parameters from the user?
|
|
||||||
??????????????????????????????????????????????????????????
|
|
||||||
|
|
||||||
You can add extra parameters in the definition file under the attribute ``extra_params``.
|
|
||||||
It takes a dictionary, where the keys are the name of the argument/parameter, and the value has the following fields:
|
|
||||||
|
|
||||||
* aliases: the different names which can be used in the request to use the parameter.
|
|
||||||
* required: if set to true, users need to provide this parameter unless a default is set.
|
|
||||||
* options: the different acceptable values of the parameter (i.e. an enum). If set, the value provided must match one of the options.
|
|
||||||
* default: the default value of the parameter, if none is provided in the request.
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
extra_params
|
|
||||||
language:
|
|
||||||
aliases:
|
|
||||||
- language
|
|
||||||
- lang
|
|
||||||
- l
|
|
||||||
required: true,
|
|
||||||
options:
|
|
||||||
- es
|
|
||||||
- en
|
|
||||||
default: es
|
|
||||||
|
|
||||||
This example shows how to introduce a parameter associated with language.
|
|
||||||
The extraction of this parameter is used in the analyse method of the Plugin interface.
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
lang = params.get("language")
|
|
||||||
|
|
||||||
Where can I set up variables for using them in my plugin?
|
|
||||||
?????????????????????????????????????????????????????????
|
|
||||||
|
|
||||||
You can add these variables in the definition file with the structure of attribute-value pairs.
|
|
||||||
|
|
||||||
Every field added to the definition file is available to the plugin instance.
|
|
||||||
|
|
||||||
Can I activate a DEBUG mode for my plugin?
|
Can I activate a DEBUG mode for my plugin?
|
||||||
???????????????????????????????????????????
|
???????????????????????????????????????????
|
||||||
|
|
||||||
@@ -371,7 +306,7 @@ Additionally, with the ``--pdb`` option you will be dropped into a pdb post mort
|
|||||||
|
|
||||||
.. code:: bash
|
.. code:: bash
|
||||||
|
|
||||||
senpy --pdb
|
python -m pdb yourplugin.py
|
||||||
|
|
||||||
Where can I find more code examples?
|
Where can I find more code examples?
|
||||||
????????????????????????????????????
|
????????????????????????????????????
|
||||||
|
@@ -1,8 +1,11 @@
|
|||||||
What is Senpy?
|
What is Senpy?
|
||||||
--------------
|
--------------
|
||||||
|
|
||||||
Web services can get really complex: data validation, user interaction, formatting, logging., etc.
|
Senpy is a framework for text analysis using Linked Data. There are three main applications of Senpy so far: sentiment and emotion analysis, user profiling and entity recognition. Annotations and Services are compliant with NIF (NLP Interchange Format).
|
||||||
The figure below summarizes the typical features in an analysis service.
|
|
||||||
|
Senpy aims at providing a framework where analysis modules can be integrated easily as plugins, and providing a core functionality for managing tasks such as data validation, user interaction, formatting, logging, translation to linked data, etc.
|
||||||
|
|
||||||
|
The figure below summarizes the typical features in a text analysis service.
|
||||||
Senpy implements all the common blocks, so developers can focus on what really matters: great analysis algorithms that solve real problems.
|
Senpy implements all the common blocks, so developers can focus on what really matters: great analysis algorithms that solve real problems.
|
||||||
|
|
||||||
.. image:: senpy-framework.png
|
.. image:: senpy-framework.png
|
||||||
|
@@ -7,21 +7,29 @@ The senpy server is launched via the `senpy` command:
|
|||||||
|
|
||||||
usage: senpy [-h] [--level logging_level] [--debug] [--default-plugins]
|
usage: senpy [-h] [--level logging_level] [--debug] [--default-plugins]
|
||||||
[--host HOST] [--port PORT] [--plugins-folder PLUGINS_FOLDER]
|
[--host HOST] [--port PORT] [--plugins-folder PLUGINS_FOLDER]
|
||||||
[--only-install]
|
[--only-install] [--only-list] [--data-folder DATA_FOLDER]
|
||||||
|
[--threaded] [--version]
|
||||||
|
|
||||||
Run a Senpy server
|
Run a Senpy server
|
||||||
|
|
||||||
optional arguments:
|
optional arguments:
|
||||||
-h, --help show this help message and exit
|
-h, --help show this help message and exit
|
||||||
--level logging_level, -l logging_level
|
--level logging_level, -l logging_level
|
||||||
Logging level
|
Logging level
|
||||||
--debug, -d Run the application in debug mode
|
--debug, -d Run the application in debug mode
|
||||||
--default-plugins Load the default plugins
|
--default-plugins Load the default plugins
|
||||||
--host HOST Use 0.0.0.0 to accept requests from any host.
|
--host HOST Use 0.0.0.0 to accept requests from any host.
|
||||||
--port PORT, -p PORT Port to listen on.
|
--port PORT, -p PORT Port to listen on.
|
||||||
--plugins-folder PLUGINS_FOLDER, -f PLUGINS_FOLDER
|
--plugins-folder PLUGINS_FOLDER, -f PLUGINS_FOLDER
|
||||||
Where to look for plugins.
|
Where to look for plugins.
|
||||||
--only-install, -i Do not run a server, only install plugin dependencies
|
--only-install, -i Do not run a server, only install plugin dependencies
|
||||||
|
--only-list, --list Do not run a server, only list plugins found
|
||||||
|
--data-folder DATA_FOLDER, --data DATA_FOLDER
|
||||||
|
Where to look for data. It be set with the SENPY_DATA
|
||||||
|
environment variable as well.
|
||||||
|
--threaded Run a threaded server
|
||||||
|
--version, -v Output the senpy version and exit
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
When launched, the server will recursively look for plugins in the specified plugins folder (the current working directory by default).
|
When launched, the server will recursively look for plugins in the specified plugins folder (the current working directory by default).
|
||||||
|
@@ -1,8 +1,24 @@
|
|||||||
Vocabularies and model
|
Vocabularies and model
|
||||||
======================
|
======================
|
||||||
|
|
||||||
The model used in Senpy is based on the following vocabularies:
|
The model used in Senpy is based on NIF 2.0 [1], which defines a semantic format and API for improving interoperability among natural language processing services.
|
||||||
|
|
||||||
* Marl, a vocabulary designed to annotate and describe subjetive opinions expressed on the web or in information systems.
|
Senpy has been applied to sentiment and emotion analysis services using the following vocabularies:
|
||||||
* Onyx, which is built one the same principles as Marl to annotate and describe emotions, and provides interoperability with Emotion Markup Language.
|
|
||||||
* NIF 2.0, which defines a semantic format and APO for improving interoperability among natural language processing services
|
* Marl [2,6], a vocabulary designed to annotate and describe subjetive opinions expressed on the web or in information systems.
|
||||||
|
* Onyx [3,5], which is built one the same principles as Marl to annotate and describe emotions, and provides interoperability with Emotion Markup Language.
|
||||||
|
|
||||||
|
An overview of the vocabularies and their use can be found in [4].
|
||||||
|
|
||||||
|
|
||||||
|
[1] Guidelines for developing NIF-based NLP services, Final Community Group Report 22 December 2015 Available at: https://www.w3.org/2015/09/bpmlod-reports/nif-based-nlp-webservices/
|
||||||
|
|
||||||
|
[2] Marl Ontology Specification, available at http://www.gsi.dit.upm.es/ontologies/marl/
|
||||||
|
|
||||||
|
[3] Onyx Ontology Specification, available at http://www.gsi.dit.upm.es/ontologies/onyx/
|
||||||
|
|
||||||
|
[4] Iglesias, C. A., Sánchez-Rada, J. F., Vulcu, G., & Buitelaar, P. (2017). Linked Data Models for Sentiment and Emotion Analysis in Social Networks. In Sentiment Analysis in Social Networks (pp. 49-69).
|
||||||
|
|
||||||
|
[5] Sánchez-Rada, J. F., & Iglesias, C. A. (2016). Onyx: A linked data approach to emotion representation. Information Processing & Management, 52(1), 99-114.
|
||||||
|
|
||||||
|
[6] Westerski, A., Iglesias Fernandez, C. A., & Tapia Rico, F. (2011). Linked opinions: Describing sentiments on the structured web of data.
|
||||||
|
23
example-plugins/README.md
Normal file
23
example-plugins/README.md
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
This is a collection of plugins that exemplify certain aspects of plugin development with senpy.
|
||||||
|
|
||||||
|
The first series of plugins the `basic` ones.
|
||||||
|
Their starting point is a classification function defined in `basic.py`.
|
||||||
|
They all include testing and running them as a script will run all tests.
|
||||||
|
In ascending order of customization, the plugins are:
|
||||||
|
|
||||||
|
* Basic is the simplest plugin of all. It leverages the `SentimentBox` Plugin class to create a plugin out of a classification method, and `MappingMixin` to convert the labels from (`pos`, `neg`) to (`marl:Positive`, `marl:Negative`
|
||||||
|
* Basic_box is just like the previous one, but replaces the mixin with a custom function.
|
||||||
|
* Basic_configurable is a version of `basic` with a configurable map of emojis for each sentiment.
|
||||||
|
* Basic_parameterized like `basic_info`, but users set the map in each query (via `extra_parameters`).
|
||||||
|
* Basic_analyse\_entry uses the more general `analyse_entry` method and adds the annotations individually.
|
||||||
|
|
||||||
|
|
||||||
|
In rest of the plugins show advanced topics:
|
||||||
|
|
||||||
|
* mynoop: shows how to add a definition file with external requirements for a plugin. Doing this with a python-only module would require moving all imports of the requirements to their functions, which is considered bad practice.
|
||||||
|
* Async: a barebones example of training a plugin and analyzing data in parallel.
|
||||||
|
|
||||||
|
All of the plugins in this folder include a set of test cases and they are periodically tested with the latest version of senpy.
|
||||||
|
|
||||||
|
Additioanlly, for an example of stand-alone plugin that can be tested and deployed with docker, take a look at: lab.cluster.gsi.dit.upm.es/senpy/plugin-example
|
||||||
|
bbm
|
@@ -1,4 +1,4 @@
|
|||||||
from senpy.plugins import AnalysisPlugin
|
from senpy import AnalysisPlugin
|
||||||
|
|
||||||
import multiprocessing
|
import multiprocessing
|
||||||
|
|
||||||
@@ -7,10 +7,15 @@ def _train(process_number):
|
|||||||
return process_number
|
return process_number
|
||||||
|
|
||||||
|
|
||||||
class AsyncPlugin(AnalysisPlugin):
|
class Async(AnalysisPlugin):
|
||||||
|
'''An example of an asynchronous module'''
|
||||||
|
author = '@balkian'
|
||||||
|
version = '0.2'
|
||||||
|
sync = False
|
||||||
|
|
||||||
def _do_async(self, num_processes):
|
def _do_async(self, num_processes):
|
||||||
pool = multiprocessing.Pool(processes=num_processes)
|
pool = multiprocessing.Pool(processes=num_processes)
|
||||||
values = pool.map(_train, range(num_processes))
|
values = sorted(pool.map(_train, range(num_processes)))
|
||||||
|
|
||||||
return values
|
return values
|
||||||
|
|
||||||
@@ -22,5 +27,11 @@ class AsyncPlugin(AnalysisPlugin):
|
|||||||
entry.async_values = values
|
entry.async_values = values
|
||||||
yield entry
|
yield entry
|
||||||
|
|
||||||
def test(self):
|
test_cases = [
|
||||||
pass
|
{
|
||||||
|
'input': 'any',
|
||||||
|
'expected': {
|
||||||
|
'async_values': [0, 1]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
23
example-plugins/basic.py
Normal file
23
example-plugins/basic.py
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
#!/usr/local/bin/python
|
||||||
|
# coding: utf-8
|
||||||
|
|
||||||
|
emoticons = {
|
||||||
|
'pos': [':)', ':]', '=)', ':D'],
|
||||||
|
'neg': [':(', ':[', '=(']
|
||||||
|
}
|
||||||
|
|
||||||
|
emojis = {
|
||||||
|
'pos': ['😁', '😂', '😃', '😄', '😆', '😅', '😄' '😍'],
|
||||||
|
'neg': ['😢', '😡', '😠', '😞', '😖', '😔', '😓', '😒']
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def get_polarity(text, dictionaries=[emoticons, emojis]):
|
||||||
|
polarity = 'marl:Neutral'
|
||||||
|
for dictionary in dictionaries:
|
||||||
|
for label, values in dictionary.items():
|
||||||
|
for emoticon in values:
|
||||||
|
if emoticon and emoticon in text:
|
||||||
|
polarity = label
|
||||||
|
break
|
||||||
|
return polarity
|
47
example-plugins/basic_analyse_entry_plugin.py
Normal file
47
example-plugins/basic_analyse_entry_plugin.py
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
#!/usr/local/bin/python
|
||||||
|
# coding: utf-8
|
||||||
|
|
||||||
|
from senpy import easy_test, models, plugins
|
||||||
|
|
||||||
|
import basic
|
||||||
|
|
||||||
|
|
||||||
|
class BasicAnalyseEntry(plugins.SentimentPlugin):
|
||||||
|
'''Equivalent to Basic, implementing the analyse_entry method'''
|
||||||
|
|
||||||
|
author = '@balkian'
|
||||||
|
version = '0.1'
|
||||||
|
|
||||||
|
mappings = {
|
||||||
|
'pos': 'marl:Positive',
|
||||||
|
'neg': 'marl:Negative',
|
||||||
|
'default': 'marl:Neutral'
|
||||||
|
}
|
||||||
|
|
||||||
|
def analyse_entry(self, entry, params):
|
||||||
|
polarity = basic.get_polarity(entry.text)
|
||||||
|
|
||||||
|
polarity = self.mappings.get(polarity, self.mappings['default'])
|
||||||
|
|
||||||
|
s = models.Sentiment(marl__hasPolarity=polarity)
|
||||||
|
s.prov(self)
|
||||||
|
entry.sentiments.append(s)
|
||||||
|
yield entry
|
||||||
|
|
||||||
|
test_cases = [{
|
||||||
|
'input': 'Hello :)',
|
||||||
|
'polarity': 'marl:Positive'
|
||||||
|
}, {
|
||||||
|
'input': 'So sad :(',
|
||||||
|
'polarity': 'marl:Negative'
|
||||||
|
}, {
|
||||||
|
'input': 'Yay! Emojis 😁',
|
||||||
|
'polarity': 'marl:Positive'
|
||||||
|
}, {
|
||||||
|
'input': 'But no emoticons 😢',
|
||||||
|
'polarity': 'marl:Negative'
|
||||||
|
}]
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
easy_test()
|
41
example-plugins/basic_box_plugin.py
Normal file
41
example-plugins/basic_box_plugin.py
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
#!/usr/local/bin/python
|
||||||
|
# coding: utf-8
|
||||||
|
|
||||||
|
from senpy import easy_test, SentimentBox
|
||||||
|
|
||||||
|
import basic
|
||||||
|
|
||||||
|
|
||||||
|
class BasicBox(SentimentBox):
|
||||||
|
''' A modified version of Basic that also does converts annotations manually'''
|
||||||
|
|
||||||
|
author = '@balkian'
|
||||||
|
version = '0.1'
|
||||||
|
|
||||||
|
mappings = {
|
||||||
|
'pos': 'marl:Positive',
|
||||||
|
'neg': 'marl:Negative',
|
||||||
|
'default': 'marl:Neutral'
|
||||||
|
}
|
||||||
|
|
||||||
|
def predict_one(self, input):
|
||||||
|
output = basic.get_polarity(input)
|
||||||
|
return self.mappings.get(output, self.mappings['default'])
|
||||||
|
|
||||||
|
test_cases = [{
|
||||||
|
'input': 'Hello :)',
|
||||||
|
'polarity': 'marl:Positive'
|
||||||
|
}, {
|
||||||
|
'input': 'So sad :(',
|
||||||
|
'polarity': 'marl:Negative'
|
||||||
|
}, {
|
||||||
|
'input': 'Yay! Emojis 😁',
|
||||||
|
'polarity': 'marl:Positive'
|
||||||
|
}, {
|
||||||
|
'input': 'But no emoticons 😢',
|
||||||
|
'polarity': 'marl:Negative'
|
||||||
|
}]
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
easy_test()
|
40
example-plugins/basic_plugin.py
Normal file
40
example-plugins/basic_plugin.py
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
#!/usr/local/bin/python
|
||||||
|
# coding: utf-8
|
||||||
|
|
||||||
|
from senpy import easy_test, SentimentBox, MappingMixin
|
||||||
|
|
||||||
|
import basic
|
||||||
|
|
||||||
|
|
||||||
|
class Basic(MappingMixin, SentimentBox):
|
||||||
|
'''Provides sentiment annotation using a lexicon'''
|
||||||
|
|
||||||
|
author = '@balkian'
|
||||||
|
version = '0.1'
|
||||||
|
|
||||||
|
mappings = {
|
||||||
|
'pos': 'marl:Positive',
|
||||||
|
'neg': 'marl:Negative',
|
||||||
|
'default': 'marl:Neutral'
|
||||||
|
}
|
||||||
|
|
||||||
|
def predict_one(self, input):
|
||||||
|
return basic.get_polarity(input)
|
||||||
|
|
||||||
|
test_cases = [{
|
||||||
|
'input': 'Hello :)',
|
||||||
|
'polarity': 'marl:Positive'
|
||||||
|
}, {
|
||||||
|
'input': 'So sad :(',
|
||||||
|
'polarity': 'marl:Negative'
|
||||||
|
}, {
|
||||||
|
'input': 'Yay! Emojis 😁',
|
||||||
|
'polarity': 'marl:Positive'
|
||||||
|
}, {
|
||||||
|
'input': 'But no emoticons 😢',
|
||||||
|
'polarity': 'marl:Negative'
|
||||||
|
}]
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
easy_test()
|
105
example-plugins/configurable_plugin.py
Normal file
105
example-plugins/configurable_plugin.py
Normal file
@@ -0,0 +1,105 @@
|
|||||||
|
#!/usr/local/bin/python
|
||||||
|
# coding: utf-8
|
||||||
|
|
||||||
|
from senpy import easy_test, models, plugins
|
||||||
|
|
||||||
|
import basic
|
||||||
|
|
||||||
|
|
||||||
|
class Dictionary(plugins.SentimentPlugin):
|
||||||
|
'''Sentiment annotation using a configurable lexicon'''
|
||||||
|
|
||||||
|
author = '@balkian'
|
||||||
|
version = '0.2'
|
||||||
|
|
||||||
|
dictionaries = [basic.emojis, basic.emoticons]
|
||||||
|
|
||||||
|
mappings = {'pos': 'marl:Positive', 'neg': 'marl:Negative'}
|
||||||
|
|
||||||
|
def analyse_entry(self, entry, params):
|
||||||
|
polarity = basic.get_polarity(entry.text, self.dictionaries)
|
||||||
|
if polarity in self.mappings:
|
||||||
|
polarity = self.mappings[polarity]
|
||||||
|
|
||||||
|
s = models.Sentiment(marl__hasPolarity=polarity)
|
||||||
|
s.prov(self)
|
||||||
|
entry.sentiments.append(s)
|
||||||
|
yield entry
|
||||||
|
|
||||||
|
test_cases = [{
|
||||||
|
'input': 'Hello :)',
|
||||||
|
'polarity': 'marl:Positive'
|
||||||
|
}, {
|
||||||
|
'input': 'So sad :(',
|
||||||
|
'polarity': 'marl:Negative'
|
||||||
|
}, {
|
||||||
|
'input': 'Yay! Emojis 😁',
|
||||||
|
'polarity': 'marl:Positive'
|
||||||
|
}, {
|
||||||
|
'input': 'But no emoticons 😢',
|
||||||
|
'polarity': 'marl:Negative'
|
||||||
|
}]
|
||||||
|
|
||||||
|
|
||||||
|
class EmojiOnly(Dictionary):
|
||||||
|
'''Sentiment annotation with a basic lexicon of emojis'''
|
||||||
|
dictionaries = [basic.emojis]
|
||||||
|
|
||||||
|
test_cases = [{
|
||||||
|
'input': 'Hello :)',
|
||||||
|
'polarity': 'marl:Neutral'
|
||||||
|
}, {
|
||||||
|
'input': 'So sad :(',
|
||||||
|
'polarity': 'marl:Neutral'
|
||||||
|
}, {
|
||||||
|
'input': 'Yay! Emojis 😁',
|
||||||
|
'polarity': 'marl:Positive'
|
||||||
|
}, {
|
||||||
|
'input': 'But no emoticons 😢',
|
||||||
|
'polarity': 'marl:Negative'
|
||||||
|
}]
|
||||||
|
|
||||||
|
|
||||||
|
class EmoticonsOnly(Dictionary):
|
||||||
|
'''Sentiment annotation with a basic lexicon of emoticons'''
|
||||||
|
dictionaries = [basic.emoticons]
|
||||||
|
|
||||||
|
test_cases = [{
|
||||||
|
'input': 'Hello :)',
|
||||||
|
'polarity': 'marl:Positive'
|
||||||
|
}, {
|
||||||
|
'input': 'So sad :(',
|
||||||
|
'polarity': 'marl:Negative'
|
||||||
|
}, {
|
||||||
|
'input': 'Yay! Emojis 😁',
|
||||||
|
'polarity': 'marl:Neutral'
|
||||||
|
}, {
|
||||||
|
'input': 'But no emoticons 😢',
|
||||||
|
'polarity': 'marl:Neutral'
|
||||||
|
}]
|
||||||
|
|
||||||
|
|
||||||
|
class Salutes(Dictionary):
|
||||||
|
'''Sentiment annotation with a custom lexicon, for illustration purposes'''
|
||||||
|
dictionaries = [{
|
||||||
|
'marl:Positive': ['Hello', '!'],
|
||||||
|
'marl:Negative': ['Good bye', ]
|
||||||
|
}]
|
||||||
|
|
||||||
|
test_cases = [{
|
||||||
|
'input': 'Hello :)',
|
||||||
|
'polarity': 'marl:Positive'
|
||||||
|
}, {
|
||||||
|
'input': 'Good bye :(',
|
||||||
|
'polarity': 'marl:Negative'
|
||||||
|
}, {
|
||||||
|
'input': 'Yay! Emojis 😁',
|
||||||
|
'polarity': 'marl:Positive'
|
||||||
|
}, {
|
||||||
|
'input': 'But no emoticons 😢',
|
||||||
|
'polarity': 'marl:Neutral'
|
||||||
|
}]
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
easy_test()
|
25
example-plugins/dummy_plugin.py
Normal file
25
example-plugins/dummy_plugin.py
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
from senpy import AnalysisPlugin, easy
|
||||||
|
|
||||||
|
|
||||||
|
class Dummy(AnalysisPlugin):
|
||||||
|
'''This is a dummy self-contained plugin'''
|
||||||
|
author = '@balkian'
|
||||||
|
version = '0.1'
|
||||||
|
|
||||||
|
def analyse_entry(self, entry, params):
|
||||||
|
entry['nif:isString'] = entry['nif:isString'][::-1]
|
||||||
|
entry.reversed = entry.get('reversed', 0) + 1
|
||||||
|
yield entry
|
||||||
|
|
||||||
|
test_cases = [{
|
||||||
|
'entry': {
|
||||||
|
'nif:isString': 'Hello',
|
||||||
|
},
|
||||||
|
'expected': {
|
||||||
|
'nif:isString': 'olleH'
|
||||||
|
}
|
||||||
|
}]
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
easy()
|
40
example-plugins/dummy_required_plugin.py
Normal file
40
example-plugins/dummy_required_plugin.py
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
from senpy import AnalysisPlugin, easy
|
||||||
|
|
||||||
|
|
||||||
|
class DummyRequired(AnalysisPlugin):
|
||||||
|
'''This is a dummy self-contained plugin'''
|
||||||
|
author = '@balkian'
|
||||||
|
version = '0.1'
|
||||||
|
extra_params = {
|
||||||
|
'example': {
|
||||||
|
'description': 'An example parameter',
|
||||||
|
'required': True,
|
||||||
|
'options': ['a', 'b']
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def analyse_entry(self, entry, params):
|
||||||
|
entry['nif:isString'] = entry['nif:isString'][::-1]
|
||||||
|
entry.reversed = entry.get('reversed', 0) + 1
|
||||||
|
yield entry
|
||||||
|
|
||||||
|
test_cases = [{
|
||||||
|
'entry': {
|
||||||
|
'nif:isString': 'Hello',
|
||||||
|
},
|
||||||
|
'should_fail': True
|
||||||
|
}, {
|
||||||
|
'entry': {
|
||||||
|
'nif:isString': 'Hello',
|
||||||
|
},
|
||||||
|
'params': {
|
||||||
|
'example': 'a'
|
||||||
|
},
|
||||||
|
'expected': {
|
||||||
|
'nif:isString': 'olleH'
|
||||||
|
}
|
||||||
|
}]
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
easy()
|
24
example-plugins/mynoop.py
Normal file
24
example-plugins/mynoop.py
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
import noop
|
||||||
|
from senpy.plugins import SentimentPlugin
|
||||||
|
|
||||||
|
|
||||||
|
class NoOp(SentimentPlugin):
|
||||||
|
'''This plugin does nothing. Literally nothing.'''
|
||||||
|
|
||||||
|
version = 0
|
||||||
|
|
||||||
|
def analyse_entry(self, entry, *args, **kwargs):
|
||||||
|
yield entry
|
||||||
|
|
||||||
|
def test(self):
|
||||||
|
print(dir(noop))
|
||||||
|
super(NoOp, self).test()
|
||||||
|
|
||||||
|
test_cases = [{
|
||||||
|
'entry': {
|
||||||
|
'nif:isString': 'hello'
|
||||||
|
},
|
||||||
|
'expected': {
|
||||||
|
'nif:isString': 'hello'
|
||||||
|
}
|
||||||
|
}]
|
3
example-plugins/mynoop.senpy
Normal file
3
example-plugins/mynoop.senpy
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
module: mynoop
|
||||||
|
requirements:
|
||||||
|
- noop
|
63
example-plugins/parameterized_plugin.py
Normal file
63
example-plugins/parameterized_plugin.py
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
#!/usr/local/bin/python
|
||||||
|
# coding: utf-8
|
||||||
|
|
||||||
|
from senpy import easy_test, models, plugins
|
||||||
|
|
||||||
|
import basic
|
||||||
|
|
||||||
|
|
||||||
|
class ParameterizedDictionary(plugins.SentimentPlugin):
|
||||||
|
'''This is a basic self-contained plugin'''
|
||||||
|
|
||||||
|
author = '@balkian'
|
||||||
|
version = '0.2'
|
||||||
|
|
||||||
|
extra_params = {
|
||||||
|
'positive-words': {
|
||||||
|
'description': 'Comma-separated list of words that are considered positive',
|
||||||
|
'aliases': ['positive'],
|
||||||
|
'required': True
|
||||||
|
},
|
||||||
|
'negative-words': {
|
||||||
|
'description': 'Comma-separated list of words that are considered negative',
|
||||||
|
'aliases': ['negative'],
|
||||||
|
'required': False
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def analyse_entry(self, entry, params):
|
||||||
|
positive_words = params['positive-words'].split(',')
|
||||||
|
negative_words = params['negative-words'].split(',')
|
||||||
|
dictionary = {
|
||||||
|
'marl:Positive': positive_words,
|
||||||
|
'marl:Negative': negative_words,
|
||||||
|
}
|
||||||
|
polarity = basic.get_polarity(entry.text, [dictionary])
|
||||||
|
|
||||||
|
s = models.Sentiment(marl__hasPolarity=polarity)
|
||||||
|
s.prov(self)
|
||||||
|
entry.sentiments.append(s)
|
||||||
|
yield entry
|
||||||
|
|
||||||
|
test_cases = [
|
||||||
|
{
|
||||||
|
'input': 'Hello :)',
|
||||||
|
'polarity': 'marl:Positive',
|
||||||
|
'parameters': {
|
||||||
|
'positive': "Hello,:)",
|
||||||
|
'negative': "sad,:()"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'input': 'Hello :)',
|
||||||
|
'polarity': 'marl:Negative',
|
||||||
|
'parameters': {
|
||||||
|
'positive': "",
|
||||||
|
'negative': "Hello"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
easy_test()
|
33
example-plugins/sklearn/mydata.py
Normal file
33
example-plugins/sklearn/mydata.py
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
'''
|
||||||
|
Create a dummy dataset.
|
||||||
|
Messages with a happy emoticon are labelled positive
|
||||||
|
Messages with a sad emoticon are labelled negative
|
||||||
|
'''
|
||||||
|
import random
|
||||||
|
|
||||||
|
dataset = []
|
||||||
|
|
||||||
|
vocabulary = ['hello', 'world', 'senpy', 'cool', 'goodbye', 'random', 'text']
|
||||||
|
|
||||||
|
emojimap = {
|
||||||
|
1: [':)', ],
|
||||||
|
-1: [':(', ]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
for tag, values in emojimap.items():
|
||||||
|
for i in range(1000):
|
||||||
|
msg = ''
|
||||||
|
for j in range(3):
|
||||||
|
msg += random.choice(vocabulary)
|
||||||
|
msg += " "
|
||||||
|
msg += random.choice(values)
|
||||||
|
dataset.append([msg, tag])
|
||||||
|
|
||||||
|
|
||||||
|
text = []
|
||||||
|
labels = []
|
||||||
|
|
||||||
|
for i in dataset:
|
||||||
|
text.append(i[0])
|
||||||
|
labels.append(i[1])
|
30
example-plugins/sklearn/mypipeline.py
Normal file
30
example-plugins/sklearn/mypipeline.py
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
from sklearn.pipeline import Pipeline
|
||||||
|
from sklearn.feature_extraction.text import CountVectorizer
|
||||||
|
from sklearn.model_selection import train_test_split
|
||||||
|
|
||||||
|
from mydata import text, labels
|
||||||
|
|
||||||
|
X_train, X_test, y_train, y_test = train_test_split(text, labels, test_size=0.12, random_state=42)
|
||||||
|
|
||||||
|
from sklearn.naive_bayes import MultinomialNB
|
||||||
|
|
||||||
|
|
||||||
|
count_vec = CountVectorizer(tokenizer=lambda x: x.split())
|
||||||
|
clf3 = MultinomialNB()
|
||||||
|
pipeline = Pipeline([('cv', count_vec),
|
||||||
|
('clf', clf3)])
|
||||||
|
|
||||||
|
pipeline.fit(X_train, y_train)
|
||||||
|
print('Feature names: {}'.format(count_vec.get_feature_names()))
|
||||||
|
print('Class count: {}'.format(clf3.class_count_))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
print('--Results--')
|
||||||
|
tests = [
|
||||||
|
(['The sentiment for senpy should be positive :)', ], 1),
|
||||||
|
(['The sentiment for anything else should be negative :()', ], -1)
|
||||||
|
]
|
||||||
|
for features, expected in tests:
|
||||||
|
result = pipeline.predict(features)
|
||||||
|
print('Input: {}\nExpected: {}\nGot: {}'.format(features[0], expected, result))
|
37
example-plugins/sklearn/pipeline_plugin.py
Normal file
37
example-plugins/sklearn/pipeline_plugin.py
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
from senpy import SentimentBox, MappingMixin, easy_test
|
||||||
|
|
||||||
|
from mypipeline import pipeline
|
||||||
|
|
||||||
|
|
||||||
|
class PipelineSentiment(MappingMixin, SentimentBox):
|
||||||
|
'''
|
||||||
|
This is a pipeline plugin that wraps a classifier defined in another module
|
||||||
|
(mypipeline).
|
||||||
|
'''
|
||||||
|
author = '@balkian'
|
||||||
|
version = 0.1
|
||||||
|
maxPolarityValue = 1
|
||||||
|
minPolarityValue = -1
|
||||||
|
|
||||||
|
mappings = {
|
||||||
|
1: 'marl:Positive',
|
||||||
|
-1: 'marl:Negative'
|
||||||
|
}
|
||||||
|
|
||||||
|
def predict_one(self, input):
|
||||||
|
return pipeline.predict([input, ])[0]
|
||||||
|
|
||||||
|
test_cases = [
|
||||||
|
{
|
||||||
|
'input': 'The sentiment for senpy should be positive :)',
|
||||||
|
'polarity': 'marl:Positive'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'input': 'The sentiment for senpy should be negative :(',
|
||||||
|
'polarity': 'marl:Negative'
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
easy_test()
|
27
example-plugins/sleep_plugin.py
Normal file
27
example-plugins/sleep_plugin.py
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
from senpy.plugins import AnalysisPlugin
|
||||||
|
from time import sleep
|
||||||
|
|
||||||
|
|
||||||
|
class Sleep(AnalysisPlugin):
|
||||||
|
'''Dummy plugin to test async'''
|
||||||
|
author = "@balkian"
|
||||||
|
version = "0.2"
|
||||||
|
timeout = 0.05
|
||||||
|
extra_params = {
|
||||||
|
"timeout": {
|
||||||
|
"@id": "timeout_sleep",
|
||||||
|
"aliases": ["timeout", "to"],
|
||||||
|
"required": False,
|
||||||
|
"default": 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def activate(self, *args, **kwargs):
|
||||||
|
sleep(self.timeout)
|
||||||
|
|
||||||
|
def analyse_entry(self, entry, params):
|
||||||
|
sleep(float(params.get("timeout", self.timeout)))
|
||||||
|
yield entry
|
||||||
|
|
||||||
|
def test(self):
|
||||||
|
pass
|
1
extra-requirements.txt
Normal file
1
extra-requirements.txt
Normal file
@@ -0,0 +1 @@
|
|||||||
|
gsitk
|
@@ -13,7 +13,7 @@ spec:
|
|||||||
spec:
|
spec:
|
||||||
containers:
|
containers:
|
||||||
- name: senpy-latest
|
- name: senpy-latest
|
||||||
image: gsiupm/senpy:latest
|
image: $IMAGEWTAG
|
||||||
imagePullPolicy: Always
|
imagePullPolicy: Always
|
||||||
args:
|
args:
|
||||||
- "--default-plugins"
|
- "--default-plugins"
|
||||||
|
@@ -3,10 +3,13 @@ requests>=2.4.1
|
|||||||
tornado>=4.4.3
|
tornado>=4.4.3
|
||||||
PyLD>=0.6.5
|
PyLD>=0.6.5
|
||||||
nltk
|
nltk
|
||||||
six
|
|
||||||
future
|
future
|
||||||
jsonschema
|
jsonschema
|
||||||
jsonref
|
jsonref
|
||||||
PyYAML
|
PyYAML
|
||||||
rdflib
|
rdflib
|
||||||
rdflib-jsonld
|
rdflib-jsonld
|
||||||
|
numpy
|
||||||
|
scipy
|
||||||
|
scikit-learn
|
||||||
|
responses
|
||||||
|
@@ -25,4 +25,10 @@ logger = logging.getLogger(__name__)
|
|||||||
|
|
||||||
logger.info('Using senpy version: {}'.format(__version__))
|
logger.info('Using senpy version: {}'.format(__version__))
|
||||||
|
|
||||||
|
from .utils import easy, easy_load, easy_test # noqa: F401
|
||||||
|
|
||||||
|
from .models import * # noqa: F401,F403
|
||||||
|
from .plugins import * # noqa: F401,F403
|
||||||
|
from .extensions import * # noqa: F401,F403
|
||||||
|
|
||||||
__all__ = ['api', 'blueprints', 'cli', 'extensions', 'models', 'plugins']
|
__all__ = ['api', 'blueprints', 'cli', 'extensions', 'models', 'plugins']
|
||||||
|
@@ -22,9 +22,11 @@ the server.
|
|||||||
|
|
||||||
from flask import Flask
|
from flask import Flask
|
||||||
from senpy.extensions import Senpy
|
from senpy.extensions import Senpy
|
||||||
|
from senpy.utils import easy_test
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
import sys
|
||||||
import argparse
|
import argparse
|
||||||
import senpy
|
import senpy
|
||||||
|
|
||||||
@@ -38,7 +40,7 @@ def main():
|
|||||||
'-l',
|
'-l',
|
||||||
metavar='logging_level',
|
metavar='logging_level',
|
||||||
type=str,
|
type=str,
|
||||||
default="INFO",
|
default="WARN",
|
||||||
help='Logging level')
|
help='Logging level')
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'--debug',
|
'--debug',
|
||||||
@@ -66,7 +68,7 @@ def main():
|
|||||||
'--plugins-folder',
|
'--plugins-folder',
|
||||||
'-f',
|
'-f',
|
||||||
type=str,
|
type=str,
|
||||||
default='plugins',
|
default='.',
|
||||||
help='Where to look for plugins.')
|
help='Where to look for plugins.')
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'--only-install',
|
'--only-install',
|
||||||
@@ -74,31 +76,87 @@ def main():
|
|||||||
action='store_true',
|
action='store_true',
|
||||||
default=False,
|
default=False,
|
||||||
help='Do not run a server, only install plugin dependencies')
|
help='Do not run a server, only install plugin dependencies')
|
||||||
|
parser.add_argument(
|
||||||
|
'--only-test',
|
||||||
|
action='store_true',
|
||||||
|
default=False,
|
||||||
|
help='Do not run a server, just test all plugins')
|
||||||
|
parser.add_argument(
|
||||||
|
'--test',
|
||||||
|
'-t',
|
||||||
|
action='store_true',
|
||||||
|
default=False,
|
||||||
|
help='Test all plugins before launching the server')
|
||||||
|
parser.add_argument(
|
||||||
|
'--only-list',
|
||||||
|
'--list',
|
||||||
|
action='store_true',
|
||||||
|
default=False,
|
||||||
|
help='Do not run a server, only list plugins found')
|
||||||
|
parser.add_argument(
|
||||||
|
'--data-folder',
|
||||||
|
'--data',
|
||||||
|
type=str,
|
||||||
|
default=None,
|
||||||
|
help='Where to look for data. It be set with the SENPY_DATA environment variable as well.')
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'--threaded',
|
'--threaded',
|
||||||
action='store_false',
|
action='store_false',
|
||||||
default=True,
|
default=True,
|
||||||
help='Run a threaded server')
|
help='Run a threaded server')
|
||||||
|
parser.add_argument(
|
||||||
|
'--no-deps',
|
||||||
|
'-n',
|
||||||
|
action='store_true',
|
||||||
|
default=False,
|
||||||
|
help='Skip installing dependencies')
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'--version',
|
'--version',
|
||||||
'-v',
|
'-v',
|
||||||
action='store_true',
|
action='store_true',
|
||||||
default=False,
|
default=False,
|
||||||
help='Output the senpy version and exit')
|
help='Output the senpy version and exit')
|
||||||
|
parser.add_argument(
|
||||||
|
'--allow-fail',
|
||||||
|
'--fail',
|
||||||
|
action='store_true',
|
||||||
|
default=False,
|
||||||
|
help='Do not exit if some plugins fail to activate')
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
if args.version:
|
if args.version:
|
||||||
print('Senpy version {}'.format(senpy.__version__))
|
print('Senpy version {}'.format(senpy.__version__))
|
||||||
|
print(sys.version)
|
||||||
exit(1)
|
exit(1)
|
||||||
logging.basicConfig()
|
|
||||||
rl = logging.getLogger()
|
rl = logging.getLogger()
|
||||||
rl.setLevel(getattr(logging, args.level))
|
rl.setLevel(getattr(logging, args.level))
|
||||||
app = Flask(__name__)
|
app = Flask(__name__)
|
||||||
app.debug = args.debug
|
app.debug = args.debug
|
||||||
sp = Senpy(app, args.plugins_folder, default_plugins=args.default_plugins)
|
sp = Senpy(app, args.plugins_folder,
|
||||||
sp.install_deps()
|
default_plugins=args.default_plugins,
|
||||||
|
data_folder=args.data_folder)
|
||||||
|
if args.only_list:
|
||||||
|
plugins = sp.plugins()
|
||||||
|
maxname = max(len(x.name) for x in plugins)
|
||||||
|
maxversion = max(len(x.version) for x in plugins)
|
||||||
|
print('Found {} plugins:'.format(len(plugins)))
|
||||||
|
for plugin in plugins:
|
||||||
|
import inspect
|
||||||
|
fpath = inspect.getfile(plugin.__class__)
|
||||||
|
print('\t{: <{maxname}} @ {: <{maxversion}} -> {}'.format(plugin.name,
|
||||||
|
plugin.version,
|
||||||
|
fpath,
|
||||||
|
maxname=maxname,
|
||||||
|
maxversion=maxversion))
|
||||||
|
return
|
||||||
|
if not args.no_deps:
|
||||||
|
sp.install_deps()
|
||||||
if args.only_install:
|
if args.only_install:
|
||||||
return
|
return
|
||||||
sp.activate_all()
|
sp.activate_all(allow_fail=args.allow_fail)
|
||||||
|
if args.test or args.only_test:
|
||||||
|
easy_test(sp.plugins(), debug=args.debug)
|
||||||
|
if args.only_test:
|
||||||
|
return
|
||||||
print('Senpy version {}'.format(senpy.__version__))
|
print('Senpy version {}'.format(senpy.__version__))
|
||||||
print('Server running on port %s:%d. Ctrl+C to quit' % (args.host,
|
print('Server running on port %s:%d. Ctrl+C to quit' % (args.host,
|
||||||
args.port))
|
args.port))
|
||||||
|
229
senpy/api.py
229
senpy/api.py
@@ -1,50 +1,55 @@
|
|||||||
from future.utils import iteritems
|
from future.utils import iteritems
|
||||||
from .models import Error, Results, Entry, from_string
|
from .models import Analysis, Error, Results, Entry, from_string
|
||||||
import logging
|
import logging
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
boolean = [True, False]
|
||||||
|
|
||||||
API_PARAMS = {
|
API_PARAMS = {
|
||||||
"algorithm": {
|
"algorithm": {
|
||||||
"aliases": ["algorithms", "a", "algo"],
|
"aliases": ["algorithms", "a", "algo"],
|
||||||
"required": False,
|
"required": True,
|
||||||
|
"default": 'default',
|
||||||
"description": ("Algorithms that will be used to process the request."
|
"description": ("Algorithms that will be used to process the request."
|
||||||
"It may be a list of comma-separated names."),
|
"It may be a list of comma-separated names."),
|
||||||
},
|
},
|
||||||
"expanded-jsonld": {
|
"expanded-jsonld": {
|
||||||
"@id": "expanded-jsonld",
|
"@id": "expanded-jsonld",
|
||||||
"aliases": ["expanded"],
|
"aliases": ["expanded"],
|
||||||
|
"options": boolean,
|
||||||
"required": True,
|
"required": True,
|
||||||
"default": 0
|
"default": False
|
||||||
},
|
},
|
||||||
"with_parameters": {
|
"with_parameters": {
|
||||||
"aliases": ['withparameters',
|
"aliases": ['withparameters',
|
||||||
'with-parameters'],
|
'with-parameters'],
|
||||||
"options": "boolean",
|
"options": boolean,
|
||||||
"default": False,
|
"default": False,
|
||||||
"required": True
|
"required": True
|
||||||
},
|
},
|
||||||
"plugin_type": {
|
|
||||||
"@id": "pluginType",
|
|
||||||
"description": 'What kind of plugins to list',
|
|
||||||
"aliases": ["pluginType"],
|
|
||||||
"required": True,
|
|
||||||
"default": "analysisPlugin"
|
|
||||||
},
|
|
||||||
"outformat": {
|
"outformat": {
|
||||||
"@id": "outformat",
|
"@id": "outformat",
|
||||||
"aliases": ["o"],
|
"aliases": ["o"],
|
||||||
"default": "json-ld",
|
"default": "json-ld",
|
||||||
"required": True,
|
"required": True,
|
||||||
"options": ["json-ld", "turtle"],
|
"options": ["json-ld", "turtle", "ntriples"],
|
||||||
},
|
},
|
||||||
"help": {
|
"help": {
|
||||||
"@id": "help",
|
"@id": "help",
|
||||||
"description": "Show additional help to know more about the possible parameters",
|
"description": "Show additional help to know more about the possible parameters",
|
||||||
"aliases": ["h"],
|
"aliases": ["h"],
|
||||||
"required": True,
|
"required": True,
|
||||||
"options": "boolean",
|
"options": boolean,
|
||||||
"default": False
|
"default": False
|
||||||
},
|
},
|
||||||
|
"verbose": {
|
||||||
|
"@id": "verbose",
|
||||||
|
"description": "Show all help, including the common API parameters, or only plugin-related info",
|
||||||
|
"aliases": ["v"],
|
||||||
|
"required": True,
|
||||||
|
"options": boolean,
|
||||||
|
"default": True
|
||||||
|
},
|
||||||
"emotionModel": {
|
"emotionModel": {
|
||||||
"@id": "emotionModel",
|
"@id": "emotionModel",
|
||||||
"aliases": ["emoModel"],
|
"aliases": ["emoModel"],
|
||||||
@@ -59,12 +64,37 @@ API_PARAMS = {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
EVAL_PARAMS = {
|
||||||
|
"algorithm": {
|
||||||
|
"aliases": ["plug", "p", "plugins", "algorithms", 'algo', 'a', 'plugin'],
|
||||||
|
"description": "Plugins to be evaluated",
|
||||||
|
"required": True,
|
||||||
|
"help": "See activated plugins in /plugins"
|
||||||
|
},
|
||||||
|
"dataset": {
|
||||||
|
"aliases": ["datasets", "data", "d"],
|
||||||
|
"description": "Datasets to be evaluated",
|
||||||
|
"required": True,
|
||||||
|
"help": "See avalaible datasets in /datasets"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
PLUGINS_PARAMS = {
|
||||||
|
"plugin_type": {
|
||||||
|
"@id": "pluginType",
|
||||||
|
"description": 'What kind of plugins to list',
|
||||||
|
"aliases": ["pluginType"],
|
||||||
|
"required": True,
|
||||||
|
"default": 'analysisPlugin'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
WEB_PARAMS = {
|
WEB_PARAMS = {
|
||||||
"inHeaders": {
|
"inHeaders": {
|
||||||
"aliases": ["headers"],
|
"aliases": ["headers"],
|
||||||
"required": True,
|
"required": True,
|
||||||
"default": False,
|
"default": False,
|
||||||
"options": "boolean"
|
"options": boolean
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -95,7 +125,7 @@ NIF_PARAMS = {
|
|||||||
"aliases": ["f"],
|
"aliases": ["f"],
|
||||||
"required": False,
|
"required": False,
|
||||||
"default": "text",
|
"default": "text",
|
||||||
"options": ["turtle", "text", "json-ld"],
|
"options": ["text", "json-ld"],
|
||||||
},
|
},
|
||||||
"language": {
|
"language": {
|
||||||
"@id": "language",
|
"@id": "language",
|
||||||
@@ -113,10 +143,19 @@ NIF_PARAMS = {
|
|||||||
"aliases": ["u"],
|
"aliases": ["u"],
|
||||||
"required": False,
|
"required": False,
|
||||||
"default": "RFC5147String",
|
"default": "RFC5147String",
|
||||||
"options": "RFC5147String"
|
"options": ["RFC5147String", ]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
BUILTIN_PARAMS = {}
|
||||||
|
|
||||||
|
for d in [
|
||||||
|
NIF_PARAMS, CLI_PARAMS, WEB_PARAMS, PLUGINS_PARAMS, EVAL_PARAMS,
|
||||||
|
API_PARAMS
|
||||||
|
]:
|
||||||
|
for k, v in d.items():
|
||||||
|
BUILTIN_PARAMS[k] = v
|
||||||
|
|
||||||
|
|
||||||
def parse_params(indict, *specs):
|
def parse_params(indict, *specs):
|
||||||
if not specs:
|
if not specs:
|
||||||
@@ -126,57 +165,151 @@ def parse_params(indict, *specs):
|
|||||||
wrong_params = {}
|
wrong_params = {}
|
||||||
for spec in specs:
|
for spec in specs:
|
||||||
for param, options in iteritems(spec):
|
for param, options in iteritems(spec):
|
||||||
if param[0] != "@": # Exclude json-ld properties
|
for alias in options.get("aliases", []):
|
||||||
for alias in options.get("aliases", []):
|
# Replace each alias with the correct name of the parameter
|
||||||
# Replace each alias with the correct name of the parameter
|
if alias in indict and alias != param:
|
||||||
if alias in indict and alias is not param:
|
outdict[param] = indict[alias]
|
||||||
outdict[param] = indict[alias]
|
del outdict[alias]
|
||||||
del indict[alias]
|
continue
|
||||||
continue
|
if param not in outdict:
|
||||||
if param not in outdict:
|
if "default" in options:
|
||||||
if options.get("required", False) and "default" not in options:
|
# We assume the default is correct
|
||||||
wrong_params[param] = spec[param]
|
outdict[param] = options["default"]
|
||||||
else:
|
elif options.get("required", False):
|
||||||
if "default" in options:
|
wrong_params[param] = spec[param]
|
||||||
outdict[param] = options["default"]
|
elif "options" in options:
|
||||||
elif "options" in spec[param]:
|
if options["options"] == boolean:
|
||||||
if spec[param]["options"] == "boolean":
|
outdict[param] = str(outdict[param]).lower() in ['true', '1']
|
||||||
outdict[param] = outdict[param] in [None, True, 'true', '1']
|
elif outdict[param] not in options["options"]:
|
||||||
elif outdict[param] not in spec[param]["options"]:
|
wrong_params[param] = spec[param]
|
||||||
wrong_params[param] = spec[param]
|
|
||||||
if wrong_params:
|
if wrong_params:
|
||||||
logger.debug("Error parsing: %s", wrong_params)
|
logger.debug("Error parsing: %s", wrong_params)
|
||||||
message = Error(
|
message = Error(
|
||||||
status=400,
|
status=400,
|
||||||
message="Missing or invalid parameters",
|
message='Missing or invalid parameters',
|
||||||
parameters=outdict,
|
parameters=outdict,
|
||||||
errors={param: error
|
errors=wrong_params)
|
||||||
for param, error in iteritems(wrong_params)})
|
|
||||||
raise message
|
raise message
|
||||||
if 'algorithm' in outdict and not isinstance(outdict['algorithm'], list):
|
|
||||||
outdict['algorithm'] = outdict['algorithm'].split(',')
|
|
||||||
return outdict
|
return outdict
|
||||||
|
|
||||||
|
|
||||||
def get_extra_params(request, plugin=None):
|
def get_all_params(plugins, *specs):
|
||||||
params = request.parameters.copy()
|
'''Return a list of parameters for a given set of specifications and plugins.'''
|
||||||
if plugin:
|
dic = {}
|
||||||
extra_params = parse_params(params, plugin.get('extra_params', {}))
|
for s in specs:
|
||||||
params.update(extra_params)
|
dic.update(s)
|
||||||
|
dic.update(get_extra_params(plugins))
|
||||||
|
return dic
|
||||||
|
|
||||||
|
|
||||||
|
def get_extra_params(plugins):
|
||||||
|
'''Get a list of possible parameters given a list of plugins'''
|
||||||
|
params = {}
|
||||||
|
extra_params = {}
|
||||||
|
for plugin in plugins:
|
||||||
|
this_params = plugin.get('extra_params', {})
|
||||||
|
for k, v in this_params.items():
|
||||||
|
if k not in extra_params:
|
||||||
|
extra_params[k] = {}
|
||||||
|
extra_params[k][plugin.name] = v
|
||||||
|
for k, v in extra_params.items(): # Resolve conflicts
|
||||||
|
if len(v) == 1: # Add the extra options that do not collide
|
||||||
|
params[k] = list(v.values())[0]
|
||||||
|
else:
|
||||||
|
required = False
|
||||||
|
aliases = None
|
||||||
|
options = None
|
||||||
|
default = None
|
||||||
|
nodefault = False # Set when defaults are not compatible
|
||||||
|
|
||||||
|
for plugin, opt in v.items():
|
||||||
|
params['{}.{}'.format(plugin, k)] = opt
|
||||||
|
required = required or opt.get('required', False)
|
||||||
|
newaliases = set(opt.get('aliases', []))
|
||||||
|
if aliases is None:
|
||||||
|
aliases = newaliases
|
||||||
|
else:
|
||||||
|
aliases = aliases & newaliases
|
||||||
|
if 'options' in opt:
|
||||||
|
newoptions = set(opt['options'])
|
||||||
|
options = newoptions if options is None else options & newoptions
|
||||||
|
if 'default' in opt:
|
||||||
|
newdefault = opt['default']
|
||||||
|
if newdefault:
|
||||||
|
if default is None and not nodefault:
|
||||||
|
default = newdefault
|
||||||
|
elif newdefault != default:
|
||||||
|
nodefault = True
|
||||||
|
default = None
|
||||||
|
# Check for incompatibilities
|
||||||
|
if options != set():
|
||||||
|
params[k] = {
|
||||||
|
'default': default,
|
||||||
|
'aliases': list(aliases),
|
||||||
|
'required': required,
|
||||||
|
'options': list(options)
|
||||||
|
}
|
||||||
return params
|
return params
|
||||||
|
|
||||||
|
|
||||||
|
def parse_analysis(params, plugins):
|
||||||
|
'''
|
||||||
|
Parse the given parameters individually for each plugin, and get a list of the parameters that
|
||||||
|
belong to each of the plugins. Each item can then be used in the plugin.analyse_entries method.
|
||||||
|
'''
|
||||||
|
analysis_list = []
|
||||||
|
for i, plugin in enumerate(plugins):
|
||||||
|
if not plugin:
|
||||||
|
continue
|
||||||
|
this_params = filter_params(params, plugin, i)
|
||||||
|
parsed = parse_params(this_params, plugin.get('extra_params', {}))
|
||||||
|
analysis = plugin.activity(parsed)
|
||||||
|
analysis_list.append(analysis)
|
||||||
|
return analysis_list
|
||||||
|
|
||||||
|
|
||||||
|
def filter_params(params, plugin, ith=-1):
|
||||||
|
'''
|
||||||
|
Get the values within params that apply to a plugin.
|
||||||
|
More specific names override more general names, in this order:
|
||||||
|
|
||||||
|
<index_order>.parameter > <plugin.name>.parameter > parameter
|
||||||
|
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
>>> filter_params({'0.hello': True, 'hello': False}, Plugin(), 0)
|
||||||
|
{ '0.hello': True, 'hello': True}
|
||||||
|
|
||||||
|
'''
|
||||||
|
thisparams = {}
|
||||||
|
if ith >= 0:
|
||||||
|
ith = '{}.'.format(ith)
|
||||||
|
else:
|
||||||
|
ith = ""
|
||||||
|
for k, v in params.items():
|
||||||
|
if ith and k.startswith(str(ith)):
|
||||||
|
thisparams[k[len(ith):]] = v
|
||||||
|
elif k.startswith(plugin.name):
|
||||||
|
thisparams[k[len(plugin.name) + 1:]] = v
|
||||||
|
elif k not in thisparams:
|
||||||
|
thisparams[k] = v
|
||||||
|
return thisparams
|
||||||
|
|
||||||
|
|
||||||
def parse_call(params):
|
def parse_call(params):
|
||||||
'''Return a results object based on the parameters used in a call/request.
|
'''
|
||||||
|
Return a results object based on the parameters used in a call/request.
|
||||||
'''
|
'''
|
||||||
params = parse_params(params, NIF_PARAMS)
|
params = parse_params(params, NIF_PARAMS)
|
||||||
if params['informat'] == 'text':
|
if params['informat'] == 'text':
|
||||||
results = Results()
|
results = Results()
|
||||||
entry = Entry(nif__isString=params['input'])
|
entry = Entry(nif__isString=params['input'], id='#') # Use @base
|
||||||
results.entries.append(entry)
|
results.entries.append(entry)
|
||||||
elif params['informat'] == 'json-ld':
|
elif params['informat'] == 'json-ld':
|
||||||
results = from_string(params['input'], cls=Results)
|
results = from_string(params['input'], cls=Results)
|
||||||
else:
|
else: # pragma: no cover
|
||||||
raise NotImplemented('Informat {} is not implemented'.format(params['informat']))
|
raise NotImplementedError('Informat {} is not implemented'.format(
|
||||||
|
params['informat']))
|
||||||
results.parameters = params
|
results.parameters = params
|
||||||
return results
|
return results
|
||||||
|
@@ -18,21 +18,40 @@
|
|||||||
Blueprints for Senpy
|
Blueprints for Senpy
|
||||||
"""
|
"""
|
||||||
from flask import (Blueprint, request, current_app, render_template, url_for,
|
from flask import (Blueprint, request, current_app, render_template, url_for,
|
||||||
jsonify)
|
jsonify, redirect)
|
||||||
from .models import Error, Response, Help, Plugins, read_schema
|
from .models import Error, Response, Help, Plugins, read_schema, dump_schema, Datasets
|
||||||
from . import api
|
from . import api
|
||||||
from .version import __version__
|
from .version import __version__
|
||||||
from functools import wraps
|
from functools import wraps
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import json
|
import json
|
||||||
|
import base64
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
api_blueprint = Blueprint("api", __name__)
|
api_blueprint = Blueprint("api", __name__)
|
||||||
demo_blueprint = Blueprint("demo", __name__)
|
demo_blueprint = Blueprint("demo", __name__, template_folder='templates')
|
||||||
ns_blueprint = Blueprint("ns", __name__)
|
ns_blueprint = Blueprint("ns", __name__)
|
||||||
|
|
||||||
|
_mimetypes_r = {'json-ld': ['application/ld+json'],
|
||||||
|
'turtle': ['text/turtle'],
|
||||||
|
'ntriples': ['application/n-triples'],
|
||||||
|
'text': ['text/plain']}
|
||||||
|
|
||||||
|
MIMETYPES = {}
|
||||||
|
|
||||||
|
for k, vs in _mimetypes_r.items():
|
||||||
|
for v in vs:
|
||||||
|
if v in MIMETYPES:
|
||||||
|
raise Exception('MIMETYPE {} specified for two formats: {} and {}'.format(v,
|
||||||
|
v,
|
||||||
|
MIMETYPES[v]))
|
||||||
|
MIMETYPES[v] = k
|
||||||
|
|
||||||
|
DEFAULT_MIMETYPE = 'application/ld+json'
|
||||||
|
DEFAULT_FORMAT = 'json-ld'
|
||||||
|
|
||||||
|
|
||||||
def get_params(req):
|
def get_params(req):
|
||||||
if req.method == 'POST':
|
if req.method == 'POST':
|
||||||
@@ -44,83 +63,166 @@ def get_params(req):
|
|||||||
return indict
|
return indict
|
||||||
|
|
||||||
|
|
||||||
|
def encoded_url(url=None, base=None):
|
||||||
|
code = ''
|
||||||
|
if not url:
|
||||||
|
if request.method == 'GET':
|
||||||
|
url = request.full_path[1:] # Remove the first slash
|
||||||
|
else:
|
||||||
|
hash(frozenset(tuple(request.parameters.items())))
|
||||||
|
code = 'hash:{}'.format(hash)
|
||||||
|
|
||||||
|
code = code or base64.urlsafe_b64encode(url.encode()).decode()
|
||||||
|
|
||||||
|
if base:
|
||||||
|
return base + code
|
||||||
|
return url_for('api.decode', code=code, _external=True)
|
||||||
|
|
||||||
|
|
||||||
|
def decoded_url(code, base=None):
|
||||||
|
if code.startswith('hash:'):
|
||||||
|
raise Exception('Can not decode a URL for a POST request')
|
||||||
|
base = base or request.url_root
|
||||||
|
path = base64.urlsafe_b64decode(code.encode()).decode()
|
||||||
|
return base + path
|
||||||
|
|
||||||
|
|
||||||
@demo_blueprint.route('/')
|
@demo_blueprint.route('/')
|
||||||
def index():
|
def index():
|
||||||
return render_template("index.html", version=__version__)
|
ev = str(get_params(request).get('evaluation', False))
|
||||||
|
evaluation_enabled = ev.lower() not in ['false', 'no', 'none']
|
||||||
|
|
||||||
|
return render_template("index.html",
|
||||||
|
evaluation=evaluation_enabled,
|
||||||
|
version=__version__)
|
||||||
|
|
||||||
|
|
||||||
@api_blueprint.route('/contexts/<entity>.jsonld')
|
@api_blueprint.route('/contexts/<entity>.jsonld')
|
||||||
def context(entity="context"):
|
def context(entity="context"):
|
||||||
context = Response._context
|
context = Response._context
|
||||||
context['@vocab'] = url_for('ns.index', _external=True)
|
context['@vocab'] = url_for('ns.index', _external=True)
|
||||||
|
context['endpoint'] = url_for('api.api_root', _external=True)
|
||||||
return jsonify({"@context": context})
|
return jsonify({"@context": context})
|
||||||
|
|
||||||
|
|
||||||
|
@api_blueprint.route('/d/<code>')
|
||||||
|
def decode(code):
|
||||||
|
try:
|
||||||
|
return redirect(decoded_url(code))
|
||||||
|
except Exception:
|
||||||
|
return Error('invalid URL').flask()
|
||||||
|
|
||||||
|
|
||||||
@ns_blueprint.route('/') # noqa: F811
|
@ns_blueprint.route('/') # noqa: F811
|
||||||
def index():
|
def index():
|
||||||
context = Response._context
|
context = Response._context.copy()
|
||||||
context['@vocab'] = url_for('.ns', _external=True)
|
context['endpoint'] = url_for('api.api_root', _external=True)
|
||||||
return jsonify({"@context": context})
|
return jsonify({"@context": context})
|
||||||
|
|
||||||
|
|
||||||
@api_blueprint.route('/schemas/<schema>')
|
@api_blueprint.route('/schemas/<schema>')
|
||||||
def schema(schema="definitions"):
|
def schema(schema="definitions"):
|
||||||
try:
|
try:
|
||||||
return jsonify(read_schema(schema))
|
return dump_schema(read_schema(schema))
|
||||||
except Exception: # Should be FileNotFoundError, but it's missing from py2
|
except Exception as ex: # Should be FileNotFoundError, but it's missing from py2
|
||||||
return Error(message="Schema not found", status=404).flask()
|
return Error(message="Schema not found: {}".format(ex), status=404).flask()
|
||||||
|
|
||||||
|
|
||||||
def basic_api(f):
|
def basic_api(f):
|
||||||
|
default_params = {
|
||||||
|
'inHeaders': False,
|
||||||
|
'expanded-jsonld': False,
|
||||||
|
'outformat': None,
|
||||||
|
'with_parameters': True,
|
||||||
|
}
|
||||||
|
|
||||||
@wraps(f)
|
@wraps(f)
|
||||||
def decorated_function(*args, **kwargs):
|
def decorated_function(*args, **kwargs):
|
||||||
raw_params = get_params(request)
|
raw_params = get_params(request)
|
||||||
|
logger.info('Getting request: {}'.format(raw_params))
|
||||||
headers = {'X-ORIGINAL-PARAMS': json.dumps(raw_params)}
|
headers = {'X-ORIGINAL-PARAMS': json.dumps(raw_params)}
|
||||||
|
params = default_params
|
||||||
|
|
||||||
outformat = 'json-ld'
|
|
||||||
try:
|
try:
|
||||||
print('Getting request:')
|
|
||||||
print(request)
|
|
||||||
params = api.parse_params(raw_params, api.WEB_PARAMS, api.API_PARAMS)
|
params = api.parse_params(raw_params, api.WEB_PARAMS, api.API_PARAMS)
|
||||||
if hasattr(request, 'parameters'):
|
if hasattr(request, 'parameters'):
|
||||||
request.parameters.update(params)
|
request.parameters.update(params)
|
||||||
else:
|
else:
|
||||||
request.parameters = params
|
request.parameters = params
|
||||||
response = f(*args, **kwargs)
|
response = f(*args, **kwargs)
|
||||||
except Error as ex:
|
except (Exception) as ex:
|
||||||
response = ex
|
if current_app.debug or current_app.config['TESTING']:
|
||||||
response.parameters = params
|
|
||||||
logger.error(ex)
|
|
||||||
if current_app.debug:
|
|
||||||
raise
|
raise
|
||||||
|
if not isinstance(ex, Error):
|
||||||
|
msg = "{}".format(ex)
|
||||||
|
ex = Error(message=msg, status=500)
|
||||||
|
response = ex
|
||||||
|
response.parameters = raw_params
|
||||||
|
logger.exception(ex)
|
||||||
|
|
||||||
in_headers = params['inHeaders']
|
if 'parameters' in response and not params['with_parameters']:
|
||||||
expanded = params['expanded-jsonld']
|
del response.parameters
|
||||||
outformat = params['outformat']
|
|
||||||
|
logger.info('Response: {}'.format(response))
|
||||||
|
mime = request.accept_mimetypes\
|
||||||
|
.best_match(MIMETYPES.keys(),
|
||||||
|
DEFAULT_MIMETYPE)
|
||||||
|
|
||||||
|
mimeformat = MIMETYPES.get(mime, DEFAULT_FORMAT)
|
||||||
|
outformat = params['outformat'] or mimeformat
|
||||||
|
|
||||||
return response.flask(
|
return response.flask(
|
||||||
in_headers=in_headers,
|
in_headers=params['inHeaders'],
|
||||||
headers=headers,
|
headers=headers,
|
||||||
prefix=url_for('.api_root', _external=True),
|
prefix=params.get('prefix', encoded_url()),
|
||||||
context_uri=url_for('api.context',
|
context_uri=url_for('api.context',
|
||||||
entity=type(response).__name__,
|
entity=type(response).__name__,
|
||||||
_external=True),
|
_external=True),
|
||||||
outformat=outformat,
|
outformat=outformat,
|
||||||
expanded=expanded)
|
expanded=params['expanded-jsonld'])
|
||||||
|
|
||||||
return decorated_function
|
return decorated_function
|
||||||
|
|
||||||
|
|
||||||
@api_blueprint.route('/', methods=['POST', 'GET'])
|
@api_blueprint.route('/', defaults={'plugin': None}, methods=['POST', 'GET'])
|
||||||
|
@api_blueprint.route('/<path:plugin>', methods=['POST', 'GET'])
|
||||||
@basic_api
|
@basic_api
|
||||||
def api_root():
|
def api_root(plugin):
|
||||||
|
if plugin:
|
||||||
|
if request.parameters['algorithm'] != api.API_PARAMS['algorithm']['default']:
|
||||||
|
raise Error('You cannot specify the algorithm with a parameter and a URL variable.'
|
||||||
|
' Please, remove one of them')
|
||||||
|
request.parameters['algorithm'] = tuple(plugin.replace('+', '/').split('/'))
|
||||||
|
|
||||||
|
params = request.parameters
|
||||||
|
plugin = request.parameters['algorithm']
|
||||||
|
|
||||||
|
sp = current_app.senpy
|
||||||
|
plugins = sp.get_plugins(plugin)
|
||||||
|
|
||||||
if request.parameters['help']:
|
if request.parameters['help']:
|
||||||
dic = dict(api.API_PARAMS, **api.NIF_PARAMS)
|
apis = []
|
||||||
|
if request.parameters['verbose']:
|
||||||
|
apis.append(api.BUILTIN_PARAMS)
|
||||||
|
allparameters = api.get_all_params(plugins, *apis)
|
||||||
|
response = Help(valid_parameters=allparameters)
|
||||||
|
return response
|
||||||
|
req = api.parse_call(request.parameters)
|
||||||
|
analysis = api.parse_analysis(req.parameters, plugins)
|
||||||
|
results = current_app.senpy.analyse(req, analysis)
|
||||||
|
return results
|
||||||
|
|
||||||
|
|
||||||
|
@api_blueprint.route('/evaluate/', methods=['POST', 'GET'])
|
||||||
|
@basic_api
|
||||||
|
def evaluate():
|
||||||
|
if request.parameters['help']:
|
||||||
|
dic = dict(api.EVAL_PARAMS)
|
||||||
response = Help(parameters=dic)
|
response = Help(parameters=dic)
|
||||||
return response
|
return response
|
||||||
else:
|
else:
|
||||||
req = api.parse_call(request.parameters)
|
params = api.parse_params(request.parameters, api.EVAL_PARAMS)
|
||||||
response = current_app.senpy.analyse(req)
|
response = current_app.senpy.evaluate(params)
|
||||||
return response
|
return response
|
||||||
|
|
||||||
|
|
||||||
@@ -128,22 +230,24 @@ def api_root():
|
|||||||
@basic_api
|
@basic_api
|
||||||
def plugins():
|
def plugins():
|
||||||
sp = current_app.senpy
|
sp = current_app.senpy
|
||||||
ptype = request.parameters.get('plugin_type')
|
params = api.parse_params(request.parameters, api.PLUGINS_PARAMS)
|
||||||
plugins = sp.filter_plugins(plugin_type=ptype)
|
ptype = params.get('plugin_type')
|
||||||
dic = Plugins(plugins=list(plugins.values()))
|
plugins = list(sp.plugins(plugin_type=ptype))
|
||||||
|
dic = Plugins(plugins=plugins)
|
||||||
return dic
|
return dic
|
||||||
|
|
||||||
|
|
||||||
@api_blueprint.route('/plugins/<plugin>/', methods=['POST', 'GET'])
|
@api_blueprint.route('/plugins/<plugin>/', methods=['POST', 'GET'])
|
||||||
@basic_api
|
@basic_api
|
||||||
def plugin(plugin=None):
|
def plugin(plugin):
|
||||||
sp = current_app.senpy
|
sp = current_app.senpy
|
||||||
if plugin == 'default' and sp.default_plugin:
|
return sp.get_plugin(plugin)
|
||||||
return sp.default_plugin
|
|
||||||
plugins = sp.filter_plugins(
|
|
||||||
id='plugins/{}'.format(plugin)) or sp.filter_plugins(name=plugin)
|
@api_blueprint.route('/datasets/', methods=['POST', 'GET'])
|
||||||
if plugins:
|
@basic_api
|
||||||
response = list(plugins.values())[0]
|
def datasets():
|
||||||
else:
|
sp = current_app.senpy
|
||||||
return Error(message="Plugin not found", status=404)
|
datasets = sp.datasets
|
||||||
return response
|
dic = Datasets(datasets=list(datasets.values()))
|
||||||
|
return dic
|
||||||
|
16
senpy/cli.py
16
senpy/cli.py
@@ -28,11 +28,15 @@ def main_function(argv):
|
|||||||
api.API_PARAMS,
|
api.API_PARAMS,
|
||||||
api.NIF_PARAMS)
|
api.NIF_PARAMS)
|
||||||
plugin_folder = params['plugin_folder']
|
plugin_folder = params['plugin_folder']
|
||||||
sp = Senpy(default_plugins=False, plugin_folder=plugin_folder)
|
default_plugins = params.get('default-plugins', False)
|
||||||
|
sp = Senpy(default_plugins=default_plugins, plugin_folder=plugin_folder)
|
||||||
request = api.parse_call(params)
|
request = api.parse_call(params)
|
||||||
algos = request.parameters.get('algorithm', sp.plugins.keys())
|
algos = sp.get_plugins(request.parameters.get('algorithm', None))
|
||||||
for algo in algos:
|
if algos:
|
||||||
sp.activate_plugin(algo)
|
for algo in algos:
|
||||||
|
sp.activate_plugin(algo.name)
|
||||||
|
else:
|
||||||
|
sp.activate_all()
|
||||||
res = sp.analyse(request)
|
res = sp.analyse(request)
|
||||||
return res
|
return res
|
||||||
|
|
||||||
@@ -42,9 +46,9 @@ def main():
|
|||||||
'''
|
'''
|
||||||
try:
|
try:
|
||||||
res = main_function(sys.argv[1:])
|
res = main_function(sys.argv[1:])
|
||||||
print(res.to_JSON())
|
print(res.serialize())
|
||||||
except Error as err:
|
except Error as err:
|
||||||
print(err.to_JSON())
|
print(err.serialize())
|
||||||
sys.exit(2)
|
sys.exit(2)
|
||||||
|
|
||||||
|
|
||||||
|
@@ -1,7 +1,6 @@
|
|||||||
import requests
|
import requests
|
||||||
import logging
|
import logging
|
||||||
from . import models
|
from . import models
|
||||||
from .plugins import default_plugin_type
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -13,13 +12,24 @@ class Client(object):
|
|||||||
def analyse(self, input, method='GET', **kwargs):
|
def analyse(self, input, method='GET', **kwargs):
|
||||||
return self.request('/', method=method, input=input, **kwargs)
|
return self.request('/', method=method, input=input, **kwargs)
|
||||||
|
|
||||||
def plugins(self, ptype=default_plugin_type):
|
def evaluate(self, input, method='GET', **kwargs):
|
||||||
resp = self.request(path='/plugins', plugin_type=ptype).plugins
|
return self.request('/evaluate', method=method, input=input, **kwargs)
|
||||||
|
|
||||||
|
def plugins(self, *args, **kwargs):
|
||||||
|
resp = self.request(path='/plugins').plugins
|
||||||
return {p.name: p for p in resp}
|
return {p.name: p for p in resp}
|
||||||
|
|
||||||
|
def datasets(self):
|
||||||
|
resp = self.request(path='/datasets').datasets
|
||||||
|
return {d.name: d for d in resp}
|
||||||
|
|
||||||
def request(self, path=None, method='GET', **params):
|
def request(self, path=None, method='GET', **params):
|
||||||
url = '{}{}'.format(self.endpoint, path)
|
url = '{}{}'.format(self.endpoint.rstrip('/'), path)
|
||||||
response = requests.request(method=method, url=url, params=params)
|
if method == 'POST':
|
||||||
|
response = requests.post(url=url, data=params)
|
||||||
|
else:
|
||||||
|
response = requests.request(method=method, url=url, params=params)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
resp = models.from_dict(response.json())
|
resp = models.from_dict(response.json())
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
|
@@ -6,17 +6,17 @@ from future import standard_library
|
|||||||
standard_library.install_aliases()
|
standard_library.install_aliases()
|
||||||
|
|
||||||
from . import plugins, api
|
from . import plugins, api
|
||||||
from .plugins import SenpyPlugin
|
from .models import Error, AggregatedEvaluation
|
||||||
from .models import Error
|
|
||||||
from .blueprints import api_blueprint, demo_blueprint, ns_blueprint
|
from .blueprints import api_blueprint, demo_blueprint, ns_blueprint
|
||||||
|
|
||||||
from threading import Thread
|
from threading import Thread
|
||||||
from functools import partial
|
from functools import partial
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import copy
|
import copy
|
||||||
|
import errno
|
||||||
import logging
|
import logging
|
||||||
import traceback
|
|
||||||
|
from . import gsitk_compat
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -27,21 +27,31 @@ class Senpy(object):
|
|||||||
def __init__(self,
|
def __init__(self,
|
||||||
app=None,
|
app=None,
|
||||||
plugin_folder=".",
|
plugin_folder=".",
|
||||||
|
data_folder=None,
|
||||||
default_plugins=False):
|
default_plugins=False):
|
||||||
self.app = app
|
|
||||||
self._search_folders = set()
|
|
||||||
self._plugin_list = []
|
|
||||||
self._outdated = True
|
|
||||||
self._default = None
|
|
||||||
|
|
||||||
self.add_folder(plugin_folder)
|
default_data = os.path.join(os.getcwd(), 'senpy_data')
|
||||||
|
self.data_folder = data_folder or os.environ.get('SENPY_DATA', default_data)
|
||||||
|
try:
|
||||||
|
os.makedirs(self.data_folder)
|
||||||
|
except OSError as e:
|
||||||
|
if e.errno == errno.EEXIST:
|
||||||
|
logger.debug('Data folder exists: {}'.format(self.data_folder))
|
||||||
|
else: # pragma: no cover
|
||||||
|
raise
|
||||||
|
|
||||||
|
self._default = None
|
||||||
|
self._plugins = {}
|
||||||
|
if plugin_folder:
|
||||||
|
self.add_folder(plugin_folder)
|
||||||
|
|
||||||
if default_plugins:
|
if default_plugins:
|
||||||
self.add_folder('plugins', from_root=True)
|
self.add_folder('plugins', from_root=True)
|
||||||
else:
|
else:
|
||||||
# Add only conversion plugins
|
# Add only conversion plugins
|
||||||
self.add_folder(os.path.join('plugins', 'conversion'),
|
self.add_folder(os.path.join('plugins', 'postprocessing'),
|
||||||
from_root=True)
|
from_root=True)
|
||||||
|
self.app = app
|
||||||
if app is not None:
|
if app is not None:
|
||||||
self.init_app(app)
|
self.init_app(app)
|
||||||
|
|
||||||
@@ -56,110 +66,130 @@ class Senpy(object):
|
|||||||
# otherwise fall back to the request context
|
# otherwise fall back to the request context
|
||||||
if hasattr(app, 'teardown_appcontext'):
|
if hasattr(app, 'teardown_appcontext'):
|
||||||
app.teardown_appcontext(self.teardown)
|
app.teardown_appcontext(self.teardown)
|
||||||
else:
|
else: # pragma: no cover
|
||||||
app.teardown_request(self.teardown)
|
app.teardown_request(self.teardown)
|
||||||
app.register_blueprint(api_blueprint, url_prefix="/api")
|
app.register_blueprint(api_blueprint, url_prefix="/api")
|
||||||
app.register_blueprint(ns_blueprint, url_prefix="/ns")
|
app.register_blueprint(ns_blueprint, url_prefix="/ns")
|
||||||
app.register_blueprint(demo_blueprint, url_prefix="/")
|
app.register_blueprint(demo_blueprint, url_prefix="/")
|
||||||
|
|
||||||
|
def add_plugin(self, plugin):
|
||||||
|
self._plugins[plugin.name.lower()] = plugin
|
||||||
|
|
||||||
|
def delete_plugin(self, plugin):
|
||||||
|
del self._plugins[plugin.name.lower()]
|
||||||
|
|
||||||
|
def plugins(self, plugin_type=None, is_activated=True, **kwargs):
|
||||||
|
""" Return the plugins registered for a given application. Filtered by criteria """
|
||||||
|
return list(plugins.pfilter(self._plugins, plugin_type=plugin_type,
|
||||||
|
is_activated=is_activated, **kwargs))
|
||||||
|
|
||||||
|
def get_plugin(self, name, default=None):
|
||||||
|
if name == 'default':
|
||||||
|
return self.default_plugin
|
||||||
|
elif name == 'conversion':
|
||||||
|
return None
|
||||||
|
|
||||||
|
if name.lower() in self._plugins:
|
||||||
|
return self._plugins[name.lower()]
|
||||||
|
|
||||||
|
results = self.plugins(id='endpoint:plugins/{}'.format(name.lower()),
|
||||||
|
plugin_type=None)
|
||||||
|
if results:
|
||||||
|
return results[0]
|
||||||
|
|
||||||
|
results = self.plugins(id=name,
|
||||||
|
plugin_type=None)
|
||||||
|
if results:
|
||||||
|
return results[0]
|
||||||
|
|
||||||
|
msg = ("Plugin not found: '{}'\n"
|
||||||
|
"Make sure it is ACTIVATED\n"
|
||||||
|
"Valid algorithms: {}").format(name,
|
||||||
|
self._plugins.keys())
|
||||||
|
raise Error(message=msg, status=404)
|
||||||
|
|
||||||
|
def get_plugins(self, name):
|
||||||
|
try:
|
||||||
|
name = name.split(',')
|
||||||
|
except AttributeError:
|
||||||
|
pass # Assume it is a tuple or a list
|
||||||
|
return tuple(self.get_plugin(n) for n in name)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def analysis_plugins(self):
|
||||||
|
""" Return only the analysis plugins that are active"""
|
||||||
|
return self.plugins(plugin_type='analysisPlugin', is_activated=True)
|
||||||
|
|
||||||
def add_folder(self, folder, from_root=False):
|
def add_folder(self, folder, from_root=False):
|
||||||
|
""" Find plugins in this folder and add them to this instance """
|
||||||
if from_root:
|
if from_root:
|
||||||
folder = os.path.join(os.path.dirname(__file__), folder)
|
folder = os.path.join(os.path.dirname(__file__), folder)
|
||||||
logger.debug("Adding folder: %s", folder)
|
logger.debug("Adding folder: %s", folder)
|
||||||
if os.path.isdir(folder):
|
if os.path.isdir(folder):
|
||||||
self._search_folders.add(folder)
|
new_plugins = plugins.from_folder([folder],
|
||||||
self._outdated = True
|
data_folder=self.data_folder)
|
||||||
|
for plugin in new_plugins:
|
||||||
|
self.add_plugin(plugin)
|
||||||
else:
|
else:
|
||||||
logger.debug("Not a folder: %s", folder)
|
raise AttributeError("Not a folder or does not exist: %s", folder)
|
||||||
|
|
||||||
def _get_plugins(self, request):
|
# def check_analysis_request(self, analysis):
|
||||||
if not self.analysis_plugins:
|
# '''Check if the analysis request can be fulfilled'''
|
||||||
raise Error(
|
# if not self.plugins():
|
||||||
status=404,
|
# raise Error(
|
||||||
message=("No plugins found."
|
# status=404,
|
||||||
" Please install one."))
|
# message=("No plugins found."
|
||||||
algos = request.parameters.get('algorithm', None)
|
# " Please install one."))
|
||||||
if not algos:
|
# for a in analysis:
|
||||||
if self.default_plugin:
|
# algo = a.algorithm
|
||||||
algos = [self.default_plugin.name, ]
|
# if algo == 'default' and not self.default_plugin:
|
||||||
else:
|
# raise Error(
|
||||||
raise Error(
|
# status=404,
|
||||||
status=404,
|
# message="No default plugin found, and None provided")
|
||||||
message="No default plugin found, and None provided")
|
# else:
|
||||||
|
# self.get_plugin(algo)
|
||||||
|
|
||||||
plugins = list()
|
|
||||||
for algo in algos:
|
|
||||||
if algo not in self.plugins:
|
|
||||||
logger.debug(("The algorithm '{}' is not valid\n"
|
|
||||||
"Valid algorithms: {}").format(algo,
|
|
||||||
self.plugins.keys()))
|
|
||||||
raise Error(
|
|
||||||
status=404,
|
|
||||||
message="The algorithm '{}' is not valid".format(algo))
|
|
||||||
plugins.append(self.plugins[algo])
|
|
||||||
return plugins
|
|
||||||
|
|
||||||
def _process_entries(self, entries, req, plugins):
|
def _process(self, req, pending, done=None):
|
||||||
"""
|
"""
|
||||||
Recursively process the entries with the first plugin in the list, and pass the results
|
Recursively process the entries with the first plugin in the list, and pass the results
|
||||||
to the rest of the plugins.
|
to the rest of the plugins.
|
||||||
"""
|
"""
|
||||||
if not plugins:
|
done = done or []
|
||||||
for i in entries:
|
if not pending:
|
||||||
yield i
|
return req
|
||||||
return
|
|
||||||
plugin = plugins[0]
|
analysis = pending[0]
|
||||||
self._activate(plugin) # Make sure the plugin is activated
|
results = analysis.run(req)
|
||||||
specific_params = api.get_extra_params(req, plugin)
|
results.analysis.append(analysis)
|
||||||
req.analysis.append({'plugin': plugin,
|
done += analysis
|
||||||
'parameters': specific_params})
|
return self._process(results, pending[1:], done)
|
||||||
results = plugin.analyse_entries(entries, specific_params)
|
|
||||||
for i in self._process_entries(results, req, plugins[1:]):
|
|
||||||
yield i
|
|
||||||
|
|
||||||
def install_deps(self):
|
def install_deps(self):
|
||||||
for plugin in self.filter_plugins(is_activated=True):
|
plugins.install_deps(*self.plugins())
|
||||||
plugins.install_deps(plugin)
|
|
||||||
|
|
||||||
def analyse(self, request):
|
def analyse(self, request, analysis=None):
|
||||||
"""
|
"""
|
||||||
Main method that analyses a request, either from CLI or HTTP.
|
Main method that analyses a request, either from CLI or HTTP.
|
||||||
It takes a processed request, provided by the user, as returned
|
It takes a processed request, provided by the user, as returned
|
||||||
by api.parse_call().
|
by api.parse_call().
|
||||||
"""
|
"""
|
||||||
|
if not self.plugins():
|
||||||
|
raise Error(
|
||||||
|
status=404,
|
||||||
|
message=("No plugins found."
|
||||||
|
" Please install one."))
|
||||||
|
if analysis is None:
|
||||||
|
params = str(request)
|
||||||
|
plugins = self.get_plugins(request.parameters['algorithm'])
|
||||||
|
analysis = api.parse_analysis(request.parameters, plugins)
|
||||||
logger.debug("analysing request: {}".format(request))
|
logger.debug("analysing request: {}".format(request))
|
||||||
try:
|
results = self._process(request, analysis)
|
||||||
entries = request.entries
|
logger.debug("Got analysis result: {}".format(results))
|
||||||
request.entries = []
|
results = self.postprocess(results)
|
||||||
plugins = self._get_plugins(request)
|
logger.debug("Returning post-processed result: {}".format(results))
|
||||||
results = request
|
|
||||||
for i in self._process_entries(entries, results, plugins):
|
|
||||||
results.entries.append(i)
|
|
||||||
self.convert_emotions(results)
|
|
||||||
if 'with_parameters' not in results.parameters:
|
|
||||||
del results.parameters
|
|
||||||
logger.debug("Returning analysis result: {}".format(results))
|
|
||||||
except (Error, Exception) as ex:
|
|
||||||
if not isinstance(ex, Error):
|
|
||||||
msg = "Error during analysis: {} \n\t{}".format(ex,
|
|
||||||
traceback.format_exc())
|
|
||||||
ex = Error(message=msg, status=500)
|
|
||||||
logger.exception('Error returning analysis result')
|
|
||||||
raise ex
|
|
||||||
results.analysis = [i['plugin'].id for i in results.analysis]
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
def _conversion_candidates(self, fromModel, toModel):
|
|
||||||
candidates = self.filter_plugins(plugin_type='emotionConversionPlugin')
|
|
||||||
for name, candidate in candidates.items():
|
|
||||||
for pair in candidate.onyx__doesConversion:
|
|
||||||
logging.debug(pair)
|
|
||||||
|
|
||||||
if pair['onyx:conversionFrom'] == fromModel \
|
|
||||||
and pair['onyx:conversionTo'] == toModel:
|
|
||||||
# logging.debug('Found candidate: {}'.format(candidate))
|
|
||||||
yield candidate
|
|
||||||
|
|
||||||
def convert_emotions(self, resp):
|
def convert_emotions(self, resp):
|
||||||
"""
|
"""
|
||||||
Conversion of all emotions in a response **in place**.
|
Conversion of all emotions in a response **in place**.
|
||||||
@@ -168,11 +198,15 @@ class Senpy(object):
|
|||||||
Needless to say, this is far from an elegant solution, but it works.
|
Needless to say, this is far from an elegant solution, but it works.
|
||||||
@todo refactor and clean up
|
@todo refactor and clean up
|
||||||
"""
|
"""
|
||||||
plugins = [i['plugin'] for i in resp.analysis]
|
plugins = resp.analysis
|
||||||
params = resp.parameters
|
|
||||||
|
if 'parameters' not in resp:
|
||||||
|
return resp
|
||||||
|
|
||||||
|
params = resp['parameters']
|
||||||
toModel = params.get('emotionModel', None)
|
toModel = params.get('emotionModel', None)
|
||||||
if not toModel:
|
if not toModel:
|
||||||
return
|
return resp
|
||||||
|
|
||||||
logger.debug('Asked for model: {}'.format(toModel))
|
logger.debug('Asked for model: {}'.format(toModel))
|
||||||
output = params.get('conversion', None)
|
output = params.get('conversion', None)
|
||||||
@@ -181,14 +215,17 @@ class Senpy(object):
|
|||||||
try:
|
try:
|
||||||
fromModel = plugin.get('onyx:usesEmotionModel', None)
|
fromModel = plugin.get('onyx:usesEmotionModel', None)
|
||||||
candidates[plugin.id] = next(self._conversion_candidates(fromModel, toModel))
|
candidates[plugin.id] = next(self._conversion_candidates(fromModel, toModel))
|
||||||
logger.debug('Analysis plugin {} uses model: {}'.format(plugin.id, fromModel))
|
logger.debug('Analysis plugin {} uses model: {}'.format(
|
||||||
|
plugin.id, fromModel))
|
||||||
except StopIteration:
|
except StopIteration:
|
||||||
e = Error(('No conversion plugin found for: '
|
e = Error(('No conversion plugin found for: '
|
||||||
'{} -> {}'.format(fromModel, toModel)))
|
'{} -> {}'.format(fromModel, toModel)),
|
||||||
|
status=404)
|
||||||
e.original_response = resp
|
e.original_response = resp
|
||||||
e.parameters = params
|
e.parameters = params
|
||||||
raise e
|
raise e
|
||||||
newentries = []
|
newentries = []
|
||||||
|
done = []
|
||||||
for i in resp.entries:
|
for i in resp.entries:
|
||||||
if output == "full":
|
if output == "full":
|
||||||
newemotions = copy.deepcopy(i.emotions)
|
newemotions = copy.deepcopy(i.emotions)
|
||||||
@@ -197,8 +234,7 @@ class Senpy(object):
|
|||||||
for j in i.emotions:
|
for j in i.emotions:
|
||||||
plugname = j['prov:wasGeneratedBy']
|
plugname = j['prov:wasGeneratedBy']
|
||||||
candidate = candidates[plugname]
|
candidate = candidates[plugname]
|
||||||
resp.analysis.append({'plugin': candidate,
|
done.append({'plugin': candidate, 'parameters': params})
|
||||||
'parameters': params})
|
|
||||||
for k in candidate.convert(j, fromModel, toModel, params):
|
for k in candidate.convert(j, fromModel, toModel, params):
|
||||||
k.prov__wasGeneratedBy = candidate.id
|
k.prov__wasGeneratedBy = candidate.id
|
||||||
if output == 'nested':
|
if output == 'nested':
|
||||||
@@ -207,39 +243,119 @@ class Senpy(object):
|
|||||||
i.emotions = newemotions
|
i.emotions = newemotions
|
||||||
newentries.append(i)
|
newentries.append(i)
|
||||||
resp.entries = newentries
|
resp.entries = newentries
|
||||||
|
return resp
|
||||||
|
|
||||||
|
def _conversion_candidates(self, fromModel, toModel):
|
||||||
|
candidates = self.plugins(plugin_type=plugins.EmotionConversion)
|
||||||
|
for candidate in candidates:
|
||||||
|
for pair in candidate.onyx__doesConversion:
|
||||||
|
logging.debug(pair)
|
||||||
|
if candidate.can_convert(fromModel, toModel):
|
||||||
|
yield candidate
|
||||||
|
|
||||||
|
def postprocess(self, response):
|
||||||
|
'''
|
||||||
|
Transform the results from the analysis plugins.
|
||||||
|
It has some pre-defined post-processing like emotion conversion,
|
||||||
|
and it also allows plugins to auto-select themselves.
|
||||||
|
'''
|
||||||
|
|
||||||
|
response = self.convert_emotions(response)
|
||||||
|
|
||||||
|
for plug in self.plugins(plugin_type=plugins.PostProcessing):
|
||||||
|
if plug.check(response, response.analysis):
|
||||||
|
response = plug.process(response)
|
||||||
|
return response
|
||||||
|
|
||||||
|
def _get_datasets(self, request):
|
||||||
|
if not self.datasets:
|
||||||
|
raise Error(
|
||||||
|
status=404,
|
||||||
|
message=("No datasets found."
|
||||||
|
" Please verify DatasetManager"))
|
||||||
|
datasets_name = request.parameters.get('dataset', None).split(',')
|
||||||
|
for dataset in datasets_name:
|
||||||
|
if dataset not in self.datasets:
|
||||||
|
logger.debug(("The dataset '{}' is not valid\n"
|
||||||
|
"Valid datasets: {}").format(
|
||||||
|
dataset, self.datasets.keys()))
|
||||||
|
raise Error(
|
||||||
|
status=404,
|
||||||
|
message="The dataset '{}' is not valid".format(dataset))
|
||||||
|
dm = gsitk_compat.DatasetManager()
|
||||||
|
datasets = dm.prepare_datasets(datasets_name)
|
||||||
|
return datasets
|
||||||
|
|
||||||
|
@property
|
||||||
|
def datasets(self):
|
||||||
|
self._dataset_list = {}
|
||||||
|
dm = gsitk_compat.DatasetManager()
|
||||||
|
for item in dm.get_datasets():
|
||||||
|
for key in item:
|
||||||
|
if key in self._dataset_list:
|
||||||
|
continue
|
||||||
|
properties = item[key]
|
||||||
|
properties['@id'] = key
|
||||||
|
self._dataset_list[key] = properties
|
||||||
|
return self._dataset_list
|
||||||
|
|
||||||
|
def evaluate(self, params):
|
||||||
|
logger.debug("evaluating request: {}".format(params))
|
||||||
|
results = AggregatedEvaluation()
|
||||||
|
results.parameters = params
|
||||||
|
datasets = self._get_datasets(results)
|
||||||
|
plugins = []
|
||||||
|
for plugname in params.algorithm:
|
||||||
|
plugins = self.get_plugin(plugname)
|
||||||
|
|
||||||
|
for eval in plugins.evaluate(plugins, datasets):
|
||||||
|
results.evaluations.append(eval)
|
||||||
|
if 'with_parameters' not in results.parameters:
|
||||||
|
del results.parameters
|
||||||
|
logger.debug("Returning evaluation result: {}".format(results))
|
||||||
|
return results
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def default_plugin(self):
|
def default_plugin(self):
|
||||||
candidate = self._default
|
if not self._default or not self._default.is_activated:
|
||||||
if not candidate:
|
candidates = self.plugins(
|
||||||
candidates = self.filter_plugins(plugin_type='analysisPlugin',
|
plugin_type='analysisPlugin', is_activated=True)
|
||||||
is_activated=True)
|
|
||||||
if len(candidates) > 0:
|
if len(candidates) > 0:
|
||||||
candidate = list(candidates.values())[0]
|
self._default = candidates[0]
|
||||||
logger.debug("Default: {}".format(candidate))
|
else:
|
||||||
return candidate
|
self._default = None
|
||||||
|
logger.debug("Default: {}".format(self._default))
|
||||||
|
return self._default
|
||||||
|
|
||||||
@default_plugin.setter
|
@default_plugin.setter
|
||||||
def default_plugin(self, value):
|
def default_plugin(self, value):
|
||||||
if isinstance(value, SenpyPlugin):
|
if isinstance(value, plugins.Plugin):
|
||||||
|
if not value.is_activated:
|
||||||
|
raise AttributeError('The default plugin has to be activated.')
|
||||||
self._default = value
|
self._default = value
|
||||||
else:
|
|
||||||
self._default = self.plugins[value]
|
|
||||||
|
|
||||||
def activate_all(self, sync=True):
|
else:
|
||||||
|
self._default = self._plugins[value.lower()]
|
||||||
|
|
||||||
|
def activate_all(self, sync=True, allow_fail=False):
|
||||||
ps = []
|
ps = []
|
||||||
for plug in self.plugins.keys():
|
for plug in self._plugins.keys():
|
||||||
ps.append(self.activate_plugin(plug, sync=sync))
|
try:
|
||||||
|
self.activate_plugin(plug, sync=sync)
|
||||||
|
except Exception as ex:
|
||||||
|
if not allow_fail:
|
||||||
|
raise
|
||||||
|
logger.error('Could not activate {}: {}'.format(plug, ex))
|
||||||
return ps
|
return ps
|
||||||
|
|
||||||
def deactivate_all(self, sync=True):
|
def deactivate_all(self, sync=True):
|
||||||
ps = []
|
ps = []
|
||||||
for plug in self.plugins.keys():
|
for plug in self._plugins.keys():
|
||||||
ps.append(self.deactivate_plugin(plug, sync=sync))
|
ps.append(self.deactivate_plugin(plug, sync=sync))
|
||||||
return ps
|
return ps
|
||||||
|
|
||||||
def _set_active(self, plugin, active=True, *args, **kwargs):
|
def _set_active(self, plugin, active=True, *args, **kwargs):
|
||||||
''' We're using a variable in the plugin itself to activate/deactive plugins.\
|
''' We're using a variable in the plugin itself to activate/deactivate plugins.\
|
||||||
Note that plugins may activate themselves by setting this variable.
|
Note that plugins may activate themselves by setting this variable.
|
||||||
'''
|
'''
|
||||||
plugin.is_activated = active
|
plugin.is_activated = active
|
||||||
@@ -249,29 +365,25 @@ class Senpy(object):
|
|||||||
with plugin._lock:
|
with plugin._lock:
|
||||||
if plugin.is_activated:
|
if plugin.is_activated:
|
||||||
return
|
return
|
||||||
try:
|
plugin.activate()
|
||||||
plugin.activate()
|
msg = "Plugin activated: {}".format(plugin.name)
|
||||||
msg = "Plugin activated: {}".format(plugin.name)
|
logger.info(msg)
|
||||||
logger.info(msg)
|
success = True
|
||||||
success = True
|
self._set_active(plugin, success)
|
||||||
self._set_active(plugin, success)
|
return success
|
||||||
except Exception as ex:
|
|
||||||
msg = "Error activating plugin {} - {} : \n\t{}".format(
|
|
||||||
plugin.name, ex, traceback.format_exc())
|
|
||||||
logger.error(msg)
|
|
||||||
raise Error(msg)
|
|
||||||
|
|
||||||
def activate_plugin(self, plugin_name, sync=True):
|
def activate_plugin(self, plugin_name, sync=True):
|
||||||
try:
|
plugin_name = plugin_name.lower()
|
||||||
plugin = self.plugins[plugin_name]
|
if plugin_name not in self._plugins:
|
||||||
except KeyError:
|
|
||||||
raise Error(
|
raise Error(
|
||||||
message="Plugin not found: {}".format(plugin_name), status=404)
|
message="Plugin not found: {}".format(plugin_name), status=404)
|
||||||
|
plugin = self._plugins[plugin_name]
|
||||||
|
|
||||||
logger.info("Activating plugin: {}".format(plugin.name))
|
logger.info("Activating plugin: {}".format(plugin.name))
|
||||||
|
|
||||||
if sync or 'async' in plugin and not plugin.async:
|
if sync or not getattr(plugin, 'async', True) or getattr(
|
||||||
self._activate(plugin)
|
plugin, 'sync', False):
|
||||||
|
return self._activate(plugin)
|
||||||
else:
|
else:
|
||||||
th = Thread(target=partial(self._activate, plugin))
|
th = Thread(target=partial(self._activate, plugin))
|
||||||
th.start()
|
th.start()
|
||||||
@@ -281,24 +393,20 @@ class Senpy(object):
|
|||||||
with plugin._lock:
|
with plugin._lock:
|
||||||
if not plugin.is_activated:
|
if not plugin.is_activated:
|
||||||
return
|
return
|
||||||
try:
|
plugin.deactivate()
|
||||||
plugin.deactivate()
|
logger.info("Plugin deactivated: {}".format(plugin.name))
|
||||||
logger.info("Plugin deactivated: {}".format(plugin.name))
|
|
||||||
except Exception as ex:
|
|
||||||
logger.error(
|
|
||||||
"Error deactivating plugin {}: {}".format(plugin.name, ex))
|
|
||||||
logger.error("Trace: {}".format(traceback.format_exc()))
|
|
||||||
|
|
||||||
def deactivate_plugin(self, plugin_name, sync=True):
|
def deactivate_plugin(self, plugin_name, sync=True):
|
||||||
try:
|
plugin_name = plugin_name.lower()
|
||||||
plugin = self.plugins[plugin_name]
|
if plugin_name not in self._plugins:
|
||||||
except KeyError:
|
|
||||||
raise Error(
|
raise Error(
|
||||||
message="Plugin not found: {}".format(plugin_name), status=404)
|
message="Plugin not found: {}".format(plugin_name), status=404)
|
||||||
|
plugin = self._plugins[plugin_name]
|
||||||
|
|
||||||
self._set_active(plugin, False)
|
self._set_active(plugin, False)
|
||||||
|
|
||||||
if sync or 'async' in plugin and not plugin.async:
|
if sync or not getattr(plugin, 'async', True) or not getattr(
|
||||||
|
plugin, 'sync', False):
|
||||||
self._deactivate(plugin)
|
self._deactivate(plugin)
|
||||||
else:
|
else:
|
||||||
th = Thread(target=partial(self._deactivate, plugin))
|
th = Thread(target=partial(self._deactivate, plugin))
|
||||||
@@ -307,19 +415,3 @@ class Senpy(object):
|
|||||||
|
|
||||||
def teardown(self, exception):
|
def teardown(self, exception):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@property
|
|
||||||
def plugins(self):
|
|
||||||
""" Return the plugins registered for a given application. """
|
|
||||||
if self._outdated:
|
|
||||||
self._plugin_list = plugins.load_plugins(self._search_folders)
|
|
||||||
self._outdated = False
|
|
||||||
return self._plugin_list
|
|
||||||
|
|
||||||
def filter_plugins(self, **kwargs):
|
|
||||||
return plugins.pfilter(self.plugins, **kwargs)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def analysis_plugins(self):
|
|
||||||
""" Return only the analysis plugins """
|
|
||||||
return self.filter_plugins(plugin_type='analysisPlugin')
|
|
||||||
|
31
senpy/gsitk_compat.py
Normal file
31
senpy/gsitk_compat.py
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
import logging
|
||||||
|
|
||||||
|
from pkg_resources import parse_version, get_distribution, DistributionNotFound
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
MSG = 'GSITK is not (properly) installed.'
|
||||||
|
IMPORTMSG = '{} Some functions will be unavailable.'.format(MSG)
|
||||||
|
RUNMSG = '{} Install it to use this function.'.format(MSG)
|
||||||
|
|
||||||
|
|
||||||
|
def raise_exception(*args, **kwargs):
|
||||||
|
raise Exception(RUNMSG)
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
gsitk_distro = get_distribution("gsitk")
|
||||||
|
GSITK_VERSION = parse_version(gsitk_distro.version)
|
||||||
|
GSITK_AVAILABLE = GSITK_VERSION > parse_version("0.1.9.1") # Earlier versions have a bug
|
||||||
|
except DistributionNotFound:
|
||||||
|
GSITK_AVAILABLE = False
|
||||||
|
GSITK_VERSION = ()
|
||||||
|
|
||||||
|
if GSITK_AVAILABLE:
|
||||||
|
from gsitk.datasets.datasets import DatasetManager
|
||||||
|
from gsitk.evaluation.evaluation import Evaluation as Eval
|
||||||
|
from sklearn.pipeline import Pipeline
|
||||||
|
modules = locals()
|
||||||
|
else:
|
||||||
|
logger.warning(IMPORTMSG)
|
||||||
|
DatasetManager = Eval = Pipeline = raise_exception
|
258
senpy/meta.py
Normal file
258
senpy/meta.py
Normal file
@@ -0,0 +1,258 @@
|
|||||||
|
'''
|
||||||
|
Meta-programming for the models.
|
||||||
|
'''
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
import jsonschema
|
||||||
|
import inspect
|
||||||
|
import copy
|
||||||
|
|
||||||
|
from abc import ABCMeta
|
||||||
|
from collections import MutableMapping, namedtuple
|
||||||
|
|
||||||
|
|
||||||
|
class BaseMeta(ABCMeta):
|
||||||
|
'''
|
||||||
|
Metaclass for models. It extracts the default values for the fields in
|
||||||
|
the model.
|
||||||
|
|
||||||
|
For instance, instances of the following class wouldn't need to mark
|
||||||
|
their version or description on initialization:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
class MyPlugin(Plugin):
|
||||||
|
version=0.3
|
||||||
|
description='A dull plugin'
|
||||||
|
|
||||||
|
|
||||||
|
Note that these operations could be included in the __init__ of the
|
||||||
|
class, but it would be very inefficient.
|
||||||
|
'''
|
||||||
|
_subtypes = {}
|
||||||
|
|
||||||
|
def __new__(mcs, name, bases, attrs, **kwargs):
|
||||||
|
register_afterwards = False
|
||||||
|
defaults = {}
|
||||||
|
|
||||||
|
attrs = mcs.expand_with_schema(name, attrs)
|
||||||
|
if 'schema' in attrs:
|
||||||
|
register_afterwards = True
|
||||||
|
for base in bases:
|
||||||
|
if hasattr(base, '_defaults'):
|
||||||
|
defaults.update(getattr(base, '_defaults'))
|
||||||
|
|
||||||
|
info, rest = mcs.split_attrs(attrs)
|
||||||
|
|
||||||
|
for i in list(info.keys()):
|
||||||
|
if isinstance(info[i], _Alias):
|
||||||
|
fget, fset, fdel = make_property(info[i].indict)
|
||||||
|
rest[i] = property(fget=fget, fset=fset, fdel=fdel)
|
||||||
|
else:
|
||||||
|
defaults[i] = info[i]
|
||||||
|
|
||||||
|
rest['_defaults'] = defaults
|
||||||
|
|
||||||
|
cls = super(BaseMeta, mcs).__new__(mcs, name, tuple(bases), rest)
|
||||||
|
|
||||||
|
if register_afterwards:
|
||||||
|
mcs.register(cls, defaults['@type'])
|
||||||
|
return cls
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def register(mcs, rsubclass, rtype=None):
|
||||||
|
mcs._subtypes[rtype or rsubclass.__name__] = rsubclass
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def expand_with_schema(name, attrs):
|
||||||
|
if 'schema' in attrs: # Schema specified by name
|
||||||
|
schema_file = '{}.json'.format(attrs['schema'])
|
||||||
|
elif 'schema_file' in attrs:
|
||||||
|
schema_file = attrs['schema_file']
|
||||||
|
del attrs['schema_file']
|
||||||
|
else:
|
||||||
|
return attrs
|
||||||
|
|
||||||
|
if '/' not in 'schema_file':
|
||||||
|
thisdir = os.path.dirname(os.path.realpath(__file__))
|
||||||
|
schema_file = os.path.join(thisdir,
|
||||||
|
'schemas',
|
||||||
|
schema_file)
|
||||||
|
|
||||||
|
schema_path = 'file://' + schema_file
|
||||||
|
|
||||||
|
with open(schema_file) as f:
|
||||||
|
schema = json.load(f)
|
||||||
|
|
||||||
|
resolver = jsonschema.RefResolver(schema_path, schema)
|
||||||
|
if '@type' not in attrs:
|
||||||
|
attrs['@type'] = "".join((name[0].lower(), name[1:]))
|
||||||
|
attrs['_schema_file'] = schema_file
|
||||||
|
attrs['schema'] = schema
|
||||||
|
attrs['_validator'] = jsonschema.Draft4Validator(schema, resolver=resolver)
|
||||||
|
|
||||||
|
schema_defaults = BaseMeta.get_defaults(attrs['schema'])
|
||||||
|
attrs.update(schema_defaults)
|
||||||
|
|
||||||
|
return attrs
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def is_func(v):
|
||||||
|
return inspect.isroutine(v) or inspect.ismethod(v) or \
|
||||||
|
inspect.ismodule(v) or isinstance(v, property)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def is_internal(k):
|
||||||
|
return k[0] == '_' or k == 'schema' or k == 'data'
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_key(key):
|
||||||
|
if key[0] != '_':
|
||||||
|
key = key.replace("__", ":", 1)
|
||||||
|
return key
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def split_attrs(attrs):
|
||||||
|
'''
|
||||||
|
Extract the attributes of the class.
|
||||||
|
|
||||||
|
This allows adding default values in the class definition.
|
||||||
|
e.g.:
|
||||||
|
'''
|
||||||
|
isattr = {}
|
||||||
|
rest = {}
|
||||||
|
for key, value in attrs.items():
|
||||||
|
if not (BaseMeta.is_internal(key)) and (not BaseMeta.is_func(value)):
|
||||||
|
isattr[key] = value
|
||||||
|
else:
|
||||||
|
rest[key] = value
|
||||||
|
return isattr, rest
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_defaults(schema):
|
||||||
|
temp = {}
|
||||||
|
for obj in [
|
||||||
|
schema,
|
||||||
|
] + schema.get('allOf', []):
|
||||||
|
for k, v in obj.get('properties', {}).items():
|
||||||
|
if 'default' in v and k not in temp:
|
||||||
|
temp[k] = v['default']
|
||||||
|
return temp
|
||||||
|
|
||||||
|
|
||||||
|
def make_property(key):
|
||||||
|
|
||||||
|
def fget(self):
|
||||||
|
return self[key]
|
||||||
|
|
||||||
|
def fdel(self):
|
||||||
|
del self[key]
|
||||||
|
|
||||||
|
def fset(self, value):
|
||||||
|
self[key] = value
|
||||||
|
|
||||||
|
return fget, fset, fdel
|
||||||
|
|
||||||
|
|
||||||
|
class CustomDict(MutableMapping, object):
|
||||||
|
'''
|
||||||
|
A dictionary whose elements can also be accessed as attributes. Since some
|
||||||
|
characters are not valid in the dot-notation, the attribute names also
|
||||||
|
converted. e.g.:
|
||||||
|
|
||||||
|
> d = CustomDict()
|
||||||
|
> d.key = d['ns:name'] = 1
|
||||||
|
> d.key == d['key']
|
||||||
|
True
|
||||||
|
> d.ns__name == d['ns:name']
|
||||||
|
'''
|
||||||
|
|
||||||
|
_defaults = {}
|
||||||
|
_map_attr_key = {'id': '@id'}
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
super(CustomDict, self).__init__()
|
||||||
|
for k, v in self._defaults.items():
|
||||||
|
self[k] = copy.copy(v)
|
||||||
|
for arg in args:
|
||||||
|
self.update(arg)
|
||||||
|
for k, v in kwargs.items():
|
||||||
|
self[self._attr_to_key(k)] = v
|
||||||
|
return self
|
||||||
|
|
||||||
|
def serializable(self):
|
||||||
|
def ser_or_down(item):
|
||||||
|
if hasattr(item, 'serializable'):
|
||||||
|
return item.serializable()
|
||||||
|
elif isinstance(item, dict):
|
||||||
|
temp = dict()
|
||||||
|
for kp in item:
|
||||||
|
vp = item[kp]
|
||||||
|
temp[kp] = ser_or_down(vp)
|
||||||
|
return temp
|
||||||
|
elif isinstance(item, list) or isinstance(item, set):
|
||||||
|
return list(ser_or_down(i) for i in item)
|
||||||
|
else:
|
||||||
|
return item
|
||||||
|
|
||||||
|
return ser_or_down(self.as_dict())
|
||||||
|
|
||||||
|
def __getitem__(self, key):
|
||||||
|
key = self._key_to_attr(key)
|
||||||
|
return self.__dict__[key]
|
||||||
|
|
||||||
|
def __setitem__(self, key, value):
|
||||||
|
'''Do not insert data directly, there might be a property in that key. '''
|
||||||
|
key = self._key_to_attr(key)
|
||||||
|
return setattr(self, key, value)
|
||||||
|
|
||||||
|
def as_dict(self):
|
||||||
|
return {self._attr_to_key(k): v for k, v in self.__dict__.items()
|
||||||
|
if not self._internal_key(k)}
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
return (k for k in self.__dict__ if not self._internal_key(k))
|
||||||
|
|
||||||
|
def __len__(self):
|
||||||
|
return len(self.__dict__)
|
||||||
|
|
||||||
|
def __delitem__(self, key):
|
||||||
|
del self.__dict__[key]
|
||||||
|
|
||||||
|
def update(self, other):
|
||||||
|
for k, v in other.items():
|
||||||
|
self[k] = v
|
||||||
|
|
||||||
|
def _attr_to_key(self, key):
|
||||||
|
key = key.replace("__", ":", 1)
|
||||||
|
key = self._map_attr_key.get(key, key)
|
||||||
|
return key
|
||||||
|
|
||||||
|
def _key_to_attr(self, key):
|
||||||
|
if self._internal_key(key):
|
||||||
|
return key
|
||||||
|
key = key.replace(":", "__", 1)
|
||||||
|
return key
|
||||||
|
|
||||||
|
def __getattr__(self, key):
|
||||||
|
try:
|
||||||
|
return self.__dict__[self._attr_to_key(key)]
|
||||||
|
except KeyError:
|
||||||
|
raise AttributeError
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _internal_key(key):
|
||||||
|
return key[0] == '_'
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return json.dumps(self.serializable(), sort_keys=True, indent=4)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return json.dumps(self.serializable(), sort_keys=True, indent=4)
|
||||||
|
|
||||||
|
|
||||||
|
_Alias = namedtuple('Alias', 'indict')
|
||||||
|
|
||||||
|
|
||||||
|
def alias(key):
|
||||||
|
return _Alias(key)
|
430
senpy/models.py
430
senpy/models.py
@@ -6,24 +6,30 @@ For compatibility with Py3 and for easier debugging, this new version drops
|
|||||||
introspection and adds all arguments to the models.
|
introspection and adds all arguments to the models.
|
||||||
'''
|
'''
|
||||||
from __future__ import print_function
|
from __future__ import print_function
|
||||||
from six import string_types
|
from future import standard_library
|
||||||
|
standard_library.install_aliases()
|
||||||
|
|
||||||
|
from future.utils import with_metaclass
|
||||||
|
from past.builtins import basestring
|
||||||
|
|
||||||
import time
|
import time
|
||||||
import copy
|
import copy
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import jsonref
|
import jsonref
|
||||||
import jsonschema
|
|
||||||
|
|
||||||
from flask import Response as FlaskResponse
|
from flask import Response as FlaskResponse
|
||||||
from pyld import jsonld
|
from pyld import jsonld
|
||||||
|
|
||||||
from rdflib import Graph
|
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
logging.getLogger('rdflib').setLevel(logging.WARN)
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
from rdflib import Graph
|
||||||
|
|
||||||
|
|
||||||
|
from .meta import BaseMeta, CustomDict, alias
|
||||||
|
|
||||||
DEFINITIONS_FILE = 'definitions.json'
|
DEFINITIONS_FILE = 'definitions.json'
|
||||||
CONTEXT_PATH = os.path.join(
|
CONTEXT_PATH = os.path.join(
|
||||||
os.path.dirname(os.path.realpath(__file__)), 'schemas', 'context.jsonld')
|
os.path.dirname(os.path.realpath(__file__)), 'schemas', 'context.jsonld')
|
||||||
@@ -45,40 +51,102 @@ def read_schema(schema_file, absolute=False):
|
|||||||
return jsonref.load(f, base_uri=schema_uri)
|
return jsonref.load(f, base_uri=schema_uri)
|
||||||
|
|
||||||
|
|
||||||
base_schema = read_schema(DEFINITIONS_FILE)
|
def dump_schema(schema):
|
||||||
|
return jsonref.dumps(schema)
|
||||||
|
|
||||||
|
|
||||||
class Context(dict):
|
def load_context(context):
|
||||||
@staticmethod
|
logging.debug('Loading context: {}'.format(context))
|
||||||
def load(context):
|
if not context:
|
||||||
logging.debug('Loading context: {}'.format(context))
|
return context
|
||||||
if not context:
|
elif isinstance(context, list):
|
||||||
|
contexts = []
|
||||||
|
for c in context:
|
||||||
|
contexts.append(load_context(c))
|
||||||
|
return contexts
|
||||||
|
elif isinstance(context, dict):
|
||||||
|
return dict(context)
|
||||||
|
elif isinstance(context, basestring):
|
||||||
|
try:
|
||||||
|
with open(context) as f:
|
||||||
|
return dict(json.loads(f.read()))
|
||||||
|
except IOError:
|
||||||
return context
|
return context
|
||||||
elif isinstance(context, list):
|
else:
|
||||||
contexts = []
|
raise AttributeError('Please, provide a valid context')
|
||||||
for c in context:
|
|
||||||
contexts.append(Context.load(c))
|
|
||||||
return contexts
|
|
||||||
elif isinstance(context, dict):
|
|
||||||
return Context(context)
|
|
||||||
elif isinstance(context, string_types):
|
|
||||||
try:
|
|
||||||
with open(context) as f:
|
|
||||||
return Context(json.loads(f.read()))
|
|
||||||
except IOError:
|
|
||||||
return context
|
|
||||||
else:
|
|
||||||
raise AttributeError('Please, provide a valid context')
|
|
||||||
|
|
||||||
|
|
||||||
base_context = Context.load(CONTEXT_PATH)
|
base_context = load_context(CONTEXT_PATH)
|
||||||
|
|
||||||
|
|
||||||
class SenpyMixin(object):
|
def register(rsubclass, rtype=None):
|
||||||
|
BaseMeta.register(rsubclass, rtype)
|
||||||
|
|
||||||
|
|
||||||
|
class BaseModel(with_metaclass(BaseMeta, CustomDict)):
|
||||||
|
'''
|
||||||
|
Entities of the base model are a special kind of dictionary that emulates
|
||||||
|
a JSON-LD object. The structure of the dictionary is checked via JSON-schema.
|
||||||
|
For convenience, the values can also be accessed as attributes
|
||||||
|
(a la Javascript). e.g.:
|
||||||
|
|
||||||
|
>>> myobject.key == myobject['key']
|
||||||
|
True
|
||||||
|
>>> myobject.ns__name == myobject['ns:name']
|
||||||
|
True
|
||||||
|
|
||||||
|
Additionally, subclasses of this class can specify default values for their
|
||||||
|
instances. These defaults are inherited by subclasses. e.g.:
|
||||||
|
|
||||||
|
>>> class NewModel(BaseModel):
|
||||||
|
... mydefault = 5
|
||||||
|
>>> n1 = NewModel()
|
||||||
|
>>> n1['mydefault'] == 5
|
||||||
|
True
|
||||||
|
>>> n1.mydefault = 3
|
||||||
|
>>> n1['mydefault'] = 3
|
||||||
|
True
|
||||||
|
>>> n2 = NewModel()
|
||||||
|
>>> n2 == 5
|
||||||
|
True
|
||||||
|
>>> class SubModel(NewModel):
|
||||||
|
pass
|
||||||
|
>>> subn = SubModel()
|
||||||
|
>>> subn.mydefault == 5
|
||||||
|
True
|
||||||
|
|
||||||
|
Lastly, every subclass that also specifies a schema will get registered, so it
|
||||||
|
is possible to deserialize JSON and get the right type.
|
||||||
|
i.e. to recover an instance of the original class from a plain JSON.
|
||||||
|
|
||||||
|
'''
|
||||||
|
|
||||||
|
# schema_file = DEFINITIONS_FILE
|
||||||
_context = base_context["@context"]
|
_context = base_context["@context"]
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
auto_id = kwargs.pop('_auto_id', False)
|
||||||
|
|
||||||
|
super(BaseModel, self).__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
if auto_id:
|
||||||
|
self.id
|
||||||
|
|
||||||
|
if '@type' not in self:
|
||||||
|
logger.warning('Created an instance of an unknown model')
|
||||||
|
|
||||||
|
@property
|
||||||
|
def id(self):
|
||||||
|
if '@id' not in self:
|
||||||
|
self['@id'] = '_:{}_{}'.format(type(self).__name__, time.time())
|
||||||
|
return self['@id']
|
||||||
|
|
||||||
|
@id.setter
|
||||||
|
def id(self, value):
|
||||||
|
self['@id'] = value
|
||||||
|
|
||||||
def flask(self,
|
def flask(self,
|
||||||
in_headers=True,
|
in_headers=False,
|
||||||
headers=None,
|
headers=None,
|
||||||
outformat='json-ld',
|
outformat='json-ld',
|
||||||
**kwargs):
|
**kwargs):
|
||||||
@@ -102,26 +170,28 @@ class SenpyMixin(object):
|
|||||||
})
|
})
|
||||||
return FlaskResponse(
|
return FlaskResponse(
|
||||||
response=content,
|
response=content,
|
||||||
status=getattr(self, "status", 200),
|
status=self.get('status', 200),
|
||||||
headers=headers,
|
headers=headers,
|
||||||
mimetype=mimetype)
|
mimetype=mimetype)
|
||||||
|
|
||||||
def serialize(self, format='json-ld', with_mime=False, **kwargs):
|
def serialize(self, format='json-ld', with_mime=False, **kwargs):
|
||||||
js = self.jsonld(**kwargs)
|
js = self.jsonld(**kwargs)
|
||||||
|
content = json.dumps(js, indent=2, sort_keys=True)
|
||||||
if format == 'json-ld':
|
if format == 'json-ld':
|
||||||
content = json.dumps(js, indent=2, sort_keys=True)
|
|
||||||
mimetype = "application/json"
|
mimetype = "application/json"
|
||||||
elif format in ['turtle', ]:
|
elif format in ['turtle', 'ntriples']:
|
||||||
logger.debug(js)
|
logger.debug(js)
|
||||||
content = json.dumps(js, indent=2, sort_keys=True)
|
base = kwargs.get('prefix')
|
||||||
g = Graph().parse(
|
g = Graph().parse(
|
||||||
data=content,
|
data=content,
|
||||||
format='json-ld',
|
format='json-ld',
|
||||||
base=kwargs.get('prefix'),
|
base=base,
|
||||||
context=self._context)
|
context=[self._context,
|
||||||
|
{'@base': base}])
|
||||||
logger.debug(
|
logger.debug(
|
||||||
'Parsing with prefix: {}'.format(kwargs.get('prefix')))
|
'Parsing with prefix: {}'.format(kwargs.get('prefix')))
|
||||||
content = g.serialize(format='turtle').decode('utf-8')
|
content = g.serialize(format=format,
|
||||||
|
base=base).decode('utf-8')
|
||||||
mimetype = 'text/{}'.format(format)
|
mimetype = 'text/{}'.format(format)
|
||||||
else:
|
else:
|
||||||
raise Error('Unknown outformat: {}'.format(format))
|
raise Error('Unknown outformat: {}'.format(format))
|
||||||
@@ -130,51 +200,29 @@ class SenpyMixin(object):
|
|||||||
else:
|
else:
|
||||||
return content
|
return content
|
||||||
|
|
||||||
def serializable(self):
|
|
||||||
def ser_or_down(item):
|
|
||||||
if hasattr(item, 'serializable'):
|
|
||||||
return item.serializable()
|
|
||||||
elif isinstance(item, dict):
|
|
||||||
temp = dict()
|
|
||||||
for kp in item:
|
|
||||||
vp = item[kp]
|
|
||||||
temp[kp] = ser_or_down(vp)
|
|
||||||
return temp
|
|
||||||
elif isinstance(item, list) or isinstance(item, set):
|
|
||||||
return list(ser_or_down(i) for i in item)
|
|
||||||
else:
|
|
||||||
return item
|
|
||||||
|
|
||||||
return ser_or_down(self._plain_dict())
|
|
||||||
|
|
||||||
def jsonld(self,
|
def jsonld(self,
|
||||||
with_context=True,
|
with_context=False,
|
||||||
context_uri=None,
|
context_uri=None,
|
||||||
prefix=None,
|
prefix=None,
|
||||||
expanded=False):
|
expanded=False):
|
||||||
ser = self.serializable()
|
|
||||||
|
|
||||||
result = jsonld.compact(
|
result = self.serializable()
|
||||||
ser,
|
|
||||||
self._context,
|
|
||||||
options={
|
|
||||||
'base': prefix,
|
|
||||||
'expandContext': self._context,
|
|
||||||
'senpy': prefix
|
|
||||||
})
|
|
||||||
if context_uri:
|
|
||||||
result['@context'] = context_uri
|
|
||||||
if expanded:
|
if expanded:
|
||||||
result = jsonld.expand(
|
result = jsonld.expand(
|
||||||
result, options={'base': prefix,
|
result, options={'base': prefix,
|
||||||
'expandContext': self._context})
|
'expandContext': self._context})[0]
|
||||||
if not with_context:
|
if not with_context:
|
||||||
del result['@context']
|
try:
|
||||||
return result
|
del result['@context']
|
||||||
|
except KeyError:
|
||||||
|
pass
|
||||||
|
elif context_uri:
|
||||||
|
result['@context'] = context_uri
|
||||||
|
else:
|
||||||
|
result['@context'] = self._context
|
||||||
|
|
||||||
def to_JSON(self, *args, **kwargs):
|
return result
|
||||||
js = json.dumps(self.jsonld(*args, **kwargs), indent=4, sort_keys=True)
|
|
||||||
return js
|
|
||||||
|
|
||||||
def validate(self, obj=None):
|
def validate(self, obj=None):
|
||||||
if not obj:
|
if not obj:
|
||||||
@@ -183,86 +231,22 @@ class SenpyMixin(object):
|
|||||||
obj = obj.jsonld()
|
obj = obj.jsonld()
|
||||||
self._validator.validate(obj)
|
self._validator.validate(obj)
|
||||||
|
|
||||||
def __str__(self):
|
def prov(self, another):
|
||||||
return str(self.serialize())
|
self['prov:wasGeneratedBy'] = another.id
|
||||||
|
|
||||||
|
|
||||||
class BaseModel(SenpyMixin, dict):
|
def subtypes():
|
||||||
|
return BaseMeta._subtypes
|
||||||
schema = base_schema
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
if 'id' in kwargs:
|
|
||||||
self.id = kwargs.pop('id')
|
|
||||||
elif kwargs.pop('_auto_id', True):
|
|
||||||
self.id = '_:{}_{}'.format(type(self).__name__, time.time())
|
|
||||||
temp = dict(*args, **kwargs)
|
|
||||||
|
|
||||||
for obj in [
|
|
||||||
self.schema,
|
|
||||||
] + self.schema.get('allOf', []):
|
|
||||||
for k, v in obj.get('properties', {}).items():
|
|
||||||
if 'default' in v and k not in temp:
|
|
||||||
temp[k] = copy.deepcopy(v['default'])
|
|
||||||
|
|
||||||
for i in temp:
|
|
||||||
nk = self._get_key(i)
|
|
||||||
if nk != i:
|
|
||||||
temp[nk] = temp[i]
|
|
||||||
del temp[i]
|
|
||||||
try:
|
|
||||||
temp['@type'] = getattr(self, '@type')
|
|
||||||
except AttributeError:
|
|
||||||
logger.warn('Creating an instance of an unknown model')
|
|
||||||
|
|
||||||
super(BaseModel, self).__init__(temp)
|
|
||||||
|
|
||||||
def _get_key(self, key):
|
|
||||||
if key is 'id':
|
|
||||||
key = '@id'
|
|
||||||
key = key.replace("__", ":", 1)
|
|
||||||
return key
|
|
||||||
|
|
||||||
def __delitem__(self, key):
|
|
||||||
dict.__delitem__(self, key)
|
|
||||||
|
|
||||||
def __getattr__(self, key):
|
|
||||||
try:
|
|
||||||
return self.__getitem__(self._get_key(key))
|
|
||||||
except KeyError:
|
|
||||||
raise AttributeError(key)
|
|
||||||
|
|
||||||
def __setattr__(self, key, value):
|
|
||||||
self.__setitem__(self._get_key(key), value)
|
|
||||||
|
|
||||||
def __delattr__(self, key):
|
|
||||||
try:
|
|
||||||
object.__delattr__(self, key)
|
|
||||||
except AttributeError:
|
|
||||||
self.__delitem__(self._get_key(key))
|
|
||||||
|
|
||||||
def _plain_dict(self):
|
|
||||||
d = {k: v for (k, v) in self.items() if k[0] != "_"}
|
|
||||||
return d
|
|
||||||
|
|
||||||
|
|
||||||
def register(rsubclass, rtype=None):
|
|
||||||
_subtypes[rtype or rsubclass.__name__] = rsubclass
|
|
||||||
|
|
||||||
|
|
||||||
_subtypes = {}
|
|
||||||
|
|
||||||
|
|
||||||
def from_dict(indict, cls=None):
|
def from_dict(indict, cls=None):
|
||||||
if not cls:
|
if not cls:
|
||||||
target = indict.get('@type', None)
|
target = indict.get('@type', None)
|
||||||
|
cls = BaseModel
|
||||||
try:
|
try:
|
||||||
if target and target in _subtypes:
|
cls = subtypes()[target]
|
||||||
cls = _subtypes[target]
|
except KeyError:
|
||||||
else:
|
pass
|
||||||
cls = BaseModel
|
|
||||||
except Exception:
|
|
||||||
cls = BaseModel
|
|
||||||
outdict = dict()
|
outdict = dict()
|
||||||
for k, v in indict.items():
|
for k, v in indict.items():
|
||||||
if k == '@context':
|
if k == '@context':
|
||||||
@@ -270,10 +254,11 @@ def from_dict(indict, cls=None):
|
|||||||
elif isinstance(v, dict):
|
elif isinstance(v, dict):
|
||||||
v = from_dict(indict[k])
|
v = from_dict(indict[k])
|
||||||
elif isinstance(v, list):
|
elif isinstance(v, list):
|
||||||
|
v = v[:]
|
||||||
for ix, v2 in enumerate(v):
|
for ix, v2 in enumerate(v):
|
||||||
if isinstance(v2, dict):
|
if isinstance(v2, dict):
|
||||||
v[ix] = from_dict(v2)
|
v[ix] = from_dict(v2)
|
||||||
outdict[k] = v
|
outdict[k] = copy.copy(v)
|
||||||
return cls(**outdict)
|
return cls(**outdict)
|
||||||
|
|
||||||
|
|
||||||
@@ -286,43 +271,62 @@ def from_json(injson):
|
|||||||
return from_dict(indict)
|
return from_dict(indict)
|
||||||
|
|
||||||
|
|
||||||
def from_schema(name, schema=None, schema_file=None, base_classes=None):
|
class Entry(BaseModel):
|
||||||
|
schema = 'entry'
|
||||||
|
|
||||||
|
text = alias('nif:isString')
|
||||||
|
|
||||||
|
|
||||||
|
class Sentiment(BaseModel):
|
||||||
|
schema = 'sentiment'
|
||||||
|
|
||||||
|
polarity = alias('marl:hasPolarity')
|
||||||
|
polarityValue = alias('marl:hasPolarityValue')
|
||||||
|
|
||||||
|
|
||||||
|
class Error(BaseModel, Exception):
|
||||||
|
schema = 'error'
|
||||||
|
|
||||||
|
def __init__(self, message='Generic senpy exception', *args, **kwargs):
|
||||||
|
Exception.__init__(self, message)
|
||||||
|
super(Error, self).__init__(*args, **kwargs)
|
||||||
|
self.message = message
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
if not hasattr(self, 'errors'):
|
||||||
|
return self.message
|
||||||
|
return '{}:\n\t{}'.format(self.message, self.errors)
|
||||||
|
|
||||||
|
def __hash__(self):
|
||||||
|
return Exception.__hash__(self)
|
||||||
|
|
||||||
|
|
||||||
|
# Add the remaining schemas programmatically
|
||||||
|
|
||||||
|
def _class_from_schema(name, schema=None, schema_file=None, base_classes=None):
|
||||||
base_classes = base_classes or []
|
base_classes = base_classes or []
|
||||||
base_classes.append(BaseModel)
|
base_classes.append(BaseModel)
|
||||||
schema_file = schema_file or '{}.json'.format(name)
|
attrs = {}
|
||||||
class_name = '{}{}'.format(name[0].upper(), name[1:])
|
if schema:
|
||||||
if '/' not in 'schema_file':
|
attrs['schema'] = schema
|
||||||
schema_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
|
elif schema_file:
|
||||||
'schemas',
|
attrs['schema_file'] = schema_file
|
||||||
schema_file)
|
else:
|
||||||
|
attrs['schema'] = name
|
||||||
schema_path = 'file://' + schema_file
|
name = "".join((name[0].upper(), name[1:]))
|
||||||
|
return BaseMeta(name, base_classes, attrs)
|
||||||
with open(schema_file) as f:
|
|
||||||
schema = json.load(f)
|
|
||||||
|
|
||||||
dct = {}
|
|
||||||
|
|
||||||
resolver = jsonschema.RefResolver(schema_path, schema)
|
|
||||||
dct['@type'] = name
|
|
||||||
dct['_schema_file'] = schema_file
|
|
||||||
dct['schema'] = schema
|
|
||||||
dct['_validator'] = jsonschema.Draft4Validator(schema, resolver=resolver)
|
|
||||||
|
|
||||||
newclass = type(class_name, tuple(base_classes), dct)
|
|
||||||
|
|
||||||
register(newclass, name)
|
|
||||||
return newclass
|
|
||||||
|
|
||||||
|
|
||||||
def _add_from_schema(*args, **kwargs):
|
def _add_class_from_schema(*args, **kwargs):
|
||||||
generatedClass = from_schema(*args, **kwargs)
|
generatedClass = _class_from_schema(*args, **kwargs)
|
||||||
globals()[generatedClass.__name__] = generatedClass
|
globals()[generatedClass.__name__] = generatedClass
|
||||||
del generatedClass
|
del generatedClass
|
||||||
|
|
||||||
|
|
||||||
for i in [
|
for i in [
|
||||||
'analysis',
|
'aggregatedEvaluation',
|
||||||
|
'dataset',
|
||||||
|
'datasets',
|
||||||
'emotion',
|
'emotion',
|
||||||
'emotionConversion',
|
'emotionConversion',
|
||||||
'emotionConversionPlugin',
|
'emotionConversionPlugin',
|
||||||
@@ -330,55 +334,69 @@ for i in [
|
|||||||
'emotionModel',
|
'emotionModel',
|
||||||
'emotionPlugin',
|
'emotionPlugin',
|
||||||
'emotionSet',
|
'emotionSet',
|
||||||
'entry',
|
'evaluation',
|
||||||
|
'entity',
|
||||||
'help',
|
'help',
|
||||||
'plugin',
|
'metric',
|
||||||
|
'parameter',
|
||||||
'plugins',
|
'plugins',
|
||||||
'response',
|
'response',
|
||||||
'results',
|
'results',
|
||||||
'sentiment',
|
|
||||||
'sentimentPlugin',
|
'sentimentPlugin',
|
||||||
'suggestion',
|
'suggestion',
|
||||||
|
'topic',
|
||||||
|
|
||||||
]:
|
]:
|
||||||
_add_from_schema(i)
|
_add_class_from_schema(i)
|
||||||
|
|
||||||
_ErrorModel = from_schema('error')
|
|
||||||
|
|
||||||
|
|
||||||
class Error(SenpyMixin, Exception):
|
class Analysis(BaseModel):
|
||||||
def __init__(self, message, *args, **kwargs):
|
schema = 'analysis'
|
||||||
super(Error, self).__init__(self, message, message)
|
|
||||||
self._error = _ErrorModel(message=message, *args, **kwargs)
|
|
||||||
self.message = message
|
|
||||||
|
|
||||||
def validate(self, obj=None):
|
parameters = alias('prov:used')
|
||||||
self._error.validate()
|
|
||||||
|
|
||||||
def __getitem__(self, key):
|
@property
|
||||||
return self._error[key]
|
def params(self):
|
||||||
|
outdict = {}
|
||||||
|
outdict['algorithm'] = self.algorithm
|
||||||
|
for param in self.parameters:
|
||||||
|
outdict[param['name']] = param['value']
|
||||||
|
return outdict
|
||||||
|
|
||||||
def __setitem__(self, key, value):
|
@params.setter
|
||||||
self._error[key] = value
|
def params(self, value):
|
||||||
|
for k, v in value.items():
|
||||||
|
for param in self.parameters:
|
||||||
|
if param.name == k:
|
||||||
|
param.value = v
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
self.parameters.append(Parameter(name=k, value=v))
|
||||||
|
|
||||||
def __delitem__(self, key):
|
@property
|
||||||
del self._error[key]
|
def algorithm(self):
|
||||||
|
return self['prov:wasAssociatedWith']
|
||||||
|
|
||||||
def __getattr__(self, key):
|
@property
|
||||||
if key != '_error' and hasattr(self._error, key):
|
def plugin(self):
|
||||||
return getattr(self._error, key)
|
return self._plugin
|
||||||
raise AttributeError(key)
|
|
||||||
|
|
||||||
def __setattr__(self, key, value):
|
@plugin.setter
|
||||||
if key != '_error':
|
def plugin(self, value):
|
||||||
return setattr(self._error, key, value)
|
self._plugin = value
|
||||||
else:
|
self['prov:wasAssociatedWith'] = value.id
|
||||||
super(Error, self).__setattr__(key, value)
|
|
||||||
|
|
||||||
def __delattr__(self, key):
|
def run(self, request):
|
||||||
delattr(self._error, key)
|
return self.plugin.process(request, self.params)
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
return str(self.to_JSON(with_context=False))
|
|
||||||
|
|
||||||
|
|
||||||
register(Error, 'error')
|
class Plugin(BaseModel):
|
||||||
|
schema = 'plugin'
|
||||||
|
|
||||||
|
def activity(self, parameters):
|
||||||
|
'''Generate a prov:Activity from this plugin and the '''
|
||||||
|
a = Analysis()
|
||||||
|
a.plugin = self
|
||||||
|
a.params = parameters
|
||||||
|
return a
|
||||||
|
|
||||||
|
@@ -1,41 +1,127 @@
|
|||||||
from future import standard_library
|
from future import standard_library
|
||||||
standard_library.install_aliases()
|
standard_library.install_aliases()
|
||||||
|
|
||||||
|
from future.utils import with_metaclass
|
||||||
|
from functools import partial
|
||||||
|
|
||||||
import os.path
|
import os.path
|
||||||
import os
|
import os
|
||||||
|
import re
|
||||||
import pickle
|
import pickle
|
||||||
import logging
|
import logging
|
||||||
import tempfile
|
import pprint
|
||||||
import copy
|
|
||||||
|
|
||||||
import fnmatch
|
|
||||||
import inspect
|
import inspect
|
||||||
import sys
|
import sys
|
||||||
import subprocess
|
import subprocess
|
||||||
import importlib
|
import importlib
|
||||||
import yaml
|
import yaml
|
||||||
import threading
|
import threading
|
||||||
|
from nltk import download
|
||||||
|
|
||||||
from .. import models, utils
|
from .. import models, utils
|
||||||
from ..api import API_PARAMS
|
from .. import api
|
||||||
|
from .. import gsitk_compat
|
||||||
|
from .. import testing
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class Plugin(models.Plugin):
|
class PluginMeta(models.BaseMeta):
|
||||||
def __init__(self, info=None):
|
_classes = {}
|
||||||
|
|
||||||
|
def __new__(mcs, name, bases, attrs, **kwargs):
|
||||||
|
plugin_type = []
|
||||||
|
if hasattr(bases[0], 'plugin_type'):
|
||||||
|
plugin_type += bases[0].plugin_type
|
||||||
|
plugin_type.append(name)
|
||||||
|
alias = attrs.get('name', name)
|
||||||
|
attrs['plugin_type'] = plugin_type
|
||||||
|
attrs['name'] = alias
|
||||||
|
if 'description' not in attrs:
|
||||||
|
doc = attrs.get('__doc__', None)
|
||||||
|
if doc:
|
||||||
|
attrs['description'] = doc
|
||||||
|
else:
|
||||||
|
logger.warning(
|
||||||
|
('Plugin {} does not have a description. '
|
||||||
|
'Please, add a short summary to help other developers'
|
||||||
|
).format(name))
|
||||||
|
cls = super(PluginMeta, mcs).__new__(mcs, name, bases, attrs)
|
||||||
|
|
||||||
|
if alias in mcs._classes:
|
||||||
|
if os.environ.get('SENPY_TESTING', ""):
|
||||||
|
raise Exception(
|
||||||
|
('The type of plugin {} already exists. '
|
||||||
|
'Please, choose a different name').format(name))
|
||||||
|
else:
|
||||||
|
logger.warning('Overloading plugin class: {}'.format(alias))
|
||||||
|
mcs._classes[alias] = cls
|
||||||
|
return cls
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def for_type(cls, ptype):
|
||||||
|
return cls._classes[ptype]
|
||||||
|
|
||||||
|
|
||||||
|
class Plugin(with_metaclass(PluginMeta, models.Plugin)):
|
||||||
|
'''
|
||||||
|
Base class for all plugins in senpy.
|
||||||
|
A plugin must provide at least these attributes:
|
||||||
|
|
||||||
|
- version
|
||||||
|
- description (or docstring)
|
||||||
|
- author
|
||||||
|
|
||||||
|
Additionally, they may provide a URL (url) of a repository or website.
|
||||||
|
|
||||||
|
'''
|
||||||
|
|
||||||
|
def __init__(self, info=None, data_folder=None, **kwargs):
|
||||||
"""
|
"""
|
||||||
Provides a canonical name for plugins and serves as base for other
|
Provides a canonical name for plugins and serves as base for other
|
||||||
kinds of plugins.
|
kinds of plugins.
|
||||||
"""
|
"""
|
||||||
if not info:
|
|
||||||
raise models.Error(message=("You need to provide configuration"
|
|
||||||
"information for the plugin."))
|
|
||||||
logger.debug("Initialising {}".format(info))
|
logger.debug("Initialising {}".format(info))
|
||||||
id = 'plugins/{}_{}'.format(info['name'], info['version'])
|
super(Plugin, self).__init__(**kwargs)
|
||||||
super(Plugin, self).__init__(id=id, **info)
|
if info:
|
||||||
|
self.update(info)
|
||||||
|
self.validate()
|
||||||
|
self.id = 'endpoint:plugins/{}_{}'.format(self['name'],
|
||||||
|
self['version'])
|
||||||
self.is_activated = False
|
self.is_activated = False
|
||||||
self._lock = threading.Lock()
|
self._lock = threading.Lock()
|
||||||
|
self._directory = os.path.abspath(
|
||||||
|
os.path.dirname(inspect.getfile(self.__class__)))
|
||||||
|
|
||||||
|
data_folder = data_folder or os.getcwd()
|
||||||
|
subdir = os.path.join(data_folder, self.name)
|
||||||
|
|
||||||
|
self._data_paths = [
|
||||||
|
data_folder,
|
||||||
|
subdir,
|
||||||
|
self._directory,
|
||||||
|
os.path.join(self._directory, 'data'),
|
||||||
|
]
|
||||||
|
|
||||||
|
if os.path.exists(subdir):
|
||||||
|
data_folder = subdir
|
||||||
|
self.data_folder = data_folder
|
||||||
|
|
||||||
|
self._log = logging.getLogger('{}.{}'.format(__name__, self.name))
|
||||||
|
|
||||||
|
@property
|
||||||
|
def log(self):
|
||||||
|
return self._log
|
||||||
|
|
||||||
|
def validate(self):
|
||||||
|
missing = []
|
||||||
|
for x in ['name', 'description', 'version']:
|
||||||
|
if x not in self:
|
||||||
|
missing.append(x)
|
||||||
|
if missing:
|
||||||
|
raise models.Error(
|
||||||
|
'Missing configuration parameters: {}'.format(missing))
|
||||||
|
|
||||||
def get_folder(self):
|
def get_folder(self):
|
||||||
return os.path.dirname(inspect.getfile(self.__class__))
|
return os.path.dirname(inspect.getfile(self.__class__))
|
||||||
@@ -46,84 +132,347 @@ class Plugin(models.Plugin):
|
|||||||
def deactivate(self):
|
def deactivate(self):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def test(self):
|
def process(self, request, parameters, **kwargs):
|
||||||
if not hasattr(self, 'test_cases'):
|
"""
|
||||||
import inspect
|
An implemented plugin should override this method.
|
||||||
raise AttributeError(('Plugin {} [{}] does not have any defined '
|
Here, we assume that a process_entries method exists."""
|
||||||
'test cases').format(self.id, inspect.getfile(self.__class__)))
|
newentries = list(
|
||||||
for case in self.test_cases:
|
self.process_entries(request.entries, parameters))
|
||||||
res = list(self.analyse_entry(models.Entry(case['entry']),
|
request.entries = newentries
|
||||||
case['params']))
|
return request
|
||||||
exp = case['expected']
|
|
||||||
if not isinstance(exp, list):
|
|
||||||
exp = [exp]
|
|
||||||
utils.check_template(res, exp)
|
|
||||||
for r in res:
|
|
||||||
r.validate()
|
|
||||||
|
|
||||||
|
def process_entries(self, entries, parameters):
|
||||||
|
for entry in entries:
|
||||||
|
self.log.debug('Processing entry with plugin {}: {}'.format(
|
||||||
|
self, entry))
|
||||||
|
results = self.process_entry(entry, parameters)
|
||||||
|
if inspect.isgenerator(results):
|
||||||
|
for result in results:
|
||||||
|
yield result
|
||||||
|
else:
|
||||||
|
yield results
|
||||||
|
|
||||||
SenpyPlugin = Plugin
|
def process_entry(self, entry, parameters):
|
||||||
|
"""
|
||||||
|
This base method is here to adapt plugins which only
|
||||||
class AnalysisPlugin(Plugin):
|
implement the *process* function.
|
||||||
|
|
||||||
def analyse(self, *args, **kwargs):
|
|
||||||
raise NotImplemented(
|
|
||||||
'Your method should implement either analyse or analyse_entry')
|
|
||||||
|
|
||||||
def analyse_entry(self, entry, parameters):
|
|
||||||
""" An implemented plugin should override this method.
|
|
||||||
This base method is here to adapt old style plugins which only
|
|
||||||
implement the *analyse* function.
|
|
||||||
Note that this method may yield an annotated entry or a list of
|
Note that this method may yield an annotated entry or a list of
|
||||||
entries (e.g. in a tokenizer)
|
entries (e.g. in a tokenizer)
|
||||||
"""
|
"""
|
||||||
text = entry['nif:isString']
|
raise NotImplementedError(
|
||||||
params = copy.copy(parameters)
|
'You need to implement process, process_entries or process_entry in your plugin'
|
||||||
params['input'] = text
|
)
|
||||||
results = self.analyse(**params)
|
|
||||||
for i in results.entries:
|
def test(self, test_cases=None):
|
||||||
yield i
|
if not test_cases:
|
||||||
|
if not hasattr(self, 'test_cases'):
|
||||||
|
raise AttributeError(
|
||||||
|
('Plugin {} [{}] does not have any defined '
|
||||||
|
'test cases').format(self.id,
|
||||||
|
inspect.getfile(self.__class__)))
|
||||||
|
test_cases = self.test_cases
|
||||||
|
for case in test_cases:
|
||||||
|
try:
|
||||||
|
self.test_case(case)
|
||||||
|
self.log.debug('Test case passed:\n{}'.format(
|
||||||
|
pprint.pformat(case)))
|
||||||
|
except Exception as ex:
|
||||||
|
self.log.warning('Test case failed:\n{}'.format(
|
||||||
|
pprint.pformat(case)))
|
||||||
|
raise
|
||||||
|
|
||||||
|
def test_case(self, case, mock=testing.MOCK_REQUESTS):
|
||||||
|
if 'entry' not in case and 'input' in case:
|
||||||
|
entry = models.Entry(_auto_id=False)
|
||||||
|
entry.nif__isString = case['input']
|
||||||
|
case['entry'] = entry
|
||||||
|
entry = models.Entry(case['entry'])
|
||||||
|
given_parameters = case.get('params', case.get('parameters', {}))
|
||||||
|
expected = case.get('expected', None)
|
||||||
|
should_fail = case.get('should_fail', False)
|
||||||
|
responses = case.get('responses', [])
|
||||||
|
|
||||||
|
try:
|
||||||
|
request = models.Response()
|
||||||
|
parameters = api.parse_params(given_parameters,
|
||||||
|
self.extra_params)
|
||||||
|
request.entries = [
|
||||||
|
entry,
|
||||||
|
]
|
||||||
|
|
||||||
|
method = partial(self.process, request, parameters)
|
||||||
|
|
||||||
|
if mock:
|
||||||
|
res = method()
|
||||||
|
else:
|
||||||
|
with testing.patch_all_requests(responses):
|
||||||
|
res = method()
|
||||||
|
|
||||||
|
if not isinstance(expected, list):
|
||||||
|
expected = [expected]
|
||||||
|
utils.check_template(res.entries, expected)
|
||||||
|
res.validate()
|
||||||
|
except models.Error:
|
||||||
|
if should_fail:
|
||||||
|
return
|
||||||
|
raise
|
||||||
|
assert not should_fail
|
||||||
|
|
||||||
|
def find_file(self, fname):
|
||||||
|
for p in self._data_paths:
|
||||||
|
alternative = os.path.join(p, fname)
|
||||||
|
if os.path.exists(alternative):
|
||||||
|
return alternative
|
||||||
|
raise IOError('File does not exist: {}'.format(fname))
|
||||||
|
|
||||||
|
def open(self, fpath, mode='r'):
|
||||||
|
if 'w' in mode:
|
||||||
|
# When writing, only use absolute paths or data_folder
|
||||||
|
if not os.path.isabs(fpath):
|
||||||
|
fpath = os.path.join(self.data_folder, fpath)
|
||||||
|
else:
|
||||||
|
fpath = self.find_file(fpath)
|
||||||
|
|
||||||
|
return open(fpath, mode=mode)
|
||||||
|
|
||||||
|
def serve(self, debug=True, **kwargs):
|
||||||
|
utils.easy(plugin_list=[self, ], plugin_folder=None, debug=debug, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
# For backwards compatibility
|
||||||
|
SenpyPlugin = Plugin
|
||||||
|
|
||||||
|
|
||||||
|
class Analysis(Plugin):
|
||||||
|
'''
|
||||||
|
A subclass of Plugin that analyses text and provides an annotation.
|
||||||
|
'''
|
||||||
|
|
||||||
|
def analyse(self, request, parameters):
|
||||||
|
return super(Analysis, self).process(request, parameters)
|
||||||
|
|
||||||
def analyse_entries(self, entries, parameters):
|
def analyse_entries(self, entries, parameters):
|
||||||
|
for i in super(Analysis, self).process_entries(entries, parameters):
|
||||||
|
yield i
|
||||||
|
|
||||||
|
def process(self, request, parameters, **kwargs):
|
||||||
|
return self.analyse(request, parameters)
|
||||||
|
|
||||||
|
def process_entries(self, entries, parameters):
|
||||||
|
for i in self.analyse_entries(entries, parameters):
|
||||||
|
yield i
|
||||||
|
|
||||||
|
def process_entry(self, entry, parameters, **kwargs):
|
||||||
|
if hasattr(self, 'analyse_entry'):
|
||||||
|
for i in self.analyse_entry(entry, parameters):
|
||||||
|
yield i
|
||||||
|
else:
|
||||||
|
super(Analysis, self).process_entry(entry, parameters, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
AnalysisPlugin = Analysis
|
||||||
|
|
||||||
|
|
||||||
|
class Conversion(Plugin):
|
||||||
|
'''
|
||||||
|
A subclass of Plugins that convert between different annotation models.
|
||||||
|
e.g. a conversion of emotion models, or normalization of sentiment values.
|
||||||
|
'''
|
||||||
|
|
||||||
|
def process(self, response, parameters, plugins=None, **kwargs):
|
||||||
|
plugins = plugins or []
|
||||||
|
newentries = []
|
||||||
|
for entry in response.entries:
|
||||||
|
newentries.append(
|
||||||
|
self.convert_entry(entry, parameters, plugins))
|
||||||
|
response.entries = newentries
|
||||||
|
return response
|
||||||
|
|
||||||
|
def convert_entry(self, entry, parameters, conversions_applied):
|
||||||
|
raise NotImplementedError(
|
||||||
|
'You should implement a way to convert each entry, or a custom process method'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
ConversionPlugin = Conversion
|
||||||
|
|
||||||
|
|
||||||
|
class SentimentPlugin(Analysis, models.SentimentPlugin):
|
||||||
|
'''
|
||||||
|
Sentiment plugins provide sentiment annotation (using Marl)
|
||||||
|
'''
|
||||||
|
minPolarityValue = 0
|
||||||
|
maxPolarityValue = 1
|
||||||
|
|
||||||
|
def test_case(self, case):
|
||||||
|
if 'polarity' in case:
|
||||||
|
expected = case.get('expected', {})
|
||||||
|
s = models.Sentiment(_auto_id=False)
|
||||||
|
s.marl__hasPolarity = case['polarity']
|
||||||
|
if 'sentiments' not in expected:
|
||||||
|
expected['sentiments'] = []
|
||||||
|
expected['sentiments'].append(s)
|
||||||
|
case['expected'] = expected
|
||||||
|
super(SentimentPlugin, self).test_case(case)
|
||||||
|
|
||||||
|
|
||||||
|
class EmotionPlugin(Analysis, models.EmotionPlugin):
|
||||||
|
'''
|
||||||
|
Emotion plugins provide emotion annotation (using Onyx)
|
||||||
|
'''
|
||||||
|
minEmotionValue = 0
|
||||||
|
maxEmotionValue = 1
|
||||||
|
|
||||||
|
|
||||||
|
class EmotionConversion(Conversion):
|
||||||
|
'''
|
||||||
|
A subclass of Conversion that converts emotion annotations using different models
|
||||||
|
'''
|
||||||
|
|
||||||
|
def can_convert(self, fromModel, toModel):
|
||||||
|
'''
|
||||||
|
Whether this plugin can convert from fromModel to toModel.
|
||||||
|
If fromModel is None, it is interpreted as "any Model"
|
||||||
|
'''
|
||||||
|
for pair in self.onyx__doesConversion:
|
||||||
|
if (pair['onyx:conversionTo'] == toModel) and \
|
||||||
|
((fromModel is None) or (pair['onyx:conversionFrom'] == fromModel)):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
EmotionConversionPlugin = EmotionConversion
|
||||||
|
|
||||||
|
|
||||||
|
class PostProcessing(Plugin):
|
||||||
|
def check(self, request, plugins):
|
||||||
|
'''Should this plugin be run for this request?'''
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
class Box(AnalysisPlugin):
|
||||||
|
'''
|
||||||
|
Black box plugins delegate analysis to a function.
|
||||||
|
The flow is like so:
|
||||||
|
|
||||||
|
.. code-block::
|
||||||
|
|
||||||
|
entry --> input() --> predict_one() --> output() --> entry'
|
||||||
|
|
||||||
|
|
||||||
|
In other words: their ``input`` method convers a query (entry and a set of parameters) into
|
||||||
|
the input to the box method. The ``output`` method convers the results given by the box into
|
||||||
|
an entry that senpy can handle.
|
||||||
|
'''
|
||||||
|
|
||||||
|
def input(self, entry, params=None):
|
||||||
|
'''Transforms a query (entry+param) into an input for the black box'''
|
||||||
|
return entry
|
||||||
|
|
||||||
|
def output(self, output, entry=None, params=None):
|
||||||
|
'''Transforms the results of the black box into an entry'''
|
||||||
|
return output
|
||||||
|
|
||||||
|
def predict_one(self, input):
|
||||||
|
raise NotImplementedError(
|
||||||
|
'You should define the behavior of this plugin')
|
||||||
|
|
||||||
|
def process_entries(self, entries, params):
|
||||||
for entry in entries:
|
for entry in entries:
|
||||||
logger.debug('Analysing entry with plugin {}: {}'.format(self, entry))
|
input = self.input(entry=entry, params=params)
|
||||||
for result in self.analyse_entry(entry, parameters):
|
results = self.predict_one(input=input)
|
||||||
yield result
|
yield self.output(output=results, entry=entry, params=params)
|
||||||
|
|
||||||
|
def fit(self, X=None, y=None):
|
||||||
|
return self
|
||||||
|
|
||||||
|
def transform(self, X):
|
||||||
|
return [self.predict_one(x) for x in X]
|
||||||
|
|
||||||
|
def predict(self, X):
|
||||||
|
return self.transform(X)
|
||||||
|
|
||||||
|
def fit_transform(self, X, y):
|
||||||
|
self.fit(X, y)
|
||||||
|
return self.transform(X)
|
||||||
|
|
||||||
|
def as_pipe(self):
|
||||||
|
pipe = gsitk_compat.Pipeline([('plugin', self)])
|
||||||
|
pipe.name = self.name
|
||||||
|
return pipe
|
||||||
|
|
||||||
|
|
||||||
class ConversionPlugin(Plugin):
|
class TextBox(Box):
|
||||||
pass
|
'''A black box plugin that takes only text as input'''
|
||||||
|
|
||||||
|
def input(self, entry, params):
|
||||||
|
entry = super(TextBox, self).input(entry, params)
|
||||||
|
return entry['nif:isString']
|
||||||
|
|
||||||
|
|
||||||
class SentimentPlugin(models.SentimentPlugin, AnalysisPlugin):
|
class SentimentBox(TextBox, SentimentPlugin):
|
||||||
def __init__(self, info, *args, **kwargs):
|
'''
|
||||||
super(SentimentPlugin, self).__init__(info, *args, **kwargs)
|
A box plugin where the output is only a polarity label or a tuple (polarity, polarityValue)
|
||||||
self.minPolarityValue = float(info.get("minPolarityValue", 0))
|
'''
|
||||||
self.maxPolarityValue = float(info.get("maxPolarityValue", 1))
|
|
||||||
|
def output(self, output, entry, **kwargs):
|
||||||
|
s = models.Sentiment()
|
||||||
|
try:
|
||||||
|
label, value = output
|
||||||
|
except ValueError:
|
||||||
|
label, value = output, None
|
||||||
|
s.prov(self)
|
||||||
|
s.polarity = label
|
||||||
|
if value is not None:
|
||||||
|
s.polarityValue = value
|
||||||
|
entry.sentiments.append(s)
|
||||||
|
return entry
|
||||||
|
|
||||||
|
|
||||||
class EmotionPlugin(models.EmotionPlugin, AnalysisPlugin):
|
class EmotionBox(TextBox, EmotionPlugin):
|
||||||
def __init__(self, info, *args, **kwargs):
|
'''
|
||||||
super(EmotionPlugin, self).__init__(info, *args, **kwargs)
|
A box plugin where the output is only an a tuple of emotion labels
|
||||||
self.minEmotionValue = float(info.get("minEmotionValue", -1))
|
'''
|
||||||
self.maxEmotionValue = float(info.get("maxEmotionValue", 1))
|
|
||||||
|
def output(self, output, entry, **kwargs):
|
||||||
|
if not isinstance(output, list):
|
||||||
|
output = [output]
|
||||||
|
s = models.EmotionSet()
|
||||||
|
entry.emotions.append(s)
|
||||||
|
for label in output:
|
||||||
|
e = models.Emotion(onyx__hasEmotionCategory=label)
|
||||||
|
s.append(e)
|
||||||
|
return entry
|
||||||
|
|
||||||
|
|
||||||
class EmotionConversionPlugin(models.EmotionConversionPlugin, ConversionPlugin):
|
class MappingMixin(object):
|
||||||
pass
|
@property
|
||||||
|
def mappings(self):
|
||||||
|
return self._mappings
|
||||||
|
|
||||||
|
@mappings.setter
|
||||||
|
def mappings(self, value):
|
||||||
|
self._mappings = value
|
||||||
|
|
||||||
|
def output(self, output, entry, params):
|
||||||
|
output = self.mappings.get(output, self.mappings.get(
|
||||||
|
'default', output))
|
||||||
|
return super(MappingMixin, self).output(
|
||||||
|
output=output, entry=entry, params=params)
|
||||||
|
|
||||||
|
|
||||||
class ShelfMixin(object):
|
class ShelfMixin(object):
|
||||||
@property
|
@property
|
||||||
def sh(self):
|
def sh(self):
|
||||||
if not hasattr(self, '_sh') or self._sh is None:
|
if not hasattr(self, '_sh') or self._sh is None:
|
||||||
self.__dict__['_sh'] = {}
|
self._sh = {}
|
||||||
if os.path.isfile(self.shelf_file):
|
if os.path.isfile(self.shelf_file):
|
||||||
try:
|
try:
|
||||||
self.__dict__['_sh'] = pickle.load(open(self.shelf_file, 'rb'))
|
with self.open(self.shelf_file, 'rb') as p:
|
||||||
|
self._sh = pickle.load(p)
|
||||||
except (IndexError, EOFError, pickle.UnpicklingError):
|
except (IndexError, EOFError, pickle.UnpicklingError):
|
||||||
logger.warning('{} has a corrupted shelf file!'.format(self.id))
|
self.log.warning('Corrupted shelf file: {}'.format(
|
||||||
|
self.shelf_file))
|
||||||
if not self.get('force_shelf', False):
|
if not self.get('force_shelf', False):
|
||||||
raise
|
raise
|
||||||
return self._sh
|
return self._sh
|
||||||
@@ -132,44 +481,48 @@ class ShelfMixin(object):
|
|||||||
def sh(self):
|
def sh(self):
|
||||||
if os.path.isfile(self.shelf_file):
|
if os.path.isfile(self.shelf_file):
|
||||||
os.remove(self.shelf_file)
|
os.remove(self.shelf_file)
|
||||||
del self.__dict__['_sh']
|
del self._sh
|
||||||
self.save()
|
self.save()
|
||||||
|
|
||||||
|
@sh.setter
|
||||||
|
def sh(self, value):
|
||||||
|
self._sh = value
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def shelf_file(self):
|
def shelf_file(self):
|
||||||
if 'shelf_file' not in self or not self['shelf_file']:
|
if not hasattr(self, '_shelf_file') or not self._shelf_file:
|
||||||
sd = os.environ.get('SENPY_DATA', tempfile.gettempdir())
|
self._shelf_file = os.path.join(self.data_folder, self.name + '.p')
|
||||||
self.shelf_file = os.path.join(sd, self.name + '.p')
|
return self._shelf_file
|
||||||
return self['shelf_file']
|
|
||||||
|
@shelf_file.setter
|
||||||
|
def shelf_file(self, value):
|
||||||
|
self._shelf_file = value
|
||||||
|
|
||||||
def save(self):
|
def save(self):
|
||||||
logger.debug('saving pickle')
|
self.log.debug('Saving pickle')
|
||||||
if hasattr(self, '_sh') and self._sh is not None:
|
if hasattr(self, '_sh') and self._sh is not None:
|
||||||
with open(self.shelf_file, 'wb') as f:
|
with self.open(self.shelf_file, 'wb') as f:
|
||||||
pickle.dump(self._sh, f)
|
pickle.dump(self._sh, f)
|
||||||
|
|
||||||
|
|
||||||
default_plugin_type = API_PARAMS['plugin_type']['default']
|
def pfilter(plugins, plugin_type=Analysis, **kwargs):
|
||||||
|
|
||||||
|
|
||||||
def pfilter(plugins, **kwargs):
|
|
||||||
""" Filter plugins by different criteria """
|
""" Filter plugins by different criteria """
|
||||||
if isinstance(plugins, models.Plugins):
|
if isinstance(plugins, models.Plugins):
|
||||||
plugins = plugins.plugins
|
plugins = plugins.plugins
|
||||||
elif isinstance(plugins, dict):
|
elif isinstance(plugins, dict):
|
||||||
plugins = plugins.values()
|
plugins = plugins.values()
|
||||||
ptype = kwargs.pop('plugin_type', default_plugin_type)
|
|
||||||
logger.debug('#' * 100)
|
logger.debug('#' * 100)
|
||||||
logger.debug('ptype {}'.format(ptype))
|
logger.debug('plugin_type {}'.format(plugin_type))
|
||||||
if ptype:
|
if plugin_type:
|
||||||
|
if isinstance(plugin_type, PluginMeta):
|
||||||
|
plugin_type = plugin_type.__name__
|
||||||
try:
|
try:
|
||||||
ptype = ptype[0].upper() + ptype[1:]
|
plugin_type = plugin_type[0].upper() + plugin_type[1:]
|
||||||
pclass = globals()[ptype]
|
pclass = globals()[plugin_type]
|
||||||
logger.debug('Class: {}'.format(pclass))
|
logger.debug('Class: {}'.format(pclass))
|
||||||
candidates = filter(lambda x: isinstance(x, pclass),
|
candidates = filter(lambda x: isinstance(x, pclass), plugins)
|
||||||
plugins)
|
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise models.Error('{} is not a valid type'.format(ptype))
|
raise models.Error('{} is not a valid type'.format(plugin_type))
|
||||||
else:
|
else:
|
||||||
candidates = plugins
|
candidates = plugins
|
||||||
|
|
||||||
@@ -177,17 +530,12 @@ def pfilter(plugins, **kwargs):
|
|||||||
|
|
||||||
def matches(plug):
|
def matches(plug):
|
||||||
res = all(getattr(plug, k, None) == v for (k, v) in kwargs.items())
|
res = all(getattr(plug, k, None) == v for (k, v) in kwargs.items())
|
||||||
logger.debug(
|
logger.debug("matching {} with {}: {}".format(plug.name, kwargs, res))
|
||||||
"matching {} with {}: {}".format(plug.name, kwargs, res))
|
|
||||||
return res
|
return res
|
||||||
|
|
||||||
if kwargs:
|
if kwargs:
|
||||||
candidates = filter(matches, candidates)
|
candidates = filter(matches, candidates)
|
||||||
return {p.name: p for p in candidates}
|
return candidates
|
||||||
|
|
||||||
|
|
||||||
def validate_info(info):
|
|
||||||
return all(x in info for x in ('name', 'module', 'description', 'version'))
|
|
||||||
|
|
||||||
|
|
||||||
def load_module(name, root=None):
|
def load_module(name, root=None):
|
||||||
@@ -199,7 +547,7 @@ def load_module(name, root=None):
|
|||||||
return tmp
|
return tmp
|
||||||
|
|
||||||
|
|
||||||
def log_subprocess_output(process):
|
def _log_subprocess_output(process):
|
||||||
for line in iter(process.stdout.readline, b''):
|
for line in iter(process.stdout.readline, b''):
|
||||||
logger.info('%r', line)
|
logger.info('%r', line)
|
||||||
for line in iter(process.stderr.readline, b''):
|
for line in iter(process.stderr.readline, b''):
|
||||||
@@ -207,77 +555,179 @@ def log_subprocess_output(process):
|
|||||||
|
|
||||||
|
|
||||||
def install_deps(*plugins):
|
def install_deps(*plugins):
|
||||||
|
installed = False
|
||||||
|
nltk_resources = set()
|
||||||
for info in plugins:
|
for info in plugins:
|
||||||
requirements = info.get('requirements', [])
|
requirements = info.get('requirements', [])
|
||||||
if requirements:
|
if requirements:
|
||||||
pip_args = ['pip']
|
pip_args = [sys.executable, '-m', 'pip', 'install']
|
||||||
pip_args.append('install')
|
|
||||||
pip_args.append('--use-wheel')
|
|
||||||
for req in requirements:
|
for req in requirements:
|
||||||
pip_args.append(req)
|
pip_args.append(req)
|
||||||
logger.info('Installing requirements: ' + str(requirements))
|
logger.info('Installing requirements: ' + str(requirements))
|
||||||
process = subprocess.Popen(pip_args,
|
process = subprocess.Popen(
|
||||||
stdout=subprocess.PIPE,
|
pip_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
stderr=subprocess.PIPE)
|
_log_subprocess_output(process)
|
||||||
log_subprocess_output(process)
|
|
||||||
exitcode = process.wait()
|
exitcode = process.wait()
|
||||||
|
installed = True
|
||||||
if exitcode != 0:
|
if exitcode != 0:
|
||||||
raise models.Error("Dependencies not properly installed")
|
raise models.Error(
|
||||||
|
"Dependencies not properly installed: {}".format(pip_args))
|
||||||
|
nltk_resources |= set(info.get('nltk_resources', []))
|
||||||
|
|
||||||
|
installed |= download(list(nltk_resources))
|
||||||
|
return installed
|
||||||
|
|
||||||
|
|
||||||
def load_plugin_from_info(info, root=None, validator=validate_info, install=True):
|
is_plugin_file = re.compile(r'.*\.senpy$|senpy_[a-zA-Z0-9_]+\.py$|'
|
||||||
if not root and '_path' in info:
|
'^(?!test_)[a-zA-Z0-9_]+_plugin.py$')
|
||||||
root = os.path.dirname(info['_path'])
|
|
||||||
if not validator(info):
|
|
||||||
raise ValueError('Plugin info is not valid: {}'.format(info))
|
|
||||||
module = info["module"]
|
|
||||||
|
|
||||||
try:
|
|
||||||
tmp = load_module(module, root)
|
|
||||||
except ImportError:
|
|
||||||
if not install:
|
|
||||||
raise
|
|
||||||
install_deps(info)
|
|
||||||
tmp = load_module(module, root)
|
|
||||||
candidate = None
|
|
||||||
for _, obj in inspect.getmembers(tmp):
|
|
||||||
if inspect.isclass(obj) and inspect.getmodule(obj) == tmp:
|
|
||||||
logger.debug(("Found plugin class:"
|
|
||||||
" {}@{}").format(obj, inspect.getmodule(obj)))
|
|
||||||
candidate = obj
|
|
||||||
break
|
|
||||||
if not candidate:
|
|
||||||
logger.debug("No valid plugin for: {}".format(module))
|
|
||||||
return
|
|
||||||
module = candidate(info=info)
|
|
||||||
return module
|
|
||||||
|
|
||||||
|
|
||||||
def parse_plugin_info(fpath):
|
def find_plugins(folders):
|
||||||
logger.debug("Loading plugin: {}".format(fpath))
|
|
||||||
with open(fpath, 'r') as f:
|
|
||||||
info = yaml.load(f)
|
|
||||||
info['_path'] = fpath
|
|
||||||
name = info['name']
|
|
||||||
return name, info
|
|
||||||
|
|
||||||
|
|
||||||
def load_plugin(fpath):
|
|
||||||
name, info = parse_plugin_info(fpath)
|
|
||||||
logger.debug("Info: {}".format(info))
|
|
||||||
plugin = load_plugin_from_info(info)
|
|
||||||
return name, plugin
|
|
||||||
|
|
||||||
|
|
||||||
def load_plugins(folders, loader=load_plugin):
|
|
||||||
plugins = {}
|
|
||||||
for search_folder in folders:
|
for search_folder in folders:
|
||||||
for root, dirnames, filenames in os.walk(search_folder):
|
for root, dirnames, filenames in os.walk(search_folder):
|
||||||
# Do not look for plugins in hidden or special folders
|
# Do not look for plugins in hidden or special folders
|
||||||
dirnames[:] = [d for d in dirnames if d[0] not in ['.', '_']]
|
dirnames[:] = [d for d in dirnames if d[0] not in ['.', '_']]
|
||||||
for filename in fnmatch.filter(filenames, '*.senpy'):
|
for filename in filter(is_plugin_file.match, filenames):
|
||||||
fpath = os.path.join(root, filename)
|
fpath = os.path.join(root, filename)
|
||||||
name, plugin = loader(fpath)
|
yield fpath
|
||||||
if plugin and name:
|
|
||||||
plugins[name] = plugin
|
|
||||||
|
def from_path(fpath, install_on_fail=False, **kwargs):
|
||||||
|
logger.debug("Loading plugin from {}".format(fpath))
|
||||||
|
if fpath.endswith('.py'):
|
||||||
|
# We asume root is the dir of the file, and module is the name of the file
|
||||||
|
root = os.path.dirname(fpath)
|
||||||
|
module = os.path.basename(fpath)[:-3]
|
||||||
|
for instance in _from_module_name(module=module, root=root, **kwargs):
|
||||||
|
yield instance
|
||||||
|
else:
|
||||||
|
info = parse_plugin_info(fpath)
|
||||||
|
yield from_info(info, install_on_fail=install_on_fail, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def from_folder(folders, loader=from_path, **kwargs):
|
||||||
|
plugins = []
|
||||||
|
for fpath in find_plugins(folders):
|
||||||
|
for plugin in loader(fpath, **kwargs):
|
||||||
|
plugins.append(plugin)
|
||||||
return plugins
|
return plugins
|
||||||
|
|
||||||
|
|
||||||
|
def from_info(info, root=None, install_on_fail=True, **kwargs):
|
||||||
|
if any(x not in info for x in ('module', )):
|
||||||
|
raise ValueError('Plugin info is not valid: {}'.format(info))
|
||||||
|
module = info["module"]
|
||||||
|
|
||||||
|
if not root and '_path' in info:
|
||||||
|
root = os.path.dirname(info['_path'])
|
||||||
|
|
||||||
|
fun = partial(one_from_module, module, root=root, info=info, **kwargs)
|
||||||
|
try:
|
||||||
|
return fun()
|
||||||
|
except (ImportError, LookupError):
|
||||||
|
install_deps(info)
|
||||||
|
return fun()
|
||||||
|
|
||||||
|
|
||||||
|
def parse_plugin_info(fpath):
|
||||||
|
logger.debug("Parsing plugin info: {}".format(fpath))
|
||||||
|
with open(fpath, 'r') as f:
|
||||||
|
info = yaml.load(f)
|
||||||
|
info['_path'] = fpath
|
||||||
|
return info
|
||||||
|
|
||||||
|
|
||||||
|
def from_module(module, **kwargs):
|
||||||
|
|
||||||
|
if inspect.ismodule(module):
|
||||||
|
res = _from_loaded_module(module, **kwargs)
|
||||||
|
else:
|
||||||
|
res = _from_module_name(module, **kwargs)
|
||||||
|
for p in res:
|
||||||
|
yield p
|
||||||
|
|
||||||
|
|
||||||
|
def one_from_module(module, root, info, **kwargs):
|
||||||
|
if '@type' in info:
|
||||||
|
cls = PluginMeta.from_type(info['@type'])
|
||||||
|
return cls(info=info, **kwargs)
|
||||||
|
instance = next(
|
||||||
|
from_module(module=module, root=root, info=info, **kwargs), None)
|
||||||
|
if not instance:
|
||||||
|
raise Exception("No valid plugin for: {}".format(module))
|
||||||
|
return instance
|
||||||
|
|
||||||
|
|
||||||
|
def _classes_in_module(module):
|
||||||
|
for _, obj in inspect.getmembers(module):
|
||||||
|
if inspect.isclass(obj) and inspect.getmodule(obj) == module:
|
||||||
|
logger.debug(("Found plugin class:"
|
||||||
|
" {}@{}").format(obj, inspect.getmodule(obj)))
|
||||||
|
yield obj
|
||||||
|
|
||||||
|
|
||||||
|
def _instances_in_module(module):
|
||||||
|
for _, obj in inspect.getmembers(module):
|
||||||
|
if isinstance(obj, Plugin) and inspect.getmodule(obj) == module:
|
||||||
|
logger.debug(("Found plugin instance:"
|
||||||
|
" {}@{}").format(obj, inspect.getmodule(obj)))
|
||||||
|
yield obj
|
||||||
|
|
||||||
|
|
||||||
|
def _from_module_name(module, root, info=None, **kwargs):
|
||||||
|
module = load_module(module, root)
|
||||||
|
for plugin in _from_loaded_module(
|
||||||
|
module=module, root=root, info=info, **kwargs):
|
||||||
|
yield plugin
|
||||||
|
|
||||||
|
|
||||||
|
def _from_loaded_module(module, info=None, **kwargs):
|
||||||
|
for cls in _classes_in_module(module):
|
||||||
|
yield cls(info=info, **kwargs)
|
||||||
|
for instance in _instances_in_module(module):
|
||||||
|
yield instance
|
||||||
|
|
||||||
|
|
||||||
|
def evaluate(plugins, datasets, **kwargs):
|
||||||
|
ev = gsitk_compat.Eval(
|
||||||
|
tuples=None,
|
||||||
|
datasets=datasets,
|
||||||
|
pipelines=[plugin.as_pipe() for plugin in plugins])
|
||||||
|
ev.evaluate()
|
||||||
|
results = ev.results
|
||||||
|
evaluations = evaluations_to_JSONLD(results, **kwargs)
|
||||||
|
return evaluations
|
||||||
|
|
||||||
|
|
||||||
|
def evaluations_to_JSONLD(results, flatten=False):
|
||||||
|
'''
|
||||||
|
Map the evaluation results to a JSONLD scheme
|
||||||
|
'''
|
||||||
|
|
||||||
|
evaluations = list()
|
||||||
|
metric_names = ['accuracy', 'precision_macro', 'recall_macro',
|
||||||
|
'f1_macro', 'f1_weighted', 'f1_micro', 'f1_macro']
|
||||||
|
|
||||||
|
for index, row in results.iterrows():
|
||||||
|
evaluation = models.Evaluation()
|
||||||
|
if row.get('CV', True):
|
||||||
|
evaluation['@type'] = ['StaticCV', 'Evaluation']
|
||||||
|
evaluation.evaluatesOn = row['Dataset']
|
||||||
|
evaluation.evaluates = row['Model']
|
||||||
|
i = 0
|
||||||
|
if flatten:
|
||||||
|
metric = models.Metric()
|
||||||
|
for name in metric_names:
|
||||||
|
metric[name] = row[name]
|
||||||
|
evaluation.metrics.append(metric)
|
||||||
|
else:
|
||||||
|
# We should probably discontinue this representation
|
||||||
|
for name in metric_names:
|
||||||
|
metric = models.Metric()
|
||||||
|
metric['@id'] = 'Metric' + str(i)
|
||||||
|
metric['@type'] = name.capitalize()
|
||||||
|
metric.value = row[name]
|
||||||
|
evaluation.metrics.append(metric)
|
||||||
|
i += 1
|
||||||
|
evaluations.append(evaluation)
|
||||||
|
return evaluations
|
||||||
|
@@ -4,7 +4,15 @@ from senpy.plugins import EmotionPlugin
|
|||||||
from senpy.models import EmotionSet, Emotion, Entry
|
from senpy.models import EmotionSet, Emotion, Entry
|
||||||
|
|
||||||
|
|
||||||
class RmoRandPlugin(EmotionPlugin):
|
class EmoRand(EmotionPlugin):
|
||||||
|
name = "emoRand"
|
||||||
|
description = 'A sample plugin that returns a random emotion annotation'
|
||||||
|
author = '@balkian'
|
||||||
|
version = '0.1'
|
||||||
|
url = "https://github.com/gsi-upm/senpy-plugins-community"
|
||||||
|
requirements = {}
|
||||||
|
onyx__usesEmotionModel = "emoml:big6"
|
||||||
|
|
||||||
def analyse_entry(self, entry, params):
|
def analyse_entry(self, entry, params):
|
||||||
category = "emoml:big6happiness"
|
category = "emoml:big6happiness"
|
||||||
number = max(-1, min(1, random.gauss(0, 0.5)))
|
number = max(-1, min(1, random.gauss(0, 0.5)))
|
||||||
|
@@ -1,9 +0,0 @@
|
|||||||
---
|
|
||||||
name: emoRand
|
|
||||||
module: emoRand
|
|
||||||
description: A sample plugin that returns a random emotion annotation
|
|
||||||
author: "@balkian"
|
|
||||||
version: '0.1'
|
|
||||||
url: "https://github.com/gsi-upm/senpy-plugins-community"
|
|
||||||
requirements: {}
|
|
||||||
onyx:usesEmotionModel: "emoml:big6"
|
|
32
senpy/plugins/example/emorand_plugin.py
Normal file
32
senpy/plugins/example/emorand_plugin.py
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
import random
|
||||||
|
|
||||||
|
from senpy.plugins import EmotionPlugin
|
||||||
|
from senpy.models import EmotionSet, Emotion, Entry
|
||||||
|
|
||||||
|
|
||||||
|
class EmoRand(EmotionPlugin):
|
||||||
|
'''A sample plugin that returns a random emotion annotation'''
|
||||||
|
author = '@balkian'
|
||||||
|
version = '0.1'
|
||||||
|
url = "https://github.com/gsi-upm/senpy-plugins-community"
|
||||||
|
onyx__usesEmotionModel = "emoml:big6"
|
||||||
|
|
||||||
|
def analyse_entry(self, entry, params):
|
||||||
|
category = "emoml:big6happiness"
|
||||||
|
number = max(-1, min(1, random.gauss(0, 0.5)))
|
||||||
|
if number > 0:
|
||||||
|
category = "emoml:big6anger"
|
||||||
|
emotionSet = EmotionSet()
|
||||||
|
emotion = Emotion({"onyx:hasEmotionCategory": category})
|
||||||
|
emotionSet.onyx__hasEmotion.append(emotion)
|
||||||
|
emotionSet.prov__wasGeneratedBy = self.id
|
||||||
|
entry.emotions.append(emotionSet)
|
||||||
|
yield entry
|
||||||
|
|
||||||
|
def test(self):
|
||||||
|
params = dict()
|
||||||
|
results = list()
|
||||||
|
for i in range(100):
|
||||||
|
res = next(self.analyse_entry(Entry(nif__isString="Hello"), params))
|
||||||
|
res.validate()
|
||||||
|
results.append(res.emotions[0]['onyx:hasEmotion'][0]['onyx:hasEmotionCategory'])
|
@@ -1,10 +0,0 @@
|
|||||||
---
|
|
||||||
name: rand
|
|
||||||
module: rand
|
|
||||||
description: A sample plugin that returns a random sentiment annotation
|
|
||||||
author: "@balkian"
|
|
||||||
version: '0.1'
|
|
||||||
url: "https://github.com/gsi-upm/senpy-plugins-community"
|
|
||||||
requirements: {}
|
|
||||||
marl:maxPolarityValue: '1'
|
|
||||||
marl:minPolarityValue: "-1"
|
|
@@ -1,33 +1,35 @@
|
|||||||
import random
|
import random
|
||||||
|
from senpy import SentimentPlugin, Sentiment, Entry
|
||||||
from senpy.plugins import SentimentPlugin
|
|
||||||
from senpy.models import Sentiment, Entry
|
|
||||||
|
|
||||||
|
|
||||||
class RandPlugin(SentimentPlugin):
|
class Rand(SentimentPlugin):
|
||||||
|
'''A sample plugin that returns a random sentiment annotation'''
|
||||||
|
author = "@balkian"
|
||||||
|
version = '0.1'
|
||||||
|
url = "https://github.com/gsi-upm/senpy-plugins-community"
|
||||||
|
marl__maxPolarityValue = '1'
|
||||||
|
marl__minPolarityValue = "-1"
|
||||||
|
|
||||||
def analyse_entry(self, entry, params):
|
def analyse_entry(self, entry, params):
|
||||||
lang = params.get("language", "auto")
|
|
||||||
|
|
||||||
polarity_value = max(-1, min(1, random.gauss(0.2, 0.2)))
|
polarity_value = max(-1, min(1, random.gauss(0.2, 0.2)))
|
||||||
polarity = "marl:Neutral"
|
polarity = "marl:Neutral"
|
||||||
if polarity_value > 0:
|
if polarity_value > 0:
|
||||||
polarity = "marl:Positive"
|
polarity = "marl:Positive"
|
||||||
elif polarity_value < 0:
|
elif polarity_value < 0:
|
||||||
polarity = "marl:Negative"
|
polarity = "marl:Negative"
|
||||||
sentiment = Sentiment({
|
sentiment = Sentiment(marl__hasPolarity=polarity,
|
||||||
"marl:hasPolarity": polarity,
|
marl__polarityValue=polarity_value)
|
||||||
"marl:polarityValue": polarity_value
|
sentiment.prov(self)
|
||||||
})
|
|
||||||
sentiment["prov:wasGeneratedBy"] = self.id
|
|
||||||
entry.sentiments.append(sentiment)
|
entry.sentiments.append(sentiment)
|
||||||
entry.language = lang
|
|
||||||
yield entry
|
yield entry
|
||||||
|
|
||||||
def test(self):
|
def test(self):
|
||||||
|
'''Run several random analyses.'''
|
||||||
params = dict()
|
params = dict()
|
||||||
results = list()
|
results = list()
|
||||||
for i in range(100):
|
for i in range(50):
|
||||||
res = next(self.analyse_entry(Entry(nif__isString="Hello"), params))
|
res = next(self.analyse_entry(Entry(nif__isString="Hello"),
|
||||||
|
params))
|
||||||
res.validate()
|
res.validate()
|
||||||
results.append(res.sentiments[0]['marl:hasPolarity'])
|
results.append(res.sentiments[0]['marl:hasPolarity'])
|
||||||
assert 'marl:Positive' in results
|
assert 'marl:Positive' in results
|
@@ -1,64 +0,0 @@
|
|||||||
from senpy.plugins import AnalysisPlugin
|
|
||||||
from senpy.models import Entry
|
|
||||||
from nltk.tokenize.punkt import PunktSentenceTokenizer
|
|
||||||
from nltk.tokenize.simple import LineTokenizer
|
|
||||||
import nltk
|
|
||||||
|
|
||||||
|
|
||||||
class SplitPlugin(AnalysisPlugin):
|
|
||||||
|
|
||||||
def activate(self):
|
|
||||||
nltk.download('punkt')
|
|
||||||
|
|
||||||
def analyse_entry(self, entry, params):
|
|
||||||
chunker_type = params.get("delimiter", "sentence")
|
|
||||||
original_text = entry.get('nif:isString', None)
|
|
||||||
if chunker_type == "sentence":
|
|
||||||
tokenizer = PunktSentenceTokenizer()
|
|
||||||
if chunker_type == "paragraph":
|
|
||||||
tokenizer = LineTokenizer()
|
|
||||||
chars = tokenizer.span_tokenize(original_text)
|
|
||||||
for i, chunk in enumerate(tokenizer.tokenize(original_text)):
|
|
||||||
e = Entry()
|
|
||||||
e['nif:isString'] = chunk
|
|
||||||
if entry.id:
|
|
||||||
e.id = entry.id + "#char={},{}".format(chars[i][0], chars[i][1])
|
|
||||||
yield e
|
|
||||||
|
|
||||||
test_cases = [
|
|
||||||
{
|
|
||||||
'entry': {
|
|
||||||
'nif:isString': 'Hello. World.'
|
|
||||||
},
|
|
||||||
'params': {
|
|
||||||
'delimiter': 'sentence',
|
|
||||||
},
|
|
||||||
'expected': [
|
|
||||||
{
|
|
||||||
'nif:isString': 'Hello.'
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'nif:isString': 'World.'
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'entry': {
|
|
||||||
"id": ":test",
|
|
||||||
'nif:isString': 'Hello. World.'
|
|
||||||
},
|
|
||||||
'params': {
|
|
||||||
'delimiter': 'sentence',
|
|
||||||
},
|
|
||||||
'expected': [
|
|
||||||
{
|
|
||||||
"@id": ":test#char=0,6",
|
|
||||||
'nif:isString': 'Hello.'
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"@id": ":test#char=7,13",
|
|
||||||
'nif:isString': 'World.'
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
@@ -1,19 +0,0 @@
|
|||||||
---
|
|
||||||
name: split
|
|
||||||
module: senpy.plugins.misc.split
|
|
||||||
description: A sample plugin that chunks input text
|
|
||||||
author: "@militarpancho"
|
|
||||||
version: '0.2'
|
|
||||||
url: "https://github.com/gsi-upm/senpy"
|
|
||||||
requirements:
|
|
||||||
- nltk
|
|
||||||
extra_params:
|
|
||||||
delimiter:
|
|
||||||
aliases:
|
|
||||||
- type
|
|
||||||
- t
|
|
||||||
required: false
|
|
||||||
default: sentence
|
|
||||||
options:
|
|
||||||
- sentence
|
|
||||||
- paragraph
|
|
83
senpy/plugins/misc/split_plugin.py
Normal file
83
senpy/plugins/misc/split_plugin.py
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
from senpy.plugins import AnalysisPlugin
|
||||||
|
from senpy.models import Entry
|
||||||
|
from nltk.tokenize.punkt import PunktSentenceTokenizer
|
||||||
|
from nltk.tokenize.simple import LineTokenizer
|
||||||
|
import nltk
|
||||||
|
|
||||||
|
|
||||||
|
class Split(AnalysisPlugin):
|
||||||
|
'''description: A sample plugin that chunks input text'''
|
||||||
|
|
||||||
|
author = ["@militarpancho", '@balkian']
|
||||||
|
version = '0.3'
|
||||||
|
url = "https://github.com/gsi-upm/senpy"
|
||||||
|
|
||||||
|
extra_params = {
|
||||||
|
'delimiter': {
|
||||||
|
'aliases': ['type', 't'],
|
||||||
|
'required': False,
|
||||||
|
'default': 'sentence',
|
||||||
|
'options': ['sentence', 'paragraph']
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
def activate(self):
|
||||||
|
nltk.download('punkt')
|
||||||
|
|
||||||
|
def analyse_entry(self, entry, params):
|
||||||
|
yield entry
|
||||||
|
chunker_type = params["delimiter"]
|
||||||
|
original_text = entry['nif:isString']
|
||||||
|
if chunker_type == "sentence":
|
||||||
|
tokenizer = PunktSentenceTokenizer()
|
||||||
|
if chunker_type == "paragraph":
|
||||||
|
tokenizer = LineTokenizer()
|
||||||
|
chars = list(tokenizer.span_tokenize(original_text))
|
||||||
|
if len(chars) == 1:
|
||||||
|
# This sentence was already split
|
||||||
|
return
|
||||||
|
for i, chunk in enumerate(chars):
|
||||||
|
start, end = chunk
|
||||||
|
e = Entry()
|
||||||
|
e['nif:isString'] = original_text[start:end]
|
||||||
|
if entry.id:
|
||||||
|
e.id = entry.id + "#char={},{}".format(start, end)
|
||||||
|
yield e
|
||||||
|
|
||||||
|
test_cases = [
|
||||||
|
{
|
||||||
|
'entry': {
|
||||||
|
'nif:isString': 'Hello. World.'
|
||||||
|
},
|
||||||
|
'params': {
|
||||||
|
'delimiter': 'sentence',
|
||||||
|
},
|
||||||
|
'expected': [
|
||||||
|
{
|
||||||
|
'nif:isString': 'Hello.'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'nif:isString': 'World.'
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'entry': {
|
||||||
|
"@id": ":test",
|
||||||
|
'nif:isString': 'Hello\nWorld'
|
||||||
|
},
|
||||||
|
'params': {
|
||||||
|
'delimiter': 'paragraph',
|
||||||
|
},
|
||||||
|
'expected': [
|
||||||
|
{
|
||||||
|
"@id": ":test#char=0,5",
|
||||||
|
'nif:isString': 'Hello'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"@id": ":test#char=6,11",
|
||||||
|
'nif:isString': 'World'
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
@@ -6,7 +6,12 @@ logger = logging.getLogger(__name__)
|
|||||||
|
|
||||||
|
|
||||||
class CentroidConversion(EmotionConversionPlugin):
|
class CentroidConversion(EmotionConversionPlugin):
|
||||||
def __init__(self, info):
|
'''
|
||||||
|
This plugin converts emotion annotations from a dimensional model to a
|
||||||
|
categorical one, and vice versa. The centroids used in the conversion
|
||||||
|
are configurable and appear in the semantic description of the plugin.
|
||||||
|
'''
|
||||||
|
def __init__(self, info, *args, **kwargs):
|
||||||
if 'centroids' not in info:
|
if 'centroids' not in info:
|
||||||
raise Error('Centroid conversion plugins should provide '
|
raise Error('Centroid conversion plugins should provide '
|
||||||
'the centroids in their senpy file')
|
'the centroids in their senpy file')
|
||||||
@@ -33,7 +38,7 @@ class CentroidConversion(EmotionConversionPlugin):
|
|||||||
ncentroids[aliases.get(k1, k1)] = nv1
|
ncentroids[aliases.get(k1, k1)] = nv1
|
||||||
info['centroids'] = ncentroids
|
info['centroids'] = ncentroids
|
||||||
|
|
||||||
super(CentroidConversion, self).__init__(info)
|
super(CentroidConversion, self).__init__(info, *args, **kwargs)
|
||||||
|
|
||||||
self.dimensions = set()
|
self.dimensions = set()
|
||||||
for c in self.centroids.values():
|
for c in self.centroids.values():
|
@@ -1,6 +1,6 @@
|
|||||||
---
|
---
|
||||||
name: Ekman2FSRE
|
name: Ekman2FSRE
|
||||||
module: senpy.plugins.conversion.emotion.centroids
|
module: senpy.plugins.postprocessing.emotion.centroids
|
||||||
description: Plugin to convert emotion sets from Ekman to VAD
|
description: Plugin to convert emotion sets from Ekman to VAD
|
||||||
version: 0.2
|
version: 0.2
|
||||||
# No need to specify onyx:doesConversion because centroids.py adds it automatically from centroids_direction
|
# No need to specify onyx:doesConversion because centroids.py adds it automatically from centroids_direction
|
@@ -1,6 +1,6 @@
|
|||||||
---
|
---
|
||||||
name: Ekman2PAD
|
name: Ekman2PAD
|
||||||
module: senpy.plugins.conversion.emotion.centroids
|
module: senpy.plugins.postprocessing.emotion.centroids
|
||||||
description: Plugin to convert emotion sets from Ekman to VAD
|
description: Plugin to convert emotion sets from Ekman to VAD
|
||||||
version: 0.2
|
version: 0.2
|
||||||
# No need to specify onyx:doesConversion because centroids.py adds it automatically from centroids_direction
|
# No need to specify onyx:doesConversion because centroids.py adds it automatically from centroids_direction
|
196
senpy/plugins/postprocessing/emotion/maxEmotion_plugin.py
Normal file
196
senpy/plugins/postprocessing/emotion/maxEmotion_plugin.py
Normal file
@@ -0,0 +1,196 @@
|
|||||||
|
from senpy import PostProcessing, easy_test
|
||||||
|
|
||||||
|
|
||||||
|
class MaxEmotion(PostProcessing):
|
||||||
|
'''Plugin to extract the emotion with highest value from an EmotionSet'''
|
||||||
|
author = '@dsuarezsouto'
|
||||||
|
version = '0.1'
|
||||||
|
|
||||||
|
def process_entry(self, entry, params):
|
||||||
|
if len(entry.emotions) < 1:
|
||||||
|
yield entry
|
||||||
|
return
|
||||||
|
|
||||||
|
set_emotions = entry.emotions[0]['onyx:hasEmotion']
|
||||||
|
|
||||||
|
# If there is only one emotion, do not modify it
|
||||||
|
if len(set_emotions) < 2:
|
||||||
|
yield entry
|
||||||
|
return
|
||||||
|
|
||||||
|
max_emotion = set_emotions[0]
|
||||||
|
|
||||||
|
# Extract max emotion from the set emotions (emotion with highest intensity)
|
||||||
|
for tmp_emotion in set_emotions:
|
||||||
|
if tmp_emotion['onyx:hasEmotionIntensity'] > max_emotion[
|
||||||
|
'onyx:hasEmotionIntensity']:
|
||||||
|
max_emotion = tmp_emotion
|
||||||
|
|
||||||
|
if max_emotion['onyx:hasEmotionIntensity'] == 0:
|
||||||
|
max_emotion['onyx:hasEmotionCategory'] = "neutral"
|
||||||
|
max_emotion['onyx:hasEmotionIntensity'] = 1.0
|
||||||
|
|
||||||
|
entry.emotions[0]['onyx:hasEmotion'] = [max_emotion]
|
||||||
|
|
||||||
|
entry.emotions[0]['prov:wasGeneratedBy'] = "maxSentiment"
|
||||||
|
yield entry
|
||||||
|
|
||||||
|
def check(self, request, plugins):
|
||||||
|
return 'maxemotion' in request.parameters and self not in plugins
|
||||||
|
|
||||||
|
# Test Cases:
|
||||||
|
# 1 Normal Situation.
|
||||||
|
# 2 Case to return a Neutral Emotion.
|
||||||
|
test_cases = [
|
||||||
|
{
|
||||||
|
"name":
|
||||||
|
"If there are several emotions within an emotion set, reduce it to one.",
|
||||||
|
"entry": {
|
||||||
|
"@type":
|
||||||
|
"entry",
|
||||||
|
"emotions": [
|
||||||
|
{
|
||||||
|
"@id":
|
||||||
|
"Emotions0",
|
||||||
|
"@type":
|
||||||
|
"emotionSet",
|
||||||
|
"onyx:hasEmotion": [
|
||||||
|
{
|
||||||
|
"@id": "_:Emotion_1538121033.74",
|
||||||
|
"@type": "emotion",
|
||||||
|
"onyx:hasEmotionCategory": "anger",
|
||||||
|
"onyx:hasEmotionIntensity": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"@id": "_:Emotion_1538121033.74",
|
||||||
|
"@type": "emotion",
|
||||||
|
"onyx:hasEmotionCategory": "joy",
|
||||||
|
"onyx:hasEmotionIntensity": 0.3333333333333333
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"@id": "_:Emotion_1538121033.74",
|
||||||
|
"@type": "emotion",
|
||||||
|
"onyx:hasEmotionCategory": "negative-fear",
|
||||||
|
"onyx:hasEmotionIntensity": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"@id": "_:Emotion_1538121033.74",
|
||||||
|
"@type": "emotion",
|
||||||
|
"onyx:hasEmotionCategory": "sadness",
|
||||||
|
"onyx:hasEmotionIntensity": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"@id": "_:Emotion_1538121033.74",
|
||||||
|
"@type": "emotion",
|
||||||
|
"onyx:hasEmotionCategory": "disgust",
|
||||||
|
"onyx:hasEmotionIntensity": 0
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"nif:isString":
|
||||||
|
"Test"
|
||||||
|
},
|
||||||
|
'expected': {
|
||||||
|
"@type":
|
||||||
|
"entry",
|
||||||
|
"emotions": [
|
||||||
|
{
|
||||||
|
"@id":
|
||||||
|
"Emotions0",
|
||||||
|
"@type":
|
||||||
|
"emotionSet",
|
||||||
|
"onyx:hasEmotion": [
|
||||||
|
{
|
||||||
|
"@id": "_:Emotion_1538121033.74",
|
||||||
|
"@type": "emotion",
|
||||||
|
"onyx:hasEmotionCategory": "joy",
|
||||||
|
"onyx:hasEmotionIntensity": 0.3333333333333333
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"prov:wasGeneratedBy":
|
||||||
|
'maxSentiment'
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"nif:isString":
|
||||||
|
"Test"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":
|
||||||
|
"If the maximum emotion has an intensity of 0, return a neutral emotion.",
|
||||||
|
"entry": {
|
||||||
|
"@type":
|
||||||
|
"entry",
|
||||||
|
"emotions": [{
|
||||||
|
"@id":
|
||||||
|
"Emotions0",
|
||||||
|
"@type":
|
||||||
|
"emotionSet",
|
||||||
|
"onyx:hasEmotion": [
|
||||||
|
{
|
||||||
|
"@id": "_:Emotion_1538121033.74",
|
||||||
|
"@type": "emotion",
|
||||||
|
"onyx:hasEmotionCategory": "anger",
|
||||||
|
"onyx:hasEmotionIntensity": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"@id": "_:Emotion_1538121033.74",
|
||||||
|
"@type": "emotion",
|
||||||
|
"onyx:hasEmotionCategory": "joy",
|
||||||
|
"onyx:hasEmotionIntensity": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"@id":
|
||||||
|
"_:Emotion_1538121033.74",
|
||||||
|
"@type":
|
||||||
|
"emotion",
|
||||||
|
"onyx:hasEmotionCategory":
|
||||||
|
"negative-fear",
|
||||||
|
"onyx:hasEmotionIntensity":
|
||||||
|
0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"@id": "_:Emotion_1538121033.74",
|
||||||
|
"@type": "emotion",
|
||||||
|
"onyx:hasEmotionCategory":
|
||||||
|
"sadness",
|
||||||
|
"onyx:hasEmotionIntensity": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"@id": "_:Emotion_1538121033.74",
|
||||||
|
"@type": "emotion",
|
||||||
|
"onyx:hasEmotionCategory":
|
||||||
|
"disgust",
|
||||||
|
"onyx:hasEmotionIntensity": 0
|
||||||
|
}]
|
||||||
|
}],
|
||||||
|
"nif:isString":
|
||||||
|
"Test"
|
||||||
|
},
|
||||||
|
'expected': {
|
||||||
|
"@type":
|
||||||
|
"entry",
|
||||||
|
"emotions": [{
|
||||||
|
"@id":
|
||||||
|
"Emotions0",
|
||||||
|
"@type":
|
||||||
|
"emotionSet",
|
||||||
|
"onyx:hasEmotion": [{
|
||||||
|
"@id": "_:Emotion_1538121033.74",
|
||||||
|
"@type": "emotion",
|
||||||
|
"onyx:hasEmotionCategory": "neutral",
|
||||||
|
"onyx:hasEmotionIntensity": 1
|
||||||
|
}],
|
||||||
|
"prov:wasGeneratedBy":
|
||||||
|
'maxSentiment'
|
||||||
|
}],
|
||||||
|
"nif:isString":
|
||||||
|
"Test"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
easy_test()
|
@@ -1,21 +0,0 @@
|
|||||||
---
|
|
||||||
name: sentiment140
|
|
||||||
module: sentiment140
|
|
||||||
description: "Connects to the sentiment140 free API: http://sentiment140.com"
|
|
||||||
author: "@balkian"
|
|
||||||
version: '0.2'
|
|
||||||
url: "https://github.com/gsi-upm/senpy-plugins-community"
|
|
||||||
extra_params:
|
|
||||||
language:
|
|
||||||
"@id": lang_sentiment140
|
|
||||||
aliases:
|
|
||||||
- language
|
|
||||||
- l
|
|
||||||
required: false
|
|
||||||
options:
|
|
||||||
- es
|
|
||||||
- en
|
|
||||||
- auto
|
|
||||||
requirements: {}
|
|
||||||
maxPolarityValue: 1
|
|
||||||
minPolarityValue: 0
|
|
@@ -4,11 +4,31 @@ import json
|
|||||||
from senpy.plugins import SentimentPlugin
|
from senpy.plugins import SentimentPlugin
|
||||||
from senpy.models import Sentiment
|
from senpy.models import Sentiment
|
||||||
|
|
||||||
|
ENDPOINT = 'http://www.sentiment140.com/api/bulkClassifyJson'
|
||||||
|
|
||||||
|
|
||||||
|
class Sentiment140(SentimentPlugin):
|
||||||
|
'''Connects to the sentiment140 free API: http://sentiment140.com'''
|
||||||
|
|
||||||
|
author = "@balkian"
|
||||||
|
version = '0.2'
|
||||||
|
url = "https://github.com/gsi-upm/senpy-plugins-community"
|
||||||
|
extra_params = {
|
||||||
|
'language': {
|
||||||
|
"@id": 'lang_sentiment140',
|
||||||
|
'aliases': ['language', 'l'],
|
||||||
|
'required': False,
|
||||||
|
'default': 'auto',
|
||||||
|
'options': ['es', 'en', 'auto']
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
maxPolarityValue = 1
|
||||||
|
minPolarityValue = 0
|
||||||
|
|
||||||
class Sentiment140Plugin(SentimentPlugin):
|
|
||||||
def analyse_entry(self, entry, params):
|
def analyse_entry(self, entry, params):
|
||||||
lang = params.get("language", "auto")
|
lang = params["language"]
|
||||||
res = requests.post("http://www.sentiment140.com/api/bulkClassifyJson",
|
res = requests.post(ENDPOINT,
|
||||||
json.dumps({
|
json.dumps({
|
||||||
"language": lang,
|
"language": lang,
|
||||||
"data": [{
|
"data": [{
|
||||||
@@ -30,7 +50,6 @@ class Sentiment140Plugin(SentimentPlugin):
|
|||||||
marl__hasPolarity=polarity,
|
marl__hasPolarity=polarity,
|
||||||
marl__polarityValue=polarity_value)
|
marl__polarityValue=polarity_value)
|
||||||
sentiment.prov__wasGeneratedBy = self.id
|
sentiment.prov__wasGeneratedBy = self.id
|
||||||
entry.sentiments = []
|
|
||||||
entry.sentiments.append(sentiment)
|
entry.sentiments.append(sentiment)
|
||||||
entry.language = lang
|
entry.language = lang
|
||||||
yield entry
|
yield entry
|
||||||
@@ -48,6 +67,9 @@ class Sentiment140Plugin(SentimentPlugin):
|
|||||||
'marl:hasPolarity': 'marl:Positive',
|
'marl:hasPolarity': 'marl:Positive',
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
},
|
||||||
|
'responses': [{'url': ENDPOINT,
|
||||||
|
'method': 'POST',
|
||||||
|
'json': {'data': [{'polarity': 4}]}}]
|
||||||
}
|
}
|
||||||
]
|
]
|
38
senpy/schemas/aggregatedEvaluation.json
Normal file
38
senpy/schemas/aggregatedEvaluation.json
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
{
|
||||||
|
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||||
|
"allOf": [
|
||||||
|
{"$ref": "response.json"},
|
||||||
|
{
|
||||||
|
"title": "AggregatedEvaluation",
|
||||||
|
"description": "The results of the evaluation",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"@context": {
|
||||||
|
"$ref": "context.json"
|
||||||
|
},
|
||||||
|
"@type": {
|
||||||
|
"default": "AggregatedEvaluation"
|
||||||
|
},
|
||||||
|
"@id": {
|
||||||
|
"description": "ID of the aggregated evaluation",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"evaluations": {
|
||||||
|
"default": [],
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"$ref": "evaluation.json"
|
||||||
|
},{
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
},
|
||||||
|
"required": ["@id", "evaluations"]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
@@ -9,7 +9,20 @@
|
|||||||
"@type": {
|
"@type": {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
"description": "Type of the analysis. e.g. marl:SentimentAnalysis"
|
"description": "Type of the analysis. e.g. marl:SentimentAnalysis"
|
||||||
|
},
|
||||||
|
"prov:wasAssociatedWith": {
|
||||||
|
"@type": "string",
|
||||||
|
"description": "Algorithm/plugin that was used"
|
||||||
|
},
|
||||||
|
"prov:used": {
|
||||||
|
"description": "Parameters of the algorithm",
|
||||||
|
"@type": "array",
|
||||||
|
"default": [],
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"$ref": "parameter.json"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"required": ["@id", "@type"]
|
"required": ["@type", "prov:wasAssociatedWith"]
|
||||||
}
|
}
|
||||||
|
@@ -10,8 +10,10 @@
|
|||||||
"wna": "http://www.gsi.dit.upm.es/ontologies/wnaffect/ns#",
|
"wna": "http://www.gsi.dit.upm.es/ontologies/wnaffect/ns#",
|
||||||
"emoml": "http://www.gsi.dit.upm.es/ontologies/onyx/vocabularies/emotionml/ns#",
|
"emoml": "http://www.gsi.dit.upm.es/ontologies/onyx/vocabularies/emotionml/ns#",
|
||||||
"xsd": "http://www.w3.org/2001/XMLSchema#",
|
"xsd": "http://www.w3.org/2001/XMLSchema#",
|
||||||
|
"fam": "http://vocab.fusepool.info/fam#",
|
||||||
"topics": {
|
"topics": {
|
||||||
"@id": "dc:subject"
|
"@id": "nif:topic",
|
||||||
|
"@container": "@set"
|
||||||
},
|
},
|
||||||
"entities": {
|
"entities": {
|
||||||
"@id": "me:hasEntities"
|
"@id": "me:hasEntities"
|
||||||
@@ -39,7 +41,7 @@
|
|||||||
"@container": "@set"
|
"@container": "@set"
|
||||||
},
|
},
|
||||||
"analysis": {
|
"analysis": {
|
||||||
"@id": "AnalysisInvolved",
|
"@id": "prov:wasInformedBy",
|
||||||
"@type": "@id",
|
"@type": "@id",
|
||||||
"@container": "@set"
|
"@container": "@set"
|
||||||
},
|
},
|
||||||
|
29
senpy/schemas/dataset.json
Normal file
29
senpy/schemas/dataset.json
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
{
|
||||||
|
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||||
|
"name": "Dataset",
|
||||||
|
"properties": {
|
||||||
|
"@id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"name": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"compression": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"expected_bytes": {
|
||||||
|
"type": "int"
|
||||||
|
},
|
||||||
|
"filename": {
|
||||||
|
"description": "Name of the dataset",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"url": {
|
||||||
|
"description": "Classifier or plugin evaluated",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"stats": {
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["@id"]
|
||||||
|
}
|
18
senpy/schemas/datasets.json
Normal file
18
senpy/schemas/datasets.json
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
{
|
||||||
|
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||||
|
"allOf": [
|
||||||
|
{"$ref": "response.json"},
|
||||||
|
{
|
||||||
|
"required": ["datasets"],
|
||||||
|
"properties": {
|
||||||
|
"datasets": {
|
||||||
|
"type": "array",
|
||||||
|
"default": [],
|
||||||
|
"items": {
|
||||||
|
"$ref": "dataset.json"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
@@ -41,5 +41,20 @@
|
|||||||
},
|
},
|
||||||
"Response": {
|
"Response": {
|
||||||
"$ref": "response.json"
|
"$ref": "response.json"
|
||||||
|
},
|
||||||
|
"AggregatedEvaluation": {
|
||||||
|
"$ref": "aggregatedEvaluation.json"
|
||||||
|
},
|
||||||
|
"Evaluation": {
|
||||||
|
"$ref": "evaluation.json"
|
||||||
|
},
|
||||||
|
"Metric": {
|
||||||
|
"$ref": "metric.json"
|
||||||
|
},
|
||||||
|
"Dataset": {
|
||||||
|
"$ref": "dataset.json"
|
||||||
|
},
|
||||||
|
"Datasets": {
|
||||||
|
"$ref": "datasets.json"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -20,5 +20,5 @@
|
|||||||
"description": "The ID of the analysis that generated this Emotion. The full object should be included in the \"analysis\" property of the root object"
|
"description": "The ID of the analysis that generated this Emotion. The full object should be included in the \"analysis\" property of the root object"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"required": ["@id", "prov:wasGeneratedBy", "onyx:hasEmotion"]
|
"required": ["prov:wasGeneratedBy", "onyx:hasEmotion"]
|
||||||
}
|
}
|
||||||
|
@@ -35,5 +35,5 @@
|
|||||||
"default": []
|
"default": []
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"required": ["@id", "nif:isString"]
|
"required": ["nif:isString"]
|
||||||
}
|
}
|
||||||
|
28
senpy/schemas/evaluation.json
Normal file
28
senpy/schemas/evaluation.json
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
{
|
||||||
|
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||||
|
"name": "Evaluation",
|
||||||
|
"properties": {
|
||||||
|
"@id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"@type": {
|
||||||
|
"type": "array",
|
||||||
|
"default": "Evaluation"
|
||||||
|
|
||||||
|
},
|
||||||
|
"metrics": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {"$ref": "metric.json" },
|
||||||
|
"default": []
|
||||||
|
},
|
||||||
|
"evaluatesOn": {
|
||||||
|
"description": "Name of the dataset evaluated ",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"evaluates": {
|
||||||
|
"description": "Classifier or plugin evaluated",
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["@id", "metrics"]
|
||||||
|
}
|
@@ -7,11 +7,11 @@
|
|||||||
"description": "Help containing accepted parameters",
|
"description": "Help containing accepted parameters",
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"properties": {
|
"properties": {
|
||||||
"parameters": {
|
"valid_parameters": {
|
||||||
"type": "object"
|
"type": "object"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"required": "parameters"
|
"required": "valid_parameters"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
24
senpy/schemas/metric.json
Normal file
24
senpy/schemas/metric.json
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
{
|
||||||
|
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||||
|
"properties": {
|
||||||
|
"@id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"@type": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"maxValue": {
|
||||||
|
"type": "number"
|
||||||
|
},
|
||||||
|
"minValue": {
|
||||||
|
"type": "number"
|
||||||
|
},
|
||||||
|
"value": {
|
||||||
|
"type": "number"
|
||||||
|
},
|
||||||
|
"deviation": {
|
||||||
|
"type": "number"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["@id"]
|
||||||
|
}
|
16
senpy/schemas/parameter.json
Normal file
16
senpy/schemas/parameter.json
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||||
|
"description": "Parameters for a senpy analysis",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Name of the parameter"
|
||||||
|
},
|
||||||
|
"prov:value": {
|
||||||
|
"@type": "any",
|
||||||
|
"description": "Value of the parameter"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["name", "prov:value"]
|
||||||
|
}
|
@@ -1,7 +1,7 @@
|
|||||||
{
|
{
|
||||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"required": ["@id", "extra_params"],
|
"required": ["@id", "name", "description", "version", "plugin_type"],
|
||||||
"properties": {
|
"properties": {
|
||||||
"@id": {
|
"@id": {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
@@ -9,7 +9,19 @@
|
|||||||
},
|
},
|
||||||
"name": {
|
"name": {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
"description": "The name of the plugin, which will be used in the algorithm detection phase"
|
"description": "The name of the plugin, which will be used in the algorithm detection phase."
|
||||||
|
},
|
||||||
|
"description": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "A summary of what the plugin does, and pointers to further information."
|
||||||
|
},
|
||||||
|
"version": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "The version of the plugin."
|
||||||
|
},
|
||||||
|
"plugin_type": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Sub-type of plugin. e.g. sentimentPlugin"
|
||||||
},
|
},
|
||||||
"extra_params": {
|
"extra_params": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
|
@@ -21,13 +21,7 @@
|
|||||||
"default": [],
|
"default": [],
|
||||||
"type": "array",
|
"type": "array",
|
||||||
"items": {
|
"items": {
|
||||||
"anyOf": [
|
"$ref": "analysis.json"
|
||||||
{
|
|
||||||
"$ref": "analysis.json"
|
|
||||||
},{
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"entries": {
|
"entries": {
|
||||||
|
@@ -19,5 +19,5 @@
|
|||||||
"description": "The ID of the analysis that generated this Sentiment. The full object should be included in the \"analysis\" property of the root object"
|
"description": "The ID of the analysis that generated this Sentiment. The full object should be included in the \"analysis\" property of the root object"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"required": ["@id", "prov:wasGeneratedBy"]
|
"required": ["prov:wasGeneratedBy"]
|
||||||
}
|
}
|
||||||
|
@@ -152,3 +152,51 @@ textarea{
|
|||||||
/* background: white; */
|
/* background: white; */
|
||||||
display: none;
|
display: none;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.deco-none {
|
||||||
|
color: inherit;
|
||||||
|
text-decoration: inherit;
|
||||||
|
}
|
||||||
|
|
||||||
|
.deco-none:link {
|
||||||
|
color: inherit;
|
||||||
|
text-decoration: inherit;
|
||||||
|
}
|
||||||
|
|
||||||
|
.deco-none:hover {
|
||||||
|
color: inherit;
|
||||||
|
text-decoration: inherit;
|
||||||
|
}
|
||||||
|
|
||||||
|
.collapsed .collapseicon {
|
||||||
|
display: none !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.collapsed .expandicon {
|
||||||
|
display: inline-block !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.expandicon {
|
||||||
|
display: none !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.collapseicon {
|
||||||
|
display: inline-block !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.loader {
|
||||||
|
border: 6px solid #f3f3f3; /* Light grey */
|
||||||
|
border-top: 6px solid blue;
|
||||||
|
border-bottom: 6px solid blue;
|
||||||
|
|
||||||
|
border-radius: 50%;
|
||||||
|
width: 3em;
|
||||||
|
height: 3em;
|
||||||
|
animation: spin 2s linear infinite;
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes spin {
|
||||||
|
0% { transform: rotate(0deg); }
|
||||||
|
100% { transform: rotate(360deg); }
|
||||||
|
}
|
||||||
|
@@ -1,7 +1,11 @@
|
|||||||
var ONYX = "http://www.gsi.dit.upm.es/ontologies/onyx/ns#";
|
var ONYX = "http://www.gsi.dit.upm.es/ontologies/onyx/ns#";
|
||||||
var RDF_TYPE = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type";
|
var RDF_TYPE = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type";
|
||||||
var plugins_params={};
|
var plugins_params = default_params = {};
|
||||||
var default_params = JSON.parse($.ajax({type: "GET", url: "/api?help=True" , async: false}).responseText);
|
var plugins = [];
|
||||||
|
var defaultPlugin = {};
|
||||||
|
var gplugins = {};
|
||||||
|
var pipeline = [];
|
||||||
|
|
||||||
function replaceURLWithHTMLLinks(text) {
|
function replaceURLWithHTMLLinks(text) {
|
||||||
console.log('Text: ' + text);
|
console.log('Text: ' + text);
|
||||||
var exp = /(\b(https?|ftp|file):\/\/[-A-Z0-9+&@#\/%?=~_|!:,.;]*[-A-Z0-9+&@#\/%=~_|])/ig;
|
var exp = /(\b(https?|ftp|file):\/\/[-A-Z0-9+&@#\/%?=~_|!:,.;]*[-A-Z0-9+&@#\/%=~_|])/ig;
|
||||||
@@ -25,158 +29,436 @@ function hashchanged(){
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
$(document).ready(function() {
|
|
||||||
var response = JSON.parse($.ajax({type: "GET", url: "/api/plugins/" , async: false}).responseText);
|
|
||||||
var defaultPlugin= JSON.parse($.ajax({type: "GET", url: "/api/plugins/default" , async: false}).responseText);
|
|
||||||
html="";
|
|
||||||
var availablePlugins = document.getElementById('availablePlugins');
|
|
||||||
plugins = response.plugins;
|
|
||||||
gplugins = {};
|
|
||||||
for (r in plugins){
|
|
||||||
ptype = plugins[r]['@type'];
|
|
||||||
if(gplugins[ptype] == undefined){
|
|
||||||
gplugins[ptype] = [r]
|
|
||||||
}else{
|
|
||||||
gplugins[ptype].push(r)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for (g in gplugins){
|
|
||||||
html += "<optgroup label=\""+g+"\">"
|
|
||||||
for (r in gplugins[g]){
|
|
||||||
plugin = plugins[r]
|
|
||||||
if (plugin["name"]){
|
|
||||||
if (plugin["name"] == defaultPlugin["name"]){
|
|
||||||
if (plugin["is_activated"]){
|
|
||||||
html+= "<option value=\""+plugin["name"]+"\" selected=\"selected\">"+plugin["name"]+"</option>"
|
|
||||||
}else{
|
|
||||||
html+= "<option value=\""+plugin["name"]+"\" selected=\"selected\" disabled=\"disabled\">"+plugin["name"]+"</option>"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else{
|
|
||||||
if (plugin["is_activated"]){
|
|
||||||
html+= "<option value=\""+plugin["name"]+"\">"+plugin["name"]+"</option>"
|
|
||||||
}
|
|
||||||
else{
|
|
||||||
html+= "<option value=\""+plugin["name"]+"\" disabled=\"disabled\">"+plugin["name"]+"</option>"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (plugin["extra_params"]){
|
function get_plugins(response){
|
||||||
plugins_params[plugin["name"]]={};
|
for(ix in response.plugins){
|
||||||
for (param in plugin["extra_params"]){
|
plug = response.plugins[ix];
|
||||||
if (typeof plugin["extra_params"][param] !="string"){
|
plugins[plug.name] = plug;
|
||||||
var params = new Array();
|
}
|
||||||
var alias = plugin["extra_params"][param]["aliases"][0];
|
}
|
||||||
params[alias]=new Array();
|
|
||||||
for (option in plugin["extra_params"][param]["options"]){
|
|
||||||
params[alias].push(plugin["extra_params"][param]["options"][option])
|
|
||||||
}
|
|
||||||
plugins_params[plugin["name"]][alias] = (params[alias])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
var pluginList = document.createElement('li');
|
|
||||||
|
|
||||||
newHtml = ""
|
|
||||||
if(plugin.url) {
|
|
||||||
newHtml= "<a href="+plugin.url+">" + plugin.name + "</a>";
|
|
||||||
}else {
|
|
||||||
newHtml= plugin["name"];
|
|
||||||
}
|
|
||||||
newHtml += ": " + replaceURLWithHTMLLinks(plugin.description);
|
|
||||||
pluginList.innerHTML = newHtml;
|
|
||||||
availablePlugins.appendChild(pluginList)
|
|
||||||
}
|
|
||||||
html += "</optgroup>"
|
|
||||||
}
|
|
||||||
document.getElementById('plugins').innerHTML = html;
|
|
||||||
change_params();
|
|
||||||
|
|
||||||
$(window).on('hashchange', hashchanged);
|
|
||||||
hashchanged();
|
|
||||||
$('.tooltip-form').tooltip();
|
|
||||||
|
|
||||||
});
|
function get_datasets(response){
|
||||||
|
datasets = response.datasets
|
||||||
|
}
|
||||||
|
|
||||||
|
function group_plugins(){
|
||||||
|
for (r in plugins){
|
||||||
|
ptype = plugins[r]['@type'];
|
||||||
|
if(gplugins[ptype] == undefined){
|
||||||
|
gplugins[ptype] = [r];
|
||||||
|
}else{
|
||||||
|
gplugins[ptype].push(r);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
function change_params(){
|
function get_parameters(){
|
||||||
var plugin = document.getElementById("plugins").options[document.getElementById("plugins").selectedIndex].value;
|
for (p in plugins){
|
||||||
html=""
|
plugin = plugins[p];
|
||||||
for (param in default_params){
|
if (plugin["extra_params"]){
|
||||||
if ((default_params[param]['options']) && (['help','conversion'].indexOf(param) < 0)){
|
plugins_params[plugin["name"]] = plugin["extra_params"];
|
||||||
html+= "<label> "+param+"</label>"
|
|
||||||
html+= "<select id=\""+param+"\" name=\""+param+"\">"
|
|
||||||
for (option in default_params[param]['options']){
|
|
||||||
if (default_params[param]['options'][option] == default_params[param]['default']){
|
|
||||||
html+="<option value \""+default_params[param]['options'][option]+"\" selected >"+default_params[param]['options'][option]+"</option>"
|
|
||||||
}
|
|
||||||
else{
|
|
||||||
html+="<option value \""+default_params[param]['options'][option]+"\">"+default_params[param]['options'][option]+"</option>"
|
|
||||||
|
|
||||||
}
|
|
||||||
}
|
|
||||||
html+="</select><br>"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for (param in plugins_params[plugin]){
|
|
||||||
if (param || plugins_params[plugin][param].length > 1){
|
|
||||||
html+= "<label> Parameter "+param+"</label>"
|
|
||||||
html+= "<select id=\""+param+"\" name=\""+param+"\">"
|
|
||||||
for (option in plugins_params[plugin][param]){
|
|
||||||
html+="<option value \""+plugins_params[plugin][param][option]+"\">"+plugins_params[plugin][param][option]+"</option>"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
html+="</select>"
|
|
||||||
}
|
}
|
||||||
document.getElementById("params").innerHTML = html
|
}
|
||||||
};
|
}
|
||||||
|
|
||||||
|
function draw_plugins_selection(){
|
||||||
|
html="";
|
||||||
|
group_plugins();
|
||||||
|
for (g in gplugins){
|
||||||
|
html += "<optgroup label=\""+g+"\">"
|
||||||
|
for (r in gplugins[g]){
|
||||||
|
plugin = plugins[gplugins[g][r]]
|
||||||
|
if (!plugin["name"]){
|
||||||
|
console.log("No name for plugin ", plugin);
|
||||||
|
continue;
|
||||||
|
|
||||||
function load_JSON(){
|
|
||||||
url = "/api";
|
|
||||||
var container = document.getElementById('results');
|
|
||||||
var rawcontainer = document.getElementById("jsonraw");
|
|
||||||
rawcontainer.innerHTML = '';
|
|
||||||
container.innerHTML = '';
|
|
||||||
var plugin = document.getElementById("plugins").options[document.getElementById("plugins").selectedIndex].value;
|
|
||||||
var input = encodeURIComponent(document.getElementById("input").value);
|
|
||||||
url += "?algo="+plugin+"&i="+input
|
|
||||||
for (param in plugins_params[plugin]){
|
|
||||||
if (param != null){
|
|
||||||
var param_value = encodeURIComponent(document.getElementById(param).options[document.getElementById(param).selectedIndex].text);
|
|
||||||
if (param_value){
|
|
||||||
url+="&"+param+"="+param_value
|
|
||||||
}
|
}
|
||||||
}
|
html+= "<option value=\""+plugin.name+"\" "
|
||||||
}
|
if (plugin["name"] == defaultPlugin["name"]){
|
||||||
|
html+= " selected=\"selected\""
|
||||||
for (param in default_params){
|
|
||||||
if ((param != null) && (default_params[param]['options']) && (['help','conversion'].indexOf(param) < 0)){
|
|
||||||
var param_value = encodeURIComponent(document.getElementById(param).options[document.getElementById(param).selectedIndex].text);
|
|
||||||
if (param_value){
|
|
||||||
url+="&"+param+"="+param_value
|
|
||||||
}
|
}
|
||||||
|
if (!plugin["is_activated"]){
|
||||||
|
html+= " disabled=\"disabled\" "
|
||||||
|
}
|
||||||
|
html+=">"+plugin["name"]+"</option>"
|
||||||
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
html += "</optgroup>"
|
||||||
var response = $.ajax({type: "GET", url: url , async: false}).responseText;
|
// Two elements with plugin class
|
||||||
rawcontainer.innerHTML = replaceURLWithHTMLLinks(response)
|
// One from the evaluate tab and another one from the analyse tab
|
||||||
|
plugin_lists = document.getElementsByClassName('plugin')
|
||||||
document.getElementById("input_request").innerHTML = "<a href='"+url+"'>"+url+"</a>"
|
for (element in plugin_lists){
|
||||||
document.getElementById("results-div").style.display = 'block';
|
plugin_lists[element].innerHTML = html;
|
||||||
try {
|
}
|
||||||
response = JSON.parse(response);
|
draw_plugin_pipeline();
|
||||||
var options = {
|
}
|
||||||
mode: 'view'
|
|
||||||
};
|
|
||||||
var editor = new JSONEditor(container, options, response);
|
|
||||||
editor.expandAll();
|
|
||||||
}
|
|
||||||
catch(err){
|
|
||||||
console.log("Error decoding JSON (got turtle?)");
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
function draw_plugin_pipeline(){
|
||||||
|
var pipeHTML = "";
|
||||||
|
console.log("Drawing pipeline: ", pipeline);
|
||||||
|
for (ix in pipeline){
|
||||||
|
plug = pipeline[ix];
|
||||||
|
pipeHTML += '<span onclick="remove_plugin_pipeline(\'' + plug + '\')" class="btn btn-primary"><span ><i class="fa fa-minus"></i></span> ' + plug + '</span> <i class="fa fa-arrow-right"></i> ';
|
||||||
|
}
|
||||||
|
console.log(pipeHTML);
|
||||||
|
$("#pipeline").html(pipeHTML);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function remove_plugin_pipeline(name){
|
||||||
|
console.log("Removing plugin: ", name);
|
||||||
|
var index = pipeline.indexOf(name);
|
||||||
|
pipeline.splice(index, 1);
|
||||||
|
draw_plugin_pipeline();
|
||||||
|
|
||||||
|
}
|
||||||
|
function draw_plugins_list(){
|
||||||
|
var availablePlugins = document.getElementById('availablePlugins');
|
||||||
|
|
||||||
|
for(p in plugins){
|
||||||
|
var pluginEntry = document.createElement('li');
|
||||||
|
plugin = plugins[p];
|
||||||
|
newHtml = ""
|
||||||
|
if(plugin.url) {
|
||||||
|
newHtml= "<a href="+plugin.url+">" + plugin.name + "</a>";
|
||||||
|
}else {
|
||||||
|
newHtml= plugin["name"];
|
||||||
|
}
|
||||||
|
newHtml += ": " + replaceURLWithHTMLLinks(plugin.description);
|
||||||
|
pluginEntry.innerHTML = newHtml;
|
||||||
|
availablePlugins.appendChild(pluginEntry)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function add_plugin_pipeline(){
|
||||||
|
var selected = get_selected_plugin();
|
||||||
|
pipeline.push(selected);
|
||||||
|
console.log("Adding ", selected);
|
||||||
|
draw_plugin_pipeline();
|
||||||
|
}
|
||||||
|
|
||||||
|
function draw_datasets(){
|
||||||
|
html = "";
|
||||||
|
repeated_html = "<input class=\"checks-datasets\" type=\"checkbox\" value=\"";
|
||||||
|
for (dataset in datasets){
|
||||||
|
html += repeated_html+datasets[dataset]["@id"]+"\">"+datasets[dataset]["@id"];
|
||||||
|
html += "<br>"
|
||||||
|
}
|
||||||
|
document.getElementById("datasets").innerHTML = html;
|
||||||
|
}
|
||||||
|
|
||||||
|
$(document).ready(function() {
|
||||||
|
var response = JSON.parse($.ajax({type: "GET", url: "/api/plugins/" , async: false}).responseText);
|
||||||
|
defaultPlugin= JSON.parse($.ajax({type: "GET", url: "/api/plugins/default" , async: false}).responseText);
|
||||||
|
|
||||||
|
get_plugins(response);
|
||||||
|
get_default_parameters();
|
||||||
|
|
||||||
|
draw_plugins_list();
|
||||||
|
draw_plugins_selection();
|
||||||
|
draw_parameters();
|
||||||
|
draw_plugin_description();
|
||||||
|
|
||||||
|
if (evaluation_enabled) {
|
||||||
|
var response2 = JSON.parse($.ajax({type: "GET", url: "/api/datasets/" , async: false}).responseText);
|
||||||
|
get_datasets(response2);
|
||||||
|
draw_datasets();
|
||||||
|
}
|
||||||
|
|
||||||
|
$(window).on('hashchange', hashchanged);
|
||||||
|
hashchanged();
|
||||||
|
$('.tooltip-form').tooltip();
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
function get_default_parameters(){
|
||||||
|
default_params = JSON.parse($.ajax({type: "GET", url: "/api?help=true" , async: false}).responseText).valid_parameters;
|
||||||
|
// Remove the parameters that are always added
|
||||||
|
delete default_params["input"];
|
||||||
|
delete default_params["algorithm"];
|
||||||
|
delete default_params["help"];
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
function get_selected_plugin(){
|
||||||
|
return document.getElementsByClassName('plugin')[0].options[document.getElementsByClassName('plugin')[0].selectedIndex].value;
|
||||||
|
}
|
||||||
|
|
||||||
|
function draw_default_parameters(){
|
||||||
|
var basic_params = document.getElementById("basic_params");
|
||||||
|
basic_params.innerHTML = params_div(default_params);
|
||||||
|
}
|
||||||
|
|
||||||
|
function update_params(params, plug){
|
||||||
|
ep = plugins_params[plug];
|
||||||
|
for(k in ep){
|
||||||
|
params[k] = ep[k];
|
||||||
|
}
|
||||||
|
return params
|
||||||
|
}
|
||||||
|
|
||||||
|
function draw_extra_parameters(){
|
||||||
|
var plugin = get_selected_plugin();
|
||||||
|
get_parameters();
|
||||||
|
|
||||||
|
var extra_params = document.getElementById("extra_params");
|
||||||
|
var params = {};
|
||||||
|
for (sel in pipeline){
|
||||||
|
update_params(params, pipeline[sel]);
|
||||||
|
}
|
||||||
|
update_params(params, plugin);
|
||||||
|
extra_params.innerHTML = params_div(params);
|
||||||
|
}
|
||||||
|
|
||||||
|
function draw_parameters(){
|
||||||
|
draw_default_parameters();
|
||||||
|
draw_extra_parameters();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function add_default_params(){
|
||||||
|
var html = "";
|
||||||
|
// html += '<a href="#basic_params" class="btn btn-info" data-toggle="collapse">Basic API parameters</a>';
|
||||||
|
html += '<span id="basic_params" class="panel-collapse collapse">';
|
||||||
|
html += '<ul class="list-group">'
|
||||||
|
html += params_div(default_params);
|
||||||
|
html += '</span>';
|
||||||
|
return html;
|
||||||
|
}
|
||||||
|
|
||||||
|
function params_div(params){
|
||||||
|
var html = '<div class="container-fluid">';
|
||||||
|
if (Object.keys(params).length === 0) {
|
||||||
|
html += '<p class="text text-muted text-center">This plugin does not take any extra parameters</p>';
|
||||||
|
}
|
||||||
|
// Iterate over the keys in order
|
||||||
|
pnames = Object.keys(params).sort()
|
||||||
|
for (ix in pnames){
|
||||||
|
pname = pnames[ix];
|
||||||
|
param = params[pname];
|
||||||
|
html+='<div class="form-group">';
|
||||||
|
html += '<div class="row">'
|
||||||
|
html+= '<label class="col-sm-4" for="'+pname+'">'+pname+'</label>'
|
||||||
|
if (param.options){
|
||||||
|
opts = param.options;
|
||||||
|
if(param.options.length == 1 && param.options[0] == 'boolean') {
|
||||||
|
opts = [true, false];
|
||||||
|
}
|
||||||
|
html+= '<select class="col-sm-8" id="'+pname+"\" name=\""+pname+"\">"
|
||||||
|
var defaultopt = param.default;
|
||||||
|
for (option in opts){
|
||||||
|
isselected = "";
|
||||||
|
if (defaultopt != undefined && opts[option] == defaultopt ){
|
||||||
|
isselected = ' selected="selected"'
|
||||||
|
}
|
||||||
|
html+="<option value=\""+opts[option]+'"' + isselected +
|
||||||
|
'>'+opts[option]+"</option>"
|
||||||
|
}
|
||||||
|
html+="</select>"
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
default_value = "";
|
||||||
|
if(param.default != undefined){
|
||||||
|
default_value = param.default;
|
||||||
|
};
|
||||||
|
html +='<input class="col-sm-8" id="'+pname+'" name="'+pname+'" value="' + default_value + '"></input>';
|
||||||
|
}
|
||||||
|
html+='</div>';
|
||||||
|
html+='<div class="row">';
|
||||||
|
if ('description' in param){
|
||||||
|
html += '<p class="form-text sm-sm-12 text-muted text-center">' + param.description + '</p>';
|
||||||
|
|
||||||
|
}
|
||||||
|
html+='</div>';
|
||||||
|
html+='</div>';
|
||||||
|
}
|
||||||
|
html+='</div>';
|
||||||
|
return html;
|
||||||
|
}
|
||||||
|
|
||||||
|
function _get_form_parameters(id){
|
||||||
|
var element = document.getElementById(id);
|
||||||
|
params = {};
|
||||||
|
var selects = element.getElementsByTagName('select');
|
||||||
|
var inputs = element.getElementsByTagName('input');
|
||||||
|
|
||||||
|
Array.prototype.forEach.call(selects, function (sel) {
|
||||||
|
key = sel.name;
|
||||||
|
value = sel.options[sel.selectedIndex].value
|
||||||
|
params[key] = value;
|
||||||
|
});
|
||||||
|
|
||||||
|
Array.prototype.forEach.call(inputs, function (el) {
|
||||||
|
params[el.name] = el.value;
|
||||||
|
});
|
||||||
|
|
||||||
|
for (k in params){
|
||||||
|
value = params[k];
|
||||||
|
if (value == "" || value === "undefined"){
|
||||||
|
delete params[k];
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return params;
|
||||||
|
}
|
||||||
|
|
||||||
|
function get_form_parameters(){
|
||||||
|
var p1 = _get_form_parameters("basic_params");
|
||||||
|
var p2 = _get_form_parameters("extra_params");
|
||||||
|
return Object.assign(p1, p2);
|
||||||
|
}
|
||||||
|
|
||||||
|
function add_param(key, value){
|
||||||
|
value = encodeURIComponent(value);
|
||||||
|
return "&"+key+"="+value;
|
||||||
|
}
|
||||||
|
|
||||||
|
function get_pipeline_arg(){
|
||||||
|
arg = "";
|
||||||
|
for (ix in pipeline){
|
||||||
|
arg = arg + pipeline[ix] + ",";
|
||||||
|
}
|
||||||
|
arg = arg + get_selected_plugin();
|
||||||
|
return arg;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function load_JSON(){
|
||||||
|
url = "/api";
|
||||||
|
var container = document.getElementById('results');
|
||||||
|
var rawcontainer = document.getElementById("jsonraw");
|
||||||
|
rawcontainer.innerHTML = '';
|
||||||
|
container.innerHTML = '';
|
||||||
|
|
||||||
|
var plugin = get_pipeline_arg();
|
||||||
|
$(".loading").addClass("loader");
|
||||||
|
$("#preview").hide();
|
||||||
|
|
||||||
|
var input = encodeURIComponent(document.getElementById("input").value);
|
||||||
|
url += "?algo="+plugin+"&i="+input
|
||||||
|
|
||||||
|
params = get_form_parameters();
|
||||||
|
|
||||||
|
for (key in params){
|
||||||
|
url += add_param(key, params[key]);
|
||||||
|
}
|
||||||
|
|
||||||
|
$.ajax({type: "GET", url: url}).always(function(response){
|
||||||
|
document.getElementById("results-div").style.display = 'block';
|
||||||
|
if(typeof response=="object") {
|
||||||
|
var options = {
|
||||||
|
mode: 'view'
|
||||||
|
};
|
||||||
|
var editor = new JSONEditor(container, options, response);
|
||||||
|
editor.expandAll();
|
||||||
|
$('#results-div a[href="#viewer"]').click();
|
||||||
|
response = JSON.stringify(response, null, 4);
|
||||||
|
} else {
|
||||||
|
console.log("Got turtle?");
|
||||||
|
$('#results-div a[href="#raw"]').click();
|
||||||
|
}
|
||||||
|
|
||||||
|
rawcontainer.innerHTML = replaceURLWithHTMLLinks(response);
|
||||||
|
document.getElementById("input_request").innerHTML = "<a href='"+url+"'>"+url+"</a>"
|
||||||
|
|
||||||
|
$(".loading").removeClass("loader");
|
||||||
|
$("#preview").show();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function get_datasets_from_checkbox(){
|
||||||
|
var checks = document.getElementsByClassName("checks-datasets");
|
||||||
|
|
||||||
|
datasets = "";
|
||||||
|
for (var i = 0; i < checks.length; i++){
|
||||||
|
if (checks[i].checked){
|
||||||
|
datasets += checks[i].value + ",";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
datasets = datasets.slice(0, -1);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function create_body_metrics(evaluations){
|
||||||
|
var new_tbody = document.createElement('tbody')
|
||||||
|
var metric_html = ""
|
||||||
|
for (var eval in evaluations){
|
||||||
|
metric_html += "<tr><th>"+evaluations[eval].evaluates+"</th><th>"+evaluations[eval].evaluatesOn+"</th>";
|
||||||
|
for (var metric in evaluations[eval].metrics){
|
||||||
|
metric_html += "<th>"+parseFloat(evaluations[eval].metrics[metric].value.toFixed(4))+"</th>";
|
||||||
|
}
|
||||||
|
metric_html += "</tr>";
|
||||||
|
}
|
||||||
|
new_tbody.innerHTML = metric_html
|
||||||
|
return new_tbody
|
||||||
|
}
|
||||||
|
|
||||||
|
function evaluate_JSON(){
|
||||||
|
|
||||||
|
url = "/api/evaluate";
|
||||||
|
|
||||||
|
var container = document.getElementById('results_eval');
|
||||||
|
var rawcontainer = document.getElementById('jsonraw_eval');
|
||||||
|
var table = document.getElementById("eval_table");
|
||||||
|
|
||||||
|
rawcontainer.innerHTML = "";
|
||||||
|
container.innerHTML = "";
|
||||||
|
|
||||||
|
var plugin = document.getElementsByClassName("plugin")[0].options[document.getElementsByClassName("plugin")[0].selectedIndex].value;
|
||||||
|
|
||||||
|
get_datasets_from_checkbox();
|
||||||
|
|
||||||
|
url += "?algo="+plugin+"&dataset="+datasets
|
||||||
|
|
||||||
|
$('#doevaluate').attr("disabled", true);
|
||||||
|
$.ajax({type: "GET", url: url, dataType: 'json'}).always(function(resp) {
|
||||||
|
$('#doevaluate').attr("disabled", false);
|
||||||
|
response = resp.responseText;
|
||||||
|
|
||||||
|
rawcontainer.innerHTML = replaceURLWithHTMLLinks(response);
|
||||||
|
|
||||||
|
document.getElementById("input_request_eval").innerHTML = "<a href='"+url+"'>"+url+"</a>"
|
||||||
|
document.getElementById("evaluate-div").style.display = 'block';
|
||||||
|
|
||||||
|
try {
|
||||||
|
response = JSON.parse(response);
|
||||||
|
var options = {
|
||||||
|
mode: 'view'
|
||||||
|
};
|
||||||
|
|
||||||
|
//Control the single response results
|
||||||
|
if (!(Array.isArray(response.evaluations))){
|
||||||
|
response.evaluations = [response.evaluations]
|
||||||
|
}
|
||||||
|
|
||||||
|
new_tbody = create_body_metrics(response.evaluations)
|
||||||
|
table.replaceChild(new_tbody, table.lastElementChild)
|
||||||
|
|
||||||
|
var editor = new JSONEditor(container, options, response);
|
||||||
|
editor.expandAll();
|
||||||
|
// $('#results-div a[href="#viewer"]').tab('show');
|
||||||
|
$('#evaluate-div a[href="#evaluate-table"]').click();
|
||||||
|
// location.hash = 'raw';
|
||||||
|
|
||||||
|
|
||||||
|
}
|
||||||
|
catch(err){
|
||||||
|
console.log("Error decoding JSON (got turtle?)");
|
||||||
|
$('#evaluate-div a[href="#evaluate-raw"]').click();
|
||||||
|
// location.hash = 'raw';
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
function draw_plugin_description(){
|
||||||
|
var plugin = plugins[get_selected_plugin()];
|
||||||
|
$("#plugdescription").text(plugin.description);
|
||||||
|
console.log(plugin);
|
||||||
|
}
|
||||||
|
|
||||||
|
function plugin_selected(){
|
||||||
|
draw_extra_parameters();
|
||||||
|
draw_plugin_description();
|
||||||
|
}
|
||||||
|
@@ -5,6 +5,9 @@
|
|||||||
<title>Playground {{version}}</title>
|
<title>Playground {{version}}</title>
|
||||||
|
|
||||||
</head>
|
</head>
|
||||||
|
<script>
|
||||||
|
this.evaluation_enabled = {% if evaluation %}true{%else %}false{%endif%};
|
||||||
|
</script>
|
||||||
<script src="static/js/jquery-2.1.1.min.js" ></script>
|
<script src="static/js/jquery-2.1.1.min.js" ></script>
|
||||||
<!--<script src="jquery.autosize.min.js"></script>-->
|
<!--<script src="jquery.autosize.min.js"></script>-->
|
||||||
<link rel="stylesheet" href="static/css/bootstrap.min.css">
|
<link rel="stylesheet" href="static/css/bootstrap.min.css">
|
||||||
@@ -32,6 +35,10 @@
|
|||||||
<ul class="nav nav-tabs" role="tablist">
|
<ul class="nav nav-tabs" role="tablist">
|
||||||
<li role="presentation" ><a class="active" href="#about">About</a></li>
|
<li role="presentation" ><a class="active" href="#about">About</a></li>
|
||||||
<li role="presentation"class="active"><a class="active" href="#test">Test it</a></li>
|
<li role="presentation"class="active"><a class="active" href="#test">Test it</a></li>
|
||||||
|
{% if evaluation %}
|
||||||
|
<li role="presentation"><a class="active" href="#evaluate">Evaluate Plugins</a></li>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
</ul>
|
</ul>
|
||||||
|
|
||||||
<div class="tab-content">
|
<div class="tab-content">
|
||||||
@@ -54,10 +61,19 @@
|
|||||||
<ul>
|
<ul>
|
||||||
<li>List all available plugins: <a href="/api/plugins">/api/plugins</a></li>
|
<li>List all available plugins: <a href="/api/plugins">/api/plugins</a></li>
|
||||||
<li>Get information about the default plugin: <a href="/api/plugins/default">/api/plugins/default</a></li>
|
<li>Get information about the default plugin: <a href="/api/plugins/default">/api/plugins/default</a></li>
|
||||||
|
<li>List all available datasets: <a href="/api/datasets">/api/datasets</a></li>
|
||||||
<li>Download the JSON-LD context used: <a href="/api/contexts/Results.jsonld">/api/contexts/Results.jsonld</a></li>
|
<li>Download the JSON-LD context used: <a href="/api/contexts/Results.jsonld">/api/contexts/Results.jsonld</a></li>
|
||||||
</ul>
|
</ul>
|
||||||
|
|
||||||
</p>
|
</p>
|
||||||
|
<p>Senpy is a research project. If you use it in your research, please cite:
|
||||||
|
<pre>
|
||||||
|
Senpy: A Pragmatic Linked Sentiment Analysis Framework.
|
||||||
|
Sánchez-Rada, J. F., Iglesias, C. A., Corcuera, I., & Araque, Ó.
|
||||||
|
In Data Science and Advanced Analytics (DSAA),
|
||||||
|
2016 IEEE International Conference on (pp. 735-742). IEEE.
|
||||||
|
</pre>
|
||||||
|
</p>
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
<div class="col-lg-6 ">
|
<div class="col-lg-6 ">
|
||||||
@@ -67,8 +83,6 @@
|
|||||||
</div>
|
</div>
|
||||||
<div class="panel-body"><ul id=availablePlugins></ul></div>
|
<div class="panel-body"><ul id=availablePlugins></ul></div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
|
||||||
<div class="col-lg-6 ">
|
|
||||||
<a href="http://senpy.readthedocs.io">
|
<a href="http://senpy.readthedocs.io">
|
||||||
<div class="panel panel-default">
|
<div class="panel panel-default">
|
||||||
<div class="panel-heading"><i class="fa fa-book"></i> If you are new to senpy, you might want to read senpy's documentation</div>
|
<div class="panel-heading"><i class="fa fa-book"></i> If you are new to senpy, you might want to read senpy's documentation</div>
|
||||||
@@ -79,37 +93,72 @@
|
|||||||
<div class="panel-heading"><i class="fa fa-sign-in"></i> Feel free to follow us on GitHub</div>
|
<div class="panel-heading"><i class="fa fa-sign-in"></i> Feel free to follow us on GitHub</div>
|
||||||
</div>
|
</div>
|
||||||
</a>
|
</a>
|
||||||
<div class="panel panel-default">
|
|
||||||
<div class="panel-heading"><i class="fa fa-child"></i> Enjoy.</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="tab-pane active" id="test">
|
<div class="tab-pane active" id="test">
|
||||||
<div class="well">
|
<div class="well">
|
||||||
<form id="form" onsubmit="return getPlugins();" accept-charset="utf-8">
|
<form id="form" class="container" onsubmit="return getPlugins();" accept-charset="utf-8">
|
||||||
<div id="inputswrapper">
|
|
||||||
<div><textarea id="input" class="boxsizingBorder" rows="5" name="i">This text makes me sad.
|
<div><textarea id="input" class="boxsizingBorder" rows="5" name="i">This text makes me sad.
|
||||||
whilst this text makes me happy and surprised at the same time.
|
whilst this text makes me happy and surprised at the same time.
|
||||||
I cannot believe it!</textarea></div>
|
I cannot believe it!</textarea>
|
||||||
<label>Select the plugin:</label>
|
</div>
|
||||||
<select id="plugins" name="plugins" onchange="change_params()">
|
<!-- PARAMETERS -->
|
||||||
</select>
|
<div class="panel-group" id="parameters">
|
||||||
</br>
|
<div class="panel panel-default">
|
||||||
<div id ="params">
|
<div class="panel-heading">
|
||||||
|
<h4 class="panel-title">
|
||||||
|
Select the plugin.
|
||||||
|
</h4>
|
||||||
</div>
|
</div>
|
||||||
</br>
|
<div id="plugin_selection" class="panel-collapse panel-body">
|
||||||
|
<span id="pipeline"></span>
|
||||||
|
<select name="plugins" class="plugin" onchange="plugin_selected()">
|
||||||
|
</select>
|
||||||
|
<span onclick="add_plugin_pipeline()"><span class="btn"><i class="fa fa-plus" title="Add more plugins to the pipeline. Processing order is left to right. i.e. the results of the leftmost plugin will be used as input for the second leftmost, and so on."></i></span></span>
|
||||||
|
<label class="help-block " id="plugdescription"></label>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="panel panel-default">
|
||||||
|
<a data-toggle="collapse" class="deco-none collapsed" href="#basic_params">
|
||||||
|
<div class="panel-heading">
|
||||||
|
<h4 class="panel-title">
|
||||||
|
<i class="fa fa-chevron-right pull-left expandicon"></i>
|
||||||
|
<i class="fa fa-chevron-down pull-left collapseicon"></i>
|
||||||
|
Basic API parameters
|
||||||
|
</h4>
|
||||||
|
</div>
|
||||||
|
</a>
|
||||||
|
<div id="basic_params" class="panel-collapse collapse panel-body">
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="panel panel-default">
|
||||||
|
<a data-toggle="collapse" class="deco-none" href="#extra_params">
|
||||||
|
<div class="panel-heading">
|
||||||
|
<h4 class="panel-title">
|
||||||
|
<i class="fa fa-chevron-right pull-left expandicon"></i>
|
||||||
|
<i class="fa fa-chevron-down pull-left collapseicon"></i>
|
||||||
|
Plugin extra parameters
|
||||||
|
</h4>
|
||||||
|
</div>
|
||||||
|
</a>
|
||||||
|
<div id="extra_params" class="panel-collapse collapse in panel-body">
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<!-- END PARAMETERS -->
|
||||||
|
|
||||||
<a id="preview" class="btn btn-lg btn-primary" onclick="load_JSON()">Analyse!</a>
|
<a id="preview" class="btn btn-lg btn-primary" onclick="load_JSON()">Analyse!</a>
|
||||||
|
<div id="loading-results" class="loading"></div>
|
||||||
<!--<button id="visualise" name="type" type="button">Visualise!</button>-->
|
<!--<button id="visualise" name="type" type="button">Visualise!</button>-->
|
||||||
</div>
|
|
||||||
</form>
|
</form>
|
||||||
</div>
|
</div>
|
||||||
<span id="input_request"></span>
|
<span id="input_request"></span>
|
||||||
<div id="results-div">
|
<div id="results-div">
|
||||||
<ul class="nav nav-tabs" role="tablist">
|
<ul class="nav nav-tabs" role="tablist">
|
||||||
<li role="presentation" class="active"><a class="active" href="#viewer">Viewer</a></li>
|
<li role="presentation" class="active"><a data-toggle="tab" class="active" href="#viewer">Viewer</a></li>
|
||||||
<li role="presentation"><a class="active" href="#raw">Raw</a></li>
|
<li role="presentation"><a data-toggle="tab" class="active" href="#raw">Raw</a></li>
|
||||||
</ul>
|
</ul>
|
||||||
<div class="tab-content" id="results-container">
|
<div class="tab-content" id="results-container">
|
||||||
|
|
||||||
@@ -119,7 +168,7 @@ I cannot believe it!</textarea></div>
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="tab-pane" id="raw">
|
<div class="tab-pane" id="raw">
|
||||||
<div id="content">
|
<div id="content">
|
||||||
<pre id="jsonraw" class="results"></pre>
|
<pre id="jsonraw" class="results"></pre>
|
||||||
</div>
|
</div>
|
||||||
@@ -127,6 +176,73 @@ I cannot believe it!</textarea></div>
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
{% if evaluation %}
|
||||||
|
|
||||||
|
<div class="tab-pane" id="evaluate">
|
||||||
|
<div class="well">
|
||||||
|
<form id="form" class="container" onsubmit="return getPlugins();" accept-charset="utf-8">
|
||||||
|
<div>
|
||||||
|
<label>Select the plugin:</label>
|
||||||
|
<select id="plugins-eval" name="plugins-eval" class=plugin onchange="draw_extra_parameters()">
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<label>Select the datasets:</label>
|
||||||
|
<div id="datasets" name="datasets" >
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<a id="doevaluate" class="btn btn-lg btn-primary" onclick="evaluate_JSON()">Evaluate Plugin!</a>
|
||||||
|
<!--<button id="visualise" name="type" type="button">Visualise!</button>-->
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
|
<span id="input_request_eval"></span>
|
||||||
|
<div id="evaluate-div">
|
||||||
|
<ul class="nav nav-tabs" role="tablist">
|
||||||
|
<li role="presentation" class="active"><a data-toggle="tab" class="active" href="#evaluate-viewer">Viewer</a></li>
|
||||||
|
<li role="presentation"><a data-toggle="tab" class="active" href="#evaluate-raw">Raw</a></li>
|
||||||
|
<li role="presentation"><a data-toggle="tab" class="active" href="#evaluate-table">Table</a></li>
|
||||||
|
</ul>
|
||||||
|
<div class="tab-content" id="evaluate-container">
|
||||||
|
|
||||||
|
<div class="tab-pane active" id="evaluate-viewer">
|
||||||
|
<div id="content">
|
||||||
|
<pre id="results_eval" class="results_eval"></pre>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="tab-pane" id="evaluate-raw">
|
||||||
|
<div id="content">
|
||||||
|
<pre id="jsonraw_eval" class="results_eval"></pre>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="tab-pane" id="evaluate-table">
|
||||||
|
<table id="eval_table" class="table table-condensed">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th>Plugin</th>
|
||||||
|
<th>Dataset</th>
|
||||||
|
<th>Accuracy</th>
|
||||||
|
<th>Precision_macro</th>
|
||||||
|
<th>Recall_macro</th>
|
||||||
|
<th>F1_macro</th>
|
||||||
|
<th>F1_weighted</th>
|
||||||
|
<th>F1_micro</th>
|
||||||
|
<th>F1</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
|
||||||
<a href="http://www.gsi.dit.upm.es" target="_blank"><img class="center-block" src="static/img/gsi.png"/> </a>
|
<a href="http://www.gsi.dit.upm.es" target="_blank"><img class="center-block" src="static/img/gsi.png"/> </a>
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
|
31
senpy/testing.py
Normal file
31
senpy/testing.py
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
from past.builtins import basestring
|
||||||
|
|
||||||
|
import os
|
||||||
|
import responses as requestmock
|
||||||
|
|
||||||
|
from .models import BaseModel
|
||||||
|
|
||||||
|
|
||||||
|
MOCK_REQUESTS = os.environ.get('MOCK_REQUESTS', '').lower() in ['no', 'false']
|
||||||
|
|
||||||
|
|
||||||
|
def patch_all_requests(responses):
|
||||||
|
|
||||||
|
patched = requestmock.RequestsMock()
|
||||||
|
|
||||||
|
for response in responses or []:
|
||||||
|
args = response.copy()
|
||||||
|
if 'json' in args and isinstance(args['json'], BaseModel):
|
||||||
|
args['json'] = args['json'].jsonld()
|
||||||
|
args['method'] = getattr(requestmock, args.get('method', 'GET'))
|
||||||
|
patched.add(**args)
|
||||||
|
return patched
|
||||||
|
|
||||||
|
|
||||||
|
def patch_requests(url, response, method='GET', status=200):
|
||||||
|
args = {'url': url, 'method': method, 'status': status}
|
||||||
|
if isinstance(response, basestring):
|
||||||
|
args['body'] = response
|
||||||
|
else:
|
||||||
|
args['json'] = response
|
||||||
|
return patch_all_requests([args])
|
102
senpy/utils.py
102
senpy/utils.py
@@ -1,25 +1,107 @@
|
|||||||
from . import models
|
from . import models, __version__
|
||||||
|
from collections import MutableMapping
|
||||||
|
import pprint
|
||||||
|
import pdb
|
||||||
|
|
||||||
|
import logging
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# MutableMapping should be enough, but it causes problems with py2
|
||||||
|
DICTCLASSES = (MutableMapping, dict, models.BaseModel)
|
||||||
|
|
||||||
|
|
||||||
def check_template(indict, template):
|
def check_template(indict, template):
|
||||||
if isinstance(template, dict) and isinstance(indict, dict):
|
if isinstance(template, DICTCLASSES) and isinstance(indict, DICTCLASSES):
|
||||||
for k, v in template.items():
|
for k, v in template.items():
|
||||||
if k not in indict:
|
if k not in indict:
|
||||||
return '{} not in {}'.format(k, indict)
|
raise models.Error('{} not in {}'.format(k, indict))
|
||||||
check_template(indict[k], v)
|
check_template(indict[k], v)
|
||||||
elif isinstance(template, list) and isinstance(indict, list):
|
elif isinstance(template, list) and isinstance(indict, list):
|
||||||
if len(indict) != len(template):
|
|
||||||
raise models.Error('Different size for {} and {}'.format(indict, template))
|
|
||||||
for e in template:
|
for e in template:
|
||||||
found = False
|
|
||||||
for i in indict:
|
for i in indict:
|
||||||
try:
|
try:
|
||||||
check_template(i, e)
|
check_template(i, e)
|
||||||
found = True
|
break
|
||||||
except models.Error as ex:
|
except models.Error as ex:
|
||||||
|
# raise
|
||||||
continue
|
continue
|
||||||
if not found:
|
else:
|
||||||
raise models.Error('{} not found in {}'.format(e, indict))
|
raise models.Error(('Element not found.'
|
||||||
|
'\nExpected: {}\nIn: {}').format(pprint.pformat(e),
|
||||||
|
pprint.pformat(indict)))
|
||||||
else:
|
else:
|
||||||
if indict != template:
|
if indict != template:
|
||||||
raise models.Error('{} and {} are different'.format(indict, template))
|
raise models.Error(('Differences found.\n'
|
||||||
|
'\tExpected: {}\n'
|
||||||
|
'\tFound: {}').format(pprint.pformat(template),
|
||||||
|
pprint.pformat(indict)))
|
||||||
|
|
||||||
|
|
||||||
|
def convert_dictionary(original, mappings):
|
||||||
|
result = {}
|
||||||
|
for key, value in original.items():
|
||||||
|
if key in mappings:
|
||||||
|
key = mappings[key]
|
||||||
|
result[key] = value
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def easy_load(app=None, plugin_list=None, plugin_folder=None, **kwargs):
|
||||||
|
'''
|
||||||
|
Run a server with a specific plugin.
|
||||||
|
'''
|
||||||
|
|
||||||
|
from flask import Flask
|
||||||
|
from .extensions import Senpy
|
||||||
|
|
||||||
|
if not app:
|
||||||
|
app = Flask(__name__)
|
||||||
|
sp = Senpy(app, plugin_folder=plugin_folder, **kwargs)
|
||||||
|
if not plugin_list:
|
||||||
|
from . import plugins
|
||||||
|
import __main__
|
||||||
|
plugin_list = plugins.from_module(__main__)
|
||||||
|
for plugin in plugin_list:
|
||||||
|
sp.add_plugin(plugin)
|
||||||
|
sp.install_deps()
|
||||||
|
sp.activate_all()
|
||||||
|
return sp, app
|
||||||
|
|
||||||
|
|
||||||
|
def easy_test(plugin_list=None, debug=True):
|
||||||
|
logger.setLevel(logging.DEBUG)
|
||||||
|
logging.getLogger().setLevel(logging.INFO)
|
||||||
|
try:
|
||||||
|
if not plugin_list:
|
||||||
|
import __main__
|
||||||
|
logger.info('Loading classes from {}'.format(__main__))
|
||||||
|
from . import plugins
|
||||||
|
plugin_list = plugins.from_module(__main__)
|
||||||
|
for plug in plugin_list:
|
||||||
|
plug.test()
|
||||||
|
plug.log.info('My tests passed!')
|
||||||
|
logger.info('All tests passed for {} plugins!'.format(len(plugin_list)))
|
||||||
|
except Exception:
|
||||||
|
if not debug:
|
||||||
|
raise
|
||||||
|
pdb.post_mortem()
|
||||||
|
|
||||||
|
|
||||||
|
def easy(host='0.0.0.0', port=5000, debug=True, **kwargs):
|
||||||
|
'''
|
||||||
|
Run a server with a specific plugin.
|
||||||
|
'''
|
||||||
|
logging.getLogger().setLevel(logging.DEBUG)
|
||||||
|
logging.getLogger('senpy').setLevel(logging.INFO)
|
||||||
|
sp, app = easy_load(**kwargs)
|
||||||
|
easy_test(sp.plugins())
|
||||||
|
app.debug = debug
|
||||||
|
import time
|
||||||
|
logger.info(time.time())
|
||||||
|
logger.info('Senpy version {}'.format(__version__))
|
||||||
|
logger.info('Server running on port %s:%d. Ctrl+C to quit' % (host,
|
||||||
|
port))
|
||||||
|
app.debug = debug
|
||||||
|
app.run(host,
|
||||||
|
port,
|
||||||
|
debug=app.debug)
|
||||||
|
@@ -12,6 +12,7 @@ max-line-length = 100
|
|||||||
universal=1
|
universal=1
|
||||||
[tool:pytest]
|
[tool:pytest]
|
||||||
addopts = --cov=senpy --cov-report term-missing
|
addopts = --cov=senpy --cov-report term-missing
|
||||||
|
filterwarnings =
|
||||||
|
ignore:the matrix subclass:PendingDeprecationWarning
|
||||||
[coverage:report]
|
[coverage:report]
|
||||||
omit = senpy/__main__.py
|
omit = senpy/__main__.py
|
||||||
|
31
setup.py
31
setup.py
@@ -1,20 +1,20 @@
|
|||||||
import pip
|
|
||||||
from setuptools import setup
|
from setuptools import setup
|
||||||
# parse_requirements() returns generator of pip.req.InstallRequirement objects
|
|
||||||
from pip.req import parse_requirements
|
|
||||||
from senpy import __version__
|
|
||||||
|
|
||||||
try:
|
with open('senpy/VERSION') as f:
|
||||||
install_reqs = parse_requirements(
|
__version__ = f.read().strip()
|
||||||
"requirements.txt", session=pip.download.PipSession())
|
assert __version__
|
||||||
test_reqs = parse_requirements(
|
|
||||||
"test-requirements.txt", session=pip.download.PipSession())
|
|
||||||
except AttributeError:
|
|
||||||
install_reqs = parse_requirements("requirements.txt")
|
|
||||||
test_reqs = parse_requirements("test-requirements.txt")
|
|
||||||
|
|
||||||
install_reqs = [str(ir.req) for ir in install_reqs]
|
|
||||||
test_reqs = [str(ir.req) for ir in test_reqs]
|
def parse_requirements(filename):
|
||||||
|
""" load requirements from a pip requirements file """
|
||||||
|
with open(filename, 'r') as f:
|
||||||
|
lineiter = list(line.strip() for line in f)
|
||||||
|
return [line for line in lineiter if line and not line.startswith("#")]
|
||||||
|
|
||||||
|
|
||||||
|
install_reqs = parse_requirements("requirements.txt")
|
||||||
|
test_reqs = parse_requirements("test-requirements.txt")
|
||||||
|
extra_reqs = parse_requirements("extra-requirements.txt")
|
||||||
|
|
||||||
|
|
||||||
setup(
|
setup(
|
||||||
@@ -34,6 +34,9 @@ setup(
|
|||||||
install_requires=install_reqs,
|
install_requires=install_reqs,
|
||||||
tests_require=test_reqs,
|
tests_require=test_reqs,
|
||||||
setup_requires=['pytest-runner', ],
|
setup_requires=['pytest-runner', ],
|
||||||
|
extras_require={
|
||||||
|
'evaluation': extra_reqs
|
||||||
|
},
|
||||||
include_package_data=True,
|
include_package_data=True,
|
||||||
entry_points={
|
entry_points={
|
||||||
'console_scripts':
|
'console_scripts':
|
||||||
|
@@ -1,3 +1,5 @@
|
|||||||
mock
|
mock
|
||||||
pytest-cov
|
pytest-cov
|
||||||
pytest
|
pytest
|
||||||
|
scikit-learn
|
||||||
|
numpy
|
||||||
|
@@ -1,8 +0,0 @@
|
|||||||
---
|
|
||||||
name: Async
|
|
||||||
module: asyncplugin
|
|
||||||
description: I am async
|
|
||||||
author: "@balkian"
|
|
||||||
version: '0.1'
|
|
||||||
async: true
|
|
||||||
extra_params: {}
|
|
@@ -1,11 +0,0 @@
|
|||||||
from senpy.plugins import SentimentPlugin
|
|
||||||
|
|
||||||
|
|
||||||
class DummyPlugin(SentimentPlugin):
|
|
||||||
def analyse_entry(self, entry, params):
|
|
||||||
entry['nif:iString'] = entry['nif:isString'][::-1]
|
|
||||||
entry.reversed = entry.get('reversed', 0) + 1
|
|
||||||
yield entry
|
|
||||||
|
|
||||||
def test(self):
|
|
||||||
pass
|
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user