mirror of https://github.com/gsi-upm/senpy
synced 2025-10-19 01:38:28 +00:00

Compare commits (2 commits)

Author | SHA1 | Date
---|---|---
 | 83e2d415a1 |
 | f8ca595bc9 |
@@ -12,7 +12,7 @@ stages:
  - clean

before_script:
  - make -e login
  - docker login -u $HUB_USER -p $HUB_PASSWORD

.test: &test_definition
  stage: test
@@ -36,7 +36,6 @@ test-2.7:
  only:
    - tags
    - triggers
    - fix-makefiles

push-3.5:
  <<: *image_definition
@@ -63,13 +62,11 @@ push-github:
  only:
    - master
    - triggers
    - fix-makefiles

deploy_pypi:
  stage: deploy
  script: # Configure the PyPI credentials, then push the package, and cleanup the creds.
    - echo "[server-login]" >> ~/.pypirc
    - echo "repository=https://upload.pypi.org/legacy/" >> ~/.pypirc
    - echo "username=" ${PYPI_USER} >> ~/.pypirc
    - echo "password=" ${PYPI_PASSWORD} >> ~/.pypirc
    - make pip_upload
@@ -86,7 +83,6 @@ deploy:
    - make -e deploy
  only:
    - master
    - fix-makefiles

push-github:
  stage: deploy
@@ -1,27 +0,0 @@
These makefiles are recipes for several common tasks in different types of projects.
To add them to your project, simply do:

```
git remote add makefiles ssh://git@lab.cluster.gsi.dit.upm.es:2200/docs/templates/makefiles.git
git subtree add --prefix=.makefiles/ makefiles master
touch Makefile
echo "include .makefiles/base.mk" >> Makefile
```

Now you can take advantage of the recipes.
For instance, to add useful targets for a python project, just add this to your Makefile:

```
include .makefiles/python.mk
```

You may need to set special variables like the name of your project or the python versions you're targeting.
Take a look at each specific `.mk` file for more information, and the `Makefile` in the [senpy](https://lab.cluster.gsi.dit.upm.es/senpy/senpy) project for a real use case.

If you update the makefiles from your repository, make sure to push the changes for review in upstream (this repository):

```
make makefiles-push
```

It will automatically commit all unstaged changes in the .makefiles folder.
@@ -1,38 +0,0 @@
export
NAME ?= $(shell basename $(CURDIR))
VERSION ?= $(shell git describe --tags --dirty 2>/dev/null)

# Get the location of this makefile.
MK_DIR := $(dir $(abspath $(lastword $(MAKEFILE_LIST))))

-include .env
-include ../.env

.FORCE:

version: .FORCE
	@echo $(VERSION) > $(NAME)/VERSION
	@echo $(VERSION)

help: ## Show this help.
	@fgrep -h "##" $(MAKEFILE_LIST) | fgrep -v fgrep | sed -e 's/\\$$//' | sed -e 's/\(.*:\)[^#]*##\s*\(.*\)/\1\t\2/' | column -t -s "	"

config: ## Load config from the environment. You should run it once in every session before other tasks. Run: eval $(make config)
	@awk '{ print "export " $$0}' ../.env
	@awk '{ print "export " $$0}' .env
	@echo "# Please, run: "
	@echo "# eval \$$(make config)"
# If you need to run a command on the key/value pairs, use this:
# @awk '{ split($$0, a, "="); "echo " a[2] " | base64 -w 0" |& getline b64; print "export " a[1] "=" a[2]; print "export " a[1] "_BASE64=" b64}' .env

ci: ## Run a task using gitlab-runner. Only use to debug problems in the CI pipeline
	gitlab-runner exec shell --builds-dir '.builds' --env CI_PROJECT_NAME=$(NAME) ${action}

include $(MK_DIR)/makefiles.mk
include $(MK_DIR)/docker.mk
include $(MK_DIR)/git.mk

info:: ## List all variables
	env

.PHONY:: config help ci version .FORCE
@@ -1,25 +0,0 @@
IMAGEWTAG ?= $(IMAGENAME):$(VERSION)

docker-login: ## Log in to the registry. It will only be used in the server, or when running a CI task locally (if CI_BUILD_TOKEN is set).
ifeq ($(CI_BUILD_TOKEN),)
	@echo "Not logging in to the docker registry" "$(CI_REGISTRY)"
else
	@docker login -u gitlab-ci-token -p $(CI_BUILD_TOKEN) $(CI_REGISTRY)
endif
ifeq ($(HUB_USER),)
	@echo "Not logging in to the global docker registry"
else
	@docker login -u $(HUB_USER) -p $(HUB_PASSWORD)
endif

docker-clean: ## Remove docker credentials
ifeq ($(HUB_USER),)
else
	@docker logout
endif

login:: docker-login

clean:: docker-clean

.PHONY:: docker-login docker-clean login clean
@@ -1,28 +0,0 @@
commit:
	git commit -a

tag:
	git tag ${VERSION}

git-push::
	git push --tags -u origin HEAD

git-pull:
	git pull --all

push-github: ## Push the code to github. You need to set up GITHUB_DEPLOY_KEY
ifeq ($(GITHUB_DEPLOY_KEY),)
else
	$(eval KEY_FILE := "$(shell mktemp)")
	@echo "$(GITHUB_DEPLOY_KEY)" > $(KEY_FILE)
	@git remote rm github-deploy || true
	git remote add github-deploy $(GITHUB_REPO)
	-@GIT_SSH_COMMAND="ssh -i $(KEY_FILE)" git fetch github-deploy $(CI_COMMIT_REF_NAME)
	@GIT_SSH_COMMAND="ssh -i $(KEY_FILE)" git push github-deploy HEAD:$(CI_COMMIT_REF_NAME)
	rm $(KEY_FILE)
endif

push:: git-push
pull:: git-pull

.PHONY:: commit tag push git-push git-pull push-github
@@ -1,51 +0,0 @@
# Deployment with Kubernetes

# KUBE_CA_PEM_FILE is the path of a certificate file. It is automatically set by GitLab
# if you enable Kubernetes integration in a project.
#
# As of this writing, Kubernetes integration can not be set on a group level, so it has to
# be manually set in every project.
# Alternatively, we use a custom KUBE_CA_BUNDLE environment variable, which can be set at
# the group level. In this case, the variable contains the whole content of the certificate,
# which we dump to a temporary file.
#
# Check if the KUBE_CA_PEM_FILE exists. Otherwise, create it from KUBE_CA_BUNDLE
KUBE_CA_TEMP=false
ifndef KUBE_CA_PEM_FILE
KUBE_CA_PEM_FILE:=$$PWD/.ca.crt
CREATED:=$(shell echo -e "$(KUBE_CA_BUNDLE)" > $(KUBE_CA_PEM_FILE))
endif
KUBE_TOKEN?=""
KUBE_NAMESPACE?=$(NAME)
KUBECTL=docker run --rm -v $(KUBE_CA_PEM_FILE):/tmp/ca.pem -i lachlanevenson/k8s-kubectl --server="$(KUBE_URL)" --token="$(KUBE_TOKEN)" --certificate-authority="/tmp/ca.pem" -n $(KUBE_NAMESPACE)
CI_COMMIT_REF_NAME?=master

info:: ## Print variables. Useful for debugging.
	@echo "#KUBERNETES"
	@echo KUBE_URL=$(KUBE_URL)
	@echo KUBE_CA_PEM_FILE=$(KUBE_CA_PEM_FILE)
	@echo KUBE_CA_BUNDLE=$$KUBE_CA_BUNDLE
	@echo KUBE_TOKEN=$(KUBE_TOKEN)
	@echo KUBE_NAMESPACE=$(KUBE_NAMESPACE)
	@echo KUBECTL=$(KUBECTL)

	@echo "#CI"
	@echo CI_PROJECT_NAME=$(CI_PROJECT_NAME)
	@echo CI_REGISTRY=$(CI_REGISTRY)
	@echo CI_REGISTRY_USER=$(CI_REGISTRY_USER)
	@echo CI_COMMIT_REF_NAME=$(CI_COMMIT_REF_NAME)
	@echo "CREATED=$(CREATED)"

#
# Deployment and advanced features
#

deploy: ## Deploy to kubernetes using the credentials in KUBE_CA_PEM_FILE (or KUBE_CA_BUNDLE) and TOKEN
	@ls k8s/*.yaml k8s/*.yml k8s/*.tmpl 2>/dev/null || true
	@cat k8s/*.yaml k8s/*.yml k8s/*.tmpl 2>/dev/null | envsubst | $(KUBECTL) apply -f -

deploy-check: ## Get the deployed configuration.
	@$(KUBECTL) get deploy,pods,svc,ingress

.PHONY:: info deploy deploy-check
@@ -1,17 +0,0 @@
makefiles-remote:
	@git remote add makefiles ssh://git@lab.cluster.gsi.dit.upm.es:2200/docs/templates/makefiles.git 2>/dev/null || true

makefiles-commit: makefiles-remote
	git add -f .makefiles
	git commit -em "Updated makefiles from ${NAME}"

makefiles-push:
	git subtree push --prefix=.makefiles/ makefiles $(NAME)

makefiles-pull: makefiles-remote
	git subtree pull --prefix=.makefiles/ makefiles master --squash

pull:: makefiles-pull
push:: makefiles-push

.PHONY:: makefiles-remote makefiles-commit makefiles-push makefiles-pull pull push
@@ -1,5 +0,0 @@
init: ## Init pre-commit hooks (i.e. enforcing format checking before allowing a commit)
	pip install --user pre-commit
	pre-commit install

.PHONY:: init
@@ -1,92 +0,0 @@
PYVERSIONS ?= 2.7
PYMAIN ?= $(firstword $(PYVERSIONS))
TARNAME ?= $(NAME)-$(VERSION).tar.gz

DEVPORT ?= 6000

yapf: ## Format python code
	yapf -i -r $(NAME)
	yapf -i -r tests

dockerfiles: $(addprefix Dockerfile-,$(PYVERSIONS)) ## Generate dockerfiles for each python version
	@unlink Dockerfile >/dev/null
	ln -s Dockerfile-$(PYMAIN) Dockerfile

Dockerfile-%: Dockerfile.template ## Generate a specific dockerfile (e.g. Dockerfile-2.7)
	sed "s/{{PYVERSION}}/$*/" Dockerfile.template > Dockerfile-$*

quick_build: $(addprefix build-, $(PYMAIN))

build: $(addprefix build-, $(PYVERSIONS)) ## Build all images / python versions

build-%: version Dockerfile-% ## Build a specific version (e.g. build-2.7)
	docker build -t '$(IMAGEWTAG)-python$*' --cache-from $(IMAGENAME):python$* -f Dockerfile-$* .;

dev-%: ## Launch a specific development environment using docker (e.g. dev-2.7)
	@docker start $(NAME)-dev$* || (\
	$(MAKE) build-$*; \
	docker run -d -w /usr/src/app/ -p $(DEVPORT):5000 -v $$PWD:/usr/src/app --entrypoint=/bin/bash -ti --name $(NAME)-dev$* '$(IMAGEWTAG)-python$*'; \
	)\

	docker exec -ti $(NAME)-dev$* bash

dev: dev-$(PYMAIN) ## Launch a development environment using docker, using the default python version

quick_test: test-$(PYMAIN)

test-%: ## Run setup.py in an isolated container, built from the base image. (e.g. test-2.7)
# This speeds tests up because the image has most (if not all) of the dependencies already.
	docker rm $(NAME)-test-$* || true
	docker create -ti --name $(NAME)-test-$* --entrypoint="" -w /usr/src/app/ $(IMAGENAME):python$* python setup.py test
	docker cp . $(NAME)-test-$*:/usr/src/app
	docker start -a $(NAME)-test-$*

test: $(addprefix test-,$(PYVERSIONS)) ## Run the tests with every python version

run-%: build-%
	docker run --rm -p $(DEVPORT):5000 -ti '$(IMAGEWTAG)-python$(PYMAIN)' --default-plugins

run: run-$(PYMAIN)

# PyPI - Upload a package

dist/$(TARNAME): version
	python setup.py sdist;

sdist: dist/$(TARNAME) ## Generate the distribution file (wheel)

pip_test-%: sdist ## Test the distribution file using pip install and a specific python version (e.g. pip_test-2.7)
	docker run --rm -v $$PWD/dist:/dist/ python:$* pip install /dist/$(TARNAME);

pip_test: $(addprefix pip_test-,$(PYVERSIONS)) ## Test pip installation with every python version

pip_upload: pip_test ## Upload the package to PyPI
	python setup.py sdist upload ;

# Pushing to docker

push-latest: $(addprefix push-latest-,$(PYVERSIONS)) ## Push the "latest" tag to dockerhub
	docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGEWTAG)'
	docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGENAME)'
	docker push '$(IMAGENAME):latest'
	docker push '$(IMAGEWTAG)'

push-latest-%: build-% ## Push the latest image for a specific python version
	docker tag $(IMAGENAME):$(VERSION)-python$* $(IMAGENAME):python$*
	docker push $(IMAGENAME):$(VERSION)-python$*
	docker push $(IMAGENAME):python$*

push-%: build-% ## Push the image of the current version (tagged). e.g. push-2.7
	docker push $(IMAGENAME):$(VERSION)-python$*

push:: $(addprefix push-,$(PYVERSIONS)) ## Push an image with the current version for every python version
	docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGEWTAG)'
	docker push $(IMAGENAME):$(VERSION)

clean:: ## Clean older docker images and containers related to this project and dev environments
	@docker stop $(addprefix $(NAME)-dev,$(PYVERSIONS)) 2>/dev/null || true
	@docker rm $(addprefix $(NAME)-dev,$(PYVERSIONS)) 2>/dev/null || true
	@docker ps -a | grep $(IMAGENAME) | awk '{ split($$2, vers, "-"); if(vers[0] != "${VERSION}"){ print $$1;}}' | xargs docker rm -v 2>/dev/null|| true
	@docker images | grep $(IMAGENAME) | awk '{ split($$2, vers, "-"); if(vers[0] != "${VERSION}"){ print $$1":"$$2;}}' | xargs docker rmi 2>/dev/null|| true

.PHONY:: yapf dockerfiles Dockerfile-% quick_build build build-% dev-% quick-dev test quick_test push-latest push-latest-% push-% push
Makefile (140 changed lines)
@@ -1,17 +1,147 @@
NAME=senpy
VERSION=$(shell git describe --tags --dirty 2>/dev/null)
GITHUB_REPO=git@github.com:gsi-upm/senpy.git

IMAGENAME=gsiupm/senpy
IMAGEWTAG=$(IMAGENAME):$(VERSION)

# The first version is the main one (used for quick builds)
# See .makefiles/python.mk for more info
PYVERSIONS=3.5 2.7
PYMAIN=$(firstword $(PYVERSIONS))

DEVPORT=5000

TARNAME=$(NAME)-$(VERSION).tar.gz
action="test-${PYMAIN}"
GITHUB_REPO=git@github.com:gsi-upm/senpy.git

include .makefiles/base.mk
include .makefiles/k8s.mk
include .makefiles/python.mk
KUBE_CA_PEM_FILE=""
KUBE_URL=""
KUBE_TOKEN=""
KUBE_NAMESPACE=$(NAME)
KUBECTL=docker run --rm -v $(KUBE_CA_PEM_FILE):/tmp/ca.pem -v $$PWD:/tmp/cwd/ -i lachlanevenson/k8s-kubectl --server="$(KUBE_URL)" --token="$(KUBE_TOKEN)" --certificate-authority="/tmp/ca.pem" -n $(KUBE_NAMESPACE)
CI_REGISTRY=docker.io
CI_REGISTRY_USER=gitlab
CI_BUILD_TOKEN=""
CI_COMMIT_REF_NAME=master


all: build run

.FORCE:

version: .FORCE
	@echo $(VERSION) > $(NAME)/VERSION
	@echo $(VERSION)

yapf:
	yapf -i -r $(NAME)
	yapf -i -r tests

init:
	pip install --user pre-commit
	pre-commit install

dockerfiles: $(addprefix Dockerfile-,$(PYVERSIONS))
	@unlink Dockerfile >/dev/null
	ln -s Dockerfile-$(PYMAIN) Dockerfile

Dockerfile-%: Dockerfile.template
	sed "s/{{PYVERSION}}/$*/" Dockerfile.template > Dockerfile-$*

quick_build: $(addprefix build-, $(PYMAIN))

build: $(addprefix build-, $(PYVERSIONS))

build-%: version Dockerfile-%
	docker build -t '$(IMAGEWTAG)-python$*' --cache-from $(IMAGENAME):python$* -f Dockerfile-$* .;

quick_test: $(addprefix test-,$(PYMAIN))

dev-%:
	@docker start $(NAME)-dev$* || (\
	$(MAKE) build-$*; \
	docker run -d -w /usr/src/app/ -p $(DEVPORT):5000 -v $$PWD:/usr/src/app --entrypoint=/bin/bash -ti --name $(NAME)-dev$* '$(IMAGEWTAG)-python$*'; \
	)\

	docker exec -ti $(NAME)-dev$* bash

dev: dev-$(PYMAIN)

test-all: $(addprefix test-,$(PYVERSIONS))

test-%:
	docker run --rm --entrypoint /usr/local/bin/python -w /usr/src/app $(IMAGENAME):python$* setup.py test

test: test-$(PYMAIN)

dist/$(TARNAME): version
	python setup.py sdist;

sdist: dist/$(TARNAME)

pip_test-%: sdist
	docker run --rm -v $$PWD/dist:/dist/ python:$* pip install /dist/$(TARNAME);

pip_test: $(addprefix pip_test-,$(PYVERSIONS))

pip_upload: pip_test
	python setup.py sdist upload ;

clean:
	@docker ps -a | grep $(IMAGENAME) | awk '{ split($$2, vers, "-"); if(vers[0] != "${VERSION}"){ print $$1;}}' | xargs docker rm -v 2>/dev/null|| true
	@docker images | grep $(IMAGENAME) | awk '{ split($$2, vers, "-"); if(vers[0] != "${VERSION}"){ print $$1":"$$2;}}' | xargs docker rmi 2>/dev/null|| true
	@docker stop $(addprefix $(NAME)-dev,$(PYVERSIONS)) 2>/dev/null || true
	@docker rm $(addprefix $(NAME)-dev,$(PYVERSIONS)) 2>/dev/null || true

git_commit:
	git commit -a

git_tag:
	git tag ${VERSION}

git_push:
	git push --tags origin master

run-%: build-%
	docker run --rm -p $(DEVPORT):5000 -ti '$(IMAGEWTAG)-python$(PYMAIN)' --default-plugins

run: run-$(PYMAIN)

push-latest: $(addprefix push-latest-,$(PYVERSIONS))
	docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGEWTAG)'
	docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGENAME)'
	docker push '$(IMAGENAME):latest'
	docker push '$(IMAGEWTAG)'

push-latest-%: build-%
	docker tag $(IMAGENAME):$(VERSION)-python$* $(IMAGENAME):python$*
	docker push $(IMAGENAME):$(VERSION)-python$*
	docker push $(IMAGENAME):python$*

push-%: build-%
	docker push $(IMAGENAME):$(VERSION)-python$*

push: $(addprefix push-,$(PYVERSIONS))
	docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGEWTAG)'
	docker push $(IMAGENAME):$(VERSION)

push-github:
	$(eval KEY_FILE := $(shell mktemp))
	@echo "$$GITHUB_DEPLOY_KEY" > $(KEY_FILE)
	@git remote rm github-deploy || true
	git remote add github-deploy $(GITHUB_REPO)
	@GIT_SSH_COMMAND="ssh -i $(KEY_FILE)" git fetch github-deploy $(CI_COMMIT_REF_NAME) || true
	@GIT_SSH_COMMAND="ssh -i $(KEY_FILE)" git push github-deploy $(CI_COMMIT_REF_NAME)
	rm $(KEY_FILE)

ci:
	gitlab-runner exec docker --docker-volumes /var/run/docker.sock:/var/run/docker.sock --env CI_PROJECT_NAME=$(NAME) ${action}

deploy:
	@$(KUBECTL) delete secret $(CI_REGISTRY) || true
	@$(KUBECTL) create secret docker-registry $(CI_REGISTRY) --docker-server=$(CI_REGISTRY) --docker-username=$(CI_REGISTRY_USER) --docker-email=$(CI_REGISTRY_USER) --docker-password=$(CI_BUILD_TOKEN)
	@$(KUBECTL) apply -f /tmp/cwd/k8s/

.PHONY: test test-% test-all build-% build test pip_test run yapf push-main push-% dev ci version .FORCE deploy
@@ -1,5 +1,5 @@
.. image:: img/header.png
   :width: 100%
   :height: 6em
   :target: http://demos.gsi.dit.upm.es/senpy

.. image:: https://travis-ci.org/gsi-upm/senpy.svg?branch=master
@@ -1,4 +1,4 @@
{
  "@type": "plugins",
  "plugins": {}
  "plugins": [
  ]
}
@@ -8,7 +8,8 @@
    "me:EmotionAnalysis1",
    "me:NER1",
    {
      "description": "missing @id and @type"
      "@type": "analysis",
      "@id": "wrong"
    }
  ],
  "entries": [
@@ -1,78 +0,0 @@
{
  "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
  "@id": "me:Result1",
  "@type": "results",
  "analysis": [
    "me:SAnalysis1",
    "me:SgAnalysis1",
    "me:EmotionAnalysis1",
    "me:NER1",
    {
      "@type": "analysis",
      "@id": "anonymous"
    }
  ],
  "entries": [
    {
      "@id": "http://micro.blog/status1",
      "@type": [
        "nif:RFC5147String",
        "nif:Context"
      ],
      "nif:isString": "Dear Microsoft, put your Windows Phone on your newest #open technology program. You'll be awesome. #opensource",
      "entities": [
        {
          "@id": "http://micro.blog/status1#char=5,13",
          "nif:beginIndex": 5,
          "nif:endIndex": 13,
          "nif:anchorOf": "Microsoft",
          "me:references": "http://dbpedia.org/page/Microsoft",
          "prov:wasGeneratedBy": "me:NER1"
        },
        {
          "@id": "http://micro.blog/status1#char=25,37",
          "nif:beginIndex": 25,
          "nif:endIndex": 37,
          "nif:anchorOf": "Windows Phone",
          "me:references": "http://dbpedia.org/page/Windows_Phone",
          "prov:wasGeneratedBy": "me:NER1"
        }
      ],
      "suggestions": [
        {
          "@id": "http://micro.blog/status1#char=16,77",
          "nif:beginIndex": 16,
          "nif:endIndex": 77,
          "nif:anchorOf": "put your Windows Phone on your newest #open technology program",
          "prov:wasGeneratedBy": "me:SgAnalysis1"
        }
      ],
      "sentiments": [
        {
          "@id": "http://micro.blog/status1#char=80,97",
          "nif:beginIndex": 80,
          "nif:endIndex": 97,
          "nif:anchorOf": "You'll be awesome.",
          "marl:hasPolarity": "marl:Positive",
          "marl:polarityValue": 0.9,
          "prov:wasGeneratedBy": "me:SAnalysis1"
        }
      ],
      "emotions": [
        {
          "@id": "http://micro.blog/status1#char=0,109",
          "nif:anchorOf": "Dear Microsoft, put your Windows Phone on your newest #open technology program. You'll be awesome. #opensource",
          "prov:wasGeneratedBy": "me:EAnalysis1",
          "onyx:hasEmotion": [
            {
              "onyx:hasEmotionCategory": "wna:liking"
            },
            {
              "onyx:hasEmotionCategory": "wna:excitement"
            }
          ]
        }
      ]
    }
  ]
}
@@ -3,7 +3,10 @@
  "@id": "me:Result1",
  "@type": "results",
  "analysis": [
    "me:SgAnalysis1"
    {
      "@id": "me:SgAnalysis1",
      "@type": "me:SuggestionAnalysis"
    }
  ],
  "entries": [
    {
@@ -2,7 +2,6 @@ Flask>=0.10.1
requests>=2.4.1
tornado>=4.4.3
PyLD>=0.6.5
nltk
six
future
jsonschema
@@ -25,7 +25,6 @@ from senpy.extensions import Senpy

import logging
import os
import sys
import argparse
import senpy

@@ -75,12 +74,6 @@ def main():
        action='store_true',
        default=False,
        help='Do not run a server, only install plugin dependencies')
    parser.add_argument(
        '--data-folder',
        '--data',
        type=str,
        default=None,
        help='Where to look for data. It can be set with the SENPY_DATA environment variable as well.')
    parser.add_argument(
        '--threaded',
        action='store_false',
@@ -95,18 +88,15 @@ def main():
    args = parser.parse_args()
    if args.version:
        print('Senpy version {}'.format(senpy.__version__))
        print(sys.version)
        exit(1)
    logging.basicConfig()
    rl = logging.getLogger()
    rl.setLevel(getattr(logging, args.level))
    app = Flask(__name__)
    app.debug = args.debug
    sp = Senpy(app, args.plugins_folder,
               default_plugins=args.default_plugins,
               data_folder=args.data_folder)
    sp.install_deps()
    sp = Senpy(app, args.plugins_folder, default_plugins=args.default_plugins)
    if args.only_install:
        sp.install_deps()
        return
    sp.activate_all()
    print('Senpy version {}'.format(senpy.__version__))
senpy/api.py (147 changed lines)
@@ -1,55 +1,38 @@
from future.utils import iteritems
from .models import Error, Results, Entry, from_string
from .models import Error
import logging
logger = logging.getLogger(__name__)

API_PARAMS = {
    "algorithm": {
        "aliases": ["algorithms", "a", "algo"],
        "aliases": ["algorithm", "a", "algo"],
        "required": False,
        "description": ("Algorithms that will be used to process the request."
                        "It may be a list of comma-separated names."),
    },
    "expanded-jsonld": {
        "@id": "expanded-jsonld",
        "aliases": ["expanded"],
        "required": True,
        "default": 0
    },
    "with_parameters": {
        "aliases": ['withparameters',
                    'with-parameters'],
        "options": "boolean",
        "default": False,
        "required": True
    },
    "plugin_type": {
        "@id": "pluginType",
        "description": 'What kind of plugins to list',
        "aliases": ["pluginType"],
        "required": True,
        "default": "analysisPlugin"
    },
    "outformat": {
        "@id": "outformat",
        "aliases": ["o"],
        "aliases": ["outformat", "o"],
        "default": "json-ld",
        "required": True,
        "options": ["json-ld", "turtle"],
    },
    "help": {
        "@id": "help",
        "description": "Show additional help to know more about the possible parameters",
        "aliases": ["h"],
    "expanded-jsonld": {
        "@id": "expanded-jsonld",
        "aliases": ["expanded", "expanded-jsonld"],
        "required": True,
        "options": "boolean",
        "default": False
        "default": 0
    },
    "emotionModel": {
        "@id": "emotionModel",
        "aliases": ["emoModel"],
        "aliases": ["emotionModel", "emoModel"],
        "required": False
    },
    "plugin_type": {
        "@id": "pluginType",
        "description": 'What kind of plugins to list',
        "aliases": ["pluginType", "plugin_type"],
        "required": True,
        "default": "analysisPlugin"
    },
    "conversion": {
        "@id": "conversion",
        "description": "How to show the elements that have (not) been converted",
@@ -61,16 +44,15 @@ API_PARAMS = {

WEB_PARAMS = {
    "inHeaders": {
        "aliases": ["headers"],
        "aliases": ["inHeaders", "headers"],
        "required": True,
        "default": False,
        "options": "boolean"
        "default": "0"
    },
}

CLI_PARAMS = {
    "plugin_folder": {
        "aliases": ["folder"],
        "aliases": ["plugin_folder", "folder"],
        "required": True,
        "default": "."
    },
@@ -79,71 +61,64 @@ CLI_PARAMS = {
NIF_PARAMS = {
    "input": {
        "@id": "input",
        "aliases": ["i"],
        "aliases": ["i", "input"],
        "required": True,
        "help": "Input text"
    },
    "intype": {
        "@id": "intype",
        "aliases": ["t"],
        "required": False,
        "default": "direct",
        "options": ["direct", "url", "file"],
    },
    "informat": {
        "@id": "informat",
        "aliases": ["f"],
        "aliases": ["f", "informat"],
        "required": False,
        "default": "text",
        "options": ["turtle", "text", "json-ld"],
    },
    "intype": {
        "@id": "intype",
        "aliases": ["intype", "t"],
        "required": False,
        "default": "direct",
        "options": ["direct", "url", "file"],
    },
    "language": {
        "@id": "language",
        "aliases": ["l"],
        "aliases": ["language", "l"],
        "required": False,
    },
    "prefix": {
        "@id": "prefix",
        "aliases": ["p"],
        "aliases": ["prefix", "p"],
        "required": True,
        "default": "",
    },
    "urischeme": {
        "@id": "urischeme",
        "aliases": ["u"],
        "aliases": ["urischeme", "u"],
        "required": False,
        "default": "RFC5147String",
        "options": "RFC5147String"
    }
    },
}


def parse_params(indict, *specs):
    if not specs:
        specs = [NIF_PARAMS]
    logger.debug("Parsing: {}\n{}".format(indict, specs))
def parse_params(indict, spec=NIF_PARAMS):
    logger.debug("Parsing: {}\n{}".format(indict, spec))
    outdict = indict.copy()
    wrong_params = {}
    for spec in specs:
        for param, options in iteritems(spec):
            if param[0] != "@":  # Exclude json-ld properties
                for alias in options.get("aliases", []):
                    # Replace each alias with the correct name of the parameter
                    if alias in indict and alias is not param:
                        outdict[param] = indict[alias]
                        del indict[alias]
                        continue
                if param not in outdict:
                    if options.get("required", False) and "default" not in options:
                        wrong_params[param] = spec[param]
                    else:
                        if "default" in options:
                            outdict[param] = options["default"]
                elif "options" in spec[param]:
                    if spec[param]["options"] == "boolean":
                        outdict[param] = outdict[param] in [None, True, 'true', '1']
                    elif outdict[param] not in spec[param]["options"]:
                        wrong_params[param] = spec[param]
    for param, options in iteritems(spec):
        if param[0] != "@":  # Exclude json-ld properties
            for alias in options.get("aliases", []):
                if alias in indict:
                    outdict[param] = indict[alias]
            if param not in outdict:
                if options.get("required", False) and "default" not in options:
                    wrong_params[param] = spec[param]
                else:
                    if "default" in options:
                        outdict[param] = options["default"]
            else:
                if "options" in spec[param] and \
                        outdict[param] not in spec[param]["options"]:
                    wrong_params[param] = spec[param]
    if wrong_params:
        logger.debug("Error parsing: %s", wrong_params)
        message = Error(
@@ -153,30 +128,4 @@ def parse_params(indict, *specs):
            errors={param: error
                    for param, error in iteritems(wrong_params)})
        raise message
    if 'algorithm' in outdict and not isinstance(outdict['algorithm'], list):
        outdict['algorithm'] = outdict['algorithm'].split(',')
    return outdict


def get_extra_params(request, plugin=None):
    params = request.parameters.copy()
    if plugin:
        extra_params = parse_params(params, plugin.get('extra_params', {}))
        params.update(extra_params)
    return params


def parse_call(params):
    '''Return a results object based on the parameters used in a call/request.
    '''
    params = parse_params(params, NIF_PARAMS)
    if params['informat'] == 'text':
        results = Results()
        entry = Entry(nif__isString=params['input'])
        results.entries.append(entry)
    elif params['informat'] == 'json-ld':
        results = from_string(params['input'], cls=Results)
    else:
        raise NotImplemented('Informat {} is not implemented'.format(params['informat']))
    results.parameters = params
    return results
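To make the parameter handling in this diff concrete, here is a minimal, runnable sketch of the alias and default resolution that parse_params performs. The spec and values below are made up for illustration; they are not senpy's real NIF_PARAMS.

```
# Minimal sketch of the alias/default resolution in parse_params.
# TOY_SPEC is a hypothetical example, not senpy's actual parameter spec.
TOY_SPEC = {
    "input": {"aliases": ["i"], "required": True},
    "informat": {"aliases": ["f"], "required": False,
                 "default": "text", "options": ["text", "json-ld"]},
}

def toy_parse(indict, spec=TOY_SPEC):
    out = indict.copy()
    for param, options in spec.items():
        # Copy values given under an alias to the canonical name.
        for alias in options.get("aliases", []):
            if alias in indict:
                out[param] = indict[alias]
        # Fill in defaults; complain about missing required params.
        if param not in out:
            if "default" in options:
                out[param] = options["default"]
            elif options.get("required", False):
                raise ValueError("missing parameter: " + param)
        elif "options" in options and out[param] not in options["options"]:
            raise ValueError("invalid value for " + param)
    return out

print(toy_parse({"i": "hello"}))
# {'i': 'hello', 'input': 'hello', 'informat': 'text'}
```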
@@ -19,8 +19,8 @@ Blueprints for Senpy
"""
from flask import (Blueprint, request, current_app, render_template, url_for,
                   jsonify)
from .models import Error, Response, Help, Plugins, read_schema
from . import api
from .models import Error, Response, Plugins, read_schema
from .api import WEB_PARAMS, API_PARAMS, parse_params
from .version import __version__
from functools import wraps

@@ -76,32 +76,35 @@ def basic_api(f):
    def decorated_function(*args, **kwargs):
        raw_params = get_params(request)
        headers = {'X-ORIGINAL-PARAMS': json.dumps(raw_params)}
        # Get defaults
        web_params = parse_params({}, spec=WEB_PARAMS)
        api_params = parse_params({}, spec=API_PARAMS)

        outformat = 'json-ld'
        try:
            print('Getting request:')
            print(request)
            params = api.parse_params(raw_params, api.WEB_PARAMS, api.API_PARAMS)
            if hasattr(request, 'parameters'):
                request.parameters.update(params)
            web_params = parse_params(raw_params, spec=WEB_PARAMS)
            api_params = parse_params(raw_params, spec=API_PARAMS)
            if hasattr(request, 'params'):
                request.params.update(api_params)
            else:
                request.parameters = params
                request.params = api_params
            response = f(*args, **kwargs)
        except Error as ex:
            response = ex
            response.parameters = params
            logger.error(ex)
            if current_app.debug:
                raise

        in_headers = params['inHeaders']
        expanded = params['expanded-jsonld']
        outformat = params['outformat']
        in_headers = web_params['inHeaders'] != "0"
        expanded = api_params['expanded-jsonld']
        outformat = api_params['outformat']

        return response.flask(
            in_headers=in_headers,
            headers=headers,
            prefix=url_for('.api_root', _external=True),
            prefix=url_for('.api', _external=True),
            context_uri=url_for('api.context',
                                entity=type(response).__name__,
                                _external=True),
@@ -113,22 +116,16 @@ def basic_api(f):

@api_blueprint.route('/', methods=['POST', 'GET'])
@basic_api
def api_root():
    if request.parameters['help']:
        dic = dict(api.API_PARAMS, **api.NIF_PARAMS)
        response = Help(parameters=dic)
        return response
    else:
        req = api.parse_call(request.parameters)
        response = current_app.senpy.analyse(req)
        return response
def api():
    response = current_app.senpy.analyse(**request.params)
    return response


@api_blueprint.route('/plugins/', methods=['POST', 'GET'])
@basic_api
def plugins():
    sp = current_app.senpy
    ptype = request.parameters.get('plugin_type')
    ptype = request.params.get('plugin_type')
    plugins = sp.filter_plugins(plugin_type=ptype)
    dic = Plugins(plugins=list(plugins.values()))
    return dic
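The basic_api change above boils down to a decorator that parses the incoming parameters and attaches them to the request object before calling the view. A simplified, self-contained sketch of that pattern (with_params and its spec are hypothetical stand-ins, not senpy's actual helpers):

```
from functools import wraps

from flask import Flask, request, jsonify

app = Flask(__name__)

def with_params(spec):
    """Toy decorator: merge defaults with query args and attach them
    to the request, mirroring the structure of basic_api above."""
    def decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            params = dict(spec)            # start from the defaults
            params.update(request.args)    # override with query args
            request.params = params        # hand them to the view
            return f(*args, **kwargs)
        return wrapper
    return decorator

@app.route('/')
@with_params({'outformat': 'json-ld'})
def root():
    return jsonify(request.params)
```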
senpy/cli.py (26 changed lines)
@@ -1,7 +1,7 @@
import sys
from .models import Error
from .api import parse_params, CLI_PARAMS
from .extensions import Senpy
from . import api


def argv_to_dict(argv):
@@ -13,27 +13,27 @@ def argv_to_dict(argv):
        if argv[i][0] == '-':
            key = argv[i].strip('-')
            value = argv[i + 1] if len(argv) > i + 1 else None
            if not value or value[0] == '-':
                cli_dict[key] = True
            if value and value[0] == '-':
                cli_dict[key] = ""
            else:
                cli_dict[key] = value
    return cli_dict


def parse_cli(argv):
    cli_dict = argv_to_dict(argv)
    cli_params = parse_params(cli_dict, spec=CLI_PARAMS)
    return cli_params, cli_dict


def main_function(argv):
    '''This is the method for unit testing
    '''
    params = api.parse_params(argv_to_dict(argv),
                              api.CLI_PARAMS,
                              api.API_PARAMS,
                              api.NIF_PARAMS)
    plugin_folder = params['plugin_folder']
    cli_params, cli_dict = parse_cli(argv)
    plugin_folder = cli_params['plugin_folder']
    sp = Senpy(default_plugins=False, plugin_folder=plugin_folder)
    request = api.parse_call(params)
    algos = request.parameters.get('algorithm', sp.plugins.keys())
    for algo in algos:
        sp.activate_plugin(algo)
    res = sp.analyse(request)
    sp.activate_all(sync=True)
    res = sp.analyse(**cli_dict)
    return res
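argv_to_dict, which both the old and new CLI paths rely on, maps flag/value pairs to a plain dict. A standalone sketch of the same logic, with example values that are purely illustrative:

```
def argv_to_dict(argv):
    """Toy version of the helper above: map --key value pairs to a dict;
    a flag followed by another flag (or by nothing) becomes a bare True."""
    cli_dict = {}
    for i in range(len(argv)):
        if argv[i][0] == '-':
            key = argv[i].strip('-')
            value = argv[i + 1] if len(argv) > i + 1 else None
            if not value or value[0] == '-':
                cli_dict[key] = True
            else:
                cli_dict[key] = value
    return cli_dict

print(argv_to_dict(['--input', 'hello', '--verbose']))
# {'input': 'hello', 'verbose': True}
```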
@@ -5,30 +5,41 @@ It orchestrates plugin (de)activation and analysis.
from future import standard_library
standard_library.install_aliases()

from . import plugins, api
from . import plugins
from .plugins import SenpyPlugin
from .models import Error
from .models import Error, Entry, Results, from_string
from .blueprints import api_blueprint, demo_blueprint, ns_blueprint
from .api import API_PARAMS, NIF_PARAMS, parse_params

from threading import Thread
from functools import partial

import os
import copy
import errno
import fnmatch
import inspect
import sys
import importlib
import logging
import traceback
import yaml
import subprocess

logger = logging.getLogger(__name__)


def log_subprocess_output(process):
    for line in iter(process.stdout.readline, b''):
        logger.info('%r', line)
    for line in iter(process.stderr.readline, b''):
        logger.error('%r', line)


class Senpy(object):
    """ Default Senpy extension for Flask """

    def __init__(self,
                 app=None,
                 plugin_folder=".",
                 data_folder=None,
                 default_plugins=False):
        self.app = app
        self._search_folders = set()
@@ -44,17 +55,6 @@ class Senpy(object):
        self.add_folder(os.path.join('plugins', 'conversion'),
                        from_root=True)

        self.data_folder = data_folder or os.environ.get('SENPY_DATA',
                                                         os.path.join(os.getcwd(),
                                                                      'senpy_data'))
        try:
            os.makedirs(self.data_folder)
        except OSError as e:
            if e.errno == errno.EEXIST:
                print('Directory not created.')
            else:
                raise

        if app is not None:
            self.init_app(app)

@@ -85,20 +85,22 @@ class Senpy(object):
        else:
            logger.debug("Not a folder: %s", folder)

    def _get_plugins(self, request):
    def _find_plugins(self, params):
        if not self.analysis_plugins:
            raise Error(
                status=404,
                message=("No plugins found."
                         " Please install one."))
        algos = request.parameters.get('algorithm', None)
        if not algos:
            if self.default_plugin:
                algos = [self.default_plugin.name, ]
            else:
                raise Error(
                    status=404,
                    message="No default plugin found, and None provided")
        api_params = parse_params(params, spec=API_PARAMS)
        algos = None
        if "algorithm" in api_params and api_params["algorithm"]:
            algos = api_params["algorithm"].split(',')
        elif self.default_plugin:
            algos = [self.default_plugin.name, ]
        else:
            raise Error(
                status=404,
                message="No default plugin found, and None provided")

        plugins = list()
        for algo in algos:
@@ -109,58 +111,76 @@ class Senpy(object):
                raise Error(
                    status=404,
                    message="The algorithm '{}' is not valid".format(algo))

            if not self.plugins[algo].is_activated:
                logger.debug("Plugin not activated: {}".format(algo))
                raise Error(
                    status=400,
                    message=("The algorithm '{}'"
                             " is not activated yet").format(algo))
            plugins.append(self.plugins[algo])
        return plugins

    def _process_entries(self, entries, req, plugins):
        """
        Recursively process the entries with the first plugin in the list, and pass the results
        to the rest of the plugins.
        """
    def _get_params(self, params, plugin=None):
        nif_params = parse_params(params, spec=NIF_PARAMS)
        if plugin:
            extra_params = plugin.get('extra_params', {})
            specific_params = parse_params(params, spec=extra_params)
            nif_params.update(specific_params)
        return nif_params

    def _get_entries(self, params):
        if params['informat'] == 'text':
            results = Results()
            entry = Entry(text=params['input'])
            results.entries.append(entry)
        elif params['informat'] == 'json-ld':
            results = from_string(params['input'], cls=Results)
        else:
            raise NotImplemented('Informat {} is not implemented'.format(params['informat']))
        return results

    def _process_entries(self, entries, plugins, nif_params):
        if not plugins:
            for i in entries:
                yield i
            return
        plugin = plugins[0]
        self._activate(plugin)  # Make sure the plugin is activated
        specific_params = api.get_extra_params(req, plugin)
        req.analysis.append({'plugin': plugin,
                             'parameters': specific_params})
        specific_params = self._get_params(nif_params, plugin)
        results = plugin.analyse_entries(entries, specific_params)
        for i in self._process_entries(results, req, plugins[1:]):
        for i in self._process_entries(results, plugins[1:], nif_params):
            yield i

    def install_deps(self):
        for plugin in self.filter_plugins(is_activated=True):
            plugins.install_deps(plugin)
    def _process_response(self, resp, plugins, nif_params):
        entries = resp.entries
        resp.entries = []
        for plug in plugins:
            resp.analysis.append(plug.id)
        for i in self._process_entries(entries, plugins, nif_params):
            resp.entries.append(i)
        return resp

    def analyse(self, request):
    def analyse(self, **api_params):
        """
        Main method that analyses a request, either from CLI or HTTP.
        It takes a processed request, provided by the user, as returned
        by api.parse_call().
        It uses a dictionary of parameters, provided by the user.
        """
        logger.debug("analysing request: {}".format(request))
        logger.debug("analysing with params: {}".format(api_params))
        plugins = self._find_plugins(api_params)
        nif_params = self._get_params(api_params)
        resp = self._get_entries(nif_params)
        if 'with_parameters' in api_params:
            resp.parameters = nif_params
        try:
            entries = request.entries
            request.entries = []
            plugins = self._get_plugins(request)
            results = request
            for i in self._process_entries(entries, results, plugins):
                results.entries.append(i)
            self.convert_emotions(results)
            if 'with_parameters' not in results.parameters:
                del results.parameters
            logger.debug("Returning analysis result: {}".format(results))
            resp = self._process_response(resp, plugins, nif_params)
            self.convert_emotions(resp, plugins, nif_params)
            logger.debug("Returning analysis result: {}".format(resp))
        except (Error, Exception) as ex:
            if not isinstance(ex, Error):
                msg = "Error during analysis: {} \n\t{}".format(ex,
                                                                traceback.format_exc())
                ex = Error(message=msg, status=500)
                ex = Error(message=str(ex), status=500)
            logger.exception('Error returning analysis result')
            raise ex
        results.analysis = [i['plugin'].id for i in results.analysis]
        return results
        return resp

    def _conversion_candidates(self, fromModel, toModel):
        candidates = self.filter_plugins(plugin_type='emotionConversionPlugin')
@@ -173,7 +193,7 @@ class Senpy(object):
                # logging.debug('Found candidate: {}'.format(candidate))
                yield candidate

    def convert_emotions(self, resp):
    def convert_emotions(self, resp, plugins, params):
        """
        Conversion of all emotions in a response **in place**.
        In addition to converting from one model to another, it has
@@ -181,8 +201,6 @@ class Senpy(object):
        Needless to say, this is far from an elegant solution, but it works.
        @todo refactor and clean up
        """
        plugins = [i['plugin'] for i in resp.analysis]
        params = resp.parameters
        toModel = params.get('emotionModel', None)
        if not toModel:
            return
@@ -210,8 +228,7 @@ class Senpy(object):
            for j in i.emotions:
                plugname = j['prov:wasGeneratedBy']
                candidate = candidates[plugname]
                resp.analysis.append({'plugin': candidate,
                                      'parameters': params})
                resp.analysis.append(candidate.id)
                for k in candidate.convert(j, fromModel, toModel, params):
                    k.prov__wasGeneratedBy = candidate.id
                    if output == 'nested':
@@ -220,6 +237,7 @@ class Senpy(object):
            i.emotions = newemotions
            newentries.append(i)
        resp.entries = newentries
        resp.analysis = list(set(resp.analysis))

    @property
    def default_plugin(self):
@@ -239,42 +257,25 @@ class Senpy(object):
        else:
            self._default = self.plugins[value]

    def activate_all(self, sync=True):
    def activate_all(self, sync=False):
        ps = []
        for plug in self.plugins.keys():
            ps.append(self.activate_plugin(plug, sync=sync))
        return ps

    def deactivate_all(self, sync=True):
    def deactivate_all(self, sync=False):
        ps = []
        for plug in self.plugins.keys():
            ps.append(self.deactivate_plugin(plug, sync=sync))
        return ps

    def _set_active(self, plugin, active=True, *args, **kwargs):
    def _set_active_plugin(self, plugin_name, active=True, *args, **kwargs):
        ''' We're using a variable in the plugin itself to activate/deactivate plugins.\
        Note that plugins may activate themselves by setting this variable.
        '''
        plugin.is_activated = active
        self.plugins[plugin_name].is_activated = active

    def _activate(self, plugin):
        success = False
        with plugin._lock:
            if plugin.is_activated:
                return
            try:
                plugin.activate()
                msg = "Plugin activated: {}".format(plugin.name)
                logger.info(msg)
                success = True
                self._set_active(plugin, success)
            except Exception as ex:
                msg = "Error activating plugin {} - {} : \n\t{}".format(
                    plugin.name, ex, traceback.format_exc())
                logger.error(msg)
                raise Error(msg)

    def activate_plugin(self, plugin_name, sync=True):
    def activate_plugin(self, plugin_name, sync=False):
        try:
            plugin = self.plugins[plugin_name]
        except KeyError:
@@ -283,17 +284,37 @@ class Senpy(object):

        logger.info("Activating plugin: {}".format(plugin.name))

        def act():
            success = False
            try:
                plugin.activate()
                msg = "Plugin activated: {}".format(plugin.name)
                logger.info(msg)
                success = True
                self._set_active_plugin(plugin_name, success)
            except Exception as ex:
                msg = "Error activating plugin {} - {} : \n\t{}".format(
                    plugin.name, ex, traceback.format_exc())
                logger.error(msg)
                raise Error(msg)

        if sync or 'async' in plugin and not plugin.async:
            self._activate(plugin)
            act()
        else:
            th = Thread(target=partial(self._activate, plugin))
            th = Thread(target=act)
            th.start()
            return th

    def _deactivate(self, plugin):
        with plugin._lock:
            if not plugin.is_activated:
                return
    def deactivate_plugin(self, plugin_name, sync=False):
        try:
            plugin = self.plugins[plugin_name]
        except KeyError:
            raise Error(
                message="Plugin not found: {}".format(plugin_name), status=404)

        self._set_active_plugin(plugin_name, False)

        def deact():
            try:
                plugin.deactivate()
                logger.info("Plugin deactivated: {}".format(plugin.name))
@@ -302,22 +323,91 @@ class Senpy(object):
                    "Error deactivating plugin {}: {}".format(plugin.name, ex))
                logger.error("Trace: {}".format(traceback.format_exc()))

    def deactivate_plugin(self, plugin_name, sync=True):
        try:
            plugin = self.plugins[plugin_name]
        except KeyError:
            raise Error(
                message="Plugin not found: {}".format(plugin_name), status=404)

        self._set_active(plugin, False)

        if sync or 'async' in plugin and not plugin.async:
            self._deactivate(plugin)
            deact()
        else:
            th = Thread(target=partial(self._deactivate, plugin))
            th = Thread(target=deact)
            th.start()
            return th

    @classmethod
    def validate_info(cls, info):
        return all(x in info for x in ('name', 'module', 'description', 'version'))

    def install_deps(self):
        for i in self.plugins.values():
            self._install_deps(i)

    @classmethod
    def _install_deps(cls, info=None):
        requirements = info.get('requirements', [])
        if requirements:
            pip_args = ['pip']
            pip_args.append('install')
            pip_args.append('--use-wheel')
            for req in requirements:
                pip_args.append(req)
            logger.info('Installing requirements: ' + str(requirements))
            process = subprocess.Popen(pip_args,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE)
            log_subprocess_output(process)
            exitcode = process.wait()
            if exitcode != 0:
                raise Error("Dependencies not properly installed")

    @classmethod
    def _load_module(cls, name, root):
        sys.path.append(root)
        tmp = importlib.import_module(name)
        sys.path.remove(root)
        return tmp

    @classmethod
    def _load_plugin_from_info(cls, info, root):
        if not cls.validate_info(info):
            logger.warn('The module info is not valid.\n\t{}'.format(info))
            return None, None
        module = info["module"]
        name = info["name"]

        cls._install_deps(info)
        tmp = cls._load_module(module, root)

        candidate = None
        for _, obj in inspect.getmembers(tmp):
            if inspect.isclass(obj) and inspect.getmodule(obj) == tmp:
                logger.debug(("Found plugin class:"
                              " {}@{}").format(obj, inspect.getmodule(obj)))
                candidate = obj
                break
        if not candidate:
            logger.debug("No valid plugin for: {}".format(module))
            return
        module = candidate(info=info)
        return name, module

    @classmethod
    def _load_plugin(cls, root, filename):
        fpath = os.path.join(root, filename)
        logger.debug("Loading plugin: {}".format(fpath))
        with open(fpath, 'r') as f:
            info = yaml.load(f)
        logger.debug("Info: {}".format(info))
        return cls._load_plugin_from_info(info, root)

    def _load_plugins(self):
        plugins = {}
        for search_folder in self._search_folders:
            for root, dirnames, filenames in os.walk(search_folder):
                for filename in fnmatch.filter(filenames, '*.senpy'):
                    name, plugin = self._load_plugin(root, filename)
                    if plugin and name:
                        plugins[name] = plugin

        self._outdated = False
        return plugins

    def teardown(self, exception):
        pass

@@ -325,9 +415,7 @@ class Senpy(object):
    def plugins(self):
        """ Return the plugins registered for a given application. """
        if self._outdated:
            self._plugin_list = plugins.load_plugins(self._search_folders,
                                                     data_folder=self.data_folder)
            self._outdated = False
            self._plugin_list = self._load_plugins()
        return self._plugin_list

    def filter_plugins(self, **kwargs):
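The refactored _process_entries above keeps the same chained-generator design: each plugin lazily consumes the entries produced by the previous one. Reduced to its essentials (plain functions standing in for senpy plugins):

```
# Minimal sketch of the recursive pipeline in _process_entries:
# each stage consumes the previous stage's entries lazily.
def process(entries, stages):
    if not stages:
        for e in entries:
            yield e
        return
    first, rest = stages[0], stages[1:]
    transformed = (first(e) for e in entries)  # first stage runs lazily
    for e in process(transformed, rest):       # remaining stages recurse
        yield e

upper = str.upper
exclaim = lambda s: s + '!'
print(list(process(['hi', 'there'], [upper, exclaim])))
# ['HI!', 'THERE!']
```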
@@ -140,7 +140,7 @@ class SenpyMixin(object):
                    vp = item[kp]
                    temp[kp] = ser_or_down(vp)
                return temp
            elif isinstance(item, list) or isinstance(item, set):
            elif isinstance(item, list):
                return list(ser_or_down(i) for i in item)
            else:
                return item
@@ -181,10 +181,10 @@ class SenpyMixin(object):
        obj = self
        if hasattr(obj, "jsonld"):
            obj = obj.jsonld()
        self._validator.validate(obj)
        jsonschema.validate(obj, self.schema)

    def __str__(self):
        return str(self.serialize())
        return str(self.to_JSON())


class BaseModel(SenpyMixin, dict):
@@ -218,11 +218,12 @@ class BaseModel(SenpyMixin, dict):
        super(BaseModel, self).__init__(temp)

    def _get_key(self, key):
        if key is 'id':
            key = '@id'
        key = key.replace("__", ":", 1)
        return key

    def __setitem__(self, key, value):
        dict.__setitem__(self, key, value)

    def __delitem__(self, key):
        dict.__delitem__(self, key)

@@ -243,16 +244,18 @@ class BaseModel(SenpyMixin, dict):

    def _plain_dict(self):
        d = {k: v for (k, v) in self.items() if k[0] != "_"}
        if 'id' in d:
            d["@id"] = d.pop('id')
        return d


_subtypes = {}


def register(rsubclass, rtype=None):
    _subtypes[rtype or rsubclass.__name__] = rsubclass


_subtypes = {}


def from_dict(indict, cls=None):
    if not cls:
        target = indict.get('@type', None)
@@ -286,31 +289,15 @@ def from_json(injson):
    return from_dict(indict)


def from_schema(name, schema=None, schema_file=None, base_classes=None):
def from_schema(name, schema_file=None, base_classes=None):
    base_classes = base_classes or []
    base_classes.append(BaseModel)
    schema_file = schema_file or '{}.json'.format(name)
    class_name = '{}{}'.format(name[0].upper(), name[1:])
    if '/' not in 'schema_file':
        schema_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                   'schemas',
                                   schema_file)

    schema_path = 'file://' + schema_file

    with open(schema_file) as f:
        schema = json.load(f)

    dct = {}

    resolver = jsonschema.RefResolver(schema_path, schema)
    dct['@type'] = name
    dct['_schema_file'] = schema_file
    dct['schema'] = schema
    dct['_validator'] = jsonschema.Draft4Validator(schema, resolver=resolver)

    newclass = type(class_name, tuple(base_classes), dct)

    newclass = type(class_name, tuple(base_classes), {})
    setattr(newclass, '@type', name)
    setattr(newclass, 'schema', read_schema(schema_file))
    setattr(newclass, 'class_name', class_name)
    register(newclass, name)
    return newclass

@@ -331,7 +318,6 @@ for i in [
        'emotionPlugin',
        'emotionSet',
        'entry',
        'help',
        'plugin',
        'plugins',
        'response',
@@ -351,9 +337,6 @@ class Error(SenpyMixin, Exception):
        self._error = _ErrorModel(message=message, *args, **kwargs)
        self.message = message

    def validate(self, obj=None):
        self._error.validate()

    def __getitem__(self, key):
        return self._error[key]
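The from_schema rewrite above still builds model classes at runtime; only the schema loading and validator wiring moved. The core trick, creating a class with type() and attaching a schema for validation, can be sketched as follows (the inline schema is a toy example, not one of senpy's schema files):

```
import jsonschema  # assumed available, as in the diff above

# Toy schema instead of one read from senpy's schemas/ directory.
entry_schema = {"type": "object", "required": ["text"]}

def make_model(name, schema):
    """Create a dict-backed model class at runtime, as from_schema does."""
    def validate(self):
        jsonschema.validate(dict(self), self.schema)
    cls = type(name.capitalize(), (dict,), {'validate': validate})
    cls.schema = schema
    return cls

Entry = make_model('entry', entry_schema)
e = Entry(text='hello')
e.validate()  # passes; an Entry without 'text' would raise ValidationError
```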
@@ -1,30 +1,21 @@
from future import standard_library
standard_library.install_aliases()

import inspect
import os.path
import os
import pickle
import logging
import tempfile
import copy

import fnmatch
import inspect
import sys
import subprocess
import importlib
import yaml
import threading

from contextlib import contextmanager

from .. import models, utils
from .. import models
from ..api import API_PARAMS

logger = logging.getLogger(__name__)


class Plugin(models.Plugin):
    def __init__(self, info=None, data_folder=None):
    def __init__(self, info=None):
        """
        Provides a canonical name for plugins and serves as base for other
        kinds of plugins.
@@ -36,8 +27,6 @@ class Plugin(models.Plugin):
        id = 'plugins/{}_{}'.format(info['name'], info['version'])
        super(Plugin, self).__init__(id=id, **info)
        self.is_activated = False
        self._lock = threading.Lock()
        self.data_folder = data_folder or os.getcwd()

    def get_folder(self):
        return os.path.dirname(inspect.getfile(self.__class__))
@@ -48,28 +37,6 @@ class Plugin(models.Plugin):
    def deactivate(self):
        pass

    def test(self):
        if not hasattr(self, 'test_cases'):
            import inspect
            raise AttributeError(('Plugin {} [{}] does not have any defined '
                                  'test cases').format(self.id, inspect.getfile(self.__class__)))
        for case in self.test_cases:
            res = list(self.analyse_entry(models.Entry(case['entry']),
                                          case['params']))
            exp = case['expected']
            if not isinstance(exp, list):
                exp = [exp]
            utils.check_template(res, exp)
            for r in res:
                r.validate()

    @contextmanager
    def open(self, fpath, *args, **kwargs):
        if not os.path.isabs(fpath):
            fpath = os.path.join(self.data_folder, fpath)
        with open(fpath, *args, **kwargs) as f:
            yield f


SenpyPlugin = Plugin

@@ -87,7 +54,7 @@ class AnalysisPlugin(Plugin):
        Note that this method may yield an annotated entry or a list of
        entries (e.g. in a tokenizer)
        """
        text = entry['nif:isString']
        text = entry['text']
        params = copy.copy(parameters)
        params['input'] = text
        results = self.analyse(**params)
@@ -130,8 +97,7 @@ class ShelfMixin(object):
            self.__dict__['_sh'] = {}
            if os.path.isfile(self.shelf_file):
                try:
                    with self.open(self.shelf_file, 'rb') as p:
                        self.__dict__['_sh'] = pickle.load(p)
                    self.__dict__['_sh'] = pickle.load(open(self.shelf_file, 'rb'))
                except (IndexError, EOFError, pickle.UnpicklingError):
                    logger.warning('{} has a corrupted shelf file!'.format(self.id))
                    if not self.get('force_shelf', False):
@@ -148,13 +114,14 @@ class ShelfMixin(object):
    @property
    def shelf_file(self):
        if 'shelf_file' not in self or not self['shelf_file']:
            self.shelf_file = os.path.join(self.data_folder, self.name + '.p')
            sd = os.environ.get('SENPY_DATA', tempfile.gettempdir())
            self.shelf_file = os.path.join(sd, self.name + '.p')
        return self['shelf_file']

    def save(self):
        logger.debug('saving pickle')
        if hasattr(self, '_sh') and self._sh is not None:
            with self.open(self.shelf_file, 'wb') as f:
            with open(self.shelf_file, 'wb') as f:
                pickle.dump(self._sh, f)
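The `ShelfMixin` hunk above shows the persistence pattern both sides of the diff share: a lazily-loaded dict backed by a pickle file, with corrupted shelves discarded. A minimal standalone sketch of the same idea (class and variable names here are illustrative, not part of senpy):

```python
import os
import pickle
import tempfile


class ShelfSketch(object):
    """Sketch of the ShelfMixin pattern: a lazily-loaded,
    pickle-backed dict that survives restarts."""

    def __init__(self, name):
        self.name = name
        self._sh = None

    @property
    def shelf_file(self):
        # As in the newer side of the diff: default to SENPY_DATA,
        # falling back to the system temp dir.
        sd = os.environ.get('SENPY_DATA', tempfile.gettempdir())
        return os.path.join(sd, self.name + '.p')

    @property
    def sh(self):
        if self._sh is None:
            self._sh = {}
            if os.path.isfile(self.shelf_file):
                try:
                    with open(self.shelf_file, 'rb') as f:
                        self._sh = pickle.load(f)
                except (EOFError, pickle.UnpicklingError):
                    pass  # corrupted shelf: start from scratch
        return self._sh

    def save(self):
        with open(self.shelf_file, 'wb') as f:
            pickle.dump(self.sh, f)


s = ShelfSketch('demo')
s.sh['counter'] = s.sh.get('counter', 0) + 1
s.save()  # the counter now persists across runs
```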
@@ -193,101 +160,3 @@ def pfilter(plugins, **kwargs):
    if kwargs:
        candidates = filter(matches, candidates)
    return {p.name: p for p in candidates}


def validate_info(info):
    return all(x in info for x in ('name', 'module', 'description', 'version'))


def load_module(name, root=None):
    if root:
        sys.path.append(root)
    tmp = importlib.import_module(name)
    if root:
        sys.path.remove(root)
    return tmp


def log_subprocess_output(process):
    for line in iter(process.stdout.readline, b''):
        logger.info('%r', line)
    for line in iter(process.stderr.readline, b''):
        logger.error('%r', line)


def install_deps(*plugins):
    installed = False
    for info in plugins:
        requirements = info.get('requirements', [])
        if requirements:
            pip_args = [sys.executable, '-m', 'pip', 'install', '--use-wheel']
            for req in requirements:
                pip_args.append(req)
            logger.info('Installing requirements: ' + str(requirements))
            process = subprocess.Popen(pip_args,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE)
            log_subprocess_output(process)
            exitcode = process.wait()
            installed = True
            if exitcode != 0:
                raise models.Error("Dependencies not properly installed")
    return installed


def load_plugin_from_info(info, root=None, validator=validate_info, install=True, *args, **kwargs):
    if not root and '_path' in info:
        root = os.path.dirname(info['_path'])
    if not validator(info):
        raise ValueError('Plugin info is not valid: {}'.format(info))
    module = info["module"]

    try:
        tmp = load_module(module, root)
    except ImportError:
        if not install:
            raise
        install_deps(info)
        tmp = load_module(module, root)
    candidate = None
    for _, obj in inspect.getmembers(tmp):
        if inspect.isclass(obj) and inspect.getmodule(obj) == tmp:
            logger.debug(("Found plugin class:"
                          " {}@{}").format(obj, inspect.getmodule(obj)))
            candidate = obj
            break
    if not candidate:
        logger.debug("No valid plugin for: {}".format(module))
        return
    module = candidate(info=info, *args, **kwargs)
    return module


def parse_plugin_info(fpath):
    logger.debug("Loading plugin: {}".format(fpath))
    with open(fpath, 'r') as f:
        info = yaml.load(f)
    info['_path'] = fpath
    name = info['name']
    return name, info


def load_plugin(fpath, *args, **kwargs):
    name, info = parse_plugin_info(fpath)
    logger.debug("Info: {}".format(info))
    plugin = load_plugin_from_info(info, *args, **kwargs)
    return name, plugin


def load_plugins(folders, loader=load_plugin, *args, **kwargs):
    plugins = {}
    for search_folder in folders:
        for root, dirnames, filenames in os.walk(search_folder):
            # Do not look for plugins in hidden or special folders
            dirnames[:] = [d for d in dirnames if d[0] not in ['.', '_']]
            for filename in fnmatch.filter(filenames, '*.senpy'):
                fpath = os.path.join(root, filename)
                name, plugin = loader(fpath, *args, **kwargs)
                if plugin and name:
                    plugins[name] = plugin
    return plugins
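Taken together, the loader above walks the given folders for `*.senpy` descriptors, reads the YAML, imports the module it names, and instantiates the first class defined in that module. A hedged sketch of the expected layout and call (`myplugins/upper/` and its contents are made-up names for illustration):

```python
# myplugins/upper/upper.senpy -- YAML descriptor, one per plugin:
#
#   name: upper
#   module: upper
#   description: Uppercases the input
#   version: 0
#
# myplugins/upper/upper.py -- defines a senpy Plugin subclass.

from senpy import plugins

# Walks 'myplugins' recursively, skipping hidden/underscore dirs,
# and returns {plugin_name: plugin_instance}.
found = plugins.load_plugins(['myplugins'])
print(sorted(found))  # ['upper'], if the hypothetical descriptor above exists
```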
@@ -6,7 +6,7 @@ logger = logging.getLogger(__name__)


class CentroidConversion(EmotionConversionPlugin):
    def __init__(self, info, *args, **kwargs):
    def __init__(self, info):
        if 'centroids' not in info:
            raise Error('Centroid conversion plugins should provide '
                        'the centroids in their senpy file')
@@ -33,7 +33,7 @@ class CentroidConversion(EmotionConversionPlugin):
        ncentroids[aliases.get(k1, k1)] = nv1
        info['centroids'] = ncentroids

        super(CentroidConversion, self).__init__(info, *args, **kwargs)
        super(CentroidConversion, self).__init__(info)

        self.dimensions = set()
        for c in self.centroids.values():
@@ -100,54 +100,3 @@ class CentroidConversion(EmotionConversionPlugin):
        else:
            raise Error('EMOTION MODEL NOT KNOWN')
        yield e

    def test(self, info=None):
        if not info:
            info = {
                "name": "CentroidTest",
                "description": "Centroid test",
                "version": 0,
                "centroids": {
                    "c1": {"V1": 0.5,
                           "V2": 0.5},
                    "c2": {"V1": -0.5,
                           "V2": 0.5},
                    "c3": {"V1": -0.5,
                           "V2": -0.5},
                    "c4": {"V1": 0.5,
                           "V2": -0.5}},
                "aliases": {
                    "V1": "X-dimension",
                    "V2": "Y-dimension"
                },
                "centroids_direction": ["emoml:big6", "emoml:fsre-dimensions"]
            }

        c = CentroidConversion(info)

        es1 = EmotionSet()
        e1 = Emotion()
        e1.onyx__hasEmotionCategory = "c1"
        es1.onyx__hasEmotion.append(e1)
        res = c._forward_conversion(es1)
        assert res["X-dimension"] == 0.5
        assert res["Y-dimension"] == 0.5

        e2 = Emotion()
        e2.onyx__hasEmotionCategory = "c2"
        es1.onyx__hasEmotion.append(e2)
        res = c._forward_conversion(es1)
        assert res["X-dimension"] == 0
        assert res["Y-dimension"] == 1

        e = Emotion()
        e["X-dimension"] = -0.2
        e["Y-dimension"] = -0.3
        res = c._backwards_conversion(e)
        assert res["onyx:hasEmotionCategory"] == "c3"

        e = Emotion()
        e["X-dimension"] = -0.2
        e["Y-dimension"] = 0.3
        res = c._backwards_conversion(e)
        assert res["onyx:hasEmotionCategory"] == "c2"
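The inline test pins down the conversion arithmetic: `_forward_conversion` sums the centroid vectors of every category in the emotion set (c1 + c2 gives X = 0.5 - 0.5 = 0 and Y = 0.5 + 0.5 = 1), while `_backwards_conversion` maps a point back to a category. A standalone sketch that reproduces those assertions, assuming squared Euclidean distance for the backward step (the actual metric is not shown in this hunk):

```python
from collections import defaultdict

CENTROIDS = {
    'c1': {'X': 0.5, 'Y': 0.5},
    'c2': {'X': -0.5, 'Y': 0.5},
    'c3': {'X': -0.5, 'Y': -0.5},
    'c4': {'X': 0.5, 'Y': -0.5},
}


def forward(categories):
    # Sum the centroid vectors of every category in the emotion set.
    total = defaultdict(float)
    for cat in categories:
        for dim, value in CENTROIDS[cat].items():
            total[dim] += value
    return dict(total)


def backward(point):
    # Pick the category whose centroid is closest to the point.
    def dist(cat):
        return sum((point[d] - v) ** 2 for d, v in CENTROIDS[cat].items())
    return min(CENTROIDS, key=dist)


assert forward(['c1', 'c2']) == {'X': 0.0, 'Y': 1.0}
assert backward({'X': -0.2, 'Y': -0.3}) == 'c3'
assert backward({'X': -0.2, 'Y': 0.3}) == 'c2'
```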
@@ -2,51 +2,38 @@
name: Ekman2FSRE
module: senpy.plugins.conversion.emotion.centroids
description: Plugin to convert emotion sets from Ekman to VAD
version: 0.2
version: 0.1
# No need to specify onyx:doesConversion because centroids.py adds it automatically from centroids_direction
neutralValue: 5.0
centroids:
  anger:
    A: 6.95
    D: 5.1
    V: 2.7
    S: 5.0
  disgust:
    A: 5.3
    D: 8.05
    V: 2.7
    S: 5.0
  fear:
    A: 6.5
    D: 3.6
    V: 3.2
    S: 5.0
  happiness:
    A: 7.22
    D: 6.28
    V: 8.6
    S: 5.0
  sadness:
    A: 5.21
    D: 2.82
    V: 2.21
    S: 5.0
  surprise:
    A: 5.0
    D: 5.0
    V: 5.0
    S: 10.0
centroids_direction:
  - emoml:big6
  - emoml:fsre-dimensions
aliases: # These are aliases for any key in the centroid, to avoid repeating a long name several times
  A: emoml:fsre-dimensions_arousal
  V: emoml:fsre-dimensions_valence
  D: emoml:fsre-dimensions_potency
  S: emoml:fsre-dimensions_unpredictability
  A: emoml:arousal
  V: emoml:valence
  D: emoml:dominance
  anger: emoml:big6anger
  disgust: emoml:big6disgust
  fear: emoml:big6fear
  happiness: emoml:big6happiness
  sadness: emoml:big6sadness
  surprise: emoml:big6surprise
  sadness: emoml:big6sadness
@@ -2,9 +2,13 @@
name: Ekman2PAD
module: senpy.plugins.conversion.emotion.centroids
description: Plugin to convert emotion sets from Ekman to VAD
version: 0.2
version: 0.1
# No need to specify onyx:doesConversion because centroids.py adds it automatically from centroids_direction
neutralValue: 5.0
origin:
  # Point in VAD space with no emotion (aka Neutral)
  A: 5.0
  D: 5.0
  V: 5.0
centroids:
  anger:
    A: 6.95
@@ -30,9 +34,9 @@ centroids_direction:
  - emoml:big6
  - emoml:pad
aliases: # These are aliases for any key in the centroid, to avoid repeating a long name several times
  A: emoml:pad-dimensions:arousal
  V: emoml:pad-dimensions:pleasure
  D: emoml:pad-dimensions:dominance
  A: emoml:arousal
  V: emoml:valence
  D: emoml:dominance
  anger: emoml:big6anger
  disgust: emoml:big6disgust
  fear: emoml:big6fear
@@ -1,7 +1,7 @@
import random

from senpy.plugins import EmotionPlugin
from senpy.models import EmotionSet, Emotion, Entry
from senpy.models import EmotionSet, Emotion


class RmoRandPlugin(EmotionPlugin):
@@ -16,11 +16,3 @@ class RmoRandPlugin(EmotionPlugin):
        emotionSet.prov__wasGeneratedBy = self.id
        entry.emotions.append(emotionSet)
        yield entry

    def test(self):
        params = dict()
        results = list()
        for i in range(100):
            res = next(self.analyse_entry(Entry(nif__isString="Hello"), params))
            res.validate()
            results.append(res.emotions[0]['onyx:hasEmotion'][0]['onyx:hasEmotionCategory'])
@@ -1,7 +1,7 @@
import random

from senpy.plugins import SentimentPlugin
from senpy.models import Sentiment, Entry
from senpy.models import Sentiment


class RandPlugin(SentimentPlugin):
@@ -22,13 +22,3 @@ class RandPlugin(SentimentPlugin):
        entry.sentiments.append(sentiment)
        entry.language = lang
        yield entry

    def test(self):
        params = dict()
        results = list()
        for i in range(100):
            res = next(self.analyse_entry(Entry(nif__isString="Hello"), params))
            res.validate()
            results.append(res.sentiments[0]['marl:hasPolarity'])
        assert 'marl:Positive' in results
        assert 'marl:Negative' in results
@@ -1,65 +0,0 @@
from senpy.plugins import AnalysisPlugin
from senpy.models import Entry
from nltk.tokenize.punkt import PunktSentenceTokenizer
from nltk.tokenize.simple import LineTokenizer
import nltk


class SplitPlugin(AnalysisPlugin):

    def activate(self):
        nltk.download('punkt')

    def analyse_entry(self, entry, params):
        chunker_type = params.get("delimiter", "sentence")
        original_text = entry['nif:isString']
        if chunker_type == "sentence":
            tokenizer = PunktSentenceTokenizer()
        if chunker_type == "paragraph":
            tokenizer = LineTokenizer()
        chars = list(tokenizer.span_tokenize(original_text))
        for i, chunk in enumerate(tokenizer.tokenize(original_text)):
            print(chunk)
            e = Entry()
            e['nif:isString'] = chunk
            if entry.id:
                e.id = entry.id + "#char={},{}".format(chars[i][0], chars[i][1])
            yield e

    test_cases = [
        {
            'entry': {
                'nif:isString': 'Hello. World.'
            },
            'params': {
                'delimiter': 'sentence',
            },
            'expected': [
                {
                    'nif:isString': 'Hello.'
                },
                {
                    'nif:isString': 'World.'
                }
            ]
        },
        {
            'entry': {
                "id": ":test",
                'nif:isString': 'Hello\nWorld'
            },
            'params': {
                'delimiter': 'paragraph',
            },
            'expected': [
                {
                    "@id": ":test#char=0,5",
                    'nif:isString': 'Hello'
                },
                {
                    "@id": ":test#char=6,11",
                    'nif:isString': 'World'
                }
            ]
        }
    ]
@@ -12,7 +12,7 @@ class Sentiment140Plugin(SentimentPlugin):
            json.dumps({
                "language": lang,
                "data": [{
                    "text": entry['nif:isString']
                    "text": entry.text
                }]
            }))
        p = params.get("prefix", None)
@@ -34,20 +34,3 @@ class Sentiment140Plugin(SentimentPlugin):
        entry.sentiments.append(sentiment)
        entry.language = lang
        yield entry

    test_cases = [
        {
            'entry': {
                'nif:isString': 'I love Titanic'
            },
            'params': {},
            'expected': {
                "nif:isString": "I love Titanic",
                'sentiments': [
                    {
                        'marl:hasPolarity': 'marl:Positive',
                    }
                ]
            }
        }
    ]
senpy/plugins/split/split.py (new file, 30 lines)
@@ -0,0 +1,30 @@
from senpy.plugins import AnalysisPlugin
from senpy.models import Entry
from nltk.tokenize.punkt import PunktSentenceTokenizer
from nltk.tokenize.simple import LineTokenizer
import nltk


class SplitPlugin(AnalysisPlugin):
    def activate(self):
        nltk.download('punkt')

    def analyse_entry(self, entry, params):
        chunker_type = params.get("delimiter", "sentence")
        original_id = entry.id
        original_text = entry.get("text", None)
        if chunker_type == "sentence":
            tokenizer = PunktSentenceTokenizer()
            chars = tokenizer.span_tokenize(original_text)
            for i, sentence in enumerate(tokenizer.tokenize(original_text)):
                e = Entry()
                e.text = sentence
                e.id = original_id + "#char={},{}".format(chars[i][0], chars[i][1])
                yield e
        if chunker_type == "paragraph":
            tokenizer = LineTokenizer()
            chars = tokenizer.span_tokenize(original_text)
            for i, paragraph in enumerate(tokenizer.tokenize(original_text)):
                e = Entry()
                e.text = paragraph
                chars = [char for char in chars]
                e.id = original_id + "#char={},{}".format(chars[i][0], chars[i][1])
                yield e
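Note that NLTK's `span_tokenize` returns a lazy iterator, so indexing `chars[i]` in the sentence branch above may fail at runtime; the paragraph branch papers over it by materializing the spans inside the loop. A compact sketch of the same chunk-plus-offset idea with the spans materialized up front:

```python
from nltk.tokenize.punkt import PunktSentenceTokenizer

text = 'Hello. World.'
tokenizer = PunktSentenceTokenizer()

# Materialize the (begin, end) spans once, then pair them with the chunks.
spans = list(tokenizer.span_tokenize(text))
for (begin, end), sentence in zip(spans, tokenizer.tokenize(text)):
    print('#char={},{}'.format(begin, end), sentence)
# #char=0,6 Hello.
# #char=7,13 World.
```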
@@ -1,12 +1,11 @@
---
name: split
module: senpy.plugins.misc.split
module: split
description: A sample plugin that chunks input text
author: "@militarpancho"
version: '0.2'
version: '0.1'
url: "https://github.com/gsi-upm/senpy"
requirements:
  - nltk
requirements: {nltk}
extra_params:
  delimiter:
    aliases:
@@ -20,16 +20,10 @@
    "@id": "me:hasSuggestions",
    "@container": "@set"
},
"onyx:hasEmotion": {
    "@container": "@set"
},
"emotions": {
    "@id": "onyx:hasEmotionSet",
    "@container": "@set"
},
"onyx:hasEmotion": {
    "@container": "@set"
},
"sentiments": {
    "@id": "marl:hasOpinion",
    "@container": "@set"
@@ -5,7 +5,7 @@
"nif:beginIndex": {"type": "integer"},
"nif:endIndex": {"type": "integer"},
"nif:anchorOf": {
    "description": "Piece of context that contains the Emotion",
    "description": "Piece of context that contains the Sentiment",
    "type": "string"
},
"onyx:hasDimension": {
@@ -6,7 +6,7 @@
    "type": "string"
},
"nif:isString": {
    "description": "String contained in this Context. Alternative: nif:isString",
    "description": "String contained in this Context",
    "type": "string"
},
"sentiments": {
@@ -1,17 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-04/schema#",
  "allOf": [
    {"$ref": "response.json"},
    {
      "title": "Help",
      "description": "Help containing accepted parameters",
      "type": "object",
      "properties": {
        "parameters": {
          "type": "object"
        }
      },
      "required": "parameters"
    }
  ]
}
@@ -3,7 +3,6 @@
"allOf": [
  {"$ref": "response.json"},
  {
    "required": ["plugins"],
    "properties": {
      "plugins": {
        "type": "array",
@@ -2,12 +2,7 @@
"$schema": "http://json-schema.org/draft-04/schema#",
"type": "object",
"properties": {
    "@type": {"type": "string"},
    "parameters": {
        "type": "object",
        "default": {}
    }
    "@type": {"type": "string"}
},
"required": ["@type"]
@@ -1,7 +1,7 @@
var ONYX = "http://www.gsi.dit.upm.es/ontologies/onyx/ns#";
var RDF_TYPE = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type";
var plugins_params={};
var default_params = JSON.parse($.ajax({type: "GET", url: "/api?help=true" , async: false}).responseText);

function replaceURLWithHTMLLinks(text) {
    console.log('Text: ' + text);
    var exp = /(\b(https?|ftp|file):\/\/[-A-Z0-9+&@#\/%?=~_|!:,.;]*[-A-Z0-9+&@#\/%=~_|])/ig;
@@ -31,110 +31,74 @@ $(document).ready(function() {
    html="";
    var availablePlugins = document.getElementById('availablePlugins');
    plugins = response.plugins;
    gplugins = {};
    for (r in plugins){
        ptype = plugins[r]['@type'];
        if(gplugins[ptype] == undefined){
            gplugins[ptype] = [r]
        }else{
            gplugins[ptype].push(r)
        }
    }
    for (g in gplugins){
        html += "<optgroup label=\""+g+"\">"
        for (r in gplugins[g]){
            plugin = plugins[gplugins[g][r]]
            if (!plugin["name"]){
                console.log("No name for plugin ", plugin);
                continue;
            }
            html+= "<option value=\""+plugin["name"]+"\" "
            if (plugin["name"] == defaultPlugin["name"]){
                html+= " selected=\"selected\""
            }
            if (!plugin["is_activated"]){
                html+= " disabled=\"disabled\" "
            }
            html+=">"+plugin["name"]+"</option>"
    plugin = plugins[r]
    if (plugin["name"]){
        if (plugin["name"] == defaultPlugin["name"]){
            if (plugin["is_activated"]){
                html+= "<option value=\""+plugin["name"]+"\" selected=\"selected\">"+plugin["name"]+"</option>"
            }else{
                html+= "<option value=\""+plugin["name"]+"\" selected=\"selected\" disabled=\"disabled\">"+plugin["name"]+"</option>"
            }
        }

            if (plugin["extra_params"]){
                plugins_params[plugin["name"]]={};
                for (param in plugin["extra_params"]){
                    if (typeof plugin["extra_params"][param] !="string"){
                        var params = new Array();
                        var alias = plugin["extra_params"][param]["aliases"][0];
                        params[alias]=new Array();
                        for (option in plugin["extra_params"][param]["options"]){
                            params[alias].push(plugin["extra_params"][param]["options"][option])
                        }
                        plugins_params[plugin["name"]][alias] = (params[alias])
                    }
                }
            }
            var pluginEntry = document.createElement('li');

            newHtml = ""
            if(plugin.url) {
                newHtml= "<a href="+plugin.url+">" + plugin.name + "</a>";
            }else {
                newHtml= plugin["name"];
        else{
            if (plugin["is_activated"]){
                html+= "<option value=\""+plugin["name"]+"\">"+plugin["name"]+"</option>"
            }
            else{
                html+= "<option value=\""+plugin["name"]+"\" disabled=\"disabled\">"+plugin["name"]+"</option>"
            }
        }
            newHtml += ": " + replaceURLWithHTMLLinks(plugin.description);
            pluginEntry.innerHTML = newHtml;
            availablePlugins.appendChild(pluginEntry)
        }
        if (plugin["extra_params"]){
            plugins_params[plugin["name"]]={};
            for (param in plugin["extra_params"]){
                if (typeof plugin["extra_params"][param] !="string"){
                    var params = new Array();
                    var alias = plugin["extra_params"][param]["aliases"][0];
                    params[alias]=new Array();
                    for (option in plugin["extra_params"][param]["options"]){
                        params[alias].push(plugin["extra_params"][param]["options"][option])
                    }
                    plugins_params[plugin["name"]][alias] = (params[alias])
                }
            }
        }
        var pluginList = document.createElement('li');

        newHtml = ""
        if(plugin.url) {
            newHtml= "<a href="+plugin.url+">" + plugin.name + "</a>";
        }else {
            newHtml= plugin["name"];
        }
        newHtml += ": " + replaceURLWithHTMLLinks(plugin.description);
        pluginList.innerHTML = newHtml;
        availablePlugins.appendChild(pluginList)
    }
    html += "</optgroup>"
    document.getElementById('plugins').innerHTML = html;
    change_params();

    $(window).on('hashchange', hashchanged);
    hashchanged();
    $('.tooltip-form').tooltip();

    $(window).on('hashchange', hashchanged);
    hashchanged();
    $('.tooltip-form').tooltip();

});


function change_params(){
    var plugin = document.getElementById("plugins").options[document.getElementById("plugins").selectedIndex].value;

    html=""
    for (param in default_params){
        if ((default_params[param]['options']) && (['help','conversion'].indexOf(param) < 0)){
            html+= "<label> "+param+"</label>"
            if (default_params[param]['options'].length < 1) {
                html +="<input></input>";
            }
            else {
                html+= "<select id=\""+param+"\" name=\""+param+"\">"
                for (option in default_params[param]['options']){
                    if (default_params[param]['options'][option] == default_params[param]['default']){
                        html+="<option value \""+default_params[param]['options'][option]+"\" selected >"+default_params[param]['options'][option]+"</option>"
                    }
                    else{
                        html+="<option value \""+default_params[param]['options'][option]+"\">"+default_params[param]['options'][option]+"</option>"
                    }
                }
            }
            html+="</select><br>"
        }
    }
    for (param in plugins_params[plugin]){
        if (param || plugins_params[plugin][param].length > 1){
            html+= "<label> Parameter "+param+"</label>"
            param_opts = plugins_params[plugin][param]
            if (param_opts.length > 0) {
                html+= "<select id=\""+param+"\" name=\""+param+"\">"
                for (option in param_opts){
                    html+="<option value \""+param_opts[option]+"\">"+param_opts[option]+"</option>"
                }
                html+="</select>"
            }
            else {
                html +="<input id=\""+param+"\" name=\""+param+"\"></input>";
            }
        html+= "<label> Parameter "+param+"</label>"
        html+= "<select id=\""+param+"\" name=\""+param+"\">"
        for (option in plugins_params[plugin][param]){
            html+="<option value \""+plugins_params[plugin][param][option]+"\">"+plugins_params[plugin][param][option]+"</option>"
        }
    }
    html+="</select>"
    }
    document.getElementById("params").innerHTML = html
};
@@ -150,43 +114,21 @@ function load_JSON(){
    url += "?algo="+plugin+"&i="+input
    for (param in plugins_params[plugin]){
        if (param != null){
            field = document.getElementById(param);
            if (plugins_params[plugin][param].length > 0){
                var param_value = encodeURIComponent(field.options[field.selectedIndex].text);
            } else {
                var param_value = encodeURIComponent(field.text);
            }
            if (param_value !== "undefined" && param_value.length > 0){
                url+="&"+param+"="+param_value
            }
        }
    }

    for (param in default_params){
        if ((param != null) && (default_params[param]['options']) && (['help','conversion'].indexOf(param) < 0)){
            var param_value = encodeURIComponent(document.getElementById(param).options[document.getElementById(param).selectedIndex].value);
            var param_value = encodeURIComponent(document.getElementById(param).options[document.getElementById(param).selectedIndex].text);
            if (param_value){
                url+="&"+param+"="+param_value
            }
        }
    }

    var response = $.ajax({type: "GET", url: url , async: false}).responseText;
    rawcontainer.innerHTML = replaceURLWithHTMLLinks(response)

    document.getElementById("input_request").innerHTML = "<a href='"+url+"'>"+url+"</a>"
    document.getElementById("results-div").style.display = 'block';
    try {
        response = JSON.parse(response);
    var response = JSON.parse($.ajax({type: "GET", url: url , async: false}).responseText);
        var options = {
            mode: 'view'
        };
        var editor = new JSONEditor(container, options, response);
        editor.expandAll();
    }
    catch(err){
        console.log("Error decoding JSON (got turtle?)");
    }
    rawcontainer.innerHTML = replaceURLWithHTMLLinks(JSON.stringify(response, undefined, 2))
    document.getElementById("input_request").innerHTML = "<a href='"+url+"'>"+url+"</a>"
    document.getElementById("results-div").style.display = 'block';


}
@@ -1,25 +0,0 @@
from . import models


def check_template(indict, template):
    if isinstance(template, dict) and isinstance(indict, dict):
        for k, v in template.items():
            if k not in indict:
                return '{} not in {}'.format(k, indict)
            check_template(indict[k], v)
    elif isinstance(template, list) and isinstance(indict, list):
        if len(indict) != len(template):
            raise models.Error('Different size for {} and {}'.format(indict, template))
        for e in template:
            found = False
            for i in indict:
                try:
                    check_template(i, e)
                    found = True
                except models.Error as ex:
                    continue
            if not found:
                raise models.Error('{} not found in {}'.format(e, indict))
    else:
        if indict != template:
            raise models.Error('{} and {} are different'.format(indict, template))
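For reference, the deleted helper matches results against partial templates: template dict keys must exist in the result (recursing into their values), every template list element must match some result element with equal lengths, and scalars must compare equal. A small usage sketch, assuming `check_template` as defined above:

```python
# Assuming check_template as defined above (formerly senpy.utils).
res = [{'nif:isString': 'Hello.', 'extra': 'ignored'},
       {'nif:isString': 'World.'}]
exp = [{'nif:isString': 'World.'},
       {'nif:isString': 'Hello.'}]

# Passes: list matching is order-insensitive and extra keys are allowed.
check_template(res, exp)

# Raises models.Error: no result element matches the template entry.
check_template(res, [{'nif:isString': 'Bye.'}, {'nif:isString': 'Hello.'}])
```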
@@ -21,6 +21,3 @@ class AsyncPlugin(AnalysisPlugin):
        values = self._do_async(2)
        entry.async_values = values
        yield entry

    def test(self):
        pass
@@ -3,9 +3,6 @@ from senpy.plugins import SentimentPlugin

class DummyPlugin(SentimentPlugin):
    def analyse_entry(self, entry, params):
        entry['nif:iString'] = entry['nif:isString'][::-1]
        entry.text = entry.text[::-1]
        entry.reversed = entry.get('reversed', 0) + 1
        yield entry

    def test(self):
        pass
@@ -1,5 +0,0 @@
from senpy.plugins import SentimentPlugin


class DummyPlugin(SentimentPlugin):
    import noop
@@ -9,6 +9,3 @@ class SleepPlugin(AnalysisPlugin):
    def analyse_entry(self, entry, params):
        sleep(float(params.get("timeout", self.timeout)))
        yield entry

    def test(self):
        pass
@@ -4,7 +4,7 @@
"description": "I am dummy",
"author": "@balkian",
"version": "0.1",
"timeout": 0.05,
"timeout": 0.5,
"extra_params": {
    "timeout": {
        "@id": "timeout_sleep",
@@ -11,24 +11,24 @@ class APITest(TestCase):

    def test_api_params(self):
        """The API should not define any required parameters without a default"""
        parse_params({}, API_PARAMS)
        parse_params({}, spec=API_PARAMS)

    def test_web_params(self):
        """The WEB should not define any required parameters without a default"""
        parse_params({}, WEB_PARAMS)
        parse_params({}, spec=WEB_PARAMS)

    def test_basic(self):
        a = {}
        try:
            parse_params(a, NIF_PARAMS)
            parse_params(a, spec=NIF_PARAMS)
            raise AssertionError()
        except Error:
            pass
        a = {'input': 'hello'}
        p = parse_params(a, NIF_PARAMS)
        p = parse_params(a, spec=NIF_PARAMS)
        assert 'input' in p
        b = {'i': 'hello'}
        p = parse_params(b, NIF_PARAMS)
        p = parse_params(b, spec=NIF_PARAMS)
        assert 'input' in p

    def test_plugin(self):
@@ -40,18 +40,18 @@ class APITest(TestCase):
            }
        }
        try:
            parse_params(query, plug_params)
            parse_params(query, spec=plug_params)
            raise AssertionError()
        except Error:
            pass
        query['hello'] = 'world'
        p = parse_params(query, plug_params)
        p = parse_params(query, spec=plug_params)
        assert 'hello' in p
        assert p['hello'] == 'world'
        del query['hello']

        query['hiya'] = 'dlrow'
        p = parse_params(query, plug_params)
        p = parse_params(query, spec=plug_params)
        assert 'hello' in p
        assert 'hiya' in p
        assert p['hello'] == 'dlrow'
@@ -63,17 +63,6 @@ class APITest(TestCase):
                'default': 1
            }
        }
        p = parse_params({}, spec)
        p = parse_params({}, spec=spec)
        assert 'hello' in p
        assert p['hello'] == 1

    def test_call(self):
        call = {
            'input': "Aloha my friend",
            'algo': "Dummy"
        }
        p = parse_params(call, API_PARAMS, NIF_PARAMS)
        assert 'algorithm' in p
        assert "Dummy" in p['algorithm']
        assert 'input' in p
        assert p['input'] == 'Aloha my friend'
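These tests exercise a spec-driven parameter parser: a spec maps a canonical name to aliases, a required flag, options and a default, and `parse_params` normalizes a raw query against one or more specs. A simplified sketch of that contract (this is not senpy's actual implementation):

```python
def parse_params_sketch(query, *specs):
    """Normalize a query dict against parameter specs;
    raise if a required parameter has no value or default."""
    parsed = dict(query)
    missing = []
    for spec in specs:
        for name, options in spec.items():
            # Map any known alias to the canonical name.
            for alias in options.get('aliases', []):
                if alias in parsed:
                    parsed[name] = parsed.pop(alias)
            if name not in parsed:
                if 'default' in options:
                    parsed[name] = options['default']
                elif options.get('required'):
                    missing.append(name)
    if missing:
        raise ValueError('Missing parameters: {}'.format(missing))
    return parsed


NIF_SKETCH = {
    'input': {'aliases': ['i', 'input'], 'required': True},
}
print(parse_params_sketch({'i': 'hello'}, NIF_SKETCH))
# {'input': 'hello'}
```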
@@ -17,19 +17,17 @@ def parse_resp(resp):


class BlueprintsTest(TestCase):
    @classmethod
    def setUpClass(cls):
        """Set up only once, and re-use in every individual test"""
        cls.app = Flask("test_extensions")
        cls.app.debug = False
        cls.client = cls.app.test_client()
        cls.senpy = Senpy()
        cls.senpy.init_app(cls.app)
        cls.dir = os.path.join(os.path.dirname(__file__), "..")
        cls.senpy.add_folder(cls.dir)
        cls.senpy.activate_plugin("Dummy", sync=True)
        cls.senpy.activate_plugin("DummyRequired", sync=True)
        cls.senpy.default_plugin = 'Dummy'
    def setUp(self):
        self.app = Flask("test_extensions")
        self.app.debug = False
        self.client = self.app.test_client()
        self.senpy = Senpy()
        self.senpy.init_app(self.app)
        self.dir = os.path.join(os.path.dirname(__file__), "..")
        self.senpy.add_folder(self.dir)
        self.senpy.activate_plugin("Dummy", sync=True)
        self.senpy.activate_plugin("DummyRequired", sync=True)
        self.senpy.default_plugin = 'Dummy'

    def assertCode(self, resp, code):
        self.assertEqual(resp.status_code, code)
@@ -38,7 +36,6 @@ class BlueprintsTest(TestCase):
        """
        Calling with no arguments should ask the user for more arguments
        """
        self.app.debug = False
        resp = self.client.get("/api/")
        self.assertCode(resp, 400)
        js = parse_resp(resp)
@@ -55,7 +52,7 @@ class BlueprintsTest(TestCase):
        The dummy plugin returns an empty response,\
        it should contain the context
        """
        resp = self.client.get("/api/?i=My aloha mohame&with_parameters=True")
        resp = self.client.get("/api/?i=My aloha mohame")
        self.assertCode(resp, 200)
        js = parse_resp(resp)
        logging.debug("Got response: %s", js)
@@ -66,7 +63,7 @@ class BlueprintsTest(TestCase):
        """
        Extra params that have a default should
        """
        resp = self.client.get("/api/?i=My aloha mohame&algo=Dummy&with_parameters=true")
        resp = self.client.get("/api/?i=My aloha mohame&algo=Dummy")
        self.assertCode(resp, 200)
        js = parse_resp(resp)
        logging.debug("Got response: %s", js)
@@ -78,7 +75,6 @@ class BlueprintsTest(TestCase):
        Extra params that have a required argument that does not
        have a default should raise an error.
        """
        self.app.debug = False
        resp = self.client.get("/api/?i=My aloha mohame&algo=DummyRequired")
        self.assertCode(resp, 400)
        js = parse_resp(resp)
@@ -90,7 +86,6 @@ class BlueprintsTest(TestCase):
        The dummy plugin returns an empty response,\
        it should contain the context
        """
        self.app.debug = False
        resp = self.client.get("/api/?i=My aloha mohame&algo=DOESNOTEXIST")
        self.assertCode(resp, 404)
        js = parse_resp(resp)
@@ -157,10 +152,3 @@ class BlueprintsTest(TestCase):
        self.assertCode(resp, 200)
        js = parse_resp(resp)
        assert "$schema" in js

    def test_help(self):
        resp = self.client.get("/api/?help=true")
        self.assertCode(resp, 200)
        js = parse_resp(resp)
        assert "parameters" in js
        assert "help" in js["parameters"]
@@ -1,6 +1,11 @@
import logging
from functools import partial

try:
    from unittest.mock import patch
except ImportError:
    from mock import patch

logger = logging.getLogger(__name__)

from unittest import TestCase
@@ -12,7 +17,11 @@ class CLITest(TestCase):
    def test_basic(self):
        self.assertRaises(Error, partial(main_function, []))

        res = main_function(['--input', 'test', '--algo', 'rand', '--with-parameters'])
        assert res.parameters['input'] == 'test'
        assert 'rand' in res.parameters['algorithm']
        assert res.parameters['input'] == 'test'
        with patch('senpy.extensions.Senpy.analyse') as patched:
            main_function(['--input', 'test'])

        patched.assert_called_with(input='test')
        with patch('senpy.extensions.Senpy.analyse') as patched:
            main_function(['--input', 'test', '--algo', 'rand'])

        patched.assert_called_with(input='test', algo='rand')
@@ -10,22 +10,15 @@ except ImportError:

from functools import partial
from senpy.extensions import Senpy
from senpy import plugins
from senpy.models import Error, Results, Entry, EmotionSet, Emotion, Plugin
from senpy import api
from flask import Flask
from unittest import TestCase


def analyse(instance, **kwargs):
    request = api.parse_call(kwargs)
    return instance.analyse(request)


class ExtensionsTest(TestCase):
    def setUp(self):
        self.app = Flask('test_extensions')
        self.dir = os.path.dirname(__file__)
        self.dir = os.path.join(os.path.dirname(__file__))
        self.senpy = Senpy(plugin_folder=self.dir,
                           app=self.app,
                           default_plugins=False)
@@ -45,23 +38,24 @@ class ExtensionsTest(TestCase):
        print(self.senpy.plugins)
        assert "Dummy" in self.senpy.plugins

    def test_installing(self):
        """ Installing a plugin """
    def test_enabling(self):
        """ Enabling a plugin """
        info = {
            'name': 'TestPip',
            'module': 'noop_plugin',
            'module': 'dummy',
            'description': None,
            'requirements': ['noop'],
            'version': 0
        }
        root = os.path.join(self.dir, 'plugins', 'noop')
        module = plugins.load_plugin_from_info(info, root=root, install=True)
        assert module.name == 'TestPip'
        root = os.path.join(self.dir, 'plugins', 'dummy_plugin')
        name, module = self.senpy._load_plugin_from_info(info, root=root)
        assert name == 'TestPip'
        assert module
        import noop
        dir(noop)
        self.senpy.install_deps()

    def test_enabling(self):
    def test_installing(self):
        """ Enabling a plugin """
        self.senpy.activate_all(sync=True)
        assert len(self.senpy.plugins) >= 3
@@ -76,8 +70,9 @@ class ExtensionsTest(TestCase):
            'requirements': ['IAmMakingThisPackageNameUpToFail'],
            'version': 0
        }
        root = os.path.join(self.dir, 'plugins', 'dummy_plugin')
        with self.assertRaises(Error):
            plugins.install_deps(info)
            name, module = self.senpy._load_plugin_from_info(info, root=root)

    def test_disabling(self):
        """ Disabling a plugin """
@@ -96,17 +91,23 @@ class ExtensionsTest(TestCase):
    def test_noplugin(self):
        """ Don't analyse if there isn't any plugin installed """
        self.senpy.deactivate_all(sync=True)
        self.assertRaises(Error, partial(analyse, self.senpy, input="tupni"))
        self.assertRaises(Error, partial(self.senpy.analyse, input="tupni"))
        self.assertRaises(Error,
                          partial(
                              self.senpy.analyse,
                              input="tupni",
                              algorithm='Dummy'))

    def test_analyse(self):
        """ Using a plugin """
        # I was using mock until plugin started inheriting
        # Leaf (defaultdict with __setattr__ and __getattr__.
        r1 = analyse(self.senpy, algorithm="Dummy", input="tupni", output="tuptuo")
        r2 = analyse(self.senpy, input="tupni", output="tuptuo")
        r1 = self.senpy.analyse(
            algorithm="Dummy", input="tupni", output="tuptuo")
        r2 = self.senpy.analyse(input="tupni", output="tuptuo")
        assert r1.analysis[0] == "plugins/Dummy_0.1"
        assert r2.analysis[0] == "plugins/Dummy_0.1"
        assert r1.entries[0]['nif:iString'] == 'input'
        assert r1.entries[0].text == 'input'

    def test_analyse_jsonld(self):
        """ Using a plugin with JSON-LD input"""
@@ -115,34 +116,30 @@ class ExtensionsTest(TestCase):
            "@type": "results",
            "entries": [
                {"@id": "entry1",
                 "nif:isString": "tupni",
                 "text": "tupni",
                 "@type": "entry"
                }
            ]
        }'''
        r1 = analyse(self.senpy,
                     algorithm="Dummy",
                     input=js_input,
                     informat="json-ld",
                     output="tuptuo")
        r2 = analyse(self.senpy,
                     input="tupni",
                     output="tuptuo")
        r1 = self.senpy.analyse(algorithm="Dummy",
                                input=js_input,
                                informat="json-ld",
                                output="tuptuo")
        r2 = self.senpy.analyse(input="tupni", output="tuptuo")
        assert r1.analysis[0] == "plugins/Dummy_0.1"
        assert r2.analysis[0] == "plugins/Dummy_0.1"
        assert r1.entries[0]['nif:iString'] == 'input'
        assert r1.entries[0].text == 'input'

    def test_analyse_error(self):
        mm = mock.MagicMock()
        mm.id = 'magic_mock'
        mm.is_activated = True
        mm.analyse_entries.side_effect = Error('error in analysis', status=500)
        mm.analyse_entries.side_effect = Error('error on analysis', status=500)
        self.senpy.plugins['MOCK'] = mm
        try:
            analyse(self.senpy, input='nothing', algorithm='MOCK')
            self.senpy.analyse(input='nothing', algorithm='MOCK')
            assert False
        except Error as ex:
            assert 'error in analysis' in ex['message']
            assert ex['message'] == 'error on analysis'
            assert ex['status'] == 500

        mm.analyse.side_effect = Exception('generic exception on analysis')
@@ -150,10 +147,10 @@ class ExtensionsTest(TestCase):
                                         'generic exception on analysis')

        try:
            analyse(self.senpy, input='nothing', algorithm='MOCK')
            self.senpy.analyse(input='nothing', algorithm='MOCK')
            assert False
        except Error as ex:
            assert 'generic exception on analysis' in ex['message']
            assert ex['message'] == 'generic exception on analysis'
            assert ex['status'] == 500

    def test_filtering(self):
@@ -176,34 +173,47 @@ class ExtensionsTest(TestCase):
            'onyx:usesEmotionModel': 'emoml:fsre-dimensions'
        })
        eSet1 = EmotionSet()
        eSet1.prov__wasGeneratedBy = plugin['@id']
        eSet1.prov__wasGeneratedBy = plugin['id']
        eSet1['onyx:hasEmotion'].append(Emotion({
            'emoml:arousal': 1,
            'emoml:potency': 0,
            'emoml:valence': 0
        }))
        response = Results({
            'analysis': [{'plugin': plugin}],
            'entries': [Entry({
                'nif:iString': 'much ado about nothing',
                'text': 'much ado about nothing',
                'emotions': [eSet1]
            })]
        })
        params = {'emotionModel': 'emoml:big6',
                  'conversion': 'full'}
        r1 = deepcopy(response)
        r1.parameters = params
        self.senpy.convert_emotions(r1)
        self.senpy.convert_emotions(r1,
                                    [plugin, ],
                                    params)
        assert len(r1.entries[0].emotions) == 2
        params['conversion'] = 'nested'
        r2 = deepcopy(response)
        r2.parameters = params
        self.senpy.convert_emotions(r2)
        self.senpy.convert_emotions(r2,
                                    [plugin, ],
                                    params)
        assert len(r2.entries[0].emotions) == 1
        assert r2.entries[0].emotions[0]['prov:wasDerivedFrom'] == eSet1
        params['conversion'] = 'filtered'
        r3 = deepcopy(response)
        r3.parameters = params
        self.senpy.convert_emotions(r3)
        self.senpy.convert_emotions(r3,
                                    [plugin, ],
                                    params)
        assert len(r3.entries[0].emotions) == 1
        r3.jsonld()

    # def test_async_plugin(self):
    #     """ We should accept multiprocessing plugins with async=False"""
    #     thread1 = self.senpy.activate_plugin("Async", sync=False)
    #     thread1.join(timeout=1)
    #     assert len(self.senpy.plugins['Async'].value) == 4

    #     resp = self.senpy.analyse(input='nothing', algorithm='Async')

    #     assert len(resp.entries[0].async_values) == 2
    #     self.senpy.activate_plugin("Async", sync=True)
@@ -185,7 +185,7 @@ class ModelsTest(TestCase):
            'entries': [{
                '@id': 'entry1',
                '@type': 'entry',
                'nif:isString': 'TEST'
                'text': 'TEST'
            }]
        }
        recovered = from_dict(results)
@@ -7,11 +7,11 @@ import tempfile

from unittest import TestCase
from senpy.models import Results, Entry, EmotionSet, Emotion
from senpy import plugins
from senpy.plugins import SentimentPlugin, ShelfMixin
from senpy.plugins.conversion.emotion.centroids import CentroidConversion


class ShelfDummyPlugin(plugins.SentimentPlugin, plugins.ShelfMixin):
class ShelfDummyPlugin(SentimentPlugin, ShelfMixin):
    def activate(self, *args, **kwargs):
        if 'counter' not in self.sh:
            self.sh['counter'] = 0
@@ -33,6 +33,7 @@ class PluginsTest(TestCase):
    def tearDown(self):
        if os.path.exists(self.shelf_dir):
            shutil.rmtree(self.shelf_dir)

        if os.path.isfile(self.shelf_file):
            os.remove(self.shelf_file)

@@ -50,29 +51,26 @@ class PluginsTest(TestCase):

    def test_shelf(self):
        ''' A shelf is created and the value is stored '''
        newfile = self.shelf_file + "new"
        a = ShelfDummyPlugin(info={
            'name': 'shelve',
            'version': 'test',
            'shelf_file': newfile
            'shelf_file': self.shelf_file
        })
        assert a.sh == {}
        a.activate()
        assert a.sh == {'counter': 0}
        assert a.shelf_file == newfile
        assert a.shelf_file == self.shelf_file

        a.sh['a'] = 'fromA'
        assert a.sh['a'] == 'fromA'

        a.save()

        sh = pickle.load(open(newfile, 'rb'))
        sh = pickle.load(open(self.shelf_file, 'rb'))

        assert sh['a'] == 'fromA'

    def test_dummy_shelf(self):
        with open(self.shelf_file, 'wb') as f:
            pickle.dump({'counter': 99}, f)
        a = ShelfDummyPlugin(info={
            'name': 'DummyShelf',
            'shelf_file': self.shelf_file,
@@ -82,13 +80,9 @@ class PluginsTest(TestCase):

        assert a.shelf_file == self.shelf_file
        res1 = a.analyse(input=1)
        assert res1.entries[0].nif__isString == 100
        a.deactivate()
        del a

        with open(self.shelf_file, 'rb') as f:
            sh = pickle.load(f)
            assert sh['counter'] == 100
        assert res1.entries[0].nif__isString == 1
        res2 = a.analyse(input=1)
        assert res2.entries[0].nif__isString == 2

    def test_corrupt_shelf(self):
        ''' Reusing the values of a previous shelf '''
@@ -181,7 +175,6 @@ class PluginsTest(TestCase):
            "centroids_direction": ["emoml:big6", "emoml:fsre-dimensions"]
        }
        c = CentroidConversion(info)
        print(c.serialize())

        es1 = EmotionSet()
        e1 = Emotion()
@@ -190,7 +183,6 @@ class PluginsTest(TestCase):
        res = c._forward_conversion(es1)
        assert res["X-dimension"] == 0.5
        assert res["Y-dimension"] == 0.5
        print(res)

        e2 = Emotion()
        e2.onyx__hasEmotionCategory = "c2"
@@ -198,39 +190,15 @@ class PluginsTest(TestCase):
        res = c._forward_conversion(es1)
        assert res["X-dimension"] == 0
        assert res["Y-dimension"] == 1
        print(res)

        e = Emotion()
        e["X-dimension"] = -0.2
        e["Y-dimension"] = -0.3
        res = c._backwards_conversion(e)
        assert res["onyx:hasEmotionCategory"] == "c3"
        print(res)

        e = Emotion()
        e["X-dimension"] = -0.2
        e["Y-dimension"] = 0.3
        res = c._backwards_conversion(e)
        assert res["onyx:hasEmotionCategory"] == "c2"


def make_mini_test(plugin_info):
    def mini_test(self):
        plugin = plugins.load_plugin_from_info(plugin_info, install=True)
        plugin.test()
    return mini_test


def _add_tests():
    root = os.path.join(os.path.dirname(__file__), '..')
    print(root)
    plugs = plugins.load_plugins([root, ], loader=plugins.parse_plugin_info)
    for k, v in plugs.items():
        pass
        t_method = make_mini_test(v)
        t_method.__name__ = 'test_plugin_{}'.format(k)
        setattr(PluginsTest, t_method.__name__, t_method)
        del t_method


_add_tests()
@@ -34,8 +34,7 @@ def do_create_(jsfile, success):
        except (AssertionError, ValidationError, KeyError) as ex:
            if success:
                raise
            return
        assert success

    return do_expected
Block a user