mirror of https://github.com/gsi-upm/senpy
synced 2025-10-19 09:48:26 +00:00

Compare commits
2 Commits

Author | SHA1 | Date
---|---|---
 | 83e2d415a1 |
 | f8ca595bc9 |

.gitlab-ci.yml
@@ -12,7 +12,7 @@ stages:
   - clean
 
 before_script:
-  - make -e login
+  - docker login -u $HUB_USER -p $HUB_PASSWORD
 
 .test: &test_definition
   stage: test
@@ -36,7 +36,6 @@ test-2.7:
   only:
     - tags
     - triggers
-    - fix-makefiles
 
 push-3.5:
   <<: *image_definition
@@ -84,7 +83,6 @@ deploy:
     - make -e deploy
   only:
     - master
-    - fix-makefiles
 
 push-github:
   stage: deploy
@@ -1,27 +0,0 @@
-These makefiles are recipes for several common tasks in different types of projects.
-To add them to your project, simply do:
-
-```
-git remote add makefiles ssh://git@lab.cluster.gsi.dit.upm.es:2200/docs/templates/makefiles.git
-git subtree add --prefix=.makefiles/ makefiles master
-touch Makefile
-echo "include .makefiles/base.mk" >> Makefile
-```
-
-Now you can take advantage of the recipes.
-For instance, to add useful targets for a python project, just add this to your Makefile:
-
-```
-include .makefiles/python.mk
-```
-
-You may need to set special variables like the name of your project or the python versions you're targeting.
-Take a look at each specific `.mk` file for more information, and the `Makefile` in the [senpy](https://lab.cluster.gsi.dit.upm.es/senpy/senpy) project for a real use case.
-
-If you update the makefiles from your repository, make sure to push the changes for review in upstream (this repository):
-
-```
-make makefiles-push
-```
-
-It will automatically commit all unstaged changes in the .makefiles folder.
.makefiles/base.mk (deleted)
@@ -1,38 +0,0 @@
-export
-NAME ?= $(shell basename $(CURDIR))
-VERSION ?= $(shell git describe --tags --dirty 2>/dev/null)
-
-# Get the location of this makefile.
-MK_DIR := $(dir $(abspath $(lastword $(MAKEFILE_LIST))))
-
--include .env
--include ../.env
-
-.FORCE:
-
-version: .FORCE
-	@echo $(VERSION) > $(NAME)/VERSION
-	@echo $(VERSION)
-
-help: ## Show this help.
-	@fgrep -h "##" $(MAKEFILE_LIST) | fgrep -v fgrep | sed -e 's/\\$$//' | sed -e 's/\(.*:\)[^#]*##\s*\(.*\)/\1\t\2/' | column -t -s "	"
-
-config: ## Load config from the environment. You should run it once in every session before other tasks. Run: eval $(make config)
-	@awk '{ print "export " $$0}' ../.env
-	@awk '{ print "export " $$0}' .env
-	@echo "# Please, run: "
-	@echo "# eval \$$(make config)"
-# If you need to run a command on the key/value pairs, use this:
-# @awk '{ split($$0, a, "="); "echo " a[2] " | base64 -w 0" |& getline b64; print "export " a[1] "=" a[2]; print "export " a[1] "_BASE64=" b64}' .env
-
-ci: ## Run a task using gitlab-runner. Only use to debug problems in the CI pipeline
-	gitlab-runner exec shell --builds-dir '.builds' --env CI_PROJECT_NAME=$(NAME) ${action}
-
-include $(MK_DIR)/makefiles.mk
-include $(MK_DIR)/docker.mk
-include $(MK_DIR)/git.mk
-
-info:: ## List all variables
-	env
-
-.PHONY:: config help ci version .FORCE
.makefiles/docker.mk (deleted)
@@ -1,25 +0,0 @@
-IMAGEWTAG ?= $(IMAGENAME):$(VERSION)
-
-docker-login: ## Log in to the registry. It will only be used in the server, or when running a CI task locally (if CI_BUILD_TOKEN is set).
-ifeq ($(CI_BUILD_TOKEN),)
-	@echo "Not logging in to the docker registry" "$(CI_REGISTRY)"
-else
-	@docker login -u gitlab-ci-token -p $(CI_BUILD_TOKEN) $(CI_REGISTRY)
-endif
-ifeq ($(HUB_USER),)
-	@echo "Not logging in to the global docker registry"
-else
-	@docker login -u $(HUB_USER) -p $(HUB_PASSWORD)
-endif
-
-docker-clean: ## Remove docker credentials
-ifeq ($(HUB_USER),)
-else
-	@docker logout
-endif
-
-login:: docker-login
-
-clean:: docker-clean
-
-.PHONY:: docker-login docker-clean login clean
.makefiles/git.mk (deleted)
@@ -1,28 +0,0 @@
-commit:
-	git commit -a
-
-tag:
-	git tag ${VERSION}
-
-git-push::
-	git push --tags -u origin HEAD
-
-git-pull:
-	git pull --all
-
-push-github: ## Push the code to github. You need to set up GITHUB_DEPLOY_KEY
-ifeq ($(GITHUB_DEPLOY_KEY),)
-else
-	$(eval KEY_FILE := $(shell mktemp))
-	@echo "$(GITHUB_DEPLOY_KEY)" > $(KEY_FILE)
-	@git remote rm github-deploy || true
-	git remote add github-deploy $(GITHUB_REPO)
-	@GIT_SSH_COMMAND="ssh -i $(KEY_FILE)" git fetch github-deploy $(CI_COMMIT_REF_NAME) || true
-	@GIT_SSH_COMMAND="ssh -i $(KEY_FILE)" git push github-deploy $(CI_COMMIT_REF_NAME)
-	rm $(KEY_FILE)
-endif
-
-push:: git-push
-pull:: git-pull
-
-.PHONY:: commit tag push git-push git-pull push-github
.makefiles/k8s.mk (deleted)
@@ -1,51 +0,0 @@
-# Deployment with Kubernetes
-
-# KUBE_CA_PEM_FILE is the path of a certificate file. It is automatically set by GitLab
-# if you enable Kubernetes integration in a project.
-#
-# As of this writing, Kubernetes integration can not be set on a group level, so it has to
-# be manually set in every project.
-# Alternatively, we use a custom KUBE_CA_BUNDLE environment variable, which can be set at
-# the group level. In this case, the variable contains the whole content of the certificate,
-# which we dump to a temporary file
-#
-# Check if the KUBE_CA_PEM_FILE exists. Otherwise, create it from KUBE_CA_BUNDLE
-KUBE_CA_TEMP=false
-ifndef KUBE_CA_PEM_FILE
-KUBE_CA_PEM_FILE:=$$PWD/.ca.crt
-CREATED:=$(shell echo -e "$(KUBE_CA_BUNDLE)" > $(KUBE_CA_PEM_FILE))
-endif
-KUBE_TOKEN?=""
-KUBE_NAMESPACE?=$(NAME)
-KUBECTL=docker run --rm -v $(KUBE_CA_PEM_FILE):/tmp/ca.pem -i lachlanevenson/k8s-kubectl --server="$(KUBE_URL)" --token="$(KUBE_TOKEN)" --certificate-authority="/tmp/ca.pem" -n $(KUBE_NAMESPACE)
-CI_COMMIT_REF_NAME?=master
-
-info:: ## Print variables. Useful for debugging.
-	@echo "#KUBERNETES"
-	@echo KUBE_URL=$(KUBE_URL)
-	@echo KUBE_CA_PEM_FILE=$(KUBE_CA_PEM_FILE)
-	@echo KUBE_CA_BUNDLE=$$KUBE_CA_BUNDLE
-	@echo KUBE_TOKEN=$(KUBE_TOKEN)
-	@echo KUBE_NAMESPACE=$(KUBE_NAMESPACE)
-	@echo KUBECTL=$(KUBECTL)
-
-	@echo "#CI"
-	@echo CI_PROJECT_NAME=$(CI_PROJECT_NAME)
-	@echo CI_REGISTRY=$(CI_REGISTRY)
-	@echo CI_REGISTRY_USER=$(CI_REGISTRY_USER)
-	@echo CI_COMMIT_REF_NAME=$(CI_COMMIT_REF_NAME)
-	@echo "CREATED=$(CREATED)"
-
-#
-# Deployment and advanced features
-#
-
-
-deploy: ## Deploy to kubernetes using the credentials in KUBE_CA_PEM_FILE (or KUBE_CA_BUNDLE ) and TOKEN
-	@ls k8s/*.yaml k8s/*.yml k8s/*.tmpl 2>/dev/null || true
-	@cat k8s/*.yaml k8s/*.yml k8s/*.tmpl 2>/dev/null | envsubst | $(KUBECTL) apply -f -
-
-deploy-check: ## Get the deployed configuration.
-	@$(KUBECTL) get deploy,pods,svc,ingress
-
-.PHONY:: info deploy deploy-check
.makefiles/makefiles.mk (deleted)
@@ -1,17 +0,0 @@
-makefiles-remote:
-	@git remote add makefiles ssh://git@lab.cluster.gsi.dit.upm.es:2200/docs/templates/makefiles.git 2>/dev/null || true
-
-makefiles-commit: makefiles-remote
-	git add -f .makefiles
-	git commit -em "Updated makefiles from ${NAME}"
-
-makefiles-push:
-	git subtree push --prefix=.makefiles/ makefiles $(NAME)
-
-makefiles-pull: makefiles-remote
-	git subtree pull --prefix=.makefiles/ makefiles master --squash
-
-pull:: makefiles-pull
-push:: makefiles-push
-
-.PHONY:: makefiles-remote makefiles-commit makefiles-push makefiles-pull pull push
@@ -1,5 +0,0 @@
-init: ## Init pre-commit hooks (i.e. enforcing format checking before allowing a commit)
-	pip install --user pre-commit
-	pre-commit install
-
-.PHONY:: init
.makefiles/python.mk (deleted)
@@ -1,92 +0,0 @@
-PYVERSIONS ?= 2.7
-PYMAIN ?= $(firstword $(PYVERSIONS))
-TARNAME ?= $(NAME)-$(VERSION).tar.gz
-
-DEVPORT ?= 6000
-
-yapf: ## Format python code
-	yapf -i -r $(NAME)
-	yapf -i -r tests
-
-dockerfiles: $(addprefix Dockerfile-,$(PYVERSIONS)) ## Generate dockerfiles for each python version
-	@unlink Dockerfile >/dev/null
-	ln -s Dockerfile-$(PYMAIN) Dockerfile
-
-Dockerfile-%: Dockerfile.template ## Generate a specific dockerfile (e.g. Dockerfile-2.7)
-	sed "s/{{PYVERSION}}/$*/" Dockerfile.template > Dockerfile-$*
-
-quick_build: $(addprefix build-, $(PYMAIN))
-
-build: $(addprefix build-, $(PYVERSIONS)) ## Build all images / python versions
-
-build-%: version Dockerfile-% ## Build a specific version (e.g. build-2.7)
-	docker build -t '$(IMAGEWTAG)-python$*' --cache-from $(IMAGENAME):python$* -f Dockerfile-$* .;
-
-dev-%: ## Launch a specific development environment using docker (e.g. dev-2.7)
-	@docker start $(NAME)-dev$* || (\
-	$(MAKE) build-$*; \
-	docker run -d -w /usr/src/app/ -p $(DEVPORT):5000 -v $$PWD:/usr/src/app --entrypoint=/bin/bash -ti --name $(NAME)-dev$* '$(IMAGEWTAG)-python$*'; \
-	)\
-
-	docker exec -ti $(NAME)-dev$* bash
-
-dev: dev-$(PYMAIN) ## Launch a development environment using docker, using the default python version
-
-quick_test: test-$(PYMAIN)
-
-test-%: ## Run setup.py in an isolated container built from the base image (e.g. test-2.7)
-# This speeds tests up because the image has most (if not all) of the dependencies already.
-	docker rm $(NAME)-test-$* || true
-	docker create -ti --name $(NAME)-test-$* --entrypoint="" -w /usr/src/app/ $(IMAGENAME):python$* python setup.py test
-	docker cp . $(NAME)-test-$*:/usr/src/app
-	docker start -a $(NAME)-test-$*
-
-test: $(addprefix test-,$(PYVERSIONS)) ## Run the tests for every python version
-
-run-%: build-%
-	docker run --rm -p $(DEVPORT):5000 -ti '$(IMAGEWTAG)-python$(PYMAIN)' --default-plugins
-
-run: run-$(PYMAIN)
-
-# PyPI - Upload a package
-
-dist/$(TARNAME): version
-	python setup.py sdist;
-
-sdist: dist/$(TARNAME) ## Generate the source distribution file
-
-pip_test-%: sdist ## Test the distribution file using pip install and a specific python version (e.g. pip_test-2.7)
-	docker run --rm -v $$PWD/dist:/dist/ python:$* pip install /dist/$(TARNAME);
-
-pip_test: $(addprefix pip_test-,$(PYVERSIONS)) ## Test pip installation for every python version
-
-pip_upload: pip_test ## Upload package to pip
-	python setup.py sdist upload ;
-
-# Pushing to docker
-
-push-latest: $(addprefix push-latest-,$(PYVERSIONS)) ## Push the "latest" tag to dockerhub
-	docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGEWTAG)'
-	docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGENAME)'
-	docker push '$(IMAGENAME):latest'
-	docker push '$(IMAGEWTAG)'
-
-push-latest-%: build-% ## Push the latest image for a specific python version
-	docker tag $(IMAGENAME):$(VERSION)-python$* $(IMAGENAME):python$*
-	docker push $(IMAGENAME):$(VERSION)-python$*
-	docker push $(IMAGENAME):python$*
-
-push-%: build-% ## Push the image of the current version (tagged). e.g. push-2.7
-	docker push $(IMAGENAME):$(VERSION)-python$*
-
-push:: $(addprefix push-,$(PYVERSIONS)) ## Push an image with the current version for every python version
-	docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGEWTAG)'
-	docker push $(IMAGENAME):$(VERSION)
-
-clean:: ## Clean older docker images and containers related to this project and dev environments
-	@docker stop $(addprefix $(NAME)-dev,$(PYVERSIONS)) 2>/dev/null || true
-	@docker rm $(addprefix $(NAME)-dev,$(PYVERSIONS)) 2>/dev/null || true
-	@docker ps -a | grep $(IMAGENAME) | awk '{ split($$2, vers, "-"); if(vers[0] != "${VERSION}"){ print $$1;}}' | xargs docker rm -v 2>/dev/null|| true
-	@docker images | grep $(IMAGENAME) | awk '{ split($$2, vers, "-"); if(vers[0] != "${VERSION}"){ print $$1":"$$2;}}' | xargs docker rmi 2>/dev/null|| true
-
-.PHONY:: yapf dockerfiles Dockerfile-% quick_build build build-% dev-% quick-dev test quick_test push-latest push-latest-% push-% push
Makefile (140 lines changed)
@@ -1,17 +1,147 @@
 NAME=senpy
+VERSION=$(shell git describe --tags --dirty 2>/dev/null)
 GITHUB_REPO=git@github.com:gsi-upm/senpy.git
 
 IMAGENAME=gsiupm/senpy
+IMAGEWTAG=$(IMAGENAME):$(VERSION)
 
-# The first version is the main one (used for quick builds)
-# See .makefiles/python.mk for more info
 PYVERSIONS=3.5 2.7
+PYMAIN=$(firstword $(PYVERSIONS))
 
 DEVPORT=5000
 
+TARNAME=$(NAME)-$(VERSION).tar.gz
 action="test-${PYMAIN}"
 GITHUB_REPO=git@github.com:gsi-upm/senpy.git
 
-include .makefiles/base.mk
-include .makefiles/k8s.mk
-include .makefiles/python.mk
+KUBE_CA_PEM_FILE=""
+KUBE_URL=""
+KUBE_TOKEN=""
+KUBE_NAMESPACE=$(NAME)
+KUBECTL=docker run --rm -v $(KUBE_CA_PEM_FILE):/tmp/ca.pem -v $$PWD:/tmp/cwd/ -i lachlanevenson/k8s-kubectl --server="$(KUBE_URL)" --token="$(KUBE_TOKEN)" --certificate-authority="/tmp/ca.pem" -n $(KUBE_NAMESPACE)
+CI_REGISTRY=docker.io
+CI_REGISTRY_USER=gitlab
+CI_BUILD_TOKEN=""
+CI_COMMIT_REF_NAME=master
+
+all: build run
+
+.FORCE:
+
+version: .FORCE
+	@echo $(VERSION) > $(NAME)/VERSION
+	@echo $(VERSION)
+
+yapf:
+	yapf -i -r $(NAME)
+	yapf -i -r tests
+
+init:
+	pip install --user pre-commit
+	pre-commit install
+
+dockerfiles: $(addprefix Dockerfile-,$(PYVERSIONS))
+	@unlink Dockerfile >/dev/null
+	ln -s Dockerfile-$(PYMAIN) Dockerfile
+
+Dockerfile-%: Dockerfile.template
+	sed "s/{{PYVERSION}}/$*/" Dockerfile.template > Dockerfile-$*
+
+quick_build: $(addprefix build-, $(PYMAIN))
+
+build: $(addprefix build-, $(PYVERSIONS))
+
+build-%: version Dockerfile-%
+	docker build -t '$(IMAGEWTAG)-python$*' --cache-from $(IMAGENAME):python$* -f Dockerfile-$* .;
+
+quick_test: $(addprefix test-,$(PYMAIN))
+
+dev-%:
+	@docker start $(NAME)-dev$* || (\
+	$(MAKE) build-$*; \
+	docker run -d -w /usr/src/app/ -p $(DEVPORT):5000 -v $$PWD:/usr/src/app --entrypoint=/bin/bash -ti --name $(NAME)-dev$* '$(IMAGEWTAG)-python$*'; \
+	)\
+
+	docker exec -ti $(NAME)-dev$* bash
+
+dev: dev-$(PYMAIN)
+
+test-all: $(addprefix test-,$(PYVERSIONS))
+
+test-%:
+	docker run --rm --entrypoint /usr/local/bin/python -w /usr/src/app $(IMAGENAME):python$* setup.py test
+
+test: test-$(PYMAIN)
+
+dist/$(TARNAME): version
+	python setup.py sdist;
+
+sdist: dist/$(TARNAME)
+
+pip_test-%: sdist
+	docker run --rm -v $$PWD/dist:/dist/ python:$* pip install /dist/$(TARNAME);
+
+pip_test: $(addprefix pip_test-,$(PYVERSIONS))
+
+pip_upload: pip_test
+	python setup.py sdist upload ;
+
+clean:
+	@docker ps -a | grep $(IMAGENAME) | awk '{ split($$2, vers, "-"); if(vers[0] != "${VERSION}"){ print $$1;}}' | xargs docker rm -v 2>/dev/null|| true
+	@docker images | grep $(IMAGENAME) | awk '{ split($$2, vers, "-"); if(vers[0] != "${VERSION}"){ print $$1":"$$2;}}' | xargs docker rmi 2>/dev/null|| true
+	@docker stop $(addprefix $(NAME)-dev,$(PYVERSIONS)) 2>/dev/null || true
+	@docker rm $(addprefix $(NAME)-dev,$(PYVERSIONS)) 2>/dev/null || true
+
+git_commit:
+	git commit -a
+
+git_tag:
+	git tag ${VERSION}
+
+git_push:
+	git push --tags origin master
+
+run-%: build-%
+	docker run --rm -p $(DEVPORT):5000 -ti '$(IMAGEWTAG)-python$(PYMAIN)' --default-plugins
+
+run: run-$(PYMAIN)
+
+push-latest: $(addprefix push-latest-,$(PYVERSIONS))
+	docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGEWTAG)'
+	docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGENAME)'
+	docker push '$(IMAGENAME):latest'
+	docker push '$(IMAGEWTAG)'
+
+push-latest-%: build-%
+	docker tag $(IMAGENAME):$(VERSION)-python$* $(IMAGENAME):python$*
+	docker push $(IMAGENAME):$(VERSION)-python$*
+	docker push $(IMAGENAME):python$*
+
+push-%: build-%
+	docker push $(IMAGENAME):$(VERSION)-python$*
+
+push: $(addprefix push-,$(PYVERSIONS))
+	docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGEWTAG)'
+	docker push $(IMAGENAME):$(VERSION)
+
+push-github:
+	$(eval KEY_FILE := $(shell mktemp))
+	@echo "$$GITHUB_DEPLOY_KEY" > $(KEY_FILE)
+	@git remote rm github-deploy || true
+	git remote add github-deploy $(GITHUB_REPO)
+	@GIT_SSH_COMMAND="ssh -i $(KEY_FILE)" git fetch github-deploy $(CI_COMMIT_REF_NAME) || true
+	@GIT_SSH_COMMAND="ssh -i $(KEY_FILE)" git push github-deploy $(CI_COMMIT_REF_NAME)
+	rm $(KEY_FILE)
+
+ci:
+	gitlab-runner exec docker --docker-volumes /var/run/docker.sock:/var/run/docker.sock --env CI_PROJECT_NAME=$(NAME) ${action}
+
+deploy:
+	@$(KUBECTL) delete secret $(CI_REGISTRY) || true
+	@$(KUBECTL) create secret docker-registry $(CI_REGISTRY) --docker-server=$(CI_REGISTRY) --docker-username=$(CI_REGISTRY_USER) --docker-email=$(CI_REGISTRY_USER) --docker-password=$(CI_BUILD_TOKEN)
+	@$(KUBECTL) apply -f /tmp/cwd/k8s/
+
+.PHONY: test test-% test-all build-% build test pip_test run yapf push-main push-% dev ci version .FORCE deploy
@@ -1,4 +1,4 @@
 {
-  "@type": "plugins",
-  "plugins": {}
+  "plugins": [
+  ]
 }
@@ -8,7 +8,8 @@
   "me:EmotionAnalysis1",
   "me:NER1",
   {
-    "description": "missing @id and @type"
+    "@type": "analysis",
+    "@id": "wrong"
   }
 ],
 "entries": [
@@ -1,78 +0,0 @@
-{
-  "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
-  "@id": "me:Result1",
-  "@type": "results",
-  "analysis": [
-    "me:SAnalysis1",
-    "me:SgAnalysis1",
-    "me:EmotionAnalysis1",
-    "me:NER1",
-    {
-      "@type": "analysis",
-      "@id": "anonymous"
-    }
-  ],
-  "entries": [
-    {
-      "@id": "http://micro.blog/status1",
-      "@type": [
-        "nif:RFC5147String",
-        "nif:Context"
-      ],
-      "nif:isString": "Dear Microsoft, put your Windows Phone on your newest #open technology program. You'll be awesome. #opensource",
-      "entities": [
-        {
-          "@id": "http://micro.blog/status1#char=5,13",
-          "nif:beginIndex": 5,
-          "nif:endIndex": 13,
-          "nif:anchorOf": "Microsoft",
-          "me:references": "http://dbpedia.org/page/Microsoft",
-          "prov:wasGeneratedBy": "me:NER1"
-        },
-        {
-          "@id": "http://micro.blog/status1#char=25,37",
-          "nif:beginIndex": 25,
-          "nif:endIndex": 37,
-          "nif:anchorOf": "Windows Phone",
-          "me:references": "http://dbpedia.org/page/Windows_Phone",
-          "prov:wasGeneratedBy": "me:NER1"
-        }
-      ],
-      "suggestions": [
-        {
-          "@id": "http://micro.blog/status1#char=16,77",
-          "nif:beginIndex": 16,
-          "nif:endIndex": 77,
-          "nif:anchorOf": "put your Windows Phone on your newest #open technology program",
-          "prov:wasGeneratedBy": "me:SgAnalysis1"
-        }
-      ],
-      "sentiments": [
-        {
-          "@id": "http://micro.blog/status1#char=80,97",
-          "nif:beginIndex": 80,
-          "nif:endIndex": 97,
-          "nif:anchorOf": "You'll be awesome.",
-          "marl:hasPolarity": "marl:Positive",
-          "marl:polarityValue": 0.9,
-          "prov:wasGeneratedBy": "me:SAnalysis1"
-        }
-      ],
-      "emotions": [
-        {
-          "@id": "http://micro.blog/status1#char=0,109",
-          "nif:anchorOf": "Dear Microsoft, put your Windows Phone on your newest #open technology program. You'll be awesome. #opensource",
-          "prov:wasGeneratedBy": "me:EAnalysis1",
-          "onyx:hasEmotion": [
-            {
-              "onyx:hasEmotionCategory": "wna:liking"
-            },
-            {
-              "onyx:hasEmotionCategory": "wna:excitement"
-            }
-          ]
-        }
-      ]
-    }
-  ]
-}
@@ -3,7 +3,10 @@
 "@id": "me:Result1",
 "@type": "results",
 "analysis": [
-  "me:SgAnalysis1"
+  {
+    "@id": "me:SgAnalysis1",
+    "@type": "me:SuggestionAnalysis"
+  }
 ],
 "entries": [
   {
@@ -2,7 +2,6 @@ Flask>=0.10.1
 requests>=2.4.1
 tornado>=4.4.3
 PyLD>=0.6.5
-nltk
 six
 future
 jsonschema
@@ -95,8 +95,8 @@ def main():
     app = Flask(__name__)
     app.debug = args.debug
     sp = Senpy(app, args.plugins_folder, default_plugins=args.default_plugins)
-    sp.install_deps()
     if args.only_install:
+        sp.install_deps()
         return
     sp.activate_all()
     print('Senpy version {}'.format(senpy.__version__))
senpy/api.py (147 lines changed)
@@ -1,55 +1,38 @@
 from future.utils import iteritems
-from .models import Error, Results, Entry, from_string
+from .models import Error
 import logging
 logger = logging.getLogger(__name__)
 
 API_PARAMS = {
     "algorithm": {
-        "aliases": ["algorithms", "a", "algo"],
+        "aliases": ["algorithm", "a", "algo"],
         "required": False,
-        "description": ("Algorithms that will be used to process the request."
-                        "It may be a list of comma-separated names."),
-    },
-    "expanded-jsonld": {
-        "@id": "expanded-jsonld",
-        "aliases": ["expanded"],
-        "required": True,
-        "default": 0
-    },
-    "with_parameters": {
-        "aliases": ['withparameters',
-                    'with-parameters'],
-        "options": "boolean",
-        "default": False,
-        "required": True
-    },
-    "plugin_type": {
-        "@id": "pluginType",
-        "description": 'What kind of plugins to list',
-        "aliases": ["pluginType"],
-        "required": True,
-        "default": "analysisPlugin"
     },
     "outformat": {
         "@id": "outformat",
-        "aliases": ["o"],
+        "aliases": ["outformat", "o"],
         "default": "json-ld",
         "required": True,
         "options": ["json-ld", "turtle"],
     },
-    "help": {
-        "@id": "help",
-        "description": "Show additional help to know more about the possible parameters",
-        "aliases": ["h"],
+    "expanded-jsonld": {
+        "@id": "expanded-jsonld",
+        "aliases": ["expanded", "expanded-jsonld"],
         "required": True,
-        "options": "boolean",
-        "default": False
+        "default": 0
     },
     "emotionModel": {
         "@id": "emotionModel",
-        "aliases": ["emoModel"],
+        "aliases": ["emotionModel", "emoModel"],
         "required": False
     },
+    "plugin_type": {
+        "@id": "pluginType",
+        "description": 'What kind of plugins to list',
+        "aliases": ["pluginType", "plugin_type"],
+        "required": True,
+        "default": "analysisPlugin"
+    },
     "conversion": {
         "@id": "conversion",
         "description": "How to show the elements that have (not) been converted",
@@ -61,16 +44,15 @@ API_PARAMS = {
 
 WEB_PARAMS = {
     "inHeaders": {
-        "aliases": ["headers"],
+        "aliases": ["inHeaders", "headers"],
         "required": True,
-        "default": False,
-        "options": "boolean"
+        "default": "0"
     },
 }
 
 CLI_PARAMS = {
     "plugin_folder": {
-        "aliases": ["folder"],
+        "aliases": ["plugin_folder", "folder"],
         "required": True,
         "default": "."
     },
@@ -79,71 +61,64 @@ CLI_PARAMS = {
 NIF_PARAMS = {
     "input": {
         "@id": "input",
-        "aliases": ["i"],
+        "aliases": ["i", "input"],
         "required": True,
         "help": "Input text"
     },
-    "intype": {
-        "@id": "intype",
-        "aliases": ["t"],
-        "required": False,
-        "default": "direct",
-        "options": ["direct", "url", "file"],
-    },
     "informat": {
         "@id": "informat",
-        "aliases": ["f"],
+        "aliases": ["f", "informat"],
         "required": False,
         "default": "text",
         "options": ["turtle", "text", "json-ld"],
     },
+    "intype": {
+        "@id": "intype",
+        "aliases": ["intype", "t"],
+        "required": False,
+        "default": "direct",
+        "options": ["direct", "url", "file"],
+    },
     "language": {
         "@id": "language",
-        "aliases": ["l"],
+        "aliases": ["language", "l"],
        "required": False,
     },
     "prefix": {
         "@id": "prefix",
-        "aliases": ["p"],
+        "aliases": ["prefix", "p"],
         "required": True,
         "default": "",
     },
     "urischeme": {
         "@id": "urischeme",
-        "aliases": ["u"],
+        "aliases": ["urischeme", "u"],
         "required": False,
         "default": "RFC5147String",
         "options": "RFC5147String"
-    }
+    },
 }
 
 
-def parse_params(indict, *specs):
-    if not specs:
-        specs = [NIF_PARAMS]
-    logger.debug("Parsing: {}\n{}".format(indict, specs))
+def parse_params(indict, spec=NIF_PARAMS):
+    logger.debug("Parsing: {}\n{}".format(indict, spec))
     outdict = indict.copy()
     wrong_params = {}
-    for spec in specs:
-        for param, options in iteritems(spec):
-            if param[0] != "@":  # Exclude json-ld properties
-                for alias in options.get("aliases", []):
-                    # Replace each alias with the correct name of the parameter
-                    if alias in indict and alias is not param:
-                        outdict[param] = indict[alias]
-                        del indict[alias]
-                        continue
-                if param not in outdict:
-                    if options.get("required", False) and "default" not in options:
-                        wrong_params[param] = spec[param]
-                    else:
-                        if "default" in options:
-                            outdict[param] = options["default"]
-                elif "options" in spec[param]:
-                    if spec[param]["options"] == "boolean":
-                        outdict[param] = outdict[param] in [None, True, 'true', '1']
-                    elif outdict[param] not in spec[param]["options"]:
-                        wrong_params[param] = spec[param]
+    for param, options in iteritems(spec):
+        if param[0] != "@":  # Exclude json-ld properties
+            for alias in options.get("aliases", []):
+                if alias in indict:
+                    outdict[param] = indict[alias]
+            if param not in outdict:
+                if options.get("required", False) and "default" not in options:
+                    wrong_params[param] = spec[param]
+                else:
+                    if "default" in options:
+                        outdict[param] = options["default"]
+            else:
+                if "options" in spec[param] and \
+                   outdict[param] not in spec[param]["options"]:
+                    wrong_params[param] = spec[param]
     if wrong_params:
         logger.debug("Error parsing: %s", wrong_params)
         message = Error(
@@ -153,30 +128,4 @@ def parse_params(indict, *specs):
             errors={param: error
                     for param, error in iteritems(wrong_params)})
         raise message
-    if 'algorithm' in outdict and not isinstance(outdict['algorithm'], list):
-        outdict['algorithm'] = outdict['algorithm'].split(',')
     return outdict
-
-
-def get_extra_params(request, plugin=None):
-    params = request.parameters.copy()
-    if plugin:
-        extra_params = parse_params(params, plugin.get('extra_params', {}))
-        params.update(extra_params)
-    return params
-
-
-def parse_call(params):
-    '''Return a results object based on the parameters used in a call/request.
-    '''
-    params = parse_params(params, NIF_PARAMS)
-    if params['informat'] == 'text':
-        results = Results()
-        entry = Entry(nif__isString=params['input'])
-        results.entries.append(entry)
-    elif params['informat'] == 'json-ld':
-        results = from_string(params['input'], cls=Results)
-    else:
-        raise NotImplemented('Informat {} is not implemented'.format(params['informat']))
-    results.parameters = params
-    return results
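As an illustration of the alias and default handling above, here is a minimal sketch (not part of the diff) of how the single-spec `parse_params` resolves a request dictionary; the input values are made up:

```
from senpy.api import parse_params, NIF_PARAMS

raw = {'i': 'hello world'}
params = parse_params(raw, spec=NIF_PARAMS)
# 'i' is declared as an alias of 'input', so it is copied over:
assert params['input'] == 'hello world'
# Parameters that declare a default (informat, intype, prefix, urischeme)
# are filled in automatically:
assert params['informat'] == 'text'
```

A required parameter without a default (here, `input`) that is missing from the request makes `parse_params` raise the `Error` model instead of returning.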
@@ -19,8 +19,8 @@ Blueprints for Senpy
 """
 from flask import (Blueprint, request, current_app, render_template, url_for,
                    jsonify)
-from .models import Error, Response, Help, Plugins, read_schema
-from . import api
+from .models import Error, Response, Plugins, read_schema
+from .api import WEB_PARAMS, API_PARAMS, parse_params
 from .version import __version__
 from functools import wraps
 
@@ -76,32 +76,35 @@ def basic_api(f):
     def decorated_function(*args, **kwargs):
         raw_params = get_params(request)
         headers = {'X-ORIGINAL-PARAMS': json.dumps(raw_params)}
+        # Get defaults
+        web_params = parse_params({}, spec=WEB_PARAMS)
+        api_params = parse_params({}, spec=API_PARAMS)
+
         outformat = 'json-ld'
         try:
             print('Getting request:')
             print(request)
-            params = api.parse_params(raw_params, api.WEB_PARAMS, api.API_PARAMS)
-            if hasattr(request, 'parameters'):
-                request.parameters.update(params)
+            web_params = parse_params(raw_params, spec=WEB_PARAMS)
+            api_params = parse_params(raw_params, spec=API_PARAMS)
+            if hasattr(request, 'params'):
+                request.params.update(api_params)
             else:
-                request.parameters = params
+                request.params = api_params
             response = f(*args, **kwargs)
         except Error as ex:
             response = ex
-            response.parameters = params
             logger.error(ex)
             if current_app.debug:
                 raise
 
-        in_headers = params['inHeaders']
-        expanded = params['expanded-jsonld']
-        outformat = params['outformat']
+        in_headers = web_params['inHeaders'] != "0"
+        expanded = api_params['expanded-jsonld']
+        outformat = api_params['outformat']
 
         return response.flask(
             in_headers=in_headers,
             headers=headers,
-            prefix=url_for('.api_root', _external=True),
+            prefix=url_for('.api', _external=True),
             context_uri=url_for('api.context',
                                 entity=type(response).__name__,
                                 _external=True),
@@ -113,22 +116,16 @@ def basic_api(f):
 
 @api_blueprint.route('/', methods=['POST', 'GET'])
 @basic_api
-def api_root():
-    if request.parameters['help']:
-        dic = dict(api.API_PARAMS, **api.NIF_PARAMS)
-        response = Help(parameters=dic)
-        return response
-    else:
-        req = api.parse_call(request.parameters)
-        response = current_app.senpy.analyse(req)
-        return response
+def api():
+    response = current_app.senpy.analyse(**request.params)
+    return response
 
 
 @api_blueprint.route('/plugins/', methods=['POST', 'GET'])
 @basic_api
 def plugins():
     sp = current_app.senpy
-    ptype = request.parameters.get('plugin_type')
+    ptype = request.params.get('plugin_type')
     plugins = sp.filter_plugins(plugin_type=ptype)
     dic = Plugins(plugins=list(plugins.values()))
     return dic
senpy/cli.py (26 lines changed)
@@ -1,7 +1,7 @@
 import sys
 from .models import Error
+from .api import parse_params, CLI_PARAMS
 from .extensions import Senpy
-from . import api
 
 
 def argv_to_dict(argv):
@@ -13,27 +13,27 @@ def argv_to_dict(argv):
         if argv[i][0] == '-':
             key = argv[i].strip('-')
             value = argv[i + 1] if len(argv) > i + 1 else None
-            if not value or value[0] == '-':
-                cli_dict[key] = True
+            if value and value[0] == '-':
+                cli_dict[key] = ""
             else:
                 cli_dict[key] = value
     return cli_dict
 
 
+def parse_cli(argv):
+    cli_dict = argv_to_dict(argv)
+    cli_params = parse_params(cli_dict, spec=CLI_PARAMS)
+    return cli_params, cli_dict
+
+
 def main_function(argv):
     '''This is the method for unit testing
     '''
-    params = api.parse_params(argv_to_dict(argv),
-                              api.CLI_PARAMS,
-                              api.API_PARAMS,
-                              api.NIF_PARAMS)
-    plugin_folder = params['plugin_folder']
+    cli_params, cli_dict = parse_cli(argv)
+    plugin_folder = cli_params['plugin_folder']
     sp = Senpy(default_plugins=False, plugin_folder=plugin_folder)
-    request = api.parse_call(params)
-    algos = request.parameters.get('algorithm', sp.plugins.keys())
-    for algo in algos:
-        sp.activate_plugin(algo)
-    res = sp.analyse(request)
+    sp.activate_all(sync=True)
+    res = sp.analyse(**cli_dict)
     return res
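A quick sketch of how the helpers above turn an argument vector into parameters; the plugin name is a placeholder:

```
from senpy.cli import argv_to_dict

argv = ['--input', 'Senpy is awesome', '--algorithm', 'rand']
print(argv_to_dict(argv))
# {'input': 'Senpy is awesome', 'algorithm': 'rand'}
# main_function(argv) then resolves plugin_folder via CLI_PARAMS,
# activates all plugins synchronously and calls sp.analyse(**cli_dict).
```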
@@ -5,22 +5,35 @@ It orchestrates plugin (de)activation and analysis.
 from future import standard_library
 standard_library.install_aliases()
 
-from . import plugins, api
+from . import plugins
 from .plugins import SenpyPlugin
-from .models import Error
+from .models import Error, Entry, Results, from_string
 from .blueprints import api_blueprint, demo_blueprint, ns_blueprint
+from .api import API_PARAMS, NIF_PARAMS, parse_params
 
 from threading import Thread
-from functools import partial
 
 import os
 import copy
+import fnmatch
+import inspect
+import sys
+import importlib
 import logging
 import traceback
+import yaml
+import subprocess
 
 logger = logging.getLogger(__name__)
 
 
+def log_subprocess_output(process):
+    for line in iter(process.stdout.readline, b''):
+        logger.info('%r', line)
+    for line in iter(process.stderr.readline, b''):
+        logger.error('%r', line)
+
+
 class Senpy(object):
     """ Default Senpy extension for Flask """
@@ -72,20 +85,22 @@ class Senpy(object):
         else:
             logger.debug("Not a folder: %s", folder)
 
-    def _get_plugins(self, request):
+    def _find_plugins(self, params):
         if not self.analysis_plugins:
             raise Error(
                 status=404,
                 message=("No plugins found."
                          " Please install one."))
-        algos = request.parameters.get('algorithm', None)
-        if not algos:
-            if self.default_plugin:
-                algos = [self.default_plugin.name, ]
-            else:
-                raise Error(
-                    status=404,
-                    message="No default plugin found, and None provided")
+        api_params = parse_params(params, spec=API_PARAMS)
+        algos = None
+        if "algorithm" in api_params and api_params["algorithm"]:
+            algos = api_params["algorithm"].split(',')
+        elif self.default_plugin:
+            algos = [self.default_plugin.name, ]
+        else:
+            raise Error(
+                status=404,
+                message="No default plugin found, and None provided")
 
         plugins = list()
         for algo in algos:
@@ -96,58 +111,76 @@ class Senpy(object):
                 raise Error(
                     status=404,
                     message="The algorithm '{}' is not valid".format(algo))
 
+            if not self.plugins[algo].is_activated:
+                logger.debug("Plugin not activated: {}".format(algo))
+                raise Error(
+                    status=400,
+                    message=("The algorithm '{}'"
+                             " is not activated yet").format(algo))
             plugins.append(self.plugins[algo])
         return plugins
 
-    def _process_entries(self, entries, req, plugins):
-        """
-        Recursively process the entries with the first plugin in the list, and pass the results
-        to the rest of the plugins.
-        """
+    def _get_params(self, params, plugin=None):
+        nif_params = parse_params(params, spec=NIF_PARAMS)
+        if plugin:
+            extra_params = plugin.get('extra_params', {})
+            specific_params = parse_params(params, spec=extra_params)
+            nif_params.update(specific_params)
+        return nif_params
+
+    def _get_entries(self, params):
+        if params['informat'] == 'text':
+            results = Results()
+            entry = Entry(text=params['input'])
+            results.entries.append(entry)
+        elif params['informat'] == 'json-ld':
+            results = from_string(params['input'], cls=Results)
+        else:
+            raise NotImplemented('Informat {} is not implemented'.format(params['informat']))
+        return results
+
+    def _process_entries(self, entries, plugins, nif_params):
         if not plugins:
             for i in entries:
                 yield i
             return
         plugin = plugins[0]
-        self._activate(plugin)  # Make sure the plugin is activated
-        specific_params = api.get_extra_params(req, plugin)
-        req.analysis.append({'plugin': plugin,
-                             'parameters': specific_params})
+        specific_params = self._get_params(nif_params, plugin)
         results = plugin.analyse_entries(entries, specific_params)
-        for i in self._process_entries(results, req, plugins[1:]):
+        for i in self._process_entries(results, plugins[1:], nif_params):
             yield i
 
-    def install_deps(self):
-        for plugin in self.filter_plugins(is_activated=True):
-            plugins.install_deps(plugin)
+    def _process_response(self, resp, plugins, nif_params):
+        entries = resp.entries
+        resp.entries = []
+        for plug in plugins:
+            resp.analysis.append(plug.id)
+        for i in self._process_entries(entries, plugins, nif_params):
+            resp.entries.append(i)
+        return resp
 
-    def analyse(self, request):
+    def analyse(self, **api_params):
         """
         Main method that analyses a request, either from CLI or HTTP.
-        It takes a processed request, provided by the user, as returned
-        by api.parse_call().
+        It uses a dictionary of parameters, provided by the user.
         """
-        logger.debug("analysing request: {}".format(request))
+        logger.debug("analysing with params: {}".format(api_params))
+        plugins = self._find_plugins(api_params)
+        nif_params = self._get_params(api_params)
+        resp = self._get_entries(nif_params)
+        if 'with_parameters' in api_params:
+            resp.parameters = nif_params
         try:
-            entries = request.entries
-            request.entries = []
-            plugins = self._get_plugins(request)
-            results = request
-            for i in self._process_entries(entries, results, plugins):
-                results.entries.append(i)
-            self.convert_emotions(results)
-            if 'with_parameters' not in results.parameters:
-                del results.parameters
-            logger.debug("Returning analysis result: {}".format(results))
+            resp = self._process_response(resp, plugins, nif_params)
+            self.convert_emotions(resp, plugins, nif_params)
+            logger.debug("Returning analysis result: {}".format(resp))
         except (Error, Exception) as ex:
             if not isinstance(ex, Error):
-                msg = "Error during analysis: {} \n\t{}".format(ex,
-                                                                traceback.format_exc())
-                ex = Error(message=msg, status=500)
+                ex = Error(message=str(ex), status=500)
             logger.exception('Error returning analysis result')
             raise ex
-        results.analysis = [i['plugin'].id for i in results.analysis]
-        return results
+        return resp
 
     def _conversion_candidates(self, fromModel, toModel):
         candidates = self.filter_plugins(plugin_type='emotionConversionPlugin')
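To see what the refactor buys, here is a minimal sketch of driving the new keyword-based entry point; the plugin folder and algorithm name are hypothetical:

```
from flask import Flask
from senpy.extensions import Senpy

app = Flask(__name__)
sp = Senpy(app, plugin_folder='plugins', default_plugins=False)
sp.activate_all(sync=True)

# analyse() now receives the raw request parameters directly as kwargs;
# it looks up the plugins, parses the NIF parameters and builds the
# Results object itself instead of expecting a pre-parsed request.
res = sp.analyse(input='Dear Microsoft, you are awesome', algorithm='rand')
print(res.entries)
```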
@@ -160,7 +193,7 @@ class Senpy(object):
             # logging.debug('Found candidate: {}'.format(candidate))
             yield candidate
 
-    def convert_emotions(self, resp):
+    def convert_emotions(self, resp, plugins, params):
         """
         Conversion of all emotions in a response **in place**.
         In addition to converting from one model to another, it has
@@ -168,8 +201,6 @@ class Senpy(object):
         Needless to say, this is far from an elegant solution, but it works.
         @todo refactor and clean up
         """
-        plugins = [i['plugin'] for i in resp.analysis]
-        params = resp.parameters
         toModel = params.get('emotionModel', None)
         if not toModel:
             return
@@ -197,8 +228,7 @@ class Senpy(object):
             for j in i.emotions:
                 plugname = j['prov:wasGeneratedBy']
                 candidate = candidates[plugname]
-                resp.analysis.append({'plugin': candidate,
-                                      'parameters': params})
+                resp.analysis.append(candidate.id)
                 for k in candidate.convert(j, fromModel, toModel, params):
                     k.prov__wasGeneratedBy = candidate.id
                     if output == 'nested':
@@ -207,6 +237,7 @@ class Senpy(object):
             i.emotions = newemotions
             newentries.append(i)
         resp.entries = newentries
+        resp.analysis = list(set(resp.analysis))
 
     @property
     def default_plugin(self):
@@ -226,42 +257,25 @@ class Senpy(object):
         else:
             self._default = self.plugins[value]
 
-    def activate_all(self, sync=True):
+    def activate_all(self, sync=False):
         ps = []
         for plug in self.plugins.keys():
             ps.append(self.activate_plugin(plug, sync=sync))
         return ps
 
-    def deactivate_all(self, sync=True):
+    def deactivate_all(self, sync=False):
         ps = []
         for plug in self.plugins.keys():
             ps.append(self.deactivate_plugin(plug, sync=sync))
         return ps
 
-    def _set_active(self, plugin, active=True, *args, **kwargs):
+    def _set_active_plugin(self, plugin_name, active=True, *args, **kwargs):
         ''' We're using a variable in the plugin itself to activate/deactivate plugins.\
         Note that plugins may activate themselves by setting this variable.
         '''
-        plugin.is_activated = active
+        self.plugins[plugin_name].is_activated = active
 
-    def _activate(self, plugin):
-        success = False
-        with plugin._lock:
-            if plugin.is_activated:
-                return
-            try:
-                plugin.activate()
-                msg = "Plugin activated: {}".format(plugin.name)
-                logger.info(msg)
-                success = True
-                self._set_active(plugin, success)
-            except Exception as ex:
-                msg = "Error activating plugin {} - {} : \n\t{}".format(
-                    plugin.name, ex, traceback.format_exc())
-                logger.error(msg)
-                raise Error(msg)
-
-    def activate_plugin(self, plugin_name, sync=True):
+    def activate_plugin(self, plugin_name, sync=False):
         try:
             plugin = self.plugins[plugin_name]
         except KeyError:
@@ -270,17 +284,37 @@ class Senpy(object):
 
         logger.info("Activating plugin: {}".format(plugin.name))
 
+        def act():
+            success = False
+            try:
+                plugin.activate()
+                msg = "Plugin activated: {}".format(plugin.name)
+                logger.info(msg)
+                success = True
+                self._set_active_plugin(plugin_name, success)
+            except Exception as ex:
+                msg = "Error activating plugin {} - {} : \n\t{}".format(
+                    plugin.name, ex, traceback.format_exc())
+                logger.error(msg)
+                raise Error(msg)
+
         if sync or 'async' in plugin and not plugin.async:
-            self._activate(plugin)
+            act()
         else:
-            th = Thread(target=partial(self._activate, plugin))
+            th = Thread(target=act)
             th.start()
             return th
 
-    def _deactivate(self, plugin):
-        with plugin._lock:
-            if not plugin.is_activated:
-                return
+    def deactivate_plugin(self, plugin_name, sync=False):
+        try:
+            plugin = self.plugins[plugin_name]
+        except KeyError:
+            raise Error(
+                message="Plugin not found: {}".format(plugin_name), status=404)
+
+        self._set_active_plugin(plugin_name, False)
+
+        def deact():
             try:
                 plugin.deactivate()
                 logger.info("Plugin deactivated: {}".format(plugin.name))
@@ -289,22 +323,91 @@ class Senpy(object):
                     "Error deactivating plugin {}: {}".format(plugin.name, ex))
                 logger.error("Trace: {}".format(traceback.format_exc()))
 
-    def deactivate_plugin(self, plugin_name, sync=True):
-        try:
-            plugin = self.plugins[plugin_name]
-        except KeyError:
-            raise Error(
-                message="Plugin not found: {}".format(plugin_name), status=404)
-
-        self._set_active(plugin, False)
-
         if sync or 'async' in plugin and not plugin.async:
-            self._deactivate(plugin)
+            deact()
         else:
-            th = Thread(target=partial(self._deactivate, plugin))
+            th = Thread(target=deact)
             th.start()
             return th
 
+    @classmethod
+    def validate_info(cls, info):
+        return all(x in info for x in ('name', 'module', 'description', 'version'))
+
+    def install_deps(self):
+        for i in self.plugins.values():
+            self._install_deps(i)
+
+    @classmethod
+    def _install_deps(cls, info=None):
+        requirements = info.get('requirements', [])
+        if requirements:
+            pip_args = ['pip']
+            pip_args.append('install')
+            pip_args.append('--use-wheel')
+            for req in requirements:
+                pip_args.append(req)
+            logger.info('Installing requirements: ' + str(requirements))
+            process = subprocess.Popen(pip_args,
+                                       stdout=subprocess.PIPE,
+                                       stderr=subprocess.PIPE)
+            log_subprocess_output(process)
+            exitcode = process.wait()
+            if exitcode != 0:
+                raise Error("Dependencies not properly installed")
+
+    @classmethod
+    def _load_module(cls, name, root):
+        sys.path.append(root)
+        tmp = importlib.import_module(name)
+        sys.path.remove(root)
+        return tmp
+
+    @classmethod
+    def _load_plugin_from_info(cls, info, root):
+        if not cls.validate_info(info):
+            logger.warn('The module info is not valid.\n\t{}'.format(info))
+            return None, None
+        module = info["module"]
+        name = info["name"]
+
+        cls._install_deps(info)
+        tmp = cls._load_module(module, root)
+
+        candidate = None
+        for _, obj in inspect.getmembers(tmp):
+            if inspect.isclass(obj) and inspect.getmodule(obj) == tmp:
+                logger.debug(("Found plugin class:"
+                              " {}@{}").format(obj, inspect.getmodule(obj)))
+                candidate = obj
+                break
+        if not candidate:
+            logger.debug("No valid plugin for: {}".format(module))
+            return
+        module = candidate(info=info)
+        return name, module
+
+    @classmethod
+    def _load_plugin(cls, root, filename):
+        fpath = os.path.join(root, filename)
+        logger.debug("Loading plugin: {}".format(fpath))
+        with open(fpath, 'r') as f:
+            info = yaml.load(f)
+        logger.debug("Info: {}".format(info))
+        return cls._load_plugin_from_info(info, root)
+
+    def _load_plugins(self):
+        plugins = {}
+        for search_folder in self._search_folders:
+            for root, dirnames, filenames in os.walk(search_folder):
+                for filename in fnmatch.filter(filenames, '*.senpy'):
+                    name, plugin = self._load_plugin(root, filename)
+                    if plugin and name:
+                        plugins[name] = plugin
+
+        self._outdated = False
+        return plugins
+
     def teardown(self, exception):
         pass
@@ -312,8 +415,7 @@ class Senpy(object):
|
|||||||
def plugins(self):
|
def plugins(self):
|
||||||
""" Return the plugins registered for a given application. """
|
""" Return the plugins registered for a given application. """
|
||||||
if self._outdated:
|
if self._outdated:
|
||||||
self._plugin_list = plugins.load_plugins(self._search_folders)
|
self._plugin_list = self._load_plugins()
|
||||||
self._outdated = False
|
|
||||||
return self._plugin_list
|
return self._plugin_list
|
||||||
|
|
||||||
def filter_plugins(self, **kwargs):
|
def filter_plugins(self, **kwargs):
|
||||||
|
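One side of the hunk above dispatches (de)activation through `Thread(target=partial(self._activate, plugin))` instead of a local closure. A minimal, self-contained sketch of that pattern; `plugin` is a stand-in object here, and only `Thread` and `partial` are the real standard-library APIs:

```python
from functools import partial
from threading import Thread


def _activate(plugin):
    # Stand-in for a Senpy-style activation hook.
    plugin.activate()


def activate_async(plugin):
    # partial() binds the argument up front, so no per-plugin closure
    # needs to be defined inside the calling method.
    th = Thread(target=partial(_activate, plugin))
    th.start()
    return th
```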
@@ -140,7 +140,7 @@ class SenpyMixin(object):
                     vp = item[kp]
                     temp[kp] = ser_or_down(vp)
                 return temp
-            elif isinstance(item, list) or isinstance(item, set):
+            elif isinstance(item, list):
                 return list(ser_or_down(i) for i in item)
             else:
                 return item
@@ -181,10 +181,10 @@ class SenpyMixin(object):
         obj = self
         if hasattr(obj, "jsonld"):
             obj = obj.jsonld()
-        self._validator.validate(obj)
+        jsonschema.validate(obj, self.schema)

     def __str__(self):
-        return str(self.serialize())
+        return str(self.to_JSON())


 class BaseModel(SenpyMixin, dict):
@@ -218,11 +218,12 @@ class BaseModel(SenpyMixin, dict):
         super(BaseModel, self).__init__(temp)

     def _get_key(self, key):
-        if key is 'id':
-            key = '@id'
         key = key.replace("__", ":", 1)
         return key

+    def __setitem__(self, key, value):
+        dict.__setitem__(self, key, value)
+
     def __delitem__(self, key):
         dict.__delitem__(self, key)

@@ -243,16 +244,18 @@ class BaseModel(SenpyMixin, dict):

     def _plain_dict(self):
         d = {k: v for (k, v) in self.items() if k[0] != "_"}
+        if 'id' in d:
+            d["@id"] = d.pop('id')
         return d


+_subtypes = {}


 def register(rsubclass, rtype=None):
     _subtypes[rtype or rsubclass.__name__] = rsubclass


-_subtypes = {}


 def from_dict(indict, cls=None):
     if not cls:
         target = indict.get('@type', None)
@@ -286,31 +289,15 @@ def from_json(injson):
     return from_dict(indict)


-def from_schema(name, schema=None, schema_file=None, base_classes=None):
+def from_schema(name, schema_file=None, base_classes=None):
     base_classes = base_classes or []
     base_classes.append(BaseModel)
     schema_file = schema_file or '{}.json'.format(name)
     class_name = '{}{}'.format(name[0].upper(), name[1:])
-    if '/' not in 'schema_file':
-        schema_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
-                                   'schemas',
-                                   schema_file)
-
-    schema_path = 'file://' + schema_file
-
-    with open(schema_file) as f:
-        schema = json.load(f)
-
-    dct = {}
-
-    resolver = jsonschema.RefResolver(schema_path, schema)
-    dct['@type'] = name
-    dct['_schema_file'] = schema_file
-    dct['schema'] = schema
-    dct['_validator'] = jsonschema.Draft4Validator(schema, resolver=resolver)
-
-    newclass = type(class_name, tuple(base_classes), dct)
+    newclass = type(class_name, tuple(base_classes), {})
+    setattr(newclass, '@type', name)
+    setattr(newclass, 'schema', read_schema(schema_file))
+    setattr(newclass, 'class_name', class_name)
     register(newclass, name)
     return newclass

@@ -331,7 +318,6 @@ for i in [
     'emotionPlugin',
     'emotionSet',
     'entry',
-    'help',
     'plugin',
     'plugins',
     'response',
@@ -351,9 +337,6 @@ class Error(SenpyMixin, Exception):
         self._error = _ErrorModel(message=message, *args, **kwargs)
         self.message = message

-    def validate(self, obj=None):
-        self._error.validate()
-
     def __getitem__(self, key):
         return self._error[key]

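The `from_schema` hunk above swaps between two validation strategies: calling `jsonschema.validate` on every check versus building a `Draft4Validator` once, with a `RefResolver` so `"$ref"` entries resolve against the schema's own file. A minimal sketch of the resolver-based variant, assuming a local schema file (the path below is hypothetical):

```python
import json

import jsonschema

schema_file = '/path/to/schemas/entry.json'  # hypothetical location
with open(schema_file) as f:
    schema = json.load(f)

# The file:// base URI lets "$ref" entries in the schema resolve
# relative to the schema file's directory.
resolver = jsonschema.RefResolver('file://' + schema_file, schema)
validator = jsonschema.Draft4Validator(schema, resolver=resolver)

validator.validate({"@type": "entry"})  # raises ValidationError on mismatch
```

Building the validator once per model class avoids re-parsing the schema on every call.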
@@ -1,22 +1,14 @@
 from future import standard_library
 standard_library.install_aliases()

+import inspect
 import os.path
 import os
 import pickle
 import logging
 import tempfile
 import copy
-import fnmatch
-import inspect
-import sys
-import subprocess
-import importlib
-import yaml
-import threading
-
-from .. import models, utils
+from .. import models
 from ..api import API_PARAMS

 logger = logging.getLogger(__name__)
@@ -35,7 +27,6 @@ class Plugin(models.Plugin):
         id = 'plugins/{}_{}'.format(info['name'], info['version'])
         super(Plugin, self).__init__(id=id, **info)
         self.is_activated = False
-        self._lock = threading.Lock()

     def get_folder(self):
         return os.path.dirname(inspect.getfile(self.__class__))
@@ -46,21 +37,6 @@ class Plugin(models.Plugin):
     def deactivate(self):
         pass

-    def test(self):
-        if not hasattr(self, 'test_cases'):
-            import inspect
-            raise AttributeError(('Plugin {} [{}] does not have any defined '
-                                  'test cases').format(self.id, inspect.getfile(self.__class__)))
-        for case in self.test_cases:
-            res = list(self.analyse_entry(models.Entry(case['entry']),
-                                          case['params']))
-            exp = case['expected']
-            if not isinstance(exp, list):
-                exp = [exp]
-            utils.check_template(res, exp)
-            for r in res:
-                r.validate()
-
 SenpyPlugin = Plugin

@@ -78,7 +54,7 @@ class AnalysisPlugin(Plugin):
         Note that this method may yield an annotated entry or a list of
         entries (e.g. in a tokenizer)
         """
-        text = entry['nif:isString']
+        text = entry.text
         params = copy.copy(parameters)
         params['input'] = text
         results = self.analyse(**params)
@@ -184,100 +160,3 @@ def pfilter(plugins, **kwargs):
     if kwargs:
         candidates = filter(matches, candidates)
     return {p.name: p for p in candidates}
-
-
-def validate_info(info):
-    return all(x in info for x in ('name', 'module', 'description', 'version'))
-
-
-def load_module(name, root=None):
-    if root:
-        sys.path.append(root)
-    tmp = importlib.import_module(name)
-    if root:
-        sys.path.remove(root)
-    return tmp
-
-
-def log_subprocess_output(process):
-    for line in iter(process.stdout.readline, b''):
-        logger.info('%r', line)
-    for line in iter(process.stderr.readline, b''):
-        logger.error('%r', line)
-
-
-def install_deps(*plugins):
-    for info in plugins:
-        requirements = info.get('requirements', [])
-        if requirements:
-            pip_args = ['pip']
-            pip_args.append('install')
-            pip_args.append('--use-wheel')
-            for req in requirements:
-                pip_args.append(req)
-            logger.info('Installing requirements: ' + str(requirements))
-            process = subprocess.Popen(pip_args,
-                                       stdout=subprocess.PIPE,
-                                       stderr=subprocess.PIPE)
-            log_subprocess_output(process)
-            exitcode = process.wait()
-            if exitcode != 0:
-                raise models.Error("Dependencies not properly installed")
-
-
-def load_plugin_from_info(info, root=None, validator=validate_info, install=True):
-    if not root and '_path' in info:
-        root = os.path.dirname(info['_path'])
-    if not validator(info):
-        raise ValueError('Plugin info is not valid: {}'.format(info))
-    module = info["module"]
-
-    try:
-        tmp = load_module(module, root)
-    except ImportError:
-        if not install:
-            raise
-        install_deps(info)
-        tmp = load_module(module, root)
-    candidate = None
-    for _, obj in inspect.getmembers(tmp):
-        if inspect.isclass(obj) and inspect.getmodule(obj) == tmp:
-            logger.debug(("Found plugin class:"
-                          " {}@{}").format(obj, inspect.getmodule(obj)))
-            candidate = obj
-            break
-    if not candidate:
-        logger.debug("No valid plugin for: {}".format(module))
-        return
-    module = candidate(info=info)
-    return module
-
-
-def parse_plugin_info(fpath):
-    logger.debug("Loading plugin: {}".format(fpath))
-    with open(fpath, 'r') as f:
-        info = yaml.load(f)
-    info['_path'] = fpath
-    name = info['name']
-    return name, info
-
-
-def load_plugin(fpath):
-    name, info = parse_plugin_info(fpath)
-    logger.debug("Info: {}".format(info))
-    plugin = load_plugin_from_info(info)
-    return name, plugin
-
-
-def load_plugins(folders, loader=load_plugin):
-    plugins = {}
-    for search_folder in folders:
-        for root, dirnames, filenames in os.walk(search_folder):
-            # Do not look for plugins in hidden or special folders
-            dirnames[:] = [d for d in dirnames if d[0] not in ['.', '_']]
-            for filename in fnmatch.filter(filenames, '*.senpy'):
-                fpath = os.path.join(root, filename)
-                name, plugin = loader(fpath)
-                if plugin and name:
-                    plugins[name] = plugin
-    return plugins
@@ -100,54 +100,3 @@ class CentroidConversion(EmotionConversionPlugin):
             else:
                 raise Error('EMOTION MODEL NOT KNOWN')
             yield e
-
-    def test(self, info=None):
-        if not info:
-            info = {
-                "name": "CentroidTest",
-                "description": "Centroid test",
-                "version": 0,
-                "centroids": {
-                    "c1": {"V1": 0.5,
-                           "V2": 0.5},
-                    "c2": {"V1": -0.5,
-                           "V2": 0.5},
-                    "c3": {"V1": -0.5,
-                           "V2": -0.5},
-                    "c4": {"V1": 0.5,
-                           "V2": -0.5}},
-                "aliases": {
-                    "V1": "X-dimension",
-                    "V2": "Y-dimension"
-                },
-                "centroids_direction": ["emoml:big6", "emoml:fsre-dimensions"]
-            }
-
-        c = CentroidConversion(info)
-
-        es1 = EmotionSet()
-        e1 = Emotion()
-        e1.onyx__hasEmotionCategory = "c1"
-        es1.onyx__hasEmotion.append(e1)
-        res = c._forward_conversion(es1)
-        assert res["X-dimension"] == 0.5
-        assert res["Y-dimension"] == 0.5
-
-        e2 = Emotion()
-        e2.onyx__hasEmotionCategory = "c2"
-        es1.onyx__hasEmotion.append(e2)
-        res = c._forward_conversion(es1)
-        assert res["X-dimension"] == 0
-        assert res["Y-dimension"] == 1
-
-        e = Emotion()
-        e["X-dimension"] = -0.2
-        e["Y-dimension"] = -0.3
-        res = c._backwards_conversion(e)
-        assert res["onyx:hasEmotionCategory"] == "c3"
-
-        e = Emotion()
-        e["X-dimension"] = -0.2
-        e["Y-dimension"] = 0.3
-        res = c._backwards_conversion(e)
-        assert res["onyx:hasEmotionCategory"] == "c2"
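The removed test doubles as documentation of the conversion arithmetic: judging by its assertions, `_forward_conversion` adds up the centroid vector of every emotion present, and `_backwards_conversion` picks the closest centroid. A toy re-implementation of the forward step, using the same numbers as the deleted fixture:

```python
# Centroids taken verbatim from the removed CentroidTest fixture.
centroids = {
    "c1": {"V1": 0.5, "V2": 0.5},
    "c2": {"V1": -0.5, "V2": 0.5},
}


def forward(categories):
    # Each detected category contributes its centroid; contributions add up.
    result = {"V1": 0.0, "V2": 0.0}
    for category in categories:
        for dim, value in centroids[category].items():
            result[dim] += value
    return result


assert forward(["c1"]) == {"V1": 0.5, "V2": 0.5}
assert forward(["c1", "c2"]) == {"V1": 0.0, "V2": 1.0}
```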
@@ -2,51 +2,38 @@
 name: Ekman2FSRE
 module: senpy.plugins.conversion.emotion.centroids
 description: Plugin to convert emotion sets from Ekman to VAD
-version: 0.2
+version: 0.1
 # No need to specify onyx:doesConversion because centroids.py adds it automatically from centroids_direction
-neutralValue: 5.0
 centroids:
   anger:
     A: 6.95
     D: 5.1
     V: 2.7
-    S: 5.0
   disgust:
     A: 5.3
     D: 8.05
     V: 2.7
-    S: 5.0
   fear:
     A: 6.5
     D: 3.6
     V: 3.2
-    S: 5.0
   happiness:
     A: 7.22
     D: 6.28
     V: 8.6
-    S: 5.0
   sadness:
     A: 5.21
     D: 2.82
     V: 2.21
-    S: 5.0
-  surprise:
-    A: 5.0
-    D: 5.0
-    V: 5.0
-    S: 10.0
 centroids_direction:
   - emoml:big6
   - emoml:fsre-dimensions
 aliases: # These are aliases for any key in the centroid, to avoid repeating a long name several times
-  A: emoml:fsre-dimensions_arousal
-  V: emoml:fsre-dimensions_valence
-  D: emoml:fsre-dimensions_potency
-  S: emoml:fsre-dimensions_unpredictability
+  A: emoml:arousal
+  V: emoml:valence
+  D: emoml:dominance
   anger: emoml:big6anger
   disgust: emoml:big6disgust
   fear: emoml:big6fear
   happiness: emoml:big6happiness
   sadness: emoml:big6sadness
-  surprise: emoml:big6surprise
@@ -2,9 +2,13 @@
 name: Ekman2PAD
 module: senpy.plugins.conversion.emotion.centroids
 description: Plugin to convert emotion sets from Ekman to VAD
-version: 0.2
+version: 0.1
 # No need to specify onyx:doesConversion because centroids.py adds it automatically from centroids_direction
-neutralValue: 5.0
+origin:
+  # Point in VAD space with no emotion (aka Neutral)
+  A: 5.0
+  D: 5.0
+  V: 5.0
 centroids:
   anger:
     A: 6.95
@@ -30,9 +34,9 @@ centroids_direction:
   - emoml:big6
   - emoml:pad
 aliases: # These are aliases for any key in the centroid, to avoid repeating a long name several times
-  A: emoml:pad-dimensions:arousal
-  V: emoml:pad-dimensions:pleasure
-  D: emoml:pad-dimensions:dominance
+  A: emoml:arousal
+  V: emoml:valence
+  D: emoml:dominance
   anger: emoml:big6anger
   disgust: emoml:big6disgust
   fear: emoml:big6fear
@@ -1,7 +1,7 @@
 import random

 from senpy.plugins import EmotionPlugin
-from senpy.models import EmotionSet, Emotion, Entry
+from senpy.models import EmotionSet, Emotion


 class RmoRandPlugin(EmotionPlugin):
@@ -16,11 +16,3 @@ class RmoRandPlugin(EmotionPlugin):
         emotionSet.prov__wasGeneratedBy = self.id
         entry.emotions.append(emotionSet)
         yield entry
-
-    def test(self):
-        params = dict()
-        results = list()
-        for i in range(100):
-            res = next(self.analyse_entry(Entry(nif__isString="Hello"), params))
-            res.validate()
-            results.append(res.emotions[0]['onyx:hasEmotion'][0]['onyx:hasEmotionCategory'])
@@ -1,7 +1,7 @@
 import random

 from senpy.plugins import SentimentPlugin
-from senpy.models import Sentiment, Entry
+from senpy.models import Sentiment


 class RandPlugin(SentimentPlugin):
@@ -22,13 +22,3 @@ class RandPlugin(SentimentPlugin):
         entry.sentiments.append(sentiment)
         entry.language = lang
         yield entry
-
-    def test(self):
-        params = dict()
-        results = list()
-        for i in range(100):
-            res = next(self.analyse_entry(Entry(nif__isString="Hello"), params))
-            res.validate()
-            results.append(res.sentiments[0]['marl:hasPolarity'])
-        assert 'marl:Positive' in results
-        assert 'marl:Negative' in results
@@ -1,64 +0,0 @@
-from senpy.plugins import AnalysisPlugin
-from senpy.models import Entry
-from nltk.tokenize.punkt import PunktSentenceTokenizer
-from nltk.tokenize.simple import LineTokenizer
-import nltk
-
-
-class SplitPlugin(AnalysisPlugin):
-
-    def activate(self):
-        nltk.download('punkt')
-
-    def analyse_entry(self, entry, params):
-        chunker_type = params.get("delimiter", "sentence")
-        original_text = entry.get('nif:isString', None)
-        if chunker_type == "sentence":
-            tokenizer = PunktSentenceTokenizer()
-        if chunker_type == "paragraph":
-            tokenizer = LineTokenizer()
-        chars = tokenizer.span_tokenize(original_text)
-        for i, chunk in enumerate(tokenizer.tokenize(original_text)):
-            e = Entry()
-            e['nif:isString'] = chunk
-            if entry.id:
-                e.id = entry.id + "#char={},{}".format(chars[i][0], chars[i][1])
-            yield e
-
-    test_cases = [
-        {
-            'entry': {
-                'nif:isString': 'Hello. World.'
-            },
-            'params': {
-                'delimiter': 'sentence',
-            },
-            'expected': [
-                {
-                    'nif:isString': 'Hello.'
-                },
-                {
-                    'nif:isString': 'World.'
-                }
-            ]
-        },
-        {
-            'entry': {
-                "id": ":test",
-                'nif:isString': 'Hello. World.'
-            },
-            'params': {
-                'delimiter': 'sentence',
-            },
-            'expected': [
-                {
-                    "@id": ":test#char=0,6",
-                    'nif:isString': 'Hello.'
-                },
-                {
-                    "@id": ":test#char=7,13",
-                    'nif:isString': 'World.'
-                }
-            ]
-        }
-    ]
@@ -12,7 +12,7 @@ class Sentiment140Plugin(SentimentPlugin):
             json.dumps({
                 "language": lang,
                 "data": [{
-                    "text": entry['nif:isString']
+                    "text": entry.text
                 }]
             }))
         p = params.get("prefix", None)
@@ -34,20 +34,3 @@ class Sentiment140Plugin(SentimentPlugin):
         entry.sentiments.append(sentiment)
         entry.language = lang
         yield entry
-
-    test_cases = [
-        {
-            'entry': {
-                'nif:isString': 'I love Titanic'
-            },
-            'params': {},
-            'expected': {
-                "nif:isString": "I love Titanic",
-                'sentiments': [
-                    {
-                        'marl:hasPolarity': 'marl:Positive',
-                    }
-                ]
-            }
-        }
-    ]
senpy/plugins/split/split.py (new file, 30 lines)
@@ -0,0 +1,30 @@
+from senpy.plugins import AnalysisPlugin
+from senpy.models import Entry
+from nltk.tokenize.punkt import PunktSentenceTokenizer
+from nltk.tokenize.simple import LineTokenizer
+import nltk
+class SplitPlugin(AnalysisPlugin):
+    def activate(self):
+        nltk.download('punkt')
+
+    def analyse_entry(self, entry, params):
+        chunker_type = params.get("delimiter", "sentence")
+        original_id = entry.id
+        original_text = entry.get("text", None)
+        if chunker_type == "sentence":
+            tokenizer = PunktSentenceTokenizer()
+            chars = tokenizer.span_tokenize(original_text)
+            for i, sentence in enumerate(tokenizer.tokenize(original_text)):
+                e = Entry()
+                e.text = sentence
+                e.id = original_id + "#char={},{}".format(chars[i][0], chars[i][1])
+                yield e
+        if chunker_type == "paragraph":
+            tokenizer = LineTokenizer()
+            chars = tokenizer.span_tokenize(original_text)
+            for i, paragraph in enumerate(tokenizer.tokenize(original_text)):
+                e = Entry()
+                e.text = paragraph
+                chars = [char for char in chars]
+                e.id = original_id + "#char={},{}".format(chars[i][0], chars[i][1])
+                yield e
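Both versions of the plugin build entry ids from `span_tokenize()` character offsets. A standalone sketch of what those offsets look like, matching the `#char=0,6` / `#char=7,13` ids in the deleted file's test cases:

```python
from nltk.tokenize.punkt import PunktSentenceTokenizer

text = 'Hello. World.'
tokenizer = PunktSentenceTokenizer()

# span_tokenize() yields (start, end) character offsets for each sentence.
spans = list(tokenizer.span_tokenize(text))  # [(0, 6), (7, 13)]
for (start, end), sentence in zip(spans, tokenizer.tokenize(text)):
    print("#char={},{} -> {}".format(start, end, sentence))
```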
@@ -1,12 +1,11 @@
 ---
 name: split
-module: senpy.plugins.misc.split
+module: split
 description: A sample plugin that chunks input text
 author: "@militarpancho"
-version: '0.2'
+version: '0.1'
 url: "https://github.com/gsi-upm/senpy"
-requirements:
-  - nltk
+requirements: {nltk}
 extra_params:
   delimiter:
     aliases:
@@ -20,16 +20,10 @@
         "@id": "me:hasSuggestions",
         "@container": "@set"
     },
-    "onyx:hasEmotion": {
-        "@container": "@set"
-    },
     "emotions": {
         "@id": "onyx:hasEmotionSet",
         "@container": "@set"
     },
-    "onyx:hasEmotion": {
-        "@container": "@set"
-    },
     "sentiments": {
         "@id": "marl:hasOpinion",
         "@container": "@set"
@@ -5,7 +5,7 @@
     "nif:beginIndex": {"type": "integer"},
     "nif:endIndex": {"type": "integer"},
     "nif:anchorOf": {
-        "description": "Piece of context that contains the Emotion",
+        "description": "Piece of context that contains the Sentiment",
         "type": "string"
     },
     "onyx:hasDimension": {
@@ -6,7 +6,7 @@
         "type": "string"
     },
     "nif:isString": {
-        "description": "String contained in this Context. Alternative: nif:isString",
+        "description": "String contained in this Context",
         "type": "string"
     },
     "sentiments": {
@@ -1,17 +0,0 @@
-{
-    "$schema": "http://json-schema.org/draft-04/schema#",
-    "allOf": [
-        {"$ref": "response.json"},
-        {
-            "title": "Help",
-            "description": "Help containing accepted parameters",
-            "type": "object",
-            "properties": {
-                "parameters": {
-                    "type": "object"
-                }
-            },
-            "required": "parameters"
-        }
-    ]
-}
@@ -3,7 +3,6 @@
     "allOf": [
         {"$ref": "response.json"},
         {
-            "required": ["plugins"],
             "properties": {
                 "plugins": {
                     "type": "array",
@@ -2,12 +2,7 @@
     "$schema": "http://json-schema.org/draft-04/schema#",
     "type": "object",
     "properties": {
-        "@type": {"type": "string"},
-        "parameters": {
-            "type": "object",
-            "default": {}
-
-        }
+        "@type": {"type": "string"}
     },
     "required": ["@type"]
@@ -1,7 +1,7 @@
 var ONYX = "http://www.gsi.dit.upm.es/ontologies/onyx/ns#";
 var RDF_TYPE = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type";
 var plugins_params={};
-var default_params = JSON.parse($.ajax({type: "GET", url: "/api?help=True" , async: false}).responseText);
 function replaceURLWithHTMLLinks(text) {
     console.log('Text: ' + text);
     var exp = /(\b(https?|ftp|file):\/\/[-A-Z0-9+&@#\/%?=~_|!:,.;]*[-A-Z0-9+&@#\/%=~_|])/ig;
@@ -28,68 +28,54 @@ function hashchanged(){
 $(document).ready(function() {
     var response = JSON.parse($.ajax({type: "GET", url: "/api/plugins/" , async: false}).responseText);
     var defaultPlugin= JSON.parse($.ajax({type: "GET", url: "/api/plugins/default" , async: false}).responseText);
     html="";
     var availablePlugins = document.getElementById('availablePlugins');
     plugins = response.plugins;
-    gplugins = {};
-    for (r in plugins){
-        ptype = plugins[r]['@type'];
-        if(gplugins[ptype] == undefined){
-            gplugins[ptype] = [r]
-        }else{
-            gplugins[ptype].push(r)
-        }
-    }
-    for (g in gplugins){
-        html += "<optgroup label=\""+g+"\">"
-        for (r in gplugins[g]){
-            plugin = plugins[r]
-            if (plugin["name"]){
-                if (plugin["name"] == defaultPlugin["name"]){
-                    if (plugin["is_activated"]){
-                        html+= "<option value=\""+plugin["name"]+"\" selected=\"selected\">"+plugin["name"]+"</option>"
-                    }else{
-                        html+= "<option value=\""+plugin["name"]+"\" selected=\"selected\" disabled=\"disabled\">"+plugin["name"]+"</option>"
-                    }
-                }
-                else{
-                    if (plugin["is_activated"]){
-                        html+= "<option value=\""+plugin["name"]+"\">"+plugin["name"]+"</option>"
-                    }
-                    else{
-                        html+= "<option value=\""+plugin["name"]+"\" disabled=\"disabled\">"+plugin["name"]+"</option>"
-                    }
-                }
-            }
-            if (plugin["extra_params"]){
-                plugins_params[plugin["name"]]={};
-                for (param in plugin["extra_params"]){
-                    if (typeof plugin["extra_params"][param] !="string"){
-                        var params = new Array();
-                        var alias = plugin["extra_params"][param]["aliases"][0];
-                        params[alias]=new Array();
-                        for (option in plugin["extra_params"][param]["options"]){
-                            params[alias].push(plugin["extra_params"][param]["options"][option])
-                        }
-                        plugins_params[plugin["name"]][alias] = (params[alias])
-                    }
-                }
-            }
-            var pluginList = document.createElement('li');
-
-            newHtml = ""
-            if(plugin.url) {
-                newHtml= "<a href="+plugin.url+">" + plugin.name + "</a>";
-            }else {
-                newHtml= plugin["name"];
-            }
-            newHtml += ": " + replaceURLWithHTMLLinks(plugin.description);
-            pluginList.innerHTML = newHtml;
-            availablePlugins.appendChild(pluginList)
-        }
-        html += "</optgroup>"
-    }
+    for (r in plugins){
+        plugin = plugins[r]
+        if (plugin["name"]){
+            if (plugin["name"] == defaultPlugin["name"]){
+                if (plugin["is_activated"]){
+                    html+= "<option value=\""+plugin["name"]+"\" selected=\"selected\">"+plugin["name"]+"</option>"
+                }else{
+                    html+= "<option value=\""+plugin["name"]+"\" selected=\"selected\" disabled=\"disabled\">"+plugin["name"]+"</option>"
+                }
+            }
+            else{
+                if (plugin["is_activated"]){
+                    html+= "<option value=\""+plugin["name"]+"\">"+plugin["name"]+"</option>"
+                }
+                else{
+                    html+= "<option value=\""+plugin["name"]+"\" disabled=\"disabled\">"+plugin["name"]+"</option>"
+                }
+            }
+        }
+        if (plugin["extra_params"]){
+            plugins_params[plugin["name"]]={};
+            for (param in plugin["extra_params"]){
+                if (typeof plugin["extra_params"][param] !="string"){
+                    var params = new Array();
+                    var alias = plugin["extra_params"][param]["aliases"][0];
+                    params[alias]=new Array();
+                    for (option in plugin["extra_params"][param]["options"]){
+                        params[alias].push(plugin["extra_params"][param]["options"][option])
+                    }
+                    plugins_params[plugin["name"]][alias] = (params[alias])
+                }
+            }
+        }
+        var pluginList = document.createElement('li');
+        newHtml = ""
+        if(plugin.url) {
+            newHtml= "<a href="+plugin.url+">" + plugin.name + "</a>";
+        }else {
+            newHtml= plugin["name"];
+        }
+        newHtml += ": " + replaceURLWithHTMLLinks(plugin.description);
+        pluginList.innerHTML = newHtml;
+        availablePlugins.appendChild(pluginList)
+    }
     document.getElementById('plugins').innerHTML = html;
     change_params();

@@ -102,23 +88,8 @@ $(document).ready(function() {

 function change_params(){
     var plugin = document.getElementById("plugins").options[document.getElementById("plugins").selectedIndex].value;

     html=""
-    for (param in default_params){
-        if ((default_params[param]['options']) && (['help','conversion'].indexOf(param) < 0)){
-            html+= "<label> "+param+"</label>"
-            html+= "<select id=\""+param+"\" name=\""+param+"\">"
-            for (option in default_params[param]['options']){
-                if (default_params[param]['options'][option] == default_params[param]['default']){
-                    html+="<option value \""+default_params[param]['options'][option]+"\" selected >"+default_params[param]['options'][option]+"</option>"
-                }
-                else{
-                    html+="<option value \""+default_params[param]['options'][option]+"\">"+default_params[param]['options'][option]+"</option>"
-
-                }
-            }
-            html+="</select><br>"
-        }
-    }
     for (param in plugins_params[plugin]){
         if (param || plugins_params[plugin][param].length > 1){
             html+= "<label> Parameter "+param+"</label>"
@@ -149,32 +120,15 @@ function load_JSON(){
         }
     }
 }
-    for (param in default_params){
-        if ((param != null) && (default_params[param]['options']) && (['help','conversion'].indexOf(param) < 0)){
-            var param_value = encodeURIComponent(document.getElementById(param).options[document.getElementById(param).selectedIndex].text);
-            if (param_value){
-                url+="&"+param+"="+param_value
-            }
-        }
-    }
-
-    var response = $.ajax({type: "GET", url: url , async: false}).responseText;
-    rawcontainer.innerHTML = replaceURLWithHTMLLinks(response)
-
-    document.getElementById("input_request").innerHTML = "<a href='"+url+"'>"+url+"</a>"
-    document.getElementById("results-div").style.display = 'block';
-    try {
-        response = JSON.parse(response);
+    var response = JSON.parse($.ajax({type: "GET", url: url , async: false}).responseText);
     var options = {
         mode: 'view'
     };
     var editor = new JSONEditor(container, options, response);
     editor.expandAll();
-    }
-    catch(err){
-        console.log("Error decoding JSON (got turtle?)");
-    }
+    rawcontainer.innerHTML = replaceURLWithHTMLLinks(JSON.stringify(response, undefined, 2))
+    document.getElementById("input_request").innerHTML = "<a href='"+url+"'>"+url+"</a>"
+    document.getElementById("results-div").style.display = 'block';


 }
@@ -1,25 +0,0 @@
-from . import models
-
-
-def check_template(indict, template):
-    if isinstance(template, dict) and isinstance(indict, dict):
-        for k, v in template.items():
-            if k not in indict:
-                return '{} not in {}'.format(k, indict)
-            check_template(indict[k], v)
-    elif isinstance(template, list) and isinstance(indict, list):
-        if len(indict) != len(template):
-            raise models.Error('Different size for {} and {}'.format(indict, template))
-        for e in template:
-            found = False
-            for i in indict:
-                try:
-                    check_template(i, e)
-                    found = True
-                except models.Error as ex:
-                    continue
-            if not found:
-                raise models.Error('{} not found in {}'.format(e, indict))
-    else:
-        if indict != template:
-            raise models.Error('{} and {} are different'.format(indict, template))
@@ -21,6 +21,3 @@ class AsyncPlugin(AnalysisPlugin):
         values = self._do_async(2)
         entry.async_values = values
         yield entry
-
-    def test(self):
-        pass
@@ -3,9 +3,6 @@ from senpy.plugins import SentimentPlugin

 class DummyPlugin(SentimentPlugin):
     def analyse_entry(self, entry, params):
-        entry['nif:iString'] = entry['nif:isString'][::-1]
+        entry.text = entry.text[::-1]
         entry.reversed = entry.get('reversed', 0) + 1
         yield entry
-
-    def test(self):
-        pass
@@ -9,6 +9,3 @@ class SleepPlugin(AnalysisPlugin):
     def analyse_entry(self, entry, params):
         sleep(float(params.get("timeout", self.timeout)))
         yield entry
-
-    def test(self):
-        pass
@@ -4,7 +4,7 @@
     "description": "I am dummy",
     "author": "@balkian",
     "version": "0.1",
-    "timeout": 0.05,
+    "timeout": 0.5,
     "extra_params": {
         "timeout": {
             "@id": "timeout_sleep",
@@ -11,24 +11,24 @@ class APITest(TestCase):

     def test_api_params(self):
         """The API should not define any required parameters without a default"""
-        parse_params({}, API_PARAMS)
+        parse_params({}, spec=API_PARAMS)

     def test_web_params(self):
         """The WEB should not define any required parameters without a default"""
-        parse_params({}, WEB_PARAMS)
+        parse_params({}, spec=WEB_PARAMS)

     def test_basic(self):
         a = {}
         try:
-            parse_params(a, NIF_PARAMS)
+            parse_params(a, spec=NIF_PARAMS)
             raise AssertionError()
         except Error:
             pass
         a = {'input': 'hello'}
-        p = parse_params(a, NIF_PARAMS)
+        p = parse_params(a, spec=NIF_PARAMS)
         assert 'input' in p
         b = {'i': 'hello'}
-        p = parse_params(b, NIF_PARAMS)
+        p = parse_params(b, spec=NIF_PARAMS)
         assert 'input' in p

     def test_plugin(self):
@@ -40,18 +40,18 @@ class APITest(TestCase):
             }
         }
         try:
-            parse_params(query, plug_params)
+            parse_params(query, spec=plug_params)
             raise AssertionError()
         except Error:
             pass
         query['hello'] = 'world'
-        p = parse_params(query, plug_params)
+        p = parse_params(query, spec=plug_params)
         assert 'hello' in p
         assert p['hello'] == 'world'
         del query['hello']

         query['hiya'] = 'dlrow'
-        p = parse_params(query, plug_params)
+        p = parse_params(query, spec=plug_params)
         assert 'hello' in p
         assert 'hiya' in p
         assert p['hello'] == 'dlrow'
@@ -63,17 +63,6 @@ class APITest(TestCase):
             'default': 1
             }
         }
-        p = parse_params({}, spec)
+        p = parse_params({}, spec=spec)
         assert 'hello' in p
         assert p['hello'] == 1
-
-    def test_call(self):
-        call = {
-            'input': "Aloha my friend",
-            'algo': "Dummy"
-        }
-        p = parse_params(call, API_PARAMS, NIF_PARAMS)
-        assert 'algorithm' in p
-        assert "Dummy" in p['algorithm']
-        assert 'input' in p
-        assert p['input'] == 'Aloha my friend'
@@ -17,19 +17,17 @@ def parse_resp(resp):


 class BlueprintsTest(TestCase):
-    @classmethod
-    def setUpClass(cls):
-        """Set up only once, and re-use in every individual test"""
-        cls.app = Flask("test_extensions")
-        cls.app.debug = False
-        cls.client = cls.app.test_client()
-        cls.senpy = Senpy()
-        cls.senpy.init_app(cls.app)
-        cls.dir = os.path.join(os.path.dirname(__file__), "..")
-        cls.senpy.add_folder(cls.dir)
-        cls.senpy.activate_plugin("Dummy", sync=True)
-        cls.senpy.activate_plugin("DummyRequired", sync=True)
-        cls.senpy.default_plugin = 'Dummy'
+    def setUp(self):
+        self.app = Flask("test_extensions")
+        self.app.debug = False
+        self.client = self.app.test_client()
+        self.senpy = Senpy()
+        self.senpy.init_app(self.app)
+        self.dir = os.path.join(os.path.dirname(__file__), "..")
+        self.senpy.add_folder(self.dir)
+        self.senpy.activate_plugin("Dummy", sync=True)
+        self.senpy.activate_plugin("DummyRequired", sync=True)
+        self.senpy.default_plugin = 'Dummy'

     def assertCode(self, resp, code):
         self.assertEqual(resp.status_code, code)
@@ -38,7 +36,6 @@ class BlueprintsTest(TestCase):
         """
         Calling with no arguments should ask the user for more arguments
         """
-        self.app.debug = False
         resp = self.client.get("/api/")
         self.assertCode(resp, 400)
         js = parse_resp(resp)
@@ -55,7 +52,7 @@ class BlueprintsTest(TestCase):
         The dummy plugin returns an empty response,\
         it should contain the context
         """
-        resp = self.client.get("/api/?i=My aloha mohame&with_parameters=True")
+        resp = self.client.get("/api/?i=My aloha mohame")
         self.assertCode(resp, 200)
         js = parse_resp(resp)
         logging.debug("Got response: %s", js)
@@ -66,7 +63,7 @@ class BlueprintsTest(TestCase):
         """
         Extra params that have a default should
         """
-        resp = self.client.get("/api/?i=My aloha mohame&algo=Dummy&with_parameters=true")
+        resp = self.client.get("/api/?i=My aloha mohame&algo=Dummy")
         self.assertCode(resp, 200)
         js = parse_resp(resp)
         logging.debug("Got response: %s", js)
@@ -78,7 +75,6 @@ class BlueprintsTest(TestCase):
         Extra params that have a required argument that does not
         have a default should raise an error.
         """
-        self.app.debug = False
         resp = self.client.get("/api/?i=My aloha mohame&algo=DummyRequired")
         self.assertCode(resp, 400)
         js = parse_resp(resp)
@@ -90,7 +86,6 @@ class BlueprintsTest(TestCase):
         The dummy plugin returns an empty response,\
         it should contain the context
         """
-        self.app.debug = False
         resp = self.client.get("/api/?i=My aloha mohame&algo=DOESNOTEXIST")
         self.assertCode(resp, 404)
         js = parse_resp(resp)
@@ -157,10 +152,3 @@ class BlueprintsTest(TestCase):
         self.assertCode(resp, 200)
         js = parse_resp(resp)
         assert "$schema" in js
-
-    def test_help(self):
-        resp = self.client.get("/api/?help=true")
-        self.assertCode(resp, 200)
-        js = parse_resp(resp)
-        assert "parameters" in js
-        assert "help" in js["parameters"]
@@ -1,6 +1,11 @@
 import logging
 from functools import partial

+try:
+    from unittest.mock import patch
+except ImportError:
+    from mock import patch
+
 logger = logging.getLogger(__name__)

 from unittest import TestCase
@@ -12,7 +17,11 @@ class CLITest(TestCase):
     def test_basic(self):
         self.assertRaises(Error, partial(main_function, []))

-        res = main_function(['--input', 'test', '--algo', 'rand', '--with-parameters'])
-        assert res.parameters['input'] == 'test'
-        assert 'rand' in res.parameters['algorithm']
-        assert res.parameters['input'] == 'test'
+        with patch('senpy.extensions.Senpy.analyse') as patched:
+            main_function(['--input', 'test'])
+
+            patched.assert_called_with(input='test')
+        with patch('senpy.extensions.Senpy.analyse') as patched:
+            main_function(['--input', 'test', '--algo', 'rand'])
+
+            patched.assert_called_with(input='test', algo='rand')
@@ -10,22 +10,15 @@ except ImportError:
|
|||||||
|
|
||||||
from functools import partial
|
from functools import partial
|
||||||
from senpy.extensions import Senpy
|
from senpy.extensions import Senpy
|
||||||
from senpy import plugins
|
|
||||||
from senpy.models import Error, Results, Entry, EmotionSet, Emotion, Plugin
|
from senpy.models import Error, Results, Entry, EmotionSet, Emotion, Plugin
|
||||||
from senpy import api
|
|
||||||
from flask import Flask
|
from flask import Flask
|
||||||
from unittest import TestCase
|
from unittest import TestCase
|
||||||
|
|
||||||
|
|
||||||
def analyse(instance, **kwargs):
|
|
||||||
request = api.parse_call(kwargs)
|
|
||||||
return instance.analyse(request)
|
|
||||||
|
|
||||||
|
|
||||||
class ExtensionsTest(TestCase):
|
class ExtensionsTest(TestCase):
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
self.app = Flask('test_extensions')
|
self.app = Flask('test_extensions')
|
||||||
self.dir = os.path.dirname(__file__)
|
self.dir = os.path.join(os.path.dirname(__file__))
|
||||||
self.senpy = Senpy(plugin_folder=self.dir,
|
self.senpy = Senpy(plugin_folder=self.dir,
|
||||||
app=self.app,
|
app=self.app,
|
||||||
default_plugins=False)
|
default_plugins=False)
|
||||||
@@ -45,8 +38,8 @@ class ExtensionsTest(TestCase):
|
|||||||
print(self.senpy.plugins)
|
print(self.senpy.plugins)
|
||||||
assert "Dummy" in self.senpy.plugins
|
assert "Dummy" in self.senpy.plugins
|
||||||
|
|
||||||
def test_installing(self):
|
def test_enabling(self):
|
||||||
""" Installing a plugin """
|
""" Enabling a plugin """
|
||||||
info = {
|
info = {
|
||||||
'name': 'TestPip',
|
'name': 'TestPip',
|
||||||
'module': 'dummy',
|
'module': 'dummy',
|
||||||
@@ -55,14 +48,14 @@ class ExtensionsTest(TestCase):
|
|||||||
'version': 0
|
'version': 0
|
||||||
}
|
}
|
||||||
root = os.path.join(self.dir, 'plugins', 'dummy_plugin')
|
root = os.path.join(self.dir, 'plugins', 'dummy_plugin')
|
||||||
module = plugins.load_plugin_from_info(info, root=root)
|
name, module = self.senpy._load_plugin_from_info(info, root=root)
|
||||||
plugins.install_deps(info)
|
assert name == 'TestPip'
|
||||||
assert module.name == 'TestPip'
|
|
||||||
assert module
|
assert module
|
||||||
import noop
|
import noop
|
||||||
dir(noop)
|
dir(noop)
|
||||||
|
self.senpy.install_deps()
|
||||||
|
|
||||||
def test_enabling(self):
|
def test_installing(self):
|
||||||
""" Enabling a plugin """
|
""" Enabling a plugin """
|
||||||
self.senpy.activate_all(sync=True)
|
self.senpy.activate_all(sync=True)
|
||||||
assert len(self.senpy.plugins) >= 3
|
assert len(self.senpy.plugins) >= 3
|
||||||
@@ -77,8 +70,9 @@ class ExtensionsTest(TestCase):
|
|||||||
'requirements': ['IAmMakingThisPackageNameUpToFail'],
|
'requirements': ['IAmMakingThisPackageNameUpToFail'],
|
||||||
'version': 0
|
'version': 0
|
||||||
}
|
}
|
||||||
|
root = os.path.join(self.dir, 'plugins', 'dummy_plugin')
|
||||||
with self.assertRaises(Error):
|
with self.assertRaises(Error):
|
||||||
plugins.install_deps(info)
|
name, module = self.senpy._load_plugin_from_info(info, root=root)
|
||||||
|
|
||||||
def test_disabling(self):
|
def test_disabling(self):
|
||||||
""" Disabling a plugin """
|
""" Disabling a plugin """
|
||||||
@@ -97,17 +91,23 @@ class ExtensionsTest(TestCase):
|
|||||||
def test_noplugin(self):
|
def test_noplugin(self):
|
||||||
""" Don't analyse if there isn't any plugin installed """
|
""" Don't analyse if there isn't any plugin installed """
|
||||||
self.senpy.deactivate_all(sync=True)
|
self.senpy.deactivate_all(sync=True)
|
||||||
self.assertRaises(Error, partial(analyse, self.senpy, input="tupni"))
|
self.assertRaises(Error, partial(self.senpy.analyse, input="tupni"))
|
||||||
|
self.assertRaises(Error,
|
||||||
|
partial(
|
||||||
|
self.senpy.analyse,
|
||||||
|
input="tupni",
|
||||||
|
algorithm='Dummy'))
|
||||||
|
|
||||||
def test_analyse(self):
|
def test_analyse(self):
|
||||||
""" Using a plugin """
|
""" Using a plugin """
|
||||||
# I was using mock until plugin started inheriting
|
# I was using mock until plugin started inheriting
|
||||||
# Leaf (defaultdict with __setattr__ and __getattr__.
|
# Leaf (defaultdict with __setattr__ and __getattr__.
|
||||||
r1 = analyse(self.senpy, algorithm="Dummy", input="tupni", output="tuptuo")
|
r1 = self.senpy.analyse(
|
||||||
r2 = analyse(self.senpy, input="tupni", output="tuptuo")
|
algorithm="Dummy", input="tupni", output="tuptuo")
|
||||||
|
r2 = self.senpy.analyse(input="tupni", output="tuptuo")
|
||||||
assert r1.analysis[0] == "plugins/Dummy_0.1"
|
assert r1.analysis[0] == "plugins/Dummy_0.1"
|
||||||
assert r2.analysis[0] == "plugins/Dummy_0.1"
|
assert r2.analysis[0] == "plugins/Dummy_0.1"
|
||||||
assert r1.entries[0]['nif:iString'] == 'input'
|
assert r1.entries[0].text == 'input'
|
||||||
|
|
||||||
def test_analyse_jsonld(self):
|
def test_analyse_jsonld(self):
|
||||||
""" Using a plugin with JSON-LD input"""
|
""" Using a plugin with JSON-LD input"""
|
||||||
@@ -116,34 +116,30 @@ class ExtensionsTest(TestCase):
|
|||||||
"@type": "results",
|
"@type": "results",
|
||||||
"entries": [
|
"entries": [
|
||||||
{"@id": "entry1",
|
{"@id": "entry1",
|
||||||
"nif:isString": "tupni",
|
"text": "tupni",
|
||||||
"@type": "entry"
|
"@type": "entry"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}'''
|
}'''
|
||||||
r1 = analyse(self.senpy,
|
r1 = self.senpy.analyse(algorithm="Dummy",
|
||||||
algorithm="Dummy",
|
input=js_input,
|
||||||
input=js_input,
|
informat="json-ld",
|
||||||
informat="json-ld",
|
output="tuptuo")
|
||||||
output="tuptuo")
|
r2 = self.senpy.analyse(input="tupni", output="tuptuo")
|
||||||
r2 = analyse(self.senpy,
|
|
||||||
input="tupni",
|
|
||||||
output="tuptuo")
|
|
||||||
assert r1.analysis[0] == "plugins/Dummy_0.1"
|
assert r1.analysis[0] == "plugins/Dummy_0.1"
|
||||||
assert r2.analysis[0] == "plugins/Dummy_0.1"
|
assert r2.analysis[0] == "plugins/Dummy_0.1"
|
||||||
assert r1.entries[0]['nif:iString'] == 'input'
|
assert r1.entries[0].text == 'input'
|
||||||
|
|
||||||
def test_analyse_error(self):
|
def test_analyse_error(self):
|
||||||
mm = mock.MagicMock()
|
mm = mock.MagicMock()
|
||||||
mm.id = 'magic_mock'
|
mm.id = 'magic_mock'
|
||||||
mm.is_activated = True
|
mm.analyse_entries.side_effect = Error('error on analysis', status=500)
|
||||||
mm.analyse_entries.side_effect = Error('error in analysis', status=500)
|
|
||||||
self.senpy.plugins['MOCK'] = mm
|
self.senpy.plugins['MOCK'] = mm
|
||||||
try:
|
try:
|
||||||
analyse(self.senpy, input='nothing', algorithm='MOCK')
|
self.senpy.analyse(input='nothing', algorithm='MOCK')
|
||||||
assert False
|
assert False
|
||||||
except Error as ex:
|
except Error as ex:
|
||||||
assert 'error in analysis' in ex['message']
|
assert ex['message'] == 'error on analysis'
|
||||||
assert ex['status'] == 500
|
assert ex['status'] == 500
|
||||||
|
|
||||||
mm.analyse.side_effect = Exception('generic exception on analysis')
|
mm.analyse.side_effect = Exception('generic exception on analysis')
|
||||||
@@ -151,10 +147,10 @@ class ExtensionsTest(TestCase):
|
|||||||
'generic exception on analysis')
|
'generic exception on analysis')
|
||||||
|
|
||||||
try:
|
try:
|
||||||
analyse(self.senpy, input='nothing', algorithm='MOCK')
|
self.senpy.analyse(input='nothing', algorithm='MOCK')
|
||||||
assert False
|
assert False
|
||||||
except Error as ex:
|
except Error as ex:
|
||||||
assert 'generic exception on analysis' in ex['message']
|
assert ex['message'] == 'generic exception on analysis'
|
||||||
assert ex['status'] == 500
|
assert ex['status'] == 500
|
||||||
|
|
||||||
def test_filtering(self):
|
def test_filtering(self):
|
||||||
@@ -177,34 +173,47 @@ class ExtensionsTest(TestCase):
             'onyx:usesEmotionModel': 'emoml:fsre-dimensions'
         })
         eSet1 = EmotionSet()
-        eSet1.prov__wasGeneratedBy = plugin['@id']
+        eSet1.prov__wasGeneratedBy = plugin['id']
         eSet1['onyx:hasEmotion'].append(Emotion({
             'emoml:arousal': 1,
             'emoml:potency': 0,
             'emoml:valence': 0
         }))
         response = Results({
-            'analysis': [{'plugin': plugin}],
             'entries': [Entry({
-                'nif:iString': 'much ado about nothing',
+                'text': 'much ado about nothing',
                 'emotions': [eSet1]
             })]
         })
         params = {'emotionModel': 'emoml:big6',
                   'conversion': 'full'}
         r1 = deepcopy(response)
-        r1.parameters = params
-        self.senpy.convert_emotions(r1)
+        self.senpy.convert_emotions(r1,
+                                    [plugin, ],
+                                    params)
         assert len(r1.entries[0].emotions) == 2
         params['conversion'] = 'nested'
         r2 = deepcopy(response)
-        r2.parameters = params
-        self.senpy.convert_emotions(r2)
+        self.senpy.convert_emotions(r2,
+                                    [plugin, ],
+                                    params)
         assert len(r2.entries[0].emotions) == 1
         assert r2.entries[0].emotions[0]['prov:wasDerivedFrom'] == eSet1
         params['conversion'] = 'filtered'
         r3 = deepcopy(response)
-        r3.parameters = params
-        self.senpy.convert_emotions(r3)
+        self.senpy.convert_emotions(r3,
+                                    [plugin, ],
+                                    params)
         assert len(r3.entries[0].emotions) == 1
         r3.jsonld()
+
+    # def test_async_plugin(self):
+    #     """ We should accept multiprocessing plugins with async=False"""
+    #     thread1 = self.senpy.activate_plugin("Async", sync=False)
+    #     thread1.join(timeout=1)
+    #     assert len(self.senpy.plugins['Async'].value) == 4
+
+    #     resp = self.senpy.analyse(input='nothing', algorithm='Async')
+
+    #     assert len(resp.entries[0].async_values) == 2
+    #     self.senpy.activate_plugin("Async", sync=True)
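The `test_filtering` hunk changes `convert_emotions` from reading `r.parameters` to taking the plugin list and parameters explicitly. A sketch of the payload the test builds, using the right-hand-side field names; it assumes `senpy.models` is importable, and the final call is left as a comment because it needs a live `Senpy` instance and plugin:

```python
from copy import deepcopy

from senpy.models import Results, Entry, EmotionSet, Emotion

eSet1 = EmotionSet()
eSet1['onyx:hasEmotion'].append(Emotion({
    'emoml:arousal': 1,
    'emoml:potency': 0,
    'emoml:valence': 0
}))
response = Results({
    'entries': [Entry({
        'text': 'much ado about nothing',
        'emotions': [eSet1]
    })]
})
params = {'emotionModel': 'emoml:big6', 'conversion': 'full'}

r1 = deepcopy(response)
# Right-hand side of the diff: plugins and params are passed explicitly
# instead of being read from r1.parameters.
# self.senpy.convert_emotions(r1, [plugin], params)
```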
@@ -185,7 +185,7 @@ class ModelsTest(TestCase):
             'entries': [{
                 '@id': 'entry1',
                 '@type': 'entry',
-                'nif:isString': 'TEST'
+                'text': 'TEST'
             }]
         }
         recovered = from_dict(results)
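For context, the `from_dict` call that closes this hunk lifts a plain JSON-LD-shaped dict back into typed model objects. A sketch of the round-trip, assuming `from_dict` is exported by `senpy.models` as the test module's imports suggest:

```python
from senpy.models import from_dict

results = {
    '@type': 'results',
    'entries': [{
        '@id': 'entry1',
        '@type': 'entry',
        'text': 'TEST'  # right-hand side of the diff; was 'nif:isString'
    }]
}

recovered = from_dict(results)
assert recovered.entries[0].text == 'TEST'
```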
@@ -7,11 +7,11 @@ import tempfile

 from unittest import TestCase
 from senpy.models import Results, Entry, EmotionSet, Emotion
-from senpy import plugins
+from senpy.plugins import SentimentPlugin, ShelfMixin
 from senpy.plugins.conversion.emotion.centroids import CentroidConversion


-class ShelfDummyPlugin(plugins.SentimentPlugin, plugins.ShelfMixin):
+class ShelfDummyPlugin(SentimentPlugin, ShelfMixin):
     def activate(self, *args, **kwargs):
         if 'counter' not in self.sh:
             self.sh['counter'] = 0
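`ShelfDummyPlugin` is the fixture behind the shelf tests below: `ShelfMixin` gives the plugin a persistent dict-like `self.sh` backed by `shelf_file`. A minimal sketch of the same pattern, assuming the plugin API the tests exercise (`activate`, generator-style `analyse_entries`); the class name is illustrative:

```python
from senpy.plugins import SentimentPlugin, ShelfMixin


class CountingPlugin(SentimentPlugin, ShelfMixin):
    """Hypothetical plugin: counts how many times it has been run."""

    def activate(self, *args, **kwargs):
        # self.sh comes from ShelfMixin and survives across runs via save().
        if 'counter' not in self.sh:
            self.sh['counter'] = 0

    def analyse_entries(self, entries, params):
        self.sh['counter'] += 1
        for entry in entries:
            yield entry
```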
@@ -33,6 +33,7 @@ class PluginsTest(TestCase):
     def tearDown(self):
         if os.path.exists(self.shelf_dir):
             shutil.rmtree(self.shelf_dir)
+
         if os.path.isfile(self.shelf_file):
             os.remove(self.shelf_file)

@@ -50,29 +51,26 @@ class PluginsTest(TestCase):

     def test_shelf(self):
         ''' A shelf is created and the value is stored '''
-        newfile = self.shelf_file + "new"
         a = ShelfDummyPlugin(info={
             'name': 'shelve',
             'version': 'test',
-            'shelf_file': newfile
+            'shelf_file': self.shelf_file
         })
         assert a.sh == {}
         a.activate()
         assert a.sh == {'counter': 0}
-        assert a.shelf_file == newfile
+        assert a.shelf_file == self.shelf_file

         a.sh['a'] = 'fromA'
         assert a.sh['a'] == 'fromA'

         a.save()

-        sh = pickle.load(open(newfile, 'rb'))
+        sh = pickle.load(open(self.shelf_file, 'rb'))

         assert sh['a'] == 'fromA'

     def test_dummy_shelf(self):
-        with open(self.shelf_file, 'wb') as f:
-            pickle.dump({'counter': 99}, f)
         a = ShelfDummyPlugin(info={
             'name': 'DummyShelf',
             'shelf_file': self.shelf_file,
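The contract `test_shelf` checks reduces to: whatever `save()` writes to `shelf_file` must round-trip through `pickle`. The same contract in plain standard-library terms (file name and contents are illustrative):

```python
import os
import pickle
import tempfile

shelf_file = os.path.join(tempfile.mkdtemp(), 'shelf.pickle')

sh = {'counter': 0, 'a': 'fromA'}   # what the plugin keeps in self.sh
with open(shelf_file, 'wb') as f:   # what a.save() is expected to produce
    pickle.dump(sh, f)

with open(shelf_file, 'rb') as f:   # what the test reads back
    assert pickle.load(f)['a'] == 'fromA'
```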
@@ -82,13 +80,9 @@ class PluginsTest(TestCase):

         assert a.shelf_file == self.shelf_file
         res1 = a.analyse(input=1)
-        assert res1.entries[0].nif__isString == 100
-        a.deactivate()
-        del a
+        assert res1.entries[0].nif__isString == 1
+        res2 = a.analyse(input=1)
+        assert res2.entries[0].nif__isString == 2

-        with open(self.shelf_file, 'rb') as f:
-            sh = pickle.load(f)
-            assert sh['counter'] == 100
-
     def test_corrupt_shelf(self):
         ''' Reusing the values of a previous shelf '''
@@ -181,7 +175,6 @@ class PluginsTest(TestCase):
             "centroids_direction": ["emoml:big6", "emoml:fsre-dimensions"]
         }
         c = CentroidConversion(info)
-        print(c.serialize())

         es1 = EmotionSet()
         e1 = Emotion()
@@ -190,7 +183,6 @@ class PluginsTest(TestCase):
         res = c._forward_conversion(es1)
         assert res["X-dimension"] == 0.5
         assert res["Y-dimension"] == 0.5
-        print(res)

         e2 = Emotion()
         e2.onyx__hasEmotionCategory = "c2"
@@ -198,38 +190,15 @@ class PluginsTest(TestCase):
         res = c._forward_conversion(es1)
         assert res["X-dimension"] == 0
         assert res["Y-dimension"] == 1
-        print(res)

         e = Emotion()
         e["X-dimension"] = -0.2
         e["Y-dimension"] = -0.3
         res = c._backwards_conversion(e)
         assert res["onyx:hasEmotionCategory"] == "c3"
-        print(res)

         e = Emotion()
         e["X-dimension"] = -0.2
         e["Y-dimension"] = 0.3
         res = c._backwards_conversion(e)
         assert res["onyx:hasEmotionCategory"] == "c2"
-
-
-def make_mini_test(plugin_info):
-    def mini_test(self):
-        plugin = plugins.load_plugin_from_info(plugin_info, install=True)
-        plugin.test()
-    return mini_test
-
-
-def _add_tests():
-    root = os.path.dirname(__file__)
-    plugs = plugins.load_plugins(os.path.join(root, ".."), loader=plugins.parse_plugin_info)
-    for k, v in plugs.items():
-        pass
-        t_method = make_mini_test(v)
-        t_method.__name__ = 'test_plugin_{}'.format(k)
-        setattr(PluginsTest, t_method.__name__, t_method)
-        del t_method
-
-
-_add_tests()
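The assertions above pin down the geometry of `CentroidConversion`: forward conversion sums the centroids of the categories present (c1 alone gives (0.5, 0.5); c1 plus c2 gives (0, 1)), and backwards conversion picks the nearest centroid. A plain-Python sketch under assumed centroid values consistent with those assertions:

```python
# Assumed centroids, chosen to reproduce the asserted values; the real plugin
# reads them from its info dict.
centroids = {
    'c1': {'X': 0.5, 'Y': 0.5},
    'c2': {'X': -0.5, 'Y': 0.5},
    'c3': {'X': -0.5, 'Y': -0.5},
}


def forward(categories):
    # Sum the centroid components of every category present.
    return {d: sum(centroids[c][d] for c in categories) for d in ('X', 'Y')}


def backwards(point):
    # Pick the category whose centroid is closest (squared distance).
    return min(centroids, key=lambda c: sum(
        (point[d] - centroids[c][d]) ** 2 for d in ('X', 'Y')))


assert forward(['c1']) == {'X': 0.5, 'Y': 0.5}
assert forward(['c1', 'c2']) == {'X': 0.0, 'Y': 1.0}
assert backwards({'X': -0.2, 'Y': -0.3}) == 'c3'
assert backwards({'X': -0.2, 'Y': 0.3}) == 'c2'
```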
@@ -34,8 +34,7 @@ def do_create_(jsfile, success):
         except (AssertionError, ValidationError, KeyError) as ex:
             if success:
                 raise
-            return
-        assert success
     return do_expected

