mirror of https://github.com/gsi-upm/senpy synced 2024-11-22 08:12:27 +00:00

Fixed py2 problems and other improvements

We've changed the way plugins are activated and removed the notion of
deactivated plugins.
Plugins now activate asynchronously.
When a plugin is called, it is activated on demand if it was not already active,
and the call waits until the plugin is fully activated.
J. Fernando Sánchez 2017-08-27 18:43:40 +02:00
parent 7aa91d1d60
commit 3e3f5555ff
15 changed files with 218 additions and 128 deletions
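The new activation flow in a nutshell: each plugin now carries a lock, activation runs at most once, and any caller that needs a plugin which is still activating simply blocks until it is ready. Below is a minimal, self-contained sketch of that pattern; is_activated, _lock and activate() mirror the Senpy._activate code in the diff further down, while DemoPlugin and activate_on_demand are illustrative names that are not part of senpy.

import threading


class DemoPlugin(object):
    """Stand-in for a senpy plugin whose activation may be slow (loading models, etc.)."""

    def __init__(self, name):
        self.name = name
        self.is_activated = False
        self._lock = threading.Lock()  # guards concurrent activation attempts

    def activate(self):
        pass  # expensive setup would happen here


def activate_on_demand(plugin):
    """Activate a plugin at most once, blocking callers until it is ready."""
    with plugin._lock:
        if plugin.is_activated:
            return
        plugin.activate()
        plugin.is_activated = True


plugin = DemoPlugin('Dummy')

# Activation is kicked off asynchronously, e.g. at server start-up...
threading.Thread(target=activate_on_demand, args=(plugin,)).start()

# ...and a call that arrives in the meantime blocks on the lock until
# activation has finished, instead of failing with "not activated yet".
activate_on_demand(plugin)
assert plugin.is_activated

The actual changes below apply the same idea inside Senpy._activate and give every Plugin instance its own threading.Lock.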

View File

@@ -12,7 +12,7 @@ stages:
   - clean
 before_script:
-  - docker login -u $HUB_USER -p $HUB_PASSWORD
+  - make -e login
 .test: &test_definition
   stage: test

Makefile (159 lines changed)
View File

@@ -19,46 +19,29 @@ KUBE_URL=""
 KUBE_TOKEN=""
 KUBE_NAMESPACE=$(NAME)
 KUBECTL=docker run --rm -v $(KUBE_CA_PEM_FILE):/tmp/ca.pem -v $$PWD:/tmp/cwd/ -i lachlanevenson/k8s-kubectl --server="$(KUBE_URL)" --token="$(KUBE_TOKEN)" --certificate-authority="/tmp/ca.pem" -n $(KUBE_NAMESPACE)
-CI_REGISTRY=docker.io
-CI_REGISTRY_USER=gitlab
-CI_BUILD_TOKEN=""
 CI_COMMIT_REF_NAME=master
-all: build run
-.FORCE:
-version: .FORCE
-	@echo $(VERSION) > $(NAME)/VERSION
-	@echo $(VERSION)
-yapf:
-	yapf -i -r $(NAME)
-	yapf -i -r tests
-init:
-	pip install --user pre-commit
-	pre-commit install
-dockerfiles: $(addprefix Dockerfile-,$(PYVERSIONS))
-	@unlink Dockerfile >/dev/null
-	ln -s Dockerfile-$(PYMAIN) Dockerfile
-Dockerfile-%: Dockerfile.template
-	sed "s/{{PYVERSION}}/$*/" Dockerfile.template > Dockerfile-$*
+help: ## Show this help.
+	@fgrep -h "##" $(MAKEFILE_LIST) | fgrep -v fgrep | sed -e 's/\\$$//' | sed -e 's/\(.*:\)[^#]*##\s*\(.*\)/\1\t\2/' | column -t -s " "
+config: ## Load config from the environment. You should run it once in every session before other tasks. Run: eval $(make config)
+	@echo ". ../.env || true;"
+	@awk '{ print "export " $$0}' .env
+	@echo "# Please, run: "
+	@echo "# eval \$$(make config)"
+# If you need to run a command on the key/value pairs, use this:
+# @awk '{ split($$0, a, "="); "echo " a[2] " | base64 -w 0" |& getline b64; print "export " a[1] "=" a[2]; print "export " a[1] "_BASE64=" b64}' .env
 quick_build: $(addprefix build-, $(PYMAIN))
-build: $(addprefix build-, $(PYVERSIONS))
+build: $(addprefix build-, $(PYVERSIONS)) ## Build all images / python versions
-build-%: version Dockerfile-%
+build-%: version Dockerfile-% ## Build a specific version (e.g. build-2.7)
 	docker build -t '$(IMAGEWTAG)-python$*' --cache-from $(IMAGENAME):python$* -f Dockerfile-$* .;
-quick_test: $(addprefix test-,$(PYMAIN))
+quick_test: test-$(PYMAIN)
-dev-%:
+dev-%: ## Launch a specific development environment using docker (e.g. dev-2.7)
 	@docker start $(NAME)-dev$* || (\
 		$(MAKE) build-$*; \
 		docker run -d -w /usr/src/app/ -p $(DEVPORT):5000 -v $$PWD:/usr/src/app --entrypoint=/bin/bash -ti --name $(NAME)-dev$* '$(IMAGEWTAG)-python$*'; \
@@ -66,34 +49,81 @@ dev-%:
 	docker exec -ti $(NAME)-dev$* bash
-dev: dev-$(PYMAIN)
+dev: dev-$(PYMAIN) ## Launch a development environment using docker, using the default python version
-test-all: $(addprefix test-,$(PYVERSIONS))
-# Run setup.py from in an isolated container, built from the base image.
-test-%:
+test-%: ## Run setup.py from in an isolated container, built from the base image. (e.g. test-2.7)
 # This speeds tests up because the image has most (if not all) of the dependencies already.
 	docker rm $(NAME)-test-$* || true
 	docker create -ti --name $(NAME)-test-$* --entrypoint="" -w /usr/src/app/ $(IMAGENAME):python$* python setup.py test
 	docker cp . $(NAME)-test-$*:/usr/src/app
 	docker start -a $(NAME)-test-$*
-test: test-$(PYMAIN)
+test: $(addprefix test-,$(PYVERSIONS)) ## Run the tests with the main python version
+run-%: build-%
+	docker run --rm -p $(DEVPORT):5000 -ti '$(IMAGEWTAG)-python$(PYMAIN)' --default-plugins
+run: run-$(PYMAIN)
+#
+# Deployment and advanced features
+#
+deploy: ## Deploy to kubernetes using the credentials in KUBE_CA_PEM_FILE (or KUBE_CA_BUNDLE ) and TOKEN
+	@cat k8s/* | envsubst | $(KUBECTL) apply -f -
+deploy-check: ## Get the deployed configuration.
+	@$(KUBECTL) get deploy,pods,svc,ingress
+login: ## Log in to the registry. It will only be used in the server, or when running a CI task locally (if CI_BUILD_TOKEN is set).
+ifeq ($(CI_BUILD_TOKEN),)
+	@echo "Not logging in to the docker registry" "$(CI_REGISTRY)"
+else
+	docker login -u gitlab-ci-token -p $(CI_BUILD_TOKEN) $(CI_REGISTRY)
+endif
+ifeq ($(HUB_USER),)
+	@echo "Not logging in to global the docker registry"
+	docker login -u $(HUB_USER) -p $(HUB_PASSWORD)
+else
+endif
+.FORCE:
+version: .FORCE
+	@echo $(VERSION) > $(NAME)/VERSION
+	@echo $(VERSION)
+yapf: ## Format python code
+	yapf -i -r $(NAME)
+	yapf -i -r tests
+init: ## Init pre-commit hooks (i.e. enforcing format checking before allowing a commit)
+	pip install --user pre-commit
+	pre-commit install
+dockerfiles: $(addprefix Dockerfile-,$(PYVERSIONS)) ## Generate dockerfiles for each python version
+	@unlink Dockerfile >/dev/null
+	ln -s Dockerfile-$(PYMAIN) Dockerfile
+Dockerfile-%: Dockerfile.template ## Generate a specific dockerfile (e.g. Dockerfile-2.7)
+	sed "s/{{PYVERSION}}/$*/" Dockerfile.template > Dockerfile-$*
 dist/$(TARNAME): version
 	python setup.py sdist;
-sdist: dist/$(TARNAME)
+sdist: dist/$(TARNAME) ## Generate the distribution file (wheel)
-pip_test-%: sdist
+pip_test-%: sdist ## Test the distribution file using pip install and a specific python version (e.g. pip_test-2.7)
 	docker run --rm -v $$PWD/dist:/dist/ python:$* pip install /dist/$(TARNAME);
-pip_test: $(addprefix pip_test-,$(PYVERSIONS))
+pip_test: $(addprefix pip_test-,$(PYVERSIONS)) ## Test pip installation with the main python version
-pip_upload: pip_test
+pip_upload: pip_test ## Upload package to pip
 	python setup.py sdist upload ;
-clean:
+clean: ## Clean older docker images and containers related to this project and dev environments
 	@docker ps -a | grep $(IMAGENAME) | awk '{ split($$2, vers, "-"); if(vers[0] != "${VERSION}"){ print $$1;}}' | xargs docker rm -v 2>/dev/null|| true
 	@docker images | grep $(IMAGENAME) | awk '{ split($$2, vers, "-"); if(vers[0] != "${VERSION}"){ print $$1":"$$2;}}' | xargs docker rmi 2>/dev/null|| true
 	@docker stop $(addprefix $(NAME)-dev,$(PYVERSIONS)) 2>/dev/null || true
@@ -108,30 +138,58 @@ git_tag:
 git_push:
 	git push --tags origin master
+quick_build: $(addprefix build-, $(PYMAIN))
+build: $(addprefix build-, $(PYVERSIONS)) ## Build all images / python versions
+build-%: version Dockerfile-% ## Build a specific version (e.g. build-2.7)
+	docker build -t '$(IMAGEWTAG)-python$*' --cache-from $(IMAGENAME):python$* -f Dockerfile-$* .;
+quick_test: test-$(PYMAIN)
+dev-%: ## Launch a specific development environment using docker (e.g. dev-2.7)
+	@docker start $(NAME)-dev$* || (\
+		$(MAKE) build-$*; \
+		docker run -d -w /usr/src/app/ -p $(DEVPORT):5000 -v $$PWD:/usr/src/app --entrypoint=/bin/bash -ti --name $(NAME)-dev$* '$(IMAGEWTAG)-python$*'; \
+	)\
+	docker exec -ti $(NAME)-dev$* bash
+dev: dev-$(PYMAIN) ## Launch a development environment using docker, using the default python version
+test-%: ## Run setup.py from in an isolated container, built from the base image. (e.g. test-2.7)
+# This speeds tests up because the image has most (if not all) of the dependencies already.
+	docker rm $(NAME)-test-$* || true
+	docker create -ti --name $(NAME)-test-$* --entrypoint="" -w /usr/src/app/ $(IMAGENAME):python$* python setup.py test
+	docker cp . $(NAME)-test-$*:/usr/src/app
+	docker start -a $(NAME)-test-$*
+test: $(addprefix test-,$(PYVERSIONS)) ## Run the tests with the main python version
 run-%: build-%
 	docker run --rm -p $(DEVPORT):5000 -ti '$(IMAGEWTAG)-python$(PYMAIN)' --default-plugins
 run: run-$(PYMAIN)
-push-latest: $(addprefix push-latest-,$(PYVERSIONS))
+push-latest: $(addprefix push-latest-,$(PYVERSIONS)) ## Push the "latest" tag to dockerhub
 	docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGEWTAG)'
 	docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGENAME)'
 	docker push '$(IMAGENAME):latest'
 	docker push '$(IMAGEWTAG)'
-push-latest-%: build-%
+push-latest-%: build-% ## Push the latest image for a specific python version
 	docker tag $(IMAGENAME):$(VERSION)-python$* $(IMAGENAME):python$*
 	docker push $(IMAGENAME):$(VERSION)-python$*
 	docker push $(IMAGENAME):python$*
-push-%: build-%
+push-%: build-% ## Push the image of the current version (tagged). e.g. push-2.7
 	docker push $(IMAGENAME):$(VERSION)-python$*
-push: $(addprefix push-,$(PYVERSIONS))
+push: $(addprefix push-,$(PYVERSIONS)) ## Push an image with the current version for every python version
 	docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGEWTAG)'
 	docker push $(IMAGENAME):$(VERSION)
-push-github:
+push-github: ## Push the code to github. You need to set up HUB_USER and HUB_PASSWORD
 	$(eval KEY_FILE := $(shell mktemp))
 	@echo "$$GITHUB_DEPLOY_KEY" > $(KEY_FILE)
 	@git remote rm github-deploy || true
@@ -140,13 +198,8 @@ push-github:
 	@GIT_SSH_COMMAND="ssh -i $(KEY_FILE)" git push github-deploy $(CI_COMMIT_REF_NAME)
 	rm $(KEY_FILE)
-ci:
+ci: ## Run a task using gitlab-runner. Only use to debug problems in the CI pipeline
-	gitlab-runner exec docker --docker-volumes /var/run/docker.sock:/var/run/docker.sock --env CI_PROJECT_NAME=$(NAME) ${action}
+	gitlab-runner exec shell --builds-dir '.builds' --env CI_PROJECT_NAME=$(NAME) ${action}
-deploy:
-	@$(KUBECTL) delete secret $(CI_REGISTRY) || true
-	@$(KUBECTL) create secret docker-registry $(CI_REGISTRY) --docker-server=$(CI_REGISTRY) --docker-username=$(CI_REGISTRY_USER) --docker-email=$(CI_REGISTRY_USER) --docker-password=$(CI_BUILD_TOKEN)
-	@$(KUBECTL) apply -f /tmp/cwd/k8s/
 .PHONY: test test-% test-all build-% build test pip_test run yapf push-main push-% dev ci version .FORCE deploy

View File

@@ -2,6 +2,7 @@ Flask>=0.10.1
 requests>=2.4.1
 tornado>=4.4.3
 PyLD>=0.6.5
+nltk
 six
 future
 jsonschema

View File

@@ -95,8 +95,8 @@ def main():
     app = Flask(__name__)
     app.debug = args.debug
     sp = Senpy(app, args.plugins_folder, default_plugins=args.default_plugins)
-    sp.install_deps()
     if args.only_install:
+        sp.install_deps()
         return
     sp.activate_all()
     print('Senpy version {}'.format(senpy.__version__))

View File

@@ -153,7 +153,7 @@ def parse_params(indict, *specs):
             errors={param: error
                     for param, error in iteritems(wrong_params)})
         raise message
-    if 'algorithm' in outdict and isinstance(outdict['algorithm'], str):
+    if 'algorithm' in outdict and not isinstance(outdict['algorithm'], list):
         outdict['algorithm'] = outdict['algorithm'].split(',')
     return outdict

View File

@@ -89,6 +89,7 @@ def basic_api(f):
             response = f(*args, **kwargs)
         except Error as ex:
             response = ex
+            response.parameters = params
             logger.error(ex)
             if current_app.debug:
                 raise

View File

@@ -29,8 +29,10 @@ def main_function(argv):
                               api.NIF_PARAMS)
     plugin_folder = params['plugin_folder']
     sp = Senpy(default_plugins=False, plugin_folder=plugin_folder)
-    sp.activate_all(sync=True)
     request = api.parse_call(params)
+    algos = request.parameters.get('algorithm', sp.plugins.keys())
+    for algo in algos:
+        sp.activate_plugin(algo)
     res = sp.analyse(request)
     return res

View File

@@ -11,6 +11,7 @@ from .models import Error
 from .blueprints import api_blueprint, demo_blueprint, ns_blueprint
 from threading import Thread
+from functools import partial
 import os
 import copy
@@ -95,22 +96,20 @@ class Senpy(object):
             raise Error(
                 status=404,
                 message="The algorithm '{}' is not valid".format(algo))
-            if not self.plugins[algo].is_activated:
-                logger.debug("Plugin not activated: {}".format(algo))
-                raise Error(
-                    status=400,
-                    message=("The algorithm '{}'"
-                             " is not activated yet").format(algo))
             plugins.append(self.plugins[algo])
         return plugins
     def _process_entries(self, entries, req, plugins):
+        """
+        Recursively process the entries with the first plugin in the list, and pass the results
+        to the rest of the plugins.
+        """
         if not plugins:
             for i in entries:
                 yield i
             return
         plugin = plugins[0]
+        self._activate(plugin)  # Make sure the plugin is activated
         specific_params = api.get_extra_params(req, plugin)
         req.analysis.append({'plugin': plugin,
                              'parameters': specific_params})
@@ -118,6 +117,10 @@ class Senpy(object):
             for i in self._process_entries(results, req, plugins[1:]):
                 yield i
+    def install_deps(self):
+        for plugin in self.filter_plugins(is_activated=True):
+            plugins.install_deps(plugin)
     def analyse(self, request):
         """
         Main method that analyses a request, either from CLI or HTTP.
@@ -223,25 +226,42 @@ class Senpy(object):
         else:
             self._default = self.plugins[value]
-    def activate_all(self, sync=False):
+    def activate_all(self, sync=True):
         ps = []
         for plug in self.plugins.keys():
             ps.append(self.activate_plugin(plug, sync=sync))
         return ps
-    def deactivate_all(self, sync=False):
+    def deactivate_all(self, sync=True):
         ps = []
         for plug in self.plugins.keys():
             ps.append(self.deactivate_plugin(plug, sync=sync))
         return ps
-    def _set_active_plugin(self, plugin_name, active=True, *args, **kwargs):
+    def _set_active(self, plugin, active=True, *args, **kwargs):
         ''' We're using a variable in the plugin itself to activate/deactive plugins.\
         Note that plugins may activate themselves by setting this variable.
         '''
-        self.plugins[plugin_name].is_activated = active
+        plugin.is_activated = active
-    def activate_plugin(self, plugin_name, sync=False):
+    def _activate(self, plugin):
+        success = False
+        with plugin._lock:
+            if plugin.is_activated:
+                return
+            try:
+                plugin.activate()
+                msg = "Plugin activated: {}".format(plugin.name)
+                logger.info(msg)
+                success = True
+                self._set_active(plugin, success)
+            except Exception as ex:
+                msg = "Error activating plugin {} - {} : \n\t{}".format(
+                    plugin.name, ex, traceback.format_exc())
+                logger.error(msg)
+                raise Error(msg)
+    def activate_plugin(self, plugin_name, sync=True):
         try:
             plugin = self.plugins[plugin_name]
         except KeyError:
@@ -250,37 +270,17 @@ class Senpy(object):
         logger.info("Activating plugin: {}".format(plugin.name))
-        def act():
-            success = False
-            try:
-                plugin.activate()
-                msg = "Plugin activated: {}".format(plugin.name)
-                logger.info(msg)
-                success = True
-                self._set_active_plugin(plugin_name, success)
-            except Exception as ex:
-                msg = "Error activating plugin {} - {} : \n\t{}".format(
-                    plugin.name, ex, traceback.format_exc())
-                logger.error(msg)
-                raise Error(msg)
         if sync or 'async' in plugin and not plugin.async:
-            act()
+            self._activate(plugin)
         else:
-            th = Thread(target=act)
+            th = Thread(target=partial(self._activate, plugin))
             th.start()
             return th
-    def deactivate_plugin(self, plugin_name, sync=False):
-        try:
-            plugin = self.plugins[plugin_name]
-        except KeyError:
-            raise Error(
-                message="Plugin not found: {}".format(plugin_name), status=404)
-        self._set_active_plugin(plugin_name, False)
-        def deact():
+    def _deactivate(self, plugin):
+        with plugin._lock:
+            if not plugin.is_activated:
+                return
             try:
                 plugin.deactivate()
                 logger.info("Plugin deactivated: {}".format(plugin.name))
@@ -289,10 +289,19 @@ class Senpy(object):
                     "Error deactivating plugin {}: {}".format(plugin.name, ex))
                 logger.error("Trace: {}".format(traceback.format_exc()))
+    def deactivate_plugin(self, plugin_name, sync=True):
+        try:
+            plugin = self.plugins[plugin_name]
+        except KeyError:
+            raise Error(
+                message="Plugin not found: {}".format(plugin_name), status=404)
+        self._set_active(plugin, False)
         if sync or 'async' in plugin and not plugin.async:
-            deact()
+            self._deactivate(plugin)
         else:
-            th = Thread(target=deact)
+            th = Thread(target=partial(self._deactivate, plugin))
             th.start()
             return th

View File

@@ -14,6 +14,7 @@ import sys
 import subprocess
 import importlib
 import yaml
+import threading
 from .. import models, utils
 from ..api import API_PARAMS
@@ -34,6 +35,7 @@ class Plugin(models.Plugin):
         id = 'plugins/{}_{}'.format(info['name'], info['version'])
         super(Plugin, self).__init__(id=id, **info)
         self.is_activated = False
+        self._lock = threading.Lock()
     def get_folder(self):
         return os.path.dirname(inspect.getfile(self.__class__))
@@ -188,10 +190,12 @@ def validate_info(info):
     return all(x in info for x in ('name', 'module', 'description', 'version'))
-def load_module(name, root):
-    sys.path.append(root)
+def load_module(name, root=None):
+    if root:
+        sys.path.append(root)
     tmp = importlib.import_module(name)
-    sys.path.remove(root)
+    if root:
+        sys.path.remove(root)
     return tmp
@@ -221,16 +225,20 @@ def install_deps(*plugins):
         raise models.Error("Dependencies not properly installed")
-def load_plugin_from_info(info, root, validator=validate_info):
+def load_plugin_from_info(info, root=None, validator=validate_info, install=True):
+    if not root and '_path' in info:
+        root = os.path.dirname(info['_path'])
     if not validator(info):
-        logger.warn('The module info is not valid.\n\t{}'.format(info))
-        return None, None
+        raise ValueError('Plugin info is not valid: {}'.format(info))
     module = info["module"]
-    name = info["name"]
-    install_deps(info)
-    tmp = load_module(module, root)
+    try:
+        tmp = load_module(module, root)
+    except ImportError:
+        if not install:
+            raise
+        install_deps(info)
+        tmp = load_module(module, root)
     candidate = None
     for _, obj in inspect.getmembers(tmp):
         if inspect.isclass(obj) and inspect.getmodule(obj) == tmp:
@@ -242,16 +250,23 @@ def load_plugin_from_info(info, root, validator=validate_info):
             logger.debug("No valid plugin for: {}".format(module))
             return
         module = candidate(info=info)
-    return name, module
+    return module
-def load_plugin(root, filename):
-    fpath = os.path.join(root, filename)
+def parse_plugin_info(fpath):
     logger.debug("Loading plugin: {}".format(fpath))
     with open(fpath, 'r') as f:
         info = yaml.load(f)
+    info['_path'] = fpath
+    name = info['name']
+    return name, info
+def load_plugin(fpath):
+    name, info = parse_plugin_info(fpath)
     logger.debug("Info: {}".format(info))
-    return load_plugin_from_info(info, root)
+    plugin = load_plugin_from_info(info)
+    return name, plugin
@@ -261,7 +276,8 @@ def load_plugins(folders, loader=load_plugin):
         # Do not look for plugins in hidden or special folders
         dirnames[:] = [d for d in dirnames if d[0] not in ['.', '_']]
         for filename in fnmatch.filter(filenames, '*.senpy'):
-            name, plugin = loader(root, filename)
+            fpath = os.path.join(root, filename)
+            name, plugin = loader(fpath)
             if plugin and name:
                 plugins[name] = plugin
     return plugins

View File

View File

@@ -4,7 +4,7 @@
     "description": "I am dummy",
     "author": "@balkian",
     "version": "0.1",
-    "timeout": 0.5,
+    "timeout": 0.05,
     "extra_params": {
         "timeout": {
             "@id": "timeout_sleep",

View File

@@ -66,3 +66,14 @@ class APITest(TestCase):
         p = parse_params({}, spec)
         assert 'hello' in p
         assert p['hello'] == 1
+
+    def test_call(self):
+        call = {
+            'input': "Aloha my friend",
+            'algo': "Dummy"
+        }
+        p = parse_params(call, API_PARAMS, NIF_PARAMS)
+        assert 'algorithm' in p
+        assert "Dummy" in p['algorithm']
+        assert 'input' in p
+        assert p['input'] == 'Aloha my friend'

View File

@@ -66,7 +66,7 @@ class BlueprintsTest(TestCase):
         """
        Extra params that have a default should
         """
-        resp = self.client.get("/api/?i=My aloha mohame&algo=Dummy")
+        resp = self.client.get("/api/?i=My aloha mohame&algo=Dummy&with_parameters=true")
         self.assertCode(resp, 200)
         js = parse_resp(resp)
         logging.debug("Got response: %s", js)

View File

@@ -55,8 +55,9 @@ class ExtensionsTest(TestCase):
             'version': 0
         }
         root = os.path.join(self.dir, 'plugins', 'dummy_plugin')
-        name, module = plugins.load_plugin_from_info(info, root=root)
-        assert name == 'TestPip'
+        module = plugins.load_plugin_from_info(info, root=root)
+        plugins.install_deps(info)
+        assert module.name == 'TestPip'
         assert module
         import noop
         dir(noop)
@@ -76,9 +77,8 @@ class ExtensionsTest(TestCase):
             'requirements': ['IAmMakingThisPackageNameUpToFail'],
             'version': 0
         }
-        root = os.path.join(self.dir, 'plugins', 'dummy_plugin')
         with self.assertRaises(Error):
-            name, module = plugins.load_plugin_from_info(info, root=root)
+            plugins.install_deps(info)
     def test_disabling(self):
         """ Disabling a plugin """
@@ -98,12 +98,6 @@ class ExtensionsTest(TestCase):
         """ Don't analyse if there isn't any plugin installed """
         self.senpy.deactivate_all(sync=True)
         self.assertRaises(Error, partial(analyse, self.senpy, input="tupni"))
-        self.assertRaises(Error,
-                          partial(
-                              analyse,
-                              self.senpy,
-                              input="tupni",
-                              algorithm='Dummy'))
     def test_analyse(self):
         """ Using a plugin """
@@ -142,6 +136,7 @@ class ExtensionsTest(TestCase):
     def test_analyse_error(self):
         mm = mock.MagicMock()
         mm.id = 'magic_mock'
+        mm.is_activated = True
         mm.analyse_entries.side_effect = Error('error in analysis', status=500)
         self.senpy.plugins['MOCK'] = mm
         try:

View File

@@ -214,20 +214,22 @@ class PluginsTest(TestCase):
         assert res["onyx:hasEmotionCategory"] == "c2"
-def make_mini_test(plugin):
+def make_mini_test(plugin_info):
     def mini_test(self):
+        plugin = plugins.load_plugin_from_info(plugin_info, install=True)
         plugin.test()
     return mini_test
-def add_tests():
+def _add_tests():
     root = os.path.dirname(__file__)
-    plugs = plugins.load_plugins(os.path.join(root, ".."))
+    plugs = plugins.load_plugins(os.path.join(root, ".."), loader=plugins.parse_plugin_info)
     for k, v in plugs.items():
-        pass
         t_method = make_mini_test(v)
         t_method.__name__ = 'test_plugin_{}'.format(k)
         setattr(PluginsTest, t_method.__name__, t_method)
         del t_method
-add_tests()
+_add_tests()