mirror of https://github.com/gsi-upm/senpy
synced 2025-09-16 19:42:21 +00:00

Compare commits: 44-add-bas ... 0.10.6 (12 commits)
Commits (SHA1):
4675d9acf1
6832a2816d
7a8abf1823
a21ce0d90e
a964e586d7
bce42b5bb4
1313853788
697e779767
48f5ffafa1
73f7cbbe8a
07a41236f8
55db97cf62
.gitlab-ci.yml:
@@ -18,6 +18,8 @@ before_script:
   stage: test
   script:
     - make -e test-$PYTHON_VERSION
+  except:
+    - tags # Avoid unnecessary double testing
 
 test-3.5:
   <<: *test_definition
Makefile (docker helpers):
@@ -1,5 +1,14 @@
-IMAGENAME?=$(NAME)
+ifndef IMAGENAME
+ifdef CI_REGISTRY_IMAGE
+IMAGENAME=$(CI_REGISTRY_IMAGE)
+else
+IMAGENAME=$(NAME)
+endif
+endif
+
 IMAGEWTAG?=$(IMAGENAME):$(VERSION)
+DOCKER_FLAGS?=$(-ti)
+DOCKER_CMD?=
 
 docker-login: ## Log in to the registry. It will only be used in the server, or when running a CI task locally (if CI_BUILD_TOKEN is set).
 ifeq ($(CI_BUILD_TOKEN),)
@@ -19,6 +28,19 @@ else
 	@docker logout
 endif
 
+docker-run: ## Build a generic docker image
+	docker run $(DOCKER_FLAGS) $(IMAGEWTAG) $(DOCKER_CMD)
+
+docker-build: ## Build a generic docker image
+	docker build . -t $(IMAGEWTAG)
+
+docker-push: docker-login ## Push a generic docker image
+	docker push $(IMAGEWTAG)
+
+docker-latest-push: docker-login ## Push the latest image
+	docker tag $(IMAGEWTAG) $(IMAGENAME)
+	docker push $(IMAGENAME)
+
 login:: docker-login
 
 clean:: docker-clean
Makefile (GitHub mirror push):
@@ -14,7 +14,7 @@ push-github: ## Push the code to github. You need to set up GITHUB_DEPLOY_KEY
 ifeq ($(GITHUB_DEPLOY_KEY),)
 else
 	$(eval KEY_FILE := "$(shell mktemp)")
-	@echo "$(GITHUB_DEPLOY_KEY)" > $(KEY_FILE)
+	@printf '%b' '$(GITHUB_DEPLOY_KEY)' > $(KEY_FILE)
 	@git remote rm github-deploy || true
 	git remote add github-deploy $(GITHUB_REPO)
 	-@GIT_SSH_COMMAND="ssh -i $(KEY_FILE)" git fetch github-deploy $(CI_COMMIT_REF_NAME)
Makefile (Kubernetes deploy):
@@ -13,7 +13,7 @@
 KUBE_CA_TEMP=false
 ifndef KUBE_CA_PEM_FILE
 KUBE_CA_PEM_FILE:=$$PWD/.ca.crt
-CREATED:=$(shell echo -e "$(KUBE_CA_BUNDLE)" > $(KUBE_CA_PEM_FILE))
+CREATED:=$(shell printf '%b\n' '$(KUBE_CA_BUNDLE)' > $(KUBE_CA_PEM_FILE))
 endif
 KUBE_TOKEN?=""
 KUBE_NAMESPACE?=$(NAME)
Makefile (Python image build matrix):
@@ -26,6 +26,7 @@ Dockerfile-%: Dockerfile.template ## Generate a specific dockerfile (e.g. Docke
 quick_build: $(addprefix build-, $(PYMAIN))
 
 build: $(addprefix build-, $(PYVERSIONS)) ## Build all images / python versions
+	docker tag $(IMAGEWTAG)-python$(PYMAIN) $(IMAGEWTAG)
 
 build-%: version Dockerfile-% ## Build a specific version (e.g. build-2.7)
 	docker build -t '$(IMAGEWTAG)-python$*' -f Dockerfile-$* .;
@@ -77,7 +78,6 @@ push-latest: $(addprefix push-latest-,$(PYVERSIONS)) ## Push the "latest" tag to
 	docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGEWTAG)'
 	docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGENAME)'
 	docker push '$(IMAGENAME):latest'
-	docker push '$(IMAGEWTAG)'
 
 push-latest-%: build-% ## Push the latest image for a specific python version
 	docker tag $(IMAGENAME):$(VERSION)-python$* $(IMAGENAME):python$*
Dockerfile.template:
@@ -6,8 +6,6 @@ RUN apt-get update && apt-get install -y \
         libblas-dev liblapack-dev liblapacke-dev gfortran \
     && rm -rf /var/lib/apt/lists/*
 
-RUN pip install --no-cache-dir --upgrade numpy scipy scikit-learn
-
 RUN mkdir /cache/ /senpy-plugins /data/
 
 VOLUME /data/
MANIFEST.in:
@@ -1,5 +1,6 @@
 include requirements.txt
 include test-requirements.txt
+include extra-requirements.txt
 include README.rst
 include senpy/VERSION
 graft senpy/plugins
requirements.txt:
@@ -9,3 +9,6 @@ jsonref
 PyYAML
 rdflib
 rdflib-jsonld
+numpy
+scipy
+scikit-learn
senpy/__main__.py:
@@ -130,7 +130,7 @@ def main():
         return
     sp.activate_all()
     if args.only_test:
-        easy_test(sp.plugins())
+        easy_test(sp.plugins(), debug=args.debug)
         return
     print('Senpy version {}'.format(senpy.__version__))
     print('Server running on port %s:%d. Ctrl+C to quit' % (args.host,
senpy/api.py:
@@ -147,7 +147,7 @@ def parse_params(indict, *specs):
         for param, options in iteritems(spec):
             for alias in options.get("aliases", []):
                 # Replace each alias with the correct name of the parameter
-                if alias in indict and alias is not param:
+                if alias in indict and alias != param:
                     outdict[param] = indict[alias]
                     del outdict[alias]
                     continue
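The `is not` to `!=` fix above is more than style: `is` compares object identity, so two equal strings that happen to be distinct objects would wrongly enter the branch (and `del outdict[alias]` would then delete the very key that was just set). A minimal standalone illustration, not senpy code:

# Equality vs. identity for strings: equal values, distinct objects.
alias = ''.join(['api', 'key'])  # built at runtime -> a fresh str object
param = 'apikey'

print(alias == param)       # True: the values match
print(alias is not param)   # typically also True: different objects!
# So `alias is not param` can be True even when alias == param,
# which is why the comparison was changed to `alias != param`.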
senpy/blueprints.py:
@@ -19,7 +19,7 @@ Blueprints for Senpy
 """
 from flask import (Blueprint, request, current_app, render_template, url_for,
                    jsonify)
-from .models import Error, Response, Help, Plugins, read_schema, Datasets
+from .models import Error, Response, Help, Plugins, read_schema, dump_schema, Datasets
 from . import api
 from .version import __version__
 from functools import wraps
@@ -67,9 +67,9 @@ def index():
 @api_blueprint.route('/schemas/<schema>')
 def schema(schema="definitions"):
     try:
-        return jsonify(read_schema(schema))
-    except Exception:  # Should be FileNotFoundError, but it's missing from py2
-        return Error(message="Schema not found", status=404).flask()
+        return dump_schema(read_schema(schema))
+    except Exception as ex:  # Should be FileNotFoundError, but it's missing from py2
+        return Error(message="Schema not found: {}".format(ex), status=404).flask()
 
 
 def basic_api(f):
@@ -133,6 +133,7 @@ def api_root():
     req = api.parse_call(request.parameters)
     return current_app.senpy.analyse(req)
 
+
 @api_blueprint.route('/evaluate/', methods=['POST', 'GET'])
 @basic_api
 def evaluate():
@@ -145,6 +146,7 @@ def evaluate():
     response = current_app.senpy.evaluate(params)
     return response
 
+
 @api_blueprint.route('/plugins/', methods=['POST', 'GET'])
 @basic_api
 def plugins():
@@ -163,10 +165,10 @@ def plugin(plugin=None):
     return sp.get_plugin(plugin)
 
 
-@api_blueprint.route('/datasets/', methods=['POST','GET'])
+@api_blueprint.route('/datasets/', methods=['POST', 'GET'])
 @basic_api
 def datasets():
     sp = current_app.senpy
     datasets = sp.datasets
-    dic = Datasets(datasets = list(datasets.values()))
+    dic = Datasets(datasets=list(datasets.values()))
     return dic
senpy/models.py:
@@ -51,6 +51,10 @@ def read_schema(schema_file, absolute=False):
         return jsonref.load(f, base_uri=schema_uri)
 
 
+def dump_schema(schema):
+    return jsonref.dumps(schema)
+
+
 def load_context(context):
     logging.debug('Loading context: {}'.format(context))
     if not context:
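`read_schema` returns a structure built by `jsonref.load`, which contains lazy `JsonRef` proxy objects; the standard `json` machinery (and hence Flask's `jsonify`, used in the blueprint above) may refuse to serialize them, while `jsonref.dumps` resolves them. A quick sketch of the difference the new `dump_schema` helper papers over:

import json
import jsonref

# "$ref" is resolved lazily into a JsonRef proxy object.
doc = jsonref.loads('{"a": {"$ref": "#/b"}, "b": 1}')
print(doc['a'] == 1)  # True: the proxy behaves like the value it points to

try:
    json.dumps(doc)   # the plain json encoder may reject the proxy
except TypeError as ex:
    print('json.dumps failed:', ex)

print(jsonref.dumps(doc))  # jsonref.dumps knows how to serialize it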
@@ -199,24 +203,27 @@ class BaseModel(with_metaclass(BaseMeta, CustomDict)):
                context_uri=None,
                prefix=None,
                expanded=False):
-        ser = self.serializable()
 
-        result = jsonld.compact(
-            ser,
-            self._context,
-            options={
-                'base': prefix,
-                'expandContext': self._context,
-                'senpy': prefix
-            })
-        if context_uri:
-            result['@context'] = context_uri
+        result = self.serializable()
+        if context_uri or with_context:
+            result['@context'] = context_uri or self._context
+
+        # result = jsonld.compact(result,
+        #                         self._context,
+        #                         options={
+        #                             'base': prefix,
+        #                             'expandContext': self._context,
+        #                             'senpy': prefix
+        #                         })
         if expanded:
             result = jsonld.expand(
                 result, options={'base': prefix,
                                  'expandContext': self._context})
         if not with_context:
-            del result['@context']
+            try:
+                del result['@context']
+            except KeyError:
+                pass
         return result
 
     def validate(self, obj=None):
@@ -319,7 +326,10 @@ def _add_class_from_schema(*args, **kwargs):
 
 
 for i in [
+        'aggregatedEvaluation',
         'analysis',
+        'dataset',
+        'datasets',
         'emotion',
         'emotionConversion',
         'emotionConversionPlugin',
@@ -327,19 +337,17 @@ for i in [
         'emotionModel',
         'emotionPlugin',
         'emotionSet',
+        'evaluation',
         'entity',
         'help',
+        'metric',
         'plugin',
         'plugins',
         'response',
         'results',
         'sentimentPlugin',
         'suggestion',
-        'aggregatedEvaluation',
-        'evaluation',
-        'metric',
-        'dataset',
-        'datasets',
+        'topic',
 
 ]:
     _add_class_from_schema(i)
senpy/plugins/__init__.py:
@@ -18,8 +18,7 @@ import subprocess
 import importlib
 import yaml
 import threading
-
-import numpy as np
+import nltk
 
 from .. import models, utils
 from .. import api
@@ -49,11 +48,11 @@ class PluginMeta(models.BaseMeta):
             attrs['name'] = alias
         if 'description' not in attrs:
             doc = attrs.get('__doc__', None)
-            if not doc:
-                raise Exception(('Please, add a description or '
-                                 'documentation to class {}').format(name))
-            attrs['description'] = doc
-        attrs['name'] = alias
+            if doc:
+                attrs['description'] = doc
+            else:
+                logger.warn(('Plugin {} does not have a description. '
+                             'Please, add a short summary to help other developers').format(name))
         cls = super(PluginMeta, mcs).__new__(mcs, name, bases, attrs)
 
         if alias in mcs._classes:
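With this change a missing description no longer aborts class creation: the docstring is used when present, and only a warning is logged otherwise. A hypothetical plugin relying on the new behaviour (names and metadata are illustrative):

from senpy.plugins import SentimentPlugin


class ExamplePlugin(SentimentPlugin):
    '''This docstring is picked up as the plugin description.'''

    author = '@example'  # illustrative metadata, not required here
    version = '0.1'

    def analyse_entry(self, entry, params):
        yield entry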
@@ -96,7 +95,27 @@ class Plugin(with_metaclass(PluginMeta, models.Plugin)):
         self.id = 'plugins/{}_{}'.format(self['name'], self['version'])
         self.is_activated = False
         self._lock = threading.Lock()
-        self.data_folder = data_folder or os.getcwd()
+        self._directory = os.path.abspath(os.path.dirname(inspect.getfile(self.__class__)))
+
+        data_folder = data_folder or os.getcwd()
+        subdir = os.path.join(data_folder, self.name)
+
+        self._data_paths = [
+            data_folder,
+            subdir,
+            self._directory,
+            os.path.join(self._directory, 'data'),
+        ]
+
+        if os.path.exists(subdir):
+            data_folder = subdir
+        self.data_folder = data_folder
+
+        self._log = logging.getLogger('{}.{}'.format(__name__, self.name))
+
+    @property
+    def log(self):
+        return self._log
 
     def validate(self):
         missing = []
@@ -125,9 +144,9 @@ class Plugin(with_metaclass(PluginMeta, models.Plugin)):
         for case in test_cases:
             try:
                 self.test_case(case)
-                logger.debug('Test case passed:\n{}'.format(pprint.pformat(case)))
+                self.log.debug('Test case passed:\n{}'.format(pprint.pformat(case)))
             except Exception as ex:
-                logger.warn('Test case failed:\n{}'.format(pprint.pformat(case)))
+                self.log.warn('Test case failed:\n{}'.format(pprint.pformat(case)))
                 raise
 
     def test_case(self, case):
@@ -150,10 +169,22 @@ class Plugin(with_metaclass(PluginMeta, models.Plugin)):
                 raise
         assert not should_fail
 
-    def open(self, fpath, *args, **kwargs):
-        if not os.path.isabs(fpath):
-            fpath = os.path.join(self.data_folder, fpath)
-        return open(fpath, *args, **kwargs)
+    def find_file(self, fname):
+        for p in self._data_paths:
+            alternative = os.path.join(p, fname)
+            if os.path.exists(alternative):
+                return alternative
+        raise IOError('File does not exist: {}'.format(fname))
+
+    def open(self, fpath, mode='r'):
+        if 'w' in mode:
+            # When writing, only use absolute paths or data_folder
+            if not os.path.isabs(fpath):
+                fpath = os.path.join(self.data_folder, fpath)
+        else:
+            fpath = self.find_file(fpath)
+
+        return open(fpath, mode=mode)
 
     def serve(self, debug=True, **kwargs):
         utils.easy(plugin_list=[self, ], plugin_folder=None, debug=debug, **kwargs)
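Together with the `_data_paths` list initialised in `__init__` above, this lets a plugin read files shipped next to its source without hardcoding paths, while writes stay confined to `data_folder`. A hedged sketch, with a hypothetical plugin and file names:

from senpy.plugins import SentimentPlugin


class LexiconPlugin(SentimentPlugin):
    '''Hypothetical plugin that loads a lexicon bundled with its source.'''

    version = '0.1'

    def activate(self):
        # Read: searched in data_folder, data_folder/<name>, the plugin's
        # own directory, and <plugin dir>/data, in that order.
        with self.open('lexicon.txt') as f:
            self.lexicon = set(f.read().split())

    def analyse_entry(self, entry, params):
        yield entry

    def deactivate(self):
        # Write: 'w' modes skip find_file and resolve against data_folder.
        with self.open('lexicon-cache.txt', mode='w') as f:
            f.write('\n'.join(sorted(self.lexicon)))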
@@ -188,7 +219,7 @@ class Analysis(Plugin):
 
     def analyse_entries(self, entries, parameters):
         for entry in entries:
-            logger.debug('Analysing entry with plugin {}: {}'.format(self, entry))
+            self.log.debug('Analysing entry with plugin {}: {}'.format(self, entry))
             results = self.analyse_entry(entry, parameters)
             if inspect.isgenerator(results):
                 for result in results:
@@ -291,7 +322,7 @@ class Box(AnalysisPlugin):
         return self
 
     def transform(self, X):
-        return np.array([self.predict_one(x) for x in X])
+        return [self.predict_one(x) for x in X]
 
     def predict(self, X):
         return self.transform(X)
@@ -377,7 +408,7 @@ class ShelfMixin(object):
             with self.open(self.shelf_file, 'rb') as p:
                 self._sh = pickle.load(p)
         except (IndexError, EOFError, pickle.UnpicklingError):
-            logger.warning('{} has a corrupted shelf file!'.format(self.id))
+            self.log.warning('Corrupted shelf file: {}'.format(self.shelf_file))
             if not self.get('force_shelf', False):
                 raise
         return self._sh
@@ -404,32 +435,31 @@ class ShelfMixin(object):
         self._shelf_file = value
 
     def save(self):
-        logger.debug('saving pickle')
+        self.log.debug('Saving pickle')
         if hasattr(self, '_sh') and self._sh is not None:
             with self.open(self.shelf_file, 'wb') as f:
                 pickle.dump(self._sh, f)
 
 
-def pfilter(plugins, **kwargs):
+def pfilter(plugins, plugin_type=Analysis, **kwargs):
     """ Filter plugins by different criteria """
     if isinstance(plugins, models.Plugins):
         plugins = plugins.plugins
     elif isinstance(plugins, dict):
         plugins = plugins.values()
-    ptype = kwargs.pop('plugin_type', Plugin)
     logger.debug('#' * 100)
-    logger.debug('ptype {}'.format(ptype))
-    if ptype:
-        if isinstance(ptype, PluginMeta):
-            ptype = ptype.__name__
+    logger.debug('plugin_type {}'.format(plugin_type))
+    if plugin_type:
+        if isinstance(plugin_type, PluginMeta):
+            plugin_type = plugin_type.__name__
         try:
-            ptype = ptype[0].upper() + ptype[1:]
-            pclass = globals()[ptype]
+            plugin_type = plugin_type[0].upper() + plugin_type[1:]
+            pclass = globals()[plugin_type]
             logger.debug('Class: {}'.format(pclass))
             candidates = filter(lambda x: isinstance(x, pclass),
                                 plugins)
         except KeyError:
-            raise models.Error('{} is not a valid type'.format(ptype))
+            raise models.Error('{} is not a valid type'.format(plugin_type))
     else:
         candidates = plugins
 
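Making `plugin_type` an explicit keyword argument (now defaulting to `Analysis` rather than `Plugin`) documents the filter in the signature instead of hiding it in `**kwargs`. A hypothetical call, assuming `loaded` is a list of plugin instances:

from senpy.plugins import SentimentPlugin, pfilter

# Keep only sentiment plugins; other keyword arguments still act as
# attribute filters, as before.
sentiment = pfilter(loaded, plugin_type=SentimentPlugin)
by_version = pfilter(loaded, plugin_type=SentimentPlugin, version='0.1')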
@@ -464,6 +494,7 @@ def _log_subprocess_output(process):
 
 def install_deps(*plugins):
     installed = False
+    nltk_resources = set()
     for info in plugins:
         requirements = info.get('requirements', [])
         if requirements:
@@ -479,6 +510,9 @@ def install_deps(*plugins):
             installed = True
             if exitcode != 0:
                 raise models.Error("Dependencies not properly installed")
+        nltk_resources |= set(info.get('nltk_resources', []))
+
+    installed |= nltk.download(list(nltk_resources))
     return installed
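Plugins can now declare NLTK data alongside pip requirements: `install_deps` collects every plugin's `nltk_resources` into one set and fetches them with a single `nltk.download()` call. A sketch of the metadata a plugin might declare (hypothetical plugin and resource names):

from senpy.plugins import AnalysisPlugin


class TokenizerPlugin(AnalysisPlugin):
    '''Hypothetical plugin that needs NLTK corpora at runtime.'''

    version = '0.1'
    requirements = ['nltk']                # installed via pip, as before
    nltk_resources = ['punkt', 'wordnet']  # fetched via nltk.download()

    def analyse_entry(self, entry, params):
        yield entry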
@@ -575,12 +609,14 @@ def _instances_in_module(module):
 def _from_module_name(module, root, info=None, install=True, **kwargs):
     try:
         module = load_module(module, root)
-    except ImportError:
+    except (ImportError, LookupError):
         if not install or not info:
             raise
         install_deps(info)
         module = load_module(module, root)
     for plugin in _from_loaded_module(module=module, root=root, info=info, **kwargs):
+        if install:
+            install_deps(plugin)
         yield plugin
sentiment140 plugin (Sentiment140Plugin):
@@ -41,7 +41,7 @@ class Sentiment140Plugin(SentimentPlugin):
         To avoid calling the sentiment140 API, we will mock the results
         from requests.
         '''
-        from senpy.test import patch_requests
+        from senpy.testing import patch_requests
         expected = {"data": [{"polarity": 4}]}
         with patch_requests(expected) as (request, response):
             super(Sentiment140Plugin, self).test(*args, **kwargs)
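The helper module moved from `senpy.test` to `senpy.testing`, so every import site changes accordingly (the same rename appears in the test files below). Usage stays as in the plugin above:

from senpy.testing import patch_requests

expected = {"data": [{"polarity": 4}]}
with patch_requests(expected) as (request, response):
    # Any code that hits the network through `requests` now receives
    # the canned payload above instead of a real HTTP response.
    pass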
senpy/schemas/context.json:
@@ -10,8 +10,10 @@
     "wna": "http://www.gsi.dit.upm.es/ontologies/wnaffect/ns#",
     "emoml": "http://www.gsi.dit.upm.es/ontologies/onyx/vocabularies/emotionml/ns#",
     "xsd": "http://www.w3.org/2001/XMLSchema#",
+    "fam": "http://vocab.fusepool.info/fam#",
     "topics": {
-        "@id": "dc:subject"
+        "@id": "nif:topic",
+        "@container": "@set"
     },
     "entities": {
         "@id": "me:hasEntities"
senpy/schemas/evaluation.json:
@@ -1,6 +1,6 @@
 {
     "$schema": "http://json-schema.org/draft-04/schema#",
-    "name": "Evalation",
+    "name": "Evaluation",
     "properties": {
         "@id": {
             "type": "string"
senpy/utils.py:
@@ -1,6 +1,7 @@
 from . import models, __version__
 from collections import MutableMapping
 import pprint
+import pdb
 
 import logging
 logger = logging.getLogger(__name__)
@@ -32,8 +33,8 @@ def check_template(indict, template):
     if indict != template:
         raise models.Error(('Differences found.\n'
                             '\tExpected: {}\n'
-                            '\tFound: {}').format(pprint.pformat(indict),
-                                                  pprint.pformat(template)))
+                            '\tFound: {}').format(pprint.pformat(template),
+                                                  pprint.pformat(indict)))
 
 
 def convert_dictionary(original, mappings):
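The two `pformat` arguments were swapped so the message matches its labels: `template` is what was expected, `indict` is what was actually found. With hypothetical values:

expected = {'polarity': 1}   # the template
found = {'polarity': 0}      # the actual input

# check_template(found, expected) now reports:
#   Expected: {'polarity': 1}
#   Found: {'polarity': 0}
# whereas before the fix the two values appeared under the wrong labels.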
@@ -67,18 +68,23 @@ def easy_load(app=None, plugin_list=None, plugin_folder=None, **kwargs):
     return sp, app
 
 
-def easy_test(plugin_list=None):
+def easy_test(plugin_list=None, debug=True):
     logger.setLevel(logging.DEBUG)
     logging.getLogger().setLevel(logging.INFO)
-    if not plugin_list:
-        import __main__
-        logger.info('Loading classes from {}'.format(__main__))
-        from . import plugins
-        plugin_list = plugins.from_module(__main__)
-    for plug in plugin_list:
-        plug.test()
-        logger.info('The tests for {} passed!'.format(plug.name))
-    logger.info('All tests passed!')
+    try:
+        if not plugin_list:
+            import __main__
+            logger.info('Loading classes from {}'.format(__main__))
+            from . import plugins
+            plugin_list = plugins.from_module(__main__)
+        for plug in plugin_list:
+            plug.test()
+            plug.log.info('My tests passed!')
+        logger.info('All tests passed!')
+    except Exception:
+        if not debug:
+            raise
+        pdb.post_mortem()
 
 
 def easy(host='0.0.0.0', port=5000, debug=True, **kwargs):
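`easy_test` now wraps the whole run: with `debug=True` (the default) a failing plugin test drops into `pdb.post_mortem()` at the point of failure instead of just unwinding, and `--only-test` forwards the CLI's debug flag (see the senpy/__main__.py hunk above). The pattern in isolation, with a hypothetical wrapper name:

import pdb


def run_checked(fn, debug=True):
    # Minimal sketch of the easy_test error handling: inspect the live
    # traceback in a post-mortem debugger unless debug is disabled.
    try:
        fn()
    except Exception:
        if not debug:
            raise
        pdb.post_mortem()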
tests/test_api.py:
@@ -32,7 +32,7 @@ class APITest(TestCase):
         query = {}
         plug_params = {
             'hello': {
-                'aliases': ['hello', 'hiya'],
+                'aliases': ['hiya', 'hello'],
                 'required': True
             }
         }
@@ -48,6 +48,26 @@ class APITest(TestCase):
         assert 'hello' in p
         assert p['hello'] == 'dlrow'
 
+    def test_parameters2(self):
+        in1 = {
+            'meaningcloud-key': 5
+        }
+        in2 = {
+            'apikey': 25
+        }
+        extra_params = {
+            "apikey": {
+                "aliases": [
+                    "apikey",
+                    "meaningcloud-key"
+                ],
+                "required": True
+            }
+        }
+        p1 = parse_params(in1, extra_params)
+        p2 = parse_params(in2, extra_params)
+        assert (p2['apikey'] / p1['apikey']) == 5
+
     def test_default(self):
         spec = {
             'hello': {
tests/test_client.py:
@@ -1,6 +1,6 @@
 from unittest import TestCase
 
-from senpy.test import patch_requests
+from senpy.testing import patch_requests
 from senpy.client import Client
 from senpy.models import Results, Plugins, Error
 from senpy.plugins import AnalysisPlugin
tests/test_extensions.py:
@@ -47,7 +47,7 @@ class ExtensionsTest(TestCase):
 
     def test_add_delete(self):
         '''Should be able to add and delete new plugins. '''
-        new = plugins.Plugin(name='new', description='new', version=0)
+        new = plugins.Analysis(name='new', description='new', version=0)
         self.senpy.add_plugin(new)
         assert new in self.senpy.plugins()
         self.senpy.delete_plugin(new)
tests/test_schemas.py:
@@ -8,6 +8,8 @@ from fnmatch import fnmatch
 
 from jsonschema import RefResolver, Draft4Validator, ValidationError
 
+from senpy.models import read_schema
+
 root_path = path.join(path.dirname(path.realpath(__file__)), '..')
 schema_folder = path.join(root_path, 'senpy', 'schemas')
 examples_path = path.join(root_path, 'docs', 'examples')
@@ -15,7 +17,8 @@ bad_examples_path = path.join(root_path, 'docs', 'bad-examples')
 
 
 class JSONSchemaTests(unittest.TestCase):
-    pass
+    def test_definitions(self):
+        read_schema('definitions.json')
 
 
 def do_create_(jsfile, success):
tests/test_test.py:
@@ -2,7 +2,7 @@ from unittest import TestCase
 
 import requests
 import json
-from senpy.test import patch_requests
+from senpy.testing import patch_requests
 from senpy.models import Results