Mirror of https://github.com/gsi-upm/senpy, synced 2025-09-16 19:42:21 +00:00
Compare commits: 44-add-bas...0.10.6 (12 commits)

Commits (SHA1):
4675d9acf1
6832a2816d
7a8abf1823
a21ce0d90e
a964e586d7
bce42b5bb4
1313853788
697e779767
48f5ffafa1
73f7cbbe8a
07a41236f8
55db97cf62
Combined diff, grouped by file. File names and added/removed markers are inferred from the hunk headers and contents, since the mirror dropped them.

.gitlab-ci.yml:

@@ -18,6 +18,8 @@ before_script:
   stage: test
   script:
     - make -e test-$PYTHON_VERSION
+  except:
+    - tags # Avoid unnecessary double testing

 test-3.5:
   <<: *test_definition

Makefile (docker targets):

@@ -1,5 +1,14 @@
-IMAGENAME?=$(NAME)
+ifndef IMAGENAME
+ifdef CI_REGISTRY_IMAGE
+IMAGENAME=$(CI_REGISTRY_IMAGE)
+else
+IMAGENAME=$(NAME)
+endif
+endif
+
 IMAGEWTAG?=$(IMAGENAME):$(VERSION)
+DOCKER_FLAGS?=$(-ti)
+DOCKER_CMD?=

 docker-login: ## Log in to the registry. It will only be used in the server, or when running a CI task locally (if CI_BUILD_TOKEN is set).
 ifeq ($(CI_BUILD_TOKEN),)
@@ -19,6 +28,19 @@ else
 	@docker logout
 endif

+docker-run: ## Run a generic docker image
+	docker run $(DOCKER_FLAGS) $(IMAGEWTAG) $(DOCKER_CMD)
+
+docker-build: ## Build a generic docker image
+	docker build . -t $(IMAGEWTAG)
+
+docker-push: docker-login ## Push a generic docker image
+	docker push $(IMAGEWTAG)
+
+docker-latest-push: docker-login ## Push the latest image
+	docker tag $(IMAGEWTAG) $(IMAGENAME)
+	docker push $(IMAGENAME)
+
 login:: docker-login

 clean:: docker-clean

Makefile (git push-github target):

@@ -14,7 +14,7 @@ push-github: ## Push the code to github. You need to set up GITHUB_DEPLOY_KEY
 ifeq ($(GITHUB_DEPLOY_KEY),)
 else
 	$(eval KEY_FILE := "$(shell mktemp)")
-	@echo "$(GITHUB_DEPLOY_KEY)" > $(KEY_FILE)
+	@printf '%b' '$(GITHUB_DEPLOY_KEY)' > $(KEY_FILE)
 	@git remote rm github-deploy || true
 	git remote add github-deploy $(GITHUB_REPO)
 	-@GIT_SSH_COMMAND="ssh -i $(KEY_FILE)" git fetch github-deploy $(CI_COMMIT_REF_NAME)

Makefile (kubernetes variables):

@@ -13,7 +13,7 @@
 KUBE_CA_TEMP=false
 ifndef KUBE_CA_PEM_FILE
 KUBE_CA_PEM_FILE:=$$PWD/.ca.crt
-CREATED:=$(shell echo -e "$(KUBE_CA_BUNDLE)" > $(KUBE_CA_PEM_FILE))
+CREATED:=$(shell printf '%b\n' '$(KUBE_CA_BUNDLE)' > $(KUBE_CA_PEM_FILE))
 endif
 KUBE_TOKEN?=""
 KUBE_NAMESPACE?=$(NAME)

Makefile (python image targets):

@@ -26,6 +26,7 @@ Dockerfile-%: Dockerfile.template ## Generate a specific dockerfile (e.g. Dockerfile-2.7)
 quick_build: $(addprefix build-, $(PYMAIN))

 build: $(addprefix build-, $(PYVERSIONS)) ## Build all images / python versions
+	docker tag $(IMAGEWTAG)-python$(PYMAIN) $(IMAGEWTAG)

 build-%: version Dockerfile-% ## Build a specific version (e.g. build-2.7)
 	docker build -t '$(IMAGEWTAG)-python$*' -f Dockerfile-$* .;
@@ -77,7 +78,6 @@ push-latest: $(addprefix push-latest-,$(PYVERSIONS)) ## Push the "latest" tag to the registry
-	docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGEWTAG)'
 	docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGENAME)'
 	docker push '$(IMAGENAME):latest'
 	docker push '$(IMAGEWTAG)'

 push-latest-%: build-% ## Push the latest image for a specific python version
 	docker tag $(IMAGENAME):$(VERSION)-python$* $(IMAGENAME):python$*

Dockerfile.template:

@@ -6,8 +6,6 @@ RUN apt-get update && apt-get install -y \
         libblas-dev liblapack-dev liblapacke-dev gfortran \
     && rm -rf /var/lib/apt/lists/*

-RUN pip install --no-cache-dir --upgrade numpy scipy scikit-learn
-
 RUN mkdir /cache/ /senpy-plugins /data/

 VOLUME /data/

MANIFEST.in:

@@ -1,5 +1,6 @@
 include requirements.txt
 include test-requirements.txt
+include extra-requirements.txt
 include README.rst
 include senpy/VERSION
 graft senpy/plugins

requirements.txt (the scientific stack moves here from the Dockerfile):

@@ -9,3 +9,6 @@ jsonref
 PyYAML
 rdflib
 rdflib-jsonld
+numpy
+scipy
+scikit-learn

senpy/__main__.py:

@@ -130,7 +130,7 @@ def main():
         return
     sp.activate_all()
     if args.only_test:
-        easy_test(sp.plugins())
+        easy_test(sp.plugins(), debug=args.debug)
         return
     print('Senpy version {}'.format(senpy.__version__))
     print('Server running on port %s:%d. Ctrl+C to quit' % (args.host,

senpy/api.py:

@@ -147,7 +147,7 @@ def parse_params(indict, *specs):
     for param, options in iteritems(spec):
         for alias in options.get("aliases", []):
             # Replace each alias with the correct name of the parameter
-            if alias in indict and alias is not param:
+            if alias in indict and alias != param:
                 outdict[param] = indict[alias]
                 del outdict[alias]
                 continue

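Context for this fix: `is`/`is not` compare object identity, not value, so two equal strings can still fail an identity test. A standalone illustration (not senpy code):

    param = 'apikey'
    alias = ''.join(['api', 'key'])  # built at runtime: equal value, distinct object
    print(alias == param)  # True
    print(alias is param)  # typically False in CPython, so the old identity check misbehaved
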
senpy/blueprints.py:

@@ -19,7 +19,7 @@ Blueprints for Senpy
 """
 from flask import (Blueprint, request, current_app, render_template, url_for,
                    jsonify)
-from .models import Error, Response, Help, Plugins, read_schema, Datasets
+from .models import Error, Response, Help, Plugins, read_schema, dump_schema, Datasets
 from . import api
 from .version import __version__
 from functools import wraps
@@ -67,9 +67,9 @@ def index():
 @api_blueprint.route('/schemas/<schema>')
 def schema(schema="definitions"):
     try:
-        return jsonify(read_schema(schema))
-    except Exception:  # Should be FileNotFoundError, but it's missing from py2
-        return Error(message="Schema not found", status=404).flask()
+        return dump_schema(read_schema(schema))
+    except Exception as ex:  # Should be FileNotFoundError, but it's missing from py2
+        return Error(message="Schema not found: {}".format(ex), status=404).flask()


 def basic_api(f):
@@ -133,6 +133,7 @@ def api_root():
     req = api.parse_call(request.parameters)
     return current_app.senpy.analyse(req)

+
 @api_blueprint.route('/evaluate/', methods=['POST', 'GET'])
 @basic_api
 def evaluate():
@@ -145,6 +146,7 @@ def evaluate():
     response = current_app.senpy.evaluate(params)
     return response

+
 @api_blueprint.route('/plugins/', methods=['POST', 'GET'])
 @basic_api
 def plugins():
@@ -163,10 +165,10 @@ def plugin(plugin=None):
     return sp.get_plugin(plugin)


-@api_blueprint.route('/datasets/', methods=['POST','GET'])
+@api_blueprint.route('/datasets/', methods=['POST', 'GET'])
 @basic_api
 def datasets():
     sp = current_app.senpy
     datasets = sp.datasets
-    dic = Datasets(datasets = list(datasets.values()))
-    return dic
+    dic = Datasets(datasets=list(datasets.values()))
+    return dic

senpy/models.py:

@@ -51,6 +51,10 @@ def read_schema(schema_file, absolute=False):
         return jsonref.load(f, base_uri=schema_uri)


+def dump_schema(schema):
+    return jsonref.dumps(schema)
+
+
 def load_context(context):
     logging.debug('Loading context: {}'.format(context))
     if not context:

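Rationale sketch: a schema loaded through jsonref contains lazy JsonRef proxies for every $ref, which the default JSON encoder behind Flask's jsonify cannot serialize; jsonref's own dumps resolves them. A minimal sketch:

    import jsonref

    schema = jsonref.loads('{"a": {"$ref": "#/defs/x"}, "defs": {"x": 42}}')
    print(jsonref.dumps(schema))  # JsonRef proxies resolved while encoding
    # json.dumps(schema) would typically raise TypeError on the proxy objects
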
@@ -199,24 +203,27 @@ class BaseModel(with_metaclass(BaseMeta, CustomDict)):
                   context_uri=None,
                   prefix=None,
                   expanded=False):
-        ser = self.serializable()
-
-        result = jsonld.compact(
-            ser,
-            self._context,
-            options={
-                'base': prefix,
-                'expandContext': self._context,
-                'senpy': prefix
-            })
-        if context_uri:
-            result['@context'] = context_uri
+        result = self.serializable()
+        if context_uri or with_context:
+            result['@context'] = context_uri or self._context
+
+        # result = jsonld.compact(result,
+        #                         self._context,
+        #                         options={
+        #                             'base': prefix,
+        #                             'expandContext': self._context,
+        #                             'senpy': prefix
+        #                         })
         if expanded:
             result = jsonld.expand(
                 result, options={'base': prefix,
                                  'expandContext': self._context})
         if not with_context:
-            del result['@context']
+            try:
+                del result['@context']
+            except KeyError:
+                pass
         return result

     def validate(self, obj=None):
@@ -319,7 +326,10 @@ def _add_class_from_schema(*args, **kwargs):


 for i in [
+        'aggregatedEvaluation',
         'analysis',
+        'dataset',
+        'datasets',
         'emotion',
         'emotionConversion',
         'emotionConversionPlugin',
@@ -327,19 +337,17 @@ for i in [
         'emotionModel',
         'emotionPlugin',
         'emotionSet',
+        'evaluation',
         'entity',
         'help',
+        'metric',
         'plugin',
         'plugins',
         'response',
         'results',
         'sentimentPlugin',
         'suggestion',
-        'aggregatedEvaluation',
-        'evaluation',
-        'metric',
-        'dataset',
-        'datasets',
         'topic',
+
 ]:
     _add_class_from_schema(i)

senpy/plugins/__init__.py:

@@ -18,8 +18,7 @@ import subprocess
 import importlib
 import yaml
 import threading

-import numpy as np
+import nltk

 from .. import models, utils
 from .. import api

@@ -49,11 +48,11 @@ class PluginMeta(models.BaseMeta):
             attrs['name'] = alias
         if 'description' not in attrs:
             doc = attrs.get('__doc__', None)
-            if not doc:
-                raise Exception(('Please, add a description or '
-                                 'documentation to class {}').format(name))
-            attrs['description'] = doc
+            if doc:
+                attrs['description'] = doc
+            else:
+                logger.warn(('Plugin {} does not have a description. '
+                             'Please, add a short summary to help other developers').format(name))
         cls = super(PluginMeta, mcs).__new__(mcs, name, bases, attrs)

         if alias in mcs._classes:

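After this change a missing docstring only logs a warning instead of aborting class creation, and a docstring still doubles as the description. A hypothetical plugin relying on that behavior:

    from senpy.plugins import AnalysisPlugin

    class GreetingPlugin(AnalysisPlugin):
        '''Detects greetings in the input text.'''  # becomes the plugin description
        version = '0.1'
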
@@ -96,7 +95,27 @@ class Plugin(with_metaclass(PluginMeta, models.Plugin)):
         self.id = 'plugins/{}_{}'.format(self['name'], self['version'])
         self.is_activated = False
         self._lock = threading.Lock()
-        self.data_folder = data_folder or os.getcwd()
+        self._directory = os.path.abspath(os.path.dirname(inspect.getfile(self.__class__)))
+
+        data_folder = data_folder or os.getcwd()
+        subdir = os.path.join(data_folder, self.name)
+
+        self._data_paths = [
+            data_folder,
+            subdir,
+            self._directory,
+            os.path.join(self._directory, 'data'),
+        ]
+
+        if os.path.exists(subdir):
+            data_folder = subdir
+        self.data_folder = data_folder
+
+        self._log = logging.getLogger('{}.{}'.format(__name__, self.name))
+
+    @property
+    def log(self):
+        return self._log

     def validate(self):
         missing = []

@@ -125,9 +144,9 @@ class Plugin(with_metaclass(PluginMeta, models.Plugin)):
         for case in test_cases:
             try:
                 self.test_case(case)
-                logger.debug('Test case passed:\n{}'.format(pprint.pformat(case)))
+                self.log.debug('Test case passed:\n{}'.format(pprint.pformat(case)))
             except Exception as ex:
-                logger.warn('Test case failed:\n{}'.format(pprint.pformat(case)))
+                self.log.warn('Test case failed:\n{}'.format(pprint.pformat(case)))
                 raise

     def test_case(self, case):

@@ -150,10 +169,22 @@ class Plugin(with_metaclass(PluginMeta, models.Plugin)):
             raise
         assert not should_fail

-    def open(self, fpath, *args, **kwargs):
-        if not os.path.isabs(fpath):
-            fpath = os.path.join(self.data_folder, fpath)
-        return open(fpath, *args, **kwargs)
+    def find_file(self, fname):
+        for p in self._data_paths:
+            alternative = os.path.join(p, fname)
+            if os.path.exists(alternative):
+                return alternative
+        raise IOError('File does not exist: {}'.format(fname))
+
+    def open(self, fpath, mode='r'):
+        if 'w' in mode:
+            # When writing, only use absolute paths or data_folder
+            if not os.path.isabs(fpath):
+                fpath = os.path.join(self.data_folder, fpath)
+        else:
+            fpath = self.find_file(fpath)
+
+        return open(fpath, mode=mode)

     def serve(self, debug=True, **kwargs):
         utils.easy(plugin_list=[self, ], plugin_folder=None, debug=debug, **kwargs)

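Reads now search several locations in order (the data folder, its per-plugin subfolder, the plugin's source directory, and that directory's data/ subdir), while writes still resolve against data_folder only. A hypothetical plugin that ships a lexicon file next to its source:

    from senpy.plugins import AnalysisPlugin

    class LexiconPlugin(AnalysisPlugin):
        '''Hypothetical plugin whose lexicon.txt sits beside this .py file.'''
        version = '0.1'

        def activate(self):
            with self.open('lexicon.txt') as f:  # located via find_file()
                self.lexicon = set(f.read().split())
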
@@ -188,7 +219,7 @@ class Analysis(Plugin):

     def analyse_entries(self, entries, parameters):
         for entry in entries:
-            logger.debug('Analysing entry with plugin {}: {}'.format(self, entry))
+            self.log.debug('Analysing entry with plugin {}: {}'.format(self, entry))
             results = self.analyse_entry(entry, parameters)
             if inspect.isgenerator(results):
                 for result in results:
@@ -291,7 +322,7 @@ class Box(AnalysisPlugin):
         return self

     def transform(self, X):
-        return np.array([self.predict_one(x) for x in X])
+        return [self.predict_one(x) for x in X]

     def predict(self, X):
         return self.transform(X)

@@ -377,7 +408,7 @@ class ShelfMixin(object):
             with self.open(self.shelf_file, 'rb') as p:
                 self._sh = pickle.load(p)
         except (IndexError, EOFError, pickle.UnpicklingError):
-            logger.warning('{} has a corrupted shelf file!'.format(self.id))
+            self.log.warning('Corrupted shelf file: {}'.format(self.shelf_file))
             if not self.get('force_shelf', False):
                 raise
         return self._sh

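For reference, the shelf is a pickled dict that plugins use for persistence. A minimal sketch of typical use, with a hypothetical plugin and the API shown above:

    from senpy.plugins import AnalysisPlugin, ShelfMixin

    class CountingPlugin(ShelfMixin, AnalysisPlugin):
        '''Hypothetical plugin that counts processed entries across restarts.'''
        version = '0.1'

        def analyse_entry(self, entry, params):
            self.sh['seen'] = self.sh.get('seen', 0) + 1  # backed by shelf_file
            self.save()                                   # the method patched below
            yield entry
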
@@ -404,32 +435,31 @@ class ShelfMixin(object):
         self._shelf_file = value

     def save(self):
-        logger.debug('saving pickle')
+        self.log.debug('Saving pickle')
         if hasattr(self, '_sh') and self._sh is not None:
             with self.open(self.shelf_file, 'wb') as f:
                 pickle.dump(self._sh, f)


-def pfilter(plugins, **kwargs):
+def pfilter(plugins, plugin_type=Analysis, **kwargs):
     """ Filter plugins by different criteria """
     if isinstance(plugins, models.Plugins):
         plugins = plugins.plugins
     elif isinstance(plugins, dict):
         plugins = plugins.values()
-    ptype = kwargs.pop('plugin_type', Plugin)
-    logger.debug('#' * 100)
-    logger.debug('ptype {}'.format(ptype))
-    if ptype:
-        if isinstance(ptype, PluginMeta):
-            ptype = ptype.__name__
+    logger.debug('plugin_type {}'.format(plugin_type))
+    if plugin_type:
+        if isinstance(plugin_type, PluginMeta):
+            plugin_type = plugin_type.__name__
         try:
-            ptype = ptype[0].upper() + ptype[1:]
-            pclass = globals()[ptype]
+            plugin_type = plugin_type[0].upper() + plugin_type[1:]
+            pclass = globals()[plugin_type]
             logger.debug('Class: {}'.format(pclass))
             candidates = filter(lambda x: isinstance(x, pclass),
                                 plugins)
         except KeyError:
-            raise models.Error('{} is not a valid type'.format(ptype))
+            raise models.Error('{} is not a valid type'.format(plugin_type))
     else:
         candidates = plugins

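pfilter's type filter is now an explicit keyword defaulting to Analysis, rather than a value popped out of **kwargs. An illustrative call, assuming sp is a Senpy instance and that the remaining keywords filter on plugin attributes:

    from senpy.plugins import SentimentPlugin, pfilter

    # Keep only loaded sentiment plugins whose version attribute is '0.1':
    sentiment_v01 = pfilter(sp.plugins(), plugin_type=SentimentPlugin, version='0.1')
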
@@ -464,6 +494,7 @@ def _log_subprocess_output(process):

 def install_deps(*plugins):
     installed = False
+    nltk_resources = set()
     for info in plugins:
         requirements = info.get('requirements', [])
         if requirements:
@@ -479,6 +510,9 @@ def install_deps(*plugins):
             installed = True
             if exitcode != 0:
                 raise models.Error("Dependencies not properly installed")
+        nltk_resources |= set(info.get('nltk_resources', []))
+
+    installed |= nltk.download(list(nltk_resources))
     return installed

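install_deps now aggregates every plugin's declared NLTK data and fetches it in a single nltk.download call. A hypothetical plugin declaring such resources:

    from senpy.plugins import AnalysisPlugin

    class TokenizerPlugin(AnalysisPlugin):
        '''Hypothetical plugin; install_deps() would download punkt and stopwords.'''
        version = '0.1'
        nltk_resources = ['punkt', 'stopwords']
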
@@ -575,12 +609,14 @@ def _instances_in_module(module):

 def _from_module_name(module, root, info=None, install=True, **kwargs):
     try:
         module = load_module(module, root)
-    except ImportError:
+    except (ImportError, LookupError):
         if not install or not info:
             raise
         install_deps(info)
         module = load_module(module, root)
     for plugin in _from_loaded_module(module=module, root=root, info=info, **kwargs):
+        if install:
+            install_deps(plugin)
         yield plugin

Sentiment140 plugin (test method):

@@ -41,7 +41,7 @@ class Sentiment140Plugin(SentimentPlugin):
         To avoid calling the sentiment140 API, we will mock the results
         from requests.
         '''
-        from senpy.test import patch_requests
+        from senpy.testing import patch_requests
         expected = {"data": [{"polarity": 4}]}
         with patch_requests(expected) as (request, response):
             super(Sentiment140Plugin, self).test(*args, **kwargs)

JSON-LD context:

@@ -10,8 +10,10 @@
     "wna": "http://www.gsi.dit.upm.es/ontologies/wnaffect/ns#",
     "emoml": "http://www.gsi.dit.upm.es/ontologies/onyx/vocabularies/emotionml/ns#",
     "xsd": "http://www.w3.org/2001/XMLSchema#",
+    "fam": "http://vocab.fusepool.info/fam#",
     "topics": {
-        "@id": "dc:subject"
+        "@id": "nif:topic",
+        "@container": "@set"
     },
     "entities": {
         "@id": "me:hasEntities"

Evaluation JSON schema (fixes the misspelled name):

@@ -1,6 +1,6 @@
 {
     "$schema": "http://json-schema.org/draft-04/schema#",
-    "name": "Evalation",
+    "name": "Evaluation",
     "properties": {
         "@id": {
             "type": "string"

senpy/utils.py:

@@ -1,6 +1,7 @@
 from . import models, __version__
 from collections import MutableMapping
 import pprint
+import pdb

 import logging
 logger = logging.getLogger(__name__)

@@ -32,8 +33,8 @@ def check_template(indict, template):
     if indict != template:
         raise models.Error(('Differences found.\n'
                             '\tExpected: {}\n'
-                            '\tFound: {}').format(pprint.pformat(indict),
-                                                  pprint.pformat(template)))
+                            '\tFound: {}').format(pprint.pformat(template),
+                                                  pprint.pformat(indict)))


 def convert_dictionary(original, mappings):

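The swap fixes transposed labels: the Expected placeholder must receive the template and Found the actual input. A standalone check of the corrected format call:

    import pprint

    indict = {'polarity': 'positive'}    # produced by the plugin
    template = {'polarity': 'negative'}  # expected by the test
    print(('Differences found.\n'
           '\tExpected: {}\n'
           '\tFound: {}').format(pprint.pformat(template),
                                 pprint.pformat(indict)))
    # Expected now shows the template; before the fix the two lines were swapped.
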
@@ -67,18 +68,23 @@ def easy_load(app=None, plugin_list=None, plugin_folder=None, **kwargs):
     return sp, app


-def easy_test(plugin_list=None):
+def easy_test(plugin_list=None, debug=True):
     logger.setLevel(logging.DEBUG)
     logging.getLogger().setLevel(logging.INFO)
-    if not plugin_list:
-        import __main__
-        logger.info('Loading classes from {}'.format(__main__))
-        from . import plugins
-        plugin_list = plugins.from_module(__main__)
-    for plug in plugin_list:
-        plug.test()
-        logger.info('The tests for {} passed!'.format(plug.name))
-    logger.info('All tests passed!')
+    try:
+        if not plugin_list:
+            import __main__
+            logger.info('Loading classes from {}'.format(__main__))
+            from . import plugins
+            plugin_list = plugins.from_module(__main__)
+        for plug in plugin_list:
+            plug.test()
+            plug.log.info('My tests passed!')
+        logger.info('All tests passed!')
+    except Exception:
+        if not debug:
+            raise
+        pdb.post_mortem()


 def easy(host='0.0.0.0', port=5000, debug=True, **kwargs):

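With debug=True (the default) a failing plugin test now drops into a pdb post-mortem instead of exiting; pass debug=False, as __main__.py now does with args.debug, to re-raise. A typical plugin-module footer using it (a sketch):

    if __name__ == '__main__':
        from senpy.utils import easy_test
        easy_test()  # collects plugin classes from __main__; pdb on failure
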
API tests:

@@ -32,7 +32,7 @@ class APITest(TestCase):
         query = {}
         plug_params = {
             'hello': {
-                'aliases': ['hello', 'hiya'],
+                'aliases': ['hiya', 'hello'],
                 'required': True
             }
         }

@@ -48,6 +48,26 @@ class APITest(TestCase):
         assert 'hello' in p
         assert p['hello'] == 'dlrow'

+    def test_parameters2(self):
+        in1 = {
+            'meaningcloud-key': 5
+        }
+        in2 = {
+            'apikey': 25
+        }
+        extra_params = {
+            "apikey": {
+                "aliases": [
+                    "apikey",
+                    "meaningcloud-key"
+                ],
+                "required": True
+            }
+        }
+        p1 = parse_params(in1, extra_params)
+        p2 = parse_params(in2, extra_params)
+        assert (p2['apikey'] / p1['apikey']) == 5
+
     def test_default(self):
         spec = {
             'hello': {

Client tests:

@@ -1,6 +1,6 @@
 from unittest import TestCase

-from senpy.test import patch_requests
+from senpy.testing import patch_requests
 from senpy.client import Client
 from senpy.models import Results, Plugins, Error
 from senpy.plugins import AnalysisPlugin

Extensions tests:

@@ -47,7 +47,7 @@ class ExtensionsTest(TestCase):

     def test_add_delete(self):
         '''Should be able to add and delete new plugins. '''
-        new = plugins.Plugin(name='new', description='new', version=0)
+        new = plugins.Analysis(name='new', description='new', version=0)
         self.senpy.add_plugin(new)
         assert new in self.senpy.plugins()
         self.senpy.delete_plugin(new)

Schema tests:

@@ -8,6 +8,8 @@ from fnmatch import fnmatch

 from jsonschema import RefResolver, Draft4Validator, ValidationError

+from senpy.models import read_schema
+
 root_path = path.join(path.dirname(path.realpath(__file__)), '..')
 schema_folder = path.join(root_path, 'senpy', 'schemas')
 examples_path = path.join(root_path, 'docs', 'examples')
@@ -15,7 +17,8 @@ bad_examples_path = path.join(root_path, 'docs', 'bad-examples')


 class JSONSchemaTests(unittest.TestCase):
-    pass
+    def test_definitions(self):
+        read_schema('definitions.json')


 def do_create_(jsfile, success):

Testing-helper tests:

@@ -2,7 +2,7 @@ from unittest import TestCase

 import requests
 import json
-from senpy.test import patch_requests
+from senpy.testing import patch_requests
 from senpy.models import Results