mirror of https://github.com/gsi-upm/senpy synced 2024-12-22 04:58:12 +00:00

Several changes

* Add flags to run the plugin tests: --only-test runs them and exits, --test runs them and then starts the server
* Add an ntriples output format
* Modify the dependency installation logic to avoid installing the same dependencies several times, and add a --no-deps flag to skip installation altogether
* Add base64-encoded URLs as the base/prefix for serialization
* Allow plugin activation to fail without aborting startup (--allow-fail)
J. Fernando Sánchez 2018-07-04 16:24:42 +02:00
commit a3eb8f196c
15 changed files with 241 additions and 110 deletions

docker-compose.dev.yml (new file, 10 lines)

@ -0,0 +1,10 @@
version: '3'
services:
senpy:
image: "${IMAGENAME-gsiupm/senpy}:${VERSION-latest}"
entrypoint: ["/bin/bash"]
working_dir: "/senpy-plugins"
ports:
- 5000:5000
volumes:
- ".:/usr/src/app/"

docker-compose.test.yml (new file, 9 lines)

@ -0,0 +1,9 @@
version: '3'
services:
test:
image: "${IMAGENAME-gsiupm/senpy}:${VERSION-dev}"
entrypoint: ["py.test"]
volumes:
- ".:/usr/src/app/"
command:
[]

docker-compose.yml (new file, 11 lines)

@ -0,0 +1,11 @@
version: '3'
services:
senpy:
image: "${IMAGENAME-gsiupm/senpy}:${VERSION-dev}"
build:
context: .
dockerfile: Dockerfile${PYVERSION--2.7}
ports:
- 5001:5000
volumes:
- "./data:/data"


@ -78,10 +78,15 @@ def main():
help='Do not run a server, only install plugin dependencies')
parser.add_argument(
'--only-test',
'-t',
action='store_true',
default=False,
help='Do not run a server, just test all plugins')
parser.add_argument(
'--test',
'-t',
action='store_true',
default=False,
help='Test all plugins before launching the server')
parser.add_argument(
'--only-list',
'--list',
@ -99,12 +104,24 @@ def main():
action='store_false',
default=True,
help='Run a threaded server')
parser.add_argument(
'--no-deps',
'-n',
action='store_true',
default=False,
help='Skip installing dependencies')
parser.add_argument(
'--version',
'-v',
action='store_true',
default=False,
help='Output the senpy version and exit')
parser.add_argument(
'--allow-fail',
'--fail',
action='store_true',
default=False,
help='Do not exit if some plugins fail to activate')
args = parser.parse_args()
if args.version:
print('Senpy version {}'.format(senpy.__version__))
@ -119,19 +136,27 @@ def main():
data_folder=args.data_folder)
if args.only_list:
plugins = sp.plugins()
maxwidth = max(len(x.id) for x in plugins)
maxname = max(len(x.name) for x in plugins)
maxversion = max(len(x.version) for x in plugins)
print('Found {} plugins:'.format(len(plugins)))
for plugin in plugins:
import inspect
fpath = inspect.getfile(plugin.__class__)
print('{: <{width}} @ {}'.format(plugin.id, fpath, width=maxwidth))
print('\t{: <{maxname}} @ {: <{maxversion}} -> {}'.format(plugin.name,
plugin.version,
fpath,
maxname=maxname,
maxversion=maxversion))
return
sp.install_deps()
if not args.no_deps:
sp.install_deps()
if args.only_install:
return
sp.activate_all()
if args.only_test:
sp.activate_all(allow_fail=args.allow_fail)
if args.test or args.only_test:
easy_test(sp.plugins(), debug=args.debug)
return
if args.only_test:
return
print('Senpy version {}'.format(senpy.__version__))
print('Server running on port %s:%d. Ctrl+C to quit' % (args.host,
args.port))
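Taken together, the new CLI options map onto the extension API used above. A minimal sketch of that flow, assuming a working senpy install (not part of the commit):

from senpy.extensions import Senpy
from senpy.utils import easy_test

sp = Senpy(default_plugins=True)      # same constructor the test suite uses
sp.install_deps()                     # skipped when --no-deps is given
sp.activate_all(allow_fail=True)      # --allow-fail: log activation errors instead of exiting
easy_test(sp.plugins(), debug=True)   # run by --test (then serve) or --only-test (then exit)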


@ -3,6 +3,10 @@ from .models import Error, Results, Entry, from_string
import logging
logger = logging.getLogger(__name__)
boolean = [True, False]
API_PARAMS = {
"algorithm": {
"aliases": ["algorithms", "a", "algo"],
@ -13,14 +17,14 @@ API_PARAMS = {
"expanded-jsonld": {
"@id": "expanded-jsonld",
"aliases": ["expanded"],
"options": "boolean",
"options": boolean,
"required": True,
"default": False
},
"with_parameters": {
"aliases": ['withparameters',
'with-parameters'],
"options": "boolean",
"options": boolean,
"default": False,
"required": True
},
@ -29,14 +33,14 @@ API_PARAMS = {
"aliases": ["o"],
"default": "json-ld",
"required": True,
"options": ["json-ld", "turtle"],
"options": ["json-ld", "turtle", "ntriples"],
},
"help": {
"@id": "help",
"description": "Show additional help to know more about the possible parameters",
"aliases": ["h"],
"required": True,
"options": "boolean",
"options": boolean,
"default": False
},
"emotionModel": {
@ -83,7 +87,7 @@ WEB_PARAMS = {
"aliases": ["headers"],
"required": True,
"default": False,
"options": "boolean"
"options": boolean
},
}
@ -132,7 +136,7 @@ NIF_PARAMS = {
"aliases": ["u"],
"required": False,
"default": "RFC5147String",
"options": "RFC5147String"
"options": ["RFC5147String", ]
}
}
@ -159,7 +163,7 @@ def parse_params(indict, *specs):
wrong_params[param] = spec[param]
continue
if "options" in options:
if options["options"] == "boolean":
if options["options"] == boolean:
outdict[param] = outdict[param] in [None, True, 'true', '1']
elif outdict[param] not in options["options"]:
wrong_params[param] = spec[param]
@ -172,7 +176,7 @@ def parse_params(indict, *specs):
errors=wrong_params)
raise message
if 'algorithm' in outdict and not isinstance(outdict['algorithm'], list):
outdict['algorithm'] = outdict['algorithm'].split(',')
outdict['algorithm'] = list(outdict['algorithm'].split(','))
return outdict
@ -190,7 +194,8 @@ def parse_call(params):
params = parse_params(params, NIF_PARAMS)
if params['informat'] == 'text':
results = Results()
entry = Entry(nif__isString=params['input'])
entry = Entry(nif__isString=params['input'],
id='#') # Use @base
results.entries.append(entry)
elif params['informat'] == 'json-ld':
results = from_string(params['input'], cls=Results)
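A hedged illustration of the boolean option handling in parse_params; the 'verbose' spec below is invented for the example, only parse_params and boolean come from senpy.api:

from senpy.api import parse_params, boolean

spec = {
    'verbose': {
        'aliases': ['v'],
        'options': boolean,   # was the string "boolean" before this change
        'required': True,
        'default': False
    }
}

assert parse_params({'verbose': '1'}, spec)['verbose'] is True
assert parse_params({'verbose': 'false'}, spec)['verbose'] is False
assert parse_params({}, spec)['verbose'] is False   # default applies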


@ -18,15 +18,15 @@
Blueprints for Senpy
"""
from flask import (Blueprint, request, current_app, render_template, url_for,
jsonify)
jsonify, redirect)
from .models import Error, Response, Help, Plugins, read_schema, dump_schema, Datasets
from . import api
from .version import __version__
from functools import wraps
import logging
import traceback
import json
import base64
logger = logging.getLogger(__name__)
@ -34,6 +34,24 @@ api_blueprint = Blueprint("api", __name__)
demo_blueprint = Blueprint("demo", __name__, template_folder='templates')
ns_blueprint = Blueprint("ns", __name__)
_mimetypes_r = {'json-ld': ['application/ld+json'],
'turtle': ['text/turtle'],
'ntriples': ['application/n-triples'],
'text': ['text/plain']}
MIMETYPES = {}
for k, vs in _mimetypes_r.items():
for v in vs:
if v in MIMETYPES:
raise Exception('MIMETYPE {} specified for two formats: {} and {}'.format(v,
v,
MIMETYPES[v]))
MIMETYPES[v] = k
DEFAULT_MIMETYPE = 'application/ld+json'
DEFAULT_FORMAT = 'json-ld'
def get_params(req):
if req.method == 'POST':
@ -45,6 +63,30 @@ def get_params(req):
return indict
def encoded_url(url=None, base=None):
code = ''
if not url:
if request.method == 'GET':
url = request.full_path[1:] # Remove the first slash
else:
hash(frozenset(request.form.params().items()))
code = 'hash:{}'.format(hash)
code = code or base64.urlsafe_b64encode(url.encode()).decode()
if base:
return base + code
return url_for('api.decode', code=code, _external=True)
def decoded_url(code, base=None):
if code.startswith('hash:'):
raise Exception('Can not decode a URL for a POST request')
base = base or request.url_root
path = base64.urlsafe_b64decode(code.encode()).decode()
return base + path
@demo_blueprint.route('/')
def index():
ev = str(get_params(request).get('evaluation', False))
@ -59,13 +101,22 @@ def index():
def context(entity="context"):
context = Response._context
context['@vocab'] = url_for('ns.index', _external=True)
context['endpoint'] = url_for('api.api_root', _external=True)
return jsonify({"@context": context})
@api_blueprint.route('/d/<code>')
def decode(code):
try:
return redirect(decoded_url(code))
except Exception:
return Error('invalid URL').flask()
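The /d/&lt;code&gt; endpoint and encoded_url boil down to a URL-safe base64 round trip. A standalone sketch without the Flask request machinery (the URL below is only an example):

import base64

def encode(url):
    return base64.urlsafe_b64encode(url.encode()).decode()

def decode(code):
    return base64.urlsafe_b64decode(code.encode()).decode()

code = encode('api/?i=hello&outformat=turtle')
assert decode(code) == 'api/?i=hello&outformat=turtle'
# the encoded form is URL-safe, so it can be appended to /d/ or used as a @base prefix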
@ns_blueprint.route('/') # noqa: F811
def index():
context = Response._context
context['@vocab'] = url_for('.ns', _external=True)
context = Response._context.copy()
context['endpoint'] = url_for('api.api_root', _external=True)
return jsonify({"@context": context})
@ -81,7 +132,7 @@ def basic_api(f):
default_params = {
'inHeaders': False,
'expanded-jsonld': False,
'outformat': 'json-ld',
'outformat': None,
'with_parameters': True,
}
@ -100,29 +151,34 @@ def basic_api(f):
request.parameters = params
response = f(*args, **kwargs)
except (Exception) as ex:
if current_app.debug:
if current_app.debug or current_app.config['TESTING']:
raise
if not isinstance(ex, Error):
msg = "{}:\n\t{}".format(ex,
traceback.format_exc())
msg = "{}".format(ex)
ex = Error(message=msg, status=500)
logger.exception('Error returning analysis result')
response = ex
response.parameters = raw_params
logger.error(ex)
logger.exception(ex)
if 'parameters' in response and not params['with_parameters']:
del response.parameters
logger.info('Response: {}'.format(response))
mime = request.accept_mimetypes\
.best_match(MIMETYPES.keys(),
DEFAULT_MIMETYPE)
mimeformat = MIMETYPES.get(mime, DEFAULT_FORMAT)
outformat = params['outformat'] or mimeformat
return response.flask(
in_headers=params['inHeaders'],
headers=headers,
prefix=url_for('.api_root', _external=True),
prefix=params.get('prefix', encoded_url()),
context_uri=url_for('api.context',
entity=type(response).__name__,
_external=True),
outformat=params['outformat'],
outformat=outformat,
expanded=params['expanded-jsonld'])
return decorated_function
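The outformat is now negotiated from the Accept header when the parameter is not given. A hedged sketch of that lookup, using Werkzeug's MIMEAccept to stand in for request.accept_mimetypes:

from werkzeug.datastructures import MIMEAccept

MIMETYPES = {'application/ld+json': 'json-ld',
             'text/turtle': 'turtle',
             'application/n-triples': 'ntriples',
             'text/plain': 'text'}
DEFAULT_MIMETYPE = 'application/ld+json'
DEFAULT_FORMAT = 'json-ld'

accept = MIMEAccept([('text/turtle', 1)])              # simulates "Accept: text/turtle"
mime = accept.best_match(MIMETYPES.keys(), DEFAULT_MIMETYPE)
outformat = MIMETYPES.get(mime, DEFAULT_FORMAT)        # -> 'turtle'
# an explicit ?outformat=... still wins: outformat = params['outformat'] or mimeformat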


@ -18,14 +18,9 @@ import errno
import logging
logger = logging.getLogger(__name__)
from . import gsitk_compat
try:
from gsitk.datasets.datasets import DatasetManager
GSITK_AVAILABLE = True
except ImportError:
logger.warn('GSITK is not installed. Some functions will be unavailable.')
GSITK_AVAILABLE = False
logger = logging.getLogger(__name__)
class Senpy(object):
@ -95,7 +90,7 @@ class Senpy(object):
if plugin in self._plugins:
return self._plugins[plugin]
results = self.plugins(id='plugins/{}'.format(name))
results = self.plugins(id='endpoint:plugins/{}'.format(name))
if not results:
return Error(message="Plugin not found", status=404)
@ -167,8 +162,7 @@ class Senpy(object):
yield i
def install_deps(self):
for plugin in self.plugins(is_activated=True):
plugins.install_deps(plugin)
plugins.install_deps(*self.plugins())
def analyse(self, request):
"""
@ -203,16 +197,14 @@ class Senpy(object):
raise Error(
status=404,
message="The dataset '{}' is not valid".format(dataset))
dm = DatasetManager()
dm = gsitk_compat.DatasetManager()
datasets = dm.prepare_datasets(datasets_name)
return datasets
@property
def datasets(self):
if not GSITK_AVAILABLE:
raise Exception('GSITK is not available. Install it to use this function.')
self._dataset_list = {}
dm = DatasetManager()
dm = gsitk_compat.DatasetManager()
for item in dm.get_datasets():
for key in item:
if key in self._dataset_list:
@ -223,8 +215,6 @@ class Senpy(object):
return self._dataset_list
def evaluate(self, params):
if not GSITK_AVAILABLE:
raise Exception('GSITK is not available. Install it to use this function.')
logger.debug("evaluating request: {}".format(params))
results = AggregatedEvaluation()
results.parameters = params
@ -318,10 +308,15 @@ class Senpy(object):
else:
self._default = self._plugins[value.lower()]
def activate_all(self, sync=True):
def activate_all(self, sync=True, allow_fail=False):
ps = []
for plug in self._plugins.keys():
ps.append(self.activate_plugin(plug, sync=sync))
try:
self.activate_plugin(plug, sync=sync)
except Exception as ex:
if not allow_fail:
raise
logger.error('Could not activate {}: {}'.format(plug, ex))
return ps
def deactivate_all(self, sync=True):
@ -346,6 +341,7 @@ class Senpy(object):
logger.info(msg)
success = True
self._set_active(plugin, success)
return success
def activate_plugin(self, plugin_name, sync=True):
plugin_name = plugin_name.lower()
@ -357,7 +353,7 @@ class Senpy(object):
logger.info("Activating plugin: {}".format(plugin.name))
if sync or 'async' in plugin and not plugin.async:
self._activate(plugin)
return self._activate(plugin)
else:
th = Thread(target=partial(self._activate, plugin))
th.start()

senpy/gsitk_compat.py (new file, 23 lines)

@ -0,0 +1,23 @@
import logging
logger = logging.getLogger(__name__)
MSG = 'GSITK is not (properly) installed.'
IMPORTMSG = '{} Some functions will be unavailable.'.format(MSG)
RUNMSG = '{} Install it to use this function.'.format(MSG)
def raise_exception(*args, **kwargs):
raise Exception(RUNMSG)
try:
from gsitk.datasets.datasets import DatasetManager
from gsitk.evaluation.evaluation import Evaluation as Eval
from sklearn.pipeline import Pipeline
GSITK_AVAILABLE = True
modules = locals()
except ImportError:
logger.warn(IMPORTMSG)
GSITK_AVAILABLE = False
DatasetManager = Eval = Pipeline = raise_exception
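A hedged usage sketch of the shim: every gsitk-backed symbol still exists, but it raises only when actually used, so importing senpy keeps working without gsitk:

from senpy import gsitk_compat

print(gsitk_compat.GSITK_AVAILABLE)
try:
    dm = gsitk_compat.DatasetManager()   # real DatasetManager, or raise_exception
except Exception as ex:
    print(ex)   # "GSITK is not (properly) installed. Install it to use this function."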


@ -138,7 +138,7 @@ class BaseModel(with_metaclass(BaseMeta, CustomDict)):
@property
def id(self):
if '@id' not in self:
self['@id'] = ':{}_{}'.format(type(self).__name__, time.time())
self['@id'] = '_:{}_{}'.format(type(self).__name__, time.time())
return self['@id']
@id.setter
@ -146,7 +146,7 @@ class BaseModel(with_metaclass(BaseMeta, CustomDict)):
self['@id'] = value
def flask(self,
in_headers=True,
in_headers=False,
headers=None,
outformat='json-ld',
**kwargs):
@ -176,20 +176,22 @@ class BaseModel(with_metaclass(BaseMeta, CustomDict)):
def serialize(self, format='json-ld', with_mime=False, **kwargs):
js = self.jsonld(**kwargs)
content = json.dumps(js, indent=2, sort_keys=True)
if format == 'json-ld':
content = json.dumps(js, indent=2, sort_keys=True)
mimetype = "application/json"
elif format in ['turtle', ]:
elif format in ['turtle', 'ntriples']:
logger.debug(js)
content = json.dumps(js, indent=2, sort_keys=True)
base = kwargs.get('prefix')
g = Graph().parse(
data=content,
format='json-ld',
base=kwargs.get('prefix'),
context=self._context)
base=base,
context=[self._context,
{'@base': base}])
logger.debug(
'Parsing with prefix: {}'.format(kwargs.get('prefix')))
content = g.serialize(format='turtle').decode('utf-8')
content = g.serialize(format=format,
base=base).decode('utf-8')
mimetype = 'text/{}'.format(format)
else:
raise Error('Unknown outformat: {}'.format(format))
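A minimal sketch of the new ntriples output (not part of the commit); it assumes a working senpy install with its rdflib/JSON-LD dependencies, and uses an example prefix:

from senpy.models import Results, Entry

res = Results()
res.entries.append(Entry(nif__isString='hello', id='#'))   # '#' resolves against @base, as in api.parse_call
print(res.serialize(format='ntriples', prefix='http://example.com/senpy/'))
print(res.serialize(format='turtle', prefix='http://example.com/senpy/'))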
@ -205,25 +207,21 @@ class BaseModel(with_metaclass(BaseMeta, CustomDict)):
expanded=False):
result = self.serializable()
if context_uri or with_context:
result['@context'] = context_uri or self._context
# result = jsonld.compact(result,
# self._context,
# options={
# 'base': prefix,
# 'expandContext': self._context,
# 'senpy': prefix
# })
if expanded:
result = jsonld.expand(
result, options={'base': prefix,
'expandContext': self._context})
'expandContext': self._context})[0]
if not with_context:
try:
del result['@context']
except KeyError:
pass
elif context_uri:
result['@context'] = context_uri
else:
result['@context'] = self._context
return result
def validate(self, obj=None):
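A hedged example of the simplified context handling in jsonld() above: the context is embedded only when requested, and stripped otherwise:

from senpy.models import Results

r = Results()
assert '@context' in r.jsonld(with_context=True)
assert '@context' not in r.jsonld(with_context=False)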


@ -3,6 +3,7 @@ standard_library.install_aliases()
from future.utils import with_metaclass
from functools import partial
import os.path
import os
@ -22,18 +23,11 @@ import nltk
from .. import models, utils
from .. import api
from .. import gsitk_compat
logger = logging.getLogger(__name__)
try:
from gsitk.evaluation.evaluation import Evaluation as Eval
from sklearn.pipeline import Pipeline
GSITK_AVAILABLE = True
except ImportError:
logger.warn('GSITK is not installed. Some functions will be unavailable.')
GSITK_AVAILABLE = False
class PluginMeta(models.BaseMeta):
_classes = {}
@ -92,7 +86,7 @@ class Plugin(with_metaclass(PluginMeta, models.Plugin)):
if info:
self.update(info)
self.validate()
self.id = 'plugins/{}_{}'.format(self['name'], self['version'])
self.id = 'endpoint:plugins/{}_{}'.format(self['name'], self['version'])
self.is_activated = False
self._lock = threading.Lock()
self._directory = os.path.abspath(os.path.dirname(inspect.getfile(self.__class__)))
@ -332,7 +326,7 @@ class Box(AnalysisPlugin):
return self.transform(X)
def as_pipe(self):
pipe = Pipeline([('plugin', self)])
pipe = gsitk_compat.Pipeline([('plugin', self)])
pipe.name = self.name
return pipe
@ -530,7 +524,7 @@ def find_plugins(folders):
yield fpath
def from_path(fpath, **kwargs):
def from_path(fpath, install_on_fail=False, **kwargs):
logger.debug("Loading plugin from {}".format(fpath))
if fpath.endswith('.py'):
# We asume root is the dir of the file, and module is the name of the file
@ -540,7 +534,7 @@ def from_path(fpath, **kwargs):
yield instance
else:
info = parse_plugin_info(fpath)
yield from_info(info, **kwargs)
yield from_info(info, install_on_fail=install_on_fail, **kwargs)
def from_folder(folders, loader=from_path, **kwargs):
@ -551,7 +545,7 @@ def from_folder(folders, loader=from_path, **kwargs):
return plugins
def from_info(info, root=None, **kwargs):
def from_info(info, root=None, install_on_fail=True, **kwargs):
if any(x not in info for x in ('module',)):
raise ValueError('Plugin info is not valid: {}'.format(info))
module = info["module"]
@ -559,7 +553,12 @@ def from_info(info, root=None, **kwargs):
if not root and '_path' in info:
root = os.path.dirname(info['_path'])
return one_from_module(module, root=root, info=info, **kwargs)
fun = partial(one_from_module, module, root=root, info=info, **kwargs)
try:
return fun()
except (ImportError, LookupError):
install_deps(info)
return fun()
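The same retry idea in isolation (a generic sketch, not senpy code): build the loader once, install the declared dependencies only if the first attempt fails, then try again:

from functools import partial

def load_or_install(loader, install_deps, *args, **kwargs):
    fun = partial(loader, *args, **kwargs)
    try:
        return fun()
    except (ImportError, LookupError):
        install_deps()   # e.g. pip-install the plugin's requirements
        return fun()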
def parse_plugin_info(fpath):
@ -606,17 +605,9 @@ def _instances_in_module(module):
yield obj
def _from_module_name(module, root, info=None, install=True, **kwargs):
try:
module = load_module(module, root)
except (ImportError, LookupError):
if not install or not info:
raise
install_deps(info)
module = load_module(module, root)
def _from_module_name(module, root, info=None, **kwargs):
module = load_module(module, root)
for plugin in _from_loaded_module(module=module, root=root, info=info, **kwargs):
if install:
install_deps(plugin)
yield plugin
@ -628,12 +619,9 @@ def _from_loaded_module(module, info=None, **kwargs):
def evaluate(plugins, datasets, **kwargs):
if not GSITK_AVAILABLE:
raise Exception('GSITK is not available. Install it to use this function.')
ev = Eval(tuples=None,
datasets=datasets,
pipelines=[plugin.as_pipe() for plugin in plugins])
ev = gsitk_compat.Eval(tuples=None,
datasets=datasets,
pipelines=[plugin.as_pipe() for plugin in plugins])
ev.evaluate()
results = ev.results
evaluations = evaluations_to_JSONLD(results, **kwargs)


@ -413,7 +413,7 @@ function evaluate_JSON(){
url += "?algo="+plugin+"&dataset="+datasets
$('#doevaluate').attr("disabled", true);
$.ajax({type: "GET", url: url, dataType: 'json'}).done(function(resp) {
$.ajax({type: "GET", url: url, dataType: 'json'}).always(function(resp) {
$('#doevaluate').attr("disabled", false);
response = resp.responseText;


@ -80,7 +80,7 @@ def easy_test(plugin_list=None, debug=True):
for plug in plugin_list:
plug.test()
plug.log.info('My tests passed!')
logger.info('All tests passed!')
logger.info('All tests passed for {} plugins!'.format(len(plugin_list)))
except Exception:
if not debug:
raise


@ -21,7 +21,6 @@ class BlueprintsTest(TestCase):
def setUpClass(cls):
"""Set up only once, and re-use in every individual test"""
cls.app = Flask("test_extensions")
cls.app.debug = False
cls.client = cls.app.test_client()
cls.senpy = Senpy(default_plugins=True)
cls.senpy.init_app(cls.app)
@ -31,6 +30,9 @@ class BlueprintsTest(TestCase):
cls.senpy.activate_plugin("DummyRequired", sync=True)
cls.senpy.default_plugin = 'Dummy'
def setUp(self):
self.app.config['TESTING'] = True # Tell Flask not to catch Exceptions
def assertCode(self, resp, code):
self.assertEqual(resp.status_code, code)
@ -42,6 +44,7 @@ class BlueprintsTest(TestCase):
"""
Calling with no arguments should ask the user for more arguments
"""
self.app.config['TESTING'] = False # Errors are expected in this case
resp = self.client.get("/api/")
self.assertCode(resp, 400)
js = parse_resp(resp)
@ -81,7 +84,7 @@ class BlueprintsTest(TestCase):
Extra params that have a required argument that does not
have a default should raise an error.
"""
self.app.debug = False
self.app.config['TESTING'] = False # Errors are expected in this case
resp = self.client.get("/api/?i=My aloha mohame&algo=DummyRequired")
self.assertCode(resp, 400)
js = parse_resp(resp)
@ -97,7 +100,7 @@ class BlueprintsTest(TestCase):
The dummy plugin returns an empty response,\
it should contain the context
"""
self.app.debug = False
self.app.config['TESTING'] = False # Errors are expected in this case
resp = self.client.get("/api/?i=My aloha mohame&algo=DOESNOTEXIST")
self.assertCode(resp, 404)
js = parse_resp(resp)
@ -139,7 +142,7 @@ class BlueprintsTest(TestCase):
js = parse_resp(resp)
logging.debug(js)
assert "@id" in js
assert js["@id"] == "plugins/Dummy_0.1"
assert js["@id"] == "endpoint:plugins/Dummy_0.1"
def test_default(self):
""" Show only one plugin"""
@ -148,7 +151,7 @@ class BlueprintsTest(TestCase):
js = parse_resp(resp)
logging.debug(js)
assert "@id" in js
assert js["@id"] == "plugins/Dummy_0.1"
assert js["@id"] == "endpoint:plugins/Dummy_0.1"
def test_context(self):
resp = self.client.get("/api/contexts/context.jsonld")
@ -172,5 +175,6 @@ class BlueprintsTest(TestCase):
assert "help" in js["valid_parameters"]
def test_conversion(self):
self.app.config['TESTING'] = False # Errors are expected in this case
resp = self.client.get("/api/?input=hello&algo=emoRand&emotionModel=DOES NOT EXIST")
self.assertCode(resp, 404)


@ -121,8 +121,8 @@ class ExtensionsTest(TestCase):
# Leaf (defaultdict with __setattr__ and __getattr__.
r1 = analyse(self.senpy, algorithm="Dummy", input="tupni", output="tuptuo")
r2 = analyse(self.senpy, input="tupni", output="tuptuo")
assert r1.analysis[0] == "plugins/Dummy_0.1"
assert r2.analysis[0] == "plugins/Dummy_0.1"
assert r1.analysis[0] == "endpoint:plugins/Dummy_0.1"
assert r2.analysis[0] == "endpoint:plugins/Dummy_0.1"
assert r1.entries[0]['nif:isString'] == 'input'
def test_analyse_empty(self):
@ -156,8 +156,8 @@ class ExtensionsTest(TestCase):
r2 = analyse(self.senpy,
input="tupni",
output="tuptuo")
assert r1.analysis[0] == "plugins/Dummy_0.1"
assert r2.analysis[0] == "plugins/Dummy_0.1"
assert r1.analysis[0] == "endpoint:plugins/Dummy_0.1"
assert r2.analysis[0] == "endpoint:plugins/Dummy_0.1"
assert r1.entries[0]['nif:isString'] == 'input'
def test_analyse_error(self):


@ -6,7 +6,7 @@ import pickle
import shutil
import tempfile
from unittest import TestCase, skipIf
from unittest import TestCase
from senpy.models import Results, Entry, EmotionSet, Emotion, Plugins
from senpy import plugins
from senpy.plugins.conversion.emotion.centroids import CentroidConversion
@ -312,9 +312,7 @@ class PluginsTest(TestCase):
res = c._backwards_conversion(e)
assert res["onyx:hasEmotionCategory"] == "c2"
@skipIf(sys.version_info < (3, 0),
reason="requires Python3")
def test_evaluation(self):
def _test_evaluation(self):
testdata = []
for i in range(50):
testdata.append(["good", 1])
@ -348,6 +346,14 @@ class PluginsTest(TestCase):
smart_metrics = results[0].metrics[0]
assert abs(smart_metrics['accuracy'] - 1) < 0.01
def test_evaluation(self):
if sys.version_info < (3, 0):
with self.assertRaises(Exception) as context:
self._test_evaluation()
self.assertTrue('GSITK ' in str(context.exception))
else:
self._test_evaluation()
def make_mini_test(fpath):
def mini_test(self):