Mirror of https://github.com/gsi-upm/senpy
Several fixes and improvements
* Add Topic model
* Add PDB post-mortem debugging
* Add logger to plugins (`self.log`); see the usage sketch below
* Add NLTK resource auto-download
* Force installation of requirements even if adding doesn't work
* Add a method to find files in several possible locations. The plugin.open
  method now tries these locations if the file is opened in read mode;
  otherwise only the SENPY_DATA folder is used, to avoid writing to the
  package folder.
commit 1313853788
parent 697e779767
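A minimal usage sketch of the plugin-facing changes listed above (the per-plugin `self.log` logger and the read/write behaviour of `plugin.open`/`find_file`). The plugin class, file names and messages are hypothetical; only the helpers themselves come from this commit.

from senpy import plugins


class ExamplePlugin(plugins.Analysis):
    '''Hypothetical plugin, not part of this commit.'''
    version = 0

    def activate(self):
        # Read mode: open() now resolves the path via find_file(), trying '',
        # the plugin directory, its 'data' subfolder and data_folder in turn.
        with self.open('lexicon.txt') as f:        # 'lexicon.txt' is illustrative
            self.lexicon = set(f.read().split())
        self.log.debug('Loaded %d lexicon entries', len(self.lexicon))

    def analyse_entry(self, entry, parameters):
        self.log.debug('Analysing %s', entry)
        yield entry

    def deactivate(self):
        # Write mode: only absolute paths or data_folder are used, so this file
        # ends up in data_folder (SENPY_DATA) rather than the package directory.
        with self.open('cache.txt', 'w') as f:
            f.write('\n'.join(sorted(self.lexicon)))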
@@ -130,7 +130,7 @@ def main():
         return
     sp.activate_all()
     if args.only_test:
-        easy_test(sp.plugins())
+        easy_test(sp.plugins(), debug=args.debug)
         return
     print('Senpy version {}'.format(senpy.__version__))
     print('Server running on port %s:%d. Ctrl+C to quit' % (args.host,

@@ -203,24 +203,27 @@ class BaseModel(with_metaclass(BaseMeta, CustomDict)):
                context_uri=None,
                prefix=None,
                expanded=False):
-        ser = self.serializable()

-        result = jsonld.compact(
-            ser,
-            self._context,
-            options={
-                'base': prefix,
-                'expandContext': self._context,
-                'senpy': prefix
-            })
-        if context_uri:
-            result['@context'] = context_uri
+        result = self.serializable()
+        if context_uri or with_context:
+            result['@context'] = context_uri or self._context
+
+        # result = jsonld.compact(result,
+        #                         self._context,
+        #                         options={
+        #                             'base': prefix,
+        #                             'expandContext': self._context,
+        #                             'senpy': prefix
+        #                         })
         if expanded:
             result = jsonld.expand(
                 result, options={'base': prefix,
                                  'expandContext': self._context})
         if not with_context:
-            del result['@context']
+            try:
+                del result['@context']
+            except KeyError:
+                pass
         return result

     def validate(self, obj=None):

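A hedged sketch of what the reworked serialization means in practice; the model class and the context URI are illustrative, while the keyword arguments are the ones shown in the hunk above.

from senpy import models

resp = models.Response()                       # any schema-backed model
plain = resp.jsonld(with_context=False)
assert '@context' not in plain                 # a missing context no longer raises

ctx = resp.jsonld(with_context=True,
                  context_uri='http://example.com/context.jsonld')
assert ctx['@context'] == 'http://example.com/context.jsonld'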
@@ -323,7 +326,10 @@ def _add_class_from_schema(*args, **kwargs):


 for i in [
+        'aggregatedEvaluation',
         'analysis',
+        'dataset',
+        'datasets',
         'emotion',
         'emotionConversion',
         'emotionConversionPlugin',

@@ -331,19 +337,17 @@ for i in [
         'emotionModel',
         'emotionPlugin',
         'emotionSet',
+        'evaluation',
         'entity',
         'help',
+        'metric',
         'plugin',
         'plugins',
         'response',
         'results',
         'sentimentPlugin',
         'suggestion',
-        'aggregatedEvaluation',
-        'evaluation',
-        'metric',
-        'dataset',
-        'datasets',
+        'topic',

 ]:
     _add_class_from_schema(i)

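Once registered above, each schema name should be exposed as a model class with the usual capitalisation, which is how the new Topic model from the commit message becomes available. A minimal, illustrative check:

from senpy import models

# Dict-like model instances, generated from the corresponding JSON schemas.
topic = models.Topic()
dataset = models.Dataset()
assert isinstance(topic, models.BaseModel)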
@@ -18,6 +18,7 @@ import subprocess
 import importlib
 import yaml
 import threading
+import nltk

 from .. import models, utils
 from .. import api

@@ -95,6 +96,16 @@ class Plugin(with_metaclass(PluginMeta, models.Plugin)):
         self.is_activated = False
         self._lock = threading.Lock()
         self.data_folder = data_folder or os.getcwd()
+        self._directory = os.path.abspath(os.path.dirname(inspect.getfile(self.__class__)))
+        self._data_paths = ['',
+                            self._directory,
+                            os.path.join(self._directory, 'data'),
+                            self.data_folder]
+        self._log = logging.getLogger('{}.{}'.format(__name__, self.name))
+
+    @property
+    def log(self):
+        return self._log

     def validate(self):
         missing = []

@@ -123,9 +134,9 @@ class Plugin(with_metaclass(PluginMeta, models.Plugin)):
         for case in test_cases:
             try:
                 self.test_case(case)
-                logger.debug('Test case passed:\n{}'.format(pprint.pformat(case)))
+                self.log.debug('Test case passed:\n{}'.format(pprint.pformat(case)))
             except Exception as ex:
-                logger.warn('Test case failed:\n{}'.format(pprint.pformat(case)))
+                self.log.warn('Test case failed:\n{}'.format(pprint.pformat(case)))
                 raise

     def test_case(self, case):

@@ -148,10 +159,22 @@ class Plugin(with_metaclass(PluginMeta, models.Plugin)):
             raise
         assert not should_fail

-    def open(self, fpath, *args, **kwargs):
+    def find_file(self, fname):
+        for p in self._data_paths:
+            alternative = os.path.join(p, fname)
+            if os.path.exists(alternative):
+                return alternative
+        raise IOError('File does not exist: {}'.format(fname))
+
+    def open(self, fpath, mode='r'):
+        if 'w' in mode:
+            # When writing, only use absolute paths or data_folder
             if not os.path.isabs(fpath):
                 fpath = os.path.join(self.data_folder, fpath)
-        return open(fpath, *args, **kwargs)
+        else:
+            fpath = self.find_file(fpath)
+
+        return open(fpath, mode=mode)

     def serve(self, debug=True, **kwargs):
         utils.easy(plugin_list=[self, ], plugin_folder=None, debug=debug, **kwargs)

@@ -186,7 +209,7 @@ class Analysis(Plugin):

     def analyse_entries(self, entries, parameters):
         for entry in entries:
-            logger.debug('Analysing entry with plugin {}: {}'.format(self, entry))
+            self.log.debug('Analysing entry with plugin {}: {}'.format(self, entry))
             results = self.analyse_entry(entry, parameters)
             if inspect.isgenerator(results):
                 for result in results:

@@ -375,7 +398,7 @@ class ShelfMixin(object):
             with self.open(self.shelf_file, 'rb') as p:
                 self._sh = pickle.load(p)
         except (IndexError, EOFError, pickle.UnpicklingError):
-            logger.warning('{} has a corrupted shelf file!'.format(self.id))
+            self.log.warning('Corrupted shelf file: {}'.format(self.shelf_file))
             if not self.get('force_shelf', False):
                 raise
         return self._sh

@@ -402,32 +425,31 @@ class ShelfMixin(object):
         self._shelf_file = value

     def save(self):
-        logger.debug('saving pickle')
+        self.log.debug('Saving pickle')
         if hasattr(self, '_sh') and self._sh is not None:
             with self.open(self.shelf_file, 'wb') as f:
                 pickle.dump(self._sh, f)


-def pfilter(plugins, **kwargs):
+def pfilter(plugins, plugin_type=Analysis, **kwargs):
     """ Filter plugins by different criteria """
     if isinstance(plugins, models.Plugins):
         plugins = plugins.plugins
     elif isinstance(plugins, dict):
         plugins = plugins.values()
-    ptype = kwargs.pop('plugin_type', Plugin)
     logger.debug('#' * 100)
-    logger.debug('ptype {}'.format(ptype))
-    if ptype:
-        if isinstance(ptype, PluginMeta):
-            ptype = ptype.__name__
+    logger.debug('plugin_type {}'.format(plugin_type))
+    if plugin_type:
+        if isinstance(plugin_type, PluginMeta):
+            plugin_type = plugin_type.__name__
         try:
-            ptype = ptype[0].upper() + ptype[1:]
-            pclass = globals()[ptype]
+            plugin_type = plugin_type[0].upper() + plugin_type[1:]
+            pclass = globals()[plugin_type]
             logger.debug('Class: {}'.format(pclass))
             candidates = filter(lambda x: isinstance(x, pclass),
                                 plugins)
         except KeyError:
-            raise models.Error('{} is not a valid type'.format(ptype))
+            raise models.Error('{} is not a valid type'.format(plugin_type))
     else:
         candidates = plugins

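An illustrative call to the updated pfilter; the plugin list is hypothetical, and passing either a plugin class or its (capitalised-on-lookup) name should work, per the PluginMeta branch and the globals() lookup above.

from senpy import plugins

my_plugins = []        # hypothetical: would hold loaded plugin instances

# plugin_type now defaults to Analysis instead of Plugin.
analyses = list(plugins.pfilter(my_plugins))

# Explicit filtering by class or by name:
sentiment = list(plugins.pfilter(my_plugins, plugin_type='sentimentPlugin'))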
@@ -462,6 +484,7 @@ def _log_subprocess_output(process):

 def install_deps(*plugins):
     installed = False
+    nltk_resources = set()
     for info in plugins:
         requirements = info.get('requirements', [])
         if requirements:

@@ -477,6 +500,9 @@ def install_deps(*plugins):
                 installed = True
                 if exitcode != 0:
                     raise models.Error("Dependencies not properly installed")
+        nltk_resources |= set(info.get('nltk_resources', []))
+
+    installed |= nltk.download(list(nltk_resources))
     return installed


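A sketch of how plugin metadata could use the new NLTK handling; the 'requirements' and 'nltk_resources' keys appear in the hunks above, while the concrete values and plugin name are made up.

from senpy.plugins import install_deps

info = {
    'name': 'example',
    'requirements': [],                          # pip-installed, as before
    'nltk_resources': ['stopwords', 'punkt'],    # new: fetched via nltk.download()
}

# Roughly: returns True if pip installed something or the NLTK download succeeded.
install_deps(info)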
@@ -573,12 +599,14 @@ def _instances_in_module(module):
 def _from_module_name(module, root, info=None, install=True, **kwargs):
     try:
         module = load_module(module, root)
-    except ImportError:
+    except (ImportError, LookupError):
         if not install or not info:
             raise
         install_deps(info)
         module = load_module(module, root)
     for plugin in _from_loaded_module(module=module, root=root, info=info, **kwargs):
+        if install:
+            install_deps(plugin)
         yield plugin


@@ -10,8 +10,10 @@
     "wna": "http://www.gsi.dit.upm.es/ontologies/wnaffect/ns#",
     "emoml": "http://www.gsi.dit.upm.es/ontologies/onyx/vocabularies/emotionml/ns#",
     "xsd": "http://www.w3.org/2001/XMLSchema#",
+    "fam": "http://vocab.fusepool.info/fam#",
     "topics": {
-        "@id": "dc:subject"
+        "@id": "nif:topic",
+        "@container": "@set"
     },
     "entities": {
         "@id": "me:hasEntities"

@@ -1,6 +1,7 @@
 from . import models, __version__
 from collections import MutableMapping
 import pprint
+import pdb

 import logging
 logger = logging.getLogger(__name__)

@@ -32,8 +33,8 @@ def check_template(indict, template):
     if indict != template:
         raise models.Error(('Differences found.\n'
                             '\tExpected: {}\n'
-                            '\tFound: {}').format(pprint.pformat(indict),
-                                                  pprint.pformat(template)))
+                            '\tFound: {}').format(pprint.pformat(template),
+                                                  pprint.pformat(indict)))


 def convert_dictionary(original, mappings):

@@ -67,9 +68,10 @@ def easy_load(app=None, plugin_list=None, plugin_folder=None, **kwargs):
     return sp, app


-def easy_test(plugin_list=None):
+def easy_test(plugin_list=None, debug=True):
     logger.setLevel(logging.DEBUG)
     logging.getLogger().setLevel(logging.INFO)
+    try:
         if not plugin_list:
             import __main__
             logger.info('Loading classes from {}'.format(__main__))

@@ -77,8 +79,12 @@ def easy_test(plugin_list=None):
             plugin_list = plugins.from_module(__main__)
         for plug in plugin_list:
             plug.test()
-            logger.info('The tests for {} passed!'.format(plug.name))
+            plug.log.info('My tests passed!')
         logger.info('All tests passed!')
+    except Exception:
+        if not debug:
+            raise
+        pdb.post_mortem()


 def easy(host='0.0.0.0', port=5000, debug=True, **kwargs):

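Illustrative use of the new debug flag, assuming a class-based plugin in the usual senpy style (the DummyPlugin here is made up): with debug left at its default, a failing test drops into pdb post-mortem; debug=False re-raises instead, which suits CI runs and matches the debug=args.debug wiring in the first hunk.

from senpy import plugins
from senpy.utils import easy_test


class DummyPlugin(plugins.Analysis):
    '''Toy plugin used only for this example.'''
    version = 0
    test_cases = []        # no cases, so test() passes trivially

    def analyse_entry(self, entry, parameters):
        yield entry


easy_test([DummyPlugin()], debug=False)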
@@ -47,7 +47,7 @@ class ExtensionsTest(TestCase):

     def test_add_delete(self):
         '''Should be able to add and delete new plugins. '''
-        new = plugins.Plugin(name='new', description='new', version=0)
+        new = plugins.Analysis(name='new', description='new', version=0)
         self.senpy.add_plugin(new)
         assert new in self.senpy.plugins()
         self.senpy.delete_plugin(new)