2014-11-20 18:29:49 +00:00
|
|
|
"""
|
2017-02-27 10:37:43 +00:00
|
|
|
Main class for Senpy.
|
|
|
|
It orchestrates plugin (de)activation and analysis.
|
2014-11-20 18:29:49 +00:00
|
|
|
"""
|
2017-03-01 17:28:20 +00:00
|
|
|
from future import standard_library
|
|
|
|
standard_library.install_aliases()
|
2015-06-18 15:52:02 +00:00
|
|
|
|
2017-06-21 17:58:18 +00:00
|
|
|
from . import plugins, api
|
2018-04-24 17:36:50 +00:00
|
|
|
from .plugins import Plugin, evaluate
|
2018-01-22 10:17:03 +00:00
|
|
|
from .models import Error, AggregatedEvaluation
|
2017-02-27 10:37:43 +00:00
|
|
|
from .blueprints import api_blueprint, demo_blueprint, ns_blueprint
|
2015-02-24 06:15:25 +00:00
|
|
|
|
2017-02-02 15:35:58 +00:00
|
|
|
from threading import Thread
|
2017-08-27 16:43:40 +00:00
|
|
|
from functools import partial
|
2014-10-17 10:47:17 +00:00
|
|
|
import os
|
2017-02-28 03:01:05 +00:00
|
|
|
import copy
|
2017-11-22 16:46:52 +00:00
|
|
|
import errno
|
2014-11-07 18:12:21 +00:00
|
|
|
import logging
|
|
|
|
|
2018-01-22 10:17:03 +00:00
|
|
|
|
2014-11-07 18:12:21 +00:00
|
|
|
logger = logging.getLogger(__name__)
|
2014-10-17 10:47:17 +00:00
|
|
|
|
2018-04-24 18:02:03 +00:00
|
|
|
# gsitk is an optional dependency: dataset handling and evaluation are
# disabled when it is not installed.
try:
    from gsitk.datasets.datasets import DatasetManager
    GSITK_AVAILABLE = True
except ImportError:
    # Logger.warn is a deprecated alias of Logger.warning
    logger.warning('GSITK is not installed. Some functions will be unavailable.')
    GSITK_AVAILABLE = False
|
|
|
|
|
2014-11-07 18:12:21 +00:00
|
|
|
|
2014-10-17 10:47:17 +00:00
|
|
|
class Senpy(object):
|
2014-11-07 18:12:21 +00:00
|
|
|
""" Default Senpy extension for Flask """
|
2017-01-10 09:16:45 +00:00
|
|
|
    def __init__(self,
                 app=None,
                 plugin_folder=".",
                 data_folder=None,
                 default_plugins=False):
        """ Create the Senpy extension.

        app: optional Flask application to bind to (see init_app).
        plugin_folder: folder scanned for plugins on startup.
        data_folder: where plugins store their data; defaults to the
            SENPY_DATA environment variable or ./senpy_data.
        default_plugins: when True, load every bundled plugin; otherwise
            only the bundled conversion plugins are loaded.
        """
        default_data = os.path.join(os.getcwd(), 'senpy_data')
        self.data_folder = data_folder or os.environ.get('SENPY_DATA', default_data)
        # Create the data folder if needed (EAFP: tolerate an existing one)
        try:
            os.makedirs(self.data_folder)
        except OSError as e:
            if e.errno == errno.EEXIST:
                logger.debug('Data folder exists: {}'.format(self.data_folder))
            else:  # pragma: no cover
                raise
        # _default: cached default plugin (see the default_plugin property)
        self._default = None
        # _plugins: registry mapping lower-cased plugin name -> plugin
        self._plugins = {}
        if plugin_folder:
            self.add_folder(plugin_folder)

        if default_plugins:
            self.add_folder('plugins', from_root=True)
        else:
            # Add only conversion plugins
            self.add_folder(os.path.join('plugins', 'conversion'),
                            from_root=True)
        self.app = app
        if app is not None:
            self.init_app(app)
|
|
|
|
|
2014-11-07 18:12:21 +00:00
|
|
|
def init_app(self, app):
|
|
|
|
""" Initialise a flask app to add plugins to its context """
|
|
|
|
"""
|
2014-10-17 10:47:17 +00:00
|
|
|
Note: I'm not particularly fond of adding self.app and app.senpy, but
|
|
|
|
I can't think of a better way to do it.
|
2014-11-07 18:12:21 +00:00
|
|
|
"""
|
2014-10-17 10:47:17 +00:00
|
|
|
app.senpy = self
|
|
|
|
# Use the newstyle teardown_appcontext if it's available,
|
|
|
|
# otherwise fall back to the request context
|
|
|
|
if hasattr(app, 'teardown_appcontext'):
|
|
|
|
app.teardown_appcontext(self.teardown)
|
2018-01-03 08:39:30 +00:00
|
|
|
else: # pragma: no cover
|
2014-10-17 10:47:17 +00:00
|
|
|
app.teardown_request(self.teardown)
|
2016-02-21 18:36:24 +00:00
|
|
|
app.register_blueprint(api_blueprint, url_prefix="/api")
|
2017-02-27 10:37:43 +00:00
|
|
|
app.register_blueprint(ns_blueprint, url_prefix="/ns")
|
2016-02-19 18:24:09 +00:00
|
|
|
app.register_blueprint(demo_blueprint, url_prefix="/")
|
2014-10-17 10:47:17 +00:00
|
|
|
|
2018-01-03 08:39:30 +00:00
|
|
|
def add_plugin(self, plugin):
|
|
|
|
self._plugins[plugin.name.lower()] = plugin
|
|
|
|
|
|
|
|
def delete_plugin(self, plugin):
|
|
|
|
del self._plugins[plugin.name.lower()]
|
|
|
|
|
|
|
|
def plugins(self, **kwargs):
|
|
|
|
""" Return the plugins registered for a given application. Filtered by criteria """
|
|
|
|
return list(plugins.pfilter(self._plugins, **kwargs))
|
|
|
|
|
|
|
|
def get_plugin(self, name, default=None):
|
|
|
|
if name == 'default':
|
|
|
|
return self.default_plugin
|
|
|
|
plugin = name.lower()
|
|
|
|
if plugin in self._plugins:
|
|
|
|
return self._plugins[plugin]
|
|
|
|
|
|
|
|
results = self.plugins(id='plugins/{}'.format(name))
|
|
|
|
|
|
|
|
if not results:
|
|
|
|
return Error(message="Plugin not found", status=404)
|
|
|
|
return results[0]
|
|
|
|
|
|
|
|
@property
|
|
|
|
def analysis_plugins(self):
|
|
|
|
""" Return only the analysis plugins """
|
|
|
|
return self.plugins(plugin_type='analysisPlugin')
|
|
|
|
|
2017-02-27 10:37:43 +00:00
|
|
|
def add_folder(self, folder, from_root=False):
|
2018-01-03 08:39:30 +00:00
|
|
|
""" Find plugins in this folder and add them to this instance """
|
2017-02-27 10:37:43 +00:00
|
|
|
if from_root:
|
|
|
|
folder = os.path.join(os.path.dirname(__file__), folder)
|
2014-12-01 17:27:20 +00:00
|
|
|
logger.debug("Adding folder: %s", folder)
|
2014-11-07 18:12:21 +00:00
|
|
|
if os.path.isdir(folder):
|
2018-01-03 08:39:30 +00:00
|
|
|
new_plugins = plugins.from_folder([folder],
|
|
|
|
data_folder=self.data_folder)
|
|
|
|
for plugin in new_plugins:
|
|
|
|
self.add_plugin(plugin)
|
2014-11-07 18:12:21 +00:00
|
|
|
else:
|
2018-01-03 08:39:30 +00:00
|
|
|
raise AttributeError("Not a folder or does not exist: %s", folder)
|
2014-11-07 18:12:21 +00:00
|
|
|
|
2017-06-21 17:58:18 +00:00
|
|
|
def _get_plugins(self, request):
|
2017-03-13 20:06:19 +00:00
|
|
|
if not self.analysis_plugins:
|
2017-01-10 09:16:45 +00:00
|
|
|
raise Error(
|
|
|
|
status=404,
|
|
|
|
message=("No plugins found."
|
2017-03-13 20:06:19 +00:00
|
|
|
" Please install one."))
|
2017-06-21 17:58:18 +00:00
|
|
|
algos = request.parameters.get('algorithm', None)
|
|
|
|
if not algos:
|
|
|
|
if self.default_plugin:
|
|
|
|
algos = [self.default_plugin.name, ]
|
|
|
|
else:
|
|
|
|
raise Error(
|
|
|
|
status=404,
|
|
|
|
message="No default plugin found, and None provided")
|
2016-02-21 18:36:24 +00:00
|
|
|
|
2017-03-13 20:06:19 +00:00
|
|
|
plugins = list()
|
|
|
|
for algo in algos:
|
2018-01-03 08:39:30 +00:00
|
|
|
algo = algo.lower()
|
|
|
|
if algo not in self._plugins:
|
|
|
|
msg = ("The algorithm '{}' is not valid\n"
|
|
|
|
"Valid algorithms: {}").format(algo,
|
|
|
|
self._plugins.keys())
|
|
|
|
logger.debug(msg)
|
2017-03-13 20:06:19 +00:00
|
|
|
raise Error(
|
|
|
|
status=404,
|
2018-01-03 08:39:30 +00:00
|
|
|
message=msg)
|
|
|
|
plugins.append(self._plugins[algo])
|
2017-03-13 20:06:19 +00:00
|
|
|
return plugins
|
|
|
|
|
2017-06-21 17:58:18 +00:00
|
|
|
def _process_entries(self, entries, req, plugins):
|
2017-08-27 16:43:40 +00:00
|
|
|
"""
|
|
|
|
Recursively process the entries with the first plugin in the list, and pass the results
|
|
|
|
to the rest of the plugins.
|
|
|
|
"""
|
2017-03-13 20:06:19 +00:00
|
|
|
if not plugins:
|
|
|
|
for i in entries:
|
|
|
|
yield i
|
|
|
|
return
|
|
|
|
plugin = plugins[0]
|
2017-08-27 16:43:40 +00:00
|
|
|
self._activate(plugin) # Make sure the plugin is activated
|
2018-01-01 12:13:17 +00:00
|
|
|
specific_params = api.parse_extra_params(req, plugin)
|
2017-06-21 17:58:18 +00:00
|
|
|
req.analysis.append({'plugin': plugin,
|
|
|
|
'parameters': specific_params})
|
2017-03-13 20:06:19 +00:00
|
|
|
results = plugin.analyse_entries(entries, specific_params)
|
2017-06-21 17:58:18 +00:00
|
|
|
for i in self._process_entries(results, req, plugins[1:]):
|
2017-03-13 20:06:19 +00:00
|
|
|
yield i
|
|
|
|
|
2017-08-27 16:43:40 +00:00
|
|
|
def install_deps(self):
|
2018-01-03 08:39:30 +00:00
|
|
|
for plugin in self.plugins(is_activated=True):
|
2017-08-27 16:43:40 +00:00
|
|
|
plugins.install_deps(plugin)
|
|
|
|
|
2017-06-21 17:58:18 +00:00
|
|
|
def analyse(self, request):
|
2017-03-13 20:06:19 +00:00
|
|
|
"""
|
|
|
|
Main method that analyses a request, either from CLI or HTTP.
|
2017-06-21 17:58:18 +00:00
|
|
|
It takes a processed request, provided by the user, as returned
|
|
|
|
by api.parse_call().
|
2017-03-13 20:06:19 +00:00
|
|
|
"""
|
2017-06-21 17:58:18 +00:00
|
|
|
logger.debug("analysing request: {}".format(request))
|
2018-01-18 12:25:20 +00:00
|
|
|
entries = request.entries
|
|
|
|
request.entries = []
|
|
|
|
plugins = self._get_plugins(request)
|
|
|
|
results = request
|
|
|
|
for i in self._process_entries(entries, results, plugins):
|
|
|
|
results.entries.append(i)
|
|
|
|
self.convert_emotions(results)
|
|
|
|
logger.debug("Returning analysis result: {}".format(results))
|
2017-06-21 17:58:18 +00:00
|
|
|
results.analysis = [i['plugin'].id for i in results.analysis]
|
|
|
|
return results
|
2014-11-04 20:31:41 +00:00
|
|
|
|
2018-01-22 10:17:03 +00:00
|
|
|
def _get_datasets(self, request):
|
|
|
|
if not self.datasets:
|
|
|
|
raise Error(
|
|
|
|
status=404,
|
|
|
|
message=("No datasets found."
|
|
|
|
" Please verify DatasetManager"))
|
|
|
|
datasets_name = request.parameters.get('dataset', None).split(',')
|
|
|
|
for dataset in datasets_name:
|
|
|
|
if dataset not in self.datasets:
|
|
|
|
logger.debug(("The dataset '{}' is not valid\n"
|
|
|
|
"Valid datasets: {}").format(dataset,
|
2018-04-24 17:36:50 +00:00
|
|
|
self.datasets.keys()))
|
2018-01-22 10:17:03 +00:00
|
|
|
raise Error(
|
|
|
|
status=404,
|
|
|
|
message="The dataset '{}' is not valid".format(dataset))
|
2018-04-24 18:02:03 +00:00
|
|
|
dm = DatasetManager()
|
|
|
|
datasets = dm.prepare_datasets(datasets_name)
|
2018-01-22 10:17:03 +00:00
|
|
|
return datasets
|
2018-04-24 17:36:50 +00:00
|
|
|
|
2018-01-22 10:17:03 +00:00
|
|
|
@property
|
|
|
|
def datasets(self):
|
2018-04-24 18:02:03 +00:00
|
|
|
if not GSITK_AVAILABLE:
|
|
|
|
raise Exception('GSITK is not available. Install it to use this function.')
|
2018-01-22 10:17:03 +00:00
|
|
|
self._dataset_list = {}
|
2018-04-24 18:02:03 +00:00
|
|
|
dm = DatasetManager()
|
|
|
|
for item in dm.get_datasets():
|
2018-01-22 10:17:03 +00:00
|
|
|
for key in item:
|
|
|
|
if key in self._dataset_list:
|
|
|
|
continue
|
|
|
|
properties = item[key]
|
|
|
|
properties['@id'] = key
|
|
|
|
self._dataset_list[key] = properties
|
|
|
|
return self._dataset_list
|
|
|
|
|
|
|
|
def evaluate(self, params):
|
2018-04-24 18:02:03 +00:00
|
|
|
if not GSITK_AVAILABLE:
|
|
|
|
raise Exception('GSITK is not available. Install it to use this function.')
|
2018-01-22 10:17:03 +00:00
|
|
|
logger.debug("evaluating request: {}".format(params))
|
2018-04-24 17:36:50 +00:00
|
|
|
results = AggregatedEvaluation()
|
|
|
|
results.parameters = params
|
|
|
|
datasets = self._get_datasets(results)
|
|
|
|
plugins = self._get_plugins(results)
|
|
|
|
for eval in evaluate(plugins, datasets):
|
|
|
|
results.evaluations.append(eval)
|
|
|
|
if 'with_parameters' not in results.parameters:
|
|
|
|
del results.parameters
|
|
|
|
logger.debug("Returning evaluation result: {}".format(results))
|
2018-01-22 10:17:03 +00:00
|
|
|
return results
|
|
|
|
|
2017-02-27 10:37:43 +00:00
|
|
|
def _conversion_candidates(self, fromModel, toModel):
|
2018-01-03 08:39:30 +00:00
|
|
|
candidates = self.plugins(plugin_type='emotionConversionPlugin')
|
|
|
|
for candidate in candidates:
|
2017-02-27 10:37:43 +00:00
|
|
|
for pair in candidate.onyx__doesConversion:
|
|
|
|
logging.debug(pair)
|
|
|
|
|
|
|
|
if pair['onyx:conversionFrom'] == fromModel \
|
|
|
|
and pair['onyx:conversionTo'] == toModel:
|
|
|
|
yield candidate
|
|
|
|
|
2017-06-21 17:58:18 +00:00
|
|
|
    def convert_emotions(self, resp):
        """
        Conversion of all emotions in a response **in place**.
        In addition to converting from one model to another, it has
        to include the conversion plugin to the analysis list.
        Needless to say, this is far from an elegant solution, but it works.
        @todo refactor and clean up
        """
        plugins = [i['plugin'] for i in resp.analysis]
        params = resp.parameters
        toModel = params.get('emotionModel', None)
        if not toModel:
            # No target emotion model requested: nothing to convert.
            return

        logger.debug('Asked for model: {}'.format(toModel))
        # 'conversion' controls the output shape: 'full' keeps the original
        # emotions next to the converted ones, 'nested' links each converted
        # emotion to its source via prov:wasDerivedFrom.
        output = params.get('conversion', None)
        # candidates: analysis-plugin id -> conversion plugin able to turn
        # that plugin's emotion model into toModel.
        candidates = {}
        for plugin in plugins:
            try:
                fromModel = plugin.get('onyx:usesEmotionModel', None)
                candidates[plugin.id] = next(self._conversion_candidates(fromModel, toModel))
                logger.debug('Analysis plugin {} uses model: {}'.format(plugin.id, fromModel))
            except StopIteration:
                e = Error(('No conversion plugin found for: '
                           '{} -> {}'.format(fromModel, toModel)),
                          status=404)
                # Attach the partial response so the caller can inspect it.
                e.original_response = resp
                e.parameters = params
                raise e
        newentries = []
        for i in resp.entries:
            if output == "full":
                newemotions = copy.deepcopy(i.emotions)
            else:
                newemotions = []
            for j in i.emotions:
                plugname = j['prov:wasGeneratedBy']
                candidate = candidates[plugname]
                # Record the conversion step in the analysis provenance.
                resp.analysis.append({'plugin': candidate,
                                      'parameters': params})
                # NOTE(review): fromModel here holds whatever value the
                # candidate-selection loop above left behind, not the model
                # of the plugin that generated this emotion -- confirm all
                # analysis plugins share one model.
                for k in candidate.convert(j, fromModel, toModel, params):
                    k.prov__wasGeneratedBy = candidate.id
                    if output == 'nested':
                        k.prov__wasDerivedFrom = j
                    newemotions.append(k)
            i.emotions = newemotions
            newentries.append(i)
        resp.entries = newentries
|
|
|
|
|
2014-11-04 20:31:41 +00:00
|
|
|
@property
|
|
|
|
def default_plugin(self):
|
2018-01-03 08:39:30 +00:00
|
|
|
if not self._default or not self._default.is_activated:
|
|
|
|
candidates = self.plugins(plugin_type='analysisPlugin',
|
|
|
|
is_activated=True)
|
2017-02-27 10:37:43 +00:00
|
|
|
if len(candidates) > 0:
|
2018-01-03 08:39:30 +00:00
|
|
|
self._default = candidates[0]
|
|
|
|
else:
|
|
|
|
self._default = None
|
|
|
|
logger.debug("Default: {}".format(self._default))
|
|
|
|
return self._default
|
2017-02-27 10:37:43 +00:00
|
|
|
|
|
|
|
@default_plugin.setter
|
|
|
|
def default_plugin(self, value):
|
2018-01-03 08:39:30 +00:00
|
|
|
if isinstance(value, Plugin):
|
|
|
|
if not value.is_activated:
|
|
|
|
raise AttributeError('The default plugin has to be activated.')
|
2017-02-27 10:37:43 +00:00
|
|
|
self._default = value
|
2018-01-03 08:39:30 +00:00
|
|
|
|
2014-11-04 20:31:41 +00:00
|
|
|
else:
|
2018-01-03 08:39:30 +00:00
|
|
|
self._default = self._plugins[value.lower()]
|
2014-10-17 10:47:17 +00:00
|
|
|
|
2017-08-27 16:43:40 +00:00
|
|
|
def activate_all(self, sync=True):
|
2014-12-01 17:27:20 +00:00
|
|
|
ps = []
|
2018-01-03 08:39:30 +00:00
|
|
|
for plug in self._plugins.keys():
|
2014-12-01 17:27:20 +00:00
|
|
|
ps.append(self.activate_plugin(plug, sync=sync))
|
|
|
|
return ps
|
|
|
|
|
2017-08-27 16:43:40 +00:00
|
|
|
def deactivate_all(self, sync=True):
|
2014-12-01 17:27:20 +00:00
|
|
|
ps = []
|
2018-01-03 08:39:30 +00:00
|
|
|
for plug in self._plugins.keys():
|
2014-12-01 17:27:20 +00:00
|
|
|
ps.append(self.deactivate_plugin(plug, sync=sync))
|
|
|
|
return ps
|
|
|
|
|
2017-08-27 16:43:40 +00:00
|
|
|
def _set_active(self, plugin, active=True, *args, **kwargs):
|
2018-01-03 08:39:30 +00:00
|
|
|
''' We're using a variable in the plugin itself to activate/deactivate plugins.\
|
2016-02-20 17:15:04 +00:00
|
|
|
Note that plugins may activate themselves by setting this variable.
|
|
|
|
'''
|
2017-08-27 16:43:40 +00:00
|
|
|
plugin.is_activated = active
|
2017-01-10 09:16:45 +00:00
|
|
|
|
2017-08-27 16:43:40 +00:00
|
|
|
def _activate(self, plugin):
|
|
|
|
success = False
|
|
|
|
with plugin._lock:
|
|
|
|
if plugin.is_activated:
|
|
|
|
return
|
2018-01-01 12:13:17 +00:00
|
|
|
plugin.activate()
|
|
|
|
msg = "Plugin activated: {}".format(plugin.name)
|
|
|
|
logger.info(msg)
|
|
|
|
success = True
|
|
|
|
self._set_active(plugin, success)
|
2017-02-27 10:37:43 +00:00
|
|
|
|
2017-08-27 16:43:40 +00:00
|
|
|
def activate_plugin(self, plugin_name, sync=True):
|
2018-01-03 08:39:30 +00:00
|
|
|
plugin_name = plugin_name.lower()
|
|
|
|
if plugin_name not in self._plugins:
|
2017-01-10 09:16:45 +00:00
|
|
|
raise Error(
|
|
|
|
message="Plugin not found: {}".format(plugin_name), status=404)
|
2018-01-03 08:39:30 +00:00
|
|
|
plugin = self._plugins[plugin_name]
|
2016-02-20 17:15:04 +00:00
|
|
|
|
2017-08-27 16:43:40 +00:00
|
|
|
logger.info("Activating plugin: {}".format(plugin.name))
|
|
|
|
|
|
|
|
if sync or 'async' in plugin and not plugin.async:
|
|
|
|
self._activate(plugin)
|
|
|
|
else:
|
|
|
|
th = Thread(target=partial(self._activate, plugin))
|
|
|
|
th.start()
|
|
|
|
return th
|
2017-02-02 15:35:58 +00:00
|
|
|
|
2017-08-27 16:43:40 +00:00
|
|
|
def _deactivate(self, plugin):
|
|
|
|
with plugin._lock:
|
|
|
|
if not plugin.is_activated:
|
|
|
|
return
|
2018-01-01 12:13:17 +00:00
|
|
|
plugin.deactivate()
|
|
|
|
logger.info("Plugin deactivated: {}".format(plugin.name))
|
2016-02-20 17:15:04 +00:00
|
|
|
|
2017-08-27 16:43:40 +00:00
|
|
|
def deactivate_plugin(self, plugin_name, sync=True):
|
2018-01-03 08:39:30 +00:00
|
|
|
plugin_name = plugin_name.lower()
|
|
|
|
if plugin_name not in self._plugins:
|
2017-08-27 16:43:40 +00:00
|
|
|
raise Error(
|
|
|
|
message="Plugin not found: {}".format(plugin_name), status=404)
|
2018-01-03 08:39:30 +00:00
|
|
|
plugin = self._plugins[plugin_name]
|
2017-08-27 16:43:40 +00:00
|
|
|
|
|
|
|
self._set_active(plugin, False)
|
|
|
|
|
2017-03-01 11:25:07 +00:00
|
|
|
if sync or 'async' in plugin and not plugin.async:
|
2017-08-27 16:43:40 +00:00
|
|
|
self._deactivate(plugin)
|
2014-12-01 17:27:20 +00:00
|
|
|
else:
|
2017-08-27 16:43:40 +00:00
|
|
|
th = Thread(target=partial(self._deactivate, plugin))
|
2017-02-02 15:35:58 +00:00
|
|
|
th.start()
|
2017-04-10 14:36:43 +00:00
|
|
|
return th
|
2014-11-04 20:31:41 +00:00
|
|
|
|
2014-10-17 10:47:17 +00:00
|
|
|
def teardown(self, exception):
|
|
|
|
pass
|