Mirror of https://github.com/gsi-upm/senpy (synced 2024-12-22 04:58:12 +00:00)
Released v0.7
Bug-fixes and improvements:
* Closes #5
* Closes #1
* Adds Client (beta)
* Added several schemas
* Lighter string representation -> should avoid delays in the analysis with plugins that have 'heavy' attributes

Backwards-incompatible changes:
* Context in headers by default
* All schemas include a "@type" argument that is used for autodetection in the client

... And possibly many more, this is still <1.0
This commit is contained in:
parent fbf0384985
commit 908090f634
Dockerfile-2.7
@@ -1,4 +1,4 @@
-from python:2.7-slim
+from python:2.7
 
 WORKDIR /usr/src/app
 ADD requirements.txt /usr/src/app/
Dockerfile-3.4
@@ -1,4 +1,4 @@
-from python:3.4-slim
+from python:3.4
 
 WORKDIR /usr/src/app
 ADD requirements.txt /usr/src/app/
Dockerfile-3.5
@@ -1,4 +1,4 @@
-from python:3.5-slim
+from python:3.5
 
 WORKDIR /usr/src/app
 ADD requirements.txt /usr/src/app/
Deleted debug Dockerfile
@@ -1,5 +0,0 @@
-from python:3.4
-
-RUN pip install pytest
-ADD requirements.txt /usr/src/app/
-RUN pip install -r /usr/src/app/requirements.txt
Dockerfile.template
@@ -1,4 +1,4 @@
-from python:{{PYVERSION}}-slim
+from python:{{PYVERSION}}
 
 WORKDIR /usr/src/app
 ADD requirements.txt /usr/src/app/
Makefile (31 changed lines)
@@ -4,6 +4,7 @@ NAME=senpy
 REPO=gsiupm
 VERSION=$(shell cat $(NAME)/VERSION)
 TARNAME=$(NAME)-$(subst -,.,$(VERSION)).tar.gz
+IMAGENAME=$(REPO)/$(NAME):$(VERSION)
 
 all: build run
 
@@ -22,27 +23,24 @@ dockerfiles: $(addprefix Dockerfile-,$(PYVERSIONS))
 Dockerfile-%: Dockerfile.template
 	sed "s/{{PYVERSION}}/$*/" Dockerfile.template > Dockerfile-$*
 
-build: $(addprefix build-, $(PYMAIN))
+quick_build: $(addprefix build-, $(PYMAIN))
 
-buildall: $(addprefix build-, $(PYVERSIONS))
+build: $(addprefix build-, $(PYVERSIONS))
 
 build-%: Dockerfile-%
-	docker build -t '$(REPO)/$(NAME):$(VERSION)-python$*' -f Dockerfile-$* .;
+	docker build -t '$(IMAGENAME)-python$*' -f Dockerfile-$* .;
 
-build-debug-%:
-	docker build -t '$(NAME)-debug' -f Dockerfile-debug-$* .;
+quick_test: $(addprefix test-,$(PYMAIN))
 
-test: $(addprefix test-,$(PYMAIN))
 
-testall: $(addprefix test-,$(PYVERSIONS))
+test: $(addprefix test-,$(PYVERSIONS))
 
 debug-%:
-	docker run --rm -w /usr/src/app/ -v $$PWD:/usr/src/app --entrypoint=/bin/bash -ti $(NAME)-debug ;
+	(docker start $(NAME)-debug && docker attach $(NAME)-debug) || docker run -w /usr/src/app/ -v $$PWD:/usr/src/app --entrypoint=/bin/bash -ti --name $(NAME)-debug '$(IMAGENAME)-python$*'
 
 debug: debug-$(PYMAIN)
 
 test-%: build-%
-	docker run --rm -w /usr/src/app/ --entrypoint=/usr/local/bin/python -ti '$(REPO)/$(NAME):$(VERSION)-python$*' setup.py test --addopts "-vvv -s" ;
+	docker run --rm -w /usr/src/app/ --entrypoint=/usr/local/bin/python -ti '$(IMAGENAME)-python$*' setup.py test --addopts "-vvv -s" ;
 
 dist/$(TARNAME):
 	docker run --rm -ti -v $$PWD:/usr/src/app/ -w /usr/src/app/ python:$(PYMAIN) python setup.py sdist;
@@ -55,12 +53,13 @@ pip_test-%: sdist
 pip_test: $(addprefix pip_test-,$(PYVERSIONS))
 
 upload-%: test-%
-	docker push '$(REPO)/$(NAME):$(VERSION)-python$*'
+	docker push '$(IMAGENAME)-python$*'
 
-upload: testall $(addprefix upload-,$(PYVERSIONS))
-	docker tag '$(REPO)/$(NAME):$(VERSION)-python$(PYMAIN)' '$(REPO)/$(NAME):$(VERSION)'
-	docker tag '$(REPO)/$(NAME):$(VERSION)-python$(PYMAIN)' '$(REPO)/$(NAME)'
-	docker push '$(REPO)/$(NAME):$(VERSION)'
+upload: test $(addprefix upload-,$(PYVERSIONS))
+	docker tag '$(IMAGENAME)-python$(PYMAIN)' '$(IMAGENAME)'
+	docker tag '$(IMAGENAME)-python$(PYMAIN)' '$(REPO)/$(NAME)'
+	docker push '$(IMAGENAME)'
+	docker push '$(REPO)/$(NAME)'
 
 clean:
 	@docker ps -a | awk '/$(REPO)\/$(NAME)/{ split($$2, vers, "-"); if(vers[1] != "${VERSION}"){ print $$1;}}' | xargs docker rm 2>/dev/null|| true
@@ -78,6 +77,6 @@ pip_upload:
 pip_test: $(addprefix pip_test-,$(PYVERSIONS))
 
 run: build
-	docker run --rm -p 5000:5000 -ti '$(REPO)/$(NAME):$(VERSION)-python$(PYMAIN)'
+	docker run --rm -p 5000:5000 -ti '$(IMAGENAME)-python$(PYMAIN)'
 
 .PHONY: test test-% build-% build test pip_test run yapf dev
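The Dockerfile-% rule above regenerates one Dockerfile per Python version from Dockerfile.template by substituting the {{PYVERSION}} placeholder with sed. The same substitution, expressed as a standalone Python sketch (the target version 3.5 is only an example):

    # Rough Python equivalent of `sed "s/{{PYVERSION}}/$*/" Dockerfile.template > Dockerfile-$*`.
    with open('Dockerfile.template') as template:
        contents = template.read()

    with open('Dockerfile-3.5', 'w') as dockerfile:
        dockerfile.write(contents.replace('{{PYVERSION}}', '3.5'))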
senpy/VERSION
@@ -1 +1 @@
-0.7.0-dev3
+0.7.0
senpy/client.py (new file, 45 lines)
import requests
import logging
from . import models

logger = logging.getLogger(__name__)


class Client(object):

    def __init__(self, endpoint):
        self.endpoint = endpoint

    def analyse(self, input, method='GET', **kwargs):
        return self.request('/', method=method, input=input, **kwargs)

    def request(self, path=None, method='GET', **params):
        url = '{}{}'.format(self.endpoint, path)
        response = requests.request(method=method,
                                    url=url,
                                    params=params)
        try:
            resp = models.from_dict(response.json())
            resp.validate(resp)
            return resp
        except Exception as ex:
            logger.error(('There seems to be a problem with the response:\n'
                          '\tURL: {url}\n'
                          '\tError: {error}\n'
                          '\t\n'
                          '#### Response:\n'
                          '\tCode: {code}'
                          '\tContent: {content}'
                          '\n').format(error=ex,
                                       url=url,
                                       code=response.status_code,
                                       content=response.content))
            raise ex


if __name__ == '__main__':
    c = Client('http://senpy.cluster.gsi.dit.upm.es/api/')
    resp = c.analyse('hello')
    # print(resp)
    print(resp.entries)
    resp.validate()
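A rough usage sketch of the new (beta) client. The endpoint below is only an example and assumes a senpy server is already running there; the printed attribute follows the __main__ block above.

    from senpy.client import Client

    client = Client('http://localhost:5000/api/')   # example endpoint, adjust to your deployment
    response = client.analyse('This is a test')     # sends 'input' as a GET query parameter
    print(response.entries)                         # entries of the returned response, as above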
senpy/extensions.py
@@ -161,7 +161,7 @@ class Senpy(object):
                 self._set_active_plugin(plugin_name, success)
             except Exception as ex:
                 msg = "Error activating plugin {} - {} : \n\t{}".format(
-                    plugin.name, ex, ex.format_exc())
+                    plugin.name, ex, traceback.format_exc())
                 logger.error(msg)
                 raise Error(msg)
         if sync:
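The one-line change above replaces ex.format_exc(), which does not exist on exception objects, with traceback.format_exc() from the standard library. A minimal illustration, independent of senpy:

    import traceback

    try:
        raise ValueError('boom')
    except ValueError:
        # Exception instances have no format_exc(); the traceback module
        # returns the formatted stack trace of the exception being handled.
        print(traceback.format_exc())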
senpy/models.py (129 changed lines)
@@ -12,12 +12,15 @@ import time
 import copy
 import json
 import os
 import logging
 import jsonref
 import jsonschema
 
 from flask import Response as FlaskResponse
 
+import logging
+
+logger = logging.getLogger(__name__)
 
 DEFINITIONS_FILE = 'definitions.json'
 CONTEXT_PATH = os.path.join(
     os.path.dirname(os.path.realpath(__file__)), 'schemas', 'context.jsonld')
@@ -40,7 +43,6 @@ def read_schema(schema_file, absolute=False):
 
 base_schema = read_schema(DEFINITIONS_FILE)
-logging.debug(base_schema)
 
 
 class Context(dict):
@@ -72,7 +74,7 @@ base_context = Context.load(CONTEXT_PATH)
 class SenpyMixin(object):
     context = base_context["@context"]
 
-    def flask(self, in_headers=False, headers=None, **kwargs):
+    def flask(self, in_headers=True, headers=None, **kwargs):
         """
         Return the values and error to be used in flask.
         So far, it returns a fixed context. We should store/generate different
@@ -151,14 +153,16 @@ class SenpyMixin(object):
         return str(self.to_JSON())
 
 
-class SenpyModel(SenpyMixin, dict):
+class BaseModel(SenpyMixin, dict):
 
     schema = base_schema
 
     def __init__(self, *args, **kwargs):
-        self.id = kwargs.pop('id', '{}_{}'.format(
-            type(self).__name__, time.time()))
-
+        if 'id' in kwargs:
+            self.id = kwargs.pop('id')
+        elif kwargs.pop('_auto_id', True):
+            self.id = '_:{}_{}'.format(
+                type(self).__name__, time.time())
         temp = dict(*args, **kwargs)
 
         for obj in [self.schema, ] + self.schema.get('allOf', []):
@@ -175,7 +179,11 @@ class SenpyModel(SenpyMixin, dict):
             context = temp['context']
             del temp['context']
             self.__dict__['context'] = Context.load(context)
-        super(SenpyModel, self).__init__(temp)
+        try:
+            temp['@type'] = getattr(self, '@type')
+        except AttributeError:
+            logger.warn('Creating an instance of an unknown model')
+        super(BaseModel, self).__init__(temp)
 
     def _get_key(self, key):
         key = key.replace("__", ":", 1)
@@ -206,73 +214,80 @@ class SenpyModel(SenpyMixin, dict):
         return d
 
 
-class Response(SenpyModel):
-    schema = read_schema('response.json')
+_subtypes = {}
 
 
-class Results(SenpyModel):
-    schema = read_schema('results.json')
+def register(rsubclass, rtype=None):
+    _subtypes[rtype or rsubclass.__name__] = rsubclass
 
 
-class Entry(SenpyModel):
-    schema = read_schema('entry.json')
+def from_dict(indict):
+    target = indict.get('@type', None)
+    if target and target in _subtypes:
+        cls = _subtypes[target]
+    else:
+        cls = BaseModel
    return cls(**indict)
 
 
-class Sentiment(SenpyModel):
-    schema = read_schema('sentiment.json')
+def from_schema(name, schema_file=None, base_classes=None):
+    base_classes = base_classes or []
+    base_classes.append(BaseModel)
+    schema_file = schema_file or '{}.json'.format(name)
+    class_name = '{}{}'.format(i[0].upper(), i[1:])
+    newclass = type(class_name, tuple(base_classes), {})
+    setattr(newclass, '@type', name)
+    setattr(newclass, 'schema', read_schema(schema_file))
+    register(newclass, name)
+    return newclass
 
 
-class Analysis(SenpyModel):
-    schema = read_schema('analysis.json')
+def _add_from_schema(*args, **kwargs):
+    generatedClass = from_schema(*args, **kwargs)
+    globals()[generatedClass.__name__] = generatedClass
+    del generatedClass
 
 
-class EmotionSet(SenpyModel):
-    schema = read_schema('emotionSet.json')
+for i in ['response',
+          'results',
+          'entry',
+          'sentiment',
+          'analysis',
+          'emotionSet',
+          'emotion',
+          'emotionModel',
+          'suggestion',
+          'plugin',
+          'emotionPlugin',
+          'sentimentPlugin',
+          'plugins']:
+    _add_from_schema(i)
 
 
-class Emotion(SenpyModel):
-    schema = read_schema('emotion.json')
 
 
-class EmotionModel(SenpyModel):
-    schema = read_schema('emotionModel.json')
 
 
-class Suggestion(SenpyModel):
-    schema = read_schema('suggestion.json')
 
 
-class PluginModel(SenpyModel):
-    schema = read_schema('plugin.json')
 
 
-class EmotionPluginModel(SenpyModel):
-    schema = read_schema('plugin.json')
 
 
-class SentimentPluginModel(SenpyModel):
-    schema = read_schema('plugin.json')
 
 
-class Plugins(SenpyModel):
-    schema = read_schema('plugins.json')
+_ErrorModel = from_schema('error')
 
 
 class Error(SenpyMixin, BaseException):
     def __init__(self,
                  message,
                  status=500,
                  params=None,
                  errors=None,
                  *args,
                  **kwargs):
         super(Error, self).__init__(self, message, message)
+        self._error = _ErrorModel(message=message, *args, **kwargs)
         self.message = message
         self.status = status
         self.params = params or {}
         self.errors = errors or ""
 
-    def _plain_dict(self):
-        return self.__dict__
+    def __getattr__(self, key):
+        if key != '_error' and hasattr(self._error, key):
+            return getattr(self._error, key)
+        raise AttributeError(key)
 
-    def __str__(self):
-        return str(self.jsonld())
+    def __setattr__(self, key, value):
+        if key != '_error':
+            return setattr(self._error, key, value)
+        else:
+            super(Error, self).__setattr__(key, value)
+
+    def __delattr__(self, key):
+        delattr(self._error, key)
+
+
+register(Error, 'error')
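The '@type' autodetection mentioned in the commit message is implemented by the register/from_dict pair above: each class generated by from_schema is registered under its schema name, and from_dict instantiates the class matching the '@type' field, falling back to BaseModel. A sketch of the intended behaviour, assuming senpy 0.7 is installed:

    from senpy import models

    # 'results' is one of the names registered by the generation loop above,
    # so this should come back as an instance of the generated Results class.
    r = models.from_dict({'@type': 'results', 'entries': []})
    print(type(r).__name__)        # Results

    # An unknown or missing '@type' falls back to the generic BaseModel.
    other = models.from_dict({'something': 'else'})
    print(type(other).__name__)    # BaseModel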
senpy/plugins.py
@@ -6,16 +6,16 @@ import os.path
 import pickle
 import logging
 import tempfile
-from .models import PluginModel, Error
+from . import models
 
 logger = logging.getLogger(__name__)
 
 
-class SenpyPlugin(PluginModel):
+class SenpyPlugin(models.Plugin):
     def __init__(self, info=None):
         if not info:
-            raise Error(message=("You need to provide configuration"
-                                 "information for the plugin."))
+            raise models.Error(message=("You need to provide configuration"
+                                        "information for the plugin."))
         logger.debug("Initialising {}".format(info))
         super(SenpyPlugin, self).__init__(info)
         self.id = '{}_{}'.format(self.name, self.version)
@@ -40,7 +40,7 @@ class SenpyPlugin(PluginModel):
         self.deactivate()
 
 
-class SentimentPlugin(SenpyPlugin):
+class SentimentPlugin(SenpyPlugin, models.SentimentPlugin):
     def __init__(self, info, *args, **kwargs):
         super(SentimentPlugin, self).__init__(info, *args, **kwargs)
         self.minPolarityValue = float(info.get("minPolarityValue", 0))
@@ -48,7 +48,7 @@ class SentimentPlugin(SenpyPlugin):
         self["@type"] = "marl:SentimentAnalysis"
 
 
-class EmotionPlugin(SenpyPlugin):
+class EmotionPlugin(SentimentPlugin, models.EmotionPlugin):
     def __init__(self, info, *args, **kwargs):
         self.minEmotionValue = float(info.get("minEmotionValue", 0))
         self.maxEmotionValue = float(info.get("maxEmotionValue", 0))
@@ -71,10 +71,6 @@ class ShelfMixin(object):
         del self.__dict__['_sh']
         self.save()
 
-    def __del__(self):
-        self.save()
-        super(ShelfMixin, self).__del__()
-
     @property
     def shelf_file(self):
         if not hasattr(self, '_shelf_file') or not self._shelf_file:
@@ -86,8 +82,7 @@ class ShelfMixin(object):
         return self._shelf_file
 
     def save(self):
-        logger.debug('closing pickle')
+        logger.debug('saving pickle')
         if hasattr(self, '_sh') and self._sh is not None:
             with open(self.shelf_file, 'wb') as f:
                 pickle.dump(self._sh, f)
-            del (self.__dict__['_sh'])
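With the reorganised hierarchy, plugins are models themselves (SenpyPlugin now extends models.Plugin) and a missing configuration dict is reported through models.Error. A quick sketch of that behaviour:

    from senpy import models, plugins

    try:
        plugins.SenpyPlugin()          # no info dict provided
    except models.Error as ex:
        print(ex.message)              # "You need to provide configuration..."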
senpy/schemas/\ (new file, 7 lines)
{
    "$schema": "http://json-schema.org/draft-04/schema#",
    "description": "Senpy analysis",
    "allOf": [{
        "$ref": "atom.json"
    }]
}
senpy/schemas/atom.json (new file, 15 lines)
{
    "$schema": "http://json-schema.org/draft-04/schema#",
    "description": "Base schema for all Senpy objects",
    "type": "object",
    "properties": {
        "@id": {
            "type": "string"
        },
        "@type": {
            "type": "string",
            "description": "Type of the atom. e.g., 'onyx:EmotionAnalysis', 'nif:Entry'"
        }
    },
    "required": ["@id", "@type"]
}
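atom.json is the common base schema: every Senpy object has to carry '@id' and '@type'. A quick check with the jsonschema package (the path is relative to a senpy checkout, and the example values are made up):

    import json

    import jsonschema

    with open('senpy/schemas/atom.json') as f:
        atom = json.load(f)

    jsonschema.validate({'@id': ':example_1', '@type': 'results'}, atom)   # passes

    try:
        jsonschema.validate({'@id': ':example_1'}, atom)                   # '@type' is missing
    except jsonschema.ValidationError as error:
        print(error.message)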
senpy/schemas/emotionPlugin.json (new file, 19 lines)
{
    "$schema": "http://json-schema.org/draft-04/schema#",
    "type": "object",
    "$allOf": [
        {
            "$ref": "plugin.json"
        },
        {
            "properties": {
                "onyx:usesEmotionModel": {
                    "type": "array",
                    "items": {
                        "$ref": "emotionModel.json"
                    }
                }
            }
        }
    ]
}
senpy/schemas/entry.json
@@ -5,9 +5,6 @@
         "@id": {
             "type": "string"
         },
-        "@type": {
-            "enum": [["nif:RFC5147String", "nif:Context"]]
-        },
         "nif:isString": {
             "description": "String contained in this Context",
             "type": "string"
senpy/schemas/error.json (new file, 23 lines)
{
    "$schema": "http://json-schema.org/draft-04/schema#",
    "description": "Base schema for all Senpy objects",
    "type": "object",
    "$allOf": [
        {"$ref": "atom.json"},
        {
            "properties": {
                "message": {
                    "type": "string"
                },
                "errors": {
                    "type": "list",
                    "items": {"type": "object"}
                },
                "code": {
                    "type": "int"
                },
                "required": ["message"]
            }
        }
    ]
}
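error.json backs the generated error model that the reworked Error exception wraps (see the models.py changes above): the exception still behaves like an exception, while its attributes are stored on the underlying model. A sketch, assuming senpy 0.7:

    from senpy.models import Error

    try:
        raise Error('Something went wrong', status=404)
    except Error as ex:
        print(ex.message)    # 'Something went wrong', proxied from the wrapped error model
        print(ex.status)     # 404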
senpy/schemas/plugin.json
@@ -4,7 +4,12 @@
     "required": ["@id", "extra_params"],
     "properties": {
         "@id": {
-            "type": "string"
+            "type": "string",
+            "description": "Unique identifier for the plugin, usually comprised of the name of the plugin and the version."
         },
+        "name": {
+            "type": "string",
+            "description": "The name of the plugin, which will be used in the algorithm detection phase"
+        },
         "extra_params": {
             "type": "object",
senpy/schemas/sentimentPlugin.json (new file, 19 lines)
{
    "$schema": "http://json-schema.org/draft-04/schema#",
    "type": "object",
    "$allOf": [
        {
            "$ref": "plugin.json"
        },
        {
            "properties": {
                "marl:minPolarityValue": {
                    "type": "number"
                },
                "marl:maxPolarityValue": {
                    "type": "number"
                }
            }
        }
    ]
}
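sentimentPlugin.json layers the marl polarity bounds on top of plugin.json, mirroring what SentimentPlugin.__init__ reads from its info dict. A sketch with made-up plugin metadata:

    from senpy.plugins import SentimentPlugin

    # The name, version and bounds below are illustrative values only.
    plugin = SentimentPlugin(info={'name': 'example-sentiment',
                                   'version': 'test',
                                   'extra_params': {},
                                   'minPolarityValue': -1,
                                   'maxPolarityValue': 1})
    print(plugin.minPolarityValue, plugin.maxPolarityValue)   # -1.0 1.0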
tests/test_cli.py
@@ -1,6 +1,11 @@
 import logging
 from functools import partial
 
+try:
+    from unittest.mock import patch
+except ImportError:
+    from mock import patch
+
 logger = logging.getLogger(__name__)
 
 from unittest import TestCase
@@ -11,9 +16,12 @@ from senpy.models import Error
 class CLITest(TestCase):
     def test_basic(self):
         self.assertRaises(Error, partial(main_function, []))
-        res = main_function(['--input', 'test'])
-        assert 'entries' in res
-        res = main_function(['--input', 'test', '--algo', 'rand'])
-        assert 'entries' in res
-        assert 'analysis' in res
-        assert res['analysis'][0]['name'] == 'rand'
+
+        with patch('senpy.extensions.Senpy.analyse') as patched:
+            main_function(['--input', 'test'])
+
+        patched.assert_called_with(input='test')
+        with patch('senpy.extensions.Senpy.analyse') as patched:
+            main_function(['--input', 'test', '--algo', 'rand'])
+
+        patched.assert_called_with(input='test', algo='rand')
tests/test_client.py (new file, 42 lines)
from unittest import TestCase
try:
    from unittest.mock import patch
except ImportError:
    from mock import patch

from senpy.client import Client
from senpy.models import Results, Error


class Call(dict):

    def __init__(self, obj):
        self.obj = obj.jsonld()

    def json(self):
        return self.obj


class ModelsTest(TestCase):
    def setUp(self):
        self.host = '0.0.0.0'
        self.port = 5000

    def test_client(self):
        endpoint = 'http://dummy/'
        client = Client(endpoint)
        success = Call(Results())
        with patch('requests.request', return_value=success) as patched:
            resp = client.analyse('hello')
            assert isinstance(resp, Results)
        patched.assert_called_with(url=endpoint + '/',
                                   method='GET',
                                   params={'input': 'hello'})
        error = Call(Error('Nothing'))
        with patch('requests.request', return_value=error) as patched:
            resp = client.analyse(input='hello', algorithm='NONEXISTENT')
            assert isinstance(resp, Error)
        patched.assert_called_with(url=endpoint + '/',
                                   method='GET',
                                   params={'input': 'hello',
                                           'algorithm': 'NONEXISTENT'})
tests/test_models.py
@@ -11,7 +11,9 @@ from pprint import pprint
 
 class ModelsTest(TestCase):
     def test_jsonld(self):
-        prueba = {"id": "test", "analysis": [], "entries": []}
+        prueba = {"id": "test",
+                  "analysis": [],
+                  "entries": []}
         r = Results(**prueba)
         print("Response's context: ")
         pprint(r.context)
@@ -28,11 +30,11 @@ class ModelsTest(TestCase):
         assert "id" not in j
 
         r6 = Results(**prueba)
-        r6.entries.append(
-            Entry({
-                "@id": "ohno",
-                "nif:isString": "Just testing"
-            }))
+        e = Entry({
+            "@id": "ohno",
+            "nif:isString": "Just testing"
+        })
+        r6.entries.append(e)
         logging.debug("Reponse 6: %s", r6)
         assert ("marl" in r6.context)
         assert ("entries" in r6.context)
tests/test_plugins.py
@@ -44,7 +44,6 @@ class PluginsTest(TestCase):
         a = ShelfDummyPlugin(
             info={'name': 'default_shelve_file',
                   'version': 'test'})
         assert os.path.dirname(a.shelf_file) == tempfile.gettempdir()
         a.activate()
-        assert os.path.isfile(a.shelf_file)
         os.remove(a.shelf_file)