Improved schema validation

* Added debug Dockerfile/Makefile
* Added validation of the examples in the docs
J. Fernando Sánchez 7 years ago
parent bc1f9e4cf5
commit b543a4614e

@ -0,0 +1,5 @@
FROM python:3.4
RUN pip install pytest
ADD requirements.txt /usr/src/app/
RUN pip install -r /usr/src/app/requirements.txt

@ -20,12 +20,17 @@ buildall: $(addprefix build-, $(PYVERSIONS))
build-%: Dockerfile-%
	docker build -t '$(REPO)/$(NAME):$(VERSION)-python$*' -f Dockerfile-$* .;
build-debug-%:
	docker build -t '$(NAME)-debug' -f Dockerfile-debug-$* .;
test: $(addprefix test-,$(PYMAIN))
testall: $(addprefix test-,$(PYVERSIONS))
debug-%: build-%
	docker run --rm -w /usr/src/app/ -v $$PWD:/usr/src/app --entrypoint=/bin/bash -ti '$(REPO)/$(NAME):$(VERSION)-python$*' ;
debug-%: build-debug-%
	docker run --rm -w /usr/src/app/ -v $$PWD:/usr/src/app --entrypoint=/bin/bash -ti $(NAME)-debug ;
debug: debug-$(PYMAIN)
test-%: build-%
	docker run --rm -w /usr/src/app/ --entrypoint=/usr/local/bin/python -ti '$(REPO)/$(NAME):$(VERSION)-python$*' setup.py test --addopts "-vvv -s" ;
@ -51,6 +56,7 @@ upload: testall $(addprefix upload-,$(PYVERSIONS))
clean:
	@docker ps -a | awk '/$(REPO)\/$(NAME)/{ split($$2, vers, "-"); if(vers[1] != "${VERSION}"){ print $$1;}}' | xargs docker rm 2>/dev/null|| true
	@docker images | awk '/$(REPO)\/$(NAME)/{ split($$2, vers, "-"); if(vers[1] != "${VERSION}"){ print $$1":"$$2;}}' | xargs docker rmi 2>/dev/null|| true
	@docker rmi $(NAME)-debug 2>/dev/null || true
upload_git:
	git commit -a

@ -0,0 +1,18 @@
{
  "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
  "@id": "http://example.com#NIFExample",
  "@type": "results",
  "analysis": [
  ],
  "entries": [
    {
      "@type": [
        "nif:RFC5147String",
        "nif:Context"
      ],
      "nif:beginIndex": 0,
      "nif:endIndex": 39,
      "nif:isString": "My favourite actress is Natalie Portman"
    }
  ]
}
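This example declares "@type": "results", so it should validate against the results schema. Below is a hand-run sketch of the check that the new test module (added later in this commit) performs; the schema folder, the example file name and running from the repository root are assumptions, since the diff does not show file names.

import json
from jsonschema import Draft4Validator, RefResolver

schema_folder = 'senpy/schemas'                       # assumed schema location
with open(schema_folder + '/results.json') as f:
    schema = json.load(f)

with open('docs/examples/example-basic.json') as f:   # hypothetical example file name
    example = json.load(f)

# Resolve $ref entries (context.json, analysis.json, entry.json) relative to the schema folder.
resolver = RefResolver('file://' + schema_folder + '/', schema)
Draft4Validator(schema, resolver=resolver).validate(example)  # raises ValidationError on failure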

@ -0,0 +1,5 @@
{
  "@type": "plugins",
  "plugins": [
  ]
}

@ -1,6 +1,7 @@
{
  "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
  "@id": "http://example.com#NIFExample",
  "@type": "results",
  "analysis": [
  ],
  "entries": [

@ -1,6 +1,7 @@
{
  "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
  "@id": "me:Result1",
  "@type": "results",
  "analysis": [
    {
      "@id": "me:SAnalysis1",

@ -1,6 +1,7 @@
{
  "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
  "@id": "me:Result1",
  "@type": "results",
  "analysis": [
    {
      "@id": "me:EmotionAnalysis1",

@ -1,6 +1,7 @@
{
  "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
  "@id": "me:Result1",
  "@type": "results",
  "analysis": [
    {
      "@id": "me:NER1",

@ -0,0 +1,46 @@
{
  "@context": [
    "http://mixedemotions-project.eu/ns/context.jsonld",
    {
      "emovoc": "http://www.gsi.dit.upm.es/ontologies/onyx/vocabularies/emotionml/ns#"
    }
  ],
  "@id": "me:Result1",
  "@type": "results",
  "analysis": [
    {
      "@id": "me:HesamsAnalysis",
      "@type": "onyx:EmotionAnalysis",
      "onyx:usesEmotionModel": "emovoc:pad-dimensions"
    }
  ],
  "entries": [
    {
      "@id": "Entry1",
      "@type": [
        "nif:RFC5147String",
        "nif:Context"
      ],
      "nif:isString": "This is a test string",
      "entities": [
      ],
      "suggestions": [
      ],
      "sentiments": [
      ],
      "emotions": [
        {
          "@id": "Entry1#char=0,21",
          "nif:anchorOf": "This is a test string",
          "prov:wasGeneratedBy": "me:HesamsAnalysis",
          "onyx:hasEmotion": [
            {
              "emovoc:pleasure": 0.5,
              "emovoc:arousal": 0.7
            }
          ]
        }
      ]
    }
  ]
}
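The second element of the @context adds an emovoc prefix for the PAD dimension keys. Here is a minimal pyld sketch of how those prefixed keys expand; only the inline prefix definition is used so it runs offline, whereas the real example also pulls the remote context.

from pyld import jsonld

doc = {
    "@context": {
        "emovoc": "http://www.gsi.dit.upm.es/ontologies/onyx/vocabularies/emotionml/ns#"
    },
    "emovoc:pleasure": 0.5,
    "emovoc:arousal": 0.7
}
# Each emovoc-prefixed key expands to a full IRI in the emotionml namespace,
# e.g. emovoc:pleasure -> http://www.gsi.dit.upm.es/ontologies/onyx/vocabularies/emotionml/ns#pleasure
print(jsonld.expand(doc))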

@ -1,6 +1,7 @@
{
"@context": "http://mixedemotions-project.eu/ns/context.jsonld",
"@id": "me:Result1",
"@type": "results",
"analysis": [
{
"@id": "me:SAnalysis1",

@ -1,6 +1,7 @@
{
"@context": "http://mixedemotions-project.eu/ns/context.jsonld",
"@id": "me:Result1",
"@type": "results",
"analysis": [
{
"@id": "me:SgAnalysis1",

@ -1 +1 @@
0.6.2
pre-0.7.0

@ -36,7 +36,6 @@ def read_schema(schema_file, absolute=False):
        return jsonref.load(f, base_uri=schema_uri)

base_schema = read_schema(DEFINITIONS_FILE)
logging.debug(base_schema)
@ -157,18 +156,16 @@ class SenpyModel(SenpyMixin, dict):
        temp = dict(*args, **kwargs)
        for obj in [self.schema,]+self.schema.get('allOf', []):
            for k, v in obj.get('properties', {}).items():
                if 'default' in v:
                    temp[k] = copy.deepcopy(v['default'])
        for i in list(temp):
            nk = self._get_key(i)
            if nk != i:
                temp[nk] = temp[i]
                del temp[i]
        reqs = self.schema.get('required', [])
        for i in reqs:
            if i not in temp:
                prop = self.schema['properties'][i]
                if 'default' in prop:
                    temp[i] = copy.deepcopy(prop['default'])
        if 'context' in temp:
            context = temp['context']
            del temp['context']
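A condensed, standalone sketch of the defaults handling above: defaults declared in the schema, including schemas pulled in through allOf, are deep-copied onto the instance. The schema dict below is a stand-in for what read_schema returns, and the sketch only fills missing keys, which is a simplification of the loop in the diff.

import copy

def fill_defaults(values, schema):
    # Walk the schema itself plus any allOf members, as the initialisation code above does.
    for obj in [schema] + schema.get('allOf', []):
        for key, prop in obj.get('properties', {}).items():
            if 'default' in prop and key not in values:
                values[key] = copy.deepcopy(prop['default'])
    return values

schema = {"properties": {"@type": {"default": "results"},
                         "analysis": {"type": "array", "default": []}}}
print(fill_defaults({}, schema))   # {'@type': 'results', 'analysis': []}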
@ -226,12 +223,21 @@ class EmotionSet(SenpyModel):
class Emotion(SenpyModel):
    schema = read_schema('emotion.json')

class EmotionModel(SenpyModel):
    schema = read_schema('emotionModel.json')

class Suggestion(SenpyModel):
    schema = read_schema('suggestion.json')

class PluginModel(SenpyModel):
    schema = read_schema('plugin.json')

class EmotionPluginModel(SenpyModel):
    schema = read_schema('plugin.json')

class SentimentPluginModel(SenpyModel):
    schema = read_schema('plugin.json')

class Plugins(SenpyModel):
    schema = read_schema('plugins.json')

@ -0,0 +1,9 @@
{
  "$schema": "http://json-schema.org/draft-04/schema#",
  "properties": {
    "name": {"type": "string"},
    "maxValue": {"type": "number"},
    "minValue": {"type": "number"}
  },
  "required": ["name", "maxValue", "minValue"]
}
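A quick jsonschema check of the new dimensions schema; the PAD values are illustrative and the schema path is an assumption.

import json
from jsonschema import Draft4Validator

with open('senpy/schemas/dimensions.json') as f:    # assumed path, relative to the repo root
    schema = json.load(f)

# Passes: all three required fields are present with the declared types.
Draft4Validator(schema).validate({"name": "pleasure", "minValue": -1.0, "maxValue": 1.0})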

@ -0,0 +1,18 @@
{
  "$schema": "http://json-schema.org/draft-04/schema#",
  "description": "Senpy Emotion analysis",
  "type": "object",
  "allOf": [
    {"$ref": "analysis.json"},
    {
      "properties": {
        "onyx:hasEmotionModel": {
          "anyOf": [
            {"type": "string"},
            {"$ref": "emotionModel.json"}
          ]
        }
      },
      "required": ["onyx:hasEmotionModel"]
    }
  ]
}
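The anyOf above accepts the emotion model either by name or as an embedded object; the embedded form must additionally satisfy whatever emotionModel.json declares, which is not shown in this diff, so the dicts below are only illustrative.

# Two payload shapes the anyOf admits for onyx:hasEmotionModel, as Python dicts.
model_by_name = {"onyx:hasEmotionModel": "emovoc:pad-dimensions"}
model_inline = {"onyx:hasEmotionModel": {"@id": "emovoc:pad-dimensions"}}  # must also match emotionModel.json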

@ -8,17 +8,20 @@
"description": "Piece of context that contains the Sentiment",
"type": "string"
},
"onyx:hasEmotion": {
"onyx:hasDimension": {
"type": "array",
"items": {
"$ref": "dimensions.json"
},
"uniqueItems": true
},
"onyx:hasEmotionCategory": {
"type": "array",
"items": {
"$ref": "emotion.json"
},
"default": []
},
"prov:wasGeneratedBy": {
"type": "string",
"description": "The ID of the analysis that generated this Emotion. The full object should be included in the \"analysis\" property of the root object"
}
},
"required": ["@id", "prov:wasGeneratedBy", "onyx:hasEmotion"]
"required": ["@id", "onyx:hasEmotion"]
}

@ -1,11 +1,18 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"properties": {
"plugins": {
"type": "array",
"items": {
"$ref": "plugin.json"
"allOf": [
{"$ref": "response.json"},
{
"properties": {
"plugins": {
"type": "array",
"items": {
"$ref": "plugin.json"
}
},
"@type": {
}
}
}
}
]
}
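With the allOf wrapper, a plugins listing is now also a response, so it has to carry "@type" as well; the plugins example earlier in this commit matches this shape. A minimal document, sketched as a Python dict:

minimal_plugins = {
    "@type": "plugins",
    "plugins": []
}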

@ -1,4 +1,9 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"type": "object"
"type": "object",
"properties": {
"@type": {"type": "string"}
},
"required": ["@type"]
}
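Requiring "@type" on every response is what lets the new test module pick the right schema for each example: the value of "@type" is treated as the schema file name. A small sketch of that lookup, with the folder path as an assumption:

import json
import os.path

def schema_for(document, schema_folder='senpy/schemas'):   # folder path is an assumption
    schema_name = document['@type']                         # guaranteed to exist by response.json
    with open(os.path.join(schema_folder, schema_name + '.json')) as f:
        return json.load(f)

# e.g. a document with "@type": "results" is validated against results.json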

@ -1,31 +1,39 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "Results",
"description": "The results of an analysis",
"type": "object",
"properties": {
"@context": {
"$ref": "context.json"
},
"@id": {
"description": "ID of the analysis",
"type": "string"
},
"analysis": {
"type": "array",
"default": [],
"items": {
"$ref": "analysis.json"
}
},
"entries": {
"type": "array",
"default": [],
"items": {
"$ref": "entry.json"
}
"allOf": [
{"$ref": "response.json"},
{
"title": "Results",
"description": "The results of an analysis",
"type": "object",
"properties": {
"@context": {
"$ref": "context.json"
},
"@type": {
"default": "results"
},
"@id": {
"description": "ID of the analysis",
"type": "string"
},
"analysis": {
"type": "array",
"default": [],
"items": {
"$ref": "analysis.json"
}
},
"entries": {
"type": "array",
"default": [],
"items": {
"$ref": "entry.json"
}
}
},
"required": ["@id", "analysis", "entries"]
}
},
"required": ["@id", "analysis", "entries"]
]
}
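With the allOf wrapper, a results document must satisfy both response.json (which requires "@type") and the property block above (which requires "@id", "analysis" and "entries"). A minimal document that should pass, written as a Python dict:

minimal_results = {
    "@type": "results",
    "@id": "http://example.com#minimal",   # illustrative ID
    "analysis": [],
    "entries": []
}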

@ -0,0 +1,52 @@
from __future__ import print_function

import json
import unittest
import os
from os import path
from fnmatch import fnmatch

import pyld
from jsonschema import validate, RefResolver, Draft4Validator, ValidationError

root_path = path.join(path.dirname(path.realpath(__file__)), '..')
schema_folder = path.join(root_path, 'senpy', 'schemas')
examples_path = path.join(root_path, 'docs', 'examples')
bad_examples_path = path.join(root_path, 'docs', 'bad-examples')


class JSONSchemaTests(unittest.TestCase):
    # Test methods are attached dynamically below, one per example file.
    pass


def do_create_(jsfile, success):
    def do_expected(self):
        with open(jsfile) as f:
            js = json.load(f)
        try:
            # The value of @type names the schema this document must satisfy.
            assert '@type' in js
            schema_name = js['@type']
            with open(os.path.join(schema_folder, schema_name + ".json")) as file_object:
                schema = json.load(file_object)
            resolver = RefResolver('file://' + schema_folder + '/', schema)
            validator = Draft4Validator(schema, resolver=resolver)
            validator.validate(js)
        except (AssertionError, ValidationError, KeyError) as ex:
            if success:
                raise
    return do_expected


def add_examples(dirname, success):
    for dirpath, dirnames, filenames in os.walk(dirname):
        for i in filenames:
            if fnmatch(i, '*.json'):
                filename = path.join(dirpath, i)
                test_method = do_create_(filename, success)
                test_method.__name__ = 'test_file_%s_success_%s' % (filename, success)
                test_method.__doc__ = '%s should %svalidate' % (filename,
                                                                '' if success else 'not ')
                setattr(JSONSchemaTests, test_method.__name__, test_method)
                del test_method


add_examples(examples_path, True)
add_examples(bad_examples_path, False)

if __name__ == '__main__':
    unittest.main()
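The tests above are generated at import time, one per JSON file under docs/examples (expected to validate) and docs/bad-examples (expected to fail). A sketch of running just this module by hand, assuming it lives in a tests/ directory; the Makefile's test target runs the whole suite through setup.py test instead.

import unittest

# Discover and run only the schema tests (file name and location are assumptions).
suite = unittest.defaultTestLoader.discover('tests', pattern='test_schemas*.py')
unittest.TextTestRunner(verbosity=2).run(suite)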