Mirror of https://github.com/gsi-upm/senpy (synced 2024-11-21 07:42:28 +00:00)

Multiple changes in the API, schemas and UI

Check out the CHANGELOG.md file for more information.

This commit is contained in: commit 8a516d927e (parent 4ba30304a4)
@@ -28,6 +28,7 @@ test-3.5:
 
 test-2.7:
   <<: *test_definition
+  allow_failure: true
   variables:
     PYTHON_VERSION: "2.7"
 
@@ -29,7 +29,7 @@ build: $(addprefix build-, $(PYVERSIONS)) ## Build all images / python versions
 	docker tag $(IMAGEWTAG)-python$(PYMAIN) $(IMAGEWTAG)
 
 build-%: version Dockerfile-% ## Build a specific version (e.g. build-2.7)
-	docker build -t '$(IMAGEWTAG)-python$*' -f Dockerfile-$* .;
+	docker build --pull -t '$(IMAGEWTAG)-python$*' -f Dockerfile-$* .;
 
 dev-%: ## Launch a specific development environment using docker (e.g. dev-2.7)
 	@docker start $(NAME)-dev$* || (\
CHANGELOG.md (new file, 53 lines)
@@ -0,0 +1,53 @@
# Changelog
All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [Unreleased]

### Added
* Objects can control the keys that will be used in `serialize`/`jsonld`/`as_dict` by specifying a list of keys in `_terse_keys`. e.g.

  ```python
  >>> class MyModel(senpy.models.BaseModel):
  ...     _terse_keys = ['visible']
  ...     invisible = 5
  ...     visible = 1
  ...
  >>> m = MyModel(id='testing')
  >>> m.jsonld()
  {'invisible': 5, 'visible': 1, '@id': 'testing'}
  >>> m.jsonld(verbose=False)
  {'visible': 1}
  ```
* Configurable logging format.
* Added default terse keys for the most common classes (entry, sentiment, emotion...).
* Flag (boolean) parameters are set to true even when no value is given (e.g. `&verbose` is the same as `&verbose=true`).
* Plugin and parameter descriptions are now formatted with [showdown](https://github.com/showdownjs/showdown).
* The web UI requests extra_parameters from the server. This is useful for pipelines. See #52.
* First batch of semantic tests (using SPARQL).

### Changed
* `install_deps` now checks which requirements are already met before installing with pip.
* Help is now provided verbosely by default.
* Other outputs are terse by default. This means some properties are now hidden unless verbose is set.
* `sentiments` and `emotions` are now `marl:hasOpinion` and `onyx:hasEmotionSet`, respectively.
* Nicer logging format.
* Context aliases (e.g. the `sentiments` and `emotions` properties) have been replaced with the original properties (e.g. `marl:hasOpinion` and `onyx:hasEmotionSet`). To use aliases, pass the `aliases` parameter.
* Several UI improvements:
  * Dedicated tab to show the list of plugins.
  * URLs in plugin descriptions are shown as links.
  * The format of the response is selected by clicking on a tab instead of selecting from a drop-down.
  * List of examples.
  * Bootstrap v4.
* RandEmotion and RandSentiment are no longer included in the base set of plugins.
* The `--plugin-folder` option can be used more than once, and every folder will be added to the app.

### Deprecated

### Removed
* Python 2.7 is no longer tested or officially supported.

### Fixed
* Plugin descriptions are now dedented when they are extracted from the docstring.

### Security
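The flag-parameter and terse/verbose changes above can be exercised against a running instance; a rough sketch using Python's `requests` (the local URL and the input text are placeholders, not part of this commit):

```python
# Hedged sketch: compare terse (default) and verbose output of a senpy API.
# Assumes a senpy server is listening on http://localhost:5000; adjust as needed.
import requests

BASE = "http://localhost:5000/api/"

terse = requests.get(BASE, params={"i": "Senpy is awesome"}).json()

# A bare flag is treated as true, so "&verbose" behaves like "&verbose=true".
verbose = requests.get(BASE + "?i=Senpy+is+awesome&verbose").json()

print(sorted(terse.keys()))    # terse output: some properties hidden by default
print(sorted(verbose.keys()))  # verbose output: includes the hidden properties
```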
Makefile (2 changes)
@@ -5,7 +5,7 @@ IMAGENAME=gsiupm/senpy
 
 # The first version is the main one (used for quick builds)
 # See .makefiles/python.mk for more info
-PYVERSIONS=3.5 2.7
+PYVERSIONS=3.6 3.7
 
 DEVPORT=5000
 
Procfile (2 changes)
@@ -1 +1 @@
-web: python -m senpy --host 0.0.0.0 --port $PORT --default-plugins
+web: python -m senpy --host 0.0.0.0 --port $PORT
README.rst (27 changes)
@@ -1,10 +1,19 @@
 .. image:: img/header.png
   :width: 100%
-  :target: http://demos.gsi.dit.upm.es/senpy
+  :target: http://senpy.gsi.upm.es
 
 .. image:: https://travis-ci.org/gsi-upm/senpy.svg?branch=master
   :target: https://travis-ci.org/gsi-upm/senpy
 
+.. image:: https://lab.gsi.upm.es/senpy/senpy/badges/master/pipeline.svg
+  :target: https://lab.gsi.upm.es/senpy/senpy/commits/master
+
+.. image:: https://lab.gsi.upm.es/senpy/senpy/badges/master/coverage.svg
+  :target: https://lab.gsi.upm.es/senpy/senpy/commits/master
+
+.. image:: https://img.shields.io/pypi/l/requests.svg
+  :target: https://lab.gsi.upm.es/senpy/senpy/
+
 Senpy lets you create sentiment analysis web services easily, fast and using a well known API.
 As a bonus, senpy services use semantic vocabularies (e.g. `NIF <http://persistence.uni-leipzig.org/nlp2rdf/>`_, `Marl <http://www.gsi.dit.upm.es/ontologies/marl>`_, `Onyx <http://www.gsi.dit.upm.es/ontologies/onyx>`_) and formats (turtle, JSON-LD, xml-rdf).
@@ -12,7 +21,7 @@ Have you ever wanted to turn your sentiment analysis algorithms into a service?
 With senpy, now you can.
 It provides all the tools so you just have to worry about improving your algorithms:
 
-`See it in action. <http://senpy.cluster.gsi.dit.upm.es/>`_
+`See it in action. <http://senpy.gsi.upm.es/>`_
 
 Installation
 ------------
@@ -38,9 +47,9 @@ If you want to install senpy globally, use sudo instead of the ``--user`` flag.
 
 Docker Image
 ************
-Build the image or use the pre-built one: ``docker run -ti -p 5000:5000 gsiupm/senpy --default-plugins``.
+Build the image or use the pre-built one: ``docker run -ti -p 5000:5000 gsiupm/senpy``.
 
-To add custom plugins, add a volume and tell senpy where to find the plugins: ``docker run -ti -p 5000:5000 -v <PATH OF PLUGINS>:/plugins gsiupm/senpy --default-plugins -f /plugins``
+To add custom plugins, add a volume and tell senpy where to find the plugins: ``docker run -ti -p 5000:5000 -v <PATH OF PLUGINS>:/plugins gsiupm/senpy -f /plugins``
 
 
 Developing
@@ -125,6 +134,16 @@ For more information, check out the `documentation <http://senpy.readthedocs.org
 ------------------------------------------------------------------------------------
 
 
+Python 2.x compatibility
+------------------------
+
+Keeping compatibility between python 2.7 and 3.x is not always easy, especially for a framework that deals both with text and web requests.
+Hence, starting February 2019, this project will no longer make efforts to support python 2.7, which will reach its end of life in 2020.
+Most of the functionality should still work, and the compatibility shims will remain for now, but we cannot make any guarantees at this point.
+Instead, the maintainers will focus their efforts on keeping the codebase compatible across different Python 3.3+ versions, including upcoming ones.
+We apologize for the inconvenience.
+
+
 Acknowledgement
 ---------------
 This development has been partially funded by the European Union through the MixedEmotions Project (project number H2020 655632), as part of the `RIA ICT 15 Big data and Open Data Innovation and take-up` programme.
@@ -1,4 +0,0 @@
-import os
-
-SERVER_PORT = os.environ.get("SERVER_PORT", 5000)
-DEBUG = os.environ.get("DEBUG", True)
@@ -24,6 +24,7 @@ I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
 help:
 	@echo "Please use \`make <target>' where <target> is one of"
 	@echo "  html       to make standalone HTML files"
+	@echo "  entr       to watch for changes and continuously make HTML files"
 	@echo "  dirhtml    to make HTML files named index.html in directories"
 	@echo "  singlehtml to make a single large HTML file"
 	@echo "  pickle     to make pickle files"
@@ -49,6 +50,9 @@ help:
 clean:
 	rm -rf $(BUILDDIR)/*
 
+entr:
+	while true; do ag -g rst | entr -d make html; done
+
 html:
 	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
 	@echo
docs/Quickstart.ipynb (new file, 4659 lines): file diff suppressed because it is too large.
docs/Quickstart.rst (new file, 2599 lines): file diff suppressed because it is too large.
@@ -1,317 +0,0 @@ (deleted file: a Jupyter notebook, Python 3 kernel, whose cells contained the following)

# Client

The built-in senpy client allows you to query any Senpy endpoint. We will illustrate how to use it with the public demo endpoint, and then show you how to spin up your own endpoint using docker.

Demo Endpoint
-------------

To start using senpy, simply create a new Client and point it to your endpoint. In this case, the latest version of Senpy at GSI.

    from senpy.client import Client

    c = Client('http://latest.senpy.cluster.gsi.dit.upm.es/api')

Now, let's use that client to analyse some queries:

    r = c.analyse('I like sugar!!', algorithm='sentiment140')
    r

As you can see, that gave us the full JSON result. A more concise way to print it would be:

    for entry in r.entries:
        print('{} -> {}'.format(entry['text'], entry['sentiments'][0]['marl:hasPolarity']))

We can also obtain a list of available plugins with the client:

    c.plugins()

Or, more concisely:

    c.plugins().keys()

Local Endpoint
--------------

To run your own instance of senpy, just create a docker container with the latest Senpy image. Using `--default-plugins` you will get some extra plugins to start playing with the API.

    !docker run -ti --name 'SenpyEndpoint' -d -p 6000:5000 gsiupm/senpy:0.8.6 --host 0.0.0.0 --default-plugins

To use this endpoint:

    c_local = Client('http://127.0.0.1:6000/api')

That's all! After you are done with your analysis, stop the docker container:

    !docker stop SenpyEndpoint
    !docker rm SenpyEndpoint
@@ -1,106 +0,0 @@ (deleted file)

Client
======

Demo Endpoint
-------------

Import Client and send a request

.. code:: python

    from senpy.client import Client

    c = Client('http://latest.senpy.cluster.gsi.dit.upm.es/api')
    r = c.analyse('I like Pizza', algorithm='sentiment140')

Print response

.. code:: python

    for entry in r.entries:
        print('{} -> {}'.format(entry['text'], entry['sentiments'][0]['marl:hasPolarity']))

.. parsed-literal::

    I like Pizza -> marl:Positive

Obtain a list of available plugins

.. code:: python

    for plugin in c.request('/plugins')['plugins']:
        print(plugin['name'])

.. parsed-literal::

    emoRand
    rand
    sentiment140

Local Endpoint
--------------

Run a docker container with Senpy image and default plugins

.. code::

    docker run -ti --name 'SenpyEndpoint' -d -p 5000:5000 gsiupm/senpy:0.8.6 --host 0.0.0.0 --default-plugins

.. parsed-literal::

    a0157cd98057072388bfebeed78a830da7cf0a796f4f1a3fd9188f9f2e5fe562

Import client and send a request to localhost

.. code:: python

    c_local = Client('http://127.0.0.1:5000/api')
    r = c_local.analyse('Hello world', algorithm='sentiment140')

Print response

.. code:: python

    for entry in r.entries:
        print('{} -> {}'.format(entry['text'], entry['sentiments'][0]['marl:hasPolarity']))

.. parsed-literal::

    Hello world -> marl:Neutral

Obtain a list of available plugins deployed locally

.. code:: python

    c_local.plugins().keys()

.. parsed-literal::

    rand
    sentiment140
    emoRand

Stop the docker container

.. code:: python

    !docker stop SenpyEndpoint
    !docker rm SenpyEndpoint

.. parsed-literal::

    SenpyEndpoint
    SenpyEndpoint
@@ -1,11 +0,0 @@ (deleted file)

About
--------

If you use Senpy in your research, please cite `Senpy: A Pragmatic Linked Sentiment Analysis Framework <http://gsi.dit.upm.es/index.php/es/investigacion/publicaciones?view=publication&task=show&id=417>`__ (`BibTex <http://gsi.dit.upm.es/index.php/es/investigacion/publicaciones?controller=publications&task=export&format=bibtex&id=417>`__):

.. code-block:: text

    Sánchez-Rada, J. F., Iglesias, C. A., Corcuera, I., & Araque, Ó. (2016, October).
    Senpy: A Pragmatic Linked Sentiment Analysis Framework.
    In Data Science and Advanced Analytics (DSAA),
    2016 IEEE International Conference on (pp. 735-742). IEEE.
docs/advanced.rst (new file, 9 lines)
@@ -0,0 +1,9 @@
Advanced usage
--------------

.. toctree::
   :maxdepth: 1

   server-cli
   conversion
   commandline
docs/api.rst (17 changes)
@@ -25,7 +25,7 @@ NIF API
      "@context":"http://127.0.0.1/api/contexts/Results.jsonld",
      "@id":"_:Results_11241245.22",
      "@type":"results"
-     "analysis": [
+     "activities": [
        "plugins/sentiment-140_0.1"
      ],
      "entries": [
@@ -73,7 +73,7 @@ NIF API
 .. http:get:: /api/plugins
 
    Returns a list of installed plugins.
-   **Example request**:
+   **Example request and response**:
 
    .. sourcecode:: http
 
@@ -82,10 +82,6 @@ NIF API
       Accept: application/json, text/javascript
 
 
-   **Example response**:
-
-   .. sourcecode:: http
-
      {
        "@id": "plugins/sentiment-140_0.1",
        "@type": "sentimentPlugin",
@@ -143,19 +139,14 @@ NIF API
 .. http:get:: /api/plugins/<pluginname>
 
    Returns the information of a specific plugin.
-   **Example request**:
+   **Example request and response**:
 
    .. sourcecode:: http
 
-      GET /api/plugins/rand/ HTTP/1.1
+      GET /api/plugins/sentiment-random/ HTTP/1.1
       Host: localhost
       Accept: application/json, text/javascript
 
 
-   **Example response**:
-
-   .. sourcecode:: http
-
      {
        "@context": "http://127.0.0.1/api/contexts/ExamplePlugin.jsonld",
        "@id": "plugins/ExamplePlugin_0.1",
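The ``/api/plugins`` endpoint documented above can be queried from any HTTP client; a small sketch using Python's `requests` (the host and port are placeholders for a local senpy instance):

```python
# Hedged sketch: list the plugins installed on a local senpy instance.
# Assumes senpy is running on http://localhost:5000; adjust the URL as needed.
import requests

resp = requests.get("http://localhost:5000/api/plugins",
                    headers={"Accept": "application/json"})
resp.raise_for_status()

for plugin in resp.json().get("plugins", []):
    # Print the name and type reported for each installed plugin.
    print(plugin.get("name"), plugin.get("@type"))
```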
@@ -1,5 +1,6 @@
-API and Examples
-################
+API and vocabularies
+####################
 
 .. toctree::
 
    vocabularies.rst
@@ -2,7 +2,7 @@
   "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
   "@id": "me:Result1",
   "@type": "results",
-  "analysis": [
+  "activities": [
     "me:SAnalysis1",
     "me:SgAnalysis1",
     "me:EmotionAnalysis1",
@@ -2,7 +2,7 @@
   "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
   "@id": "http://example.com#NIFExample",
   "@type": "results",
-  "analysis": [
+  "activities": [
   ],
   "entries": [
     {
@@ -1,7 +1,8 @@
 Command line
 ============
 
-This video shows how to analyse text directly on the command line using the senpy tool.
+Although the main use of senpy is to publish services, the tool can also be used locally to analyze text in the command line.
+This is a short video demonstration:
 
 .. image:: https://asciinema.org/a/9uwef1ghkjk062cw2t4mhzpyk.png
   :width: 100%
@@ -7,9 +7,9 @@ Senpy includes experimental support for emotion/sentiment conversion plugins.
 Use
 ===
 
-Consider the original query: http://127.0.0.1:5000/api/?i=hello&algo=emoRand
+Consider the original query: http://127.0.0.1:5000/api/?i=hello&algo=emotion-random
 
-The requested plugin (emoRand) returns emotions using Ekman's model (or big6 in EmotionML):
+The requested plugin (emotion-random) returns emotions using Ekman's model (or big6 in EmotionML):
 
 .. code:: json
 
@@ -21,14 +21,14 @@ The requested plugin (emoRand) returns emotions using Ekman's model (or big6 in
        "@type": "emotion",
        "onyx:hasEmotionCategory": "emoml:big6anger"
      },
-     "prov:wasGeneratedBy": "plugins/emoRand_0.1"
+     "prov:wasGeneratedBy": "plugins/emotion-random_0.1"
   }
 
 
 To get these emotions in VAD space (FSRE dimensions in EmotionML), we'd do this:
 
-http://127.0.0.1:5000/api/?i=hello&algo=emoRand&emotionModel=emoml:fsre-dimensions
+http://127.0.0.1:5000/api/?i=hello&algo=emotion-random&emotionModel=emoml:fsre-dimensions
 
 This call, provided there is a valid conversion plugin from Ekman's to VAD, would return something like this:
 
@@ -42,7 +42,7 @@ This call, provided there is a valid conversion plugin from Ekman's to VAD, woul
        "@type": "emotion",
        "onyx:hasEmotionCategory": "emoml:big6anger"
      },
-     "prov:wasGeneratedBy": "plugins/emoRand_0.1"
+     "prov:wasGeneratedBy": "plugins/emotion-random_0.1"
   }, {
      "@type": "emotionSet",
      "onyx:hasEmotion": {
@@ -69,7 +69,7 @@ It is also possible to get the original emotion nested within the new converted
        "@type": "emotion",
        "onyx:hasEmotionCategory": "emoml:big6anger"
      },
-     "prov:wasGeneratedBy": "plugins/emoRand_0.1"
+     "prov:wasGeneratedBy": "plugins/emotion-random_0.1"
      "onyx:wasDerivedFrom": {
        "@type": "emotionSet",
        "onyx:hasEmotion": {
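The same conversion can be requested programmatically; a brief sketch with Python's `requests`, following the example query above (it assumes the emotion-random plugin and a conversion plugin are available on a local instance):

```python
# Hedged sketch: request emotions from emotion-random converted to FSRE/VAD
# dimensions, mirroring the example query above. Host and port are placeholders.
import requests

params = {
    "i": "hello",
    "algo": "emotion-random",
    "emotionModel": "emoml:fsre-dimensions",
}
result = requests.get("http://127.0.0.1:5000/api/", params=params).json()

for entry in result.get("entries", []):
    print(entry.get("onyx:hasEmotionSet"))
```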
@@ -1,7 +1,7 @@
 Demo
 ----
 
-There is a demo available on http://senpy.cluster.gsi.dit.upm.es/, where you can test a serie of different plugins.
+There is a demo available on http://senpy.gsi.upm.es/, where you can test a live instance of Senpy, with several open source plugins.
 You can use the playground (a web interface) or make HTTP requests to the service API.
 
 .. image:: senpy-playground.png
@@ -10,7 +10,5 @@ You can use the playground (a web interface) or make HTTP requests to the servic
   :scale: 100 %
   :align: center
 
-Plugins Demo
-============
-
-The source code and description of the plugins used in the demo is available here: https://lab.cluster.gsi.dit.upm.es/senpy/senpy-plugins-community/.
+The source code and description of the plugins used in the demo are available here: https://lab.gsi.upm.es/senpy/senpy-plugins-community/.
docs/development.rst (new file, 27 lines)
@@ -0,0 +1,27 @@
Developing new services
-----------------------

Developing web services can be hard.
To illustrate it, the figure below summarizes the typical features in a text analysis service.

.. image:: senpy-framework.png
   :width: 60%
   :align: center

Senpy implements all the common blocks, so developers can focus on what really matters: great analysis algorithms that solve real problems.
Among other things, Senpy takes care of these tasks:

* Interfacing with the user: parameter validation, error handling.
* Formatting: JSON-LD, Turtle/n-triples input and output, or simple text input.
* Linked Data: senpy results are semantically annotated, using a series of well established vocabularies, and sane default URIs.
* User interface: a web UI where users can explore your service and test different settings.
* A client to interact with the service. Currently only available in Python.

You only need to provide the algorithm to turn a piece of text into an annotation.
Sharing your sentiment analysis with the world has never been easier!

.. toctree::
   :maxdepth: 1

   plugins-quickstart
   plugins-faq
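As an illustration of the built-in Python client mentioned above, a rough sketch of how a Senpy service can be consumed (the endpoint URL and algorithm name are placeholders):

```python
# Hedged sketch: consume a senpy service with the built-in Python client.
# The endpoint and algorithm name are placeholders; see the Quickstart for details.
from senpy.client import Client

c = Client('http://localhost:5000/api')
r = c.analyse('I like sugar!!', algorithm='sentiment140')
for entry in r.entries:
    print(entry)
```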
docs/evaluation-results.png (new binary file, 76 KiB; binary file not shown)
@@ -1,5 +1,6 @@
 Examples
--------
+--------
 
 All the examples in this page use the :download:`the main schema <_static/schemas/definitions.json>`.
 
 Simple NIF annotation
@@ -17,6 +18,7 @@ Sentiment Analysis
 .....................
 Description
 ,,,,,,,,,,,
 
 This annotation corresponds to the sentiment analysis of an input. The example shows the sentiment represented according to Marl format.
 The sentiments detected are contained in the Sentiments array with their related part of the text.
 
@@ -2,7 +2,7 @@
   "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
   "@id": "me:Result1",
   "@type": "results",
-  "analysis": [
+  "activities": [
     {
       "@id": "_:SAnalysis1_Activity",
       "@type": "marl:SentimentAnalysis",
@@ -2,7 +2,7 @@
   "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
   "@id": "me:Result1",
   "@type": "results",
-  "analysis": [ ],
+  "activities": [ ],
   "entries": [
     {
       "@id": "http://example.org#char=0,40",
@@ -2,7 +2,7 @@
   "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
   "@id": "me:Result1",
   "@type": "results",
-  "analysis": [
+  "activities": [
     {
       "@id": "_:SAnalysis1_Activity",
       "@type": "marl:SentimentAnalysis",
@@ -2,7 +2,7 @@
   "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
   "@id": "me:Result1",
   "@type": "results",
-  "analysis": [
+  "activities": [
     {
       "@id": "me:EmotionAnalysis1_Activity",
       "@type": "me:EmotionAnalysis1",
@@ -2,7 +2,7 @@
   "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
   "@id": "me:Result1",
   "@type": "results",
-  "analysis": [
+  "activities": [
     {
       "@id": "_:NER1_Activity",
       "@type": "me:NERAnalysis",
@@ -7,7 +7,7 @@
   ],
   "@id": "me:Result1",
   "@type": "results",
-  "analysis": [
+  "activities": [
     {
       "@id": "me:HesamsAnalysis_Activity",
       "@type": "onyx:EmotionAnalysis",
@@ -2,7 +2,7 @@
   "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
   "@id": "me:Result1",
   "@type": "results",
-  "analysis": [
+  "activities": [
     {
       "@id": "_:SAnalysis1_Activity",
       "@type": "marl:SentimentAnalysis",
@@ -2,7 +2,7 @@
   "@context": "http://mixedemotions-project.eu/ns/context.jsonld",
   "@id": "me:Result1",
   "@type": "results",
-  "analysis": [
+  "activities": [
     {
       "@id": "_:SgAnalysis1_Activity",
       "@type": "me:SuggestionAnalysis",
@@ -4,32 +4,31 @@ Welcome to Senpy's documentation!
   :target: http://senpy.readthedocs.io/en/latest/
 .. image:: https://badge.fury.io/py/senpy.svg
   :target: https://badge.fury.io/py/senpy
-.. image:: https://lab.cluster.gsi.dit.upm.es/senpy/senpy/badges/master/build.svg
-  :target: https://lab.cluster.gsi.dit.upm.es/senpy/senpy/commits/master
-.. image:: https://lab.cluster.gsi.dit.upm.es/senpy/senpy/badges/master/coverage.svg
-  :target: https://lab.cluster.gsi.dit.upm.es/senpy/senpy/commits/master
+.. image:: https://lab.gsi.upm.es/senpy/senpy/badges/master/build.svg
+  :target: https://lab.gsi.upm.es/senpy/senpy/commits/master
+.. image:: https://lab.gsi.upm.es/senpy/senpy/badges/master/coverage.svg
+  :target: https://lab.gsi.upm.es/senpy/senpy/commits/master
 .. image:: https://img.shields.io/pypi/l/requests.svg
-  :target: https://lab.cluster.gsi.dit.upm.es/senpy/senpy/
+  :target: https://lab.gsi.upm.es/senpy/senpy/
 
 
 
 Senpy is a framework for sentiment and emotion analysis services.
-Services built with senpy are interchangeable and easy to use because they share a common :doc:`apischema`.
-It also simplifies service development.
+Senpy services are interchangeable and easy to use because they share a common semantic :doc:`apischema`.
 
-.. image:: senpy-architecture.png
-  :width: 100%
-  :align: center
+If you are interested in consuming Senpy services, read :doc:`Quickstart`.
+To get familiar with the concepts behind Senpy, and what it can offer for service developers, check out :doc:`development`.
+:doc:`apischema` contains information about the semantic models and vocabularies used by Senpy.
 
 .. toctree::
   :caption: Learn more about senpy:
   :maxdepth: 2
 
   senpy
+  Quickstart
   installation
-  demo
-  usage
+  development
   apischema
-  plugins
-  conversion
-  about
+  advanced
+  demo
+  publications
@@ -32,27 +32,25 @@ If you want to install senpy globally, use sudo instead of the ``--user`` flag.
 
 Docker Image
 ************
-Build the image or use the pre-built one:
+The base image of senpy comes with some builtin plugins that you can use:
 
 .. code:: bash
 
-   docker run -ti -p 5000:5000 gsiupm/senpy --host 0.0.0.0 --default-plugins
+   docker run -ti -p 5000:5000 gsiupm/senpy --host 0.0.0.0
 
-To add custom plugins, use a docker volume:
+To add your custom plugins, you can use a docker volume:
 
 .. code:: bash
 
-   docker run -ti -p 5000:5000 -v <PATH OF PLUGINS>:/plugins gsiupm/senpy --host 0.0.0.0 --default-plugins -f /plugins
+   docker run -ti -p 5000:5000 -v <PATH OF PLUGINS>:/plugins gsiupm/senpy --host 0.0.0.0 --plugins -f /plugins
 
 
-Python 2
-........
-
-There is a Senpy version for python2 too:
+There is a Senpy image for **python 2**, too:
 
 .. code:: bash
 
-   docker run -ti -p 5000:5000 gsiupm/senpy:python2.7 --host 0.0.0.0 --default-plugins
+   docker run -ti -p 5000:5000 gsiupm/senpy:python2.7 --host 0.0.0.0
 
 
 Alias
@@ -62,7 +60,7 @@ If you are using the docker approach regularly, it is advisable to use a script
 
 .. code:: bash
 
-   alias senpy='docker run --rm -ti -p 5000:5000 -v $PWD:/senpy-plugins gsiupm/senpy --default-plugins'
+   alias senpy='docker run --rm -ti -p 5000:5000 -v $PWD:/senpy-plugins gsiupm/senpy'
 
 
 Now, you may run senpy from any folder in your computer like so:
|
|||||||
entry.sentiments.append(sentiment)
|
entry.sentiments.append(sentiment)
|
||||||
yield entry
|
yield entry
|
||||||
|
|
||||||
The complete code of the example plugin is available `here <https://lab.cluster.gsi.dit.upm.es/senpy/plugin-prueba>`__.
|
The complete code of the example plugin is available `here <https://lab.gsi.upm.es/senpy/plugin-prueba>`__.
|
||||||
|
@@ -1,61 +1,18 @@
-Developing new plugins
-----------------------
-This document contains the minimum to get you started with developing new analysis plugins.
-For an example of conversion plugins, see :doc:`conversion`.
-For a description of definition files, see :doc:`plugins-definition`.
-
-A more step-by-step tutorial with slides is available `here <https://lab.cluster.gsi.dit.upm.es/senpy/senpy-tutorial>`__
+F.A.Q.
+======
 
 .. contents:: :local:
 
-What is a plugin?
-=================
-
-A plugin is a python object that can process entries. Given an entry, it will modify it, add annotations to it, or generate new entries.
-
-
-What is an entry?
-=================
-
-Entries are objects that can be annotated.
-In general, they will be a piece of text.
-By default, entries are `NIF contexts <http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core/nif-core.html>`_ represented in JSON-LD format.
-It is a dictionary/JSON object that looks like this:
-
-.. code:: python
-
-    {
-        "@id": "<unique identifier or blank node name>",
-        "nif:isString": "input text",
-        "sentiments": [ {
-            ...
-        }
-        ],
-        ...
-    }
-
-Annotations are added to the object like this:
-
-.. code:: python
-
-    entry = Entry()
-    entry.vocabulary__annotationName = 'myvalue'
-    entry['vocabulary:annotationName'] = 'myvalue'
-    entry['annotationNameURI'] = 'myvalue'
-
-Where vocabulary is one of the prefixes defined in the default senpy context, and annotationURI is a full URI.
-The value may be any valid JSON-LD dictionary.
-For simplicity, senpy includes a series of models by default in the ``senpy.models`` module.
-
-
 What are annotations?
-=====================
+#####################
 They are objects just like entries.
 Senpy ships with several default annotations, including: ``Sentiment``, ``Emotion``, ``EmotionSet``...
 
 
 What's a plugin made of?
-========================
+########################
 
 When receiving a query, senpy selects what plugin or plugins should process each entry, and in what order.
 It also makes sure that every entry and the parameters provided by the user meet the plugin requirements.
@@ -73,37 +30,25 @@ In practice, this is what a plugin looks like, tests included:
 The lines highlighted contain some information about the plugin.
 In particular, the following information is mandatory:
 
-* A unique name for the class. In our example, Rand.
+* A unique name for the class. In our example, sentiment-random.
 * The subclass/type of plugin. This is typically either `SentimentPlugin` or `EmotionPlugin`. However, new types of plugin can be created for different annotations. The only requirement is that these new types inherit from `senpy.Analysis`
 * A description of the plugin. This can be done simply by adding a doc to the class.
 * A version, which should get updated.
 * An author name.
 
 
-Plugins Code
-============
-
-The basic methods in a plugin are:
-
-* analyse_entry: called for every user request. It takes two parameters: ``Entry``, the entry object, and ``params``, the parameters supplied by the user. It should yield one or more ``Entry`` objects.
-* activate: used to load memory-hungry resources. For instance, to train a classifier.
-* deactivate: used to free up resources when the plugin is no longer needed.
-
-Plugins are loaded asynchronously, so don't worry if the activate method takes too long. The plugin will be marked as activated once it is finished executing the method.
-
-
 How does senpy find modules?
-============================
+############################
 
 Senpy looks for files of two types:
 
 * Python files of the form `senpy_<NAME>.py` or `<NAME>_plugin.py`. In these files, it will look for: 1) Instances that inherit from `senpy.Plugin`, or subclasses of `senpy.Plugin` that can be initialized without a configuration file. i.e. classes that contain all the required attributes for a plugin.
-* Plugin definition files (see :doc:`advanced-plugins`)
+* Plugin definition files (see :doc:`plugins-definition`)
 
-Defining additional parameters
-==============================
+How can I define additional parameters for my plugin?
+#####################################################
 
-Your plugin may ask for additional parameters from the users of the service by using the attribute ``extra_params`` in your plugin definition.
+Your plugin may ask for additional parameters from users by using the attribute ``extra_params`` in your plugin definition.
 It takes a dictionary, where the keys are the name of the argument/parameter, and the value has the following fields:
 
 * aliases: the different names which can be used in the request to use the parameter.
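A sketch of what such a definition could look like in a plugin class (only the ``aliases`` field appears in this excerpt; the class, parameter name and import path below are hypothetical, not taken from the commit):

```python
# Hedged sketch: a plugin declaring one extra parameter via extra_params.
# Only the "aliases" field is taken from the text above; everything else is
# illustrative and may differ between senpy versions.
from senpy.plugins import SentimentPlugin


class HypotheticalPlugin(SentimentPlugin):
    '''A toy plugin that accepts a "language" parameter from the request.'''
    author = 'example author'
    version = '0.1'
    extra_params = {
        'language': {
            'aliases': ['language', 'lang', 'l'],
        },
    }

    def analyse_entry(self, entry, params):
        lang = params.get('language')
        # ... run the analysis for the requested language ...
        yield entry
```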
@@ -124,8 +69,8 @@ It takes a dictionary, where the keys are the name of the argument/parameter, an
 
 
-Loading data and files
-======================
+How should I load external data and files?
+##########################################
 
 Most plugins will need access to files (dictionaries, lexicons, etc.).
 These files are usually heavy or under a license that does not allow redistribution.
@@ -144,7 +89,7 @@ Plugins have a convenience function `self.open` which will automatically prepend
     file_in_data = <FILE PATH>
     file_in_sources = <FILE PATH>
 
     def activate(self):
         with self.open(self.file_in_data) as f:
             self._classifier = train_from_file(f)
         file_in_source = os.path.join(self.get_folder(), self.file_in_sources)
@@ -155,8 +100,8 @@ Plugins have a convenience function `self.open` which will automatically prepend
 It is good practice to specify the paths of these files in the plugin configuration, so the same code can be reused with different resources.
 
 
-Docker image
-============
+Can I build a docker image for my plugin?
+#########################################
 
 Add the following dockerfile to your project to generate a docker image with your plugin:
 
@@ -204,17 +149,15 @@ Adding data to the image:
     FROM gsiupm/senpy:1.0.1
     COPY data /
 
-F.A.Q.
-======
 What annotations can I use?
-???????????????????????????
+###########################
 
 You can add almost any annotation to an entry.
 The most common use cases are covered in the :doc:`apischema`.
 
 
 Why does the analyse function yield instead of return?
-??????????????????????????????????????????????????????
+######################################################
 
 This is so that plugins may add new entries to the response or filter some of them.
 For instance, a chunker may split one entry into several.
@@ -222,7 +165,7 @@ On the other hand, a conversion plugin may leave out those entries that do not c
 
 
 If I'm using a classifier, where should I train it?
-???????????????????????????????????????????????????
+###################################################
 
 Training a classifier can be time consuming. To avoid running the training unnecessarily, you can use ShelfMixin to store the classifier. For instance:
 
@@ -256,7 +199,7 @@ A corrupt shelf prevents the plugin from loading.
 If you do not care about the data in the shelf, you can force your plugin to remove the corrupted file and load anyway, by setting 'force_shelf' to True in your plugin and starting it again.
 
 How can I turn an external service into a plugin?
-?????????????????????????????????????????????????
+#################################################
 
 This example illustrates how to implement a plugin that accesses the Sentiment140 service.
 
@@ -292,8 +235,8 @@ This example illustrates how to implement a plugin that accesses the Sentiment140
             yield entry
 
 
-Can I activate a DEBUG mode for my plugin?
-???????????????????????????????????????????
+How can I activate a DEBUG mode for my plugin?
+###############################################
 
 You can activate the DEBUG mode from the command-line tool using the option -d.
 
@@ -309,6 +252,6 @@ Additionally, with the ``--pdb`` option you will be dropped into a pdb post mort
     python -m pdb yourplugin.py
 
 Where can I find more code examples?
-????????????????????????????????????
+####################################
 
 See: `<http://github.com/gsi-upm/senpy-plugins-community>`_.
86
docs/plugins-quickstart.rst
Normal file
86
docs/plugins-quickstart.rst
Normal file
@ -0,0 +1,86 @@

Quickstart for service developers
=================================

This document contains the minimum to get you started with developing new services using Senpy.

For an example of conversion plugins, see :doc:`conversion`.
For a description of definition files, see :doc:`plugins-definition`.

A more step-by-step tutorial with slides is available `here <https://lab.gsi.upm.es/senpy/senpy-tutorial>`__.

.. contents:: :local:

Installation
############

First of all, you need to install the package.
See :doc:`installation` for instructions.
Once installed, the `senpy` command should be available.
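
Assuming the package is published on PyPI, installation and a quick sanity check (using the ``--version`` flag listed later in these docs) look like this:

.. code:: bash

   pip install --user senpy
   senpy --version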

Architecture
############

The main component of a sentiment analysis service is the algorithm itself. However, for the algorithm to work, it needs to get the appropriate parameters from the user, format the results according to the defined API, interact with the user when errors occur or more information is needed, etc.

Senpy proposes a modular and dynamic architecture that allows:

* Implementing different algorithms in an extensible way, yet offering a common interface.
* Offering common services that facilitate development, so developers can focus on implementing new and better algorithms.

The framework consists of two main modules: Senpy core, which is the building block of the service, and Senpy plugins, which contain the analysis algorithms. The next figure depicts a simplified version of the processes involved in an analysis with the Senpy framework.

.. image:: senpy-architecture.png
   :width: 100%
   :align: center

What is a plugin?
#################

A plugin is a python object that can process entries. Given an entry, it will modify it, add annotations to it, or generate new entries.

What is an entry?
#################

Entries are objects that can be annotated.
In general, they will be a piece of text.
By default, entries are `NIF contexts <http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core/nif-core.html>`_ represented in JSON-LD format.
It is a dictionary/JSON object that looks like this:

.. code:: python

   {
       "@id": "<unique identifier or blank node name>",
       "nif:isString": "input text",
       "sentiments": [ {
           ...
         }
       ],
       ...
   }

Annotations are added to the object like this:

.. code:: python

   entry = Entry()
   entry.vocabulary__annotationName = 'myvalue'
   entry['vocabulary:annotationName'] = 'myvalue'
   entry['annotationNameURI'] = 'myvalue'

Where ``vocabulary`` is one of the prefixes defined in the default senpy context, and ``annotationNameURI`` is a full URI.
The value may be any valid JSON-LD dictionary.
For simplicity, senpy includes a series of models by default in the ``senpy.models`` module.
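
For example, a sentiment annotation can be attached with those models. The following is only a sketch, reusing the ``Entry`` and ``Sentiment`` models that appear elsewhere in these docs:

.. code:: python

   from senpy.models import Entry, Sentiment

   entry = Entry(nif__isString='Senpy is awesome')
   s = Sentiment(marl__hasPolarity='marl:Positive')
   entry.sentiments.append(s)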

Plugins Code
############

The basic methods in a plugin are:

* analyse_entry: called on every user request. It takes two parameters: ``Entry``, the entry object, and ``params``, the parameters supplied by the user. It should yield one or more ``Entry`` objects.
* activate: used to load memory-hungry resources. For instance, to train a classifier.
* deactivate: used to free up resources when the plugin is no longer needed.

Plugins are loaded asynchronously, so don't worry if the activate method takes too long. The plugin will be marked as activated once it has finished executing the method. A minimal plugin is sketched below.
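
The following sketch combines these methods. It follows the ``analyse_entry(entry, params)`` signature described above; the class name is illustrative and the annotation is hard-coded for brevity:

.. code:: python

   from senpy import models, plugins

   class MinimalPlugin(plugins.SentimentPlugin):
       '''Annotates every entry with a neutral sentiment'''
       author = '@example'
       version = '0.1'

       def activate(self):
           pass  # nothing expensive to load in this sketch

       def analyse_entry(self, entry, params):
           s = models.Sentiment(marl__hasPolarity='marl:Neutral')
           entry.sentiments.append(s)
           yield entry

       def deactivate(self):
           pass  # nothing to free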
46
docs/publications.rst
Normal file
@ -0,0 +1,46 @@

Publications
============

If you use Senpy in your research, please cite `Senpy: A Pragmatic Linked Sentiment Analysis Framework <http://gsi.dit.upm.es/index.php/es/investigacion/publicaciones?view=publication&task=show&id=417>`__ (`BibTex <http://gsi.dit.upm.es/index.php/es/investigacion/publicaciones?controller=publications&task=export&format=bibtex&id=417>`__):

.. code-block:: text

   Sánchez-Rada, J. F., Iglesias, C. A., Corcuera, I., & Araque, Ó. (2016, October).
   Senpy: A Pragmatic Linked Sentiment Analysis Framework.
   In Data Science and Advanced Analytics (DSAA),
   2016 IEEE International Conference on (pp. 735-742). IEEE.

Senpy uses Onyx for emotion representation, first introduced in:

.. code-block:: text

   Sánchez-Rada, J. F., & Iglesias, C. A. (2016).
   Onyx: A linked data approach to emotion representation.
   Information Processing & Management, 52(1), 99-114.

Senpy uses Marl for sentiment representation, which was presented in:

.. code-block:: text

   Westerski, A., Iglesias Fernandez, C. A., & Tapia Rico, F. (2011).
   Linked opinions: Describing sentiments on the structured web of data.

Senpy has been used extensively in the toolbox of the MixedEmotions project:

.. code-block:: text

   Buitelaar, P., Wood, I. D., Arcan, M., McCrae, J. P., Abele, A., Robin, C., … Tummarello, G. (2018).
   MixedEmotions: An Open-Source Toolbox for Multi-Modal Emotion Analysis.
   IEEE Transactions on Multimedia.

The representation models, formats and challenges are partially covered in a chapter of the book Sentiment Analysis in Social Networks:

.. code-block:: text

   Iglesias, C. A., Sánchez-Rada, J. F., Vulcu, G., & Buitelaar, P. (2017).
   Linked Data Models for Sentiment and Emotion Analysis in Social Networks.
   In Sentiment Analysis in Social Networks (pp. 49-69).
@ -1,54 +1,27 @@
|
|||||||
What is Senpy?
|
What is Senpy?
|
||||||
--------------
|
--------------
|
||||||
|
|
||||||
Senpy is a framework for text analysis using Linked Data. There are three main applications of Senpy so far: sentiment and emotion analysis, user profiling and entity recoginition. Annotations and Services are compliant with NIF (NLP Interchange Format).
|
Senpy is a framework for sentiment and emotion analysis services.
|
||||||
|
Its goal is to produce analysis services that are interchangeable and fully interoperable.
|
||||||
Senpy aims at providing a framework where analysis modules can be integrated easily as plugins, and providing a core functionality for managing tasks such as data validation, user interaction, formatting, logging, translation to linked data, etc.
|
|
||||||
|
|
||||||
The figure below summarizes the typical features in a text analysis service.
|
|
||||||
Senpy implements all the common blocks, so developers can focus on what really matters: great analysis algorithms that solve real problems.
|
|
||||||
|
|
||||||
.. image:: senpy-framework.png
|
|
||||||
:width: 60%
|
|
||||||
:align: center
|
|
||||||
|
|
||||||
|
|
||||||
Senpy for end users
|
|
||||||
===================
|
|
||||||
|
|
||||||
All services built using senpy share a common interface.
|
|
||||||
This allows users to use them (almost) interchangeably.
|
|
||||||
Senpy comes with a :ref:`built-in client`.
|
|
||||||
|
|
||||||
|
|
||||||
Senpy for service developers
|
|
||||||
============================
|
|
||||||
|
|
||||||
Senpy is a framework that turns your sentiment or emotion analysis algorithm into a full blown semantic service.
|
|
||||||
Senpy takes care of:
|
|
||||||
|
|
||||||
* Interfacing with the user: parameter validation, error handling.
|
|
||||||
* Formatting: JSON-LD, Turtle/n-triples input and output, or simple text input
|
|
||||||
* Linked Data: senpy results are semantically annotated, using a series of well established vocabularies, and sane default URIs.
|
|
||||||
* User interface: a web UI where users can explore your service and test different settings
|
|
||||||
* A client to interact with the service. Currently only available in Python.
|
|
||||||
|
|
||||||
Sharing your sentiment analysis with the world has never been easier!
|
|
||||||
|
|
||||||
Check out the :doc:`plugins` if you have developed an analysis algorithm (e.g. sentiment analysis) and you want to publish it as a service.
|
|
||||||
|
|
||||||
Architecture
|
|
||||||
============
|
|
||||||
|
|
||||||
The main component of a sentiment analysis service is the algorithm itself. However, for the algorithm to work, it needs to get the appropriate parameters from the user, format the results according to the defined API, interact with the user whn errors occur or more information is needed, etc.
|
|
||||||
|
|
||||||
Senpy proposes a modular and dynamic architecture that allows:
|
|
||||||
|
|
||||||
* Implementing different algorithms in a extensible way, yet offering a common interface.
|
|
||||||
* Offering common services that facilitate development, so developers can focus on implementing new and better algorithms.
|
|
||||||
|
|
||||||
The framework consists of two main modules: Senpy core, which is the building block of the service, and Senpy plugins, which consist of the analysis algorithm. The next figure depicts a simplified version of the processes involved in an analysis with the Senpy framework.
|
|
||||||
|
|
||||||
.. image:: senpy-architecture.png
|
.. image:: senpy-architecture.png
|
||||||
:width: 100%
|
:width: 100%
|
||||||
:align: center
|
:align: center
|
||||||
|
|
||||||
|
All services built using senpy share a common interface.
|
||||||
|
This allows users to use them (almost) interchangeably, with the same API and tools, simply by pointing to a different URL or changing a parameter.
|
||||||
|
The common schema also makes it easier to evaluate the performance of different algorithms and services.
|
||||||
|
In fact, Senpy has a built-in evaluation API you can use to compare results with different algorithms.
|
||||||
|
|
||||||
|
Services can also use the common interface to communicate with each other.
|
||||||
|
And higher level features can be built on top of these services, such as automatic fusion of results, emotion model conversion, and service discovery.
|
||||||
|
|
||||||
|
These benefits are not limited to new services.
|
||||||
|
The community has developed wrappers for some proprietary and commercial services (such as sentiment140 and Meaning Cloud), so you can consult them as well.
|
||||||
|
Senpy comes with a :ref:`built-in client`.
|
||||||
|
|
||||||
|
|
||||||
|
To achieve this goal, Senpy uses a Linked Data principled approach, based on the NIF (NLP Interchange Format) specification, and open vocabularies such as Marl and Onyx.
|
||||||
|
You can learn more about this in :doc:`vocabularies`.
|
||||||
|
|
||||||
|
Check out :doc:`plugins` if you have developed an analysis algorithm (e.g. sentiment analysis) and you want to publish it as a service.
|
||||||
|
@ -5,10 +5,11 @@ The senpy server is launched via the `senpy` command:
|
|||||||
|
|
||||||
.. code:: text
|
.. code:: text
|
||||||
|
|
||||||
usage: senpy [-h] [--level logging_level] [--debug] [--default-plugins]
|
usage: senpy [-h] [--level logging_level] [--log-format log_format] [--debug]
|
||||||
[--host HOST] [--port PORT] [--plugins-folder PLUGINS_FOLDER]
|
[--no-default-plugins] [--host HOST] [--port PORT]
|
||||||
[--only-install] [--only-list] [--data-folder DATA_FOLDER]
|
[--plugins-folder PLUGINS_FOLDER] [--only-install] [--only-test]
|
||||||
[--threaded] [--version]
|
[--test] [--only-list] [--data-folder DATA_FOLDER]
|
||||||
|
[--no-threaded] [--no-deps] [--version] [--allow-fail]
|
||||||
|
|
||||||
Run a Senpy server
|
Run a Senpy server
|
||||||
|
|
||||||
@ -16,20 +17,25 @@ The senpy server is launched via the `senpy` command:
|
|||||||
-h, --help show this help message and exit
|
-h, --help show this help message and exit
|
||||||
--level logging_level, -l logging_level
|
--level logging_level, -l logging_level
|
||||||
Logging level
|
Logging level
|
||||||
|
--log-format log_format
|
||||||
|
Logging format
|
||||||
--debug, -d Run the application in debug mode
|
--debug, -d Run the application in debug mode
|
||||||
--default-plugins Load the default plugins
|
--no-default-plugins Do not load the default plugins
|
||||||
--host HOST Use 0.0.0.0 to accept requests from any host.
|
--host HOST Use 0.0.0.0 to accept requests from any host.
|
||||||
--port PORT, -p PORT Port to listen on.
|
--port PORT, -p PORT Port to listen on.
|
||||||
--plugins-folder PLUGINS_FOLDER, -f PLUGINS_FOLDER
|
--plugins-folder PLUGINS_FOLDER, -f PLUGINS_FOLDER
|
||||||
Where to look for plugins.
|
Where to look for plugins.
|
||||||
--only-install, -i Do not run a server, only install plugin dependencies
|
--only-install, -i Do not run a server, only install plugin dependencies
|
||||||
|
--only-test Do not run a server, just test all plugins
|
||||||
|
--test, -t Test all plugins before launching the server
|
||||||
--only-list, --list Do not run a server, only list plugins found
|
--only-list, --list Do not run a server, only list plugins found
|
||||||
--data-folder DATA_FOLDER, --data DATA_FOLDER
|
--data-folder DATA_FOLDER, --data DATA_FOLDER
|
||||||
Where to look for data. It be set with the SENPY_DATA
|
Where to look for data. It be set with the SENPY_DATA
|
||||||
environment variable as well.
|
environment variable as well.
|
||||||
--threaded Run a threaded server
|
--no-threaded Run the server without threading
|
||||||
|
--no-deps, -n Skip installing dependencies
|
||||||
--version, -v Output the senpy version and exit
|
--version, -v Output the senpy version and exit
|
||||||
|
--allow-fail, --fail Do not exit if some plugins fail to activate
|
||||||
|
|
||||||
|
|
||||||
When launched, the server will recursively look for plugins in the specified plugins folder (the current working directory by default).
|
When launched, the server will recursively look for plugins in the specified plugins folder (the current working directory by default).
|
||||||
@ -40,9 +46,9 @@ Let's run senpy with the default plugins:
|
|||||||
|
|
||||||
.. code:: bash
|
.. code:: bash
|
||||||
|
|
||||||
senpy -f . --default-plugins
|
senpy -f .
|
||||||
|
|
||||||
Now go to `http://localhost:5000 <http://localhost:5000>`_, you should be greeted by the senpy playground:
|
Now open your browser and go to `http://localhost:5000 <http://localhost:5000>`_, where you should be greeted by the senpy playground:
|
||||||
|
|
||||||
.. image:: senpy-playground.png
|
.. image:: senpy-playground.png
|
||||||
:width: 100%
|
:width: 100%
|
||||||
@ -51,9 +57,9 @@ Now go to `http://localhost:5000 <http://localhost:5000>`_, you should be greete
|
|||||||
The playground is a user-friendly way to test your plugins, but you can always use the service directly: `http://localhost:5000/api?input=hello <http://localhost:5000/api?input=hello>`_.
|
The playground is a user-friendly way to test your plugins, but you can always use the service directly: `http://localhost:5000/api?input=hello <http://localhost:5000/api?input=hello>`_.
|
||||||
|
|
||||||
|
|
||||||
By default, senpy will listen only on the `127.0.0.1` address.
|
By default, senpy will listen only on `127.0.0.1`.
|
||||||
That means you can only access the API from your (or localhost).
|
That means you can only access the API from your PC (i.e. localhost).
|
||||||
You can listen on a different address using the `--host` flag (e.g., 0.0.0.0).
|
You can listen on a different address using the `--host` flag (e.g., 0.0.0.0, to allow any computer to access it).
|
||||||
The default port is 5000.
|
The default port is 5000.
|
||||||
You can change it with the `--port` flag.
|
You can change it with the `--port` flag.
|
||||||
|
|
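
For example, to accept requests from any machine on a custom port (both flags are listed in the usage text above):

.. code:: bash

   senpy --host 0.0.0.0 --port 8000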
@ -1,15 +0,0 @@
|
|||||||
Usage
|
|
||||||
-----
|
|
||||||
|
|
||||||
First of all, you need to install the package.
|
|
||||||
See :doc:`installation` for instructions.
|
|
||||||
Once installed, the `senpy` command should be available.
|
|
||||||
|
|
||||||
.. toctree::
|
|
||||||
:maxdepth: 1
|
|
||||||
|
|
||||||
server
|
|
||||||
SenpyClientUse
|
|
||||||
commandline
|
|
||||||
|
|
||||||
|
|
@ -1,6 +1,6 @@
|
|||||||
This is a collection of plugins that exemplify certain aspects of plugin development with senpy.
|
This is a collection of plugins that exemplify certain aspects of plugin development with senpy.
|
||||||
|
|
||||||
The first series of plugins the `basic` ones.
|
The first series of plugins are the `basic` ones.
|
||||||
Their starting point is a classification function defined in `basic.py`.
|
Their starting point is a classification function defined in `basic.py`.
|
||||||
They all include testing, and running them as a script will run all tests.
|
They all include testing, and running them as a script will run all tests.
|
||||||
In ascending order of customization, the plugins are:
|
In ascending order of customization, the plugins are:
|
||||||
@ -19,5 +19,5 @@ In rest of the plugins show advanced topics:
|
|||||||
|
|
||||||
All of the plugins in this folder include a set of test cases and they are periodically tested with the latest version of senpy.
|
All of the plugins in this folder include a set of test cases and they are periodically tested with the latest version of senpy.
|
||||||
|
|
||||||
Additioanlly, for an example of stand-alone plugin that can be tested and deployed with docker, take a look at: lab.cluster.gsi.dit.upm.es/senpy/plugin-example
|
Additionally, for an example of a stand-alone plugin that can be tested and deployed with docker, take a look at: lab.gsi.upm.es/senpy/plugin-example
|
||||||
bbm
|
bbm
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
#!/usr/local/bin/python
|
#!/usr/local/bin/python
|
||||||
# coding: utf-8
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
emoticons = {
|
emoticons = {
|
||||||
'pos': [':)', ':]', '=)', ':D'],
|
'pos': [':)', ':]', '=)', ':D'],
|
||||||
@ -7,17 +7,19 @@ emoticons = {
|
|||||||
}
|
}
|
||||||
|
|
||||||
emojis = {
|
emojis = {
|
||||||
'pos': ['😁', '😂', '😃', '😄', '😆', '😅', '😄' '😍'],
|
'pos': [u'😁', u'😂', u'😃', u'😄', u'😆', u'😅', u'😄', u'😍'],
|
||||||
'neg': ['😢', '😡', '😠', '😞', '😖', '😔', '😓', '😒']
|
'neg': [u'😢', u'😡', u'😠', u'😞', u'😖', u'😔', u'😓', u'😒']
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def get_polarity(text, dictionaries=[emoticons, emojis]):
|
def get_polarity(text, dictionaries=[emoticons, emojis]):
|
||||||
polarity = 'marl:Neutral'
|
polarity = 'marl:Neutral'
|
||||||
|
print('Input for get_polarity', text)
|
||||||
for dictionary in dictionaries:
|
for dictionary in dictionaries:
|
||||||
for label, values in dictionary.items():
|
for label, values in dictionary.items():
|
||||||
for emoticon in values:
|
for emoticon in values:
|
||||||
if emoticon and emoticon in text:
|
if emoticon and emoticon in text:
|
||||||
polarity = label
|
polarity = label
|
||||||
break
|
break
|
||||||
|
print('Polarity', polarity)
|
||||||
return polarity
|
return polarity
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
#!/usr/local/bin/python
|
#!/usr/local/bin/python
|
||||||
# coding: utf-8
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
from senpy import easy_test, models, plugins
|
from senpy import easy_test, models, plugins
|
||||||
|
|
||||||
@ -18,13 +18,13 @@ class BasicAnalyseEntry(plugins.SentimentPlugin):
|
|||||||
'default': 'marl:Neutral'
|
'default': 'marl:Neutral'
|
||||||
}
|
}
|
||||||
|
|
||||||
def analyse_entry(self, entry, params):
|
def analyse_entry(self, entry, activity):
|
||||||
polarity = basic.get_polarity(entry.text)
|
polarity = basic.get_polarity(entry.text)
|
||||||
|
|
||||||
polarity = self.mappings.get(polarity, self.mappings['default'])
|
polarity = self.mappings.get(polarity, self.mappings['default'])
|
||||||
|
|
||||||
s = models.Sentiment(marl__hasPolarity=polarity)
|
s = models.Sentiment(marl__hasPolarity=polarity)
|
||||||
s.prov(self)
|
s.prov(activity)
|
||||||
entry.sentiments.append(s)
|
entry.sentiments.append(s)
|
||||||
yield entry
|
yield entry
|
||||||
|
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
#!/usr/local/bin/python
|
#!/usr/local/bin/python
|
||||||
# coding: utf-8
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
from senpy import easy_test, SentimentBox
|
from senpy import easy_test, SentimentBox
|
||||||
|
|
||||||
@ -12,15 +12,13 @@ class BasicBox(SentimentBox):
|
|||||||
author = '@balkian'
|
author = '@balkian'
|
||||||
version = '0.1'
|
version = '0.1'
|
||||||
|
|
||||||
mappings = {
|
def predict_one(self, features, **kwargs):
|
||||||
'pos': 'marl:Positive',
|
output = basic.get_polarity(features[0])
|
||||||
'neg': 'marl:Negative',
|
if output == 'pos':
|
||||||
'default': 'marl:Neutral'
|
return [1, 0, 0]
|
||||||
}
|
if output == 'neg':
|
||||||
|
return [0, 0, 1]
|
||||||
def predict_one(self, input):
|
return [0, 1, 0]
|
||||||
output = basic.get_polarity(input)
|
|
||||||
return self.mappings.get(output, self.mappings['default'])
|
|
||||||
|
|
||||||
test_cases = [{
|
test_cases = [{
|
||||||
'input': 'Hello :)',
|
'input': 'Hello :)',
|
||||||
|
@ -1,37 +1,36 @@
|
|||||||
#!/usr/local/bin/python
|
#!/usr/local/bin/python
|
||||||
# coding: utf-8
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
from senpy import easy_test, SentimentBox, MappingMixin
|
from senpy import easy_test, SentimentBox
|
||||||
|
|
||||||
import basic
|
import basic
|
||||||
|
|
||||||
|
|
||||||
class Basic(MappingMixin, SentimentBox):
|
class Basic(SentimentBox):
|
||||||
'''Provides sentiment annotation using a lexicon'''
|
'''Provides sentiment annotation using a lexicon'''
|
||||||
|
|
||||||
author = '@balkian'
|
author = '@balkian'
|
||||||
version = '0.1'
|
version = '0.1'
|
||||||
|
|
||||||
mappings = {
|
def predict_one(self, features, **kwargs):
|
||||||
'pos': 'marl:Positive',
|
output = basic.get_polarity(features[0])
|
||||||
'neg': 'marl:Negative',
|
if output == 'pos':
|
||||||
'default': 'marl:Neutral'
|
return [1, 0, 0]
|
||||||
}
|
if output == 'neu':
|
||||||
|
return [0, 1, 0]
|
||||||
def predict_one(self, input):
|
return [0, 0, 1]
|
||||||
return basic.get_polarity(input)
|
|
||||||
|
|
||||||
test_cases = [{
|
test_cases = [{
|
||||||
'input': 'Hello :)',
|
'input': u'Hello :)',
|
||||||
'polarity': 'marl:Positive'
|
'polarity': 'marl:Positive'
|
||||||
}, {
|
}, {
|
||||||
'input': 'So sad :(',
|
'input': u'So sad :(',
|
||||||
'polarity': 'marl:Negative'
|
'polarity': 'marl:Negative'
|
||||||
}, {
|
}, {
|
||||||
'input': 'Yay! Emojis 😁',
|
'input': u'Yay! Emojis 😁',
|
||||||
'polarity': 'marl:Positive'
|
'polarity': 'marl:Positive'
|
||||||
}, {
|
}, {
|
||||||
'input': 'But no emoticons 😢',
|
'input': u'But no emoticons 😢',
|
||||||
'polarity': 'marl:Negative'
|
'polarity': 'marl:Negative'
|
||||||
}]
|
}]
|
||||||
|
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
#!/usr/local/bin/python
|
#!/usr/local/bin/python
|
||||||
# coding: utf-8
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
from senpy import easy_test, models, plugins
|
from senpy import easy_test, models, plugins
|
||||||
|
|
||||||
@ -16,7 +16,7 @@ class Dictionary(plugins.SentimentPlugin):
|
|||||||
|
|
||||||
mappings = {'pos': 'marl:Positive', 'neg': 'marl:Negative'}
|
mappings = {'pos': 'marl:Positive', 'neg': 'marl:Negative'}
|
||||||
|
|
||||||
def analyse_entry(self, entry, params):
|
def analyse_entry(self, entry, *args, **kwargs):
|
||||||
polarity = basic.get_polarity(entry.text, self.dictionaries)
|
polarity = basic.get_polarity(entry.text, self.dictionaries)
|
||||||
if polarity in self.mappings:
|
if polarity in self.mappings:
|
||||||
polarity = self.mappings[polarity]
|
polarity = self.mappings[polarity]
|
||||||
|
@ -6,12 +6,13 @@ from senpy.models import EmotionSet, Emotion, Entry
|
|||||||
|
|
||||||
class EmoRand(EmotionPlugin):
|
class EmoRand(EmotionPlugin):
|
||||||
'''A sample plugin that returns a random emotion annotation'''
|
'''A sample plugin that returns a random emotion annotation'''
|
||||||
|
name = 'emotion-random'
|
||||||
author = '@balkian'
|
author = '@balkian'
|
||||||
version = '0.1'
|
version = '0.1'
|
||||||
url = "https://github.com/gsi-upm/senpy-plugins-community"
|
url = "https://github.com/gsi-upm/senpy-plugins-community"
|
||||||
onyx__usesEmotionModel = "emoml:big6"
|
onyx__usesEmotionModel = "emoml:big6"
|
||||||
|
|
||||||
def analyse_entry(self, entry, params):
|
def analyse_entry(self, entry, activity):
|
||||||
category = "emoml:big6happiness"
|
category = "emoml:big6happiness"
|
||||||
number = max(-1, min(1, random.gauss(0, 0.5)))
|
number = max(-1, min(1, random.gauss(0, 0.5)))
|
||||||
if number > 0:
|
if number > 0:
|
||||||
@ -19,7 +20,7 @@ class EmoRand(EmotionPlugin):
|
|||||||
emotionSet = EmotionSet()
|
emotionSet = EmotionSet()
|
||||||
emotion = Emotion({"onyx:hasEmotionCategory": category})
|
emotion = Emotion({"onyx:hasEmotionCategory": category})
|
||||||
emotionSet.onyx__hasEmotion.append(emotion)
|
emotionSet.onyx__hasEmotion.append(emotion)
|
||||||
emotionSet.prov__wasGeneratedBy = self.id
|
emotionSet.prov(activity)
|
||||||
entry.emotions.append(emotionSet)
|
entry.emotions.append(emotionSet)
|
||||||
yield entry
|
yield entry
|
||||||
|
|
||||||
@ -27,6 +28,6 @@ class EmoRand(EmotionPlugin):
|
|||||||
params = dict()
|
params = dict()
|
||||||
results = list()
|
results = list()
|
||||||
for i in range(100):
|
for i in range(100):
|
||||||
res = next(self.analyse_entry(Entry(nif__isString="Hello"), params))
|
res = next(self.analyse_entry(Entry(nif__isString="Hello"), self.activity(params)))
|
||||||
res.validate()
|
res.validate()
|
||||||
results.append(res.emotions[0]['onyx:hasEmotion'][0]['onyx:hasEmotionCategory'])
|
results.append(res.emotions[0]['onyx:hasEmotion'][0]['onyx:hasEmotionCategory'])
|
@ -1,5 +1,5 @@
|
|||||||
#!/usr/local/bin/python
|
#!/usr/local/bin/python
|
||||||
# coding: utf-8
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
from senpy import easy_test, models, plugins
|
from senpy import easy_test, models, plugins
|
||||||
|
|
||||||
@ -25,7 +25,8 @@ class ParameterizedDictionary(plugins.SentimentPlugin):
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
def analyse_entry(self, entry, params):
|
def analyse_entry(self, entry, activity):
|
||||||
|
params = activity.params
|
||||||
positive_words = params['positive-words'].split(',')
|
positive_words = params['positive-words'].split(',')
|
||||||
negative_words = params['negative-words'].split(',')
|
negative_words = params['negative-words'].split(',')
|
||||||
dictionary = {
|
dictionary = {
|
||||||
@ -35,7 +36,7 @@ class ParameterizedDictionary(plugins.SentimentPlugin):
|
|||||||
polarity = basic.get_polarity(entry.text, [dictionary])
|
polarity = basic.get_polarity(entry.text, [dictionary])
|
||||||
|
|
||||||
s = models.Sentiment(marl__hasPolarity=polarity)
|
s = models.Sentiment(marl__hasPolarity=polarity)
|
||||||
s.prov(self)
|
s.prov(activity)
|
||||||
entry.sentiments.append(s)
|
entry.sentiments.append(s)
|
||||||
yield entry
|
yield entry
|
||||||
|
|
||||||
|
@ -2,15 +2,16 @@ import random
|
|||||||
from senpy import SentimentPlugin, Sentiment, Entry
|
from senpy import SentimentPlugin, Sentiment, Entry
|
||||||
|
|
||||||
|
|
||||||
class Rand(SentimentPlugin):
|
class RandSent(SentimentPlugin):
|
||||||
'''A sample plugin that returns a random sentiment annotation'''
|
'''A sample plugin that returns a random sentiment annotation'''
|
||||||
|
name = 'sentiment-random'
|
||||||
author = "@balkian"
|
author = "@balkian"
|
||||||
version = '0.1'
|
version = '0.1'
|
||||||
url = "https://github.com/gsi-upm/senpy-plugins-community"
|
url = "https://github.com/gsi-upm/senpy-plugins-community"
|
||||||
marl__maxPolarityValue = '1'
|
marl__maxPolarityValue = '1'
|
||||||
marl__minPolarityValue = "-1"
|
marl__minPolarityValue = "-1"
|
||||||
|
|
||||||
def analyse_entry(self, entry, params):
|
def analyse_entry(self, entry, activity):
|
||||||
polarity_value = max(-1, min(1, random.gauss(0.2, 0.2)))
|
polarity_value = max(-1, min(1, random.gauss(0.2, 0.2)))
|
||||||
polarity = "marl:Neutral"
|
polarity = "marl:Neutral"
|
||||||
if polarity_value > 0:
|
if polarity_value > 0:
|
||||||
@ -19,7 +20,7 @@ class Rand(SentimentPlugin):
|
|||||||
polarity = "marl:Negative"
|
polarity = "marl:Negative"
|
||||||
sentiment = Sentiment(marl__hasPolarity=polarity,
|
sentiment = Sentiment(marl__hasPolarity=polarity,
|
||||||
marl__polarityValue=polarity_value)
|
marl__polarityValue=polarity_value)
|
||||||
sentiment.prov(self)
|
sentiment.prov(activity)
|
||||||
entry.sentiments.append(sentiment)
|
entry.sentiments.append(sentiment)
|
||||||
yield entry
|
yield entry
|
||||||
|
|
||||||
@ -28,8 +29,9 @@ class Rand(SentimentPlugin):
|
|||||||
params = dict()
|
params = dict()
|
||||||
results = list()
|
results = list()
|
||||||
for i in range(50):
|
for i in range(50):
|
||||||
|
activity = self.activity(params)
|
||||||
res = next(self.analyse_entry(Entry(nif__isString="Hello"),
|
res = next(self.analyse_entry(Entry(nif__isString="Hello"),
|
||||||
params))
|
activity))
|
||||||
res.validate()
|
res.validate()
|
||||||
results.append(res.sentiments[0]['marl:hasPolarity'])
|
results.append(res.sentiments[0]['marl:hasPolarity'])
|
||||||
assert 'marl:Positive' in results
|
assert 'marl:Positive' in results
|
@ -1,25 +1,20 @@
|
|||||||
from senpy import SentimentBox, MappingMixin, easy_test
|
from senpy import SentimentBox, easy_test
|
||||||
|
|
||||||
from mypipeline import pipeline
|
from mypipeline import pipeline
|
||||||
|
|
||||||
|
|
||||||
class PipelineSentiment(MappingMixin, SentimentBox):
|
class PipelineSentiment(SentimentBox):
|
||||||
'''
|
'''This is a pipeline plugin that wraps a classifier defined in another module
|
||||||
This is a pipeline plugin that wraps a classifier defined in another module
|
(mypipeline).'''
|
||||||
(mypipeline).
|
|
||||||
'''
|
|
||||||
author = '@balkian'
|
author = '@balkian'
|
||||||
version = 0.1
|
version = 0.1
|
||||||
maxPolarityValue = 1
|
maxPolarityValue = 1
|
||||||
minPolarityValue = -1
|
minPolarityValue = -1
|
||||||
|
|
||||||
mappings = {
|
def predict_one(self, features, **kwargs):
|
||||||
1: 'marl:Positive',
|
if pipeline.predict(features) > 0:
|
||||||
-1: 'marl:Negative'
|
return [1, 0, 0]
|
||||||
}
|
return [0, 0, 1]
|
||||||
|
|
||||||
def predict_one(self, input):
|
|
||||||
return pipeline.predict([input, ])[0]
|
|
||||||
|
|
||||||
test_cases = [
|
test_cases = [
|
||||||
{
|
{
|
||||||
|
@ -1 +1 @@
|
|||||||
gsitk
|
gsitk>0.1.9.1
|
||||||
|
@ -15,8 +15,6 @@ spec:
|
|||||||
- name: senpy-latest
|
- name: senpy-latest
|
||||||
image: $IMAGEWTAG
|
image: $IMAGEWTAG
|
||||||
imagePullPolicy: Always
|
imagePullPolicy: Always
|
||||||
args:
|
|
||||||
- "--default-plugins"
|
|
||||||
resources:
|
resources:
|
||||||
limits:
|
limits:
|
||||||
memory: "512Mi"
|
memory: "512Mi"
|
||||||
|
@ -12,3 +12,10 @@ spec:
|
|||||||
backend:
|
backend:
|
||||||
serviceName: senpy-latest
|
serviceName: senpy-latest
|
||||||
servicePort: 5000
|
servicePort: 5000
|
||||||
|
- host: latest.senpy.gsi.upm.es
|
||||||
|
http:
|
||||||
|
paths:
|
||||||
|
- path: /
|
||||||
|
backend:
|
||||||
|
serviceName: senpy-latest
|
||||||
|
servicePort: 5000
|
||||||
|
@ -11,5 +11,6 @@ rdflib
|
|||||||
rdflib-jsonld
|
rdflib-jsonld
|
||||||
numpy
|
numpy
|
||||||
scipy
|
scipy
|
||||||
scikit-learn
|
scikit-learn>=0.20
|
||||||
responses
|
responses
|
||||||
|
jmespath
|
||||||
|
@ -40,8 +40,14 @@ def main():
|
|||||||
'-l',
|
'-l',
|
||||||
metavar='logging_level',
|
metavar='logging_level',
|
||||||
type=str,
|
type=str,
|
||||||
default="WARN",
|
default="INFO",
|
||||||
help='Logging level')
|
help='Logging level')
|
||||||
|
parser.add_argument(
|
||||||
|
'--log-format',
|
||||||
|
metavar='log_format',
|
||||||
|
type=str,
|
||||||
|
default='%(asctime)s %(levelname)-10s %(name)-30s \t %(message)s',
|
||||||
|
help='Logging format')
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'--debug',
|
'--debug',
|
||||||
'-d',
|
'-d',
|
||||||
@ -49,10 +55,10 @@ def main():
|
|||||||
default=False,
|
default=False,
|
||||||
help='Run the application in debug mode')
|
help='Run the application in debug mode')
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'--default-plugins',
|
'--no-default-plugins',
|
||||||
action='store_true',
|
action='store_true',
|
||||||
default=False,
|
default=False,
|
||||||
help='Load the default plugins')
|
help='Do not load the default plugins')
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'--host',
|
'--host',
|
||||||
type=str,
|
type=str,
|
||||||
@ -68,7 +74,7 @@ def main():
|
|||||||
'--plugins-folder',
|
'--plugins-folder',
|
||||||
'-f',
|
'-f',
|
||||||
type=str,
|
type=str,
|
||||||
default='.',
|
action='append',
|
||||||
help='Where to look for plugins.')
|
help='Where to look for plugins.')
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'--only-install',
|
'--only-install',
|
||||||
@ -100,10 +106,10 @@ def main():
|
|||||||
default=None,
|
default=None,
|
||||||
help='Where to look for data. It be set with the SENPY_DATA environment variable as well.')
|
help='Where to look for data. It be set with the SENPY_DATA environment variable as well.')
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'--threaded',
|
'--no-threaded',
|
||||||
action='store_false',
|
action='store_true',
|
||||||
default=True,
|
default=False,
|
||||||
help='Run a threaded server')
|
help='Run a single-threaded server')
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'--no-deps',
|
'--no-deps',
|
||||||
'-n',
|
'-n',
|
||||||
@ -123,21 +129,32 @@ def main():
|
|||||||
default=False,
|
default=False,
|
||||||
help='Do not exit if some plugins fail to activate')
|
help='Do not exit if some plugins fail to activate')
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
if args.version:
|
|
||||||
print('Senpy version {}'.format(senpy.__version__))
|
print('Senpy version {}'.format(senpy.__version__))
|
||||||
print(sys.version)
|
print(sys.version)
|
||||||
|
if args.version:
|
||||||
exit(1)
|
exit(1)
|
||||||
rl = logging.getLogger()
|
rl = logging.getLogger()
|
||||||
rl.setLevel(getattr(logging, args.level))
|
rl.setLevel(getattr(logging, args.level))
|
||||||
|
logger_handler = rl.handlers[0]
|
||||||
|
|
||||||
|
# First, generic formatter:
|
||||||
|
logger_handler.setFormatter(logging.Formatter(args.log_format))
|
||||||
|
|
||||||
app = Flask(__name__)
|
app = Flask(__name__)
|
||||||
app.debug = args.debug
|
app.debug = args.debug
|
||||||
sp = Senpy(app, args.plugins_folder,
|
sp = Senpy(app,
|
||||||
default_plugins=args.default_plugins,
|
plugin_folder=None,
|
||||||
|
default_plugins=not args.no_default_plugins,
|
||||||
data_folder=args.data_folder)
|
data_folder=args.data_folder)
|
||||||
if args.only_list:
|
folders = list(args.plugins_folder) if args.plugins_folder else []
|
||||||
plugins = sp.plugins()
|
if not folders:
|
||||||
|
folders.append(".")
|
||||||
|
for p in folders:
|
||||||
|
sp.add_folder(p)
|
||||||
|
|
||||||
|
plugins = sp.plugins(plugin_type=None, is_activated=False)
|
||||||
maxname = max(len(x.name) for x in plugins)
|
maxname = max(len(x.name) for x in plugins)
|
||||||
maxversion = max(len(x.version) for x in plugins)
|
maxversion = max(len(str(x.version)) for x in plugins)
|
||||||
print('Found {} plugins:'.format(len(plugins)))
|
print('Found {} plugins:'.format(len(plugins)))
|
||||||
for plugin in plugins:
|
for plugin in plugins:
|
||||||
import inspect
|
import inspect
|
||||||
@ -147,6 +164,7 @@ def main():
|
|||||||
fpath,
|
fpath,
|
||||||
maxname=maxname,
|
maxname=maxname,
|
||||||
maxversion=maxversion))
|
maxversion=maxversion))
|
||||||
|
if args.only_list:
|
||||||
return
|
return
|
||||||
if not args.no_deps:
|
if not args.no_deps:
|
||||||
sp.install_deps()
|
sp.install_deps()
|
||||||
@ -160,10 +178,13 @@ def main():
|
|||||||
print('Senpy version {}'.format(senpy.__version__))
|
print('Senpy version {}'.format(senpy.__version__))
|
||||||
print('Server running on port %s:%d. Ctrl+C to quit' % (args.host,
|
print('Server running on port %s:%d. Ctrl+C to quit' % (args.host,
|
||||||
args.port))
|
args.port))
|
||||||
|
try:
|
||||||
app.run(args.host,
|
app.run(args.host,
|
||||||
args.port,
|
args.port,
|
||||||
threaded=args.threaded,
|
threaded=not args.no_threaded,
|
||||||
debug=app.debug)
|
debug=app.debug)
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
print('Bye!')
|
||||||
sp.deactivate_all()
|
sp.deactivate_all()
|
||||||
|
|
||||||
|
|
||||||
|
137
senpy/api.py
137
senpy/api.py
@ -5,24 +5,31 @@ logger = logging.getLogger(__name__)
|
|||||||
|
|
||||||
boolean = [True, False]
|
boolean = [True, False]
|
||||||
|
|
||||||
|
processors = {
|
||||||
|
'string_to_tuple': lambda p: p if isinstance(p, (tuple, list)) else tuple(p.split(','))
|
||||||
|
}
|
||||||
|
|
||||||
API_PARAMS = {
|
API_PARAMS = {
|
||||||
"algorithm": {
|
"algorithm": {
|
||||||
"aliases": ["algorithms", "a", "algo"],
|
"aliases": ["algorithms", "a", "algo"],
|
||||||
"required": True,
|
"required": True,
|
||||||
"default": 'default',
|
"default": 'default',
|
||||||
|
"processor": 'string_to_tuple',
|
||||||
"description": ("Algorithms that will be used to process the request."
|
"description": ("Algorithms that will be used to process the request."
|
||||||
"It may be a list of comma-separated names."),
|
"It may be a list of comma-separated names."),
|
||||||
},
|
},
|
||||||
"expanded-jsonld": {
|
"expanded-jsonld": {
|
||||||
"@id": "expanded-jsonld",
|
"@id": "expanded-jsonld",
|
||||||
"aliases": ["expanded"],
|
"description": "use JSON-LD expansion to get full URIs",
|
||||||
|
"aliases": ["expanded", "expanded_jsonld"],
|
||||||
"options": boolean,
|
"options": boolean,
|
||||||
"required": True,
|
"required": True,
|
||||||
"default": False
|
"default": False
|
||||||
},
|
},
|
||||||
"with_parameters": {
|
"with-parameters": {
|
||||||
"aliases": ['withparameters',
|
"aliases": ['withparameters',
|
||||||
'with-parameters'],
|
'with_parameters'],
|
||||||
|
"description": "include initial parameters in the response",
|
||||||
"options": boolean,
|
"options": boolean,
|
||||||
"default": False,
|
"default": False,
|
||||||
"required": True
|
"required": True
|
||||||
@ -31,9 +38,67 @@ API_PARAMS = {
|
|||||||
"@id": "outformat",
|
"@id": "outformat",
|
||||||
"aliases": ["o"],
|
"aliases": ["o"],
|
||||||
"default": "json-ld",
|
"default": "json-ld",
|
||||||
|
"description": """The data can be semantically formatted (JSON-LD, turtle or n-triples),
|
||||||
|
given as a list of comma-separated fields (see the fields option) or constructed from a Jinja2
|
||||||
|
template (see the template option).""",
|
||||||
"required": True,
|
"required": True,
|
||||||
"options": ["json-ld", "turtle", "ntriples"],
|
"options": ["json-ld", "turtle", "ntriples"],
|
||||||
},
|
},
|
||||||
|
"template": {
|
||||||
|
"@id": "template",
|
||||||
|
"required": False,
|
||||||
|
"description": """Jinja2 template for the result. The input data for the template will
|
||||||
|
be the results as a dictionary.
|
||||||
|
For example:
|
||||||
|
|
||||||
|
Consider the results before templating:
|
||||||
|
|
||||||
|
```
|
||||||
|
[{
|
||||||
|
"@type": "entry",
|
||||||
|
"onyx:hasEmotionSet": [],
|
||||||
|
"nif:isString": "testing the template",
|
||||||
|
"marl:hasOpinion": [
|
||||||
|
{
|
||||||
|
"@type": "sentiment",
|
||||||
|
"marl:hasPolarity": "marl:Positive"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}]
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
And the template:
|
||||||
|
|
||||||
|
```
|
||||||
|
{% for entry in entries %}
|
||||||
|
{{ entry["nif:isString"] | upper }},{{entry.sentiments[0]["marl:hasPolarity"].split(":")[1]}}
|
||||||
|
{% endfor %}
|
||||||
|
```
|
||||||
|
|
||||||
|
The final result would be:
|
||||||
|
|
||||||
|
```
|
||||||
|
TESTING THE TEMPLATE,Positive
|
||||||
|
```
|
||||||
|
"""
|
||||||
|
|
||||||
|
},
|
||||||
|
"fields": {
|
||||||
|
"@id": "fields",
|
||||||
|
"required": False,
|
||||||
|
"description": """A jmespath selector, that can be used to extract a new dictionary, array or value
|
||||||
|
from the results.
|
||||||
|
jmespath is a powerful query language for json and/or dictionaries.
|
||||||
|
It allows you to change the structure (and data) of your objects through queries.
|
||||||
|
|
||||||
|
e.g., the following expression gets a list of `[emotion label, intensity]` for each entry:
|
||||||
|
`entries[]."onyx:hasEmotionSet"[]."onyx:hasEmotion"[]["onyx:hasEmotionCategory","onyx:hasEmotionIntensity"]`
|
||||||
|
|
||||||
|
For more information, see: https://jmespath.org
|
||||||
|
|
||||||
|
"""
|
||||||
|
},
|
||||||
"help": {
|
"help": {
|
||||||
"@id": "help",
|
"@id": "help",
|
||||||
"description": "Show additional help to know more about the possible parameters",
|
"description": "Show additional help to know more about the possible parameters",
|
||||||
@ -44,21 +109,39 @@ API_PARAMS = {
|
|||||||
},
|
},
|
||||||
"verbose": {
|
"verbose": {
|
||||||
"@id": "verbose",
|
"@id": "verbose",
|
||||||
"description": ("Show all help, including the common API parameters, or "
|
"description": "Show all properties in the result",
|
||||||
"only plugin-related info"),
|
|
||||||
"aliases": ["v"],
|
"aliases": ["v"],
|
||||||
"required": True,
|
"required": True,
|
||||||
"options": boolean,
|
"options": boolean,
|
||||||
"default": True
|
"default": False
|
||||||
},
|
},
|
||||||
"emotionModel": {
|
"aliases": {
|
||||||
|
"@id": "aliases",
|
||||||
|
"description": "Replace JSON properties with their aliases",
|
||||||
|
"aliases": [],
|
||||||
|
"required": True,
|
||||||
|
"options": boolean,
|
||||||
|
"default": False
|
||||||
|
},
|
||||||
|
"emotion-model": {
|
||||||
"@id": "emotionModel",
|
"@id": "emotionModel",
|
||||||
"aliases": ["emoModel"],
|
"description": """Emotion model to use in the response.
|
||||||
|
Senpy will try to convert the output to this model automatically.
|
||||||
|
|
||||||
|
Examples: `wna:liking` and `emoml:big6`.
|
||||||
|
""",
|
||||||
|
"aliases": ["emoModel", "emotionModel"],
|
||||||
"required": False
|
"required": False
|
||||||
},
|
},
|
||||||
"conversion": {
|
"conversion": {
|
||||||
"@id": "conversion",
|
"@id": "conversion",
|
||||||
"description": "How to show the elements that have (not) been converted",
|
"description": """How to show the elements that have (not) been converted.
|
||||||
|
|
||||||
|
* full: converted and original elements will appear side-by-side
|
||||||
|
* filtered: only converted elements will be shown
|
||||||
|
* nested: converted elements will be shown, and they will include a link to the original element
|
||||||
|
(using `prov:wasGeneratedBy`).
|
||||||
|
""",
|
||||||
"required": True,
|
"required": True,
|
||||||
"options": ["filtered", "nested", "full"],
|
"options": ["filtered", "nested", "full"],
|
||||||
"default": "full"
|
"default": "full"
|
||||||
@ -68,9 +151,10 @@ API_PARAMS = {
|
|||||||
EVAL_PARAMS = {
|
EVAL_PARAMS = {
|
||||||
"algorithm": {
|
"algorithm": {
|
||||||
"aliases": ["plug", "p", "plugins", "algorithms", 'algo', 'a', 'plugin'],
|
"aliases": ["plug", "p", "plugins", "algorithms", 'algo', 'a', 'plugin'],
|
||||||
"description": "Plugins to be evaluated",
|
"description": "Plugins to evaluate",
|
||||||
"required": True,
|
"required": True,
|
||||||
"help": "See activated plugins in /plugins"
|
"help": "See activated plugins in /plugins",
|
||||||
|
"processor": API_PARAMS['algorithm']['processor']
|
||||||
},
|
},
|
||||||
"dataset": {
|
"dataset": {
|
||||||
"aliases": ["datasets", "data", "d"],
|
"aliases": ["datasets", "data", "d"],
|
||||||
@ -81,18 +165,19 @@ EVAL_PARAMS = {
|
|||||||
}
|
}
|
||||||
|
|
||||||
PLUGINS_PARAMS = {
|
PLUGINS_PARAMS = {
|
||||||
"plugin_type": {
|
"plugin-type": {
|
||||||
"@id": "pluginType",
|
"@id": "pluginType",
|
||||||
"description": 'What kind of plugins to list',
|
"description": 'What kind of plugins to list',
|
||||||
"aliases": ["pluginType"],
|
"aliases": ["pluginType", "plugin_type"],
|
||||||
"required": True,
|
"required": True,
|
||||||
"default": 'analysisPlugin'
|
"default": 'analysisPlugin'
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
WEB_PARAMS = {
|
WEB_PARAMS = {
|
||||||
"inHeaders": {
|
"in-headers": {
|
||||||
"aliases": ["headers"],
|
"aliases": ["headers", "inheaders", "inHeaders", "in-headers", "in_headers"],
|
||||||
|
"description": "Only include the JSON-LD context in the headers",
|
||||||
"required": True,
|
"required": True,
|
||||||
"default": False,
|
"default": False,
|
||||||
"options": boolean
|
"options": boolean
|
||||||
@ -100,8 +185,8 @@ WEB_PARAMS = {
|
|||||||
}
|
}
|
||||||
|
|
||||||
CLI_PARAMS = {
|
CLI_PARAMS = {
|
||||||
"plugin_folder": {
|
"plugin-folder": {
|
||||||
"aliases": ["folder"],
|
"aliases": ["folder", "plugin_folder"],
|
||||||
"required": True,
|
"required": True,
|
||||||
"default": "."
|
"default": "."
|
||||||
},
|
},
|
||||||
@ -116,6 +201,7 @@ NIF_PARAMS = {
|
|||||||
},
|
},
|
||||||
"intype": {
|
"intype": {
|
||||||
"@id": "intype",
|
"@id": "intype",
|
||||||
|
"description": "input type",
|
||||||
"aliases": ["t"],
|
"aliases": ["t"],
|
||||||
"required": False,
|
"required": False,
|
||||||
"default": "direct",
|
"default": "direct",
|
||||||
@ -123,6 +209,7 @@ NIF_PARAMS = {
|
|||||||
},
|
},
|
||||||
"informat": {
|
"informat": {
|
||||||
"@id": "informat",
|
"@id": "informat",
|
||||||
|
"description": "input format",
|
||||||
"aliases": ["f"],
|
"aliases": ["f"],
|
||||||
"required": False,
|
"required": False,
|
||||||
"default": "text",
|
"default": "text",
|
||||||
@ -130,17 +217,20 @@ NIF_PARAMS = {
|
|||||||
},
|
},
|
||||||
"language": {
|
"language": {
|
||||||
"@id": "language",
|
"@id": "language",
|
||||||
|
"description": "language of the input",
|
||||||
"aliases": ["l"],
|
"aliases": ["l"],
|
||||||
"required": False,
|
"required": False,
|
||||||
},
|
},
|
||||||
"prefix": {
|
"prefix": {
|
||||||
"@id": "prefix",
|
"@id": "prefix",
|
||||||
|
"description": "prefix to use for new entities",
|
||||||
"aliases": ["p"],
|
"aliases": ["p"],
|
||||||
"required": True,
|
"required": True,
|
||||||
"default": "",
|
"default": "",
|
||||||
},
|
},
|
||||||
"urischeme": {
|
"urischeme": {
|
||||||
"@id": "urischeme",
|
"@id": "urischeme",
|
||||||
|
"description": "scheme for NIF URIs",
|
||||||
"aliases": ["u"],
|
"aliases": ["u"],
|
||||||
"required": False,
|
"required": False,
|
||||||
"default": "RFC5147String",
|
"default": "RFC5147String",
|
||||||
@ -171,16 +261,19 @@ def parse_params(indict, *specs):
|
|||||||
if alias in indict and alias != param:
|
if alias in indict and alias != param:
|
||||||
outdict[param] = indict[alias]
|
outdict[param] = indict[alias]
|
||||||
del outdict[alias]
|
del outdict[alias]
|
||||||
continue
|
break
|
||||||
if param not in outdict:
|
if param not in outdict:
|
||||||
if "default" in options:
|
if "default" in options:
|
||||||
# We assume the default is correct
|
# We assume the default is correct
|
||||||
outdict[param] = options["default"]
|
outdict[param] = options["default"]
|
||||||
elif options.get("required", False):
|
elif options.get("required", False):
|
||||||
wrong_params[param] = spec[param]
|
wrong_params[param] = spec[param]
|
||||||
elif "options" in options:
|
continue
|
||||||
|
if 'processor' in options:
|
||||||
|
outdict[param] = processors[options['processor']](outdict[param])
|
||||||
|
if "options" in options:
|
||||||
if options["options"] == boolean:
|
if options["options"] == boolean:
|
||||||
outdict[param] = str(outdict[param]).lower() in ['true', '1']
|
outdict[param] = str(outdict[param]).lower() in ['true', '1', '']
|
||||||
elif outdict[param] not in options["options"]:
|
elif outdict[param] not in options["options"]:
|
||||||
wrong_params[param] = spec[param]
|
wrong_params[param] = spec[param]
|
||||||
if wrong_params:
|
if wrong_params:
|
||||||
@ -253,7 +346,7 @@ def get_extra_params(plugins):
|
|||||||
return params
|
return params
|
||||||
|
|
||||||
|
|
||||||
def parse_analysis(params, plugins):
|
def parse_analyses(params, plugins):
|
||||||
'''
|
'''
|
||||||
Parse the given parameters individually for each plugin, and get a list of the parameters that
|
Parse the given parameters individually for each plugin, and get a list of the parameters that
|
||||||
belong to each of the plugins. Each item can then be used in the plugin.analyse_entries method.
|
belong to each of the plugins. Each item can then be used in the plugin.analyse_entries method.
|
||||||
@ -305,7 +398,7 @@ def parse_call(params):
|
|||||||
params = parse_params(params, NIF_PARAMS)
|
params = parse_params(params, NIF_PARAMS)
|
||||||
if params['informat'] == 'text':
|
if params['informat'] == 'text':
|
||||||
results = Results()
|
results = Results()
|
||||||
entry = Entry(nif__isString=params['input'], id='#') # Use @base
|
entry = Entry(nif__isString=params['input'], id='prefix:') # Use @base
|
||||||
results.entries.append(entry)
|
results.entries.append(entry)
|
||||||
elif params['informat'] == 'json-ld':
|
elif params['informat'] == 'json-ld':
|
||||||
results = from_string(params['input'], cls=Results)
|
results = from_string(params['input'], cls=Results)
|
||||||
|
@ -24,6 +24,8 @@ from . import api
|
|||||||
from .version import __version__
|
from .version import __version__
|
||||||
from functools import wraps
|
from functools import wraps
|
||||||
|
|
||||||
|
from .gsitk_compat import GSITK_AVAILABLE
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import json
|
import json
|
||||||
import base64
|
import base64
|
||||||
@ -63,44 +65,44 @@ def get_params(req):
|
|||||||
return indict
|
return indict
|
||||||
|
|
||||||
|
|
||||||
def encoded_url(url=None, base=None):
|
def encode_url(url=None):
|
||||||
code = ''
|
code = ''
|
||||||
if not url:
|
if not url:
|
||||||
if request.method == 'GET':
|
url = request.parameters.get('prefix', request.full_path[1:] + '#')
|
||||||
url = request.full_path[1:] # Remove the first slash
|
return code or base64.urlsafe_b64encode(url.encode()).decode()
|
||||||
else:
|
|
||||||
hash(frozenset(tuple(request.parameters.items())))
|
|
||||||
code = 'hash:{}'.format(hash)
|
|
||||||
|
|
||||||
code = code or base64.urlsafe_b64encode(url.encode()).decode()
|
|
||||||
|
|
||||||
if base:
|
def url_for_code(code, base=None):
|
||||||
return base + code
|
# if base:
|
||||||
return url_for('api.decode', code=code, _external=True)
|
# return base + code
|
||||||
|
# return url_for('api.decode', code=code, _external=True)
|
||||||
|
# This was producing unique yet very long URIs, which wasn't ideal for visualization.
|
||||||
|
return 'http://senpy.invalid/'
|
||||||
|
|
||||||
|
|
||||||
def decoded_url(code, base=None):
|
def decoded_url(code, base=None):
|
||||||
if code.startswith('hash:'):
|
|
||||||
raise Exception('Can not decode a URL for a POST request')
|
|
||||||
base = base or request.url_root
|
|
||||||
path = base64.urlsafe_b64decode(code.encode()).decode()
|
path = base64.urlsafe_b64decode(code.encode()).decode()
|
||||||
|
if path[:4] == 'http':
|
||||||
|
return path
|
||||||
|
base = base or request.url_root
|
||||||
return base + path
|
return base + path
|
||||||
|
|
||||||
|
|
||||||
@demo_blueprint.route('/')
|
@demo_blueprint.route('/')
|
||||||
def index():
|
def index():
|
||||||
ev = str(get_params(request).get('evaluation', False))
|
# ev = str(get_params(request).get('evaluation', True))
|
||||||
evaluation_enabled = ev.lower() not in ['false', 'no', 'none']
|
# evaluation_enabled = ev.lower() not in ['false', 'no', 'none']
|
||||||
|
evaluation_enabled = GSITK_AVAILABLE
|
||||||
|
|
||||||
return render_template("index.html",
|
return render_template("index.html",
|
||||||
evaluation=evaluation_enabled,
|
evaluation=evaluation_enabled,
|
||||||
version=__version__)
|
version=__version__)
|
||||||
|
|
||||||
|
|
||||||
@api_blueprint.route('/contexts/<entity>.jsonld')
|
@api_blueprint.route('/contexts/<code>')
|
||||||
def context(entity="context"):
|
def context(code=''):
|
||||||
context = Response._context
|
context = Response._context
|
||||||
context['@vocab'] = url_for('ns.index', _external=True)
|
context['@base'] = url_for('api.decode', code=code, _external=True)
|
||||||
context['endpoint'] = url_for('api.api_root', _external=True)
|
context['endpoint'] = url_for('api.api_root', _external=True)
|
||||||
return jsonify({"@context": context})
|
return jsonify({"@context": context})
|
||||||
|
|
||||||
@ -130,26 +132,59 @@ def schema(schema="definitions"):
|
|||||||
|
|
||||||
def basic_api(f):
|
def basic_api(f):
|
||||||
default_params = {
|
default_params = {
|
||||||
'inHeaders': False,
|
'in-headers': False,
|
||||||
'expanded-jsonld': False,
|
'expanded-jsonld': False,
|
||||||
'outformat': None,
|
'outformat': None,
|
||||||
'with_parameters': True,
|
'with-parameters': True,
|
||||||
}
|
}
|
||||||
|
|
||||||
@wraps(f)
|
@wraps(f)
|
||||||
def decorated_function(*args, **kwargs):
|
def decorated_function(*args, **kwargs):
|
||||||
raw_params = get_params(request)
|
raw_params = get_params(request)
|
||||||
-        logger.info('Getting request: {}'.format(raw_params))
+        # logger.info('Getting request: {}'.format(raw_params))
+        logger.debug('Getting request. Params: {}'.format(raw_params))
         headers = {'X-ORIGINAL-PARAMS': json.dumps(raw_params)}
         params = default_params

+        mime = request.accept_mimetypes\
+            .best_match(MIMETYPES.keys(),
+                        DEFAULT_MIMETYPE)
+
+        mimeformat = MIMETYPES.get(mime, DEFAULT_FORMAT)
+        outformat = mimeformat
+
         try:
             params = api.parse_params(raw_params, api.WEB_PARAMS, api.API_PARAMS)
+            outformat = params.get('outformat', mimeformat)
             if hasattr(request, 'parameters'):
                 request.parameters.update(params)
             else:
                 request.parameters = params
             response = f(*args, **kwargs)
+
+            if 'parameters' in response and not params['with-parameters']:
+                del response.parameters
+
+            logger.debug('Response: {}'.format(response))
+
+            prefix = params.get('prefix')
+            code = encode_url(prefix)
+
+            return response.flask(
+                in_headers=params['in-headers'],
+                headers=headers,
+                prefix=prefix or url_for_code(code),
+                base=prefix,
+                context_uri=url_for('api.context',
+                                    code=code,
+                                    _external=True),
+                outformat=outformat,
+                expanded=params['expanded-jsonld'],
+                template=params.get('template'),
+                verbose=params['verbose'],
+                aliases=params['aliases'],
+                fields=params.get('fields'))
+
         except (Exception) as ex:
             if current_app.debug or current_app.config['TESTING']:
                 raise
@@ -159,56 +194,48 @@ def basic_api(f):
             response = ex
             response.parameters = raw_params
             logger.exception(ex)

-        if 'parameters' in response and not params['with_parameters']:
-            del response.parameters
-
-        logger.info('Response: {}'.format(response))
-        mime = request.accept_mimetypes\
-            .best_match(MIMETYPES.keys(),
-                        DEFAULT_MIMETYPE)
-
-        mimeformat = MIMETYPES.get(mime, DEFAULT_FORMAT)
-        outformat = params['outformat'] or mimeformat
-
         return response.flask(
-            in_headers=params['inHeaders'],
-            headers=headers,
-            prefix=params.get('prefix', encoded_url()),
-            context_uri=url_for('api.context',
-                                entity=type(response).__name__,
-                                _external=True),
             outformat=outformat,
-            expanded=params['expanded-jsonld'])
+            expanded=params['expanded-jsonld'],
+            verbose=params.get('verbose', True),
+        )

    return decorated_function


-@api_blueprint.route('/', defaults={'plugin': None}, methods=['POST', 'GET'])
-@api_blueprint.route('/<path:plugin>', methods=['POST', 'GET'])
+@api_blueprint.route('/', defaults={'plugins': None}, methods=['POST', 'GET'])
+@api_blueprint.route('/<path:plugins>', methods=['POST', 'GET'])
 @basic_api
-def api_root(plugin):
-    if plugin:
+def api_root(plugins):
+    if plugins:
         if request.parameters['algorithm'] != api.API_PARAMS['algorithm']['default']:
             raise Error('You cannot specify the algorithm with a parameter and a URL variable.'
                         ' Please, remove one of them')
-        request.parameters['algorithm'] = tuple(plugin.replace('+', '/').split('/'))
+        plugins = plugins.replace('+', ',').replace('/', ',')
+        plugins = api.processors['string_to_tuple'](plugins)
+    else:
+        plugins = request.parameters['algorithm']

-    plugin = request.parameters['algorithm']
+    print(plugins)

     sp = current_app.senpy
-    plugins = sp.get_plugins(plugin)
+    plugins = sp.get_plugins(plugins)

     if request.parameters['help']:
+        apis = [api.WEB_PARAMS, api.API_PARAMS, api.NIF_PARAMS]
+        # Verbose is set to False as default, but we want it to default to
+        # True for help. This checks the original value, to make sure it wasn't
+        # set by default.
+        if not request.parameters['verbose'] and get_params(request).get('verbose'):
             apis = []
-        if request.parameters['verbose']:
-            apis.append(api.BUILTIN_PARAMS)
+        if request.parameters['algorithm'] == ['default', ]:
+            plugins = []
         allparameters = api.get_all_params(plugins, *apis)
         response = Help(valid_parameters=allparameters)
         return response
     req = api.parse_call(request.parameters)
-    analysis = api.parse_analysis(req.parameters, plugins)
-    results = current_app.senpy.analyse(req, analysis)
+    analyses = api.parse_analyses(req.parameters, plugins)
+    results = current_app.senpy.analyse(req, analyses)
     return results


@@ -230,8 +257,8 @@ def evaluate():
 def plugins():
     sp = current_app.senpy
     params = api.parse_params(request.parameters, api.PLUGINS_PARAMS)
-    ptype = params.get('plugin_type')
-    plugins = list(sp.plugins(plugin_type=ptype))
+    ptype = params.get('plugin-type')
+    plugins = list(sp.analysis_plugins(plugin_type=ptype))
     dic = Plugins(plugins=plugins)
     return dic
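The new route accepts several plugin names directly in the URL path and turns them into a pipeline. A minimal, standalone sketch of that splitting step (independent of Flask, using only the separators shown above; the plugin names are just examples):

```python
# Sketch of the URL-to-pipeline parsing used by api_root above.
# The real view additionally runs the result through api.processors['string_to_tuple'].
def split_pipeline(path):
    """Turn 'plugin-a+plugin-b' or 'plugin-a/plugin-b' into a tuple of names."""
    normalized = path.replace('+', ',').replace('/', ',')
    return tuple(name for name in normalized.split(',') if name)

assert split_pipeline('sentiment140+emotion-anew') == ('sentiment140', 'emotion-anew')
assert split_pipeline('sentiment140/emotion-anew') == ('sentiment140', 'emotion-anew')
```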
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
 import sys
 from .models import Error
 from .extensions import Senpy
@@ -27,8 +29,8 @@ def main_function(argv):
                               api.CLI_PARAMS,
                               api.API_PARAMS,
                               api.NIF_PARAMS)
-    plugin_folder = params['plugin_folder']
-    default_plugins = params.get('default-plugins', False)
+    plugin_folder = params['plugin-folder']
+    default_plugins = not params.get('no-default-plugins', False)
     sp = Senpy(default_plugins=default_plugins, plugin_folder=plugin_folder)
     request = api.parse_call(params)
     algos = sp.get_plugins(request.parameters.get('algorithm', None))
@@ -48,7 +50,7 @@ def main():
         res = main_function(sys.argv[1:])
         print(res.serialize())
     except Error as err:
-        print(err.serialize())
+        print(err.serialize(), file=sys.stderr)
         sys.exit(2)
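The change above keeps machine-readable results on stdout and moves serialized errors to stderr with a non-zero exit code. A small sketch of the same pattern, with `run()` and `SerializableError` as hypothetical stand-ins for the real entry point and error type:

```python
import sys

def cli(argv):
    try:
        result = run(argv)          # hypothetical main function
        print(result.serialize())   # machine-readable output on stdout
    except SerializableError as err:
        print(err.serialize(), file=sys.stderr)  # errors never pollute stdout
        sys.exit(2)
```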
@@ -7,6 +7,7 @@ standard_library.install_aliases()

 from . import plugins, api
 from .models import Error, AggregatedEvaluation
+from .plugins import AnalysisPlugin
 from .blueprints import api_blueprint, demo_blueprint, ns_blueprint

 from threading import Thread
@@ -54,6 +55,7 @@ class Senpy(object):
         self.app = app
         if app is not None:
             self.init_app(app)
+        self._conversion_candidates = {}

     def init_app(self, app):
         """ Initialise a flask app to add plugins to its context """
@@ -74,14 +76,18 @@ class Senpy(object):

     def add_plugin(self, plugin):
         self._plugins[plugin.name.lower()] = plugin
+        self._conversion_candidates = {}

     def delete_plugin(self, plugin):
         del self._plugins[plugin.name.lower()]

     def plugins(self, plugin_type=None, is_activated=True, **kwargs):
         """ Return the plugins registered for a given application. Filtered by criteria """
-        return list(plugins.pfilter(self._plugins, plugin_type=plugin_type,
-                                    is_activated=is_activated, **kwargs))
+        return sorted(plugins.pfilter(self._plugins,
+                                      plugin_type=plugin_type,
+                                      is_activated=is_activated,
+                                      **kwargs),
+                      key=lambda x: x.id)

     def get_plugin(self, name, default=None):
         if name == 'default':
@@ -115,10 +121,10 @@ class Senpy(object):
             pass  # Assume it is a tuple or a list
         return tuple(self.get_plugin(n) for n in name)

-    @property
-    def analysis_plugins(self):
+    def analysis_plugins(self, **kwargs):
         """ Return only the analysis plugins that are active"""
-        return self.plugins(plugin_type='analysisPlugin', is_activated=True)
+        candidates = self.plugins(**kwargs)
+        return list(plugins.pfilter(candidates, plugin_type=AnalysisPlugin))
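Returning the plugin list through `sorted(..., key=lambda x: x.id)` makes the ordering deterministic, which matters for reproducible help output and tests. A self-contained illustration of the same idea, with throwaway objects instead of real plugins:

```python
# Deterministic ordering of a filtered plugin registry, keyed by id.
from collections import namedtuple

FakePlugin = namedtuple('FakePlugin', ['id', 'is_activated'])

registry = [FakePlugin('z-plugin', True), FakePlugin('a-plugin', True)]
ordered = sorted((p for p in registry if p.is_activated), key=lambda p: p.id)
assert [p.id for p in ordered] == ['a-plugin', 'z-plugin']
```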
     def add_folder(self, folder, from_root=False):
         """ Find plugins in this folder and add them to this instance """
@@ -144,14 +150,17 @@ class Senpy(object):

         analysis = pending[0]
         results = analysis.run(req)
-        results.analysis.append(analysis)
+        results.activities.append(analysis)
         done += analysis
         return self._process(results, pending[1:], done)

     def install_deps(self):
-        plugins.install_deps(*self.plugins())
+        logger.info('Installing dependencies')
+        # If a plugin is activated, its dependencies should already be installed
+        # Otherwise, it would've failed to activate.
+        plugins.install_deps(*self.plugins(is_activated=False))

-    def analyse(self, request, analysis=None):
+    def analyse(self, request, analyses=None):
         """
         Main method that analyses a request, either from CLI or HTTP.
         It takes a processed request, provided by the user, as returned
@@ -162,17 +171,17 @@ class Senpy(object):
                         status=404,
                         message=("No plugins found."
                                  " Please install one."))
-        if analysis is None:
+        if analyses is None:
             plugins = self.get_plugins(request.parameters['algorithm'])
-            analysis = api.parse_analysis(request.parameters, plugins)
+            analyses = api.parse_analyses(request.parameters, plugins)
         logger.debug("analysing request: {}".format(request))
-        results = self._process(request, analysis)
+        results = self._process(request, analyses)
         logger.debug("Got analysis result: {}".format(results))
-        results = self.postprocess(results)
+        results = self.postprocess(results, analyses)
         logger.debug("Returning post-processed result: {}".format(results))
         return results
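With the rename from a single `analysis` to a list of `analyses`, driving the engine programmatically follows the same steps the method performs internally. A sketch, assuming `sp` is an initialised `Senpy` instance and `params` is an already-validated parameter dict (both created elsewhere):

```python
from senpy import api

req = api.parse_call(params)
plugins = sp.get_plugins(req.parameters['algorithm'])
analyses = api.parse_analyses(req.parameters, plugins)
results = sp.analyse(req, analyses)
print(results.serialize())
```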
-    def convert_emotions(self, resp):
+    def convert_emotions(self, resp, analyses):
         """
         Conversion of all emotions in a response **in place**.
         In addition to converting from one model to another, it has
@@ -180,45 +189,50 @@ class Senpy(object):
         Needless to say, this is far from an elegant solution, but it works.
         @todo refactor and clean up
         """
-        plugins = resp.analysis
-
+        logger.debug("Converting emotions")
         if 'parameters' not in resp:
+            logger.debug("NO PARAMETERS")
             return resp

         params = resp['parameters']
-        toModel = params.get('emotionModel', None)
+        toModel = params.get('emotion-model', None)
         if not toModel:
+            logger.debug("NO tomodel PARAMETER")
             return resp

         logger.debug('Asked for model: {}'.format(toModel))
         output = params.get('conversion', None)
-        candidates = {}
-        for plugin in plugins:
-            try:
-                fromModel = plugin.get('onyx:usesEmotionModel', None)
-                candidates[plugin.id] = next(self._conversion_candidates(fromModel, toModel))
-                logger.debug('Analysis plugin {} uses model: {}'.format(
-                    plugin.id, fromModel))
-            except StopIteration:
+        newentries = []
+        done = []
+        for i in resp.entries:
+            if output == "full":
+                newemotions = copy.deepcopy(i.emotions)
+            else:
+                newemotions = []
+            for j in i.emotions:
+                activity = j['prov:wasGeneratedBy']
+                act = resp.activity(activity)
+                if not act:
+                    raise Error('Could not find the emotion model for {}'.format(activity))
+                fromModel = act.plugin['onyx:usesEmotionModel']
+                if toModel == fromModel:
+                    continue
+                candidate = self._conversion_candidate(fromModel, toModel)
+                if not candidate:
                     e = Error(('No conversion plugin found for: '
                                '{} -> {}'.format(fromModel, toModel)),
                               status=404)
                     e.original_response = resp
                     e.parameters = params
                     raise e
-        newentries = []
-        done = []
-        for i in resp.entries:
-            if output == "full":
-                newemotions = copy.deepcopy(i.emotions)
-            else:
-                newemotions = []
-            for j in i.emotions:
-                plugname = j['prov:wasGeneratedBy']
-                candidate = candidates[plugname]
-                done.append({'plugin': candidate, 'parameters': params})
+
+                analysis = candidate.activity(params)
+                done.append(analysis)
                 for k in candidate.convert(j, fromModel, toModel, params):
-                    k.prov__wasGeneratedBy = candidate.id
+                    k.prov__wasGeneratedBy = analysis.id
                     if output == 'nested':
                         k.prov__wasDerivedFrom = j
                     newemotions.append(k)
@@ -227,26 +241,36 @@ class Senpy(object):
             resp.entries = newentries
         return resp

-    def _conversion_candidates(self, fromModel, toModel):
-        candidates = self.plugins(plugin_type=plugins.EmotionConversion)
-        for candidate in candidates:
-            for pair in candidate.onyx__doesConversion:
+    def _conversion_candidate(self, fromModel, toModel):
+        if not self._conversion_candidates:
+            candidates = {}
+            for conv in self.plugins(plugin_type=plugins.EmotionConversion):
+                for pair in conv.onyx__doesConversion:
                     logging.debug(pair)
-                if candidate.can_convert(fromModel, toModel):
-                    yield candidate
+                    key = (pair['onyx:conversionFrom'], pair['onyx:conversionTo'])
+                    if key not in candidates:
+                        candidates[key] = []
+                    candidates[key].append(conv)
+            self._conversion_candidates = candidates

-    def postprocess(self, response):
+        key = (fromModel, toModel)
+        if key not in self._conversion_candidates:
+            return None
+        return self._conversion_candidates[key][0]
+
+    def postprocess(self, response, analyses):
         '''
         Transform the results from the analysis plugins.
         It has some pre-defined post-processing like emotion conversion,
         and it also allows plugins to auto-select themselves.
         '''
-        response = self.convert_emotions(response)
+        response = self.convert_emotions(response, analyses)

         for plug in self.plugins(plugin_type=plugins.PostProcessing):
-            if plug.check(response, response.analysis):
-                response = plug.process(response)
+            if plug.check(response, response.activities):
+                activity = plug.activity(response.parameters)
+                response = plug.process(response, activity)
         return response
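The candidate lookup above is essentially a lazily built index keyed by `(source model, target model)`. The core idea in isolation, with a hypothetical `conversions` attribute standing in for `onyx:doesConversion`:

```python
def build_index(converters):
    """converters: objects with a .conversions list of (from_model, to_model) pairs."""
    index = {}
    for conv in converters:
        for pair in conv.conversions:
            index.setdefault(pair, []).append(conv)
    return index

def first_candidate(index, from_model, to_model):
    # Return the first plugin that can perform the requested conversion, if any.
    matches = index.get((from_model, to_model), [])
    return matches[0] if matches else None
```

Building the index once and invalidating it when a plugin is added (as `add_plugin` does by resetting `_conversion_candidates`) avoids re-scanning every conversion plugin on every emotion.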
     def _get_datasets(self, request):
@@ -286,13 +310,16 @@ class Senpy(object):
         results = AggregatedEvaluation()
         results.parameters = params
         datasets = self._get_datasets(results)
-        plugins = []
-        for plugname in params.algorithm:
-            plugins = self.get_plugin(plugname)
+        plugs = []
+        for plugname in params['algorithm']:
+            plugs = self.get_plugins(plugname)
+        for plug in plugs:
+            if not isinstance(plug, plugins.Evaluable):
+                raise Exception('Plugin {} can not be evaluated', plug.id)

-        for eval in plugins.evaluate(plugins, datasets):
+        for eval in plugins.evaluate(plugs, datasets):
             results.evaluations.append(eval)
-        if 'with_parameters' not in results.parameters:
+        if 'with-parameters' not in results.parameters:
             del results.parameters
         logger.debug("Returning evaluation result: {}".format(results))
         return results
@@ -300,8 +327,7 @@ class Senpy(object):
     @property
     def default_plugin(self):
         if not self._default or not self._default.is_activated:
-            candidates = self.plugins(
-                plugin_type='analysisPlugin', is_activated=True)
+            candidates = self.analysis_plugins()
             if len(candidates) > 0:
                 self._default = candidates[0]
             else:
@@ -336,22 +362,15 @@ class Senpy(object):
             ps.append(self.deactivate_plugin(plug, sync=sync))
         return ps

-    def _set_active(self, plugin, active=True, *args, **kwargs):
-        ''' We're using a variable in the plugin itself to activate/deactivate plugins.\
-        Note that plugins may activate themselves by setting this variable.
-        '''
-        plugin.is_activated = active
-
     def _activate(self, plugin):
         success = False
         with plugin._lock:
             if plugin.is_activated:
                 return
-            plugin.activate()
+            plugin._activate()
             msg = "Plugin activated: {}".format(plugin.name)
             logger.info(msg)
-            success = True
-            self._set_active(plugin, success)
+            success = plugin.is_activated
         return success

     def activate_plugin(self, plugin_name, sync=True):
@@ -375,7 +394,7 @@ class Senpy(object):
         with plugin._lock:
             if not plugin.is_activated:
                 return
-            plugin.deactivate()
+            plugin._deactivate()
         logger.info("Plugin deactivated: {}".format(plugin.name))

     def deactivate_plugin(self, plugin_name, sync=True):
@@ -385,13 +404,11 @@ class Senpy(object):
                 message="Plugin not found: {}".format(plugin_name), status=404)
         plugin = self._plugins[plugin_name]

-        self._set_active(plugin, False)
-
         if sync or not getattr(plugin, 'async', True) or not getattr(
                 plugin, 'sync', False):
-            self._deactivate(plugin)
+            plugin._deactivate()
         else:
-            th = Thread(target=partial(self._deactivate, plugin))
+            th = Thread(target=plugin.deactivate)
             th.start()
             return th
@@ -16,16 +16,16 @@ def raise_exception(*args, **kwargs):
 try:
     gsitk_distro = get_distribution("gsitk")
     GSITK_VERSION = parse_version(gsitk_distro.version)
-    GSITK_AVAILABLE = GSITK_VERSION > parse_version("0.1.9.1")  # Earlier versions have a bug
-except DistributionNotFound:
-    GSITK_AVAILABLE = False
-    GSITK_VERSION = ()

-if GSITK_AVAILABLE:
     from gsitk.datasets.datasets import DatasetManager
-    from gsitk.evaluation.evaluation import Evaluation as Eval
+    from gsitk.evaluation.evaluation import Evaluation as Eval  # noqa: F401
+    from gsitk.evaluation.evaluation import EvalPipeline  # noqa: F401
     from sklearn.pipeline import Pipeline
     modules = locals()
-else:
+    GSITK_AVAILABLE = True
+except (DistributionNotFound, ImportError) as err:
+    logger.debug('Error importing GSITK: {}'.format(err))
     logger.warning(IMPORTMSG)
+    GSITK_AVAILABLE = False
+    GSITK_VERSION = ()
     DatasetManager = Eval = Pipeline = raise_exception
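The restructured block above is a standard optional-dependency guard: try the import once, record availability, and replace the missing names with a stub that raises a clear error on use. The same pattern in isolation, with generic names instead of senpy's:

```python
import logging

logger = logging.getLogger(__name__)

def _missing_dependency(*args, **kwargs):
    raise RuntimeError('Install the optional dependency to use this feature')

try:
    from sklearn.pipeline import Pipeline  # any optional dependency works here
    DEP_AVAILABLE = True
except ImportError as err:
    logger.debug('Optional dependency not available: %s', err)
    DEP_AVAILABLE = False
    Pipeline = _missing_dependency  # calling it later raises a clear error
```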
@@ -34,6 +34,7 @@ class BaseMeta(ABCMeta):
     def __new__(mcs, name, bases, attrs, **kwargs):
         register_afterwards = False
         defaults = {}
+        aliases = {}

         attrs = mcs.expand_with_schema(name, attrs)
         if 'schema' in attrs:
@@ -41,17 +42,21 @@ class BaseMeta(ABCMeta):
         for base in bases:
             if hasattr(base, '_defaults'):
                 defaults.update(getattr(base, '_defaults'))
+            if hasattr(base, '_aliases'):
+                aliases.update(getattr(base, '_aliases'))

         info, rest = mcs.split_attrs(attrs)

         for i in list(info.keys()):
             if isinstance(info[i], _Alias):
-                fget, fset, fdel = make_property(info[i].indict)
-                rest[i] = property(fget=fget, fset=fset, fdel=fdel)
+                aliases[i] = info[i].indict
+                if info[i].default is not None:
+                    defaults[i] = info[i].default
             else:
                 defaults[i] = info[i]

         rest['_defaults'] = defaults
+        rest['_aliases'] = aliases

         cls = super(BaseMeta, mcs).__new__(mcs, name, tuple(bases), rest)

@@ -86,7 +91,7 @@ class BaseMeta(ABCMeta):

         resolver = jsonschema.RefResolver(schema_path, schema)
         if '@type' not in attrs:
-            attrs['@type'] = "".join((name[0].lower(), name[1:]))
+            attrs['@type'] = name
         attrs['_schema_file'] = schema_file
         attrs['schema'] = schema
         attrs['_validator'] = jsonschema.Draft4Validator(schema, resolver=resolver)
@@ -140,9 +145,11 @@ class BaseMeta(ABCMeta):
         return temp


-def make_property(key):
+def make_property(key, default=None):

     def fget(self):
+        if default:
+            return self.get(key, copy.copy(default))
         return self[key]

     def fdel(self):
@@ -168,7 +175,7 @@ class CustomDict(MutableMapping, object):
     '''

     _defaults = {}
-    _map_attr_key = {'id': '@id'}
+    _aliases = {'id': '@id'}

     def __init__(self, *args, **kwargs):
         super(CustomDict, self).__init__()
@@ -177,13 +184,13 @@ class CustomDict(MutableMapping, object):
         for arg in args:
             self.update(arg)
         for k, v in kwargs.items():
-            self[self._attr_to_key(k)] = v
+            self[k] = v
         return self

-    def serializable(self):
+    def serializable(self, **kwargs):
         def ser_or_down(item):
             if hasattr(item, 'serializable'):
-                return item.serializable()
+                return item.serializable(**kwargs)
             elif isinstance(item, dict):
                 temp = dict()
                 for kp in item:
@@ -195,10 +202,9 @@ class CustomDict(MutableMapping, object):
             else:
                 return item

-        return ser_or_down(self.as_dict())
+        return ser_or_down(self.as_dict(**kwargs))

     def __getitem__(self, key):
-        key = self._key_to_attr(key)
         return self.__dict__[key]

     def __setitem__(self, key, value):
@@ -206,9 +212,23 @@ class CustomDict(MutableMapping, object):
         key = self._key_to_attr(key)
         return setattr(self, key, value)

-    def as_dict(self):
-        return {self._attr_to_key(k): v for k, v in self.__dict__.items()
-                if not self._internal_key(k)}
+    def __delitem__(self, key):
+        key = self._key_to_attr(key)
+        del self.__dict__[key]
+
+    def as_dict(self, verbose=True, aliases=False):
+        attrs = self.__dict__.keys()
+        if not verbose and hasattr(self, '_terse_keys'):
+            attrs = self._terse_keys + ['@type', '@id']
+        res = {k: getattr(self, k) for k in attrs
+               if not self._internal_key(k) and hasattr(self, k)}
+        if not aliases:
+            return res
+        for k, ok in self._aliases.items():
+            if ok in res:
+                res[k] = getattr(res, ok)
+                del res[ok]
+        return res
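The `aliases` branch of `as_dict` above maps full JSON-LD properties back to their short attribute names using the `_aliases` table collected by the metaclass. The core transformation, sketched on a plain dictionary with an alias table shaped like the one senpy builds (the example keys are illustrative):

```python
def apply_aliases(data, alias_table):
    """Replace full property names with their short aliases."""
    out = dict(data)
    for short, full in alias_table.items():
        if full in out:
            out[short] = out.pop(full)
    return out

doc = {'@id': 'prefix:Entry_1', 'nif:isString': 'hello'}
aliases = {'id': '@id', 'text': 'nif:isString'}
assert apply_aliases(doc, aliases) == {'id': 'prefix:Entry_1', 'text': 'hello'}
```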
     def __iter__(self):
         return (k for k in self.__dict__ if not self._internal_key(k))
@@ -216,29 +236,38 @@ class CustomDict(MutableMapping, object):
     def __len__(self):
         return len(self.__dict__)

-    def __delitem__(self, key):
-        del self.__dict__[key]
-
     def update(self, other):
         for k, v in other.items():
             self[k] = v

     def _attr_to_key(self, key):
         key = key.replace("__", ":", 1)
-        key = self._map_attr_key.get(key, key)
+        key = self._aliases.get(key, key)
         return key

     def _key_to_attr(self, key):
         if self._internal_key(key):
             return key

+        if key in self._aliases:
+            key = self._aliases[key]
+        else:
             key = key.replace(":", "__", 1)
         return key

     def __getattr__(self, key):
-        try:
-            return self.__dict__[self._attr_to_key(key)]
-        except KeyError:
-            raise AttributeError
+        nkey = self._attr_to_key(key)
+        if nkey in self.__dict__:
+            return self.__dict__[nkey]
+        elif nkey == key:
+            raise AttributeError("Key not found: {}".format(key))
+        return getattr(self, nkey)
+
+    def __setattr__(self, key, value):
+        super(CustomDict, self).__setattr__(self._attr_to_key(key), value)
+
+    def __delattr__(self, key):
+        super(CustomDict, self).__delattr__(self._attr_to_key(key))

     @staticmethod
     def _internal_key(key):
@@ -251,8 +280,8 @@ class CustomDict(MutableMapping, object):
         return json.dumps(self.serializable(), sort_keys=True, indent=4)


-_Alias = namedtuple('Alias', 'indict')
+_Alias = namedtuple('Alias', ['indict', 'default'])


-def alias(key):
-    return _Alias(key)
+def alias(key, default=None):
+    return _Alias(key, default)
senpy/models.py
@@ -12,6 +12,8 @@ standard_library.install_aliases()
 from future.utils import with_metaclass
 from past.builtins import basestring

+from jinja2 import Environment, BaseLoader
+
 import time
 import copy
 import json
@@ -21,6 +23,7 @@ from flask import Response as FlaskResponse
 from pyld import jsonld

 import logging
+import jmespath

 logging.getLogger('rdflib').setLevel(logging.WARN)
 logger = logging.getLogger(__name__)
@@ -31,8 +34,9 @@ from rdflib import Graph
 from .meta import BaseMeta, CustomDict, alias

 DEFINITIONS_FILE = 'definitions.json'
-CONTEXT_PATH = os.path.join(
-    os.path.dirname(os.path.realpath(__file__)), 'schemas', 'context.jsonld')
+CONTEXT_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)),
+                            'schemas',
+                            'context.jsonld')


 def get_schema_path(schema_file, absolute=False):
@@ -132,13 +136,10 @@ class BaseModel(with_metaclass(BaseMeta, CustomDict)):
         if auto_id:
             self.id

-        if '@type' not in self:
-            logger.warning('Created an instance of an unknown model')
-
     @property
     def id(self):
         if '@id' not in self:
-            self['@id'] = '_:{}_{}'.format(type(self).__name__, time.time())
+            self['@id'] = 'prefix:{}_{}'.format(type(self).__name__, time.time())
         return self['@id']

     @id.setter
@@ -174,24 +175,33 @@ class BaseModel(with_metaclass(BaseMeta, CustomDict)):
                              headers=headers,
                              mimetype=mimetype)

-    def serialize(self, format='json-ld', with_mime=False, **kwargs):
-        js = self.jsonld(**kwargs)
+    def serialize(self, format='json-ld', with_mime=False,
+                  template=None, prefix=None, fields=None, **kwargs):
+        js = self.jsonld(prefix=prefix, **kwargs)
+        if template is not None:
+            rtemplate = Environment(loader=BaseLoader).from_string(template)
+            content = rtemplate.render(**self)
+            mimetype = 'text'
+        elif fields is not None:
+            # Emulate field selection by constructing a template
+            content = json.dumps(jmespath.search(fields, js))
+            mimetype = 'text'
+        elif format == 'json-ld':
             content = json.dumps(js, indent=2, sort_keys=True)
-        if format == 'json-ld':
             mimetype = "application/json"
         elif format in ['turtle', 'ntriples']:
+            content = json.dumps(js, indent=2, sort_keys=True)
             logger.debug(js)
-            base = kwargs.get('prefix')
+            context = [self._context, {'prefix': prefix, '@base': prefix}]
             g = Graph().parse(
                 data=content,
                 format='json-ld',
-                base=base,
-                context=[self._context,
-                         {'@base': base}])
+                prefix=prefix,
+                context=context)
             logger.debug(
                 'Parsing with prefix: {}'.format(kwargs.get('prefix')))
             content = g.serialize(format=format,
-                                  base=base).decode('utf-8')
+                                  prefix=prefix).decode('utf-8')
             mimetype = 'text/{}'.format(format)
         else:
             raise Error('Unknown outformat: {}'.format(format))
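The new `template` and `fields` arguments delegate to Jinja2 and JMESPath respectively. What they enable, sketched with the same libraries on a plain dictionary rather than a senpy model (the document structure here is invented for the example):

```python
import jmespath
from jinja2 import Environment, BaseLoader

doc = {'entries': [{'text': 'good', 'polarity': 'marl:Positive'}]}

# `fields` style: select part of the result with a JMESPath expression.
selected = jmespath.search('entries[0].polarity', doc)
assert selected == 'marl:Positive'

# `template` style: render the result through a Jinja2 template string.
template = Environment(loader=BaseLoader()).from_string(
    '{{ entries[0].text }} -> {{ entries[0].polarity }}')
assert template.render(**doc) == 'good -> marl:Positive'
```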
@@ -204,14 +214,25 @@ class BaseModel(with_metaclass(BaseMeta, CustomDict)):
                with_context=False,
                context_uri=None,
                prefix=None,
-               expanded=False):
+               base=None,
+               expanded=False,
+               **kwargs):

-        result = self.serializable()
+        result = self.serializable(**kwargs)

         if expanded:
             result = jsonld.expand(
-                result, options={'base': prefix,
-                                 'expandContext': self._context})[0]
+                result,
+                options={
+                    'expandContext': [
+                        self._context,
+                        {
+                            'prefix': prefix,
+                            'endpoint': prefix
+                        }
+                    ]
+                }
+            )[0]
         if not with_context:
             try:
                 del result['@context']
@@ -239,7 +260,7 @@ def subtypes():
     return BaseMeta._subtypes


-def from_dict(indict, cls=None):
+def from_dict(indict, cls=None, warn=True):
     if not cls:
         target = indict.get('@type', None)
         cls = BaseModel
@@ -247,6 +268,10 @@ def from_dict(indict, cls=None):
         cls = subtypes()[target]
     except KeyError:
         pass

+    if cls == BaseModel and warn:
+        logger.warning('Created an instance of an unknown model')
+
     outdict = dict()
     for k, v in indict.items():
         if k == '@context':
@@ -266,22 +291,24 @@ def from_string(string, **kwargs):
     return from_dict(json.loads(string), **kwargs)


-def from_json(injson):
+def from_json(injson, **kwargs):
     indict = json.loads(injson)
-    return from_dict(indict)
+    return from_dict(indict, **kwargs)


 class Entry(BaseModel):
     schema = 'entry'

     text = alias('nif:isString')
+    sentiments = alias('marl:hasOpinion', [])
+    emotions = alias('onyx:hasEmotionSet', [])


 class Sentiment(BaseModel):
     schema = 'sentiment'

     polarity = alias('marl:hasPolarity')
-    polarityValue = alias('marl:hasPolarityValue')
+    polarityValue = alias('marl:polarityValue')

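With the aliases declared on `Entry` and `Sentiment` above, the short attribute and the full JSON-LD property are two views of the same value. A sketch of the intended behaviour (illustrative only, not a test taken from the repository):

```python
from senpy.models import Entry, Sentiment

entry = Entry(text='Senpy is awesome')
print(entry['nif:isString'])     # expected: 'Senpy is awesome'

entry.sentiments.append(Sentiment(polarity='marl:Positive'))
print(entry['marl:hasOpinion'])  # expected: the sentiment appended above
```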
 class Error(BaseModel, Exception):
@@ -301,59 +328,121 @@ class Error(BaseModel, Exception):
         return Exception.__hash__(self)


-# Add the remaining schemas programmatically
-
-def _class_from_schema(name, schema=None, schema_file=None, base_classes=None):
-    base_classes = base_classes or []
-    base_classes.append(BaseModel)
-    attrs = {}
-    if schema:
-        attrs['schema'] = schema
-    elif schema_file:
-        attrs['schema_file'] = schema_file
-    else:
-        attrs['schema'] = name
-    name = "".join((name[0].upper(), name[1:]))
-    return BaseMeta(name, base_classes, attrs)
-
-
-def _add_class_from_schema(*args, **kwargs):
-    generatedClass = _class_from_schema(*args, **kwargs)
-    globals()[generatedClass.__name__] = generatedClass
-    del generatedClass
-
-
-for i in [
-    'aggregatedEvaluation',
-    'dataset',
-    'datasets',
-    'emotion',
-    'emotionConversion',
-    'emotionConversionPlugin',
-    'emotionAnalysis',
-    'emotionModel',
-    'emotionPlugin',
-    'emotionSet',
-    'evaluation',
-    'entity',
-    'help',
-    'metric',
-    'parameter',
-    'plugins',
-    'response',
-    'results',
-    'sentimentPlugin',
-    'suggestion',
-    'topic',
-]:
-    _add_class_from_schema(i)
+class AggregatedEvaluation(BaseModel):
+    schema = 'aggregatedEvaluation'
+
+    evaluations = alias('senpy:evaluations', [])
+
+
+class Dataset(BaseModel):
+    schema = 'dataset'
+
+
+class Datasets(BaseModel):
+    schema = 'datasets'
+
+    datasets = []
+
+
+class Emotion(BaseModel):
+    schema = 'emotion'
+
+
+class EmotionConversion(BaseModel):
+    schema = 'emotionConversion'
+
+
+class EmotionConversionPlugin(BaseModel):
+    schema = 'emotionConversionPlugin'
+
+
+class EmotionAnalysis(BaseModel):
+    schema = 'emotionAnalysis'
+
+
+class EmotionModel(BaseModel):
+    schema = 'emotionModel'
+    onyx__hasEmotionCategory = []
+
+
+class EmotionPlugin(BaseModel):
+    schema = 'emotionPlugin'
+
+
+class EmotionSet(BaseModel):
+    schema = 'emotionSet'
+
+    onyx__hasEmotion = []
+
+
+class Evaluation(BaseModel):
+    schema = 'evaluation'
+
+    metrics = alias('senpy:metrics', [])
+
+
+class Entity(BaseModel):
+    schema = 'entity'
+
+
+class Help(BaseModel):
+    schema = 'help'
+
+
+class Metric(BaseModel):
+    schema = 'metric'
+
+
+class Parameter(BaseModel):
+    schema = 'parameter'
+
+
+class Plugins(BaseModel):
+    schema = 'plugins'
+
+    plugins = []
+
+
+class Response(BaseModel):
+    schema = 'response'
+
+
+class Results(BaseModel):
+    schema = 'results'
+
+    _terse_keys = ['entries', ]
+
+    activities = []
+    entries = []
+
+    def activity(self, id):
+        for i in self.activities:
+            if i.id == id:
+                return i
+        return None
+
+
+class SentimentPlugin(BaseModel):
+    schema = 'sentimentPlugin'
+
+
+class Suggestion(BaseModel):
+    schema = 'suggestion'
+
+
+class Topic(BaseModel):
+    schema = 'topic'


 class Analysis(BaseModel):
+    '''
+    A prov:Activity that results of executing a Plugin on an entry with a set of
+    parameters.
+    '''
     schema = 'analysis'

-    parameters = alias('prov:used')
+    parameters = alias('prov:used', [])
+    algorithm = alias('prov:wasAssociatedWith', [])

     @property
     def params(self):
@@ -373,9 +462,11 @@ class Analysis(BaseModel):
         else:
             self.parameters.append(Parameter(name=k, value=v))  # noqa: F821

-    @property
-    def algorithm(self):
-        return self['prov:wasAssociatedWith']
+    def param(self, key, default=None):
+        for param in self.parameters:
+            if param['name'] == key:
+                return param['value']
+        return default

     @property
     def plugin(self):
@@ -387,15 +478,39 @@ class Analysis(BaseModel):
         self['prov:wasAssociatedWith'] = value.id

     def run(self, request):
-        return self.plugin.process(request, self.params)
+        return self.plugin.process(request, self)


 class Plugin(BaseModel):
     schema = 'plugin'
+    extra_params = {}

-    def activity(self, parameters):
-        '''Generate a prov:Activity from this plugin and the '''
+    def activity(self, parameters=None):
+        '''Generate an Analysis (prov:Activity) from this plugin and the given parameters'''
         a = Analysis()
         a.plugin = self
+        if parameters:
             a.params = parameters
         return a


+# More classes could be added programmatically
+
+def _class_from_schema(name, schema=None, schema_file=None, base_classes=None):
+    base_classes = base_classes or []
+    base_classes.append(BaseModel)
+    attrs = {}
+    if schema:
+        attrs['schema'] = schema
+    elif schema_file:
+        attrs['schema_file'] = schema_file
+    else:
+        attrs['schema'] = name
+    name = "".join((name[0].upper(), name[1:]))
+    return BaseMeta(name, base_classes, attrs)
+
+
+def _add_class_from_schema(*args, **kwargs):
+    generatedClass = _class_from_schema(*args, **kwargs)
+    globals()[generatedClass.__name__] = generatedClass
+    del generatedClass
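The `Analysis` model now carries its own parameters, and `Plugin.activity()` is the entry point for creating one. A sketch of how the two helpers above fit together (the parameter name is invented for the example):

```python
from senpy.models import Analysis

activity = Analysis()
activity.params = {'language': 'en'}            # stored as prov:used Parameters
print(activity.param('language', default='es'))  # expected: 'en'
print(activity.param('missing', default=None))   # expected: None
```

In practice the activity is usually created through a plugin, as in `activity = my_plugin.activity({'language': 'en'})`, so that `prov:wasAssociatedWith` points back to the plugin.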
@@ -1,3 +1,5 @@
+#!/usr/local/bin/python
+# -*- coding: utf-8 -*-
 from future import standard_library
 standard_library.install_aliases()

@@ -17,7 +19,12 @@ import subprocess
 import importlib
 import yaml
 import threading
+import multiprocessing
+import pkg_resources
 from nltk import download
+from textwrap import dedent
+from sklearn.base import TransformerMixin, BaseEstimator
+from itertools import product

 from .. import models, utils
 from .. import api
@@ -31,17 +38,19 @@ class PluginMeta(models.BaseMeta):
     _classes = {}

     def __new__(mcs, name, bases, attrs, **kwargs):
-        plugin_type = []
-        if hasattr(bases[0], 'plugin_type'):
-            plugin_type += bases[0].plugin_type
-        plugin_type.append(name)
-        alias = attrs.get('name', name)
-        attrs['plugin_type'] = plugin_type
+        plugin_type = set()
+        for base in bases:
+            if hasattr(base, '_plugin_type'):
+                plugin_type |= base._plugin_type
+        plugin_type.add(name)
+        alias = attrs.get('name', name).lower()
+        attrs['_plugin_type'] = plugin_type
+        logger.debug('Adding new plugin class', name, bases, attrs, plugin_type)
         attrs['name'] = alias
         if 'description' not in attrs:
             doc = attrs.get('__doc__', None)
             if doc:
-                attrs['description'] = doc
+                attrs['description'] = dedent(doc)
             else:
                 logger.warning(
                     ('Plugin {} does not have a description. '
@@ -77,6 +86,9 @@ class Plugin(with_metaclass(PluginMeta, models.Plugin)):

     '''

+    _terse_keys = ['name', '@id', '@type', 'author', 'description',
+                   'extra_params', 'is_activated', 'url', 'version']
+
     def __init__(self, info=None, data_folder=None, **kwargs):
         """
         Provides a canonical name for plugins and serves as base for other
@@ -126,33 +138,42 @@ class Plugin(with_metaclass(PluginMeta, models.Plugin)):
     def get_folder(self):
         return os.path.dirname(inspect.getfile(self.__class__))

+    def _activate(self):
+        self.activate()
+        self.is_activated = True
+
+    def _deactivate(self):
+        self.is_activated = False
+        self.deactivate()
+
     def activate(self):
         pass

     def deactivate(self):
         pass

-    def process(self, request, parameters, **kwargs):
+    def process(self, request, activity, **kwargs):
         """
         An implemented plugin should override this method.
-        Here, we assume that a process_entries method exists."""
+        Here, we assume that a process_entries method exists.
+        """
         newentries = list(
-            self.process_entries(request.entries, parameters))
+            self.process_entries(request.entries, activity))
         request.entries = newentries
         return request

-    def process_entries(self, entries, parameters):
+    def process_entries(self, entries, activity):
         for entry in entries:
             self.log.debug('Processing entry with plugin {}: {}'.format(
                 self, entry))
-            results = self.process_entry(entry, parameters)
+            results = self.process_entry(entry, activity)
             if inspect.isgenerator(results):
                 for result in results:
                     yield result
             else:
                 yield results

-    def process_entry(self, entry, parameters):
+    def process_entry(self, entry, activity):
         """
         This base method is here to adapt plugins which only
         implement the *process* function.
@@ -173,9 +194,12 @@ class Plugin(with_metaclass(PluginMeta, models.Plugin)):
         test_cases = self.test_cases
         for case in test_cases:
             try:
+                fmt = 'case: {}'.format(case.get('name', case))
+                if 'name' in case:
+                    self.log.info('Test case: {}'.format(case['name']))
+                self.log.debug('Test case:\n\t{}'.format(
+                    pprint.pformat(fmt)))
                 self.test_case(case)
-                self.log.debug('Test case passed:\n{}'.format(
-                    pprint.pformat(case)))
             except Exception as ex:
                 self.log.warning('Test case failed:\n{}'.format(
                     pprint.pformat(case)))
@@ -200,7 +224,9 @@ class Plugin(with_metaclass(PluginMeta, models.Plugin)):
             entry,
         ]

-        method = partial(self.process, request, parameters)
+        activity = self.activity(parameters)
+
+        method = partial(self.process, request, activity)

         if mock:
             res = method()
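The plugin API now passes an `activity` (an `Analysis`) instead of a bare parameter dict through `process`, `process_entries` and `process_entry`. A minimal sketch of a plugin written against the new signatures (the class, author and annotation are invented for illustration):

```python
from senpy import models
from senpy.plugins import AnalysisPlugin

class EchoPolarity(AnalysisPlugin):
    '''Marks every entry as neutral; only meant to show the new signature.'''
    author = 'example'
    version = 0

    def process_entry(self, entry, activity):
        # Parameters now come from the activity, e.g. activity.param('language')
        entry.sentiments.append(models.Sentiment(polarity='marl:Neutral'))
        yield entry
```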
@ -243,34 +269,41 @@ class Plugin(with_metaclass(PluginMeta, models.Plugin)):
|
|||||||
SenpyPlugin = Plugin
|
SenpyPlugin = Plugin
|
||||||
|
|
||||||
|
|
||||||
class Analysis(Plugin):
|
class Analyser(Plugin):
|
||||||
'''
|
'''
|
||||||
A subclass of Plugin that analyses text and provides an annotation.
|
A subclass of Plugin that analyses text and provides an annotation.
|
||||||
'''
|
'''
|
||||||
|
|
||||||
def analyse(self, request, parameters):
|
# Deprecated
|
||||||
return super(Analysis, self).process(request, parameters)
|
def analyse(self, request, activity):
|
||||||
|
return super(Analyser, self).process(request, activity)
|
||||||
|
|
||||||
def analyse_entries(self, entries, parameters):
|
# Deprecated
|
||||||
for i in super(Analysis, self).process_entries(entries, parameters):
|
def analyse_entries(self, entries, activity):
|
||||||
|
for i in super(Analyser, self).process_entries(entries, activity):
|
||||||
yield i
|
yield i
|
||||||
|
|
||||||
def process(self, request, parameters, **kwargs):
|
def process(self, request, activity, **kwargs):
|
||||||
return self.analyse(request, parameters)
|
return self.analyse(request, activity)
|
||||||
|
|
||||||
def process_entries(self, entries, parameters):
|
def process_entries(self, entries, activity):
|
||||||
for i in self.analyse_entries(entries, parameters):
|
for i in self.analyse_entries(entries, activity):
|
||||||
yield i
|
yield i
|
||||||
|
|
||||||
def process_entry(self, entry, parameters, **kwargs):
|
def process_entry(self, entry, activity, **kwargs):
|
||||||
if hasattr(self, 'analyse_entry'):
|
if hasattr(self, 'analyse_entry'):
|
||||||
for i in self.analyse_entry(entry, parameters):
|
for i in self.analyse_entry(entry, activity):
|
||||||
yield i
|
yield i
|
||||||
else:
|
else:
|
||||||
super(Analysis, self).process_entry(entry, parameters, **kwargs)
|
super(Analyser, self).process_entry(entry, activity, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
AnalysisPlugin = Analysis
|
AnalysisPlugin = Analyser
|
||||||
|
|
||||||
|
|
||||||
|
class Transformation(AnalysisPlugin):
|
||||||
|
'''Empty'''
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
class Conversion(Plugin):
|
class Conversion(Plugin):
|
||||||
@ -297,32 +330,79 @@ class Conversion(Plugin):
|
|||||||
ConversionPlugin = Conversion
|
ConversionPlugin = Conversion
|
||||||
|
|
||||||
|
|
||||||
class SentimentPlugin(Analysis, models.SentimentPlugin):
|
class Evaluable(Plugin):
|
||||||
|
'''
|
||||||
|
Common class for plugins that can be evaluated with GSITK.
|
||||||
|
|
||||||
|
They should implement the methods below.
|
||||||
|
'''
|
||||||
|
|
||||||
|
def as_pipe(self):
|
||||||
|
raise Exception('Implement the as_pipe function')
|
||||||
|
|
||||||
|
def evaluate_func(self, X, activity=None):
|
||||||
|
raise Exception('Implement the evaluate_func function')
|
||||||
|
|
||||||
|
|
||||||
|
class SentimentPlugin(Analyser, Evaluable, models.SentimentPlugin):
|
||||||
'''
|
'''
|
||||||
Sentiment plugins provide sentiment annotation (using Marl)
|
Sentiment plugins provide sentiment annotation (using Marl)
|
||||||
'''
|
'''
|
||||||
minPolarityValue = 0
|
minPolarityValue = 0
|
||||||
maxPolarityValue = 1
|
maxPolarityValue = 1
|
||||||
|
|
||||||
|
_terse_keys = Analyser._terse_keys + ['minPolarityValue', 'maxPolarityValue']
|
||||||
|
|
||||||
def test_case(self, case):
|
def test_case(self, case):
|
||||||
if 'polarity' in case:
|
if 'polarity' in case:
|
||||||
expected = case.get('expected', {})
|
expected = case.get('expected', {})
|
||||||
s = models.Sentiment(_auto_id=False)
|
s = models.Sentiment(_auto_id=False)
|
||||||
s.marl__hasPolarity = case['polarity']
|
s.marl__hasPolarity = case['polarity']
|
||||||
if 'sentiments' not in expected:
|
if 'marl:hasOpinion' not in expected:
|
||||||
expected['sentiments'] = []
|
expected['marl:hasOpinion'] = []
|
||||||
expected['sentiments'].append(s)
|
expected['marl:hasOpinion'].append(s)
|
||||||
case['expected'] = expected
|
case['expected'] = expected
|
||||||
super(SentimentPlugin, self).test_case(case)
|
super(SentimentPlugin, self).test_case(case)
|
||||||
|
|
||||||
|
def normalize(self, value, minValue, maxValue):
|
||||||
|
nv = minValue + (value - self.minPolarityValue) * (
|
||||||
|
self.maxPolarityValue - self.minPolarityValue) / (maxValue - minValue)
|
||||||
|
return nv
|
||||||
|
|
||||||
class EmotionPlugin(Analysis, models.EmotionPlugin):
|
def as_pipe(self):
|
||||||
|
pipe = gsitk_compat.Pipeline([('senpy-plugin', ScikitWrapper(self))])
|
||||||
|
pipe.name = self.id
|
||||||
|
return pipe
|
||||||
|
|
||||||
|
def evaluate_func(self, X, activity=None):
|
||||||
|
if activity is None:
|
||||||
|
parameters = api.parse_params({},
|
||||||
|
self.extra_params)
|
||||||
|
activity = self.activity(parameters)
|
||||||
|
entries = []
|
||||||
|
for feat in X:
|
||||||
|
entries.append(models.Entry(nif__isString=feat[0]))
|
||||||
|
labels = []
|
||||||
|
for e in self.process_entries(entries, activity):
|
||||||
|
sent = e.sentiments[0].polarity
|
||||||
|
label = -1
|
||||||
|
if sent == 'marl:Positive':
|
||||||
|
label = 1
|
||||||
|
elif sent == 'marl:Negative':
|
||||||
|
label = -1
|
||||||
|
labels.append(label)
|
||||||
|
return labels
|
||||||
|
|
||||||
|
|
||||||
|
class EmotionPlugin(Analyser, models.EmotionPlugin):
|
||||||
'''
|
'''
|
||||||
Emotion plugins provide emotion annotation (using Onyx)
|
Emotion plugins provide emotion annotation (using Onyx)
|
||||||
'''
|
'''
|
||||||
minEmotionValue = 0
|
minEmotionValue = 0
|
||||||
maxEmotionValue = 1
|
maxEmotionValue = 1
|
||||||
|
|
||||||
|
_terse_keys = Analyser._terse_keys + ['minEmotionValue', 'maxEmotionValue']
|
||||||
|
|
||||||
|
|
||||||
class EmotionConversion(Conversion):
|
class EmotionConversion(Conversion):
|
||||||
'''
|
'''
|
||||||
@@ -345,69 +425,67 @@ EmotionConversionPlugin = EmotionConversion


 class PostProcessing(Plugin):
+    '''
+    A plugin that converts the output of other plugins (post-processing).
+    '''
     def check(self, request, plugins):
         '''Should this plugin be run for this request?'''
         return False


-class Box(AnalysisPlugin):
+class Box(Analyser):
     '''
     Black box plugins delegate analysis to a function.
-    The flow is like so:
+    The flow is like this:

     .. code-block::

-        entry --> input() --> predict_one() --> output() --> entry'
+        entries --> to_features() --> predict_many() --> to_entry() --> entries'


-    In other words: their ``input`` method convers a query (entry and a set of parameters) into
-    the input to the box method. The ``output`` method convers the results given by the box into
-    an entry that senpy can handle.
+    In other words: their ``to_features`` method converts a query (entry and a set of parameters)
+    into the input to the `predict_one` method, which only uses an array of features.
+    The ``to_entry`` method converts the results given by the box into an entry that senpy can
+    handle.
     '''

-    def input(self, entry, params=None):
+    def to_features(self, entry, activity=None):
         '''Transforms a query (entry+param) into an input for the black box'''
         return entry

-    def output(self, output, entry=None, params=None):
+    def to_entry(self, features, entry=None, activity=None):
         '''Transforms the results of the black box into an entry'''
-        return output
+        return entry

-    def predict_one(self, input):
+    def predict_one(self, features, activity=None):
         raise NotImplementedError(
             'You should define the behavior of this plugin')

-    def process_entries(self, entries, params):
+    def predict_many(self, features, activity=None):
+        results = []
+        for feat in features:
+            results.append(self.predict_one(features=feat, activity=activity))
+        return results
+
+    def process_entry(self, entry, activity):
+        for i in self.process_entries([entry], activity):
+            yield i
+
+    def process_entries(self, entries, activity):
+        features = []
         for entry in entries:
-            input = self.input(entry=entry, params=params)
-            results = self.predict_one(input=input)
-            yield self.output(output=results, entry=entry, params=params)
+            features.append(self.to_features(entry=entry, activity=activity))
+        results = self.predict_many(features=features, activity=activity)

-    def fit(self, X=None, y=None):
-        return self
+        for (result, entry) in zip(results, entries):
+            yield self.to_entry(features=result, entry=entry, activity=activity)

-    def transform(self, X):
-        return [self.predict_one(x) for x in X]
-
-    def predict(self, X):
-        return self.transform(X)
-
-    def fit_transform(self, X, y):
-        self.fit(X, y)
-        return self.transform(X)
-
-    def as_pipe(self):
-        pipe = gsitk_compat.Pipeline([('plugin', self)])
-        pipe.name = self.name
-        return pipe

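The flow described in the ``Box`` docstring above suggests a very small subclass. The following is a minimal sketch under that new interface; the plugin name, author and the ``ex:wordCount`` key are invented for illustration and are not part of the commit:

```python
from senpy.plugins import Box


class WordCountBox(Box):
    '''Hypothetical example: annotate each entry with its word count.'''
    author = '@example'
    version = '0.1'

    def to_features(self, entry, activity=None):
        # Turn the entry into the feature array the black box expects
        return [entry['nif:isString']]

    def predict_one(self, features, activity=None):
        # The "black box" itself: here, just count words in the text
        return [len(features[0].split())]

    def to_entry(self, features, entry=None, activity=None):
        # Copy the prediction back into the entry
        entry['ex:wordCount'] = features[0]
        return entry
```

With this split, `process_entries` only has to glue the three steps together, which is what the base class shown above does.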
 class TextBox(Box):
     '''A black box plugin that takes only text as input'''

-    def input(self, entry, params):
-        entry = super(TextBox, self).input(entry, params)
-        return entry['nif:isString']
+    def to_features(self, entry, activity):
+        return [entry['nif:isString']]


 class SentimentBox(TextBox, SentimentPlugin):
@@ -415,17 +493,35 @@ class SentimentBox(TextBox, SentimentPlugin):
     A box plugin where the output is only a polarity label or a tuple (polarity, polarityValue)
     '''

-    def output(self, output, entry, **kwargs):
+    classes = ['marl:Positive', 'marl:Neutral', 'marl:Negative']
+    binary = True
+
+    def to_entry(self, features, entry, activity, **kwargs):
+
+        if len(features) != len(self.classes):
+            raise models.Error('The number of features ({}) does not match the classes '
+                               '(plugin.classes ({})'.format(len(features), len(self.classes)))
+
+        minValue = activity.param('marl:minPolarityValue', 0)
+        maxValue = activity.param('marl:minPolarityValue', 1)
+        activity['marl:minPolarityValue'] = minValue
+        activity['marl:maxPolarityValue'] = maxValue
+
+        for k, v in zip(self.classes, features):
             s = models.Sentiment()
-        try:
-            label, value = output
-        except ValueError:
-            label, value = output, None
-        s.prov(self)
-        s.polarity = label
-        if value is not None:
-            s.polarityValue = value
+            if self.binary:
+                if not v:  # Carry on if the value is 0
+                    continue
+                s['marl:hasPolarity'] = k
+            else:
+                if v is not None:
+                    s['marl:hasPolarity'] = k
+                    nv = self.normalize(v, minValue, maxValue)
+                    s['marl:polarityValue'] = nv
+            s.prov(activity)

             entry.sentiments.append(s)

         return entry

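Given the `classes`/`binary` contract shown above, a subclass only has to yield one value per class and `SentimentBox.to_entry` builds the `marl:hasOpinion` annotations. A hedged sketch, with a made-up rule-based backend:

```python
from senpy.plugins import SentimentBox


class HappySad(SentimentBox):
    '''Hypothetical rule-based sentiment box.'''
    author = '@example'
    version = '0.1'

    classes = ['marl:Positive', 'marl:Neutral', 'marl:Negative']
    binary = True  # yield one-hot vectors instead of graded values

    def predict_one(self, features, activity=None):
        text = features[0].lower()
        if 'happy' in text:
            return [1, 0, 0]   # positive
        if 'sad' in text:
            return [0, 0, 1]   # negative
        return [0, 1, 0]       # neutral
```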
@@ -434,14 +530,23 @@ class EmotionBox(TextBox, EmotionPlugin):
     A box plugin where the output is only an a tuple of emotion labels
     '''

-    def output(self, output, entry, **kwargs):
-        if not isinstance(output, list):
-            output = [output]
+    EMOTIONS = []
+    with_intensity = True
+
+    def to_entry(self, features, entry, activity, **kwargs):
         s = models.EmotionSet()
-        entry.emotions.append(s)
-        for label in output:
+        if len(features) != len(self.EMOTIONS):
+            raise Exception(('The number of classes in the plugin and the number of features '
+                             'do not match'))
+
+        for label, intensity in zip(self.EMOTIONS, features):
             e = models.Emotion(onyx__hasEmotionCategory=label)
-            s.append(e)
+            if self.with_intensity:
+                e.onyx__hasEmotionIntensity = intensity
+            s.onyx__hasEmotion.append(e)
+        s.prov(activity)
+        entry.emotions.append(s)
         return entry

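A short sketch of how `EMOTIONS` and `with_intensity` could be used, assuming the same interface as above; the plugin itself and its heuristic are invented, but the `emoml:big6*` categories are the ones already used elsewhere in the repository:

```python
from senpy.plugins import EmotionBox


class ExclamationEmotion(EmotionBox):
    '''Hypothetical emotion box: intensity grows with exclamation marks.'''
    author = '@example'
    version = '0.1'

    EMOTIONS = ['emoml:big6happiness', 'emoml:big6anger']
    with_intensity = True

    def predict_one(self, features, activity=None):
        excitement = min(1.0, features[0].count('!') / 3.0)
        # One intensity per entry of EMOTIONS, in the same order
        return [excitement, 1.0 - excitement]
```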
@@ -454,11 +559,15 @@ class MappingMixin(object):
     def mappings(self, value):
         self._mappings = value

-    def output(self, output, entry, params):
-        output = self.mappings.get(output, self.mappings.get(
-            'default', output))
-        return super(MappingMixin, self).output(
-            output=output, entry=entry, params=params)
+    def to_entry(self, features, entry, activity):
+        features = list(features)
+        for i, feat in enumerate(features):
+            features[i] = self.mappings.get(feat,
+                                            self.mappings.get('default',
+                                                              feat))
+        return super(MappingMixin, self).to_entry(features=features,
+                                                  entry=entry,
+                                                  activity=activity)


 class ShelfMixin(object):
@@ -505,7 +614,7 @@ class ShelfMixin(object):
             pickle.dump(self._sh, f)

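`MappingMixin.to_entry` rewrites each predicted value through `mappings` before the underlying box sees it. A hedged sketch of that mechanism, with an invented plugin whose backend emits its own label names (a real plugin would also override `to_entry` to store the mapped label):

```python
from senpy.plugins import MappingMixin, TextBox


class NormalizedLabels(MappingMixin, TextBox):
    '''Hypothetical box whose backend emits raw label names.'''
    author = '@example'
    version = '0.1'

    mappings = {'pos': 'marl:Positive',
                'neg': 'marl:Negative',
                'default': 'marl:Neutral'}

    def predict_one(self, features, activity=None):
        # Raw backend labels; MappingMixin translates them using `mappings`
        # before they reach to_entry
        return ['pos' if 'good' in features[0] else 'neg']
```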
-def pfilter(plugins, plugin_type=Analysis, **kwargs):
+def pfilter(plugins, plugin_type=Analyser, **kwargs):
     """ Filter plugins by different criteria """
     if isinstance(plugins, models.Plugins):
         plugins = plugins.plugins
@@ -526,6 +635,9 @@ def pfilter(plugins, plugin_type=Analyser, **kwargs):
     else:
         candidates = plugins

+    if 'name' in kwargs:
+        kwargs['name'] = kwargs['name'].lower()

     logger.debug(candidates)

     def matches(plug):
@@ -549,21 +661,40 @@ def load_module(name, root=None):

 def _log_subprocess_output(process):
     for line in iter(process.stdout.readline, b''):
-        logger.info('%r', line)
+        logger.info('%s', line.decode())
     for line in iter(process.stderr.readline, b''):
-        logger.error('%r', line)
+        logger.error('%s', line.decode())


+def missing_requirements(reqs):
+    queue = []
+    pool = multiprocessing.Pool(4)
+    for req in reqs:
+        res = pool.apply_async(pkg_resources.get_distribution, (req,))
+        queue.append((req, res))
+    missing = []
+    for req, job in queue:
+        try:
+            job.get(1)
+        except Exception:
+            missing.append(req)
+    return missing
+
+
 def install_deps(*plugins):
     installed = False
     nltk_resources = set()
+    requirements = []
     for info in plugins:
         requirements = info.get('requirements', [])
         if requirements:
+            requirements += missing_requirements(requirements)
+        nltk_resources |= set(info.get('nltk_resources', []))
+    if requirements:
+        logger.info('Installing requirements: ' + str(requirements))
         pip_args = [sys.executable, '-m', 'pip', 'install']
         for req in requirements:
             pip_args.append(req)
-        logger.info('Installing requirements: ' + str(requirements))
         process = subprocess.Popen(
             pip_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
         _log_subprocess_output(process)
@@ -572,8 +703,6 @@ def install_deps(*plugins):
         if exitcode != 0:
             raise models.Error(
                 "Dependencies not properly installed: {}".format(pip_args))
-        nltk_resources |= set(info.get('nltk_resources', []))

     installed |= download(list(nltk_resources))
     return installed

@@ -632,7 +761,7 @@ def from_info(info, root=None, install_on_fail=True, **kwargs):
 def parse_plugin_info(fpath):
     logger.debug("Parsing plugin info: {}".format(fpath))
     with open(fpath, 'r') as f:
-        info = yaml.load(f)
+        info = yaml.load(f, Loader=yaml.FullLoader)
     info['_path'] = fpath
     return info

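The new `missing_requirements` helper is what lets `install_deps` skip packages that are already importable. A hedged usage sketch, assuming the function is reachable from the `senpy.plugins` module where it is defined; the requirement names are only examples:

```python
from senpy import plugins

# Anything already satisfied is filtered out, so pip is only invoked
# for packages that are actually missing.
reqs = ['requests', 'some-package-that-is-not-installed']
print(plugins.missing_requirements(reqs))
# Expected (assuming requests is installed): ['some-package-that-is-not-installed']
```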
@@ -688,14 +817,34 @@ def _from_loaded_module(module, info=None, **kwargs):
         yield instance


+cached_evs = {}
+
+
 def evaluate(plugins, datasets, **kwargs):
-    ev = gsitk_compat.Eval(
-        tuples=None,
-        datasets=datasets,
-        pipelines=[plugin.as_pipe() for plugin in plugins])
+    for plug in plugins:
+        if not hasattr(plug, 'as_pipe'):
+            raise models.Error('Plugin {} cannot be evaluated'.format(plug.name))
+
+    tuples = list(product(plugins, datasets))
+    missing = []
+    for (p, d) in tuples:
+        if (p.id, d) not in cached_evs:
+            pipe = p.as_pipe()
+            missing.append(gsitk_compat.EvalPipeline(pipe, d))
+    if missing:
+        ev = gsitk_compat.Eval(tuples=missing, datasets=datasets)
         ev.evaluate()
         results = ev.results
-    evaluations = evaluations_to_JSONLD(results, **kwargs)
+        new_ev = evaluations_to_JSONLD(results, **kwargs)
+        for ev in new_ev:
+            dataset = ev.evaluatesOn
+            model = ev.evaluates.rstrip('__' + dataset)
+            cached_evs[(model, dataset)] = ev
+    evaluations = []
+    print(tuples, 'Cached evs', cached_evs)
+    for (p, d) in tuples:
+        print('Adding', d, p)
+        evaluations.append(cached_evs[(p.id, d)])
     return evaluations


@@ -708,7 +857,7 @@ def evaluations_to_JSONLD(results, flatten=False):
     metric_names = ['accuracy', 'precision_macro', 'recall_macro',
                     'f1_macro', 'f1_weighted', 'f1_micro', 'f1_macro']

-    for index, row in results.iterrows():
+    for index, row in results.fillna('Not Available').iterrows():
         evaluation = models.Evaluation()
         if row.get('CV', True):
             evaluation['@type'] = ['StaticCV', 'Evaluation']
@@ -724,10 +873,29 @@ def evaluations_to_JSONLD(results, flatten=False):
         # We should probably discontinue this representation
         for name in metric_names:
             metric = models.Metric()
-            metric['@id'] = 'Metric' + str(i)
             metric['@type'] = name.capitalize()
             metric.value = row[name]
             evaluation.metrics.append(metric)
             i += 1
         evaluations.append(evaluation)
     return evaluations


+class ScikitWrapper(BaseEstimator, TransformerMixin):
+    def __init__(self, plugin=None):
+        self.plugin = plugin
+
+    def fit(self, X=None, y=None):
+        if self.plugin is not None and not self.plugin.is_activated:
+            self.plugin.activate()
+        return self
+
+    def transform(self, X):
+        return self.plugin.evaluate_func(X, None)
+
+    def predict(self, X):
+        return self.transform(X)
+
+    def fit_transform(self, X, y):
+        self.fit(X, y)
+        return self.transform(X)
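`ScikitWrapper` is what `as_pipe` uses to expose a plugin with a scikit-learn-style interface. A hedged sketch of using it directly, assuming `my_sentiment_plugin` is an already-loaded senpy sentiment plugin and that the class is importable from `senpy.plugins` where it is defined:

```python
from senpy.plugins import ScikitWrapper

wrapper = ScikitWrapper(my_sentiment_plugin)   # `my_sentiment_plugin` is assumed to exist
wrapper.fit()                                  # activates the plugin if needed
# evaluate_func expects one feature list per sample, with the text first
labels = wrapper.predict([['I love this'], ['I hate this']])
# `labels` is a list of 1 / -1 values, per evaluate_func above
```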
|
@@ -1,34 +0,0 @@
-import random
-
-from senpy.plugins import EmotionPlugin
-from senpy.models import EmotionSet, Emotion, Entry
-
-
-class EmoRand(EmotionPlugin):
-    name = "emoRand"
-    description = 'A sample plugin that returns a random emotion annotation'
-    author = '@balkian'
-    version = '0.1'
-    url = "https://github.com/gsi-upm/senpy-plugins-community"
-    requirements = {}
-    onyx__usesEmotionModel = "emoml:big6"
-
-    def analyse_entry(self, entry, params):
-        category = "emoml:big6happiness"
-        number = max(-1, min(1, random.gauss(0, 0.5)))
-        if number > 0:
-            category = "emoml:big6anger"
-        emotionSet = EmotionSet()
-        emotion = Emotion({"onyx:hasEmotionCategory": category})
-        emotionSet.onyx__hasEmotion.append(emotion)
-        emotionSet.prov__wasGeneratedBy = self.id
-        entry.emotions.append(emotionSet)
-        yield entry
-
-    def test(self):
-        params = dict()
-        results = list()
-        for i in range(100):
-            res = next(self.analyse_entry(Entry(nif__isString="Hello"), params))
-            res.validate()
-            results.append(res.emotions[0]['onyx:hasEmotion'][0]['onyx:hasEmotionCategory'])
@@ -1,19 +1,26 @@
-from senpy.plugins import AnalysisPlugin
+from senpy.plugins import Transformation
 from senpy.models import Entry
 from nltk.tokenize.punkt import PunktSentenceTokenizer
 from nltk.tokenize.simple import LineTokenizer
-import nltk


-class Split(AnalysisPlugin):
-    '''description: A sample plugin that chunks input text'''
+class Split(Transformation):
+    '''
+    A plugin that chunks input text, into paragraphs or sentences.
+
+    It does not provide any sort of annotation, and it is meant to precede
+    other annotation plugins, when the annotation of individual sentences
+    (or paragraphs) is required.
+    '''

     author = ["@militarpancho", '@balkian']
     version = '0.3'
     url = "https://github.com/gsi-upm/senpy"
+    nltk_resources = ['punkt']

     extra_params = {
         'delimiter': {
+            'description': 'Split text into paragraphs or sentences.',
             'aliases': ['type', 't'],
             'required': False,
             'default': 'sentence',
@@ -21,12 +28,9 @@ class Split(Transformation):
         },
     }

-    def activate(self):
-        nltk.download('punkt')
-
-    def analyse_entry(self, entry, params):
+    def analyse_entry(self, entry, activity):
         yield entry
-        chunker_type = params["delimiter"]
+        chunker_type = activity.params["delimiter"]
         original_text = entry['nif:isString']
         if chunker_type == "sentence":
             tokenizer = PunktSentenceTokenizer()
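The `delimiter` parameter above (aliases `type`/`t`) selects sentence or paragraph chunking. A hedged sketch of exercising it over HTTP, assuming a local senpy server on port 5000 and the usual per-plugin API path:

```python
import requests

resp = requests.get('http://localhost:5000/api/split',
                    params={'input': 'First sentence. Second sentence.',
                            'delimiter': 'sentence'})
print(resp.json())
```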
@@ -103,7 +103,9 @@ class CentroidConversion(EmotionConversionPlugin):
             for i in emotionSet.onyx__hasEmotion:
                 e.onyx__hasEmotion.append(self._backwards_conversion(i))
         else:
-            raise Error('EMOTION MODEL NOT KNOWN')
+            raise Error('EMOTION MODEL NOT KNOWN. '
+                        'Cannot convert from {} to {}'.format(fromModel,
+                                                              toModel))
         yield e

     def test(self, info=None):
@@ -31,7 +31,7 @@ centroids_direction:
   - emoml:pad
 aliases: # These are aliases for any key in the centroid, to avoid repeating a long name several times
   A: emoml:pad-dimensions:arousal
-  V: emoml:pad-dimensions:pleasure
+  V: emoml:pad-dimensions:valence
   D: emoml:pad-dimensions:dominance
   anger: emoml:big6anger
   disgust: emoml:big6disgust
@@ -6,7 +6,7 @@ class MaxEmotion(PostProcessing):
     author = '@dsuarezsouto'
     version = '0.1'

-    def process_entry(self, entry, params):
+    def process_entry(self, entry, activity):
         if len(entry.emotions) < 1:
             yield entry
             return
@@ -32,7 +32,7 @@ class MaxEmotion(PostProcessing):

         entry.emotions[0]['onyx:hasEmotion'] = [max_emotion]

-        entry.emotions[0]['prov:wasGeneratedBy'] = "maxSentiment"
+        entry.emotions[0]['prov:wasGeneratedBy'] = activity.id
         yield entry

     def check(self, request, plugins):
@@ -43,12 +43,11 @@ class MaxEmotion(PostProcessing):
     # 2 Case to return a Neutral Emotion.
     test_cases = [
         {
-            "name":
-            "If there are several emotions within an emotion set, reduce it to one.",
+            "name": "If there are several emotions within an emotion set, reduce it to one.",
             "entry": {
                 "@type":
                 "entry",
-                "emotions": [
+                "onyx:hasEmotionSet": [
                     {
                         "@id":
                         "Emotions0",
@@ -94,7 +93,7 @@ class MaxEmotion(PostProcessing):
             'expected': {
                 "@type":
                 "entry",
-                "emotions": [
+                "onyx:hasEmotionSet": [
                     {
                         "@id":
                         "Emotions0",
@@ -107,9 +106,7 @@ class MaxEmotion(PostProcessing):
                             "onyx:hasEmotionCategory": "joy",
                             "onyx:hasEmotionIntensity": 0.3333333333333333
                         }
-                    ],
-                    "prov:wasGeneratedBy":
-                    'maxSentiment'
+                    ]
                 }
             ],
             "nif:isString":
@@ -122,7 +119,7 @@ class MaxEmotion(PostProcessing):
             "entry": {
                 "@type":
                 "entry",
-                "emotions": [{
+                "onyx:hasEmotionSet": [{
                     "@id":
                     "Emotions0",
                     "@type":
@@ -171,7 +168,7 @@ class MaxEmotion(PostProcessing):
             'expected': {
                 "@type":
                 "entry",
-                "emotions": [{
+                "onyx:hasEmotionSet": [{
                     "@id":
                     "Emotions0",
                     "@type":
@@ -181,9 +178,7 @@ class MaxEmotion(PostProcessing):
                         "@type": "emotion",
                         "onyx:hasEmotionCategory": "neutral",
                         "onyx:hasEmotionIntensity": 1
-                    }],
-                    "prov:wasGeneratedBy":
-                    'maxSentiment'
+                    }]
                 }],
             "nif:isString":
                 "Test"
@@ -1,13 +1,12 @@
 import requests
 import json

-from senpy.plugins import SentimentPlugin
-from senpy.models import Sentiment
+from senpy.plugins import SentimentBox

 ENDPOINT = 'http://www.sentiment140.com/api/bulkClassifyJson'


-class Sentiment140(SentimentPlugin):
+class Sentiment140(SentimentBox):
     '''Connects to the sentiment140 free API: http://sentiment140.com'''

     author = "@balkian"
@@ -16,43 +15,40 @@ class Sentiment140(SentimentBox):
     extra_params = {
         'language': {
             "@id": 'lang_sentiment140',
+            'description': 'language of the text',
             'aliases': ['language', 'l'],
-            'required': False,
+            'required': True,
             'default': 'auto',
             'options': ['es', 'en', 'auto']
         }
     }

-    maxPolarityValue = 1
-    minPolarityValue = 0
+    classes = ['marl:Positive', 'marl:Neutral', 'marl:Negative']
+    binary = True

-    def analyse_entry(self, entry, params):
-        lang = params["language"]
+    def predict_many(self, features, activity):
+        lang = activity.params["language"]
+        data = []
+
+        for feature in features:
+            data.append({'text': feature[0]})
+
         res = requests.post(ENDPOINT,
                             json.dumps({
                                 "language": lang,
-                                "data": [{
-                                    "text": entry['nif:isString']
-                                }]
+                                "data": data
                             }))
-        p = params.get("prefix", None)
-        polarity_value = self.maxPolarityValue * int(
-            res.json()["data"][0]["polarity"]) * 0.25
-        polarity = "marl:Neutral"
-        neutral_value = self.maxPolarityValue / 2.0
-        if polarity_value > neutral_value:
-            polarity = "marl:Positive"
-        elif polarity_value < neutral_value:
-            polarity = "marl:Negative"

-        sentiment = Sentiment(
-            prefix=p,
-            marl__hasPolarity=polarity,
-            marl__polarityValue=polarity_value)
-        sentiment.prov__wasGeneratedBy = self.id
-        entry.sentiments.append(sentiment)
-        entry.language = lang
-        yield entry
+        for res in res.json()["data"]:
+            polarity = int(res['polarity'])
+            neutral_value = 2
+            if polarity > neutral_value:
+                yield [1, 0, 0]
+                continue
+            elif polarity < neutral_value:
+                yield [0, 0, 1]
+                continue
+            yield [0, 1, 0]

     test_cases = [
         {
@@ -62,7 +58,7 @@ class Sentiment140(SentimentBox):
             'params': {},
             'expected': {
                 "nif:isString": "I love Titanic",
-                'sentiments': [
+                'marl:hasOpinion': [
                     {
                         'marl:hasPolarity': 'marl:Positive',
                     }
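The loop in `predict_many` above maps the sentiment140 polarity score (the service reports 0 for negative, 2 for neutral and 4 for positive) onto one-hot vectors in the order of `classes`. A small, purely illustrative helper that mirrors that mapping:

```python
def polarity_to_onehot(polarity, neutral_value=2):
    '''Mirror of the mapping in predict_many; the class order is
    [Positive, Neutral, Negative].'''
    if polarity > neutral_value:
        return [1, 0, 0]
    if polarity < neutral_value:
        return [0, 0, 1]
    return [0, 1, 0]


assert polarity_to_onehot(4) == [1, 0, 0]
assert polarity_to_onehot(0) == [0, 0, 1]
assert polarity_to_onehot(2) == [0, 1, 0]
```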
@@ -11,14 +11,12 @@
     "$ref": "context.json"
   },
   "@type": {
-    "default": "AggregatedEvaluation"
   },
   "@id": {
     "description": "ID of the aggregated evaluation",
     "type": "string"
   },
   "evaluations": {
-    "default": [],
     "type": "array",
     "items": {
       "anyOf": [
@@ -17,7 +17,6 @@
   "prov:used": {
     "description": "Parameters of the algorithm",
     "@type": "array",
-    "default": [],
     "type": "array",
     "items": {
       "$ref": "parameter.json"
@@ -1,8 +1,8 @@
 {
   "@context": {
-    "@vocab": "http://www.gsi.dit.upm.es/ontologies/senpy#",
+    "@vocab": "http://www.gsi.upm.es/onto/senpy/ns#",
     "dc": "http://dublincore.org/2012/06/14/dcelements#",
-    "me": "http://www.mixedemotions-project.eu/ns/model#",
+    "senpy": "http://www.gsi.upm.es/onto/senpy/ns#",
     "prov": "http://www.w3.org/ns/prov#",
     "nif": "http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#",
     "marl": "http://www.gsi.dit.upm.es/ontologies/marl/ns#",
@@ -16,10 +16,10 @@
     "@container": "@set"
   },
   "entities": {
-    "@id": "me:hasEntities"
+    "@id": "senpy:hasEntities"
   },
   "suggestions": {
-    "@id": "me:hasSuggestions",
+    "@id": "senpy:hasSuggestions",
     "@container": "@set"
   },
   "onyx:hasEmotion": {
@@ -40,7 +40,7 @@
     "@id": "prov:used",
     "@container": "@set"
   },
-  "analysis": {
+  "activities": {
     "@id": "prov:wasInformedBy",
     "@type": "@id",
     "@container": "@set"
@@ -65,6 +65,13 @@
   },
   "onyx:conversionTo": {
     "@type": "@id"
-  }
+  },
+  "parameters": {
+    "@type": "Parameter"
+  },
+  "errors": {
+    "@type": "ParameterError"
+  },
+  "prefix": "http://senpy.invalid/"
   }
 }
@@ -7,7 +7,6 @@
   "properties": {
     "datasets": {
       "type": "array",
-      "default": [],
       "items": {
         "$ref": "dataset.json"
       }
@@ -19,8 +19,7 @@
       "type": "array",
       "items": {
         "$ref": "emotion.json"
-      },
-      "default": []
+      }
     }
   },
   "required": ["@id", "onyx:hasEmotion"]
@@ -12,8 +12,7 @@
       "type": "array",
       "items": {
         "$ref": "emotion.json"
-      },
-      "default": []
+      }
     },
     "prov:wasGeneratedBy": {
       "type": "string",
@@ -9,30 +9,13 @@
       "description": "String contained in this Context. Alternative: nif:isString",
       "type": "string"
     },
-    "sentiments": {
+    "marl:hasOpinion": {
       "type": "array",
-      "items": {"$ref": "sentiment.json" },
-      "default": []
+      "items": {"$ref": "sentiment.json" }
     },
-    "emotions": {
+    "onyx:hasEmotionSet": {
       "type": "array",
-      "items": {"$ref": "emotionSet.json" },
-      "default": []
-    },
-    "entities": {
-      "type": "array",
-      "items": {"$ref": "entity.json" },
-      "default": []
-    },
-    "topics": {
-      "type": "array",
-      "items": {"$ref": "topic.json" },
-      "default": []
-    },
-    "suggestions": {
-      "type": "array",
-      "items": {"$ref": "suggestion.json" },
-      "default": []
+      "items": {"$ref": "emotionSet.json" }
     }
   },
   "required": ["nif:isString"]
@@ -6,8 +6,7 @@
       "type": "string"
     },
     "@type": {
-      "type": "array",
-      "default": "Evaluation"
+      "type": "array"
     },
     "metrics": {
@@ -24,8 +24,7 @@
       "description": "Sub-type of plugin. e.g. sentimentPlugin"
     },
     "extra_params": {
-      "type": "object",
-      "default": {}
+      "type": "object"
     }
   }
 }
@@ -7,10 +7,16 @@
   "properties": {
     "plugins": {
       "type": "array",
-      "default": [],
       "items": {
+        "anyOf": [
+          {
+            "type": "string"
+          },
+          {
             "$ref": "plugin.json"
           }
+        ]
+      }
     }
   }
 }
@@ -1,5 +1,6 @@
 {
   "$schema": "http://json-schema.org/draft-04/schema#",
+  "name": "Entry",
   "allOf": [
     {"$ref": "response.json"},
     {
@@ -10,15 +11,11 @@
       "@context": {
         "$ref": "context.json"
       },
-      "@type": {
-        "default": "results"
-      },
       "@id": {
         "description": "ID of the analysis",
         "type": "string"
       },
-      "analysis": {
-        "default": [],
+      "activities": {
         "type": "array",
         "items": {
           "$ref": "analysis.json"
@@ -26,14 +23,13 @@
       },
       "entries": {
         "type": "array",
-        "default": [],
         "items": {
           "$ref": "entry.json"
         }
       }

     },
-    "required": ["@id", "analysis", "entries"]
+    "required": ["@id", "activities", "entries"]
     }
   ]
 }

senpy/static/css/bootstrap-theme.css (347 lines, vendored): deleted.
senpy/static/css/bootstrap-theme.min.css (vendored): diff suppressed, lines too long.
senpy/static/css/bootstrap.css (5785 lines, vendored): diff suppressed, file too large.
senpy/static/css/bootstrap.min.css (vendored): diff suppressed, lines too long.
senpy/static/css/bootstrap.min.css.map (new file): diff suppressed, lines too long.
@@ -130,7 +130,7 @@ textarea{

 .tab-content {
     padding: 1em;
-    border: 1px solid #ddd;
+    /* border: 1px solid #ddd; */
     border-top: none;
 }
 #content-services, #content-resources {
@@ -144,9 +144,12 @@ textarea{
 .center-block {
     float:none;
 }
-#header {
+#header h3, #header h4, #header h5, #header h6 {
     font-family: 'Architects Daughter', cursive;
 }
+#header {
+    background-color: white;
+}

 #results-div {
     /* background: white; */
@@ -196,7 +199,63 @@ textarea{

 }

+small pre {
+    font-size: smaller;
+    overflow: auto;
+}
+
 @keyframes spin {
     0% { transform: rotate(0deg); }
     100% { transform: rotate(360deg); }
 }

+
+/* NODES */
+.node {
+    stroke: #fff;
+    fill:#ddd;
+    stroke-width: 1.5px;
+}
+.link {
+    fill: none;
+    stroke: #999;
+    stroke-opacity: .6;
+    stroke-width: 1px;
+}
+marker {
+    stroke: #999;
+    fill:rgba(124,240,10,0);
+}
+.node-text {
+    font: 11px sans-serif;
+    fill:black;
+}
+.link-text {
+    font: 9px sans-serif;
+    fill:grey;
+}
+
+svg{
+    border:1px solid black;
+    background-color: white;
+    width: 100%;
+    height: 600px;
+}
+
+.ribbon {
+    position: fixed;
+    right: 0;
+    top: 0;
+    z-index: 9999;
+}
+
+pre {outline: 1px solid #ccc; padding: 1em; }
+.string { color: green; }
+.number { color: darkorange; }
+.boolean { color: blue; }
+.null { color: magenta; }
+.key { color: black; }
+
+#basic_params {
+    padding: 2em;
+}

senpy/static/img/ribbon.png (new binary file, 5.1 KiB): not shown.
senpy/static/js/bootstrap.js (1951 lines, vendored): diff suppressed, file too large.
senpy/static/js/bootstrap.min.js (vendored): diff suppressed, lines too long.
senpy/static/js/bootstrap.min.js.map (new file): diff suppressed, lines too long.
senpy/static/js/d3.min.js (new vendored file): diff suppressed, lines too long.
@@ -5,6 +5,7 @@ var plugins = [];
 var defaultPlugin = {};
 var gplugins = {};
 var pipeline = [];
+var converter = new showdown.Converter({simplifiedAutoLink: true});
 
 function replaceURLWithHTMLLinks(text) {
     console.log('Text: ' + text);
@@ -21,10 +22,17 @@ function encodeHTML(text) {
 };
 
 function hashchanged(){
-    var hash = location.hash
-        , hashPieces = hash.split('?');
-    if( hashPieces[0].length > 0 ){
-        activeTab = $('[href=' + hashPieces[0] + ']');
+    console.log('Hash changed');
+    var hash = location.hash,
+        hashPieces = hash.split('?');
+    loc = hashPieces[0]
+    if( loc.length > 0 ){
+
+        if(loc[loc.length-1] == '.' ){
+            loc = loc.slice(0, -1)
+        }
+        console.log(loc)
+        activeTab = $('[href=' + loc + ']');
         activeTab && activeTab.tab('show');
     }
 }
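The trailing-dot handling above pairs with a `shown.bs.tab` handler added later in this file, which writes `e.target.hash + '.'` into `location.hash`. Appending a character that is not part of any element id appears to be a way to record the active tab in the URL without the browser jumping to that anchor; `hashchanged` then strips it before resolving the tab. An illustrative round trip (the `#test` href is taken from the template later in this diff):

```js
// 1. when a tab is shown, the hash is written with a trailing dot
window.location.hash = '#test' + '.';                     // URL shows "#test." and nothing scrolls
// 2. hashchanged() strips the dot before looking the tab up
let loc = location.hash.split('?')[0];                    // "#test."
if (loc[loc.length - 1] === '.') loc = loc.slice(0, -1);  // "#test"
```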
@@ -52,18 +60,8 @@ function group_plugins(){
         }
     }
 }
 
-function get_parameters(){
-    for (p in plugins){
-        plugin = plugins[p];
-        if (plugin["extra_params"]){
-            plugins_params[plugin["name"]] = plugin["extra_params"];
-        }
-    }
-}
-
 function draw_plugins_selection(){
     html="";
-    group_plugins();
     for (g in gplugins){
         html += "<optgroup label=\""+g+"\">"
         for (r in gplugins[g]){
@@ -83,14 +81,39 @@ function draw_plugins_selection(){
             html+=">"+plugin["name"]+"</option>"
 
         }
-    }
         html += "</optgroup>"
+    }
     // Two elements with plugin class
     // One from the evaluate tab and another one from the analyse tab
-    plugin_lists = document.getElementsByClassName('plugin')
-    for (element in plugin_lists){
-        plugin_lists[element].innerHTML = html;
-    }
+    $('#plugins-select').html(html)
+    draw_plugin_pipeline();
+}
+
+function draw_plugins_eval_selection(){
+    evaluable = JSON.parse($.ajax({type: "GET", url: "/api/plugins/?plugin_type=Evaluable" , async: false}).responseText).plugins;
+
+    html="";
+    for (r in evaluable){
+        plugin = evaluable[r]
+        if (!plugin["name"]){
+            console.log("No name for plugin ", plugin);
+            continue;
+        }
+        html+= "<option value=\""+plugin.name+"\" "
+        if (plugin["name"] == defaultPlugin["name"]){
+            html+= " selected=\"selected\""
+        }
+        if (!plugin["is_activated"]){
+            html+= " disabled=\"disabled\" "
+        }
+        html+=">"+plugin["name"]+"</option>"
+    }
+
+    // Two elements with plugin class
+    // One from the evaluate tab and another one from the analyse tab
+    $('#plugins-eval').html(html)
     draw_plugin_pipeline();
 }
@@ -116,19 +139,30 @@ function remove_plugin_pipeline(name){
 function draw_plugins_list(){
     var availablePlugins = document.getElementById('availablePlugins');
 
-    for(p in plugins){
-        var pluginEntry = document.createElement('li');
+    availablePlugins.innerHTML = '';
+    html = ''
 
+    for (g in gplugins){
+        for (r in gplugins[g]){
+            p = gplugins[g][r];
             plugin = plugins[p];
-        newHtml = ""
-        if(plugin.url) {
-            newHtml= "<a href="+plugin.url+">" + plugin.name + "</a>";
-        }else {
-            newHtml= plugin["name"];
+            html += `<div class="card my-2">
+                <div class="card-body">
+                <h4 class="card-title"> ${plugin.name} <span class="badge badge-success">${plugin.version}</span>`
+            // if (typeof plugin.author !== 'undefined'){
+            //     html += ` <span class="badge badge-secondary">${plugin.author}</span>`
+            // }
+            // for (ptype in plugin['@type'] ){
+            html += ` <span class="badge badge-secondary">${plugin['@type']}</span>`
+            // }
+            html += `</h4>
+                <p class="card-text">${converter.makeHtml(plugin.description)}</p>
+                </div>
+            </div>`
+            html += "</div>";
         }
-        newHtml += ": " + replaceURLWithHTMLLinks(plugin.description);
-        pluginEntry.innerHTML = newHtml;
-        availablePlugins.appendChild(pluginEntry)
     }
+    availablePlugins.innerHTML = html;
 }
 
 function add_plugin_pipeline(){
@@ -149,16 +183,19 @@ function draw_datasets(){
 }
 
 $(document).ready(function() {
-    var response = JSON.parse($.ajax({type: "GET", url: "/api/plugins/" , async: false}).responseText);
+    var response = JSON.parse($.ajax({type: "GET", url: "/api/plugins/?verbose=1" , async: false}).responseText);
     defaultPlugin= JSON.parse($.ajax({type: "GET", url: "/api/plugins/default" , async: false}).responseText);
 
     get_plugins(response);
     get_default_parameters();
 
-    draw_plugins_list();
+    group_plugins();
     draw_plugins_selection();
+    draw_plugins_eval_selection();
     draw_parameters();
+    draw_plugins_list();
     draw_plugin_description();
+    example_selected();
 
     if (evaluation_enabled) {
         var response2 = JSON.parse($.ajax({type: "GET", url: "/api/datasets/" , async: false}).responseText);
@@ -166,19 +203,33 @@ $(document).ready(function() {
         draw_datasets();
     }
 
-    $(window).on('hashchange', hashchanged);
-    hashchanged();
+    // $(window).on('hashchange', hashchanged);
+    // hashchanged();
     $('.tooltip-form').tooltip();
 
+    $('.nav-pills a').on('shown.bs.tab', function (e) {
+        window.location.hash = e.target.hash + '.';
+    })
+    $('form').on("submit",function( event ) {
+        event.preventDefault();
+    });
+
 });
 
-function get_default_parameters(){
-    default_params = JSON.parse($.ajax({type: "GET", url: "/api?help=true" , async: false}).responseText).valid_parameters;
+function get_parameters(plugin, verbose){
+    verbose = typeof verbose !== 'undefined' ? verbose : false;
+    resp = JSON.parse($.ajax({type: "GET", url: "/api/"+plugin+"?help=true&verbose=" + verbose , async: false}).responseText)
+    params = resp.valid_parameters;
     // Remove the parameters that are always added
-    delete default_params["input"];
-    delete default_params["algorithm"];
-    delete default_params["help"];
+    delete params["input"];
+    delete params["algorithm"];
+    delete params["outformat"];
+    delete params["help"];
+    return params
+}
+
+function get_default_parameters() {
+    default_params = get_parameters('default', true);
 }
 
 function get_selected_plugin(){
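`get_parameters` fetches the parameter help for a single plugin from `/api/<plugin>?help=true` and strips the parameters that the form always adds on its own. A hedged usage sketch; the plugin name is only an example, and `plugins_params` is the cache main.js already uses elsewhere:

```js
// Hypothetical usage: cache the extra parameters of one plugin for the form.
// Assumes a running senpy server and jQuery, as in the rest of main.js.
var name = 'sentiment140';                       // example plugin name (assumption)
plugins_params[name] = get_parameters(name, false);
console.log(Object.keys(plugins_params[name]));  // remaining user-facing parameters
```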
@@ -186,8 +237,9 @@ function get_selected_plugin(){
 }
 
 function draw_default_parameters(){
-    var basic_params = document.getElementById("basic_params");
-    basic_params.innerHTML = params_div(default_params);
+    bp = $('#basic_params');
+    console.log(bp)
+    bp.html(params_div(default_params));
 }
 
 function update_params(params, plug){
@@ -200,7 +252,10 @@ function update_params(params, plug){
 
 function draw_extra_parameters(){
     var plugin = get_selected_plugin();
-    get_parameters();
+    if (typeof plugins_params[plugin] === 'undefined' ){
+        params = get_parameters(plugin, false);
+        plugins_params[plugin] = params;
+    }
 
     var extra_params = document.getElementById("extra_params");
     var params = {};
@@ -218,17 +273,15 @@ function draw_parameters(){
 
 
 function add_default_params(){
-    var html = "";
+    var html = "<form>";
     // html += '<a href="#basic_params" class="btn btn-info" data-toggle="collapse">Basic API parameters</a>';
-    html += '<span id="basic_params" class="panel-collapse collapse">';
-    html += '<ul class="list-group">'
     html += params_div(default_params);
-    html += '</span>';
+    html += '</form>'
     return html;
 }
 
 function params_div(params){
-    var html = '<div class="container-fluid">';
+    var html = '';
     if (Object.keys(params).length === 0) {
         html += '<p class="text text-muted text-center">This plugin does not take any extra parameters</p>';
     }
@@ -267,13 +320,14 @@ function params_div(params){
         html+='</div>';
         html+='<div class="row">';
         if ('description' in param){
-            html += '<p class="form-text sm-sm-12 text-muted text-center">' + param.description + '</p>';
+            html += '<small class="form-text sm-sm-12 text-muted">'
+            html += converter.makeHtml(param.description) + '</small>';
         }
         html+='</div>';
         html+='</div>';
     }
-    html+='</div>';
     return html;
 }
 
@@ -325,19 +379,18 @@ function get_pipeline_arg(){
 }
 
 
-function load_JSON(){
+function get_result(outformat, cb, eb) {
     url = "/api";
-    var container = document.getElementById('results');
-    var rawcontainer = document.getElementById("jsonraw");
-    rawcontainer.innerHTML = '';
-    container.innerHTML = '';
+    $('.results').html('');
 
     var plugin = get_pipeline_arg();
     $(".loading").addClass("loader");
     $("#preview").hide();
 
     var input = encodeURIComponent(document.getElementById("input").value);
-    url += "?algo="+plugin+"&i="+input
+    url += "?algo="+plugin+"&i="+input+"&outformat="+outformat;
 
     params = get_form_parameters();
 
@@ -345,26 +398,47 @@ function load_JSON(){
         url += add_param(key, params[key]);
     }
 
-    $.ajax({type: "GET", url: url}).always(function(response){
-        document.getElementById("results-div").style.display = 'block';
-        if(typeof response=="object") {
-            var options = {
-                mode: 'view'
-            };
-            var editor = new JSONEditor(container, options, response);
-            editor.expandAll();
-            $('#results-div a[href="#viewer"]').click();
-            response = JSON.stringify(response, null, 4);
-        } else {
-            console.log("Got turtle?");
-            $('#results-div a[href="#raw"]').click();
-        }
-
-        rawcontainer.innerHTML = replaceURLWithHTMLLinks(response);
-        document.getElementById("input_request").innerHTML = "<a href='"+url+"'>"+url+"</a>"
-
+    return $.ajax({type: "GET", url: url}).done(function(){
         $(".loading").removeClass("loader");
         $("#preview").show();
+    })
+}
+
+function load_results(outformat){
+
+    if (typeof outformat === 'undefined') {
+        active = $('#results-container .tab-pane.active').get(0);
+        console.log(active.id);
+        outformat = active.id
+        if (outformat == 'viewer') {
+            outformat = 'json-ld';
+        }
+    }
+
+    get_result(outformat).always(function(response, txt, request){
+        console.log('outformat is', outformat)
+        document.getElementById("results-div").style.display = 'block';
+        container = $('#results-'+outformat).get(0);
+        raw = '';
+
+        try {
+            raw = replaceURLWithHTMLLinks(response)
+        }catch(error){
+            response = JSON.stringify(response, null, 1);
+            raw = syntaxHighlight(response)
+        }
+        console.log('highlighted');
+        container.innerHTML = replaceURLWithHTMLLinks(raw);
+
+        document.getElementById("input_request").innerHTML = "<a href='"+url+"'>"+url+"</a>"
+        console.log("Request failed", request);
+        $(".loading").removeClass("loader");
+        $("#preview").show();
+    }).fail(function(data) {
+        if (data.readyState == 0) {
+            $('#results-'+outformat).html('');
+            alert( "The server is not responding. Make sure it is still running." );
+        }
     });
 }
 
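`get_result` now takes the output format and returns the jqXHR promise, so the same request logic backs every results tab; `load_results` only works out which tab is active (mapping the default viewer to `json-ld`) and renders into `#results-<outformat>`. The pills added to the template later in this diff call it directly:

```js
// How the UI invokes it (mirrors the onclick handlers in the template below):
load_results('json-ld');   // JSON-LD tab
load_results('turtle');    // Turtle tab
load_results('ntriples');  // N-Triples tab; load_graph() in nodes.js reuses get_result('ntriples')
```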
@@ -385,9 +459,14 @@ function create_body_metrics(evaluations){
     var new_tbody = document.createElement('tbody')
     var metric_html = ""
     for (var eval in evaluations){
-        metric_html += "<tr><th>"+evaluations[eval].evaluates+"</th><th>"+evaluations[eval].evaluatesOn+"</th>";
+        metric_html += "<tr><td>"+evaluations[eval].evaluates+"</td><td>"+evaluations[eval].evaluatesOn+"</td>";
         for (var metric in evaluations[eval].metrics){
-            metric_html += "<th>"+parseFloat(evaluations[eval].metrics[metric].value.toFixed(4))+"</th>";
+            var value = "Not available";
+            try {
+                value = parseFloat(evaluations[eval].metrics[metric].value.toFixed(4));
+            }catch(err){
+            }
+            metric_html += "<td>"+value+"</td>";
         }
         metric_html += "</tr>";
     }
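`create_body_metrics` expects each evaluation to carry `evaluates`, `evaluatesOn` and a list of `metrics` with numeric `value` fields; the new `try/catch` falls back to "Not available" when a value is missing or not a number. A sketch of the shape it consumes (the field values are invented for illustration, only the field names come from the code above):

```js
// Illustrative input for create_body_metrics.
var evaluations = [{
    evaluates: 'endpoint:plugins/sentiment-example',   // example value (assumption)
    evaluatesOn: 'some-dataset',                       // example value (assumption)
    metrics: [{ value: 0.7512345 }, { value: null }]   // the second one renders as "Not available"
}];
var tbody = create_body_metrics(evaluations);          // filled <tbody>, consumed by evaluate_JSON below
```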
@@ -396,6 +475,7 @@ function create_body_metrics(evaluations){
 }
 
 function evaluate_JSON(){
+    $(".loading").addClass("loader");
 
     url = "/api/evaluate";
 
@@ -405,56 +485,64 @@ function evaluate_JSON(){
 
     rawcontainer.innerHTML = "";
     container.innerHTML = "";
+    $("#evaluate-div").hide();
 
-    var plugin = document.getElementsByClassName("plugin")[0].options[document.getElementsByClassName("plugin")[0].selectedIndex].value;
+    var plugin = $("#plugins-eval option:selected").val();
 
     get_datasets_from_checkbox();
 
     url += "?algo="+plugin+"&dataset="+datasets
 
     $('#doevaluate').attr("disabled", true);
-    $.ajax({type: "GET", url: url, dataType: 'json'}).always(function(resp) {
+    $.ajax({type: "GET", url: url, dataType: 'text'}).always(function(response) {
         $('#doevaluate').attr("disabled", false);
-        response = resp.responseText;
-
-        rawcontainer.innerHTML = replaceURLWithHTMLLinks(response);
-
         document.getElementById("input_request_eval").innerHTML = "<a href='"+url+"'>"+url+"</a>"
         document.getElementById("evaluate-div").style.display = 'block';
 
+        $('#evaluate-div a[href="#evaluate-raw"]').click();
+
         try {
-            response = JSON.parse(response);
-            var options = {
-                mode: 'view'
-            };
-
-            //Control the single response results
-            if (!(Array.isArray(response.evaluations))){
-                response.evaluations = [response.evaluations]
-            }
-
-            new_tbody = create_body_metrics(response.evaluations)
-            table.replaceChild(new_tbody, table.lastElementChild)
-
-            var editor = new JSONEditor(container, options, response);
-            editor.expandAll();
-            // $('#results-div a[href="#viewer"]').tab('show');
-            $('#evaluate-div a[href="#evaluate-table"]').click();
-            // location.hash = 'raw';
-
+            js = JSON.parse(response);
+            rawcontainer.innerHTML = replaceURLWithHTMLLinks(response);
         }
         catch(err){
-            console.log("Error decoding JSON (got turtle?)");
-            $('#evaluate-div a[href="#evaluate-raw"]').click();
-            // location.hash = 'raw';
+            console.log("Error decoding JSON");
+            if (typeof(response.responseText) !== 'undefined') {
+                rawcontainer.innerHTML = response.responseText;
             }
+            else {
+                rawcontainer.innerHTML = response;
+            }
+            $(".loading").removeClass("loader");
+            $('#evaluate-div a[href="#evaluate-raw"]').click();
+            return
+        }
+
+        if (typeof(js['senpy:evaluations']) === 'undefined') {
+            alert('Could not evaluate on that dataset');
+        } else {
+            //Control the single response results
+            if (!(Array.isArray(js['senpy:evaluations']))){
+                js['senpy:evaluations'] = [js['senpy:evaluations']]
+                rawcontainer.innerHtml = response;
+            }
+            new_tbody = create_body_metrics(js['senpy:evaluations'])
+            table.replaceChild(new_tbody, table.lastElementChild)
+            $('#evaluate-div a[href="#evaluate-table"]').click();
+            // location.hash = 'raw';
+            $("#evaluate-div").show();
+        }
+
+        $(".loading").removeClass("loader");
+
     })
 }
 
 function draw_plugin_description(){
     var plugin = plugins[get_selected_plugin()];
-    $("#plugdescription").text(plugin.description);
+    $("#plugdescription").html(converter.makeHtml(plugin.description));
     console.log(plugin);
 }
 
@@ -462,3 +550,32 @@ function plugin_selected(){
     draw_extra_parameters();
     draw_plugin_description();
 }
+
+function example_selected(){
+    console.log('changing text');
+    var text = $('#examples option:selected').data("text");
+    $('#input').val(text);
+    console.log('done');
+}
+
+function syntaxHighlight(json) {
+    if (typeof json != 'string') {
+        json = JSON.stringify(json, undefined, 1);
+    }
+    json = json.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;');
+    return json.replace(/("(\\u[a-zA-Z0-9]{4}|\\[^u]|[^\\"])*"(\s*:)?|\b(true|false|null)\b|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?)/g, function (match) {
+        var cls = 'number';
+        if (/^"/.test(match)) {
+            if (/:$/.test(match)) {
+                cls = 'key';
+            } else {
+                cls = 'string';
+            }
+        } else if (/true|false/.test(match)) {
+            cls = 'boolean';
+        } else if (/null/.test(match)) {
+            cls = 'null';
+        }
+        return '<span class="' + cls + '">' + match + '</span>';
+    });
+}
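`syntaxHighlight` is a small client-side pretty-printer: it escapes the JSON string and wraps keys, strings, numbers, booleans and nulls in `<span>`s whose classes match the colour rules added to the stylesheet above (`.key`, `.string`, `.number`, `.boolean`, `.null`). For example:

```js
// Illustrative usage: produce markup ready to drop into one of the <pre> result panes.
var markup = syntaxHighlight({ name: 'example', score: 0.5, active: true, extra: null });
// markup contains spans such as <span class="key">"name":</span> <span class="string">"example"</span> ...
document.getElementById('results-json-ld').innerHTML = markup;  // element id from the template below
```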
223   senpy/static/js/nodes.js (new file)
@@ -0,0 +1,223 @@
ns = {
    'http://www.gsi.dit.upm.es/ontologies/marl/ns#': 'marl',
    'http://www.gsi.dit.upm.es/ontologies/onyx/ns#': 'onyx',
    'http://www.gsi.dit.upm.es/ontologies/senpy/ns#': 'onyx',
    'http://www.gsi.upm.es/onto/senpy/ns#': 'senpy',
    'http://www.w3.org/ns/prov#': 'prov',
    'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#': 'nif'
}

mappings = {
    'http://www.w3.org/1999/02/22-rdf-syntax-ns#type': 'a',
}

function load_graph(){

    function filterNodesById(nodes,id){
        return nodes.filter(function(n) { return n.id === id; });
    }

    function filterNodesByType(nodes,value){
        return nodes.filter(function(n) { return n.type === value; });
    }

    function triplesToGraph(triples){

        svg.html("");
        //Graph
        var graph={nodes:[], links:[], triples: []};

        triples = triples.filter(t=>t!=null).map(t => {
            return t.map(e =>{
                ids = e.match(/^\<(.*)\>/)
                if (! ids ) {
                    return e
                }
                id = ids[1]
                for (ix in ns) {
                    id = id.replace(ix, ns[ix] + ':')
                }
                if (! (id in mappings)) {
                    console.log(id, id in mappings)
                    return id
                }
                return mappings[id]
            })
        })

        //Initial Graph from triples
        triples.forEach(function(triple){
            if (triple == null) {
                return
            }
            var subjId = triple[0];
            var predId = triple[1];
            var objId = triple[2];

            var subjNode = filterNodesById(graph.nodes, subjId)[0];
            var objNode = filterNodesById(graph.nodes, objId)[0];

            if(subjNode==null){
                subjNode = {id:subjId, label:subjId, weight:1, type:"node"};
                graph.nodes.push(subjNode);
            }

            if(objNode==null){
                objNode = {id:objId, label:objId, weight:1, type:"node"};
                graph.nodes.push(objNode);
            }

            var predNode = {id:predId, label:predId, weight:1, type:"pred"} ;
            graph.nodes.push(predNode);

            var blankLabel = "";

            graph.links.push({source:subjNode, target:predNode, predicate:blankLabel, weight:1});
            graph.links.push({source:predNode, target:objNode, predicate:blankLabel, weight:1});
            graph.triples.push({s:subjNode, p:predNode, o:objNode});

        });

        return graph;
    }

    function update(graph){

        // ==================== Add Marker ====================
        svg.append("svg:defs").selectAll("marker")
            .data(["end"])
            .enter().append("svg:marker")
            .attr("id", String)
            .attr("viewBox", "0 -5 10 10")
            .attr("refX", 30)
            .attr("refY", -0.5)
            .attr("markerWidth", 6)
            .attr("markerHeight", 6)
            .attr("orient", "auto")
            .append("svg:polyline")
            .attr("points", "0,-5 10,0 0,5")
            ;

        // ==================== Add Links ====================
        var links = svg.selectAll(".link")
            .data(graph.triples)
            .enter()
            .append("path")
            .attr("marker-end", "url(#end)")
            .attr("class", "link")
            ;//links

        // ==================== Add Link Names =====================
        var linkTexts = svg.selectAll(".link-text")
            .data(graph.triples)
            .enter()
            .append("text")
            .attr("class", "link-text")
            .text( function (d) { return d.p.label; })
            ;

        //linkTexts.append("title")
        //     .text(function(d) { return d.predicate; });

        // ==================== Add Link Names =====================
        var nodeTexts = svg.selectAll(".node-text")
            .data(filterNodesByType(graph.nodes, "node"))
            .enter()
            .append("text")
            .attr("class", "node-text")
            .text( function (d) { return d.label; })
            ;

        //nodeTexts.append("title")
        //     .text(function(d) { return d.label; });

        // ==================== Add Node =====================
        var nodes = svg.selectAll(".node")
            .data(filterNodesByType(graph.nodes, "node"))
            .enter()
            .append("circle")
            .attr("class", "node")
            .attr("r",8)
            .call(force.drag)
            ;//nodes

        // ==================== Add Predicate =====================
        /*var preds = svg.selectAll(".node")
            .data(graph.preds)
            .enter()
            .append("circle")
            .attr("class", "node")
            .attr("r",1)
            //.call(force.drag)*/
            ;//nodes

        // ==================== Force ====================
        force.on("tick", function() {
            nodes
                .attr("cx", function(d){ return d.x; })
                .attr("cy", function(d){ return d.y; })
                ;

            links
                .attr("d", function(d) {
                    return "M" + d.s.x + "," + d.s.y
                        + "S" + d.p.x + "," + d.p.y
                        + " " + d.o.x + "," + d.o.y;
                })
                ;

            nodeTexts
                .attr("x", function(d) { return d.x + 12 ; })
                .attr("y", function(d) { return d.y + 3; })
                ;

            linkTexts
                .attr("x", function(d) { return 4 + (d.s.x + d.p.x + d.o.x)/3 ; })
                .attr("y", function(d) { return 4 + (d.s.y + d.p.y + d.o.y)/3 ; })
                ; });

        // ==================== Run ====================
        force
            .nodes(graph.nodes)
            .links(graph.links)
            .charge(-500)
            .linkDistance(50)
            .start()
            ;
    }

    get_result('ntriples').done(resp => {
        triples = resp.split('\n').map(line => line.match(/[^" ][^ ]*|\"[^"]+\"[^ ]*/g))

        console.log(triples);

        var graph = triplesToGraph(triples);
        console.log(graph);

        update(graph);
    }).fail(resp => {
        alert('Could not get a response.');
    });

    d3.select('#svg-body').selectAll("*").remove();
    var svg = d3.select("#svg-body").append("svg")
        .attr("width", width)
        .attr("height", height)
        ;

    var height = 600;
    var width = $('.tab-content').width();

    console.log('Graph with', width, height);
    var force = d3.layout.force().size([width, height]);
}
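nodes.js renders the N-Triples output as a force-directed graph: each line of the response is tokenised with the regular expression above, URIs are shortened through the `ns` prefix table, and every triple becomes a subject node, a predicate node and two links. A small illustration of the tokenisation step (the sample triple is invented for the example; the regex and prefix tables are the ones defined in the file):

```js
// Illustrative only: how one N-Triples line is split into subject/predicate/object.
var line = '<http://example.com/doc#char=0,5> ' +
           '<http://www.w3.org/1999/02/22-rdf-syntax-ns#type> ' +
           '<http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#RFC5147String> .';
var parts = line.match(/[^" ][^ ]*|\"[^"]+\"[^ ]*/g);
// parts[0] -> subject URI, parts[1] -> predicate (shortened to 'a' via mappings),
// parts[2] -> object URI (shortened to 'nif:RFC5147String' via ns), parts[3] -> '.'
```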
3     senpy/static/js/showdown.min.js (vendored, new file; file diff suppressed because one or more lines are too long)
@@ -9,45 +9,49 @@
     this.evaluation_enabled = {% if evaluation %}true{%else %}false{%endif%};
   </script>
   <script src="static/js/jquery-2.1.1.min.js" ></script>
+  <script src="static/js/d3.min.js" ></script>
   <!--<script src="jquery.autosize.min.js"></script>-->
   <link rel="stylesheet" href="static/css/bootstrap.min.css">
   <link rel="stylesheet" href="static/css/main.css">
   <link rel="stylesheet" href="static/font-awesome-4.1.0/css/font-awesome.min.css">
-  <link href="static/css/jsoneditor.css" rel="stylesheet" type="text/css">
 
   <script src="static/js/bootstrap.min.js"></script>
-  <script src="static/js/jsoneditor.js"></script>
+  <script src="static/js/showdown.min.js"></script>
   <script src="static/js/main.js"></script>
+  <script src="static/js/nodes.js"></script>
 
 <body>
+  <nav id="header" class="navbar navbar-default sticky-top">
     <div class="container">
-      <div id="header">
-        <h3 id="header-title">
+        <h3 id="header-title" class="p-2">
           <a href="https://github.com/gsi-upm/senpy" target="_blank">
             <img id="header-logo" class="imsg-responsive" src="static/img/header.png"/></a> Playground
         </h3>
-        <h4>v{{ version}}</h4>
-      </div>
-      <ul class="nav nav-tabs" role="tablist">
-        <li role="presentation" ><a class="active" href="#about">About</a></li>
-        <li role="presentation"class="active"><a class="active" href="#test">Test it</a></li>
+        <span class="nav nav-pills p-2" id="nav-pills" role="pill-list">
+          <a class="nav-item nav-link" data-toggle="pill" id="nav-about" role="pill" aria-controls="about" href="#about" aria-selected="false" >About</a>
+          <a class="nav-item nav-link" data-toggle="pill" id="nav-plugins" role="pill" aria-controls="plugins" href="#plugins" aria-selected="false">Plugins</a>
+          <a class="nav-item nav-link active" data-toggle="pill" id="nav-test" role="pill" aria-controls="test" href="#test" aria-selected="true">Test it</a>
         {% if evaluation %}
-        <li role="presentation"><a class="active" href="#evaluate">Evaluate Plugins</a></li>
+          <a class="nav-item nav-link" data-toggle="pill" id="nav-evaluate" role="pill" aria-controls="evaluate" href="#evaluate" aria-selected="false">Evaluate Plugins</a>
         {% endif %}
-      </ul>
+        </span>
+        <h6 class="p-2 ml-auto">v{{ version}}</h6>
+    </div>
+  </nav>
+  <div id="content" class="container">
+    <a href="https://github.com/gsi-upm/senpy" target="_blank"><img width="149" height="149" src="static/img/ribbon.png" class="ribbon" alt="Fork me on GitHub" data-recalc-dims="1"></a>
     <div class="tab-content">
-      <div class="tab-pane" id="about">
+      <div class="tab-pane" role="tabpanel" aria-labelledby="nav-about" id="about">
+
         <div class="row">
           <div class="col-lg-6">
             <h2>About Senpy</h2>
-            <p>Senpy is a framework to build semantic sentiment and emotion analysis services. It does so by using a mix of web and semantic technologies, such as JSON-LD, RDFlib and Flask.</p>
+            <p>Senpy is a framework to build semantic sentiment and emotion analysis services. It leverages a mix of web and semantic technologies, such as JSON-LD, RDFlib and Flask.</p>
             <p>Senpy makes it easy to develop and publish your own analysis algorithms (plugins in senpy terms).
             </p>
             <p>
@@ -66,123 +70,174 @@
             </ul>
 
             </p>
-            <p>Senpy is a research project. If you use it in your research, please cite:
+          </div>
+          <div class="col-lg-6">
+
+            <div class="card my-2">
+              <div id="plugin_selection" class="card-body">
+                <h6 class="card-title">
+                  Senpy is a research project. If you use it in your research, please cite:
+                </h6>
             <pre>
 Senpy: A Pragmatic Linked Sentiment Analysis Framework.
 Sánchez-Rada, J. F., Iglesias, C. A., Corcuera, I., & Araque, Ó.
 In Data Science and Advanced Analytics (DSAA),
 2016 IEEE International Conference on (pp. 735-742). IEEE.
             </pre>
-            </p>
 
           </div>
-          <div class="col-lg-6 ">
-            <div class="panel panel-default">
-              <div class="panel-heading">
-                Available Plugins
-              </div>
-              <div class="panel-body"><ul id=availablePlugins></ul></div>
             </div>
             <a href="http://senpy.readthedocs.io">
-              <div class="panel panel-default">
-                <div class="panel-heading"><i class="fa fa-book"></i> If you are new to senpy, you might want to read senpy's documentation</div>
+              <div class="card">
+                <div class="card-body">
+                  <h6 class="card-title"><i class="fa fa-book"></i> If you are new to senpy, you might want to read senpy's documentation</h6>
+                </div>
               </div>
             </a>
             <a href="http://www.github.com/gsi-upm/senpy">
-              <div class="panel panel-default">
-                <div class="panel-heading"><i class="fa fa-sign-in"></i> Feel free to follow us on GitHub</div>
+              <div class="card my-2">
+                <div class="card-body">
+                  <h6 class="card-title"><i class="fa fa-sign-in"></i> If you like senpy, feel free star it on GitHub</h6>
+                </div>
               </div>
             </a>
           </div>
         </div>
       </div>
+      <div class="tab-pane" role="tabpanel" aria-labelledby="nav-plugins" id="plugins">
+
+        <div class="row">
+          <div class="col-lg-12">
+            <h5>The following plugins are available in this instance:</h5>
+            <div id="availablePlugins" class="card-columns"></div>
+          </div>
+        </div>
+      </div>
 
-      <div class="tab-pane active" id="test">
-        <div class="well">
-          <form id="form" class="container" onsubmit="return getPlugins();" accept-charset="utf-8">
-            <div><textarea id="input" class="boxsizingBorder" rows="5" name="i">This text makes me sad.
-whilst this text makes me happy and surprised at the same time.
-I cannot believe it!</textarea>
+      <div role="tabpanel" aria-labelledby="nav-test" class="tab-pane active" id="test">
+        <div class="card my-2">
+          <div class="card-body">
+            <form id="form" class="container" onsubmit="load_results();" accept-charset="utf-8">
+              <label>Enter the text you want to analyze or select one of the pre-defined examples:</label>
+              <div>
+                <select id="examples" name="examples" onchange="example_selected()">
+                  <option data-text="Who knew NLP and text preprocessing could be so easy with python? #DataScience #NLP">
+                    Regular Tweet</option>
+                  <option data-text="Russia attacked the 2016 election to help Trump. His campaign signaled Moscow it was fine with that, and Trump also lied about the attack and helped Putin get away with it. And most Republicans and conservatives don’t give a damn. This is sad and troubling.">
+                    Political Tweet</option>
+                  <option data-text='The bus was traveling from Florida to New York with 57 people aboard when it swerved "like a roller coaster" and tumbled "five or six times" off the left side of a Virginia interstate, killing two passengers and injuring others aboard.'>
+                    Excerpt from a news article</option>
+                </select>
+              </div>
+              <div><textarea id="input" class="boxsizingBorder" rows="5" name="i"></textarea>
             </div>
             <!-- PARAMETERS -->
-            <div class="panel-group" id="parameters">
-              <div class="panel panel-default">
-                <div class="panel-heading">
-                  <h4 class="panel-title">
+            <div id="parameters">
+              <div class="card my-2">
+                <div class="card-header">
+                  <h5>
                     Select the plugin.
-                  </h4>
+                  </h5>
                 </div>
-                <div id="plugin_selection" class="panel-collapse panel-body">
+                <div id="plugin_selection" class="card-body">
                   <span id="pipeline"></span>
-                  <select name="plugins" class="plugin" onchange="plugin_selected()">
+                  <select id="plugins-select" name="plugins" class="plugin" onchange="plugin_selected()">
                   </select>
                   <span onclick="add_plugin_pipeline()"><span class="btn"><i class="fa fa-plus" title="Add more plugins to the pipeline. Processing order is left to right. i.e. the results of the leftmost plugin will be used as input for the second leftmost, and so on."></i></span></span>
-                  <label class="help-block " id="plugdescription"></label>
+                  <span class="help-block " id="plugdescription"></span>
                 </div>
               </div>
-              <div class="panel panel-default">
-                <a data-toggle="collapse" class="deco-none collapsed" href="#basic_params">
-                  <div class="panel-heading">
-                    <h4 class="panel-title">
-                      <i class="fa fa-chevron-right pull-left expandicon"></i>
-                      <i class="fa fa-chevron-down pull-left collapseicon"></i>
-                      Basic API parameters
-                    </h4>
-                  </div>
-                </a>
-                <div id="basic_params" class="panel-collapse collapse panel-body">
-                </div>
-              </div>
-              <div class="panel panel-default">
-                <a data-toggle="collapse" class="deco-none" href="#extra_params">
-                  <div class="panel-heading">
-                    <h4 class="panel-title">
-                      <i class="fa fa-chevron-right pull-left expandicon"></i>
-                      <i class="fa fa-chevron-down pull-left collapseicon"></i>
+              <div class="card my-2">
+                <div class="card-header">
+                  <h5>
                     Plugin extra parameters
-                    </h4>
+                  </h5>
                   </div>
-                </a>
-                <div id="extra_params" class="panel-collapse collapse in panel-body">
+                <div id="extra_params" class="card-body">
+                </div>
+              </div>
+
+            </div>
+
+            <div class="card my-2">
+              <div class="card-body">
+                <button type="button" class="btn btn-primary" data-toggle="modal" data-target="#parametersModal">
+                  Change advanced parameters
+                </button>
+              </div>
+            </div>
+            <!-- MODAL -->
+            <div class="modal fade" id="parametersModal" tabindex="-1" role="dialog" aria-labelledby="modalTitle" aria-hidden="true">
+              <div class="modal-dialog modal-lg" role="document">
+                <div class="modal-content">
+                  <div class="modal-header">
+                    <h5 class="modal-title" id="modalTitle">Advanced API parameters</h5>
+                    <button type="button" class="close" data-dismiss="modal" aria-label="Close">
+                      <span aria-hidden="true">×</span>
+                    </button>
+                  </div>
+                  <div id="basic_params" class="modal-body">
+                  </div>
+                  <div class="modal-footer">
+                    <button type="button" class="btn btn-secondary" data-dismiss="modal">Close</button>
+                  </div>
                 </div>
               </div>
             </div>
+            <!-- END MODAL -->
             <!-- END PARAMETERS -->
 
-            <a id="preview" class="btn btn-lg btn-primary" onclick="load_JSON()">Analyse!</a>
+            <button id="preview" class="btn btn-lg btn-primary" onclick="load_results()">Analyse</button>
             <div id="loading-results" class="loading"></div>
             <!--<button id="visualise" name="type" type="button">Visualise!</button>-->
           </form>
         </div>
+      </div>
       <span id="input_request"></span>
       <div id="results-div">
-        <ul class="nav nav-tabs" role="tablist">
-          <li role="presentation" class="active"><a data-toggle="tab" class="active" href="#viewer">Viewer</a></li>
-          <li role="presentation"><a data-toggle="tab" class="active" href="#raw">Raw</a></li>
+        <ul class="nav nav-pills nav-header-pills" role="tablist">
+          <li role="presentation" class="nav-item active"><a data-toggle="pill" class="active nav-link" href="#json-ld" onclick='load_results("json-ld")'>JSON-LD</a></li>
+          <li role="presentation nav-item" ><a class="nav-link" data-toggle="pill" href="#turtle" onclick='load_results("turtle")'>Turtle</a></li>
+          <li role="presentation nav-item" ><a class="nav-link" data-toggle="pill" href="#ntriples" onclick='load_results("ntriples")'>N-Triples</a></li>
+          <li role="presentation nav-item" ><a class="nav-link" data-toggle="pill" href="#graph" onclick='load_graph()'>Graph</a></li>
         </ul>
         <div class="tab-content" id="results-container">
 
-          <div class="tab-pane active" id="viewer">
-            <div id="content">
-              <pre id="results" class="results"></pre>
+          <div role="tabpanel" aria-labelledby="" class="tab-pane active" id="json-ld">
+            <div>
+              <pre id="results-json-ld" class="results"></pre>
             </div>
           </div>
 
-          <div class="tab-pane" id="raw">
-            <div id="content">
-              <pre id="jsonraw" class="results"></pre>
+          <div class="tab-pane" role="tabpanel" aria-labelledby="" id="turtle">
            <div>
              <pre id="results-turtle" class="results"></pre>
            </div>
          </div>
+
+          <div class="tab-pane" role="tabpanel" aria-labelledby="" id="ntriples">
+            <div>
+              <pre id="results-ntriples" class="results"></pre>
+            </div>
+          </div>
+          <div class="tab-pane" role="tabpanel" aria-labelledby="" id="graph">
+            <svg id='svg-body'></svg>
+          </div>
        </div>
      </div>
    </div>
 
     {% if evaluation %}
 
-    <div class="tab-pane" id="evaluate">
-      <div class="well">
-        <form id="form" class="container" onsubmit="return getPlugins();" accept-charset="utf-8">
+    <div class="tab-pane" role="tabpanel" aria-labelledby="nav-evaluate" id="evaluate">
+      <div class="card my-2">
+        <div class="card-body">
+
+          <form id="form" class="container" onsubmit="" accept-charset="utf-8">
           <div>
+            <p>Automatically evaluate the classification performance of your plugin in several public datasets, and compare it with other plugins.</p>
+            <p>The datasets will be automatically downloaded if they are not already available locally. Depending on the size of the dataset and the speed of the plugin, the evaluation may take a long time.</p>
             <label>Select the plugin:</label>
             <select id="plugins-eval" name="plugins-eval" class=plugin onchange="draw_extra_parameters()">
             </select>
@@ -193,31 +248,31 @@ I cannot believe it!</textarea>
             </select>
           </div>
 
-          <a id="doevaluate" class="btn btn-lg btn-primary" onclick="evaluate_JSON()">Evaluate Plugin!</a>
+          <button id="doevaluate" class="btn btn-lg btn-primary" onclick="evaluate_JSON()">Evaluate Plugin</button>
           <!--<button id="visualise" name="type" type="button">Visualise!</button>-->
+          </div>
         </form>
       </div>
+      <div id="loading-results" class="loading"></div>
       <span id="input_request_eval"></span>
       <div id="evaluate-div">
-        <ul class="nav nav-tabs" role="tablist">
-          <li role="presentation" class="active"><a data-toggle="tab" class="active" href="#evaluate-viewer">Viewer</a></li>
-          <li role="presentation"><a data-toggle="tab" class="active" href="#evaluate-raw">Raw</a></li>
-          <li role="presentation"><a data-toggle="tab" class="active" href="#evaluate-table">Table</a></li>
+        <ul class="nav nav-pills" role="tablist">
+          <li role="presentation nav-item" ><a class="nav-link" data-toggle="pill" href="#evaluate-raw" onclick=''>Raw</a></li>
+          <li role="presentation nav-item" ><a class="nav-link" data-toggle="pill" href="#evaluate-table" onclick=''>Table</a></li>
         </ul>
         <div class="tab-content" id="evaluate-container">
-          <div class="tab-pane active" id="evaluate-viewer">
-            <div id="content">
+          <div role="tabpanel" aria-labelledby="" class="tab-pane active" id="evaluate-viewer">
+            <div>
               <pre id="results_eval" class="results_eval"></pre>
             </div>
           </div>
-          <div class="tab-pane" id="evaluate-raw">
-            <div id="content">
+          <div class="tab-pane" role="tabpanel" aria-labelledby="" id="evaluate-raw">
+            <div>
               <pre id="jsonraw_eval" class="results_eval"></pre>
            </div>
          </div>
-          <div class="tab-pane" id="evaluate-table">
+          <div class="tab-pane" role="tabpanel" aria-labelledby="" id="evaluate-table">
            <table id="eval_table" class="table table-condensed">
              <thead>
                <tr>
@@ -241,12 +296,11 @@ I cannot believe it!</textarea>
           </div>
         </div>
     {% endif %}
-    </div>
 
-    <a href="http://www.gsi.dit.upm.es" target="_blank"><img class="center-block" src="static/img/gsi.png"/> </a>
 
   </div>
 
   </div>
 
   <div id="footer">
     <div class="container">
       <div id="site-info">
@@ -259,6 +313,7 @@ I cannot believe it!</textarea>
 
     </div>
   </div>
+  </div>
 </body>
 <link href='http://fonts.googleapis.com/css?family=Architects+Daughter' rel='stylesheet' type='text/css'>
 </html>
@@ -77,6 +77,7 @@ def easy_test(plugin_list=None, debug=True):
         logger.info('Loading classes from {}'.format(__main__))
         from . import plugins
         plugin_list = plugins.from_module(__main__)
+    plugin_list = list(plugin_list)
     for plug in plugin_list:
         plug.test()
         plug.log.info('My tests passed!')
@@ -87,7 +88,7 @@ def easy_test(plugin_list=None, debug=True):
             pdb.post_mortem()
 
 
-def easy(host='0.0.0.0', port=5000, debug=True, **kwargs):
+def easy(host='0.0.0.0', port=5000, debug=False, **kwargs):
     '''
     Run a server with a specific plugin.
     '''
Some files were not shown because too many files have changed in this diff.