merge community plugins

master
J. Fernando Sánchez 7 months ago
parent e1d888ebd6
commit 5b28b6d1b4

@ -1,65 +0,0 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
# C extensions
*.so
# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
# Translations
*.mo
*.pot
# Django stuff:
*.log
# Sphinx documentation
docs/_build/
# PyBuilder
target/
.*
*.pyc
**/__pycache__
*/wordnet1.6
*/Corpus
*/a-hierarchy.xml
*/a-synsets.xml
*/wn16.txt

@ -1,51 +0,0 @@
# Uncomment if you want to use docker-in-docker
# image: gsiupm/dockermake:latest
# services:
# - docker:dind
# When using dind, it's wise to use the overlayfs driver for
# improved performance.
variables:
GIT_SUBMODULE_STRATEGY: recursive
stages:
- build
- test
- push
- deploy
- clean
before_script:
- make -e login
build:
stage: build
script:
- make -e docker-build
only:
- master
- fix-makefiles
test:
stage: test
script:
- make -e test
push:
stage: push
script:
- make -e docker-push
deploy:
stage: deploy
script:
- make -e deploy
only:
- master
- fix-makefiles
clean :
stage: clean
script:
- make -e clean
when: manual

@ -1,3 +0,0 @@
[submodule "data"]
path = data
url = ../data

@ -1,27 +0,0 @@
These makefiles are recipes for several common tasks in different types of projects.
To add them to your project, simply do:
```
git remote add makefiles ssh://git@lab.cluster.gsi.dit.upm.es:2200/docs/templates/makefiles.git
git subtree add --prefix=.makefiles/ makefiles master
touch Makefile
echo "include .makefiles/base.mk" >> Makefile
```
Now you can take advantage of the recipes.
For instance, to add useful targets for a python project, just add this to your Makefile:
```
include .makefiles/python.mk
```
You may need to set special variables like the name of your project or the python versions you're targeting.
Take a look at each specific `.mk` file for more information, and the `Makefile` in the [senpy](https://lab.cluster.gsi.dit.upm.es/senpy/senpy) project for a real use case.
If you update the makefiles from your repository, make sure to push the changes for review in upstream (this repository):
```
make makefiles-push
```
It will automatically commit all unstaged changes in the .makefiles folder.

@ -1,36 +0,0 @@
export
NAME ?= $(shell basename $(CURDIR))
VERSION ?= $(shell git describe --tags --dirty 2>/dev/null)
ifeq ($(VERSION),)
VERSION:=unknown
endif
# Get the location of this makefile.
MK_DIR := $(dir $(abspath $(lastword $(MAKEFILE_LIST))))
-include .env
-include ../.env
help: ## Show this help.
@fgrep -h "##" $(MAKEFILE_LIST) | fgrep -v fgrep | sed -e 's/\\$$//' | sed -e 's/\(.*:\)[^#]*##\s*\(.*\)/\1\t\2/' | column -t -s " "
config: ## Load config from the environment. You should run it once in every session before other tasks. Run: eval $(make config)
@awk '{ print "export " $$0}' ../.env
@awk '{ print "export " $$0}' .env
@echo "# Please, run: "
@echo "# eval \$$(make config)"
# If you need to run a command on the key/value pairs, use this:
# @awk '{ split($$0, a, "="); "echo " a[2] " | base64 -w 0" |& getline b64; print "export " a[1] "=" a[2]; print "export " a[1] "_BASE64=" b64}' .env
ci: ## Run a task using gitlab-runner. Only use to debug problems in the CI pipeline
gitlab-runner exec shell --builds-dir '.builds' --env CI_PROJECT_NAME=$(NAME) ${action}
include $(MK_DIR)/makefiles.mk
include $(MK_DIR)/docker.mk
include $(MK_DIR)/git.mk
info:: ## List all variables
env
.PHONY:: config help ci

@ -1,51 +0,0 @@
ifndef IMAGENAME
ifdef CI_REGISTRY_IMAGE
IMAGENAME=$(CI_REGISTRY_IMAGE)
else
IMAGENAME=$(NAME)
endif
endif
IMAGEWTAG?=$(IMAGENAME):$(VERSION)
DOCKER_FLAGS?=-ti
DOCKER_CMD?=
docker-login: ## Log in to the registry. It will only be used in the server, or when running a CI task locally (if CI_BUILD_TOKEN is set).
ifeq ($(CI_BUILD_TOKEN),)
@echo "Not logging in to the docker registry" "$(CI_REGISTRY)"
else
@docker login -u gitlab-ci-token -p $(CI_BUILD_TOKEN) $(CI_REGISTRY)
endif
ifeq ($(HUB_USER),)
@echo "Not logging in to global the docker registry"
else
@docker login -u $(HUB_USER) -p $(HUB_PASSWORD)
endif
docker-clean: ## Remove docker credentials
ifeq ($(HUB_USER),)
else
@docker logout
endif
docker-run: ## Run a generic docker image
docker run $(DOCKER_FLAGS) $(IMAGEWTAG) $(DOCKER_CMD)
docker-build: ## Build a generic docker image
docker build . -t $(IMAGEWTAG)
docker-push: docker-build docker-login ## Push a generic docker image
docker push $(IMAGEWTAG)
docker-latest-push: docker-build ## Push the latest image
docker tag $(IMAGEWTAG) $(IMAGENAME)
docker push $(IMAGENAME)
login:: docker-login
clean:: docker-clean
docker-info:
@echo IMAGEWTAG=${IMAGEWTAG}
.PHONY:: docker-login docker-clean login clean

@ -1,28 +0,0 @@
commit:
git commit -a
tag:
git tag ${VERSION}
git-push::
git push --tags -u origin HEAD
git-pull:
git pull --all
push-github: ## Push the code to github. You need to set up GITHUB_DEPLOY_KEY
ifeq ($(GITHUB_DEPLOY_KEY),)
else
$(eval KEY_FILE := "$(shell mktemp)")
@printf '%b' '$(GITHUB_DEPLOY_KEY)' > $(KEY_FILE)
@git remote rm github-deploy || true
git remote add github-deploy $(GITHUB_REPO)
-@GIT_SSH_COMMAND="ssh -i $(KEY_FILE)" git fetch github-deploy $(CI_COMMIT_REF_NAME)
@GIT_SSH_COMMAND="ssh -i $(KEY_FILE)" git push github-deploy HEAD:$(CI_COMMIT_REF_NAME)
rm $(KEY_FILE)
endif
push:: git-push
pull:: git-pull
.PHONY:: commit tag push git-push git-pull push-github

@ -1,51 +0,0 @@
# Deployment with Kubernetes
# KUBE_CA_PEM_FILE is the path of a certificate file. It is automatically set by GitLab
# if you enable Kubernetes integration in a project.
#
# As of this writing, Kubernetes integration cannot be set at the group level, so it has to
# be manually set in every project.
# Alternatively, we use a custom KUBE_CA_BUNDLE environment variable, which can be set at
# the group level. In this case, the variable contains the whole content of the certificate,
# which we dump to a temporary file
#
# Check if the KUBE_CA_PEM_FILE exists. Otherwise, create it from KUBE_CA_BUNDLE
KUBE_CA_TEMP=false
ifndef KUBE_CA_PEM_FILE
KUBE_CA_PEM_FILE:=$$PWD/.ca.crt
CREATED:=$(shell printf '%b\n' '$(KUBE_CA_BUNDLE)' > $(KUBE_CA_PEM_FILE))
endif
KUBE_TOKEN?=""
KUBE_NAMESPACE?=$(NAME)
KUBECTL=docker run --rm -v $(KUBE_CA_PEM_FILE):/tmp/ca.pem -i lachlanevenson/k8s-kubectl --server="$(KUBE_URL)" --token="$(KUBE_TOKEN)" --certificate-authority="/tmp/ca.pem" -n $(KUBE_NAMESPACE)
CI_COMMIT_REF_NAME?=master
info:: ## Print variables. Useful for debugging.
@echo "#KUBERNETES"
@echo KUBE_URL=$(KUBE_URL)
@echo KUBE_CA_PEM_FILE=$(KUBE_CA_PEM_FILE)
@echo KUBE_CA_BUNDLE=$$KUBE_CA_BUNDLE
@echo KUBE_TOKEN=$(KUBE_TOKEN)
@echo KUBE_NAMESPACE=$(KUBE_NAMESPACE)
@echo KUBECTL=$(KUBECTL)
@echo "#CI"
@echo CI_PROJECT_NAME=$(CI_PROJECT_NAME)
@echo CI_REGISTRY=$(CI_REGISTRY)
@echo CI_REGISTRY_USER=$(CI_REGISTRY_USER)
@echo CI_COMMIT_REF_NAME=$(CI_COMMIT_REF_NAME)
@echo "CREATED=$(CREATED)"
#
# Deployment and advanced features
#
deploy: ## Deploy to kubernetes using the credentials in KUBE_CA_PEM_FILE (or KUBE_CA_BUNDLE ) and TOKEN
@ls k8s/*.yaml k8s/*.yml k8s/*.tmpl 2>/dev/null || true
@cat k8s/*.yaml k8s/*.yml k8s/*.tmpl 2>/dev/null | envsubst | $(KUBECTL) apply -f -
deploy-check: ## Get the deployed configuration.
@$(KUBECTL) get deploy,pods,svc,ingress
.PHONY:: info deploy deploy-check

@ -1,17 +0,0 @@
makefiles-remote:
@git remote add makefiles ssh://git@lab.cluster.gsi.dit.upm.es:2200/docs/templates/makefiles.git 2>/dev/null || true
makefiles-commit: makefiles-remote
git add -f .makefiles
git commit -em "Updated makefiles from ${NAME}"
makefiles-push:
git subtree push --prefix=.makefiles/ makefiles $(NAME)
makefiles-pull: makefiles-remote
git subtree pull --prefix=.makefiles/ makefiles master --squash
pull:: makefiles-pull
push:: makefiles-push
.PHONY:: makefiles-remote makefiles-commit makefiles-push makefiles-pull pull push

@ -1,5 +0,0 @@
init: ## Init pre-commit hooks (i.e. enforcing format checking before allowing a commit)
pip install --user pre-commit
pre-commit install
.PHONY:: init

@ -1,100 +0,0 @@
PYVERSIONS ?= 3.5
PYMAIN ?= $(firstword $(PYVERSIONS))
TARNAME ?= $(NAME)-$(VERSION).tar.gz
VERSIONFILE ?= $(NAME)/VERSION
DEVPORT ?= 6000
.FORCE:
version: .FORCE
@echo $(VERSION) > $(VERSIONFILE)
@echo $(VERSION)
yapf: ## Format python code
yapf -i -r $(NAME)
yapf -i -r tests
dockerfiles: $(addprefix Dockerfile-,$(PYVERSIONS)) ## Generate dockerfiles for each python version
@unlink Dockerfile >/dev/null
ln -s Dockerfile-$(PYMAIN) Dockerfile
Dockerfile-%: Dockerfile.template ## Generate a specific dockerfile (e.g. Dockerfile-2.7)
sed "s/{{PYVERSION}}/$*/" Dockerfile.template > Dockerfile-$*
quick_build: $(addprefix build-, $(PYMAIN))
build: $(addprefix build-, $(PYVERSIONS)) ## Build all images / python versions
build-%: version Dockerfile-% ## Build a specific version (e.g. build-2.7)
docker build -t '$(IMAGEWTAG)-python$*' --cache-from $(IMAGENAME):python$* -f Dockerfile-$* .;
dev-%: ## Launch a specific development environment using docker (e.g. dev-2.7)
@docker start $(NAME)-dev$* || (\
$(MAKE) build-$*; \
docker run -d -w /usr/src/app/ -p $(DEVPORT):5000 -v $$PWD:/usr/src/app --entrypoint=/bin/bash -ti --name $(NAME)-dev$* '$(IMAGEWTAG)-python$*'; \
)\
docker exec -ti $(NAME)-dev$* bash
dev: dev-$(PYMAIN) ## Launch a development environment using docker, using the default python version
quick_test: test-$(PYMAIN)
test-%: ## Run setup.py in an isolated container, built from the base image. (e.g. test-2.7)
# This speeds tests up because the image has most (if not all) of the dependencies already.
docker rm $(NAME)-test-$* || true
docker create -ti --name $(NAME)-test-$* --entrypoint="" -w /usr/src/app/ $(IMAGENAME):python$* python setup.py test
docker cp . $(NAME)-test-$*:/usr/src/app
docker start -a $(NAME)-test-$*
test: $(addprefix test-,$(PYVERSIONS)) ## Run the tests with the main python version
run-%: build-%
docker run --rm -p $(DEVPORT):5000 -ti '$(IMAGEWTAG)-python$(PYMAIN)' --default-plugins
run: run-$(PYMAIN)
# PyPI - Upload a package
dist/$(TARNAME): version
python setup.py sdist;
sdist: dist/$(TARNAME) ## Generate the distribution file (source tarball)
pip_test-%: sdist ## Test the distribution file using pip install and a specific python version (e.g. pip_test-2.7)
docker run --rm -v $$PWD/dist:/dist/ python:$* pip install /dist/$(TARNAME);
pip_test: $(addprefix pip_test-,$(PYVERSIONS)) ## Test pip installation with the main python version
pip_upload: pip_test ## Upload package to pip
python setup.py sdist upload ;
# Pushing to docker
push-latest: $(addprefix push-latest-,$(PYVERSIONS)) ## Push the "latest" tag to dockerhub
docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGEWTAG)'
docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGENAME)'
docker push '$(IMAGENAME):latest'
docker push '$(IMAGEWTAG)'
push-latest-%: build-% ## Push the latest image for a specific python version
docker tag $(IMAGENAME):$(VERSION)-python$* $(IMAGENAME):python$*
docker push $(IMAGENAME):$(VERSION)-python$*
docker push $(IMAGENAME):python$*
push-%: build-% ## Push the image of the current version (tagged). e.g. push-2.7
docker push $(IMAGENAME):$(VERSION)-python$*
push:: $(addprefix push-,$(PYVERSIONS)) ## Push an image with the current version for every python version
docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGEWTAG)'
docker push $(IMAGENAME):$(VERSION)
clean:: ## Clean older docker images and containers related to this project and dev environments
@docker stop $(addprefix $(NAME)-dev,$(PYVERSIONS)) 2>/dev/null || true
@docker rm $(addprefix $(NAME)-dev,$(PYVERSIONS)) 2>/dev/null || true
@docker ps -a | grep $(IMAGENAME) | awk '{ split($$2, vers, "-"); if(vers[0] != "${VERSION}"){ print $$1;}}' | xargs docker rm -v 2>/dev/null|| true
@docker images | grep $(IMAGENAME) | awk '{ split($$2, vers, "-"); if(vers[0] != "${VERSION}"){ print $$1":"$$2;}}' | xargs docker rmi 2>/dev/null|| true
.PHONY:: yapf dockerfiles Dockerfile-% quick_build build build-% dev-% quick-dev test quick_test push-latest push-latest-% push-% push version .FORCE

@ -1,12 +0,0 @@
language: python
python:
- "2.7"
- "3.4"
env:
- PLUGIN=example-plugin
- PLUGIN=sentiText
install:
- "pip install senpy pytest"
- "python -m senpy --only-install -f $PLUGIN"
script:
- "py.test $PLUGIN"

@ -1 +0,0 @@
FROM gsiupm/senpy:1.0.1-python3.6

@ -1,202 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

@ -1,30 +0,0 @@
PYVERSION=3.7
NAME=senpycommunity
REPO=gsiupm
PLUGINS= $(filter %/, $(wildcard */))
IMAGENAME=gsiupm/senpy-plugins-community
DOCKER_FLAGS=-e MOCK_REQUESTS=$(MOCK_REQUESTS)
DEV_PORT?=5000
ifdef SENPY_FOLDER
DOCKER_FLAGS+= -v $(realpath $(SENPY_FOLDER)):/usr/src/app/
endif
all: build run
test-fast-%: docker-build
docker run $(DOCKER_FLAGS) -v $$PWD/$*:/senpy-plugins/ -v $$PWD/data:/data/ --rm $(IMAGEWTAG) --only-test $(TEST_FLAGS)
test-fast: test-fast-/
test: docker-build
docker run $(DOCKER_FLAGS) -v $$PWD/data:/data/ --rm $(IMAGEWTAG) --only-test $(TEST_FLAGS)
dev: docker-build
docker run -p $(DEV_PORT):5000 $(DOCKER_FLAGS) -ti -v $$PWD/$*:/senpy-plugins/ --entrypoint /bin/bash -v $$PWD/data:/data/ --rm $(IMAGEWTAG)
.PHONY:: test test-fast dev
include .makefiles/base.mk
include .makefiles/k8s.mk

@ -1,76 +0,0 @@
# Senpy Plugins
# Requirements
Some of these plugins require licensed files to run, such as lexicons or corpora.
You can **manually download these resources and add them to the `data` folder**.
Most plugins will look for these resources on activation.
By default, we set the flag `--allow-fail` in senpy, so if a plugin fails to activate, the server will still run with the remaining plugins.
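Once the server is running (see the next section), you can check which plugins actually activated despite any failures; a minimal sketch, assuming senpy exposes the plugin listing at `/api/plugins`:
```
# Minimal sketch: ask a running senpy server which plugins it loaded.
# The /api/plugins endpoint and the response layout are assumptions here;
# adjust them to whatever your senpy version actually exposes.
import requests

resp = requests.get("http://localhost:5000/api/plugins")
resp.raise_for_status()
print(resp.json())  # inspect which plugins activated and which failed
```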
# Running
## Using docker
To deploy all the plugins in this repository, run:
```
docker-compose up
```
A server should now be available at `http://localhost:5000`.
Alternatively, you can run docker manually with whichever version of senpy you prefer:
```
docker run --rm -ti -p 5000:5000 -v $PWD:/senpy-plugins gsiupm/senpy:0.10.8-python2.7
```
Note that some versions are untested.
## Manually
First, install senpy from source or through pip:
```
pip install senpy
```
Now, you can try to run your plugins:
```
senpy -f .
```
Each plugin has different requirements.
Senpy will try its best to automatically install requirements (python libraries and NLTK resources) for each plugin.
Some cases may require manual installation of dependencies, or external packages.
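For instance, if a plugin needs an NLTK resource that was not fetched automatically, you can usually install it by hand; a minimal sketch (the resource names below are only illustrative, check each plugin's own requirements):
```
# Minimal sketch: fetch NLTK resources by hand when automatic installation fails.
# The resource names are illustrative examples, not the exact requirements of any
# plugin in this repository.
import nltk

for resource in ("wordnet", "stopwords"):
    nltk.download(resource)
```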
# For developers / Contributors
## Licensed data
In our deployments, we keep all licensed data in a private submodule.
You will likely need to initialize this submodule if you're a contributor:
```
git submodule update --init --recursive
```
## Adding a plugin from a separate repository
To add a plugin that has been developed in its own repository, you can use git-subtree as follows:
```
mname=<your plugin name>
murl=<URL to your repository>
git remote add $mname $murl
git subtree add --prefix=$mname $mname master
```
Make sure to also add
# LICENSE
This compilation of plugins for Senpy uses the Apache 2.0 License. Some of the resources used to train these plugins cannot be distributed; specifically, the resources for the `emotion-anew` and `emotion-wnaffect` plugins. For more information, visit the [Senpy documentation](senpy.readthedocs.io).

@ -1 +0,0 @@
Subproject commit f34155c3891b8919a1d73bae410cb5113f1bf959

@ -1,12 +0,0 @@
version: '3'
services:
community:
build: .
image: "${IMAGEWTAG:-gsi-upm/senpy-community:dev}"
volumes:
- ".:/senpy-plugins/"
# - "./data:/data"
ports:
- '5000:5000'
command:
- "--allow-fail"

@ -1,2 +0,0 @@
__pycache__
*.pyc

@ -1,67 +0,0 @@
# Uncomment if you want to use docker-in-docker
# image: gsiupm/dockermake:latest
# services:
# - docker:dind
# When using dind, it's wise to use the overlayfs driver for
# improved performance.
stages:
- test
- push
- deploy
- clean
before_script:
- make -e login
.test: &test_definition
stage: test
script:
- make -e test-$PYTHON_VERSION
test-3.5:
<<: *test_definition
variables:
PYTHON_VERSION: "3.5"
.image: &image_definition
stage: push
script:
- make -e push-$PYTHON_VERSION
only:
- tags
- triggers
push-3.5:
<<: *image_definition
variables:
PYTHON_VERSION: "3.5"
push-latest:
<<: *image_definition
variables:
PYTHON_VERSION: latest
only:
- tags
- triggers
deploy:
stage: deploy
environment: production
script:
- make -e deploy
only:
- tags
- triggers
clean :
stage: clean
script:
- make -e clean
when: manual
cleanup_py:
stage: clean
when: always # this is important; run even if preceding stages failed.
script:
- docker logout

@ -1,27 +0,0 @@
These makefiles are recipes for several common tasks in different types of projects.
To add them to your project, simply do:
```
git remote add makefiles ssh://git@lab.cluster.gsi.dit.upm.es:2200/docs/templates/makefiles.git
git subtree add --prefix=.makefiles/ makefiles master
touch Makefile
echo "include .makefiles/base.mk" >> Makefile
```
Now you can take advantage of the recipes.
For instance, to add useful targets for a python project, just add this to your Makefile:
```
include .makefiles/python.mk
```
You may need to set special variables like the name of your project or the python versions you're targeting.
Take a look at each specific `.mk` file for more information, and the `Makefile` in the [senpy](https://lab.cluster.gsi.dit.upm.es/senpy/senpy) project for a real use case.
If you update the makefiles from your repository, make sure to push the changes for review in upstream (this repository):
```
make makefiles-push
```
It will automatically commit all unstaged changes in the .makefiles folder.

@ -1,36 +0,0 @@
export
NAME ?= $(shell basename $(CURDIR))
VERSION ?= $(shell git describe --tags --dirty 2>/dev/null)
ifeq ($(VERSION),)
VERSION:=unknown
endif
# Get the location of this makefile.
MK_DIR := $(dir $(abspath $(lastword $(MAKEFILE_LIST))))
-include .env
-include ../.env
help: ## Show this help.
@fgrep -h "##" $(MAKEFILE_LIST) | fgrep -v fgrep | sed -e 's/\\$$//' | sed -e 's/\(.*:\)[^#]*##\s*\(.*\)/\1\t\2/' | column -t -s " "
config: ## Load config from the environment. You should run it once in every session before other tasks. Run: eval $(make config)
@awk '{ print "export " $$0}' ../.env
@awk '{ print "export " $$0}' .env
@echo "# Please, run: "
@echo "# eval \$$(make config)"
# If you need to run a command on the key/value pairs, use this:
# @awk '{ split($$0, a, "="); "echo " a[2] " | base64 -w 0" |& getline b64; print "export " a[1] "=" a[2]; print "export " a[1] "_BASE64=" b64}' .env
ci: ## Run a task using gitlab-runner. Only use to debug problems in the CI pipeline
gitlab-runner exec shell --builds-dir '.builds' --env CI_PROJECT_NAME=$(NAME) ${action}
include $(MK_DIR)/makefiles.mk
include $(MK_DIR)/docker.mk
include $(MK_DIR)/git.mk
info:: ## List all variables
env
.PHONY:: config help ci

@ -1,29 +0,0 @@
IMAGENAME?=$(NAME)
IMAGEWTAG?=$(IMAGENAME):$(VERSION)
docker-login: ## Log in to the registry. It will only be used in the server, or when running a CI task locally (if CI_BUILD_TOKEN is set).
ifeq ($(CI_BUILD_TOKEN),)
@echo "Not logging in to the docker registry" "$(CI_REGISTRY)"
else
@docker login -u gitlab-ci-token -p $(CI_BUILD_TOKEN) $(CI_REGISTRY)
endif
ifeq ($(HUB_USER),)
@echo "Not logging in to global the docker registry"
else
@docker login -u $(HUB_USER) -p $(HUB_PASSWORD)
endif
docker-clean: ## Remove docker credentials
ifeq ($(HUB_USER),)
else
@docker logout
endif
login:: docker-login
clean:: docker-clean
docker-info:
@echo IMAGEWTAG=${IMAGEWTAG}
.PHONY:: docker-login docker-clean login clean

@ -1,28 +0,0 @@
commit:
git commit -a
tag:
git tag ${VERSION}
git-push::
git push --tags -u origin HEAD
git-pull:
git pull --all
push-github: ## Push the code to github. You need to set up GITHUB_DEPLOY_KEY
ifeq ($(GITHUB_DEPLOY_KEY),)
else
$(eval KEY_FILE := "$(shell mktemp)")
@echo "$(GITHUB_DEPLOY_KEY)" > $(KEY_FILE)
@git remote rm github-deploy || true
git remote add github-deploy $(GITHUB_REPO)
-@GIT_SSH_COMMAND="ssh -i $(KEY_FILE)" git fetch github-deploy $(CI_COMMIT_REF_NAME)
@GIT_SSH_COMMAND="ssh -i $(KEY_FILE)" git push github-deploy HEAD:$(CI_COMMIT_REF_NAME)
rm $(KEY_FILE)
endif
push:: git-push
pull:: git-pull
.PHONY:: commit tag push git-push git-pull push-github

@ -1,51 +0,0 @@
# Deployment with Kubernetes
# KUBE_CA_PEM_FILE is the path of a certificate file. It is automatically set by GitLab
# if you enable Kubernetes integration in a project.
#
# As of this writing, Kubernetes integration cannot be set at the group level, so it has to
# be manually set in every project.
# Alternatively, we use a custom KUBE_CA_BUNDLE environment variable, which can be set at
# the group level. In this case, the variable contains the whole content of the certificate,
# which we dump to a temporary file
#
# Check if the KUBE_CA_PEM_FILE exists. Otherwise, create it from KUBE_CA_BUNDLE
KUBE_CA_TEMP=false
ifndef KUBE_CA_PEM_FILE
KUBE_CA_PEM_FILE:=$$PWD/.ca.crt
CREATED:=$(shell echo -e "$(KUBE_CA_BUNDLE)" > $(KUBE_CA_PEM_FILE))
endif
KUBE_TOKEN?=""
KUBE_NAMESPACE?=$(NAME)
KUBECTL=docker run --rm -v $(KUBE_CA_PEM_FILE):/tmp/ca.pem -i lachlanevenson/k8s-kubectl --server="$(KUBE_URL)" --token="$(KUBE_TOKEN)" --certificate-authority="/tmp/ca.pem" -n $(KUBE_NAMESPACE)
CI_COMMIT_REF_NAME?=master
info:: ## Print variables. Useful for debugging.
@echo "#KUBERNETES"
@echo KUBE_URL=$(KUBE_URL)
@echo KUBE_CA_PEM_FILE=$(KUBE_CA_PEM_FILE)
@echo KUBE_CA_BUNDLE=$$KUBE_CA_BUNDLE
@echo KUBE_TOKEN=$(KUBE_TOKEN)
@echo KUBE_NAMESPACE=$(KUBE_NAMESPACE)
@echo KUBECTL=$(KUBECTL)
@echo "#CI"
@echo CI_PROJECT_NAME=$(CI_PROJECT_NAME)
@echo CI_REGISTRY=$(CI_REGISTRY)
@echo CI_REGISTRY_USER=$(CI_REGISTRY_USER)
@echo CI_COMMIT_REF_NAME=$(CI_COMMIT_REF_NAME)
@echo "CREATED=$(CREATED)"
#
# Deployment and advanced features
#
deploy: ## Deploy to kubernetes using the credentials in KUBE_CA_PEM_FILE (or KUBE_CA_BUNDLE ) and TOKEN
@ls k8s/*.yaml k8s/*.yml k8s/*.tmpl 2>/dev/null || true
@cat k8s/*.yaml k8s/*.yml k8s/*.tmpl 2>/dev/null | envsubst | $(KUBECTL) apply -f -
deploy-check: ## Get the deployed configuration.
@$(KUBECTL) get deploy,pods,svc,ingress
.PHONY:: info deploy deploy-check

@ -1,17 +0,0 @@
makefiles-remote:
@git remote add makefiles ssh://git@lab.cluster.gsi.dit.upm.es:2200/docs/templates/makefiles.git 2>/dev/null || true
makefiles-commit: makefiles-remote
git add -f .makefiles
git commit -em "Updated makefiles from ${NAME}"
makefiles-push:
git subtree push --prefix=.makefiles/ makefiles $(NAME)
makefiles-pull: makefiles-remote
git subtree pull --prefix=.makefiles/ makefiles master --squash
pull:: makefiles-pull
push:: makefiles-push
.PHONY:: makefiles-remote makefiles-commit makefiles-push makefiles-pull pull push

@ -1,5 +0,0 @@
init: ## Init pre-commit hooks (i.e. enforcing format checking before allowing a commit)
pip install --user pre-commit
pre-commit install
.PHONY:: init

@ -1,100 +0,0 @@
PYVERSIONS ?= 3.5
PYMAIN ?= $(firstword $(PYVERSIONS))
TARNAME ?= $(NAME)-$(VERSION).tar.gz
VERSIONFILE ?= $(NAME)/VERSION
DEVPORT ?= 6000
.FORCE:
version: .FORCE
@echo $(VERSION) > $(VERSIONFILE)
@echo $(VERSION)
yapf: ## Format python code
yapf -i -r $(NAME)
yapf -i -r tests
dockerfiles: $(addprefix Dockerfile-,$(PYVERSIONS)) ## Generate dockerfiles for each python version
@unlink Dockerfile >/dev/null
ln -s Dockerfile-$(PYMAIN) Dockerfile
Dockerfile-%: Dockerfile.template ## Generate a specific dockerfile (e.g. Dockerfile-2.7)
sed "s/{{PYVERSION}}/$*/" Dockerfile.template > Dockerfile-$*
quick_build: $(addprefix build-, $(PYMAIN))
build: $(addprefix build-, $(PYVERSIONS)) ## Build all images / python versions
build-%: version Dockerfile-% ## Build a specific version (e.g. build-2.7)
docker build -t '$(IMAGEWTAG)-python$*' --cache-from $(IMAGENAME):python$* -f Dockerfile-$* .;
dev-%: ## Launch a specific development environment using docker (e.g. dev-2.7)
@docker start $(NAME)-dev$* || (\
$(MAKE) build-$*; \
docker run -d -w /usr/src/app/ -p $(DEVPORT):5000 -v $$PWD:/usr/src/app --entrypoint=/bin/bash -ti --name $(NAME)-dev$* '$(IMAGEWTAG)-python$*'; \
)\
docker exec -ti $(NAME)-dev$* bash
dev: dev-$(PYMAIN) ## Launch a development environment using docker, using the default python version
quick_test: test-$(PYMAIN)
test-%: ## Run setup.py in an isolated container, built from the base image. (e.g. test-2.7)
# This speeds tests up because the image has most (if not all) of the dependencies already.
docker rm $(NAME)-test-$* || true
docker create -ti --name $(NAME)-test-$* --entrypoint="" -w /usr/src/app/ $(IMAGENAME):python$* python setup.py test
docker cp . $(NAME)-test-$*:/usr/src/app
docker start -a $(NAME)-test-$*
test: $(addprefix test-,$(PYVERSIONS)) ## Run the tests with the main python version
run-%: build-%
docker run --rm -p $(DEVPORT):5000 -ti '$(IMAGEWTAG)-python$(PYMAIN)' --default-plugins
run: run-$(PYMAIN)
# PyPI - Upload a package
dist/$(TARNAME): version
python setup.py sdist;
sdist: dist/$(TARNAME) ## Generate the distribution file (source tarball)
pip_test-%: sdist ## Test the distribution file using pip install and a specific python version (e.g. pip_test-2.7)
docker run --rm -v $$PWD/dist:/dist/ python:$* pip install /dist/$(TARNAME);
pip_test: $(addprefix pip_test-,$(PYVERSIONS)) ## Test pip installation with the main python version
pip_upload: pip_test ## Upload package to pip
python setup.py sdist upload ;
# Pushing to docker
push-latest: $(addprefix push-latest-,$(PYVERSIONS)) ## Push the "latest" tag to dockerhub
docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGEWTAG)'
docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGENAME)'
docker push '$(IMAGENAME):latest'
docker push '$(IMAGEWTAG)'
push-latest-%: build-% ## Push the latest image for a specific python version
docker tag $(IMAGENAME):$(VERSION)-python$* $(IMAGENAME):python$*
docker push $(IMAGENAME):$(VERSION)-python$*
docker push $(IMAGENAME):python$*
push-%: build-% ## Push the image of the current version (tagged). e.g. push-2.7
docker push $(IMAGENAME):$(VERSION)-python$*
push:: $(addprefix push-,$(PYVERSIONS)) ## Push an image with the current version for every python version
docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGEWTAG)'
docker push $(IMAGENAME):$(VERSION)
clean:: ## Clean older docker images and containers related to this project and dev environments
@docker stop $(addprefix $(NAME)-dev,$(PYVERSIONS)) 2>/dev/null || true
@docker rm $(addprefix $(NAME)-dev,$(PYVERSIONS)) 2>/dev/null || true
@docker ps -a | grep $(IMAGENAME) | awk '{ split($$2, vers, "-"); if(vers[0] != "${VERSION}"){ print $$1;}}' | xargs docker rm -v 2>/dev/null|| true
@docker images | grep $(IMAGENAME) | awk '{ split($$2, vers, "-"); if(vers[0] != "${VERSION}"){ print $$1":"$$2;}}' | xargs docker rmi 2>/dev/null|| true
.PHONY:: yapf dockerfiles Dockerfile-% quick_build build build-% dev-% quick-dev test quick_test push-latest push-latest-% push-% push version .FORCE

@ -1,5 +0,0 @@
FROM gsiupm/senpy:python2.7
MAINTAINER manuel.garcia-amado.sancho@alumnos.upm.es
COPY data /data

@ -1,4 +0,0 @@
FROM gsiupm/senpy:python3.5
MAINTAINER manuel.garcia-amado.sancho@alumnos.upm.es
COPY data /data

@ -1,36 +0,0 @@
from senpy.plugins import AnalysisPlugin
from senpy.models import Response, Entry
class ExamplePlugin(AnalysisPlugin):
'''A *VERY* simple plugin that exemplifies the development of Senpy Plugins'''
name = "example-plugin"
author = "@balkian"
version = "0.1"
extra_params = {
"parameter": {
"@id": "parameter",
"description": "this parameter does nothing, it is only an example",
"aliases": ["parameter", "param"],
"required": True,
"default": 42
}
}
custom_attribute = "42"
def analyse_entry(self, entry, activity):
params = activity.params
self.log.debug('Analysing with the example.')
self.log.debug('The answer to this response is: %s.' % params['parameter'])
resp = Response()
entry['example:reversed'] = entry.text[::-1]
entry['example:the_answer'] = params['parameter']
yield entry
test_cases = [{
'input': 'hello',
'expected': {
'example:reversed': 'olleh'
}
}]

@ -1,8 +0,0 @@
Deploy senpy to a kubernetes cluster.
The files are templates, which need to be expanded with something like envsubst.
Example usage:
```
cat k8s/*.ya*ml | envsubst | kubectl apply -n senpy -f -
```

@ -1,36 +0,0 @@
---
apiVersion: extensions/v1beta1
kind: Deployment
metadata:
name: senpy-$NAME-latest
spec:
replicas: 1
template:
metadata:
labels:
role: $NAME-latest
app: test
spec:
containers:
- name: senpy-latest
image: $IMAGEWTAG
imagePullPolicy: Always
resources:
limits:
memory: "2048Mi"
cpu: "1000m"
ports:
- name: web
containerPort: 5000
volumeMounts:
# name must match the volume name below
- name: senpy-data
mountPath: "/data"
subPath: data
env:
- name: SENPY_DATA
value: '/data'
volumes:
- name: senpy-data
persistentVolumeClaim:
claimName: senpy-pvc

@ -1,24 +0,0 @@
---
apiVersion: extensions/v1beta1
kind: Ingress
metadata:
name: senpy-$NAME-ingress
annotations:
ingress.kubernetes.io/proxy-body-size: 0m
ingress.kubernetes.io/proxy-buffer-size: "256k"
spec:
rules:
- host: senpy.cluster.gsi.dit.upm.es
http:
paths:
- path: /
backend:
serviceName: senpy-$NAME-latest
servicePort: 5000
- host: senpy.gsi.upm.es
http:
paths:
- path: /
backend:
serviceName: senpy-$NAME-latest
servicePort: 5000

@ -1,12 +0,0 @@
---
apiVersion: v1
kind: Service
metadata:
name: senpy-$NAME-latest
spec:
type: ClusterIP
ports:
- port: 5000
protocol: TCP
selector:
role: $NAME-latest

@ -1,5 +0,0 @@
.*
.env
__pycache__
.pyc
VERSION

@ -1,67 +0,0 @@
# Uncomment if you want to use docker-in-docker
# image: gsiupm/dockermake:latest
# services:
# - docker:dind
# When using dind, it's wise to use the overlayfs driver for
# improved performance.
stages:
- test
- push
- deploy
- clean
before_script:
- make -e login
.test: &test_definition
stage: test
script:
- make -e test-$PYTHON_VERSION
test-3.5:
<<: *test_definition
variables:
PYTHON_VERSION: "3.5"
.image: &image_definition
stage: push
script:
- make -e push-$PYTHON_VERSION
only:
- tags
- triggers
push-3.5:
<<: *image_definition
variables:
PYTHON_VERSION: "3.5"
push-latest:
<<: *image_definition
variables:
PYTHON_VERSION: latest
only:
- tags
- triggers
deploy:
stage: deploy
environment: production
script:
- make -e deploy
only:
- tags
- triggers
clean :
stage: clean
script:
- make -e clean
when: manual
cleanup_py:
stage: clean
when: always # this is important; run even if preceding stages failed.
script:
- docker logout

@ -1,27 +0,0 @@
These makefiles are recipes for several common tasks in different types of projects.
To add them to your project, simply do:
```
git remote add makefiles ssh://git@lab.cluster.gsi.dit.upm.es:2200/docs/templates/makefiles.git
git subtree add --prefix=.makefiles/ makefiles master
touch Makefile
echo "include .makefiles/base.mk" >> Makefile
```
Now you can take advantage of the recipes.
For instance, to add useful targets for a python project, just add this to your Makefile:
```
include .makefiles/python.mk
```
You may need to set special variables like the name of your project or the python versions you're targeting.
Take a look at each specific `.mk` file for more information, and the `Makefile` in the [senpy](https://lab.cluster.gsi.dit.upm.es/senpy/senpy) project for a real use case.
If you update the makefiles from your repository, make sure to push the changes for review in upstream (this repository):
```
make makefiles-push
```
It will automatically commit all unstaged changes in the .makefiles folder.

@ -1,36 +0,0 @@
export
NAME ?= $(shell basename $(CURDIR))
VERSION ?= $(shell git describe --tags --dirty 2>/dev/null)
ifeq ($(VERSION),)
VERSION:="unknown"
endif
# Get the location of this makefile.
MK_DIR := $(dir $(abspath $(lastword $(MAKEFILE_LIST))))
-include .env
-include ../.env
help: ## Show this help.
@fgrep -h "##" $(MAKEFILE_LIST) | fgrep -v fgrep | sed -e 's/\\$$//' | sed -e 's/\(.*:\)[^#]*##\s*\(.*\)/\1\t\2/' | column -t -s " "
config: ## Load config from the environment. You should run it once in every session before other tasks. Run: eval $(make config)
@awk '{ print "export " $$0}' ../.env
@awk '{ print "export " $$0}' .env
@echo "# Please, run: "
@echo "# eval \$$(make config)"
# If you need to run a command on the key/value pairs, use this:
# @awk '{ split($$0, a, "="); "echo " a[2] " | base64 -w 0" |& getline b64; print "export " a[1] "=" a[2]; print "export " a[1] "_BASE64=" b64}' .env
ci: ## Run a task using gitlab-runner. Only use to debug problems in the CI pipeline
gitlab-runner exec shell --builds-dir '.builds' --env CI_PROJECT_NAME=$(NAME) ${action}
include $(MK_DIR)/makefiles.mk
include $(MK_DIR)/docker.mk
include $(MK_DIR)/git.mk
info:: ## List all variables
env
.PHONY:: config help ci

@ -1,29 +0,0 @@
IMAGENAME?=$(NAME)
IMAGEWTAG?=$(IMAGENAME):$(VERSION)
docker-login: ## Log in to the registry. It will only be used in the server, or when running a CI task locally (if CI_BUILD_TOKEN is set).
ifeq ($(CI_BUILD_TOKEN),)
@echo "Not logging in to the docker registry" "$(CI_REGISTRY)"
else
@docker login -u gitlab-ci-token -p $(CI_BUILD_TOKEN) $(CI_REGISTRY)
endif
ifeq ($(HUB_USER),)
@echo "Not logging in to global the docker registry"
else
@docker login -u $(HUB_USER) -p $(HUB_PASSWORD)
endif
docker-clean: ## Remove docker credentials
ifeq ($(HUB_USER),)
else
@docker logout
endif
login:: docker-login
clean:: docker-clean
docker-info:
@echo IMAGEWTAG=${IMAGEWTAG}
.PHONY:: docker-login docker-clean login clean

@ -1,28 +0,0 @@
commit:
git commit -a
tag:
git tag ${VERSION}
git-push::
git push --tags -u origin HEAD
git-pull:
git pull --all
push-github: ## Push the code to github. You need to set up GITHUB_DEPLOY_KEY
ifeq ($(GITHUB_DEPLOY_KEY),)
else
$(eval KEY_FILE := "$(shell mktemp)")
@echo "$(GITHUB_DEPLOY_KEY)" > $(KEY_FILE)
@git remote rm github-deploy || true
git remote add github-deploy $(GITHUB_REPO)
-@GIT_SSH_COMMAND="ssh -i $(KEY_FILE)" git fetch github-deploy $(CI_COMMIT_REF_NAME)
@GIT_SSH_COMMAND="ssh -i $(KEY_FILE)" git push github-deploy HEAD:$(CI_COMMIT_REF_NAME)
rm $(KEY_FILE)
endif
push:: git-push
pull:: git-pull
.PHONY:: commit tag push git-push git-pull push-github

@ -1,51 +0,0 @@
# Deployment with Kubernetes
# KUBE_CA_PEM_FILE is the path of a certificate file. It is automatically set by GitLab
# if you enable Kubernetes integration in a project.
#
# As of this writing, Kubernetes integration cannot be set at the group level, so it has to
# be manually set in every project.
# Alternatively, we use a custom KUBE_CA_BUNDLE environment variable, which can be set at
# the group level. In this case, the variable contains the whole content of the certificate,
# which we dump to a temporary file
#
# Check if the KUBE_CA_PEM_FILE exists. Otherwise, create it from KUBE_CA_BUNDLE
KUBE_CA_TEMP=false
ifndef KUBE_CA_PEM_FILE
KUBE_CA_PEM_FILE:=$$PWD/.ca.crt
CREATED:=$(shell echo -e "$(KUBE_CA_BUNDLE)" > $(KUBE_CA_PEM_FILE))
endif
KUBE_TOKEN?=""
KUBE_NAMESPACE?=$(NAME)
KUBECTL=docker run --rm -v $(KUBE_CA_PEM_FILE):/tmp/ca.pem -i lachlanevenson/k8s-kubectl --server="$(KUBE_URL)" --token="$(KUBE_TOKEN)" --certificate-authority="/tmp/ca.pem" -n $(KUBE_NAMESPACE)
CI_COMMIT_REF_NAME?=master
info:: ## Print variables. Useful for debugging.
@echo "#KUBERNETES"
@echo KUBE_URL=$(KUBE_URL)
@echo KUBE_CA_PEM_FILE=$(KUBE_CA_PEM_FILE)
@echo KUBE_CA_BUNDLE=$$KUBE_CA_BUNDLE
@echo KUBE_TOKEN=$(KUBE_TOKEN)
@echo KUBE_NAMESPACE=$(KUBE_NAMESPACE)
@echo KUBECTL=$(KUBECTL)
@echo "#CI"
@echo CI_PROJECT_NAME=$(CI_PROJECT_NAME)
@echo CI_REGISTRY=$(CI_REGISTRY)
@echo CI_REGISTRY_USER=$(CI_REGISTRY_USER)
@echo CI_COMMIT_REF_NAME=$(CI_COMMIT_REF_NAME)
@echo "CREATED=$(CREATED)"
#
# Deployment and advanced features
#
deploy: ## Deploy to kubernetes using the credentials in KUBE_CA_PEM_FILE (or KUBE_CA_BUNDLE ) and TOKEN
@ls k8s/*.yaml k8s/*.yml k8s/*.tmpl 2>/dev/null || true
@cat k8s/*.yaml k8s/*.yml k8s/*.tmpl 2>/dev/null | envsubst | $(KUBECTL) apply -f -
deploy-check: ## Get the deployed configuration.
@$(KUBECTL) get deploy,pods,svc,ingress
.PHONY:: info deploy deploy-check

@ -1,17 +0,0 @@
makefiles-remote:
@git remote add makefiles ssh://git@lab.cluster.gsi.dit.upm.es:2200/docs/templates/makefiles.git 2>/dev/null || true
makefiles-commit: makefiles-remote
git add -f .makefiles
git commit -em "Updated makefiles from ${NAME}"
makefiles-push:
git subtree push --prefix=.makefiles/ makefiles $(NAME)
makefiles-pull: makefiles-remote
git subtree pull --prefix=.makefiles/ makefiles master --squash
pull:: makefiles-pull
push:: makefiles-push
.PHONY:: makefiles-remote makefiles-commit makefiles-push makefiles-pull pull push

@ -1,5 +0,0 @@
init: ## Init pre-commit hooks (i.e. enforcing format checking before allowing a commit)
pip install --user pre-commit
pre-commit install
.PHONY:: init

@ -1,100 +0,0 @@
PYVERSIONS ?= 3.5
PYMAIN ?= $(firstword $(PYVERSIONS))
TARNAME ?= $(NAME)-$(VERSION).tar.gz
VERSIONFILE ?= $(NAME)/VERSION
DEVPORT ?= 6000
.FORCE:
version: .FORCE
@echo $(VERSION) > $(VERSIONFILE)
@echo $(VERSION)
yapf: ## Format python code
yapf -i -r $(NAME)
yapf -i -r tests
dockerfiles: $(addprefix Dockerfile-,$(PYVERSIONS)) ## Generate dockerfiles for each python version
@unlink Dockerfile >/dev/null
ln -s Dockerfile-$(PYMAIN) Dockerfile
Dockerfile-%: Dockerfile.template ## Generate a specific dockerfile (e.g. Dockerfile-2.7)
sed "s/{{PYVERSION}}/$*/" Dockerfile.template > Dockerfile-$*
quick_build: $(addprefix build-, $(PYMAIN))
build: $(addprefix build-, $(PYVERSIONS)) ## Build all images / python versions
build-%: version Dockerfile-% ## Build a specific version (e.g. build-2.7)
docker build -t '$(IMAGEWTAG)-python$*' --cache-from $(IMAGENAME):python$* -f Dockerfile-$* .;
dev-%: ## Launch a specific development environment using docker (e.g. dev-2.7)
@docker start $(NAME)-dev$* || (\
$(MAKE) build-$*; \
docker run -d -w /usr/src/app/ -p $(DEVPORT):5000 -v $$PWD:/usr/src/app --entrypoint=/bin/bash -ti --name $(NAME)-dev$* '$(IMAGEWTAG)-python$*'; \
)\
docker exec -ti $(NAME)-dev$* bash
dev: dev-$(PYMAIN) ## Launch a development environment using docker, using the default python version
quick_test: test-$(PYMAIN)
test-%: ## Run setup.py in an isolated container, built from the base image. (e.g. test-2.7)
# This speeds tests up because the image has most (if not all) of the dependencies already.
docker rm $(NAME)-test-$* || true
docker create -ti --name $(NAME)-test-$* --entrypoint="" -w /usr/src/app/ $(IMAGENAME):python$* python setup.py test
docker cp . $(NAME)-test-$*:/usr/src/app
docker start -a $(NAME)-test-$*
test: $(addprefix test-,$(PYVERSIONS)) ## Run the tests with the main python version
run-%: build-%
docker run --rm -p $(DEVPORT):5000 -ti '$(IMAGEWTAG)-python$(PYMAIN)' --default-plugins
run: run-$(PYMAIN)
# PyPI - Upload a package
dist/$(TARNAME): version
python setup.py sdist;
sdist: dist/$(TARNAME) ## Generate the distribution file (source tarball)
pip_test-%: sdist ## Test the distribution file using pip install and a specific python version (e.g. pip_test-2.7)
docker run --rm -v $$PWD/dist:/dist/ python:$* pip install /dist/$(TARNAME);
pip_test: $(addprefix pip_test-,$(PYVERSIONS)) ## Test pip installation with the main python version
pip_upload: pip_test ## Upload package to pip
python setup.py sdist upload ;
# Pushing to docker
push-latest: $(addprefix push-latest-,$(PYVERSIONS)) ## Push the "latest" tag to dockerhub
docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGEWTAG)'
docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGENAME)'
docker push '$(IMAGENAME):latest'
docker push '$(IMAGEWTAG)'
push-latest-%: build-% ## Push the latest image for a specific python version
docker tag $(IMAGENAME):$(VERSION)-python$* $(IMAGENAME):python$*
docker push $(IMAGENAME):$(VERSION)-python$*
docker push $(IMAGENAME):python$*
push-%: build-% ## Push the image of the current version (tagged). e.g. push-2.7
docker push $(IMAGENAME):$(VERSION)-python$*
push:: $(addprefix push-,$(PYVERSIONS)) ## Push an image with the current version for every python version
docker tag '$(IMAGEWTAG)-python$(PYMAIN)' '$(IMAGEWTAG)'
docker push $(IMAGENAME):$(VERSION)
clean:: ## Clean older docker images and containers related to this project and dev environments
@docker stop $(addprefix $(NAME)-dev,$(PYVERSIONS)) 2>/dev/null || true
@docker rm $(addprefix $(NAME)-dev,$(PYVERSIONS)) 2>/dev/null || true
@docker ps -a | grep $(IMAGENAME) | awk '{ split($$2, vers, "-"); if(vers[0] != "${VERSION}"){ print $$1;}}' | xargs docker rm -v 2>/dev/null|| true
@docker images | grep $(IMAGENAME) | awk '{ split($$2, vers, "-"); if(vers[0] != "${VERSION}"){ print $$1":"$$2;}}' | xargs docker rmi 2>/dev/null|| true
.PHONY:: yapf dockerfiles Dockerfile-% quick_build build build-% dev-% quick-dev test quick_test push-latest push-latest-% push-% push version .FORCE

@ -1,4 +0,0 @@
FROM gsiupm/senpy:0.10.4-python3.5
MAINTAINER manuel.garcia-amado.sancho@alumnos.upm.es

@ -1,4 +0,0 @@
FROM gsiupm/senpy:0.10.4-python{{PYVERSION}}
MAINTAINER manuel.garcia-amado.sancho@alumnos.upm.es

@ -1,9 +0,0 @@
NAME:=meaningcloud
VERSIONFILE:=VERSION
IMAGENAME:=registry.cluster.gsi.dit.upm.es/senpy/sentiment-meaningcloud
PYVERSIONS:= 3.5
DEVPORT:=5000
include .makefiles/base.mk
include .makefiles/k8s.mk
include .makefiles/python.mk

@ -1,34 +0,0 @@
# Senpy Plugin MeaningCloud
The MeaningCloud plugin uses the MeaningCloud API to perform sentiment analysis.
For more information about MeaningCloud and its services, please visit: https://www.meaningcloud.com/developer/apis
## Usage
To use this plugin, you need to obtain an API key from MeaningCloud by signing up here: https://www.meaningcloud.com/developer/login
Once you have obtained the MeaningCloud API key, you have to provide it to the plugin using the **apiKey** parameter.
Requests are sent via GET with the following parameters:
Params:
- Language: English (en) and Spanish (es). (default: en)
- API Key: the API key from MeaningCloud. Aliases: ["apiKey","meaningCloud-key"]. (required)
- Input: text to analyse. (required)
- Model: model passed to the MeaningCloud API (general domain). (default: general)
## Example of Usage
Example request:
```
http://senpy.cluster.gsi.dit.upm.es/api/?algo=meaningCloud&language=en&apiKey=<put here your API key>&input=I%20love%20Madrid
```
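The same request can also be sent programmatically; a minimal sketch using the `requests` library (the API key is a placeholder and the endpoint is the example instance above):
```
# Minimal sketch: call the meaningCloud plugin through a senpy endpoint.
# The endpoint URL and the API key are placeholders; the parameter names
# (algo, language, apiKey, input) are the ones documented above.
import requests

resp = requests.get(
    "http://senpy.cluster.gsi.dit.upm.es/api/",
    params={
        "algo": "meaningCloud",
        "language": "en",
        "apiKey": "<put here your API key>",
        "input": "I love Madrid",
    },
)
resp.raise_for_status()
print(resp.json())
```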
Example response: this plugin follows the standard senpy plugin response format. For more information, please visit the [senpy documentation](http://senpy.readthedocs.io), specifically the NIF API section.
This plugin supports **python2.7** and **python3**.
![alt GSI Logo][logoGSI]
[logoGSI]: http://www.gsi.dit.upm.es/images/stories/logos/gsi.png "GSI Logo"

@ -1,28 +0,0 @@
version: '3'
services:
dev:
image: gsiupm/senpy:latest
entrypoint: ["/bin/bash"]
working_dir: "/senpy-plugins"
tty: true
ports:
- "127.0.0.1:5005:5000"
volumes:
- ".:/senpy-plugins"
test:
image: gsiupm/senpy:latest
entrypoint: ["py.test"]
working_dir: "/usr/src/app/"
volumes:
- ".:/senpy-plugins/"
command:
[]
meaningcloud:
image: "${IMAGENAME-gsiupm/meaningcloud}:${VERSION-dev}"
build:
context: .
dockerfile: Dockerfile-3.5
ports:
- 5001:5000
volumes:
- "./data:/data"

@ -1,7 +0,0 @@
Deploy senpy to a kubernetes cluster.
Usage:
```
kubectl apply -f . -n senpy
```

@ -1,27 +0,0 @@
---
apiVersion: extensions/v1beta1
kind: Deployment
metadata:
name: ${NAME}
spec:
replicas: 1
template:
metadata:
labels:
role: senpy-plugin
app: ${NAME}
spec:
containers:
- name: senpy-latest
image: ${CI_REGISTRY_IMAGE}:${VERSION}
imagePullPolicy: Always
args:
- "-f"
- "/senpy-plugins"
resources:
limits:
memory: "512Mi"
cpu: "1000m"
ports:
- name: web
containerPort: 5000

@ -1,14 +0,0 @@
---
apiVersion: extensions/v1beta1
kind: Ingress
metadata:
name: ${NAME}
spec:
rules:
- host: ${NAME}.senpy.cluster.gsi.dit.upm.es
http:
paths:
- path: /
backend:
serviceName: ${NAME}
servicePort: 5000

@ -1,12 +0,0 @@
---
apiVersion: v1
kind: Service
metadata:
name: ${NAME}
spec:
type: ClusterIP
ports:
- port: 5000
protocol: TCP
selector:
app: ${NAME}

@ -1,4 +1,30 @@
# -*- coding: utf-8 -*-
'''
The MeaningCloud plugin uses the MeaningCloud API to perform sentiment analysis.
For more information about MeaningCloud and its services, please visit: https://www.meaningcloud.com/developer/apis
## Usage
To use this plugin, you need to obtain an API key from MeaningCloud by signing up here: https://www.meaningcloud.com/developer/login
Once you have obtained the MeaningCloud API key, you have to provide it to the plugin using the **apiKey** parameter.
Requests are sent via GET with the following parameters:
Params:
- Language: English (en) and Spanish (es). (default: en)
- API Key: the API key from MeaningCloud. Aliases: ["apiKey","meaningCloud-key"]. (required)
- Input: text to analyse. (required)
- Model: model passed to the MeaningCloud API (general domain). (default: general)
## Example of Usage
Example request:
```
http://senpy.gsi.upm.es/api/?algo=meaningCloud&language=en&apiKey=<put here your API key>&input=I%20love%20Madrid
```
'''
import time
import requests