Browse Source

Add files via upload

main
cs151098 4 years ago
committed by GitHub
parent
commit
d095389985
No known key found for this signature in database GPG Key ID: 4AEE18F83AFDEB23
  1. BIN
      container-storage/1.jpg
  2. BIN
      container-storage/2.jpg
  3. BIN
      container-storage/3.jpg
  4. BIN
      container-storage/4.jpg
  5. BIN
      container-storage/5.jpg
  6. 13
      docker-grafana/Dockerfile
  7. 3
      docker-grafana/configuration.env
  8. 10
      docker-grafana/datasources/influx.json
  9. 29
      docker-grafana/entrypoint.sh
  10. 7
      docker-influxdb/Dockerfile
  11. 3
      docker-influxdb/configuration.env
  12. 18
      docker-influxdb/entrypoint.sh
  13. 10
      docker-mosquitto/Dockerfile
  14. 9
      docker-python-pypy/Dockerfile
  15. 9
      docker-python/Dockerfile
  16. 17
      docker-python/base/Dockerfile
  17. 4
      docker-redis/Dockerfile
  18. 1
      docker-redis/redis.conf
  19. 22
      kubernetes/fulltext-search-deplyment.yaml
  20. 12
      kubernetes/fulltext-serarch-service.yaml
  21. 21
      kubernetes/mongodb-deplyment.yaml
  22. 12
      kubernetes/mongodb-service.yaml
  23. 22
      kubernetes/random-demo-deplyment.yaml
  24. 12
      kubernetes/random-demo-service.yaml
  25. 230
      python/baesian.py
  26. 332
      python/bookcollection.py
  27. 57
      python/caching.py
  28. 52
      python/diagrams_generator.py
  29. 115
      python/fulltext_search.py
  30. 135
      python/geolocation_search.py
  31. 110
      python/mqtt.py
  32. 227
      python/photo_process.py
  33. 19
      python/python_app.log
  34. 120
      python/random_demo.py
  35. 4
      python/requirements-dev.txt
  36. 6
      python/requirements-fastapi.txt
  37. 3
      python/requirements-mqtt.txt
  38. 5
      python/requirements-photo.txt
  39. 8
      python/requirements-restplus.txt
  40. 8
      python/requirements.txt
  41. 55
      python/templates/tictactoe.html
  42. 81
      python/tictactoe.py
  43. 75
      python/users-fastapi.py
  44. 209
      python/users.py
  45. 37
      python/utils.py
  46. BIN
      resources/autogenerated.png
  47. BIN
      resources/diagram.jpg
  48. BIN
      resources/diagram.odp
  49. BIN
      resources/grafana.png
  50. 1
      secrets/mqtt_pass.txt
  51. 1
      secrets/mqtt_user.txt
  52. 1
      secrets/redis_pass.txt
  53. 33
      stresstest-locusts/baesian.py
  54. 25
      stresstest-locusts/fulltext_search.py
  55. 27
      stresstest-locusts/geolocation_search.py
  56. 16
      stresstest-locusts/random_demo.py
  57. 37
      stresstest-locusts/users.py
  58. 10
      tests/conftest.py
  59. 4
      tests/requirements.txt
  60. BIN
      tests/resources/test.jpg
  61. 82
      tests/test_baesian.py
  62. 258
      tests/test_bookcollection.py
  63. 57
      tests/test_fulltext_search.py
  64. 64
      tests/test_geolocation_search.py
  65. 111
      tests/test_mqtt.py
  66. 66
      tests/test_photo.py
  67. 55
      tests/test_random_demo.py
  68. 62
      tests/test_users.py
  69. 69
      tests/test_users_fastapi.py
  70. 42
      tests/utils.py

BIN
container-storage/1.jpg

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.3 MiB

BIN
container-storage/2.jpg

Binary file not shown.

After

Width:  |  Height:  |  Size: 757 KiB

BIN
container-storage/3.jpg

Binary file not shown.

After

Width:  |  Height:  |  Size: 685 KiB

BIN
container-storage/4.jpg

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.6 MiB

BIN
container-storage/5.jpg

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.8 MiB

13
docker-grafana/Dockerfile

@ -0,0 +1,13 @@
FROM grafana/grafana:5.4.3
# Root is needed to install packages and to run the custom entrypoint.
USER root
# curl and envsubst (from gettext-base) are used by entrypoint.sh to provision
# Grafana over its HTTP API; the apt lists are removed to keep the image small.
RUN apt-get update && apt-get install -y curl gettext-base && rm -rf /var/lib/apt/lists/*
WORKDIR /etc/grafana
# Datasource JSON templates consumed by the entrypoint on first boot.
COPY datasources ./datasources
WORKDIR /app
COPY entrypoint.sh ./
RUN chmod u+x entrypoint.sh
# Custom entrypoint provisions datasources/dashboard, then starts Grafana.
ENTRYPOINT ["/app/entrypoint.sh"]

3
docker-grafana/configuration.env

@ -0,0 +1,3 @@
# Grafana admin bootstrap credentials (demo values — change for any real deployment).
GF_SECURITY_ADMIN_USER=admin
GF_SECURITY_ADMIN_PASSWORD=admin
# Plugins Grafana installs on container start.
GF_INSTALL_PLUGINS=grafana-clock-panel,grafana-worldmap-panel,grafana-piechart-panel

10
docker-grafana/datasources/influx.json

@ -0,0 +1,10 @@
{
"name": "InfluxDB",
"type": "influxdb",
"url": "http://influxdb:8086",
"access": "proxy",
"user": "$INFLUX_USER",
"password": "$INFLUX_PASSWORD",
"database": "$INFLUX_DB",
"basicAuth": false
}

29
docker-grafana/entrypoint.sh

@ -0,0 +1,29 @@
#!/usr/bin/env sh
# First-boot bootstrap for Grafana: starts the server in the background,
# provisions the datasources and one dashboard through the HTTP API, marks
# initialization done, then restarts Grafana in the foreground.
url="http://$GF_SECURITY_ADMIN_USER:$GF_SECURITY_ADMIN_PASSWORD@localhost:3000"
# post <json-body> <api-path>: POST JSON to the Grafana API, silencing curl output.
post() {
curl -s -X POST -d "$1" \
-H 'Content-Type: application/json;charset=UTF-8' \
"$url$2" 2> /dev/null
}
# The marker file makes provisioning run only on the very first container start.
if [ ! -f "/var/lib/grafana/.init" ]; then
# NOTE(review): "exec" combined with "&" backgrounds the server rather than
# replacing the shell — presumably intentional so provisioning can run; confirm.
exec /run.sh $@ &
# Wait until the API answers before provisioning anything.
until curl -s "$url/api/datasources" 2> /dev/null; do
sleep 1
done
# envsubst fills the $INFLUX_* placeholders in each datasource template.
for datasource in /etc/grafana/datasources/*; do
post "$(envsubst < $datasource)" "/api/datasources"
done
# Pre-baked "SensorMetrics" dashboard exported from the Grafana UI.
post '{"meta":{"type":"db","canSave":true,"canEdit":true,"canAdmin":true,"canStar":true,"slug":"sensormetrics","expires":"0001-01-01T00:00:00Z","created":"2019-12-25T17:58:23Z","updated":"2019-12-25T18:04:59Z","updatedBy":"admin","createdBy":"admin","version":6,"hasAcl":false,"isFolder":false,"folderId":0,"folderTitle":"General","folderUrl":"","provisioned":false},"dashboard":{"annotations":{"list":[{"builtIn":1,"datasource":"-- Grafana --","enable":true,"hide":true,"iconColor":"rgba(0, 211, 255, 1)","name":"Annotations & Alerts","type":"dashboard"}]},"editable":true,"gnetId":null,"graphTooltip":0,"iteration":1577296839762,"links":[],"panels":[{"aliasColors":{},"bars":false,"dashLength":10,"dashes":false,"datasource":"InfluxDB","fill":1,"gridPos":{"h":9,"w":12,"x":0,"y":0},"id":2,"legend":{"avg":false,"current":false,"max":false,"min":false,"show":true,"total":false,"values":false},"lines":true,"linewidth":1,"links":[],"nullPointMode":"null","percentage":false,"pointradius":5,"points":false,"renderer":"flot","seriesOverrides":[],"spaceLength":10,"stack":false,"steppedLine":false,"targets":[{"groupBy":[{"params":["$__interval"],"type":"time"},{"params":["null"],"type":"fill"}],"measurement":"temperature","orderByTime":"ASC","policy":"default","query":"SELECT \"value\" FROM \"$sensortype\" WHERE $timeFilter 
","rawQuery":true,"refId":"A","resultFormat":"time_series","select":[[{"params":["value"],"type":"field"}]],"tags":[]}],"thresholds":[],"timeFrom":null,"timeRegions":[],"timeShift":null,"title":"Sensors","tooltip":{"shared":true,"sort":0,"value_type":"individual"},"type":"graph","xaxis":{"buckets":null,"mode":"time","name":null,"show":true,"values":[]},"yaxes":[{"format":"short","label":null,"logBase":1,"max":null,"min":null,"show":true},{"format":"short","label":null,"logBase":1,"max":null,"min":null,"show":true}],"yaxis":{"align":false,"alignLevel":null}}],"schemaVersion":16,"style":"dark","tags":[],"templating":{"list":[{"current":{"text":"temperature","value":"temperature"},"hide":0,"label":null,"name":"sensortype","options":[{"text":"humidity","value":"humidity"}],"query":"humidity","skipUrlSync":false,"type":"textbox"}]},"time":{"from":"now-6h","to":"now"},"timepicker":{"refresh_intervals":["5s","10s","30s","1m","5m","15m","30m","1h","2h","1d"],"time_options":["5m","15m","1h","6h","12h","24h","2d","7d","30d"]},"timezone":"","title":"SensorMetrics","version":6}}' "/api/dashboards/db"
touch "/var/lib/grafana/.init"
# Stop the background instance so the final exec owns the process.
kill $(pgrep grafana)
fi
exec /run.sh $@

7
docker-influxdb/Dockerfile

@ -0,0 +1,7 @@
FROM influxdb:1.3.1-alpine
WORKDIR /app
COPY entrypoint.sh ./
RUN chmod u+x entrypoint.sh
# Custom entrypoint creates the admin user and database on first boot.
ENTRYPOINT ["/app/entrypoint.sh"]

3
docker-influxdb/configuration.env

@ -0,0 +1,3 @@
# InfluxDB bootstrap credentials and database name, read by entrypoint.sh
# and by the Grafana datasource template (demo values only).
INFLUX_USER=admin
INFLUX_PASSWORD=admin
INFLUX_DB=influx

18
docker-influxdb/entrypoint.sh

@ -0,0 +1,18 @@
#!/usr/bin/env sh
# First-boot bootstrap for InfluxDB: start the daemon in the background,
# create the admin user and database once, then restart in the foreground.
if [ ! -f "/var/lib/influxdb/.init" ]; then
# NOTE(review): "exec" with "&" backgrounds the daemon instead of replacing
# the shell — presumably intentional so the setup below can run; confirm.
exec influxd $@ &
# Wait for the HTTP API before issuing admin commands.
until wget -q "http://localhost:8086/ping" 2> /dev/null; do
sleep 1
done
influx -host=localhost -port=8086 -execute="CREATE USER ${INFLUX_USER} WITH PASSWORD '${INFLUX_PASSWORD}' WITH ALL PRIVILEGES"
influx -host=localhost -port=8086 -execute="CREATE DATABASE ${INFLUX_DB}"
# Marker file prevents re-provisioning on subsequent starts.
touch "/var/lib/influxdb/.init"
# NOTE(review): %1 job control may not work in non-interactive sh on all
# shells — confirm the TERM actually reaches the background influxd.
kill -s TERM %1
fi
exec influxd $@

10
docker-mosquitto/Dockerfile

@ -0,0 +1,10 @@
FROM eclipse-mosquitto:1.6.8
# Disable anonymous access and authenticate against a baked-in password file.
RUN echo 'password_file /mosquitto/config/pwfile' >> /mosquitto/config/mosquitto.conf
RUN echo 'allow_anonymous false' >> /mosquitto/config/mosquitto.conf
RUN touch /mosquitto/config/pwfile
# NOTE(review): credentials are baked into an image layer — acceptable for a
# demo only; real deployments should mount the password file as a secret.
RUN mosquitto_passwd -b /mosquitto/config/pwfile some_user some_pass
EXPOSE 1883
ENTRYPOINT ["/docker-entrypoint.sh"]
CMD ["/usr/sbin/mosquitto", "-c", "/mosquitto/config/mosquitto.conf"]

9
docker-python-pypy/Dockerfile

@ -0,0 +1,9 @@
FROM pypy:3-slim
# Which requirements-*.txt file to install for this service image.
ARG requirements
# BUGFIX: "apt install" is not meant for scripts and ran in a separate layer
# from "apt-get update" (stale-index risk); single layer + list cleanup also
# keeps the image smaller.
RUN apt-get update \
    && apt-get install -y --no-install-recommends git \
    && rm -rf /var/lib/apt/lists/*
WORKDIR /root
# Fetch the project sources straight from GitHub.
RUN git clone https://github.com/danionescu0/docker-flask-mongodb-example.git flask-mongodb-example
WORKDIR /root/flask-mongodb-example/python
RUN pip install -qr $requirements
EXPOSE 5000

9
docker-python/Dockerfile

@ -0,0 +1,9 @@
FROM web-base
# web-base is built from the Dockerfile inside ./base; the build is split in
# two to speed up building the many per-service images.
ARG requirements
WORKDIR /root/flask-mongodb-example/python
# Each service image installs only its own requirements file.
RUN pip install -qr $requirements
EXPOSE 5000

17
docker-python/base/Dockerfile

@ -0,0 +1,17 @@
FROM python:3.8-buster as web-base
# Python base image shared by all services: contains only git and the project.
# BUGFIX: "apt install" is not meant for scripts and ran in a separate layer
# from "apt-get update" (stale-index risk); single layer + list cleanup also
# keeps the image smaller.
RUN apt-get update \
    && apt-get install -y --no-install-recommends git \
    && rm -rf /var/lib/apt/lists/*
WORKDIR /root
# [DEVELOPMENT ONLY]
# 1. [DEVELOPMENT ONLY] uncomment the following 2 lines (will copy files from local instead from github)
# RUN mkdir flask-mongodb-example
# COPY ./project ./flask-mongodb-example/
# 2. [DEVELOPMENT ONLY] comment the line with git clone
RUN git clone https://github.com/danionescu0/docker-flask-mongodb-example.git flask-mongodb-example
# 3. [DEVELOPMENT ONLY] run in shell from Dockerfile location: mkdir project; rsync -av --progress ../../ ./project/ --exclude docker-python

4
docker-redis/Dockerfile

@ -0,0 +1,4 @@
FROM redis:6
# Ship a config that enables password authentication (see redis.conf).
COPY redis.conf /usr/local/etc/redis/redis.conf
CMD [ "redis-server", "/usr/local/etc/redis/redis.conf" ]

1
docker-redis/redis.conf

@ -0,0 +1 @@
# Require clients to AUTH with this password (demo value).
requirepass someredispassword

22
kubernetes/fulltext-search-deplyment.yaml

@ -0,0 +1,22 @@
# Single-replica deployment of the fulltext-search flask service.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: fulltext-search-deployment
  namespace: default
spec:
  selector:
    matchLabels:
      app: fulltext-search
  replicas: 1
  template:
    metadata:
      labels:
        app: fulltext-search
    spec:
      containers:
        # NOTE(review): "conainer" looks like a typo for "container"; kept
        # as-is because renaming changes the container's identity.
        - name: fulltext-search-conainer
          image: danionescu/docker-flask-mongodb-example-python-default:latest
          # Point the app at the in-cluster mongodb service DNS name.
          command: ["python", "/root/flask-mongodb-example/fulltext_search.py", "mongodb-service.default.svc.cluster.local"]
          imagePullPolicy: Always
          ports:
            - containerPort: 5000

12
kubernetes/fulltext-serarch-service.yaml

@ -0,0 +1,12 @@
# Exposes the fulltext-search pods on port 82 via a LoadBalancer.
apiVersion: v1
kind: Service
metadata:
  name: fulltext-search-service
spec:
  selector:
    app: fulltext-search
  ports:
    - protocol: "TCP"
      port: 82
      targetPort: 5000
  type: LoadBalancer

21
kubernetes/mongodb-deplyment.yaml

@ -0,0 +1,21 @@
# Single-replica MongoDB deployment backing the demo services.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: mongodb-deployment
  namespace: default
spec:
  selector:
    matchLabels:
      app: mongodb
  replicas: 1
  template:
    metadata:
      labels:
        app: mongodb
    spec:
      containers:
        # NOTE(review): "conainer" looks like a typo for "container"; kept
        # as-is because renaming changes the container's identity.
        - name: mongodb-conainer
          image: mongo:4.2-bionic
          imagePullPolicy: Always
          ports:
            - containerPort: 27017

12
kubernetes/mongodb-service.yaml

@ -0,0 +1,12 @@
# Exposes MongoDB inside (and, via LoadBalancer, outside) the cluster.
apiVersion: v1
kind: Service
metadata:
  name: mongodb-service
spec:
  selector:
    app: mongodb
  ports:
    - protocol: "TCP"
      port: 27017
      targetPort: 27017
  type: LoadBalancer

22
kubernetes/random-demo-deplyment.yaml

@ -0,0 +1,22 @@
# Single-replica deployment of the random-demo flask service.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: random-demo-deployment
  namespace: default
spec:
  selector:
    matchLabels:
      app: random-demo
  replicas: 1
  template:
    metadata:
      labels:
        app: random-demo
    spec:
      containers:
        # NOTE(review): "conainer" looks like a typo for "container"; kept
        # as-is because renaming changes the container's identity.
        - name: random-demo-conainer
          image: danionescu/docker-flask-mongodb-example-python-default:latest
          # Point the app at the in-cluster mongodb service DNS name.
          command: ["python", "/root/flask-mongodb-example/random_demo.py", "mongodb-service.default.svc.cluster.local"]
          imagePullPolicy: Always
          ports:
            - containerPort: 5000

12
kubernetes/random-demo-service.yaml

@ -0,0 +1,12 @@
# Exposes the random-demo pods on port 800 via a LoadBalancer.
apiVersion: v1
kind: Service
metadata:
  name: random-demo-service
spec:
  selector:
    app: random-demo
  ports:
    - protocol: "TCP"
      port: 800
      targetPort: 5000
  type: LoadBalancer

230
python/baesian.py

@ -0,0 +1,230 @@
import json
from flask import Flask, request, Response
from pymongo import MongoClient
from flasgger import Swagger
app = Flask(__name__)
swagger = Swagger(app)
baesian = MongoClient("mongodb", 27017).demo.baesian
@app.route("/item/<int:itemid>", methods=["POST"])
def upsert_item(itemid):
"""Create item
---
parameters:
- name: itemid
in: path
type: string
required: true
- name: name
in: formData
type: string
required: false
responses:
200:
description: Item added
"""
request_params = request.form
if "name" not in request_params:
return Response(
"Name not present in parameters!", status=404, mimetype="application/json"
)
baesian.update_one(
{"_id": itemid},
{"$set": {"name": request_params["name"], "nr_votes": 0}},
upsert=True,
)
return Response(
json.dumps({"_id": itemid, "name": request_params["name"]}),
status=200,
mimetype="application/json",
)
@app.route("/item/vote/<int:itemid>", methods=["PUT"])
def add_vote(itemid):
"""Vote an item
---
parameters:
- name: itemid
in: path
type: string
required: true
- name: mark
in: formData
type: integer
required: false
- name: userid
in: formData
type: integer
required: false
responses:
200:
description: Update succeded
"""
request_params = request.form
if "mark" not in request_params or "userid" not in request_params:
return Response(
"Mark and userid must be present in form data!",
status=404,
mimetype="application/json",
)
mark = int(request_params["mark"])
if mark not in range(0, 10):
return Response(
"Mark must be in range (0, 10) !", status=500, mimetype="application/json"
)
userid = int(request_params["userid"])
update_items_data = {
"$push": {"marks": {"userid": userid, "mark": mark}},
"$inc": {"nr_votes": 1, "sum_votes": mark},
}
baesian.update_one({"_id": itemid}, update_items_data)
return Response("", status=200, mimetype="application/json")
@app.route("/item/<int:itemid>", methods=["GET"])
def get_item(itemid):
"""Item details
---
parameters:
- name: itemid
in: path
type: string
required: true
definitions:
Item:
type: object
properties:
_id:
type: integer
name:
type: string
marks:
type: array
items:
type: integer
sum_votes:
type: integer
nr_votes:
type: integer
baesian_average:
type: float
responses:
200:
description: Item model
schema:
$ref: '#/definitions/Item'
404:
description: Item not found
"""
item_data = baesian.find_one({"_id": itemid})
if None == item_data:
return Response("", status=404, mimetype="application/json")
if "marks" not in item_data:
item_data["nr_votes"] = 0
item_data["sum_votes"] = 0
item_data["baesian_average"] = 0
return Response(json.dumps(item_data), status=200, mimetype="application/json")
average_nr_votes_pipeline = [
{"$group": {"_id": "avg_nr_votes", "avg_nr_votes": {"$avg": "$nr_votes"}}},
]
average_nr_votes = list(baesian.aggregate(average_nr_votes_pipeline))[0][
"avg_nr_votes"
]
average_rating = [
{
"$group": {
"_id": "avg",
"avg": {"$sum": "$sum_votes"},
"count": {"$sum": "$nr_votes"},
}
},
{"$project": {"result": {"$divide": ["$avg", "$count"]}}},
]
average_rating = list(baesian.aggregate(average_rating))[0]["result"]
item_nr_votes = item_data["nr_votes"]
item_average_rating = item_data["sum_votes"] / item_data["nr_votes"]
baesian_average = round(
((average_nr_votes * average_rating) + (item_nr_votes * item_average_rating))
/ (average_nr_votes + item_nr_votes),
3,
)
item_data["baesian_average"] = baesian_average
return Response(json.dumps(item_data), status=200, mimetype="application/json")
@app.route("/items", methods=["GET"])
def get_items():
"""All items with pagination without averages
---
parameters:
- name: limit
in: query
type: integer
required: false
- name: offset
in: query
type: integer
required: false
definitions:
Items:
type: array
items:
properties:
_id:
type: integer
name:
type: string
marks:
type: array
items:
type: integer
responses:
200:
description: List of items
schema:
$ref: '#/definitions/Items'
"""
request_args = request.args
limit = int(request_args.get("limit")) if "limit" in request_args else 10
offset = int(request_args.get("offset")) if "offset" in request_args else 0
item_list = baesian.find().limit(limit).skip(offset)
if None == baesian:
return Response(json.dumps([]), status=200, mimetype="application/json")
extracted = [
{
"_id": d["_id"],
"name": d["name"],
"marks": d["marks"] if "marks" in d else [],
}
for d in item_list
]
return Response(json.dumps(extracted), status=200, mimetype="application/json")
@app.route("/item/<int:itemid>", methods=["DELETE"])
def delete_item(itemid):
"""Delete operation for a item
---
parameters:
- name: itemid
in: path
type: string
required: true
responses:
200:
description: Item deleted
"""
baesian.delete_one({"_id": itemid})
return Response("", status=200, mimetype="application/json")
if __name__ == "__main__":
# starts the app in debug mode, bind on all ip's and on port 5000
app.run(debug=True, host="0.0.0.0", port=5000)

332
python/bookcollection.py

@ -0,0 +1,332 @@
import sys
import json
import requests
import dateutil.parser
from flask import Flask, request, Response
from flask_restplus import Api, Resource, fields, reqparse
from pymongo import MongoClient, errors
from utils import get_logger
if len(sys.argv) == 3:
_, users_host, mongo_host = sys.argv
mongo_client = MongoClient(mongo_host, 27017)
else:
users_host = "http://web-users:5000"
mongo_client = MongoClient("mongodb", 27017)
bookcollection = mongo_client.demo.bookcollection
borrowcollection = mongo_client.demo.borrowcollection
logger = get_logger()
app = Flask(__name__)
api = Api(
app=app,
title="Book collection",
description="Simulates a book library with users and book borrwing",
)
book_api = api.namespace("book", description="Book api")
borrow_api = api.namespace("borrow", description="Boorrow, returing api")
book_model = book_api.model(
"Book",
{
"isbn": fields.String(description="ISBN", required=True),
"name": fields.String(description="Name of the book", required=True),
"author": fields.String(description="Book author", required=True),
"publisher": fields.String(description="Book publisher", required=True),
"nr_available": fields.Integer(
min=0, description="Nr books available for lend", required=True
),
},
)
borrow_model = borrow_api.model(
"Borrow",
{
"id": fields.String(
min=0, description="Unique uuid for borrowing", required=True
),
"userid": fields.Integer(
min=0, description="Userid of the borrower", required=True
),
"isbn": fields.String(description="ISBN", required=True),
"borrow_date": fields.DateTime(required=True),
"return_date": fields.DateTime(required=False),
"max_return_date": fields.DateTime(required=True),
},
)
return_model = borrow_api.model(
"Return",
{
"id": fields.String(
min=0, description="Unique uuid for borrowing", required=True
),
"return_date": fields.DateTime(required=False),
},
)
class User:
    """Lightweight, read-only view of a user fetched from the users web service."""

    def __init__(self, exists: bool, userid: int, name: str, email: str) -> None:
        # exists is False when the remote lookup failed or found no such user;
        # in that case name and email are None.
        self.exists = exists
        self.userid = userid
        self.name = name
        self.email = email
pagination_parser = reqparse.RequestParser()
pagination_parser.add_argument("limit", type=int, help="Limit")
pagination_parser.add_argument("offset", type=int, help="Offset")
def get_user(id: int) -> User:
    """Fetch user *id* from the users service.

    Returns a User with exists=False on network errors, non-200 responses,
    non-JSON bodies, or bodies missing the expected fields.
    """
    try:
        # BUGFIX: added a timeout so a hung users service cannot block the
        # request forever; a timeout is handled like any other lookup failure.
        response = requests.get(
            url="{0}/users/{1}".format(users_host, str(id)), timeout=5
        )
    except Exception as e:
        logger.error("Error getting user data error: {0}".format(str(e)))
        return User(False, id, None, None)
    if response.status_code != 200:
        return User(False, id, None, None)
    try:
        result = response.json()
        return User(True, id, result["name"], result["email"])
    except (ValueError, KeyError):
        # BUGFIX: narrowed the original bare "except:": body was not JSON
        # (ValueError) or lacked name/email (KeyError).
        return User(False, id, None, None)
@borrow_api.route("/return/<string:id>")
class Return(Resource):
@borrow_api.doc(responses={200: "Ok"})
@borrow_api.expect(return_model)
def put(self, id):
borrow_api.payload["id"] = id
borrow = borrowcollection.find_one({"id": id})
if None is borrow:
return Response(
json.dumps({"error": "Borrow id not found"}),
status=404,
mimetype="application/json",
)
if "return_date" in borrow:
return Response(
json.dumps({"error": "Book already returned"}),
status=404,
mimetype="application/json",
)
del borrow["_id"]
bookcollection.update_one(
{"isbn": borrow["isbn"]}, {"$inc": {"nr_available": 1}}
)
borrowcollection.update_one(
{"id": borrow_api.payload["id"]},
{
"$set": {
"return_date": dateutil.parser.parse(
borrow_api.payload["return_date"]
)
}
},
)
return Response(
json.dumps(borrow_api.payload, default=str),
status=200,
mimetype="application/json",
)
@borrow_api.route("/<string:id>")
class Borrow(Resource):
def get(self, id):
borrow = borrowcollection.find_one({"id": id})
if None is borrow:
return Response(
json.dumps({"error": "Borrow id not found"}),
status=404,
mimetype="application/json",
)
del borrow["_id"]
user = get_user(borrow["userid"])
borrow["user_name"] = user.name
borrow["user_email"] = user.email
book = bookcollection.find_one({"isbn": borrow["isbn"]})
if None is book:
return Response(
json.dumps({"error": "Book not found"}),
status=404,
mimetype="application/json",
)
borrow["book_name"] = book["name"]
borrow["book_author"] = book["author"]
return Response(
json.dumps(borrow, default=str), status=200, mimetype="application/json"
)
@borrow_api.doc(responses={200: "Ok"})
@borrow_api.expect(borrow_model)
def put(self, id):
session = mongo_client.start_session()
session.start_transaction()
try:
borrow = borrowcollection.find_one({"id": id}, session=session)
if None is not borrow:
return Response(
json.dumps({"error": "Borrow already used"}),
status=404,
mimetype="application/json",
)
borrow_api.payload["id"] = id
user = get_user(borrow_api.payload["userid"])
if not user.exists:
return Response(
json.dumps({"error": "User not found"}),
status=404,
mimetype="application/json",
)
book = bookcollection.find_one(
{"isbn": borrow_api.payload["isbn"]}, session=session
)
if book is None:
return Response(
json.dumps({"error": "Book not found"}),
status=404,
mimetype="application/json",
)
if book["nr_available"] < 1:
return Response(
json.dumps({"error": "Book is not available yet"}),
status=404,
mimetype="application/json",
)
borrow_api.payload["borrow_date"] = dateutil.parser.parse(
borrow_api.payload["borrow_date"]
)
borrow_api.payload["max_return_date"] = dateutil.parser.parse(
borrow_api.payload["max_return_date"]
)
borrow_api.payload.pop("return_date", None)
borrowcollection.insert_one(borrow_api.payload, session=session)
bookcollection.update_one(
{"isbn": borrow_api.payload["isbn"]},
{"$inc": {"nr_available": -1}},
session=session,
)
del borrow_api.payload["_id"]
db_entry = borrowcollection.find_one({"id": id}, session=session)
session.commit_transaction()
except Exception as e:
session.end_session()
return Response(
json.dumps({"error": str(e)}, default=str),
status=500,
mimetype="application/json",
)
session.end_session()
return Response(
json.dumps(db_entry, default=str), status=200, mimetype="application/json"
)
@borrow_api.route("")
class BorrowList(Resource):
@borrow_api.marshal_with(borrow_model, as_list=True)
@borrow_api.expect(pagination_parser, validate=True)
def get(self):
args = pagination_parser.parse_args(request)
data = (
borrowcollection.find()
.sort("id", 1)
.limit(args["limit"])
.skip(args["offset"])
)
extracted = [
{
"id": d["id"],
"userid": d["userid"],
"isbn": d["isbn"],
"borrow_date": d["borrow_date"],
"return_date": d["return_date"] if "return_date" in d else None,
"max_return_date": d["max_return_date"],
}
for d in data
]
return extracted
@book_api.route("/<string:isbn>")
class Book(Resource):
def get(self, isbn):
book = bookcollection.find_one({"isbn": isbn})
if None is book:
return Response(
json.dumps({"error": "Book not found"}),
status=404,
mimetype="application/json",
)
del book["_id"]
return Response(json.dumps(book), status=200, mimetype="application/json")
@book_api.doc(responses={200: "Ok"})
@book_api.expect(book_model)
def put(self, isbn):
book_api.payload["isbn"] = isbn
try:
bookcollection.insert_one(book_api.payload)
except errors.DuplicateKeyError:
return Response(
json.dumps({"error": "Isbn already exists"}),
status=404,
mimetype="application/json",
)
del book_api.payload["_id"]
return Response(
json.dumps(book_api.payload), status=200, mimetype="application/json"
)
def delete(self, isbn):
bookcollection.delete_one({"isbn": isbn})
return Response("", status=200, mimetype="application/json")
@book_api.route("")
class BookList(Resource):
@book_api.marshal_with(book_model, as_list=True)
@book_api.expect(pagination_parser, validate=True)
def get(self):
args = pagination_parser.parse_args(request)
books = (
bookcollection.find()
.sort("id", 1)
.limit(args["limit"])
.skip(args["offset"])
)
extracted = [
{
"isbn": d["isbn"],
"name": d["name"],
"author": d["author"],
"publisher": d["publisher"],
"nr_available": d["nr_available"],
}
for d in books
]
return extracted
if __name__ == "__main__":
try:
mongo_client.admin.command("replSetInitiate")
except errors.OperationFailure as e:
logger.error("Error setting mongodb replSetInitiate error: {0}".format(str(e)))
bookcollection.insert_one({"isbn": 0})
bookcollection.delete_one({"isbn": 0})
borrowcollection.insert_one({"id": 0})
borrowcollection.delete_one({"id": 0})
bookcollection.create_index("isbn", unique=True)
# starts the app in debug mode, bind on all ip's and on port 5000
app.run(debug=True, host="0.0.0.0", port=5000)

57
python/caching.py

@ -0,0 +1,57 @@
import redis
import pickle
from functools import wraps
def cache(redis: redis.Redis, key: str):
    """
    Caches the result of the function in redis and pickle, used a key to cache it
    :param redis: a redis configured instance
    :param key: the name of the keyword argument whose value becomes the cache key
    :return: the result of the wrapped function
    """

    def decorator(fn):
        @wraps(fn)
        def wrapped(*args, **kwargs):
            # Cache hit: the pickled result lives under the value of the
            # keyword argument named by `key`.
            stored = redis.get(kwargs[key])
            if stored:
                return pickle.loads(stored)
            # Cache miss: compute, then store the pickled result for next time.
            result = fn(*args, **kwargs)
            redis.set(kwargs[key], pickle.dumps(result))
            return result

        return wrapped

    return decorator
def cache_invalidate(redis: redis.Redis, key: str):
    """
    Deletes the redis cache by the key specified
    :param redis: a redis configured instance
    :param key: the name of the keyword argument whose value is the cache key to delete
    :return: the result of the wrapped function
    """

    def decorator(fn):
        @wraps(fn)
        def wrapped_f(*args, **kwargs):
            # Run the wrapped function first, then drop the stale cache entry.
            result = fn(*args, **kwargs)
            redis.delete(kwargs[key])
            return result

        return wrapped_f

    return decorator

52
python/diagrams_generator.py

@ -0,0 +1,52 @@
from diagrams import Cluster, Diagram, Edge
from diagrams.onprem.compute import Server
from diagrams.onprem.monitoring import Grafana
from diagrams.aws.iot import IotMqtt
from diagrams.onprem.database import MongoDB
from diagrams.onprem.database import InfluxDB
from diagrams.onprem.network import HAProxy
from diagrams.onprem.inmemory import Redis
# Renders the architecture diagram into ../resources/autogenerated.png.
with Diagram(
    name="Docker Flask MongoDB example",
    show=True,
    filename="../resources/autogenerated",
    direction="LR",
):
    with Cluster("Services"):
        fulltext_search = Server("Fulltext search")
        users = Server("Users")
        book_collection = Server("Book collection")
        geolocation_search = Server("Geolocation search")
        photo_process = Server("Photo process")
        random_demo = Server("Random demo")
        tic_tac_toe = Server("Tic tac toe")
        users_fastapi = Server("Users Fastapi")
        # Services that talk to MongoDB directly.
        webservers = [
            fulltext_search,
            book_collection,
            geolocation_search,
            random_demo,
            users,
            users_fastapi,
        ]
    proxy = HAProxy("Krakend")
    mqtt_service = Server("MQTT service")
    mongo = MongoDB("MongoDb")
    mosquitto = IotMqtt("Mosquitto")
    grafana = Grafana("Grafana")
    influxdb = InfluxDB("InfluxDB")
    redis = Redis("Redis")
    webservers >> Edge(color="brown") >> mongo
    # BUGFIX: was Edge(color="brows") — a typo; the edge is meant to be brown
    # like the other datastore edges.
    users >> Edge(color="brown") >> redis
    book_collection >> Edge(color="black") >> users
    mqtt_service >> Edge(color="brown") >> mosquitto
    mqtt_service >> Edge(color="brown") >> mongo
    mqtt_service >> Edge(color="brown") >> influxdb
    grafana >> Edge(color="brown") >> influxdb
    proxy >> Edge(color="black") >> random_demo
    proxy >> Edge(color="black") >> users

115
python/fulltext_search.py

@ -0,0 +1,115 @@
import sys
import json, datetime
from flask import Flask, request, Response
from flask_httpauth import HTTPBasicAuth
from werkzeug.security import generate_password_hash, check_password_hash
from flasgger import Swagger
from pymongo import MongoClient, TEXT
from bson import json_util
app = Flask(__name__)
auth = HTTPBasicAuth()
swagger_template = {"securityDefinitions": {"basicAuth": {"type": "basic"}}}
users = {
"admin": generate_password_hash("changeme"),
}
@auth.verify_password
def verify_password(username, password):
    # HTTPBasicAuth contract: return the username on success, None (implicit)
    # on unknown user or wrong password.
    stored_hash = users.get(username)
    if stored_hash is not None and check_password_hash(stored_hash, password):
        return username
swagger = Swagger(app, template=swagger_template)
mongo_host = "mongodb"
if len(sys.argv) == 2:
mongo_host = sys.argv[1]
fulltext_search = MongoClient(mongo_host, 27017).demo.fulltext_search
@app.route("/search/<string:searched_expression>")
@auth.login_required
def search(searched_expression: str):
"""Search by an expression
---
parameters:
- name: searched_expression
in: path
type: string
required: true
definitions:
Result:
type: object
properties:
app_text:
type: string
indexed_date:
type: date
responses:
200:
description: List of results
schema:
$ref: '#/definitions/Result'
"""
results = (
fulltext_search.find(
{"$text": {"$search": searched_expression}},
{"score": {"$meta": "textScore"}},
)
.sort([("score", {"$meta": "textScore"})])
.limit(10)
)
results = [
{"text": result["app_text"], "date": result["indexed_date"].isoformat()}
for result in results
]
return Response(
json.dumps(list(results), default=json_util.default),
status=200,
mimetype="application/json",
)
@app.route("/fulltext", methods=["PUT"])
@auth.login_required
def add_expression():
"""Add an expression to fulltext index
---
parameters:
- name: expression
in: formData
type: string
required: true
responses:
200:
description: Creation succeded
"""
request_params = request.form
if "expression" not in request_params:
return Response(
'"Expression" must be present as a POST parameter!',
status=404,
mimetype="application/json",
)
document = {
"app_text": request_params["expression"],
"indexed_date": datetime.datetime.utcnow(),
}
fulltext_search.save(document)
return Response(
json.dumps(document, default=json_util.default),
status=200,
mimetype="application/json",
)
if __name__ == "__main__":
# create the fulltext index
fulltext_search.create_index(
[("app_text", TEXT)], name="fulltextsearch_index", default_language="english"
)
# starts the app in debug mode, bind on all ip's and on port 5000
app.run(debug=True, host="0.0.0.0", port=5000)

135
python/geolocation_search.py

@ -0,0 +1,135 @@
import json
from flask import Flask, request, Response
from flasgger import Swagger
from pymongo import MongoClient, GEOSPHERE
from bson import json_util
app = Flask(__name__)
swagger = Swagger(app)
places = MongoClient("mongodb", 27017).demo.places
@app.route("/location", methods=["POST"])
def new_location():
"""Add a place (name, latitude and longitude)
---
parameters:
- name: name
in: formData
type: string
required: true
- name: lat
in: formData
type: string
required: true
- name: lng
in: formData
type: string
required: true
responses:
200:
description: Place added
"""
request_params = request.form
if (
"name" not in request_params
or "lat" not in request_params
or "lng" not in request_params
):
return Response(
"Name, lat, lng must be present in parameters!",
status=404,
mimetype="application/json",
)
latitude = float(request_params["lng"])
longitude = float(request_params["lat"])
places.insert_one(
{
"name": request_params["name"],
"location": {"type": "Point", "coordinates": [latitude, longitude]},
}
)
return Response(
json.dumps({"name": request_params["name"], "lat": latitude, "lng": longitude}),
status=200,
mimetype="application/json",
)
@app.route("/location/<string:lat>/<string:lng>")
def get_near(lat, lng):
    """Get all points near a location given coordonates, and radius
    ---
    parameters:
      - name: lat
        in: path
        type: string
        required: true
      - name: lng
        in: path
        type: string
        required: true
      - name: max_distance
        in: query
        type: integer
        required: false
      - name: limit
        in: query
        type: integer
        required: false
    definitions:
      Place:
        type: object
        properties:
          name:
            type: string
          lat:
            type: double
          long:
            type: double
    responses:
      200:
        description: Places list
        schema:
          $ref: '#/definitions/Place'
          type: array
    """
    # max_distance is in meters ($maxDistance for GeoJSON points).
    max_distance = int(request.args.get("max_distance", 10000))
    limit = int(request.args.get("limit", 10))
    # $near needs the 2dsphere index created at startup; GeoJSON coordinates
    # are ordered [longitude, latitude], hence [lng, lat] below.
    cursor = places.find(
        {
            "location": {
                "$near": {
                    "$geometry": {
                        "type": "Point",
                        "coordinates": [float(lng), float(lat)],
                    },
                    "$maxDistance": max_distance,
                }
            }
        }
    ).limit(limit)
    # Flatten the stored GeoJSON documents back into {name, lat, lng}.
    extracted = [
        {
            "name": d["name"],
            "lat": d["location"]["coordinates"][1],
            "lng": d["location"]["coordinates"][0],
        }
        for d in cursor
    ]
    return Response(
        json.dumps(extracted, default=json_util.default),
        status=200,
        mimetype="application/json",
    )
if __name__ == "__main__":
    # creates a GEOSPHERE (2dsphere in MongoDb: https://docs.mongodb.com/manual/core/2dsphere/) index
    # named "location_index" on the "location" field; it's used to search by distance
    places.create_index([("location", GEOSPHERE)], name="location_index")
    # starts the app in debug mode, bind on all ip's and on port 5000
    app.run(debug=True, host="0.0.0.0", port=5000)

110
python/mqtt.py

@ -0,0 +1,110 @@
from logging import RootLogger
import json, time, datetime, statistics, requests
import paho.mqtt.client
from pymongo import MongoClient, errors
from utils import get_logger, read_docker_secret
# InfluxDB line-protocol write endpoint (database "influx"); feeds Grafana.
influxdb_url = "http://influxdb:8086/write?db=influx"
mongo_host = "mongodb"
mqtt_host = "mqtt"
# Broker credentials are Docker secrets (env vars point at the secret files).
mqtt_user = read_docker_secret("MQTT_USER")
mqtt_password = read_docker_secret("MQTT_PASSWORD")
mongo_client = MongoClient(mongo_host, 27017)
sensors = mongo_client.demo.sensors
logger = get_logger()
class Mqtt:
    """MQTT consumer that, for each message on the subscribed topic:

    1. keeps the 5 newest readings per sensor in MongoDB,
    2. forwards the raw value to InfluxDB (for Grafana),
    3. publishes the rolling average on "averages/<sensor_id>".
    """

    def __init__(self, host: str, user: str, password: str, logger: RootLogger) -> None:
        self.__host = host
        self.__user = user
        self.__password = password
        self.__logger = logger
        self.__topic = None

    def connect(self, topic: str):
        # NOTE(review): credentials passed here are the module-level
        # mqtt_user/mqtt_password, not self.__user/self.__password — confirm
        # whether the constructor arguments were meant to be used instead.
        self.__topic = topic
        client = paho.mqtt.client.Client()
        client.username_pw_set(mqtt_user, mqtt_password)
        client.on_connect = self.on_connect
        client.on_message = self.on_message
        client.connect(self.__host, 1883, 60)
        # loop_start() runs the network loop in a background thread.
        client.loop_start()

    def on_connect(
        self, client: paho.mqtt.client.Client, userdata, flags: dict, rc: int
    ):
        # Subscribe inside on_connect so reconnects restore the subscription.
        client.subscribe(self.__topic)

    def on_message(
        self,
        client: paho.mqtt.client.Client,
        userdata,
        msg: paho.mqtt.client.MQTTMessage,
    ):
        # Payload is expected to be JSON with "sensor_id" and "sensor_value".
        try:
            message = msg.payload.decode("utf-8")
            decoded_data = json.loads(message)
        except Exception as e:
            self.__logger.error(
                "could not decode message {0}, error: {1}".format(msg, str(e))
            )
            return
        # skip processing if it's averages topic to avoid an infinit loop
        # NOTE(review): no such topic check is actually performed below; this
        # relies on the client only being subscribed to "sensors" — confirm.
        # Upsert per-sensor doc, keeping only the 5 newest readings by date.
        sensors.update_one(
            {"_id": decoded_data["sensor_id"]},
            {
                "$push": {
                    "items": {
                        "$each": [
                            {
                                "value": decoded_data["sensor_value"],
                                "date": datetime.datetime.utcnow(),
                            }
                        ],
                        "$sort": {"date": -1},
                        "$slice": 5,
                    }
                }
            },
            upsert=True,
        )
        # add data to grafana through influxdb (best-effort: log and continue)
        try:
            requests.post(
                url=influxdb_url,
                data="{0} value={1}".format(
                    decoded_data["sensor_id"], decoded_data["sensor_value"]
                ),
            )
        except Exception as e:
            self.__logger.error(
                "Error writing to influxdb {0}, error: {1}".format(msg, str(e))
            )
        # obtain the mongo sensor data by id
        sensor_data = list(sensors.find({"_id": decoded_data["sensor_id"]}))
        # we extract the sensor last values from sensor_data
        sensor_values = [d["value"] for d in sensor_data[0]["items"]]
        # Publish the mean of the last readings with QoS 2.
        client.publish(
            "averages/{0}".format(decoded_data["sensor_id"]),
            statistics.mean(sensor_values),
            2,
        )
# Start consuming the "sensors" topic; message handling runs on the paho
# background thread created by loop_start().
mqtt = Mqtt(mqtt_host, mqtt_user, mqtt_password, logger)
mqtt.connect("sensors")
logger.debug("MQTT App started")
try:
    # Initialize the MongoDB replica set; harmless when already initialized.
    mongo_client.admin.command("replSetInitiate")
except errors.OperationFailure as e:
    logger.error("Error setting mongodb replSetInitiate error: {0}".format(str(e)))
while True:
    # Keep the main thread alive; all work happens on the MQTT loop thread.
    time.sleep(0.05)

227
python/photo_process.py

@ -0,0 +1,227 @@
# Note: the image search algorithm is a naive implementation and it's for demo purposes only
import os
import io
import json
import imagehash
from PIL import Image, ImageEnhance
from flasgger import Swagger
from flask import Flask, Response, request
app = Flask(__name__)
swagger = Swagger(app)
# Photos are stored flat on disk as <storage_path>/<id>.jpg
storage_path = "/root/storage"
class FileHashSearch:
    """In-memory perceptual-hash index mapping average-hash -> photo id.

    Used for naive "find similar photos" lookups (demo quality only).
    """

    def __init__(self) -> None:
        # Fix: `hashes` used to be a mutable class attribute, silently shared
        # between all instances; it is now per-instance state.
        self.hashes = {}

    def load_from_path(self, path: str) -> None:
        """Index every file under *path* (recursively) by its average hash."""
        # Fix: the old code combined os.walk with os.listdir(root), which
        # re-listed directories as well as files and crashed Image.open on a
        # subdirectory; iterate the walk's own file list instead.
        for root, _subdirs, files in os.walk(path):
            for file_name in files:
                file_path = os.path.join(root, file_name)
                file_hash = imagehash.average_hash(Image.open(file_path))
                # The file name without extension is the photo id.
                self.hashes[file_hash] = os.path.splitext(file_name)[0]

    def add(self, file, id) -> None:
        """Index an uploaded file object under *id*."""
        self.hashes[imagehash.average_hash(Image.open(file.stream))] = id

    def delete(self, id: int) -> None:
        """Drop every index entry pointing at *id*."""
        self.hashes = {k: v for k, v in self.hashes.items() if v != str(id)}

    def get_similar(self, hash, similarity: int = 10):
        """Return ids whose hash distance to *hash* is below *similarity*."""
        return [
            item_id
            for item_hash, item_id in self.hashes.items()
            if hash - item_hash < similarity
        ]
def get_photo_path(photo_id: str):
    """Return the on-disk JPEG path for *photo_id* under the storage root."""
    file_name = "{0}/{1}.jpg".format(storage_path, str(photo_id))
    return file_name
def get_resized_by_height(img, new_height: int):
    """Scale *img* to *new_height* pixels high, keeping its aspect ratio."""
    width, height = img.size
    scale = new_height / float(height)
    new_width = int(float(width) * scale)
    return img.resize((new_width, new_height), Image.ANTIALIAS)
# Build the hash index from every photo already on disk so similarity
# search keeps working across restarts.
file_hash_search = FileHashSearch()
file_hash_search.load_from_path(storage_path)
@app.route("/photo/<int:id>", methods=["GET"])
def get_photo(id):
    """Returns the photo by id
    ---
    parameters:
      - name: id
        in: path
        type: string
        required: true
      - name: resize
        description: Resize by height in pixels
        in: query
        type: integer
        required: false
      - name: rotate
        description: Rotate left in degrees
        in: query
        type: integer
        required: false
      - name: brightness
        in: query
        type: float
        required: false
        maximum: 20
    responses:
      200:
        description: The actual photo
      404:
        description: Photo not found
    """
    # (Docstring fix: "resize" actually scales by HEIGHT, see
    # get_resized_by_height; it was documented as width.)
    request_args = request.args
    # Optional transformations; 0 means "leave the image untouched".
    resize = int(request_args.get("resize")) if "resize" in request_args else 0
    rotate = int(request_args.get("rotate")) if "rotate" in request_args else 0
    brightness = (
        float(request_args.get("brightness")) if "brightness" in request_args else 0
    )
    if brightness > 20:
        return get_response({"error": "Maximum value for brightness is 20"}, 500)
    try:
        img = Image.open(get_photo_path(id))
    except IOError:
        return get_response({"error": "Error loading image"}, 500)
    if resize > 0:
        img = get_resized_by_height(img, resize)
    if rotate > 0:
        img = img.rotate(rotate)
    if brightness > 0:
        enhancer = ImageEnhance.Brightness(img)
        img = enhancer.enhance(brightness)
    # Serialize the (possibly transformed) image to an in-memory JPEG.
    output = io.BytesIO()
    img.save(output, format="JPEG")
    image_data = output.getvalue()
    output.close()
    return Response(image_data, status=200, mimetype="image/jpeg")
@app.route("/photo/similar", methods=["PUT"])
def get_photos_like_this():
    """Find similar photos:
    ---
    parameters:
      - name: file
        required: false
        in: formData
        type: file
      - name: similarity
        description: How similar the file should be, minimum 0 maximum 40
        in: query
        type: integer
        required: false
        maximum: 40
    definitions:
      Number:
        type: integer
    responses:
      200:
        description: Found
        schema:
          $ref: '#/definitions/Number'
          type: array
      404:
        description: Errors occurred
    """
    if "file" not in request.files:
        return get_response({"error": "File parameter not present!"}, 500)
    file = request.files["file"]
    if file.mimetype != "image/jpeg":
        # Typo fix: message used to read "must pe jpeg".
        return get_response({"error": "File mimetype must be jpeg!"}, 500)
    request_args = request.args
    similarity = (
        int(request_args.get("similarity")) if "similarity" in request_args else 10
    )
    # Compare the upload's perceptual hash against every indexed photo.
    result = file_hash_search.get_similar(
        imagehash.average_hash(Image.open(file.stream)), similarity
    )
    return Response(json.dumps(result), status=200, mimetype="application/json")
@app.route("/photo/<int:id>", methods=["PUT"])
def set_photo(id):
    """Add jpeg photo on disk:
    ---
    parameters:
      - name: id
        in: path
        type: string
        required: true
      - name: file
        required: false
        in: formData
        type: file
    responses:
      200:
        description: Added successfully
      404:
        description: Error saving photo
    """
    if "file" not in request.files:
        return get_response({"error": "File parameter not present!"}, 500)
    file = request.files["file"]
    if file.mimetype != "image/jpeg":
        # Typo fix: message used to read "must pe jpeg".
        return get_response({"error": "File mimetype must be jpeg!"}, 500)
    try:
        file.save(get_photo_path(id))
    except Exception:
        return get_response({"error": "Could not save file to disk!"}, 500)
    # Index the new photo so similarity search finds it immediately.
    file_hash_search.add(file, id)
    return get_response({"status": "success"}, 200)
@app.route("/photo/<int:id>", methods=["DELETE"])
def delete_photo(id):
    """Delete photo by id:
    ---
    parameters:
      - name: id
        in: path
        type: string
        required: true
    responses:
      200:
        description: Deleted successfully
      404:
        description: Error deleting
    """
    try:
        os.remove(get_photo_path(id))
        # Keep the in-memory hash index in sync with the files on disk.
        file_hash_search.delete(id)
    except OSError:
        # Grammar fix: message used to read "File does not exists!".
        return get_response({"error": "File does not exist!"}, 500)
    return get_response({"status": "success"}, 200)
def get_response(data: dict, status: int) -> Response:
    """Serialize *data* to JSON and wrap it in a Flask response."""
    body = json.dumps(data)
    return Response(body, status=status, mimetype="application/json")
if __name__ == "__main__":
    # starts the app in debug mode, bind on all ip's and on port 5000
    app.run(debug=True, host="0.0.0.0", port=5000)

19
python/python_app.log

@ -0,0 +1,19 @@
2021-01-31 19:01:48,263 - python_app - DEBUG - Random demo app started
2021-01-31 19:01:48,484 - python_app - DEBUG - Random demo app started
2021-01-31 19:03:10,004 - python_app - DEBUG - Random demo app started
2021-01-31 19:03:10,217 - python_app - DEBUG - Random demo app started
2021-01-31 19:04:11,058 - python_app - DEBUG - Random demo app started
2021-01-31 19:06:43,742 - python_app - DEBUG - Random demo app started
2021-01-31 19:06:43,949 - python_app - DEBUG - Random demo app started
2021-01-31 19:12:37,999 - python_app - DEBUG - Random demo app started
2021-01-31 19:12:38,203 - python_app - DEBUG - Random demo app started
2021-01-31 19:58:31,479 - python_app - DEBUG - MQTT App started
2021-01-31 19:58:46,800 - python_app - DEBUG - MQTT App started
2021-01-31 22:08:23,694 - python_app - DEBUG - MQTT App started
2021-01-31 22:24:12,124 - python_app - DEBUG - MQTT App started
2021-01-31 22:25:11,955 - python_app - DEBUG - MQTT App started
2021-01-31 22:25:16,675 - python_app - ERROR - Erro writing to grafana <paho.mqtt.client.MQTTMessage object at 0x7fe018051eb0>, error: HTTPConnectionPool(host='influxdb', port=8086): Max retries exceeded with url: /write?db=influx (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7fe01656e610>: Failed to establish a new connection: [Errno -2] Name or service not known'))
2021-02-21 15:49:03,502 - python_app - DEBUG - Random demo app started
2021-02-21 15:49:03,711 - python_app - DEBUG - Random demo app started
2021-02-21 15:52:54,615 - python_app - ERROR - Error setting mongodb replSetInitiate error: already initialized
2021-02-21 15:57:12,566 - python_app - ERROR - Error setting mongodb replSetInitiate error: already initialized

120
python/random_demo.py

@ -0,0 +1,120 @@
import random, json, datetime, sys
from flask import Flask, Response, request
from flasgger import Swagger
from pymongo import MongoClient
from bson import json_util
from utils import get_logger
app = Flask(__name__)
swagger = Swagger(app)
mongo_host = "mongodb"
# Optional CLI override of the MongoDB host (e.g. "localhost" for local runs).
if len(sys.argv) == 2:
    mongo_host = sys.argv[1]
random_numbers = MongoClient(mongo_host, 27017).demo.random_numbers
logger = get_logger()
@app.route("/random", methods=["PUT"])
def random_insert():
    """Add a random number to the list of last 5 numbers
    ---
    parameters:
      - name: lower
        in: formData
        type: integer
        required: false
      - name: upper
        in: formData
        type: integer
        required: false
    responses:
      200:
        description: Random number added successfully
        type: integer
    """
    request_params = request.form
    # Both bounds are documented as optional; default to 0..10 instead of
    # raising KeyError when a bound is missing (matches the GET endpoint).
    lower = int(request_params.get("lower", 0))
    upper = int(request_params.get("upper", 10))
    number = str(random.randint(lower, upper))
    # Keep only the 5 newest numbers in the single "lasts" document.
    random_numbers.update_one(
        {"_id": "lasts"},
        {
            "$push": {
                "items": {
                    "$each": [{"value": number, "date": datetime.datetime.utcnow()}],
                    "$sort": {"date": -1},
                    "$slice": 5,
                }
            }
        },
        upsert=True,
    )
    return Response(number, status=200, mimetype="application/json")
@app.route("/random", methods=["GET"])
def random_generator():
    """Returns a random number in interval
    ---
    parameters:
      - name: lower
        in: query
        type: integer
        required: false
      - name: upper
        in: query
        type: integer
        required: false
    responses:
      200:
        description: Random number generated
        type: integer
    """
    request_args = request.args
    # Bug fix: the defaults were swapped (lower=10, upper=0), so a request
    # without query parameters always failed the bounds check below.
    lower = int(request_args.get("lower")) if "lower" in request_args else 0
    upper = int(request_args.get("upper")) if "upper" in request_args else 10
    if upper < lower:
        return Response(
            json.dumps(
                {"error": "Upper boundary must be greater or equal than lower boundary"}
            ),
            status=400,
            mimetype="application/json",
        )
    number = str(random.randint(lower, upper))
    return Response(number, status=200, mimetype="application/json")
@app.route("/random-list")
def last_number_list():
    """Gets the latest 5 generated numbers
    ---
    definitions:
      Number:
        type: int
    responses:
      200:
        description: list of results
        schema:
          $ref: '#/definitions/Number'
          type: array
    """
    # The single "lasts" document holds the 5 most recent values, maintained
    # by random_insert via $push/$sort/$slice.
    last_numbers = list(random_numbers.find({"_id": "lasts"}))
    if len(last_numbers) == 0:
        extracted = []
    else:
        extracted = [d["value"] for d in last_numbers[0]["items"]]
    return Response(
        json.dumps(extracted, default=json_util.default),
        status=200,
        mimetype="application/json",
    )
if __name__ == "__main__":
    # NOTE: with debug=True the reloader imports the module twice, which is
    # why this line appears duplicated in python_app.log.
    logger.debug("Random demo app started")
    # starts the app in debug mode, bind on all ip's and on port 5000
    app.run(debug=True, host="0.0.0.0", port=5000)

4
python/requirements-dev.txt

@ -0,0 +1,4 @@
black==20.8b1
diagrams==0.19.1
graphviz==0.16
Faker==8.1.1

6
python/requirements-fastapi.txt

@ -0,0 +1,6 @@
motor==2.3.1
python-dateutil==2.8.1
fastapi==0.65.2
uvicorn==0.13.4
gunicorn==20.0.4
email-validator==1.1.2

3
python/requirements-mqtt.txt

@ -0,0 +1,3 @@
pymongo==3.9.0
paho-mqtt==1.2.3
requests==2.21.0

5
python/requirements-photo.txt

@ -0,0 +1,5 @@
Flask==2.0.1
pymongo==3.9.0
flasgger==0.9.2
Pillow==8.2.0
ImageHash==4.0.0

8
python/requirements-restplus.txt

@ -0,0 +1,8 @@
Flask==1.1.2
# locked due to bug: https://github.com/jarus/flask-testing/issues/143
Werkzeug==0.16.1
flask-restplus==0.13.0
pymongo==3.9.0
flasgger==0.9.5
requests==2.25.1
python-dateutil==2.8.1

8
python/requirements.txt

@ -0,0 +1,8 @@
Flask==2.0.1
Flask-Session==0.3.2
pymongo==3.9.0
flasgger==0.9.5
requests==2.25.0
python-dateutil==2.8.1
redis==3.5.3
flask-httpauth==4.3.0

55
python/templates/tictactoe.html

@ -0,0 +1,55 @@
<!DOCTYPE html>
<html>
<head>
    <title>Tic Tac Toe</title>
    <link rel="stylesheet"
          href="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0/css/bootstrap.min.css" integrity="sha384-Gn5384xqQ1aoWXA+058RXPxPg6fy4IWvTNh0E263XmFcJlSAwiGgFAW/dAiS6JXm" crossorigin="anonymous">
    <style>
        table {
            border-collapse: collapse;
            margin: 20px;
        }
        td {
            border: 1px solid black;
            width: 150px;
            height: 150px;
            font-size: 30px;
            text-align: center;
        }
        td > a {
            font-size: 18px;
        }
    </style>
</head>
<body>
<div>
    <a href="{{url_for('reset')}}" class="btn btn-default">Reset Game</a>
</div>
{% if draw %}
    <div>
        <h1> Game Drawn</h1>
    </div>
{% endif %}
{% if winnerFound %}
    <div>
        <h1> WINNER is {{winner}}</h1>
    </div>
{% endif %}
{# The board list is column-major: cell (row i, col j) lives at game[j*3+i],
   matching the index used by the /play/<row>/<col> route. #}
<table>
    {% for i in range(0, 3) %}
        <tr>
            {% for j in range(0, 3) %}
                <td>
                    {% if game[j*3+i] %}
                        {{ game[j*3+i] }}
                    {% else %}
                        {# Only empty cells render a "play here" link. #}
                        <a href="{{ url_for('play', row=i, col=j) }}">Play {{turn}} here.</a>
                    {% endif %}
                </td>
            {% endfor %}
        </tr>
    {% endfor %}
</table>
</body>
</html>

81
python/tictactoe.py

@ -0,0 +1,81 @@
from flask import Flask, render_template, session, redirect, url_for
from flask_session import Session
from tempfile import mkdtemp
app = Flask(__name__)
# File-backed sessions in a throw-away temp dir; games don't survive restarts.
app.config["SESSION_FILE_DIR"] = mkdtemp()
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
class Game:
    """Tic-tac-toe rules helper operating on a flat 9-cell board list."""

    # Cell numbers (1-9) of every winning line on the board.
    WIN_LINES = [
        [1, 2, 3],
        [4, 5, 6],
        [7, 8, 9],  # horiz.
        [1, 4, 7],
        [2, 5, 8],
        [3, 6, 9],  # vertical
        [1, 5, 9],
        [3, 5, 7],  # diagonal
    ]

    def has_won(self, board: list, turn: str) -> bool:
        """Return True when *turn* occupies every cell of some winning line."""
        for line in self.WIN_LINES:
            if all(board[cell - 1] == turn for cell in line):
                return True
        return False

    def has_moves_left(self, board: list) -> bool:
        """Return True when every cell is filled.

        NOTE: despite the name, True means the board is FULL (no moves remain);
        the caller uses it as the draw condition.
        """
        return not any(cell is None for cell in board)

    def get_next_player(self, turn: str):
        """Return the opposing player's symbol for *turn* ("X" <-> "O")."""
        opposites = {"O": "X", "X": "O"}
        return opposites[turn]
# Single shared rules helper; it holds no game state, so one instance suffices.
game = Game()
def initiate_session(session):
    """Reset the session to a fresh game: empty board, X to move."""
    session["board"] = [None] * 9
    session["turn"] = "X"
    session["winner"] = False
    session["draw"] = False
@app.route("/")
def index():
    """Render the board, detecting a win or a draw on every page load."""
    if "board" not in session:
        initiate_session(session)
    winner_x = game.has_won(session["board"], "X")
    winner_O = game.has_won(session["board"], "O")
    if winner_x or winner_O:
        session["winner"] = True
        # Re-use "turn" to carry the winning symbol to the template.
        session["turn"] = "X" if winner_x else "O"
    # Bug fix: a full board is a draw only when nobody has won; previously a
    # winning final move also flagged the game as drawn.
    if game.has_moves_left(session["board"]) and not session["winner"]:
        session["draw"] = True
    return render_template(
        "tictactoe.html",
        game=session["board"],
        turn=session["turn"],
        winnerFound=session["winner"],
        winner=session["turn"],
        draw=session["draw"],
    )
@app.route("/play/<int:row>/<int:col>")
def play(row: int, col: int):
    # Board is column-major: cell (row, col) lives at index col*3+row,
    # matching the template's game[j*3+i] lookup.
    # NOTE(review): there is no check that the cell is free or that the game
    # is over; the template only renders links for empty cells, so this
    # trusts the UI — confirm whether server-side validation is wanted.
    session["board"][col * 3 + row] = session["turn"]
    session["turn"] = game.get_next_player(session["turn"])
    return redirect(url_for("index"))
@app.route("/reset")
def reset():
    # Wipe the board and hand the first move back to X.
    initiate_session(session)
    return redirect(url_for("index"))
if __name__ == "__main__":
    # Dev server on all interfaces, port 5000.
    app.run(debug=True, host="0.0.0.0", port=5000)

75
python/users-fastapi.py

@ -0,0 +1,75 @@
from typing import Optional, List
from pymongo import errors
import motor.motor_asyncio
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel, Field, EmailStr
app = FastAPI()
# Async Mongo collection ("demo.users"); every handler awaits motor coroutines.
users_async = motor.motor_asyncio.AsyncIOMotorClient("mongodb", 27017).demo.users
class User(BaseModel):
    """Request/response schema for a user record."""

    userid: int
    email: EmailStr
    # Human-readable display name, capped at 50 characters.
    name: str = Field(..., title="Name of the user", max_length=50)
@app.post("/users/{userid}")
async def add_user(userid: int, user: User):
    """Create a user document; the path id becomes the Mongo _id."""
    # NOTE(review): pydantic already rejects a missing email/name (both fields
    # are required on User), so this guard looks unreachable — confirm.
    if user.email is None and user.name is None:
        raise HTTPException(
            status_code=500, detail="Email or name not present in user!"
        )
    try:
        await users_async.insert_one(
            {"_id": userid, "email": user.email, "name": user.name}
        )
    except errors.DuplicateKeyError as e:
        raise HTTPException(status_code=500, detail="Duplicate user id!")
    # Read the document back so the response reflects what was stored.
    db_item = await users_async.find_one({"_id": userid})
    return format_user(db_item)
@app.put("/users/{userid}")
async def update_user(userid: int, user: User):
    """Apply a partial $set update to a user and return the new document."""
    # NOTE(review): as in add_user, the User model requires both fields, so
    # the None checks below look unreachable — confirm.
    if user.email is None and user.name is None:
        raise HTTPException(
            status_code=500, detail="Email or name must be present in parameters!"
        )
    updated_user = {}
    if user.email is not None:
        updated_user["email"] = user.email
    if user.name is not None:
        updated_user["name"] = user.name
    await users_async.update_one({"_id": userid}, {"$set": updated_user})
    return format_user(await users_async.find_one({"_id": userid}))
@app.get("/users/{userid}", response_model=User)
async def get_user(userid: int):
    """Fetch a single user by id; 404 when it does not exist."""
    user = await users_async.find_one({"_id": userid})
    # Idiom fix: identity comparison with None instead of `None == user`.
    if user is None:
        raise HTTPException(status_code=404, detail="User not found")
    return format_user(user)
@app.get("/users", response_model=List[User])
async def get_users(limit: Optional[int] = 10, offset: Optional[int] = 0):
    """Return a page of users: skip *offset*, return at most *limit*."""
    cursor = users_async.find().limit(limit).skip(offset)
    docs = await cursor.to_list(limit)
    return [format_user(doc) for doc in docs]
@app.delete("/users/{userid}", response_model=User)
async def delete_user(userid: int):
    """Delete a user and return the deleted record; 404 when missing."""
    user = await users_async.find_one({"_id": userid})
    # Bug fix: when the user did not exist, format_user(None) returned None,
    # which failed response_model validation with a 500; return a 404 instead
    # (consistent with get_user).
    if user is None:
        raise HTTPException(status_code=404, detail="User not found")
    await users_async.delete_one({"_id": userid})
    return format_user(user)
def format_user(user):
    """Map a Mongo document to the public user shape; passes None through."""
    if user is None:
        return None
    return {
        "userid": user["_id"],
        "name": user["name"],
        "email": user["email"],
    }

209
python/users.py

@ -0,0 +1,209 @@
import json
import redis
from flask import Flask, request, Response
from pymongo import MongoClient, errors
from bson import json_util
from flasgger import Swagger
from utils import read_docker_secret
from caching import cache, cache_invalidate
app = Flask(__name__)
swagger = Swagger(app)
users = MongoClient("mongodb", 27017).demo.users
# Redis backs the @cache/@cache_invalidate decorators used by the handlers.
redis_cache = redis.Redis(
    host="redis", port=6379, db=0, password=read_docker_secret("REDIS_PASSWORD")
)
@app.route("/users/<int:userid>", methods=["POST"])
def add_user(userid):
    """Create user
    ---
    parameters:
      - name: userid
        in: path
        type: string
        required: true
      - name: email
        in: formData
        type: string
        required: true
      - name: name
        in: formData
        type: string
        required: true
    responses:
      200:
        description: Creation succeded
    """
    request_params = request.form
    if "email" not in request_params or "name" not in request_params:
        return Response(
            "Email and name not present in parameters!",
            status=404,
            mimetype="application/json",
        )
    # The path id is the Mongo _id, so a second POST with the same id raises.
    try:
        users.insert_one(
            {
                "_id": userid,
                "email": request_params["email"],
                "name": request_params["name"],
            }
        )
    except errors.DuplicateKeyError as e:
        return Response("Duplicate user id!", status=404, mimetype="application/json")
    # Echo back the stored document.
    return Response(
        json.dumps(users.find_one({"_id": userid})),
        status=200,
        mimetype="application/json",
    )
@app.route("/users/<int:userid>", methods=["PUT"])
@cache_invalidate(redis=redis_cache, key="userid")
def update_user(userid):
    """Update user information
    ---
    parameters:
      - name: userid
        in: path
        type: string
        required: true
      - name: email
        in: formData
        type: string
        required: false
      - name: name
        in: formData
        type: string
        required: false
    responses:
      200:
        description: Update succeded
    """
    request_params = request.form
    if "email" not in request_params and "name" not in request_params:
        return Response(
            "Email or name must be present in parameters!",
            status=404,
            mimetype="application/json",
        )
    # Idiom fix: renamed the update dict from `set`, which shadowed the
    # built-in set type.
    updates = {}
    if "email" in request_params:
        updates["email"] = request_params["email"]
    if "name" in request_params:
        updates["name"] = request_params["name"]
    users.update_one({"_id": userid}, {"$set": updates})
    return Response(
        json.dumps(users.find_one({"_id": userid})),
        status=200,
        mimetype="application/json",
    )
@app.route("/users/<int:userid>", methods=["GET"])
@cache(redis=redis_cache, key="userid")
def get_user(userid):
    """Details about a user
    ---
    parameters:
      - name: userid
        in: path
        type: string
        required: true
    definitions:
      User:
        type: object
        properties:
          _id:
            type: integer
          email:
            type: string
          name:
            type: string
    responses:
      200:
        description: User model
        schema:
          $ref: '#/definitions/User'
      404:
        description: User not found
    """
    # (Removed a leftover `print("getting user")` debug statement and replaced
    # `None == user` with the identity check.)
    user = users.find_one({"_id": userid})
    if user is None:
        return Response("", status=404, mimetype="application/json")
    return Response(json.dumps(user), status=200, mimetype="application/json")
@app.route("/users", methods=["GET"])
def get_users():
    """Example endpoint returning all users with pagination
    ---
    parameters:
      - name: limit
        in: query
        type: integer
        required: false
      - name: offset
        in: query
        type: integer
        required: false
    definitions:
      Users:
        type: array
        items:
          properties:
            _id:
              type: integer
            email:
              type: string
            name:
              type: string
    responses:
      200:
        description: List of user models
        schema:
          $ref: '#/definitions/Users'
    """
    request_args = request.args
    limit = int(request_args.get("limit")) if "limit" in request_args else 10
    offset = int(request_args.get("offset")) if "offset" in request_args else 0
    user_list = users.find().limit(limit).skip(offset)
    # (Removed the dead `if None == users` guard: it compared the collection
    # object itself — never None — and find() returns a cursor even for empty
    # results; an empty query simply yields [] below.)
    extracted = [
        {"userid": d["_id"], "name": d["name"], "email": d["email"]} for d in user_list
    ]
    return Response(
        json.dumps(extracted, default=json_util.default),
        status=200,
        mimetype="application/json",
    )
@app.route("/users/<int:userid>", methods=["DELETE"])
@cache_invalidate(redis=redis_cache, key="userid")
def delete_user(userid):
    """Delete operation for a user
    ---
    parameters:
      - name: userid
        in: path
        type: string
        required: true
    responses:
      200:
        description: User deleted
    """
    # Deleting a non-existent user is a no-op and still returns 200.
    users.delete_one({"_id": userid})
    return Response("", status=200, mimetype="application/json")
if __name__ == "__main__":
    # Dev server on all interfaces, port 5000.
    app.run(debug=True, host="0.0.0.0", port=5000)

37
python/utils.py

@ -0,0 +1,37 @@
import logging, os
def get_logger():
    """Configure and return the shared "python_app" logger.

    Logs DEBUG and above both to python_app.log and to the console.

    Bug fix: logging.getLogger returns a process-wide singleton, so every
    call used to attach two MORE handlers, duplicating each log line (the
    duplicates are visible in python_app.log). Handlers are now attached
    only on the first call.
    """
    logger = logging.getLogger("python_app")
    logger.setLevel(logging.DEBUG)
    if logger.handlers:
        # Already configured by an earlier call.
        return logger
    # create file handler which logs even debug messages
    fh = logging.FileHandler("python_app.log")
    fh.setLevel(logging.DEBUG)
    # create console handler with a higher log level
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)
    # create formatter and add it to the handlers
    formatter = logging.Formatter(
        "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    )
    fh.setFormatter(formatter)
    ch.setFormatter(formatter)
    # add the handlers to the logger
    logger.addHandler(fh)
    logger.addHandler(ch)
    return logger
def read_docker_secret(name: str) -> str:
    """Return the content of the secret file whose path is stored in the
    environment variable *name* (the Docker secrets convention).

    :param name: name of the environment variable holding the secret path
    :return: the secret as a string
    """
    secret_path = os.environ.get(name)
    with open(secret_path, "r") as secret_file:
        return secret_file.read()

BIN
resources/autogenerated.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 75 KiB

BIN
resources/diagram.jpg

Binary file not shown.

After

Width:  |  Height:  |  Size: 58 KiB

BIN
resources/diagram.odp

Binary file not shown.

BIN
resources/grafana.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 47 KiB

1
secrets/mqtt_pass.txt

@ -0,0 +1 @@
some_pass

1
secrets/mqtt_user.txt

@ -0,0 +1 @@
some_user

1
secrets/redis_pass.txt

@ -0,0 +1 @@
someredispassword

33
stresstest-locusts/baesian.py

@ -0,0 +1,33 @@
from random import randrange
from locust import HttpUser, TaskSet, task
class RegistredUser(HttpUser):
    # Wait 5-9 seconds between tasks to mimic a human user.
    min_wait = 5000
    max_wait = 9000

    @task
    class BaesianStresstest(TaskSet):
        """Exercises the bayesian-average item API with random ids."""

        @task(1)
        def create_item(self):
            # POST /item/<id> creates an item with a generated name.
            id = self.__get_item_id()
            url = '/item/{0}'.format(id)
            self.client.post(url, {'name': 'item_{0}'.format(id)})

        @task(2)
        def add_vote(self):
            # PUT /item/vote/<id> registers a 0-9 mark from a random user.
            item_id = self.__get_item_id()
            user_id = self.__get_user_id()
            url = '/item/vote/{0}'.format(item_id)
            self.client.put(url, {'mark': randrange(0, 10), 'userid': user_id})

        @task(3)
        def get_by_id(self):
            self.client.get('/item/{0}'.format(self.__get_item_id()))

        def __get_item_id(self) -> int:
            # Small id space so items accumulate several votes each.
            return randrange(10, 50)

        def __get_user_id(self) -> int:
            return randrange(1, 3)

25
stresstest-locusts/fulltext_search.py

@ -0,0 +1,25 @@
from locust import HttpUser, TaskSet, task
from faker import Faker
class RegistredUser(HttpUser):
    # Wait 5-9 seconds between tasks to mimic a human user.
    min_wait = 5000
    max_wait = 9000
    # The fulltext service is protected with HTTP basic auth.
    auth = ("admin", "changeme")

    @task
    class FulltextSearchStresstest(TaskSet):
        """Indexes random text and searches for random phrases."""

        def __init__(self, parent):
            super().__init__(parent)
            self.__faker = Faker("en_US")

        @task(1)
        def add_random_text(self):
            data = {
                'expression': self.__faker.text()
            }
            self.client.put('/fulltext', data, auth=RegistredUser.auth)

        @task(2)
        def search(self):
            self.client.get('/search/' + self.__faker.text(), auth=RegistredUser.auth)

27
stresstest-locusts/geolocation_search.py

@ -0,0 +1,27 @@
from locust import HttpUser, TaskSet, task
from faker import Faker
class RegistredUser(HttpUser):
    # Wait 5-9 seconds between tasks to mimic a human user.
    min_wait = 5000
    max_wait = 9000

    @task
    class GeolocationStresstest(TaskSet):
        """Adds random on-land places and searches near random coordinates."""

        def __init__(self, parent):
            super().__init__(parent)
            self.__faker = Faker("en_US")

        @task(1)
        def add_location(self):
            # faker's location_on_land() yields (lat, lng, place name, ...).
            coordonates = self.__faker.location_on_land()
            data = {
                'lat': coordonates[0],
                'lng': coordonates[1],
                'name': coordonates[2]
            }
            self.client.post('/location', data)

        @task(2)
        def search(self):
            self.client.get('/location/{0}/{1}'.format(self.__faker.latitude(), self.__faker.longitude()))

16
stresstest-locusts/random_demo.py

@ -0,0 +1,16 @@
from locust import HttpUser, TaskSet, task
class RegistredUser(HttpUser):
    # Wait 5-9 seconds between tasks to mimic a human user.
    min_wait = 5000
    max_wait = 9000

    @task
    class RandomStresstest(TaskSet):
        """Reads the last-numbers list twice as often as it inserts."""

        @task(2)
        def list(self):
            self.client.get('/random-list')

        @task(1)
        def insert_random_value(self):
            self.client.put('/random', {'lower': 0, 'upper': 10000})

37
stresstest-locusts/users.py

@ -0,0 +1,37 @@
from random import randrange
from locust import HttpUser, TaskSet, task
class RegistredUser(HttpUser):
    # Wait 5-9 seconds between tasks to mimic a human user.
    min_wait = 5000
    max_wait = 9000

    @task
    class CrudStresstest(TaskSet):
        """Stress-creates and stress-updates users against the CRUD API."""

        def __get_random_user(self):
            # Random (id, name, email) triple; id collisions are fine here.
            userid = str(randrange(0, 10000))
            username = 'testuser_{0}'.format(userid)
            email = 'some-email{0}@yahoo.com'.format(userid)
            return userid, username, email

        @task(1)
        def add_user(self):
            # Bug fix: the HTTP verbs were swapped between add_user and
            # update_user — both users services create with POST and update
            # with PUT.
            user_data = self.__get_random_user()
            user = {
                'id': user_data[0],
                'name': user_data[1],
                'email': user_data[2],
            }
            self.client.post('/users/' + user_data[0], user)

        @task(2)
        def update_user(self):
            user_data = self.__get_random_user()
            user = {
                'id': user_data[0],
                'name': 'upd_' + user_data[1],
                'email': 'upd_' + user_data[2],
            }
            self.client.put('/users/' + user_data[0], user)

10
tests/conftest.py

@ -0,0 +1,10 @@
import pytest
from utils import MongoDb
from pymongo import MongoClient
@pytest.fixture()
def demo_db() -> MongoClient:
    """Open a connection to the local demo database for a test."""
    # NOTE(review): the connection is never closed by the fixture — confirm
    # whether MongoDb.create_connection pools/reuses connections.
    db = MongoDb(host="localhost")
    db.create_connection()
    return db.connection

4
tests/requirements.txt

@ -0,0 +1,4 @@
pytest==6.2.2
pymongo==3.11.2
requests==2.25.1
Pillow==8.2.0

BIN
tests/resources/test.jpg

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.3 MiB

82
tests/test_baesian.py

@ -0,0 +1,82 @@
import pytest
import requests
from typing import Generator
from utils import Collection
baesian_host = "http://localhost:84"
name = "Cicero"
item_id = 1
userid_seven = 7
userid_eight = 8
# Pre-built document matching two 9-marks: nr_votes=2, sum_votes=18.
upsert_data = {
    "marks": [{"mark": 9, "userid": userid_eight}, {"mark": 9, "userid": userid_seven}],
    "name": name,
    "nr_votes": 2,
    "sum_votes": 18,
}
@pytest.fixture
def baesian(demo_db) -> Generator[Collection, None, None]:
    """Yield the baesian collection, dropping it after each test."""
    collection = Collection(demo_db, "baesian")
    yield collection
    collection.drop()
def test_upsert_item(baesian):
    # Creating an item initializes it with zero votes.
    requests.post(url="{0}/item/{1}".format(baesian_host, item_id), data={"name": name})
    response = baesian.get({})
    assert response[0]["name"] == name
    assert response[0]["nr_votes"] == 0


def test_add_vote(baesian):
    # Two 9-marks from different users accumulate into nr_votes/sum_votes.
    requests.post(url="{0}/item/{1}".format(baesian_host, item_id), data={"name": name})
    requests.put(
        url="{0}/item/vote/{1}".format(baesian_host, item_id),
        data={"userid": userid_eight, "mark": 9},
    )
    requests.put(
        url="{0}/item/vote/{1}".format(baesian_host, item_id),
        data={"userid": userid_seven, "mark": 9},
    )
    response = baesian.get({})
    assert len(response[0]["marks"]) == response[0]["nr_votes"]
    assert response[0]["name"] == name
    assert response[0]["sum_votes"] == 18


def test_get_item(baesian):
    # Seed directly in Mongo, then read through the API.
    baesian.upsert(key=item_id, data=upsert_data)
    response = requests.get(
        url="{0}/item/{1}".format(baesian_host, item_id),
    ).json()
    assert response["baesian_average"] == 9.0
    assert response["sum_votes"] == 18


def test_get_items(baesian):
    baesian.upsert(key=item_id, data=upsert_data)
    response = requests.get(
        url="{0}/items".format(baesian_host),
    ).json()
    assert response[0]["name"] == name
    assert len(response[0]["marks"]) > 0
def test_delete_item(baesian):
    """Deleting an item removes it from the collection.

    Bug fixes: the function was named delete_item, so pytest never collected
    it, and the old code called .json() on the response and then read
    .status_code from the resulting dict, which would have raised.
    """
    baesian.upsert(key=item_id, data=upsert_data)
    response = requests.delete(
        url="{0}/item/{1}".format(baesian_host, item_id),
    )
    assert response.status_code == 200
    db_response = baesian.get({})
    assert db_response == []

258
tests/test_bookcollection.py

@ -0,0 +1,258 @@
import pytest
import requests
import json
import datetime
import dateutil.parser
from typing import Generator
from pytest import FixtureRequest
from utils import Collection, get_random_objectid
headers = {"accept": "application/json", "Content-Type": "application/json"}
book_collection_host = "http://localhost:86"
# Two sample books used by every test; the isbn doubles as the API key.
books = [
    {
        "isbn": "978-1607965503",
        "name": "Lincoln the Unknown",
        "author": "Dale Carnegie",
        "publisher": "snowballpublishing",
        "nr_available": 5,
    },
    {
        "isbn": "9780262529624",
        "name": "Intro to Computation and Programming using Python",
        "author": "John Guttag",
        "publisher": "MIT Press",
        "nr_available": 3,
    },
]
@pytest.fixture
def book_collection(
    demo_db, request: FixtureRequest
) -> Generator[Collection, None, None]:
    """Yield the bookcollection collection, emptied after each test."""
    collection = Collection(demo_db, "bookcollection")
    yield collection
    collection.delete_many()


@pytest.fixture
def load_books(book_collection):
    # Seed both sample books, each under a random ObjectId.
    for book in books:
        book_collection.upsert(get_random_objectid(), book)
def test_book_add(book_collection):
    """PUT /book/<isbn> for each fixture book and verify persistence.

    Cleanups over the original: iterate the books directly instead of
    ``range(0, len(books))``, and drop the ``responses`` list whose
    ``all(...)`` check merely repeated the per-request assert.
    """
    for book in books:
        response = requests.put(
            url="{0}/book/{1}".format(book_collection_host, book["isbn"]),
            headers=headers,
            data=json.dumps(book),
        )
        assert response.status_code == 200
    db_response = book_collection.get({})
    assert len(db_response) == len(books)
    # assert authors (expected = fixture data, actual = what the DB stored)
    expected_authors = [book["author"] for book in books]
    actual_authors = [book["author"] for book in db_response]
    assert actual_authors == expected_authors
def test_get_book(load_books):
    """GET /book/<isbn> returns one of the seeded books."""
    book_url = "{0}/book/{1}".format(book_collection_host, books[0]["isbn"])
    response = requests.get(url=book_url)
    assert response.status_code == 200
    assert response.json() in books
def _get_books(limit, offset):
    """GET /book with paging params, assert 200, return the parsed list."""
    response = requests.get(
        url="{0}/book?limit={limit}&offset={offset}".format(
            book_collection_host, limit=limit, offset=offset
        ),
    )
    assert response.status_code == 200
    return response.json()


def test_list_all_books(load_books):
    """The /book listing honours the ``limit`` query parameter.

    The original duplicated the request/assert boilerplate for each limit;
    the shared ``_get_books`` helper removes that duplication.
    """
    # limit=1 returns only the first seeded book
    assert _get_books(limit=1, offset=0) == [books[0]]
    # limit=2 returns both seeded books
    assert _get_books(limit=2, offset=0) == books
def test_delete_book(load_books, book_collection):
    """DELETE /book/<isbn> removes exactly one book from the collection."""
    initial_count = len(book_collection.get({}))
    assert initial_count == len(books)
    delete_url = "{0}/book/{1}".format(book_collection_host, books[0]["isbn"])
    response = requests.delete(url=delete_url, headers=headers)
    assert response.status_code == 200
    # after delete there is one book fewer
    assert len(book_collection.get({})) == initial_count - 1
# ---------------------------------------------------------------------------
# borrow tests
# ---------------------------------------------------------------------------
# Users known to the borrow endpoints, keyed by userid.
users = {
    100: {"name": "John", "email": "john@email.com"},
    101: {"name": "Doe", "email": "doe@email.com"},
}
# Standard and maximum loan periods, in days.
return_days = 10
max_return_days = 20
# Midnight today — keeps the date arithmetic below independent of wall-clock time.
today_date = datetime.datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
# Seed borrow records. Record "3" deliberately has no return_date yet:
# the return test supplies one via PUT /borrow/return/3.
borrow_data = [
    {
        "id": "1",
        "userid": 100,
        "isbn": books[0]["isbn"],
        "borrow_date": today_date,
        "return_date": today_date + datetime.timedelta(days=return_days),
        "max_return_date": today_date + datetime.timedelta(days=max_return_days),
    },
    {
        "id": "2",
        "userid": 100,
        "isbn": books[1]["isbn"],
        "borrow_date": today_date,
        "return_date": today_date + datetime.timedelta(days=return_days),
        "max_return_date": today_date + datetime.timedelta(days=max_return_days),
    },
    {
        "id": "3",
        "userid": 101,
        "isbn": books[1]["isbn"],
        "borrow_date": today_date,
        "max_return_date": today_date + datetime.timedelta(days=max_return_days),
    },
]
@pytest.fixture
def users_collection(demo_db) -> Generator[Collection, None, None]:
    """Yield the users collection and drop it after the test."""
    coll = Collection(demo_db, "users")
    yield coll
    coll.drop()
@pytest.fixture
def load_users(users_collection):
    """Seed every fixture user, keyed by userid."""
    # iterate items() instead of re-indexing the dict by its own keys
    for userid, data in users.items():
        users_collection.upsert(userid, data)
@pytest.fixture
def borrow_collection(demo_db) -> Generator[Collection, None, None]:
    """Yield the borrowcollection collection; drop it on teardown."""
    borrows = Collection(demo_db, "borrowcollection")
    yield borrows
    borrows.drop()
@pytest.fixture
def load_book_borrows(borrow_collection):
    """Seed all borrow records under fresh random ObjectIds."""
    for record in borrow_data:
        borrow_collection.upsert(get_random_objectid(), record)
def test_borrow_book(load_users, load_books, borrow_collection, book_collection):
    """PUT /borrow/<userid> records the borrow and decrements stock.

    Bug fix: the original ASSIGNED the expected values into ``db_response``
    (``db_response["isbn"] = data["isbn"]`` etc.), so the field checks were
    no-ops. They are real assertions now.
    """
    data = {
        "id": "1",
        "userid": 100,
        "isbn": books[0]["isbn"],
        "borrow_date": str(today_date),
        "return_date": str(today_date + datetime.timedelta(days=return_days)),
        "max_return_date": str(today_date + datetime.timedelta(days=max_return_days)),
    }
    response = requests.put(
        url="{}/borrow/{}".format(book_collection_host, str(data["userid"])),
        headers=headers,
        data=json.dumps(data),
    )
    assert response.status_code == 200
    db_response = borrow_collection.get({})[0]
    assert db_response["isbn"] == data["isbn"]
    assert db_response["userid"] == data["userid"]
    assert db_response["return_date"] == dateutil.parser.parse(data["return_date"])
    assert db_response["borrow_date"] == dateutil.parser.parse(data["borrow_date"])
    assert db_response["max_return_date"] == dateutil.parser.parse(
        data["max_return_date"]
    )
    # check one less in book collection
    assert book_collection.get({})[0]["nr_available"] == books[0]["nr_available"] - 1
def test_list_a_book_borrow(load_book_borrows, load_books, load_users):
    """GET /borrow/<id> joins book and user details onto the borrow record."""
    response = requests.get(url="{}/borrow/{}".format(book_collection_host, "1"))
    assert response.status_code == 200
    borrow = response.json()
    expected = {
        "book_name": books[0]["name"],
        "user_name": users[100]["name"],
        "borrow_date": str(borrow_data[0]["borrow_date"]),
        "book_author": books[0]["author"],
    }
    for field, value in expected.items():
        assert borrow[field] == value
def test_book_borrows(load_book_borrows, load_books):
    """The /borrow listing honours the ``limit`` query parameter."""
    url_template = "{0}/borrow?limit={limit}&offset={offset}"
    # limit=1: exactly one known borrow with a parseable max_return_date
    single = requests.get(
        url=url_template.format(book_collection_host, limit=1, offset=0),
    )
    assert single.status_code == 200
    payload = single.json()
    assert len(payload) == 1
    known_isbns = [book["isbn"] for book in books]
    assert payload[0]["isbn"] in known_isbns
    parsed_date = dateutil.parser.parse(payload[0]["max_return_date"])
    assert isinstance(parsed_date, (datetime.datetime))
    # limit=2: two borrows come back
    double = requests.get(
        url=url_template.format(book_collection_host, limit=2, offset=0),
    )
    assert double.status_code == 200
    assert len(double.json()) == 2
def test_return_book(load_book_borrows, load_books, book_collection, borrow_collection):
    """PUT /borrow/return/<id> records the return and restores stock.

    Fixes: removed the dead ``book_collection.get({})`` call whose result was
    discarded, and asserted the status code before parsing the JSON body.
    """
    return_date = str(today_date + datetime.timedelta(days=4))
    response = requests.put(
        url="{}/borrow/return/{}".format(book_collection_host, "3"),
        headers=headers,
        data=json.dumps({"id": "3", "return_date": return_date}),
    )
    assert response.status_code == 200
    response_json = response.json()
    assert response_json["id"] == "3"
    assert response_json["return_date"] == return_date
    # borrow "3" was for books[1]; one copy comes back into stock
    assert book_collection.get({})[1]["nr_available"] == books[1]["nr_available"] + 1

57
tests/test_fulltext_search.py

@@ -0,0 +1,57 @@
import pytest
from pytest import FixtureRequest
import requests
from requests.auth import HTTPBasicAuth
import datetime
from typing import Generator
from bson.objectid import ObjectId
from utils import Collection
# Base URL of the fulltext-search service and the two phrases the tests index.
fulltext_search_host = "http://localhost:82"
expression_one = "ana has many more apples"
expression_two = "john has many more apples"
@pytest.fixture
def fulltext_search(
    demo_db, request: FixtureRequest
) -> Generator[Collection, None, None]:
    """Yield the fulltext_search collection; delete the parametrized
    expression's documents on teardown.

    Bug fix: each indirect param is a single expression STRING, so the
    original ``for key in param`` iterated its characters and the cleanup
    deleted nothing; it also crashed with TypeError when no param was set.
    """
    collection = Collection(demo_db, "fulltext_search")
    yield collection
    param = getattr(request, "param", None)
    if param:
        collection.delete_many("app_text", param)
@pytest.mark.parametrize("fulltext_search", [expression_one], indirect=True)
def test_add_expression(fulltext_search):
    """PUT /fulltext stores the expression in the collection."""
    credentials = HTTPBasicAuth("admin", "changeme")
    requests.put(
        url="{0}/fulltext".format(fulltext_search_host),
        data={"expression": expression_one},
        auth=credentials,
    )
    stored = fulltext_search.get({"app_text": expression_one})
    assert stored[0]["app_text"] == expression_one
@pytest.mark.parametrize(
    "fulltext_search", [expression_one, expression_two], indirect=True
)
def test_search(fulltext_search):
    """GET /search/<term> finds every indexed document containing the term."""
    seeded = (
        (b"foo-bar-quux", expression_one),
        (b"foo-bar-baaz", expression_two),
    )
    for raw_oid, text in seeded:
        fulltext_search.upsert(
            ObjectId(raw_oid),
            {"app_text": text, "indexed_date": datetime.datetime.utcnow()},
        )
    matches = requests.get(
        url="{0}/search/apples".format(fulltext_search_host),
        auth=HTTPBasicAuth("admin", "changeme"),
    ).json()
    assert "apples" in matches[0]["text"]
    assert "apples" in matches[1]["text"]

64
tests/test_geolocation_search.py

@@ -0,0 +1,64 @@
import pytest
import requests
from typing import Generator
from bson.objectid import ObjectId
from utils import Collection
# Base URL of the geolocation service plus two nearby reference points
# (lat/lng in degrees).
geolocation_host = "http://localhost:83"
new_york = {"name": "NewYork", "lat": 40.730610, "lng": -73.935242}
jersey_city = {"name": "JerseyCity", "lat": 40.719074, "lng": -74.050552}
@pytest.fixture
def places(demo_db, request) -> Generator[Collection, None, None]:
    """Yield the places collection; delete the parametrized place's
    documents on teardown.

    Bug fix: the original looped ``for key in param`` over the param dict's
    keys but never used ``key`` — it issued the same
    ``delete_many("name", param["name"])`` once per key. One call suffices.
    """
    collection = Collection(demo_db, "places")
    param = getattr(request, "param", None)
    yield collection
    if param:
        collection.delete_many("name", param["name"])
@pytest.mark.parametrize("places", [new_york], indirect=True)
def test_new_location(places):
requests.post("{0}/location".format(geolocation_host), data=new_york)
response = places.get({})
assert response[0]["name"] == new_york["name"]
coordinates = response[0]["location"]["coordinates"]
assert coordinates == [new_york["lng"], new_york["lat"]]
@pytest.mark.parametrize("places", [new_york, jersey_city], indirect=True)
def test_get_near(places):
places.upsert(
ObjectId(b"foo-bar-baaz"),
{
"name": new_york["name"],
"location": {
"type": "Point",
"coordinates": [new_york["lng"], new_york["lat"]],
},
},
)
places.upsert(
ObjectId(b"foo-bar-quux"),
{
"name": jersey_city["name"],
"location": {
"type": "Point",
"coordinates": [jersey_city["lng"], jersey_city["lat"]],
},
},
)
response = requests.get(
url="{0}/location/{1}/{2}".format(
geolocation_host, new_york["lat"], new_york["lng"]
),
data={"max_distance": 50000},
).json()
assert response[0]["name"] == new_york["name"]
assert response[1]["name"] == jersey_city["name"]

111
tests/test_mqtt.py

@@ -0,0 +1,111 @@
import json
import requests
import pytest
import time
import os
from typing import Generator, Any
import paho.mqtt.client as mqtt
from utils import Collection
# Query endpoint of the local InfluxDB instance (database `influx`).
influx_query_url = "http://localhost:8086/query?db=influx&"
# paho-mqtt publish() result code 0 means success
SUCCESS = 0
@pytest.fixture
def sensors(demo_db) -> Generator[Collection, None, None]:
    """Yield the sensors collection; drop it on teardown."""
    sensors_collection = Collection(demo_db, "sensors")
    yield sensors_collection
    sensors_collection.drop()
@pytest.fixture
def mqtt_client() -> Generator[mqtt.Client, None, None]:
    """Yield an MQTT client connected to localhost:1883, authenticated
    with the credentials stored under ../secrets; disconnect on teardown.

    Changes: dropped the redundant ``username = ""`` initializers and
    stripped surrounding whitespace from the credential files — a trailing
    newline in a secrets file would otherwise break authentication
    (NOTE(review): confirm the secrets files are meant to be single-line).
    """
    parent_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    secrets_path = os.path.join(parent_path, "secrets")
    with open(os.path.join(secrets_path, "mqtt_user.txt"), "r") as file:
        username = file.read().strip()
    with open(os.path.join(secrets_path, "mqtt_pass.txt"), "r") as file:
        password = file.read().strip()
    mqtt_client = mqtt.Client()
    mqtt_client.username_pw_set(username, password)
    mqtt_client.connect("localhost", 1883)
    yield mqtt_client
    mqtt_client.disconnect()
def test_db_insert(mqtt_client, sensors):
    """Publishing a sensor reading lands in both InfluxDB and MongoDB.

    Fix: removed the mid-test ``mqtt_client.disconnect()`` — the fixture
    owns the client lifecycle and already disconnects on teardown, so the
    test was disconnecting the same client twice.
    """
    # publish message
    measurement = "temperature"
    cleanup_influx(measurement)
    mqtt_response = publish_message(
        mqtt_client,
        "sensors",
        json.dumps({"sensor_id": measurement, "sensor_value": 10}),
    )
    assert mqtt_response == SUCCESS
    # influx: the reading shows up as its own measurement
    query = "q=SELECT * FROM {}".format(measurement)
    response = requests.get(influx_query_url + query)
    results = response.json()["results"]
    series = results[0]["series"]
    values = series[0]["values"]
    name = series[0]["name"]
    assert len(results) == 1
    assert name == measurement
    assert values[0][1] == 10
    # mongo: the reading is appended to the sensor document
    response = sensors.get({})
    items = response[0]["items"]
    assert len(items) == 1
    assert items[0]["value"] == 10
    # delete data
    cleanup_influx(measurement)
def test_mqtt_publish(mqtt_client, sensors):
    """Publishing a reading triggers an averages/<id> message back.

    Fixes: subscribe and start the network loop BEFORE publishing (the
    original subscribed after the publish, so the reply could be missed),
    and stop the loop that was previously left running.
    NOTE(review): assertion failures inside ``check_message`` run on the
    paho loop thread and will not fail this test directly — consider
    collecting messages into a list and asserting here.
    """
    measurement = "temperature"
    cleanup_influx(measurement)
    mqtt_client.subscribe("averages/{}".format(measurement))
    mqtt_client.on_message = check_message
    mqtt_client.loop_start()
    publish_message(
        mqtt_client,
        "sensors",
        json.dumps({"sensor_id": measurement, "sensor_value": 10}),
    )
    # give the loop a moment to deliver the averages reply, then stop it
    time.sleep(0.5)
    mqtt_client.loop_stop()
    cleanup_influx(measurement)
    sensors.delete(measurement)
def publish_message(mqtt_client, topic: str, data: str) -> int:
    """Publish *data* on *topic* and return the client's result code."""
    publish_result = mqtt_client.publish(topic, data)
    # small grace period so the broker can process the message
    time.sleep(0.5)
    return publish_result[0]
def cleanup_influx(measurement: str) -> int:
    """Drop all points of *measurement* from influx; return the HTTP status."""
    delete_url = 'http://localhost:8086/query?db=influx&q=DELETE FROM "{}"'.format(
        measurement
    )
    response = requests.post(delete_url)
    return response.status_code
def check_message(client: mqtt.Client, userdata: Any, msg: mqtt.MQTTMessage):
    """on_message callback: assert the received payload carries value 10."""
    decoded_data = json.loads(msg.payload.decode("utf-8"))
    assert decoded_data["sensor_value"] == 10

66
tests/test_photo.py

@@ -0,0 +1,66 @@
import pytest
import requests
import os
from pathlib import Path
from PIL import Image
photo_process_host = "http://localhost:85"
parent_path = Path(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
image_path = os.path.join(str(parent_path) + "/tests/resources/test.jpg")
storage_path = os.path.join(str(parent_path) + "/container-storage")
image_id = "101"
@pytest.fixture
def set_photo():
    """Upload the test image under ``image_id``; return the response.

    Fix: the original leaked the file handle opened inline in the
    ``files`` dict — the context manager closes it deterministically.
    """
    with open(image_path, "rb") as image_file:
        response = requests.put(
            url="{0}/photo/{1}".format(photo_process_host, image_id),
            files={"file": ("test.jpg", image_file, "image/jpeg")},
        )
    return response
def test_put_photo(set_photo):
    """Uploading stores <image_id>.jpg in the container storage directory."""
    assert set_photo.status_code == 200
    stored_image = Path(os.path.join(storage_path, "{}.jpg".format(image_id)))
    assert stored_image.exists()
    # cleanup
    stored_image.unlink()
def test_get_photo_and_similar(set_photo):
    """A resized download is a JPEG and matches itself via /photo/similar.

    Fixes: the file handle passed to ``files`` and the PIL image were both
    leaked; ``with`` blocks now close them deterministically.
    """
    # get photo resized to 100
    response = requests.get(
        url="{0}/photo/{1}".format(photo_process_host, image_id), data={"resize": 100}
    )
    assert response.status_code == 200
    temp_image_path = os.path.join(str(parent_path), "tests", "resources", "temp.jpg")
    # store the resized photo
    with open(temp_image_path, "wb") as f:
        f.write(response.content)
    with Image.open(temp_image_path) as im:
        assert im.format == "JPEG"
    # search for photo similar to the resized one
    with open(temp_image_path, "rb") as temp_file:
        response = requests.put(
            url="{0}/photo/similar".format(photo_process_host),
            files={"file": ("temp.jpg", temp_file, "image/jpeg")},
        )
    assert response.status_code == 200
    assert response.json() == [int(image_id)]
    # cleanup
    os.remove(temp_image_path)
def test_delete_image(set_photo):
    """DELETE /photo/<id> removes the stored file.

    Fix: ``assert x == False`` replaced by the idiomatic ``assert not x``.
    """
    image_storage_path = Path(os.path.join(storage_path, "{}.jpg".format(image_id)))
    assert image_storage_path.exists()
    # delete the image
    requests.delete(url="{0}/photo/{1}".format(photo_process_host, image_id))
    assert not image_storage_path.exists()

55
tests/test_random_demo.py

@@ -0,0 +1,55 @@
import pytest, requests
import datetime
from utils import Collection
from typing import Generator
random_host = "http://localhost:800"
@pytest.fixture
def random_numbers(demo_db) -> Generator[Collection, None, None]:
    """Yield the random_numbers collection; drop it on teardown."""
    numbers = Collection(demo_db, "random_numbers")
    yield numbers
    numbers.drop()
def test_random_insert(random_numbers):
    """PUT /random stores one generated number under the 'lasts' document."""
    requests.put(
        url="{0}/random".format(random_host),
        data={"upper": 100, "lower": 10},
    ).json()
    documents = random_numbers.get({})
    assert len(documents) == 1
    document = documents[0]
    assert document["_id"] == "lasts"
    stored_items = document["items"]
    assert len(stored_items) == 1
    newest = stored_items[0]
    assert isinstance(newest["date"], datetime.datetime)
    assert 10 < int(newest["value"]) < 100
def test_random_generator():
    """GET /random?lower&upper yields a number inside the interval."""
    generated = requests.get(
        url="{0}/random?lower=10&upper=100".format(random_host)
    ).json()
    # NOTE(review): the bounds are checked exclusively here — confirm the
    # service treats lower/upper as exclusive, otherwise use <=
    assert 10 < int(generated) < 100
def test_last_number_list(random_numbers):
    """GET /random-list returns the stored values in insertion order."""
    expected_values = [10, 11, 12, 13, 14]
    history = [
        {"date": datetime.datetime(2021, 3, day, 0, 0, 0), "value": value}
        for day, value in enumerate(expected_values, start=1)
    ]
    random_numbers.upsert("lasts", {"items": history})
    response = requests.get(url="{0}/random-list".format(random_host)).json()
    assert response == expected_values

62
tests/test_users.py

@@ -0,0 +1,62 @@
import pytest, requests
from utils import Collection
from typing import Generator
users_host = "http://localhost:81"
@pytest.fixture
def users(demo_db) -> Generator[Collection, None, None]:
    """Yield the users collection; drop it on teardown."""
    users_coll = Collection(demo_db, "users")
    yield users_coll
    users_coll.drop()
def test_get_user(users):
    """GET /users/<id> returns the stored user document."""
    users.upsert(100, {"name": "John", "email": "test@email.eu"})
    fetched = requests.get(url="{0}/users/100".format(users_host)).json()
    expected = {"_id": 100, "email": "test@email.eu", "name": "John"}
    for field, value in expected.items():
        assert fetched[field] == value
def test_create_user(users):
    """POST /users/<id> persists the new user document."""
    response = requests.post(
        url="{0}/users/101".format(users_host),
        data={"name": "John Doe", "email": "johny@email.eu"},
    )
    assert response.status_code == 200
    stored = users.get({"_id": 101})
    assert len(stored) == 1
    created = stored[0]
    assert created["_id"] == 101
    assert created["email"] == "johny@email.eu"
    assert created["name"] == "John Doe"
def test_update_user(users):
    """PUT /users/<id> overwrites the stored email."""
    users.upsert(100, {"name": "John", "email": "test@email.eu"})
    update_url = "{0}/users/100".format(users_host)
    payload = {"name": "John", "email": "john@email.com"}
    requests.put(url=update_url, data=payload).json()
    stored = users.get({"_id": 100})
    assert stored[0] == {"_id": 100, "name": "John", "email": "john@email.com"}
def test_get_and_delete_users(users):
    """GET /users lists everyone; DELETE /users/<id> removes one user."""
    seeded = {
        100: {"name": "John", "email": "john@email.com"},
        101: {"name": "Doe", "email": "doe@email.com"},
    }
    for userid, data in seeded.items():
        users.upsert(userid, data)
    # testing get request
    listing = requests.get(url="{}/users".format(users_host)).json()
    assert listing == [
        {"userid": 100, "name": "John", "email": "john@email.com"},
        {"userid": 101, "name": "Doe", "email": "doe@email.com"},
    ]
    requests.delete(url="{}/users/100".format(users_host))
    # asserting the delete has been done
    assert users.get({"_id": 100}) == []

69
tests/test_users_fastapi.py

@@ -0,0 +1,69 @@
import pytest
import requests
import json
from utils import Collection
from typing import Generator
users_host = "http://localhost:88"
headers = {"accept": "application/json", "Content-Type": "application/json"}
user_id = 100
user_data = {user_id: {"name": "John", "email": "test@email.eu"}}
@pytest.fixture
def users(demo_db) -> Generator[Collection, None, None]:
    """Yield the users collection and drop it after the test."""
    collection = Collection(demo_db, "users")
    yield collection
    collection.drop()
@pytest.fixture
def load_users(users):
    """Seed the users collection with the canonical test user.

    Fix: index ``user_data`` by ``user_id`` instead of the literal ``100``
    so the fixture stays correct if ``user_id`` ever changes.
    """
    users.upsert(user_id, user_data[user_id])
def test_get_user(load_users):
    """GET /users/<id> returns the seeded user's fields."""
    fetched = requests.get(url="{0}/users/100".format(users_host)).json()
    expected = user_data[user_id]
    assert fetched["email"] == expected["email"]
    assert fetched["name"] == expected["name"]
def test_create_user(users):
    """POST /users/<id> persists the user document."""
    payload = dict(user_data[user_id])
    payload["userid"] = user_id
    response = requests.post(
        url="{0}/users/{1}".format(users_host, str(user_id)),
        headers=headers,
        data=json.dumps(payload),
    )
    assert response.status_code == 200
    stored = users.get({})
    assert len(stored) == 1
    created = stored[0]
    assert created["_id"] == user_id
    assert created["email"] == user_data[user_id]["email"]
    assert created["name"] == user_data[user_id]["name"]
def test_update_user(users, load_users):
    """PUT /users/<id> updates the email and echoes it back."""
    payload = dict(user_data[user_id])
    payload.update(userid=user_id, email="john@email.com")
    response = requests.put(
        url="{}/users/{}".format(users_host, str(user_id)),
        headers=headers,
        data=json.dumps(payload),
    )
    assert response.status_code == 200
    assert response.json()["email"] == "john@email.com"
    stored = users.get({})
    assert stored[0] == {"_id": user_id, "name": "John", "email": "john@email.com"}
def test_get_and_delete_users(users, load_users):
    """GET /users lists the seeded user; DELETE removes it."""
    listing = requests.get(url="{}/users".format(users_host)).json()
    # look for user before deleting
    assert listing == [{"userid": user_id, **user_data[user_id]}]
    requests.delete(url="{}/users/100".format(users_host))
    assert users.get({"_id": 100}) == []

42
tests/utils.py

@@ -0,0 +1,42 @@
import uuid
from pymongo import MongoClient, database
from bson.objectid import ObjectId
def get_random_objectid():
    """Return an ObjectId built from the first 12 bytes of a fresh UUID."""
    seed = str(uuid.uuid4())[:12]
    return ObjectId(seed.encode("utf-8"))
class MongoDb:
    """Thin holder for a connection to one MongoDB database."""

    def __init__(self, host="mongodb", dbname="demo") -> None:
        # stored privately; the client is only built in create_connection()
        self.__server = host
        self.__database = dbname

    def create_connection(self):
        """Open a MongoClient and bind ``self.connection`` to the database."""
        self.connection = MongoClient(self.__server, 27017)[self.__database]
class Collection:
    """Small facade over a single pymongo collection used by the tests."""

    def __init__(self, db: database.Database, collection_name: str):
        self.__db = db
        self.__collection = collection_name

    def _coll(self):
        """Resolve the underlying pymongo collection object."""
        return self.__db[self.__collection]

    def get(self, query: dict, limit: int = 10, offset: int = 0):
        """Return up to *limit* documents matching *query*, skipping *offset*."""
        cursor = self._coll().find(query).limit(limit).skip(offset)
        return list(cursor)

    def upsert(self, key, data: dict):
        """Insert or update the document with ``_id == key``."""
        self._coll().update_one({"_id": key}, {"$set": data}, upsert=True)

    def delete(self, key):
        """Delete the single document with ``_id == key``."""
        self._coll().delete_one({"_id": key})

    def delete_many(self, index=None, key=None):
        """Delete documents where ``index == key``; with no args, delete all."""
        query = {index: key} if index and key else {}
        self._coll().delete_many(query)

    def drop(self):
        """Drop the whole collection."""
        self._coll().drop()
Loading…
Cancel
Save